Merge branch 'release/2.6.0' into develop

commit c5bde07aee by Milan Kolar, 2020-03-10 20:41:17 +01:00
33 changed files with 138 additions and 1010 deletions

View file

@@ -9,7 +9,7 @@ from pypeapp import config
import logging
log = logging.getLogger(__name__)
__version__ = "2.5.0"
__version__ = "2.6.0"
PROJECT_PLUGINS_PATH = os.environ.get("PYPE_PROJECT_PLUGINS")
PACKAGE_DIR = os.path.dirname(__file__)

View file

@@ -26,11 +26,7 @@ class SyncToAvalonEvent(BaseEvent):
dbcon = DbConnector()
-ignore_entTypes = [
-    "socialfeed", "socialnotification", "note",
-    "assetversion", "job", "user", "reviewsessionobject", "timer",
-    "timelog", "auth_userrole", "appointment", "notelabellink"
-]
+interest_entTypes = ["show", "task"]
ignore_ent_types = ["Milestone"]
ignore_keys = ["statusid", "thumbid"]
@@ -137,9 +133,10 @@ class SyncToAvalonEvent(BaseEvent):
if self._avalon_ents_by_id is None:
self._avalon_ents_by_id = {}
proj, ents = self.avalon_entities
-self._avalon_ents_by_id[proj["_id"]] = proj
-for ent in ents:
-    self._avalon_ents_by_id[ent["_id"]] = ent
+if proj:
+    self._avalon_ents_by_id[proj["_id"]] = proj
+    for ent in ents:
+        self._avalon_ents_by_id[ent["_id"]] = ent
return self._avalon_ents_by_id
@property
@@ -159,13 +156,14 @@ class SyncToAvalonEvent(BaseEvent):
if self._avalon_ents_by_ftrack_id is None:
self._avalon_ents_by_ftrack_id = {}
proj, ents = self.avalon_entities
ftrack_id = proj["data"]["ftrackId"]
self._avalon_ents_by_ftrack_id[ftrack_id] = proj
for ent in ents:
ftrack_id = ent["data"].get("ftrackId")
if ftrack_id is None:
continue
self._avalon_ents_by_ftrack_id[ftrack_id] = ent
if proj:
ftrack_id = proj["data"]["ftrackId"]
self._avalon_ents_by_ftrack_id[ftrack_id] = proj
for ent in ents:
ftrack_id = ent["data"].get("ftrackId")
if ftrack_id is None:
continue
self._avalon_ents_by_ftrack_id[ftrack_id] = ent
return self._avalon_ents_by_ftrack_id
@property
@@ -508,7 +506,7 @@ class SyncToAvalonEvent(BaseEvent):
found_actions = set()
for ent_info in entities_info:
entityType = ent_info["entityType"]
-if entityType in self.ignore_entTypes:
+if entityType not in self.interest_entTypes:
continue
entity_type = ent_info.get("entity_type")
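
Note on the hunk above: the handler switches from a deny-list to an allow-list, so it now reacts only to "show" and "task" entity types and ignores anything ftrack adds later by default. A minimal sketch of the new control flow, using the names from the hunk (the shape of entities_info is assumed from the surrounding context):

# --- illustrative sketch, not part of the commit ---
interest_entTypes = ["show", "task"]

def iter_interesting(entities_info):
    for ent_info in entities_info:
        if ent_info["entityType"] not in interest_entTypes:
            continue  # previously skipped only types listed in ignore_entTypes
        yield ent_info
# --- end sketch ---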

View file

@@ -369,13 +369,6 @@ def main(args):
# store socket connection object
ObjectFactory.sock = sock
-statuse_names = {
-    "main": "Main process",
-    "storer": "Event Storer",
-    "processor": "Event Processor"
-}
-ObjectFactory.status_factory = StatusFactory(statuse_names)
-ObjectFactory.status_factory["main"].update(server_info)
_returncode = 0
try:
@@ -429,6 +422,13 @@ if __name__ == "__main__":
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
+statuse_names = {
+    "main": "Main process",
+    "storer": "Event Storer",
+    "processor": "Event Processor"
+}
+ObjectFactory.status_factory = StatusFactory(statuse_names)
checker_thread = OutputChecker()
ObjectFactory.checker_thread = checker_thread
checker_thread.start()
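
The StatusFactory construction moves out of main() and under the entry-point guard, so it happens once at process start-up and only in the process actually launched as a script (the server_info update on the "main" status is dropped entirely). A runnable toy of the reordering; the two stub classes here are stand-ins, not the real pype implementations:

# --- illustrative sketch, not part of the commit ---
class StatusFactory(dict):
    """Stand-in: maps a status name to a mutable info dict."""
    def __init__(self, names):
        super(StatusFactory, self).__init__({key: {} for key in names})

class ObjectFactory(object):
    """Stand-in for the module-level object registry."""
    status_factory = None

def main():
    # main() can now assume the factory already exists and only update it.
    ObjectFactory.status_factory["main"].update({"started": True})

if __name__ == "__main__":
    ObjectFactory.status_factory = StatusFactory({
        "main": "Main process",
        "storer": "Event Storer",
        "processor": "Event Processor"
    })
    main()
# --- end sketch ---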

View file

@@ -432,7 +432,7 @@ def add_deadline_tab(node):
node.addKnob(nuke.Tab_Knob("Deadline"))
knob = nuke.Int_Knob("deadlineChunkSize", "Chunk Size")
-knob.setValue(1)
+knob.setValue(0)
node.addKnob(knob)
knob = nuke.Int_Knob("deadlinePriority", "Priority")

View file

@@ -1,60 +0,0 @@
import os
import subprocess
import pyblish.api
CREATE_NO_WINDOW = 0x08000000
def deadline_command(cmd):
# Find Deadline
path = os.environ.get("DEADLINE_PATH", None)
assert path is not None, "Variable 'DEADLINE_PATH' must be set"
executable = os.path.join(path, "deadlinecommand")
if os.name == "nt":
executable += ".exe"
assert os.path.exists(
executable), "Deadline executable not found at %s" % executable
assert cmd, "Must have a command"
query = (executable, cmd)
process = subprocess.Popen(query, stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
creationflags=CREATE_NO_WINDOW)
out, err = process.communicate()
return out
class CollectDeadlineUser(pyblish.api.ContextPlugin):
"""Retrieve the local active Deadline user"""
order = pyblish.api.CollectorOrder + 0.499
label = "Deadline User"
hosts = ['maya', 'fusion', 'nuke']
families = [
"renderlayer",
"saver.deadline",
"imagesequence"
]
def process(self, context):
"""Inject the current working file"""
user = None
try:
user = deadline_command("GetCurrentUserName").strip()
except:
self.log.warning("Deadline command seems not to be working")
if not user:
self.log.warning("No Deadline user found. "
"Do you have Deadline installed?")
return
self.log.info("Found Deadline user: {}".format(user))
context.data['deadlineUser'] = user

View file

@@ -1,127 +0,0 @@
import os
import json
import re
import pyblish.api
import clique
class CollectJSON(pyblish.api.ContextPlugin):
""" Collecting the json files in current directory. """
label = "JSON"
order = pyblish.api.CollectorOrder
hosts = ['maya']
def version_get(self, string, prefix):
""" Extract version information from filenames. Code from Foundry"s
nukescripts.version_get()
"""
regex = r"[/_.]{}\d+".format(prefix)
matches = re.findall(regex, string, re.IGNORECASE)
if not len(matches):
msg = "No '_{}#' found in '{}'".format(prefix, string)
raise ValueError(msg)
return matches[-1:][0][1], re.search(r"\d+", matches[-1:][0]).group()
def process(self, context):
current_file = context.data.get("currentFile", '')
# Skip if current file is not a directory
if not os.path.isdir(current_file):
return
# Traverse directory and collect collections from json files.
instances = []
for root, dirs, files in os.walk(current_file):
for f in files:
if f.endswith(".json"):
with open(os.path.join(root, f)) as json_data:
for data in json.load(json_data):
instances.append(data)
# Validate instance based on supported families.
valid_families = ["img", "cache", "scene", "mov"]
valid_data = []
for data in instances:
families = data.get("families", []) + [data["family"]]
family_type = list(set(families) & set(valid_families))
if family_type:
valid_data.append(data)
# Create existing output instance.
scanned_dirs = []
files = []
collections = []
for data in valid_data:
if "collection" not in data.keys():
continue
if data["collection"] is None:
continue
instance_collection = clique.parse(data["collection"])
try:
version = self.version_get(
os.path.basename(instance_collection.format()), "v"
)[1]
except KeyError:
# Ignore any output that is not versioned
continue
# Getting collections of all previous versions and current version
for count in range(1, int(version) + 1):
# Generate collection
version_string = "v" + str(count).zfill(len(version))
head = instance_collection.head.replace(
"v" + version, version_string
)
collection = clique.Collection(
head=head.replace("\\", "/"),
padding=instance_collection.padding,
tail=instance_collection.tail
)
collection.version = count
# Scan collection directory
scan_dir = os.path.dirname(collection.head)
if scan_dir not in scanned_dirs and os.path.exists(scan_dir):
for f in os.listdir(scan_dir):
file_path = os.path.join(scan_dir, f)
files.append(file_path.replace("\\", "/"))
scanned_dirs.append(scan_dir)
# Match files to collection and add
for f in files:
if collection.match(f):
collection.add(f)
# Skip if no files were found in the collection
if not list(collection):
continue
# Skip existing collections
if collection in collections:
continue
instance = context.create_instance(name=data["name"])
version = self.version_get(
os.path.basename(collection.format()), "v"
)[1]
basename = os.path.basename(collection.format())
instance.data["label"] = "{0} - {1}".format(
data["name"], basename
)
families = data["families"] + [data["family"]]
family = list(set(valid_families) & set(families))[0]
instance.data["family"] = family
instance.data["families"] = ["output"]
instance.data["collection"] = collection
instance.data["version"] = int(version)
instance.data["publish"] = False
collections.append(collection)

View file

@@ -1,88 +0,0 @@
import os
import re
import copy
from avalon import io
from pprint import pprint
import pyblish.api
from avalon import api
texture_extensions = ['.tif', '.tiff', '.jpg', '.jpeg', '.tx', '.png', '.tga',
'.psd', '.dpx', '.hdr', '.hdri', '.exr', '.sxr', '.psb']
class CollectTextures(pyblish.api.ContextPlugin):
"""
Gather all texture files in working directory, traversing whole structure.
"""
order = pyblish.api.CollectorOrder
targets = ["texture"]
label = "Textures"
hosts = ["shell"]
def process(self, context):
if os.environ.get("PYPE_PUBLISH_PATHS"):
paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep)
else:
cwd = context.get("workspaceDir", os.getcwd())
paths = [cwd]
textures = []
for path in paths:
for dir, subdir, files in os.walk(path):
textures.extend(
os.path.join(dir, x) for x in files
if os.path.splitext(x)[1].lower() in texture_extensions)
self.log.info("Got {} texture files.".format(len(textures)))
if len(textures) < 1:
raise RuntimeError("no textures found.")
asset_name = os.environ.get("AVALON_ASSET")
family = 'texture'
subset = 'Main'
project = io.find_one({'type': 'project'})
asset = io.find_one({
'type': 'asset',
'name': asset_name
})
context.data['project'] = project
context.data['asset'] = asset
for tex in textures:
self.log.info("Processing: {}".format(tex))
name, ext = os.path.splitext(tex)
simple_name = os.path.splitext(os.path.basename(tex))[0]
instance = context.create_instance(simple_name)
instance.data.update({
"subset": subset,
"asset": asset_name,
"label": simple_name,
"name": simple_name,
"family": family,
"families": [family, 'ftrack'],
})
instance.data['destination_list'] = list()
instance.data['representations'] = list()
instance.data['source'] = 'pype command'
texture_data = {}
texture_data['anatomy_template'] = 'texture'
texture_data["ext"] = ext
texture_data["label"] = simple_name
texture_data["name"] = "texture"
texture_data["stagingDir"] = os.path.dirname(tex)
texture_data["files"] = os.path.basename(tex)
texture_data["thumbnail"] = False
texture_data["preview"] = False
instance.data["representations"].append(texture_data)
self.log.info("collected instance: {}".format(instance.data))
self.log.info("All collected.")

View file

@@ -1,51 +0,0 @@
import os
import json
import datetime
import time
import pyblish.api
import clique
class ExtractJSON(pyblish.api.ContextPlugin):
""" Extract all instances to a serialized json file. """
order = pyblish.api.IntegratorOrder
label = "JSON"
hosts = ['maya']
def process(self, context):
workspace = os.path.join(
os.path.dirname(context.data["currentFile"]), "workspace",
"instances")
if not os.path.exists(workspace):
os.makedirs(workspace)
output_data = []
for instance in context:
self.log.debug(instance['data'])
data = {}
for key, value in instance.data.iteritems():
if isinstance(value, clique.Collection):
value = value.format()
try:
json.dumps(value)
data[key] = value
except KeyError:
msg = "\"{0}\"".format(value)
msg += " in instance.data[\"{0}\"]".format(key)
msg += " could not be serialized."
self.log.debug(msg)
output_data.append(data)
timestamp = datetime.datetime.fromtimestamp(
time.time()).strftime("%Y%m%d-%H%M%S")
filename = timestamp + "_instances.json"
with open(os.path.join(workspace, filename), "w") as outfile:
outfile.write(json.dumps(output_data, indent=4, sort_keys=True))

View file

@@ -1,86 +0,0 @@
import os
import pyblish.api
import subprocess
import clique
class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
"""Resolve any dependency issies
This plug-in resolves any paths which, if not updated might break
the published file.
The order of families is important, when working with lookdev you want to
first publish the texture, update the texture paths in the nodes and then
publish the shading network. Same goes for file dependent assets.
"""
label = "Extract Quicktime"
order = pyblish.api.ExtractorOrder
families = ["imagesequence", "render", "write", "source"]
hosts = ["shell"]
def process(self, instance):
# fps = instance.data.get("fps")
# start = instance.data.get("startFrame")
# stagingdir = os.path.normpath(instance.data.get("stagingDir"))
#
# collected_frames = os.listdir(stagingdir)
# collections, remainder = clique.assemble(collected_frames)
#
# full_input_path = os.path.join(
# stagingdir, collections[0].format('{head}{padding}{tail}')
# )
# self.log.info("input {}".format(full_input_path))
#
# filename = collections[0].format('{head}')
# if not filename.endswith('.'):
# filename += "."
# movFile = filename + "mov"
# full_output_path = os.path.join(stagingdir, movFile)
#
# self.log.info("output {}".format(full_output_path))
#
# config_data = instance.context.data['output_repre_config']
#
# proj_name = os.environ.get('AVALON_PROJECT', '__default__')
# profile = config_data.get(proj_name, config_data['__default__'])
#
# input_args = []
# # overrides output file
# input_args.append("-y")
# # preset's input data
# input_args.extend(profile.get('input', []))
# # necessary input data
# input_args.append("-start_number {}".format(start))
# input_args.append("-i {}".format(full_input_path))
# input_args.append("-framerate {}".format(fps))
#
# output_args = []
# # preset's output data
# output_args.extend(profile.get('output', []))
# # output filename
# output_args.append(full_output_path)
# mov_args = [
# "ffmpeg",
# " ".join(input_args),
# " ".join(output_args)
# ]
# subprocess_mov = " ".join(mov_args)
# sub_proc = subprocess.Popen(subprocess_mov)
# sub_proc.wait()
#
# if not os.path.isfile(full_output_path):
# raise("Quicktime wasn't created succesfully")
#
# if "representations" not in instance.data:
# instance.data["representations"] = []
#
# representation = {
# 'name': 'mov',
# 'ext': 'mov',
# 'files': movFile,
# "stagingDir": stagingdir,
# "preview": True
# }
# instance.data["representations"].append(representation)

View file

@@ -1,153 +0,0 @@
import os
import subprocess
import pyblish.api
import filelink
class ExtractTranscode(pyblish.api.InstancePlugin):
"""Extracts review movie from image sequence.
Offset to get images to transcode from.
"""
order = pyblish.api.ExtractorOrder + 0.1
label = "Transcode"
optional = True
families = ["review"]
def find_previous_index(self, index, indexes):
"""Finds the closest previous value in a list from a value."""
data = []
for i in indexes:
if i >= index:
continue
data.append(index - i)
return indexes[data.index(min(data))]
def process(self, instance):
if "collection" in instance.data.keys():
self.process_image(instance)
if "output_path" in instance.data.keys():
self.process_movie(instance)
def process_image(self, instance):
collection = instance.data.get("collection", [])
if not list(collection):
msg = "Skipping \"{0}\" because no frames was found."
self.log.warning(msg.format(instance.data["name"]))
return
# Temporary fill the missing frames.
missing = collection.holes()
if not collection.is_contiguous():
pattern = collection.format("{head}{padding}{tail}")
for index in missing.indexes:
dst = pattern % index
src_index = self.find_previous_index(
index, list(collection.indexes)
)
src = pattern % src_index
filelink.create(src, dst)
# Generate args.
# Has to be yuv420p for compatibility with older players and smooth
# playback. This does come with a sacrifice of more visible banding
# issues.
# -crf 18 is visually lossless.
args = [
"ffmpeg", "-y",
"-start_number", str(min(collection.indexes)),
"-framerate", str(instance.context.data["framerate"]),
"-i", collection.format("{head}{padding}{tail}"),
"-pix_fmt", "yuv420p",
"-crf", "18",
"-timecode", "00:00:00:01",
"-vframes",
str(max(collection.indexes) - min(collection.indexes) + 1),
"-vf",
"scale=trunc(iw/2)*2:trunc(ih/2)*2",
]
if instance.data.get("baked_colorspace_movie"):
args = [
"ffmpeg", "-y",
"-i", instance.data["baked_colorspace_movie"],
"-pix_fmt", "yuv420p",
"-crf", "18",
"-timecode", "00:00:00:01",
]
args.append(collection.format("{head}.mov"))
self.log.debug("Executing args: {0}".format(args))
# Can't use subprocess.check_output, cause Houdini doesn't like that.
p = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
stdin=subprocess.PIPE,
cwd=os.path.dirname(args[-1])
)
output = p.communicate()[0]
# Remove temporary frame fillers
for f in missing:
os.remove(f)
if p.returncode != 0:
raise ValueError(output)
self.log.debug(output)
def process_movie(self, instance):
# Generate args.
# Has to be yuv420p for compatibility with older players and smooth
# playback. This does come with a sacrifice of more visible banding
# issues.
args = [
"ffmpeg", "-y",
"-i", instance.data["output_path"],
"-pix_fmt", "yuv420p",
"-crf", "18",
"-timecode", "00:00:00:01",
]
if instance.data.get("baked_colorspace_movie"):
args = [
"ffmpeg", "-y",
"-i", instance.data["baked_colorspace_movie"],
"-pix_fmt", "yuv420p",
"-crf", "18",
"-timecode", "00:00:00:01",
]
split = os.path.splitext(instance.data["output_path"])
args.append(split[0] + "_review.mov")
self.log.debug("Executing args: {0}".format(args))
# Can't use subprocess.check_output, cause Houdini doesn't like that.
p = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
stdin=subprocess.PIPE,
cwd=os.path.dirname(args[-1])
)
output = p.communicate()[0]
if p.returncode != 0:
raise ValueError(output)
self.log.debug(output)

View file

@@ -108,9 +108,13 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
if resolution_height:
anatomy_data["resolution_height"] = resolution_height
+pixel_aspect = instance.data.get("pixelAspect")
+if pixel_aspect:
+    anatomy_data["pixel_aspect"] = float("{:0.2f}".format(pixel_aspect))
fps = instance.data.get("fps")
if resolution_height:
anatomy_data["fps"] = fps
anatomy_data["fps"] = float("{:0.2f}".format(fps))
instance.data["projectEntity"] = project_entity
instance.data["assetEntity"] = asset_entity

View file

@@ -33,6 +33,11 @@ class ExtractBurnin(pype.api.Extractor):
duration = frame_end - frame_start + 1
prep_data = copy.deepcopy(instance.data["anatomyData"])
if "slate.farm" in instance.data["families"]:
frame_start += 1
duration -= 1
prep_data.update({
"frame_start": frame_start,
"frame_end": frame_end,
@@ -42,22 +47,6 @@ class ExtractBurnin(pype.api.Extractor):
"intent": instance.context.data.get("intent", "")
})
-slate_frame_start = frame_start
-slate_frame_end = frame_end
-slate_duration = duration
-# exception for slate workflow
-if "slate" in instance.data["families"]:
-    slate_frame_start = frame_start - 1
-    slate_frame_end = frame_end
-    slate_duration = slate_frame_end - slate_frame_start + 1
-prep_data.update({
-    "slate_frame_start": slate_frame_start,
-    "slate_frame_end": slate_frame_end,
-    "slate_duration": slate_duration
-})
# get anatomy project
anatomy = instance.context.data['anatomy']
@@ -101,6 +90,26 @@ class ExtractBurnin(pype.api.Extractor):
filled_anatomy = anatomy.format_all(_prep_data)
_prep_data["anatomy"] = filled_anatomy.get_solved()
+# dealing with slates
+slate_frame_start = frame_start
+slate_frame_end = frame_end
+slate_duration = duration
+# exception for slate workflow
+if ("slate" in instance.data["families"]):
+    if "slate-frame" in repre.get("tags", []):
+        slate_frame_start = frame_start - 1
+        slate_frame_end = frame_end
+        slate_duration = duration + 1
+self.log.debug("__1 slate_frame_start: {}".format(slate_frame_start))
+_prep_data.update({
+    "slate_frame_start": slate_frame_start,
+    "slate_frame_end": slate_frame_end,
+    "slate_duration": slate_duration
+})
burnin_data = {
"input": full_movie_path.replace("\\", "/"),
"codec": repre.get("codec", []),

View file

@@ -278,6 +278,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
stagingdir = repre['stagingDir']
if repre.get('anatomy_template'):
template_name = repre['anatomy_template']
if repre.get("outputName"):
template_data["output"] = repre['outputName']
template = os.path.normpath(
anatomy.templates[template_name]["path"])
@@ -389,9 +391,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
template_data["representation"] = repre['ext']
if repre.get("outputName"):
template_data["output"] = repre['outputName']
src = os.path.join(stagingdir, fname)
anatomy_filled = anatomy.format(template_data)
template_filled = anatomy_filled[template_name]["path"]
@@ -441,7 +440,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
if sequence_repre and repre.get("frameStart"):
representation['context']['frame'] = (
-    src_padding_exp % int(repre.get("frameStart"))
+    dst_padding_exp % int(repre.get("frameStart"))
)
self.log.debug("__ representation: {}".format(representation))

View file

@@ -193,11 +193,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
metadata_path = os.path.normpath(metadata_path)
mount_root = os.path.normpath(os.environ["PYPE_STUDIO_PROJECTS_MOUNT"])
-network_root = os.path.normpath(
-    os.environ["PYPE_STUDIO_PROJECTS_PATH"]
-)
+network_root = os.environ["PYPE_STUDIO_PROJECTS_PATH"]
metadata_path = metadata_path.replace(mount_root, network_root)
metadata_path = os.path.normpath(metadata_path)
# Generate the payload for Deadline submission
payload = {
@@ -209,7 +207,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"UserName": job["Props"]["User"],
"Comment": instance.context.data.get("comment", ""),
"Priority": job["Props"]["Pri"],
"Pool": self.deadline_pool
"Pool": self.deadline_pool,
"OutputDirectory0": output_dir
},
"PluginInfo": {
"Version": "3.6",

View file

@@ -1 +0,0 @@
# usually used for mattepainting

View file

@@ -1,46 +0,0 @@
import pyblish.api
@pyblish.api.log
class CollectRenderTarget(pyblish.api.InstancePlugin):
"""Collect families for all instances"""
order = pyblish.api.CollectorOrder + 0.2
label = "Collect Render Target"
hosts = ["nuke", "nukeassist"]
families = ['write']
def process(self, instance):
node = instance[0]
self.log.info('processing {}'.format(node))
families = []
if instance.data.get('families'):
families += instance.data['families']
# set for ftrack to accept
# instance.data["families"] = ["ftrack"]
if node["render"].value():
# dealing with local/farm rendering
if node["render_farm"].value():
families.append("render.farm")
else:
families.append("render.local")
else:
families.append("render.frames")
# to ignore staging dir op in integrate
instance.data['transfer'] = False
families.append('ftrack')
instance.data["families"] = families
# Sort/grouped by family (preserving local index)
instance.context[:] = sorted(instance.context, key=self.sort_by_family)
def sort_by_family(self, instance):
"""Sort by family"""
return instance.data.get("families", instance.data.get("family"))

View file

@@ -1,147 +0,0 @@
import os
import json
import getpass
from avalon import api
from avalon.vendor import requests
import pyblish.api
class NukeSubmitDeadline(pyblish.api.InstancePlugin):
# TODO: rewrite docstring to nuke
"""Submit current Comp to Deadline
Renders are submitted to a Deadline Web Service as
supplied via the environment variable DEADLINE_REST_URL
"""
label = "Submit to Deadline"
order = pyblish.api.IntegratorOrder
hosts = ["nuke"]
families = ["write", "render.deadline"]
def process(self, instance):
context = instance.context
key = "__hasRun{}".format(self.__class__.__name__)
if context.data.get(key, False):
return
else:
context.data[key] = True
DEADLINE_REST_URL = api.Session.get("DEADLINE_REST_URL",
"http://localhost:8082")
assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
# Collect all saver instances in context that are to be rendered
write_instances = []
for instance in context[:]:
if not self.families[0] in instance.data.get("families"):
# Allow only saver family instances
continue
if not instance.data.get("publish", True):
# Skip inactive instances
continue
self.log.debug(instance.data["name"])
write_instances.append(instance)
if not write_instances:
raise RuntimeError("No instances found for Deadline submittion")
hostVersion = int(context.data["hostVersion"])
filepath = context.data["currentFile"]
filename = os.path.basename(filepath)
comment = context.data.get("comment", "")
deadline_user = context.data.get("deadlineUser", getpass.getuser())
# Documentation for keys available at:
# https://docs.thinkboxsoftware.com
# /products/deadline/8.0/1_User%20Manual/manual
# /manual-submission.html#job-info-file-options
payload = {
"JobInfo": {
# Top-level group name
"BatchName": filename,
# Job name, as seen in Monitor
"Name": filename,
# User, as seen in Monitor
"UserName": deadline_user,
# Use a default submission pool for Nuke
"Pool": "nuke",
"Plugin": "Nuke",
"Frames": "{start}-{end}".format(
start=int(instance.data["frameStart"]),
end=int(instance.data["frameEnd"])
),
"Comment": comment,
},
"PluginInfo": {
# Input
"FlowFile": filepath,
# Mandatory for Deadline
"Version": str(hostVersion),
# Render in high quality
"HighQuality": True,
# Whether saver output should be checked after rendering
# is complete
"CheckOutput": True,
# Proxy: higher numbers mean smaller images for faster test renders
# 1 = no proxy quality
"Proxy": 1,
},
# Mandatory for Deadline, may be empty
"AuxFiles": []
}
# Enable going to rendered frames from Deadline Monitor
for index, instance in enumerate(write_instances):
path = instance.data["path"]
folder, filename = os.path.split(path)
payload["JobInfo"]["OutputDirectory%d" % index] = folder
payload["JobInfo"]["OutputFilename%d" % index] = filename
# Include critical variables with submission
keys = [
# TODO: This won't work if the slaves don't have access to
# these paths, such as if slaves are running Linux and the
# submitter is on Windows.
"PYTHONPATH",
"NUKE_PATH"
# "OFX_PLUGIN_PATH",
]
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
key=key,
value=environment[key]
) for index, key in enumerate(environment)
})
self.log.info("Submitting..")
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
# E.g. http://192.168.0.1:8082/api/jobs
url = "{}/api/jobs".format(DEADLINE_REST_URL)
response = requests.post(url, json=payload)
if not response.ok:
raise Exception(response.text)
# Store the response for dependent job submission plug-ins
for instance in write_instances:
instance.data["deadlineSubmissionJob"] = response.json()

View file

@@ -1,24 +0,0 @@
import pyblish.api
class IncrementTestPlugin(pyblish.api.ContextPlugin):
"""Increment current script version."""
order = pyblish.api.CollectorOrder + 0.5
label = "Test Plugin"
hosts = ['nuke']
def process(self, context):
instances = context[:]
prerender_check = list()
families_check = list()
for instance in instances:
if ("prerender" in str(instance)):
prerender_check.append(instance)
if instance.data.get("families", None):
families_check.append(True)
if len(prerender_check) != len(families_check):
self.log.info(prerender_check)
self.log.info(families_check)

View file

@@ -1,68 +0,0 @@
import nuke
import os
import pyblish.api
from avalon import io
# TODO: add repair function
@pyblish.api.log
class ValidateSettingsNuke(pyblish.api.Validator):
""" Validates settings """
families = ['scene']
hosts = ['nuke']
optional = True
label = 'Settings'
def process(self, instance):
asset = io.find_one({"name": os.environ['AVALON_ASSET']})
try:
avalon_resolution = asset["data"].get("resolution", '')
avalon_pixel_aspect = asset["data"].get("pixelAspect", '')
avalon_fps = asset["data"].get("fps", '')
avalon_first = asset["data"].get("frameStart", '')
avalon_last = asset["data"].get("frameEnd", '')
avalon_crop = asset["data"].get("crop", '')
except KeyError:
print(
"No resolution information found for \"{0}\".".format(
asset["name"]
)
)
return
# validating first frame
local_first = nuke.root()['first_frame'].value()
msg = 'First frame is incorrect.'
msg += '\n\nLocal first: %s' % local_first
msg += '\n\nOnline first: %s' % avalon_first
assert local_first == avalon_first, msg
# validating last frame
local_last = nuke.root()['last_frame'].value()
msg = 'Last frame is incorrect.'
msg += '\n\nLocal last: %s' % local_last
msg += '\n\nOnline last: %s' % avalon_last
assert local_last == avalon_last, msg
# validating fps
local_fps = nuke.root()['fps'].value()
msg = 'FPS is incorrect.'
msg += '\n\nLocal fps: %s' % local_fps
msg += '\n\nOnline fps: %s' % avalon_fps
assert local_fps == avalon_fps, msg
# validating resolution width
local_width = nuke.root().format().width()
msg = 'Width is incorrect.'
msg += '\n\nLocal width: %s' % local_width
msg += '\n\nOnline width: %s' % avalon_resolution[0]
assert local_width == avalon_resolution[0], msg
# validating resolution height
local_height = nuke.root().format().height()
msg = 'Height is incorrect.'
msg += '\n\nLocal height: %s' % local_height
msg += '\n\nOnline height: %s' % avalon_resolution[1]
assert local_height == avalon_resolution[1], msg

View file

@@ -1,33 +0,0 @@
import nuke
import pyblish.api
class RepairNukeProxyModeAction(pyblish.api.Action):
label = "Repair"
icon = "wrench"
on = "failed"
def process(self, context, plugin):
nuke.root()["proxy"].setValue(0)
class ValidateNukeProxyMode(pyblish.api.ContextPlugin):
"""Validates against having proxy mode on."""
order = pyblish.api.ValidatorOrder
optional = True
label = "Proxy Mode"
actions = [RepairNukeProxyModeAction]
hosts = ["nuke", "nukeassist"]
# targets = ["default", "process"]
def process(self, context):
msg = (
"Proxy mode is not supported. Please disable Proxy Mode in the "
"Project settings."
)
assert not nuke.root()["proxy"].getValue(), msg

View file

@@ -41,6 +41,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
handle_end = instance.context.data["handleEnd"]
first_frame = int(nuke.root()["first_frame"].getValue())
last_frame = int(nuke.root()["last_frame"].getValue())
+frame_length = int(
+    last_frame - first_frame + 1
+)
if node["use_limit"].getValue():
handles = 0
@@ -81,8 +84,26 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
collected_frames = [f for f in os.listdir(output_dir)
if ext in f]
if collected_frames:
-representation['frameStart'] = "%0{}d".format(
-    len(str(last_frame))) % first_frame
+collected_frames_len = len(collected_frames)
+frame_start_str = "%0{}d".format(
+    len(str(last_frame))) % first_frame
+representation['frameStart'] = frame_start_str
+# in case slate is expected and not yet rendered
+self.log.debug("_ frame_length: {}".format(frame_length))
+self.log.debug(
+    "_ collected_frames_len: {}".format(
+        collected_frames_len))
+# this will only run if slate frame is not already
+# rendered from previous publishes
+if "slate" in instance.data["families"] \
+        and (frame_length == collected_frames_len):
+    frame_slate_str = "%0{}d".format(
+        len(str(last_frame))) % (first_frame - 1)
+    slate_frame = collected_frames[0].replace(
+        frame_start_str, frame_slate_str)
+    collected_frames.insert(0, slate_frame)
representation['files'] = collected_frames
instance.data["representations"].append(representation)
except Exception:
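
The added branch covers the case where a slate frame is expected but the rendered sequence holds exactly the content frames, meaning the slate was not produced by a previous publish; the expected slate filename is then synthesized from the first frame's name. A condensed, runnable version of that arithmetic with made-up filenames:

# --- illustrative sketch, not part of the commit ---
first_frame, last_frame = 1001, 1100
padding = len(str(last_frame))

frame_start_str = "%0{}d".format(padding) % first_frame        # "1001"
frame_slate_str = "%0{}d".format(padding) % (first_frame - 1)  # "1000"

collected_frames = ["shot_1001.exr", "shot_1002.exr"]
slate_frame = collected_frames[0].replace(frame_start_str, frame_slate_str)
collected_frames.insert(0, slate_frame)
print(collected_frames[0])  # shot_1000.exr
# --- end sketch ---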

View file

@@ -33,6 +33,7 @@ class ExtractSlateFrame(pype.api.Extractor):
self.render_slate(instance)
def render_slate(self, instance):
+node_subset_name = instance.data.get("name", None)
node = instance[0] # group node
self.log.info("Creating staging dir...")
@@ -47,6 +48,10 @@ class ExtractSlateFrame(pype.api.Extractor):
self.log.info(
"StagingDir `{0}`...".format(instance.data["stagingDir"]))
+frame_length = int(
+    instance.data["frameEnd"] - instance.data["frameStart"] + 1
+)
temporary_nodes = []
collection = instance.data.get("collection", None)
@@ -56,10 +61,16 @@ class ExtractSlateFrame(pype.api.Extractor):
"{head}{padding}{tail}"))
fhead = collection.format("{head}")
+collected_frames_len = int(len(collection.indexes))
# get first and last frame
first_frame = min(collection.indexes) - 1
if "slate" in instance.data["families"]:
self.log.info('frame_length: {}'.format(frame_length))
self.log.info(
'len(collection.indexes): {}'.format(collected_frames_len)
)
if ("slate" in instance.data["families"]) \
and (frame_length != collected_frames_len):
first_frame += 1
last_frame = first_frame
@@ -103,6 +114,8 @@ class ExtractSlateFrame(pype.api.Extractor):
# Render frames
nuke.execute(write_node.name(), int(first_frame), int(last_frame))
+# also render slate as sequence frame
+nuke.execute(node_subset_name, int(first_frame), int(last_frame))
self.log.debug(
"slate frame path: {}".format(instance.data["slateFrame"]))

View file

@@ -22,6 +22,11 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
families = ["render.farm"]
optional = True
+deadline_priority = 50
+deadline_pool = ""
+deadline_pool_secondary = ""
+deadline_chunk_size = 1
def process(self, instance):
node = instance[0]
@@ -89,7 +94,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
jobname = "%s - %s" % (script_name, instance.name)
output_filename_0 = self.preview_fname(render_path)
output_directory_0 = render_dir.replace("\\", "/")
if not responce_data:
responce_data = {}
@@ -100,6 +104,15 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
except OSError:
pass
+# define chunk and priority
+chunk_size = instance.data.get("deadlineChunkSize")
+if chunk_size == 0:
+    chunk_size = self.deadline_chunk_size
+priority = instance.data.get("deadlinePriority")
+if priority != 50:
+    priority = self.deadline_priority
payload = {
"JobInfo": {
# Top-level group name
@@ -111,10 +124,11 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
# Arbitrary username, for visualisation in Monitor
"UserName": self._deadline_user,
"Priority": instance.data["deadlinePriority"],
"Priority": priority,
"ChunkSize": chunk_size,
"Pool": "2d",
"SecondaryPool": "2d",
"Pool": self.deadline_pool,
"SecondaryPool": self.deadline_pool_secondary,
"Plugin": "Nuke",
"Frames": "{start}-{end}".format(

View file

@@ -76,7 +76,8 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
'len(collection.indexes): {}'.format(collected_frames_len)
)
if "slate" in instance.data["families"]:
if ("slate" in instance.data["families"]) \
and (frame_length != collected_frames_len):
collected_frames_len -= 1
assert (collected_frames_len == frame_length), (

View file

@@ -42,6 +42,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
width = int(sequence.format().width())
height = int(sequence.format().height())
pixel_aspect = sequence.format().pixelAspect()
fps = context.data["fps"]
# build data for inner nukestudio project property
data = {
@@ -161,9 +162,10 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
"asset": asset,
"hierarchy": hierarchy,
"parents": parents,
"width": width,
"height": height,
"resolutionWidth": width,
"resolutionHeight": height,
"pixelAspect": pixel_aspect,
"fps": fps,
"tasks": instance.data["tasks"]
})
@@ -223,9 +225,12 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
instance.data["parents"] = s_asset_data["parents"]
instance.data["hierarchy"] = s_asset_data["hierarchy"]
instance.data["tasks"] = s_asset_data["tasks"]
instance.data["width"] = s_asset_data["width"]
instance.data["height"] = s_asset_data["height"]
instance.data["resolutionWidth"] = s_asset_data[
"resolutionWidth"]
instance.data["resolutionHeight"] = s_asset_data[
"resolutionHeight"]
instance.data["pixelAspect"] = s_asset_data["pixelAspect"]
instance.data["fps"] = s_asset_data["fps"]
# adding frame start if any on instance
start_frame = s_asset_data.get("startingFrame")
@@ -275,8 +280,8 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
# adding SourceResolution if Tag was present
if instance.data.get("main"):
in_info['custom_attributes'].update({
"resolutionWidth": instance.data["width"],
"resolutionHeight": instance.data["height"],
"resolutionWidth": instance.data["resolutionWidth"],
"resolutionHeight": instance.data["resolutionHeight"],
"pixelAspect": instance.data["pixelAspect"]
})

View file

@@ -83,7 +83,7 @@ class CollectPlates(api.InstancePlugin):
class CollectPlatesData(api.InstancePlugin):
"""Collect plates"""
-order = api.CollectorOrder + 0.495
+order = api.CollectorOrder + 0.48
label = "Collect Plates Data"
hosts = ["nukestudio"]
families = ["plate"]
@@ -126,7 +126,7 @@ class CollectPlatesData(api.InstancePlugin):
transfer_data = [
"handleStart", "handleEnd", "sourceIn", "sourceOut", "frameStart",
"frameEnd", "sourceInH", "sourceOutH", "clipIn", "clipOut",
"clipInH", "clipOutH", "asset", "track", "version", "width", "height", "pixelAspect"
"clipInH", "clipOutH", "asset", "track", "version", "resolutionWidth", "resolutionHeight", "pixelAspect", "fps"
]
# pass data to version

View file

@@ -196,7 +196,8 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin):
"asset": asset_name,
"family": instance.data["family"],
"subset": subset_name,
"version": version_number
"version": version_number,
"hierarchy": instance.data["hierarchy"]
})
resolution_width = instance.data.get("resolutionWidth")
@@ -207,9 +208,13 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin):
if resolution_height:
anatomy_data["resolution_height"] = resolution_height
+pixel_aspect = instance.data.get("pixelAspect")
+if pixel_aspect:
+    anatomy_data["pixel_aspect"] = float("{:0.2f}".format(pixel_aspect))
fps = instance.data.get("fps")
if resolution_height:
anatomy_data["fps"] = fps
anatomy_data["fps"] = float("{:0.2f}".format(fps))
instance.data["projectEntity"] = project_entity
instance.data["assetEntity"] = asset_entity

View file

@@ -241,7 +241,7 @@ class TasksModel(TreeModel):
self.endResetModel()
def flags(self, index):
-return QtCore.Qt.ItemIsEnabled
+return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
def headerData(self, section, orientation, role):

View file

@@ -1,54 +0,0 @@
'''
Simple socket server using threads
'''
import socket
import sys
import threading
import StringIO
import contextlib
import nuke
HOST = ''
PORT = 8888
@contextlib.contextmanager
def stdoutIO(stdout=None):
old = sys.stdout
if stdout is None:
stdout = StringIO.StringIO()
sys.stdout = stdout
yield stdout
sys.stdout = old
def _exec(data):
with stdoutIO() as s:
exec(data)
return s.getvalue()
def server_start():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((HOST, PORT))
s.listen(5)
while 1:
client, address = s.accept()
try:
data = client.recv(4096)
if data:
result = nuke.executeInMainThreadWithResult(_exec, args=(data))
client.send(str(result))
except SystemExit:
result = self.encode('SERVER: Shutting down...')
client.send(str(result))
raise
finally:
client.close()
t = threading.Thread(None, server_start)
t.setDaemon(True)
t.start()

View file

@@ -1,6 +1,5 @@
import os
import sys
-import atom_server
import KnobScripter
from pype.nuke.lib import (