Merge branch 'develop' into hotfix/eallin_fixes_nks_nk

This commit is contained in:
Jakub Jezek 2019-11-06 10:09:09 +01:00
commit 275bd6da64
14 changed files with 115 additions and 33 deletions

View file

@ -15,6 +15,7 @@ log = Logger().get_logger(__name__)
class RVAction(BaseAction):
""" Launch RV action """
ignore_me = "rv" not in config.get_presets()
identifier = "rv.launch.action"
label = "rv"
description = "rv Launcher"
@ -42,8 +43,9 @@ class RVAction(BaseAction):
)
else:
# if not, fallback to config file location
self.config_data = config.get_presets()['rv']['config']
self.set_rv_path()
if "rv" in config.get_presets():
self.config_data = config.get_presets()['rv']['config']
self.set_rv_path()
if self.rv_path is None:
return

View file

@ -379,6 +379,10 @@ def add_rendering_knobs(node):
knob = nuke.Boolean_Knob("render_farm", "Render on Farm")
knob.setValue(False)
node.addKnob(knob)
if "review" not in node.knobs():
knob = nuke.Boolean_Knob("review", "Review")
knob.setValue(True)
node.addKnob(knob)
return node
@ -389,6 +393,14 @@ def add_deadline_tab(node):
knob.setValue(1)
node.addKnob(knob)
knob = nuke.Int_Knob("deadlinePriority", "Priority")
knob.setValue(50)
node.addKnob(knob)
def get_deadline_knob_names():
    """Return the names of the Deadline-related knobs managed by this module.

    These are the knob names created on a write group's Deadline tab
    (see ``add_deadline_tab``), used by validators/repair actions to
    detect and remove existing knobs before re-adding them.
    """
    knob_names = ["Deadline", "deadlineChunkSize", "deadlinePriority"]
    return knob_names
def create_backdrop(label="", color=None, layer=0,
nodes=None):
@ -543,17 +555,34 @@ class WorkfileSettings(object):
assert isinstance(root_dict, dict), log.error(
"set_root_colorspace(): argument should be dictionary")
log.debug(">> root_dict: {}".format(root_dict))
# first set OCIO
if self._root_node["colorManagement"].value() \
not in str(root_dict["colorManagement"]):
self._root_node["colorManagement"].setValue(
str(root_dict["colorManagement"]))
log.debug("nuke.root()['{0}'] changed to: {1}".format(
"colorManagement", root_dict["colorManagement"]))
root_dict.pop("colorManagement")
# second set ocio version
if self._root_node["OCIO_config"].value() \
not in str(root_dict["OCIO_config"]):
self._root_node["OCIO_config"].setValue(
str(root_dict["OCIO_config"]))
log.debug("nuke.root()['{0}'] changed to: {1}".format(
"OCIO_config", root_dict["OCIO_config"]))
root_dict.pop("OCIO_config")
# third set ocio custom path
if root_dict.get("customOCIOConfigPath"):
self._root_node["customOCIOConfigPath"].setValue(
str(root_dict["customOCIOConfigPath"]).format(**os.environ)
)
log.debug("nuke.root()['{}'] changed to: {}".format(
"customOCIOConfigPath", root_dict["customOCIOConfigPath"]))
root_dict.pop("customOCIOConfigPath")
# then set the rest
for knob, value in root_dict.items():

View file

@ -44,7 +44,15 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
input_data = context.data["hierarchyContext"]
self.import_to_ftrack(input_data)
# self.import_to_ftrack(input_data)
try:
self.import_to_ftrack(input_data)
except Exception as exc:
import sys
import traceback
self.log.info(traceback.format_exc(sys.exc_info()))
raise Exception("failed")
def import_to_ftrack(self, input_data, parent=None):
for entity_name in input_data:
@ -66,9 +74,9 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
# try to find if entity already exists
else:
query = '{} where name is "{}" and parent_id is "{}"'.format(
entity_type, entity_name, parent['id']
)
query = 'TypedContext where name is "{0}" and project.full_name is "{1}"'.format(
entity_name, self.ft_project["full_name"]
)
try:
entity = self.session.query(query).one()
except Exception:

View file

@ -66,7 +66,7 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
"""Create a filepath based on the current data available
Example template:
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
{root}/{project}/{asset}/publish/{subset}/v{version:0>3}/
{subset}.{representation}
Args:
instance: the instance to publish
@ -95,7 +95,6 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset['silo']
subset = io.find_one({"type": "subset",
"name": subset_name,
@ -126,7 +125,6 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": {"name": project_name,
"code": project['data']['code']},
"silo": silo,
"family": instance.data['family'],
"asset": asset_name,
"subset": subset_name,

View file

@ -314,8 +314,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
index_frame_start = int(repre.get("frameStart"))
dst_padding_exp = src_padding_exp
dst_start_frame = None
for i in src_collection.indexes:
src_padding = src_padding_exp % i
# for adding first frame into db
if not dst_start_frame:
dst_start_frame = src_padding
src_file_name = "{0}{1}{2}".format(
src_head, src_padding, src_tail)
@ -326,19 +332,22 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
dst_padding = dst_padding_exp % index_frame_start
index_frame_start += 1
dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail).replace("..", ".")
dst = "{0}{1}{2}".format(
dst_head,
dst_padding,
dst_tail).replace("..", ".")
self.log.debug("destination: `{}`".format(dst))
src = os.path.join(stagingdir, src_file_name)
self.log.debug("source: {}".format(src))
instance.data["transfers"].append([src, dst])
repre['published_path'] = "{0}{1}{2}".format(dst_head,
dst_padding_exp,
dst_tail)
# for imagesequence version data
hashes = '#' * len(dst_padding)
dst = os.path.normpath("{0}{1}{2}".format(
dst_head, hashes, dst_tail))
dst = "{0}{1}{2}".format(
dst_head,
dst_start_frame,
dst_tail).replace("..", ".")
repre['published_path'] = dst
else:
# Single file

View file

@ -174,7 +174,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"JobDependency0": job["_id"],
"UserName": job["Props"]["User"],
"Comment": instance.context.data.get("comment", ""),
"InitialStatus": state
"InitialStatus": state,
"Priority": job["Props"]["Pri"]
},
"PluginInfo": {
"Version": "3.6",

View file

@ -46,7 +46,8 @@ class ValidateFtrackAttributes(pyblish.api.InstancePlugin):
"Missing FTrack Task entity in context")
host = pyblish.api.current_host()
to_check = context.data["presets"][host].get("ftrack_attributes")
to_check = context.data["presets"].get(
host, {}).get("ftrack_attributes")
if not to_check:
self.log.warning("ftrack_attributes preset not found")
return

View file

@ -110,6 +110,7 @@ class LoadSequence(api.Loader):
last += self.handle_end
file = self.fname.replace("\\", "/")
log.info("file: {}\n".format(self.fname))
repr_cont = context["representation"]["context"]
@ -118,6 +119,11 @@ class LoadSequence(api.Loader):
repr_cont["subset"],
repr_cont["representation"])
if "#" not in file:
frame = repr_cont.get("frame")
padding = len(frame)
file = file.replace(frame, "#"*padding)
# Create the Loader with the filename path set
with viewer_update_and_undo_stop():
# TODO: it might be universal read to img/geo/camera

View file

@ -1,5 +1,5 @@
import pyblish.api
import nuke
class CollectReview(pyblish.api.InstancePlugin):
"""Collect review instance from rendered frames
@ -9,9 +9,20 @@ class CollectReview(pyblish.api.InstancePlugin):
family = "review"
label = "Collect Review"
hosts = ["nuke"]
families = ["render", "render.local"]
families = ["render", "render.local", "render.farm"]
def process(self, instance):
if instance.data["families"]:
instance.data["families"].append("review")
self.log.info("Review collected: `{}`".format(instance))
node = instance[0]
if "review" not in node.knobs():
knob = nuke.Boolean_Knob("review", "Review")
knob.setValue(True)
node.addKnob(knob)
if not node["review"].value():
return
instance.data["families"].append("review")
instance.data['families'].append('ftrack')
self.log.info("Review collected: `{}`".format(instance))

View file

@ -65,7 +65,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
)
if 'render' in instance.data['families']:
instance.data['families'].append('ftrack')
if "representations" not in instance.data:
instance.data["representations"] = list()
@ -78,22 +77,22 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
try:
collected_frames = os.listdir(output_dir)
if collected_frames:
representation['frameStart'] = "%0{}d".format(
len(str(last_frame))) % first_frame
representation['files'] = collected_frames
instance.data["representations"].append(representation)
except Exception:
instance.data["representations"].append(representation)
self.log.debug("couldn't collect frames: {}".format(label))
if 'render.local' in instance.data['families']:
instance.data['families'].append('ftrack')
# Add version data to instance
version_data = {
"handles": handle_start,
"handleStart": handle_start,
"handleEnd": handle_end,
"frameStart": first_frame,
"frameEnd": last_frame,
"frameStart": first_frame + handle_start,
"frameEnd": last_frame - handle_end,
"version": int(version),
"colorspace": node["colorspace"].value(),
"families": [instance.data["family"]],
@ -106,6 +105,10 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
if "deadlineChunkSize" in group_node.knobs():
deadlineChunkSize = group_node["deadlineChunkSize"].value()
deadlinePriority = 50
if "deadlinePriority" in group_node.knobs():
deadlinePriority = group_node["deadlinePriority"].value()
instance.data.update({
"versionData": version_data,
"path": path,
@ -117,7 +120,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
"frameEnd": last_frame,
"outputType": output_type,
"colorspace": node["colorspace"].value(),
"deadlineChunkSize": deadlineChunkSize
"deadlineChunkSize": deadlineChunkSize,
"deadlinePriority": deadlinePriority
})
self.log.debug("instance.data: {}".format(instance.data))

View file

@ -58,6 +58,7 @@ class NukeRenderLocal(pype.api.Extractor):
repre = {
'name': ext,
'ext': ext,
'frameStart': "%0{}d".format(len(str(last_frame))) % first_frame,
'files': collected_frames,
"stagingDir": out_dir,
"anatomy_template": "render"

View file

@ -85,6 +85,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
end=int(instance.data["frameEnd"])
),
"ChunkSize": instance.data["deadlineChunkSize"],
"Priority": instance.data["deadlinePriority"],
"Comment": comment,

View file

@ -22,6 +22,13 @@ class RepairNukeWriteDeadlineTab(pyblish.api.Action):
for instance in instances:
group_node = [x for x in instance if x.Class() == "Group"][0]
# Remove exising knobs.
knob_names = pype.nuke.lib.get_deadline_knob_names()
for name, knob in group_node.knobs().iteritems():
if name in knob_names:
group_node.removeKnob(knob)
pype.nuke.lib.add_deadline_tab(group_node)
@ -38,5 +45,9 @@ class ValidateNukeWriteDeadlineTab(pyblish.api.InstancePlugin):
def process(self, instance):
group_node = [x for x in instance if x.Class() == "Group"][0]
msg = "Deadline tab missing on \"{}\"".format(group_node.name())
assert "Deadline" in group_node.knobs(), msg
knob_names = pype.nuke.lib.get_deadline_knob_names()
missing_knobs = []
for name in knob_names:
if name not in group_node.knobs().keys():
missing_knobs.append(name)
assert not missing_knobs, "Missing knobs: {}".format(missing_knobs)