From 3a7ce1e1b2497b40dde65c70cf06ede0f8a2bafc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Dec 2018 14:14:58 +0100 Subject: [PATCH 01/24] preparation scripts added from past projects --- .../global/_publish_unused/transcode.py | 153 ++++++++++++++++++ .../extract_nuke_baked_colorspace.py | 107 ++++++++++++ 2 files changed, 260 insertions(+) create mode 100644 pype/plugins/global/_publish_unused/transcode.py create mode 100644 pype/plugins/nuke/_publish_unused/extract_nuke_baked_colorspace.py diff --git a/pype/plugins/global/_publish_unused/transcode.py b/pype/plugins/global/_publish_unused/transcode.py new file mode 100644 index 0000000000..6da65e3cc7 --- /dev/null +++ b/pype/plugins/global/_publish_unused/transcode.py @@ -0,0 +1,153 @@ +import os +import subprocess + +import pyblish.api +import filelink + + +class ExtractTranscode(pyblish.api.InstancePlugin): + """Extracts review movie from image sequence. + + Offset to get images to transcode from. + """ + + order = pyblish.api.ExtractorOrder + 0.1 + label = "Transcode" + optional = True + families = ["review"] + + def find_previous_index(self, index, indexes): + """Finds the closest previous value in a list from a value.""" + + data = [] + for i in indexes: + if i >= index: + continue + data.append(index - i) + + return indexes[data.index(min(data))] + + def process(self, instance): + + if "collection" in instance.data.keys(): + self.process_image(instance) + + if "output_path" in instance.data.keys(): + self.process_movie(instance) + + def process_image(self, instance): + + collection = instance.data.get("collection", []) + + if not list(collection): + msg = "Skipping \"{0}\" because no frames was found." + self.log.warning(msg.format(instance.data["name"])) + return + + # Temporary fill the missing frames. 
+ missing = collection.holes() + if not collection.is_contiguous(): + pattern = collection.format("{head}{padding}{tail}") + for index in missing.indexes: + dst = pattern % index + src_index = self.find_previous_index( + index, list(collection.indexes) + ) + src = pattern % src_index + + filelink.create(src, dst) + + # Generate args. + # Has to be yuv420p for compatibility with older players and smooth + # playback. This does come with a sacrifice of more visible banding + # issues. + # -crf 18 is visually lossless. + args = [ + "ffmpeg", "-y", + "-start_number", str(min(collection.indexes)), + "-framerate", str(instance.context.data["framerate"]), + "-i", collection.format("{head}{padding}{tail}"), + "-pix_fmt", "yuv420p", + "-crf", "18", + "-timecode", "00:00:00:01", + "-vframes", + str(max(collection.indexes) - min(collection.indexes) + 1), + "-vf", + "scale=trunc(iw/2)*2:trunc(ih/2)*2", + ] + + if instance.data.get("baked_colorspace_movie"): + args = [ + "ffmpeg", "-y", + "-i", instance.data["baked_colorspace_movie"], + "-pix_fmt", "yuv420p", + "-crf", "18", + "-timecode", "00:00:00:01", + ] + + args.append(collection.format("{head}.mov")) + + self.log.debug("Executing args: {0}".format(args)) + + # Can't use subprocess.check_output, cause Houdini doesn't like that. + p = subprocess.Popen( + args, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + stdin=subprocess.PIPE, + cwd=os.path.dirname(args[-1]) + ) + + output = p.communicate()[0] + + # Remove temporary frame fillers + for f in missing: + os.remove(f) + + if p.returncode != 0: + raise ValueError(output) + + self.log.debug(output) + + def process_movie(self, instance): + # Generate args. + # Has to be yuv420p for compatibility with older players and smooth + # playback. This does come with a sacrifice of more visible banding + # issues. 
+ args = [ + "ffmpeg", "-y", + "-i", instance.data["output_path"], + "-pix_fmt", "yuv420p", + "-crf", "18", + "-timecode", "00:00:00:01", + ] + + if instance.data.get("baked_colorspace_movie"): + args = [ + "ffmpeg", "-y", + "-i", instance.data["baked_colorspace_movie"], + "-pix_fmt", "yuv420p", + "-crf", "18", + "-timecode", "00:00:00:01", + ] + + split = os.path.splitext(instance.data["output_path"]) + args.append(split[0] + "_review.mov") + + self.log.debug("Executing args: {0}".format(args)) + + # Can't use subprocess.check_output, cause Houdini doesn't like that. + p = subprocess.Popen( + args, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + stdin=subprocess.PIPE, + cwd=os.path.dirname(args[-1]) + ) + + output = p.communicate()[0] + + if p.returncode != 0: + raise ValueError(output) + + self.log.debug(output) diff --git a/pype/plugins/nuke/_publish_unused/extract_nuke_baked_colorspace.py b/pype/plugins/nuke/_publish_unused/extract_nuke_baked_colorspace.py new file mode 100644 index 0000000000..f2561bd7c5 --- /dev/null +++ b/pype/plugins/nuke/_publish_unused/extract_nuke_baked_colorspace.py @@ -0,0 +1,107 @@ +import os +import tempfile +import shutil + +import nuke + +import pyblish.api + + +class ExtractNukeBakedColorspace(pyblish.api.InstancePlugin): + """Extracts movie with baked in luts + + V:\Remote Apps\ffmpeg\bin>ffmpeg -y -i + V:/FUGA/VFX_OUT/VFX_070010/v02/VFX_070010_comp_v02._baked.mov + -pix_fmt yuv420p + -crf 18 + -timecode 00:00:00:01 + V:/FUGA/VFX_OUT/VFX_070010/v02/VFX_070010_comp_v02..mov + + """ + + order = pyblish.api.ExtractorOrder + label = "Baked Colorspace" + optional = True + families = ["review"] + hosts = ["nuke"] + + def process(self, instance): + + if "collection" not in instance.data.keys(): + return + + # Store selection + selection = [i for i in nuke.allNodes() if i["selected"].getValue()] + + # Deselect all nodes to prevent external connections + [i["selected"].setValue(False) for i in nuke.allNodes()] + + temporary_nodes 
= [] + + # Create nodes + first_frame = min(instance.data["collection"].indexes) + last_frame = max(instance.data["collection"].indexes) + + temp_dir = tempfile.mkdtemp() + for f in instance.data["collection"]: + shutil.copy(f, os.path.join(temp_dir, os.path.basename(f))) + + node = previous_node = nuke.createNode("Read") + node["file"].setValue( + os.path.join(temp_dir, + os.path.basename(instance.data["collection"].format( + "{head}{padding}{tail}"))).replace("\\", "/")) + + node["first"].setValue(first_frame) + node["origfirst"].setValue(first_frame) + node["last"].setValue(last_frame) + node["origlast"].setValue(last_frame) + temporary_nodes.append(node) + + reformat_node = nuke.createNode("Reformat") + reformat_node["format"].setValue("HD_1080") + reformat_node["resize"].setValue("fit") + reformat_node["filter"].setValue("Lanczos6") + reformat_node["black_outside"].setValue(True) + reformat_node.setInput(0, previous_node) + previous_node = reformat_node + temporary_nodes.append(reformat_node) + + viewer_process_node = nuke.ViewerProcess.node() + dag_node = None + if viewer_process_node: + dag_node = nuke.createNode(viewer_process_node.Class()) + dag_node.setInput(0, previous_node) + previous_node = dag_node + temporary_nodes.append(dag_node) + # Copy viewer process values + excludedKnobs = ["name", "xpos", "ypos"] + for item in viewer_process_node.knobs().keys(): + if item not in excludedKnobs and item in dag_node.knobs(): + x1 = viewer_process_node[item] + x2 = dag_node[item] + x2.fromScript(x1.toScript(False)) + else: + self.log.warning("No viewer node found.") + + write_node = nuke.createNode("Write") + path = instance.data["collection"].format("{head}_baked.mov") + instance.data["baked_colorspace_movie"] = path + write_node["file"].setValue(path.replace("\\", "/")) + write_node["file_type"].setValue("mov") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + + # Render frames + 
nuke.execute(write_node.name(), int(first_frame), int(last_frame)) + + # Clean up + for node in temporary_nodes: + nuke.delete(node) + + shutil.rmtree(temp_dir) + + # Restore selection + [i["selected"].setValue(False) for i in nuke.allNodes()] + [i["selected"].setValue(True) for i in selection] From 217b537aaa95cf534c4755e3b7a4187b5cef85c4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Dec 2018 19:02:37 +0100 Subject: [PATCH 02/24] adding onScriptSave to synchronize avalon writes to script version --- pype/api.py | 4 +- pype/nuke/__init__.py | 1 + pype/nuke/lib.py | 46 ++++++++++++++++++++- pype/plugins/nuke/publish/collect_writes.py | 2 +- pype/templates.py | 11 ++--- setup/nuke/nuke_path/menu.py | 32 +++----------- 6 files changed, 61 insertions(+), 35 deletions(-) diff --git a/pype/api.py b/pype/api.py index 36094feb7f..746d18bc32 100644 --- a/pype/api.py +++ b/pype/api.py @@ -32,7 +32,7 @@ from .templates import ( get_asset, get_task, fill_avalon_workdir, - get_version_from_workfile + get_version_from_path ) __all__ = [ @@ -61,7 +61,7 @@ __all__ = [ "get_asset", "get_task", "fill_avalon_workdir", - "get_version_from_workfile", + "get_version_from_path", # preloaded templates "Anatomy", diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index 1a892c4187..c5d3652a73 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -67,6 +67,7 @@ if nuke_handler.get_name() \ not in [handler.get_name() for handler in api.Logger.logging.root.handlers[:]]: api.Logger.logging.getLogger().addHandler(nuke_handler) + api.Logger.logging.getLogger().setLevel(api.Logger.logging.DEBUG) if not self.nLogger: self.nLogger = api.Logger diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 8971a43714..332070d946 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1,4 +1,5 @@ import sys +import os from collections import OrderedDict from pprint import pprint from avalon.vendor.Qt import QtGui @@ -12,6 +13,49 @@ self = sys.modules[__name__] self._project = 
None +def onScriptLoad(): + if nuke.env['LINUX']: + nuke.tcl('load ffmpegReader') + nuke.tcl('load ffmpegWriter') + else: + nuke.tcl('load movReader') + nuke.tcl('load movWriter') + + +def writes_version_sync(): + try: + rootVersion = pype.get_version_from_path(nuke.root().name()) + padding = len(rootVersion) + new_version = str("{" + ":0>{}".format(padding) + "}").format( + int(rootVersion) + ) + log.info("new_version: {}".format(new_version)) + except Exception: + return + + for each in nuke.allNodes(): + if each.Class() == 'Write': + avalon_knob_data = get_avalon_knob_data(each) + if not avalon_knob_data: + continue + try: + node_file = each['file'].value() + log.info("node_file: {}".format(node_file)) + + node_version = pype.get_version_from_path(node_file, None) + log.info("node_version: {}".format(node_version)) + + node_new_file = node_file.replace(node_version, new_version) + each['file'].setValue(node_new_file) + except Exception as e: + log.debug("Write node: `{}` has no version in path: {}".format(each.name(), e)) + + +def version_up_script(): + import nukescripts + nukescripts.script_and_write_nodes_version_up() + + def format_anatomy(data): from .templates import ( get_anatomy @@ -26,7 +70,7 @@ def format_anatomy(data): data.update({ "hierarchy": pype.get_hiearchy(), "frame": "#"*padding, - "VERSION": pype.get_version_from_workfile(file) + "VERSION": pype.get_version_from_path(file) }) # log.info("format_anatomy:anatomy: {}".format(anatomy)) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 33ac8592a5..c13f3dcaa4 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -70,7 +70,7 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): if node["render_farm"].value(): families = "{}.farm".format(instance.data["avalonKnob"]["families"][0]) else: - families = "{}.local".format(instance.data["avalonKnob"]["families"][0]) + families = 
"{}.local".format(instance.data["avalonKnob"]["families"]) self.log.debug("checking for error: {}".format(label)) instance.data.update({ diff --git a/pype/templates.py b/pype/templates.py index 7e4b962d52..437f3d4e81 100644 --- a/pype/templates.py +++ b/pype/templates.py @@ -34,15 +34,16 @@ def reset_data_from_templates(): log.info("Data from templates were Unloaded...") -def get_version_from_workfile(file): +def get_version_from_path(file, warning=True): pattern = re.compile(r"_v([0-9]*)") try: v_string = pattern.findall(file)[0] return v_string - except IndexError: - log.error("templates:get_version_from_workfile:" - "`{}` missing version string." - "Example `v004`".format(file)) + except IndexError as e: + if warning: + log.error("Exception: {} \nPlease save as version! " + "\nExample: script_name_v001.nk \n{}".format( + __name__ + ".get_version_from_path()", e)) def get_project_code(): diff --git a/setup/nuke/nuke_path/menu.py b/setup/nuke/nuke_path/menu.py index 6cfa1e3373..3613bc99f2 100644 --- a/setup/nuke/nuke_path/menu.py +++ b/setup/nuke/nuke_path/menu.py @@ -1,32 +1,12 @@ -from avalon.tools import workfiles +from pype.nuke.lib import writes_version_sync, onScriptLoad import nuke -# auto fix version paths in write nodes following root name of script -# cmd = ''' -# import re -# rootVersion=re.search('[vV]\d+', os.path.split(nuke.root().name())[1]).group() -# for each in nuke.allNodes(): -# if each.Class() == 'Write': -# each['file'].setValue(re.sub('[vV]\d+', rootVersion, each['file'].value())) -# ''' -# nuke.knobDefault('onScriptSave', cmd) -# -# print '\n>>> menu.py: Function for automatic check of version in write nodes is added\n' +from pype.api import Logger -ffmpeg_cmd = '''if nuke.env['LINUX']: - nuke.tcl('load ffmpegReader') - nuke.tcl('load ffmpegWriter') -else: - nuke.tcl('load movReader') - nuke.tcl('load movWriter')''' -nuke.knobDefault('onScriptLoad', ffmpeg_cmd) +log = Logger.getLogger(__name__, "nuke") -# # run avalon's tool Workfiles -# 
workfiles = '''from avalon.tools import workfiles -# if nuke.Root().name() == 'Root': -# nuke.scriptClear() -# workfiles.show(os.environ["AVALON_WORKDIR"])''' -# nuke.knobDefault('onCreate', workfiles) +nuke.addOnScriptSave(writes_version_sync) +nuke.addOnScriptSave(onScriptLoad) -# workfiles.show(os.environ["AVALON_WORKDIR"]) +log.info('Automatic syncing of write file knob to script version') From e5daf2162acaf3534515e845a818ab7b867ebf86 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 10 Dec 2018 10:06:56 +0100 Subject: [PATCH 03/24] updating integrate_render_frames - adding render.frames, still.frames, prerender.frames --- pype/nuke/__init__.py | 11 ++----- .../ftrack/integrate_ftrack_instances.py | 4 ++- pype/plugins/nuke/publish/collect_writes.py | 30 ++++++++++++------- .../nuke/publish/integrate_rendered_frames.py | 27 +++++++++-------- 4 files changed, 40 insertions(+), 32 deletions(-) diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index c5d3652a73..8fc7cd3599 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -53,7 +53,7 @@ class NukeHandler(api.Logger.logging.Handler): msg = self.format(record) if record.levelname.lower() in [ - "warning", + # "warning", "critical", "fatal", "error" @@ -67,7 +67,7 @@ if nuke_handler.get_name() \ not in [handler.get_name() for handler in api.Logger.logging.root.handlers[:]]: api.Logger.logging.getLogger().addHandler(nuke_handler) - api.Logger.logging.getLogger().setLevel(api.Logger.logging.DEBUG) + api.Logger.logging.getLogger().setLevel(api.Logger.logging.INFO) if not self.nLogger: self.nLogger = api.Logger @@ -114,12 +114,7 @@ def install(): # Disable all families except for the ones we explicitly want to see family_states = [ "write", - "lifeGroup", - "backdrop", - "imagesequence", - "mov" - "camera", - "pointcache", + "review" ] avalon.data["familiesStateDefault"] = False diff --git a/pype/plugins/ftrack/integrate_ftrack_instances.py b/pype/plugins/ftrack/integrate_ftrack_instances.py 
index 441dd6f88d..b927e2b445 100644 --- a/pype/plugins/ftrack/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/integrate_ftrack_instances.py @@ -13,6 +13,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder + 0.48 label = 'Integrate Ftrack Component' + families = ["render.frames", "still.frames"] family_mapping = {'camera': 'cam', 'look': 'look', @@ -23,7 +24,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'pointcache': 'cache', 'review': 'mov', 'write': 'img', - 'render': 'render'} + 'render': 'render' + } def process(self, instance): diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index c13f3dcaa4..fec9af26f6 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -58,19 +58,29 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): # preredered frames if not node["render"].value(): - families = "prerendered.frames" - collected_frames = os.listdir(output_dir) - self.log.debug("collected_frames: {}".format(label)) - if "files" not in instance.data: - instance.data["files"] = list() - instance.data["files"].append(collected_frames) - instance.data['transfer'] = False + try: + families = [ + "{}.frames".format(instance.data["avalonKnob"]["families"]), + 'ftrack' + ] + collected_frames = os.listdir(output_dir) + self.log.debug("collected_frames: {}".format(label)) + if "files" not in instance.data: + instance.data["files"] = list() + instance.data["files"].append(collected_frames) + instance.data['transfer'] = False + except Exception: + node["render"].setValue(True) + raise self.log.warning("needs to refresh the publishing") else: # dealing with local/farm rendering if node["render_farm"].value(): - families = "{}.farm".format(instance.data["avalonKnob"]["families"][0]) + families = [ + "{}.farm".format(instance.data["avalonKnob"]["families"])] else: - families = 
"{}.local".format(instance.data["avalonKnob"]["families"]) + families = [ + "{}.local".format(instance.data["avalonKnob"]["families"]) + ] self.log.debug("checking for error: {}".format(label)) instance.data.update({ @@ -78,7 +88,7 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): "outputDir": output_dir, "ext": ext, "label": label, - "families": [families, 'ftrack'], + "families": families, "startFrame": first_frame, "endFrame": last_frame, "outputType": output_type, diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py index 8c178df4e4..e73733bdf6 100644 --- a/pype/plugins/nuke/publish/integrate_rendered_frames.py +++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -24,7 +24,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): label = "Integrate Frames" order = pyblish.api.IntegratorOrder - families = ["prerendered.frames"] + families = ["render.frames", "still.frames", "prerender.frames"] def process(self, instance): @@ -211,7 +211,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin): template = anatomy.render.path instance.data["transfers"].append([src, dst]) - representation = { "schema": "pype:representation-2.0", "type": "representation", @@ -223,21 +222,23 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # Imprint shortcut to context # for performance reasons. 
"context": { - "root": root, - "project": PROJECT, - "projectcode": project['data']['code'], - 'task': api.Session["AVALON_TASK"], - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": version["name"], - "hierarchy": hierarchy, - "representation": ext[1:] + "root": root, + "project": PROJECT, + "projectcode": project['data']['code'], + 'task': api.Session["AVALON_TASK"], + "silo": asset['silo'], + "asset": ASSET, + "family": instance.data['family'], + "subset": subset["name"], + "version": version["name"], + "hierarchy": hierarchy, + "representation": ext[1:] } } destination_list.append(dst) instance.data['destination_list'] = destination_list + self.log.warning("instance: {}".format(instance)) + instance[0]["render"].setValue(False) representations.append(representation) self.log.info("Registering {} items".format(len(representations))) From a1b58e2a6388ce2b1a2611a66bd2bd33fb68a0a9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 10 Dec 2018 10:07:21 +0100 Subject: [PATCH 04/24] adding increment script version --- .../nuke/publish/increment_script_version.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 pype/plugins/nuke/publish/increment_script_version.py diff --git a/pype/plugins/nuke/publish/increment_script_version.py b/pype/plugins/nuke/publish/increment_script_version.py new file mode 100644 index 0000000000..59689ebd7a --- /dev/null +++ b/pype/plugins/nuke/publish/increment_script_version.py @@ -0,0 +1,18 @@ + +import nuke +import pyblish.api + + +class IncrementScriptVersion(pyblish.api.Extractor): + """Increment current script version.""" + + order = pyblish.api.Extractor.order - 0.35 + label = "Increment Current Script Version" + optional = True + hosts = ['nuke'] + + def process(self, context): + from pype.lib import version_up + path = context.data["currentFile"] + nuke.scriptSaveAs(version_up(path)) + self.log.info('Incrementing script version') From 
f445864e017e51da41ee3014a5a767bc260c5f3b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 10 Dec 2018 18:53:27 +0100 Subject: [PATCH 05/24] update render version synchronize --- pype/nuke/lib.py | 3 ++- .../ftrack/integrate_ftrack_instances.py | 6 ++++- pype/plugins/nuke/publish/collect_writes.py | 13 +++++++--- .../nuke/publish/increment_script_version.py | 8 +++--- .../nuke/publish/integrate_rendered_frames.py | 3 +-- pype/plugins/nuke/publish/render_local.py | 26 ++++++++++++++++++- .../plugins/nuke/publish/write_next_render.py | 17 ++++++++++++ 7 files changed, 64 insertions(+), 12 deletions(-) create mode 100644 pype/plugins/nuke/publish/write_next_render.py diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 332070d946..f4b317e252 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -36,7 +36,8 @@ def writes_version_sync(): for each in nuke.allNodes(): if each.Class() == 'Write': avalon_knob_data = get_avalon_knob_data(each) - if not avalon_knob_data: + if avalon_knob_data['families'] not in ["render"]: + log.info(avalon_knob_data['families']) continue try: node_file = each['file'].value() diff --git a/pype/plugins/ftrack/integrate_ftrack_instances.py b/pype/plugins/ftrack/integrate_ftrack_instances.py index b927e2b445..f88fd00f58 100644 --- a/pype/plugins/ftrack/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/integrate_ftrack_instances.py @@ -13,7 +13,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder + 0.48 label = 'Integrate Ftrack Component' - families = ["render.frames", "still.frames"] family_mapping = {'camera': 'cam', 'look': 'look', @@ -27,7 +26,12 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'render': 'render' } + exclude = ["prerender.frames"] + def process(self, instance): + for ex in self.exclude: + if ex in instance.data['families']: + return self.log.debug('instance {}'.format(instance)) diff --git a/pype/plugins/nuke/publish/collect_writes.py 
b/pype/plugins/nuke/publish/collect_writes.py index fec9af26f6..df1dfee129 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -1,5 +1,5 @@ import os - +import tempfile import nuke import pyblish.api import logging @@ -47,6 +47,8 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): path = nuke.filename(node) output_dir = os.path.dirname(path) self.log.debug('output dir: {}'.format(output_dir)) + + instance.data.update({"stagingDir": output_dir}) # Include start and end render frame in label name = node.name() @@ -60,7 +62,8 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): if not node["render"].value(): try: families = [ - "{}.frames".format(instance.data["avalonKnob"]["families"]), + "{}.frames".format( + instance.data["avalonKnob"]["families"]), 'ftrack' ] collected_frames = os.listdir(output_dir) @@ -71,7 +74,8 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): instance.data['transfer'] = False except Exception: node["render"].setValue(True) - raise self.log.warning("needs to refresh the publishing") + raise AttributeError( + "Files in `{}`. 
Needs to refresh the publishing".format(output_dir)) else: # dealing with local/farm rendering if node["render_farm"].value(): @@ -81,6 +85,8 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): families = [ "{}.local".format(instance.data["avalonKnob"]["families"]) ] + # adding for local renderings + instance.data.update({"stagingDir": tempfile.mkdtemp().replace("\\", "/")}) self.log.debug("checking for error: {}".format(label)) instance.data.update({ @@ -92,7 +98,6 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): "startFrame": first_frame, "endFrame": last_frame, "outputType": output_type, - "stagingDir": output_dir, "colorspace": node["colorspace"].value(), "handles": int(asset_data["data"].get("handles", 0)), "step": 1, diff --git a/pype/plugins/nuke/publish/increment_script_version.py b/pype/plugins/nuke/publish/increment_script_version.py index 59689ebd7a..c04a0a92bc 100644 --- a/pype/plugins/nuke/publish/increment_script_version.py +++ b/pype/plugins/nuke/publish/increment_script_version.py @@ -3,16 +3,18 @@ import nuke import pyblish.api -class IncrementScriptVersion(pyblish.api.Extractor): +class IncrementScriptVersion(pyblish.api.InstancePlugin): """Increment current script version.""" - order = pyblish.api.Extractor.order - 0.35 + order = pyblish.api.IntegratorOrder + 9 label = "Increment Current Script Version" optional = True hosts = ['nuke'] + families = ["render.frames"] - def process(self, context): + def process(self, instance): from pype.lib import version_up + context = instance.context path = context.data["currentFile"] nuke.scriptSaveAs(version_up(path)) self.log.info('Incrementing script version') diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py index e73733bdf6..6428d16748 100644 --- a/pype/plugins/nuke/publish/integrate_rendered_frames.py +++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -97,6 +97,7 @@ class 
IntegrateFrames(pyblish.api.InstancePlugin): assumed_data = instance.data["assumedTemplateData"] assumed_version = assumed_data["version"] + if assumed_version != next_version: raise AttributeError("Assumed version 'v{0:03d}' does not match" "next version in database " @@ -237,8 +238,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin): } destination_list.append(dst) instance.data['destination_list'] = destination_list - self.log.warning("instance: {}".format(instance)) - instance[0]["render"].setValue(False) representations.append(representation) self.log.info("Registering {} items".format(len(representations))) diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/render_local.py index eee67d1e40..95d303991f 100644 --- a/pype/plugins/nuke/publish/render_local.py +++ b/pype/plugins/nuke/publish/render_local.py @@ -1,5 +1,7 @@ import pyblish.api import nuke +import shutil +import os class NukeRenderLocal(pyblish.api.InstancePlugin): @@ -17,7 +19,7 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): families = ["render.local", "prerender.local", "still.local"] def process(self, instance): - + node = instance[0] # This should be a ContextPlugin, but this is a workaround # for a bug in pyblish to run once for a family: issue #250 context = instance.context @@ -33,6 +35,12 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): last_frame = instance.data.get("endFrame", None) node_subset_name = instance.data.get("name", None) + # swap path to stageDir + temp_dir = instance.data.get("stagingDir") + output_dir = instance.data.get("outputDir") + path = node['file'].value() + node['file'].setValue(path.replace(output_dir, temp_dir)) + self.log.info("Starting render") self.log.info("Start frame: {}".format(first_frame)) self.log.info("End frame: {}".format(last_frame)) @@ -43,6 +51,22 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): int(first_frame), int(last_frame) ) + + # copy data to correct dir + if not os.path.exists(output_dir): + 
os.makedirs(output_dir) + self.log.info("output dir has been created") + + for f in os.listdir(temp_dir): + self.log.info(f) + shutil.copy(os.path.join(temp_dir, os.path.basename(f)), + os.path.join(output_dir, os.path.basename(f))) + + # swap path back to publish path + path = node['file'].value() + node['file'].setValue(path.replace(temp_dir, output_dir)) + # swith to prerendered.frames instance[0]["render"].setValue(False) + self.log.info('Finished render') diff --git a/pype/plugins/nuke/publish/write_next_render.py b/pype/plugins/nuke/publish/write_next_render.py new file mode 100644 index 0000000000..0a24b68f3f --- /dev/null +++ b/pype/plugins/nuke/publish/write_next_render.py @@ -0,0 +1,17 @@ +import pyblish.api + + +class WriteToRender(pyblish.api.InstancePlugin): + """Swith Render knob on write instance to on, + so next time publish will be set to render + """ + + order = pyblish.api.IntegratorOrder + 11 + label = "Write to render next" + optional = True + hosts = ["nuke", "nukeassist"] + families = ["render.frames", "still.frames", "prerender.frames"] + + def process(self, instance): + instance[0]["render"].setValue(True) + self.log.info("Swith write node render to `on`") From 958aeab2e60f8080309f5ae152dbdbf3ec2ba273 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Dec 2018 18:01:47 +0100 Subject: [PATCH 06/24] wip improving publishing version --- .../ftrack/integrate_ftrack_instances.py | 2 +- pype/plugins/global/publish/integrate.py | 24 +++---- .../_publish_unused/integrate_staging_dir.py | 27 ++++++++ pype/plugins/nuke/publish/collect_families.py | 49 +++++++++++++ .../plugins/nuke/publish/collect_instances.py | 19 +++--- pype/plugins/nuke/publish/collect_script.py | 40 +++++++++++ pype/plugins/nuke/publish/collect_writes.py | 68 +++++++------------ .../nuke/publish/extract_output_directory.py | 5 +- ...ender_local.py => extract_render_local.py} | 13 ---- ...{script_save.py => extract_script_save.py} | 2 +- 
.../nuke/publish/extract_write_next_render.py | 23 +++++++ .../nuke/publish/integrate_rendered_frames.py | 12 ++-- ...version.py => integrate_script_version.py} | 2 +- .../nuke/publish/validate_collection.py | 11 +-- .../nuke/publish/validate_write_families.py | 38 +++++++++++ .../plugins/nuke/publish/write_next_render.py | 17 ----- 16 files changed, 237 insertions(+), 115 deletions(-) create mode 100644 pype/plugins/nuke/_publish_unused/integrate_staging_dir.py create mode 100644 pype/plugins/nuke/publish/collect_families.py create mode 100644 pype/plugins/nuke/publish/collect_script.py rename pype/plugins/nuke/publish/{render_local.py => extract_render_local.py} (79%) rename pype/plugins/nuke/publish/{script_save.py => extract_script_save.py} (83%) create mode 100644 pype/plugins/nuke/publish/extract_write_next_render.py rename pype/plugins/nuke/publish/{increment_script_version.py => integrate_script_version.py} (91%) create mode 100644 pype/plugins/nuke/publish/validate_write_families.py delete mode 100644 pype/plugins/nuke/publish/write_next_render.py diff --git a/pype/plugins/ftrack/integrate_ftrack_instances.py b/pype/plugins/ftrack/integrate_ftrack_instances.py index f88fd00f58..5b5d3d3612 100644 --- a/pype/plugins/ftrack/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/integrate_ftrack_instances.py @@ -26,7 +26,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'render': 'render' } - exclude = ["prerender.frames"] + exclude = [] def process(self, instance): for ex in self.exclude: diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index 698eb907e9..7e56255020 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -135,7 +135,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # \|________| # root = api.registered_root() - hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents'] + hierarchy = io.find_one({"type": 'asset', 
"name": ASSET})['data']['parents'] if hierarchy: # hierarchy = os.path.sep.join(hierarchy) hierarchy = os.path.join(*hierarchy) @@ -226,17 +226,17 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Imprint shortcut to context # for performance reasons. "context": { - "root": root, - "project": PROJECT, - "projectcode": project['data']['code'], - 'task': api.Session["AVALON_TASK"], - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": version["name"], - "hierarchy": hierarchy, - "representation": ext[1:] + "root": root, + "project": PROJECT, + "projectcode": project['data']['code'], + 'task': api.Session["AVALON_TASK"], + "silo": asset['silo'], + "asset": ASSET, + "family": instance.data['family'], + "subset": subset["name"], + "version": version["name"], + "hierarchy": hierarchy, + "representation": ext[1:] } } diff --git a/pype/plugins/nuke/_publish_unused/integrate_staging_dir.py b/pype/plugins/nuke/_publish_unused/integrate_staging_dir.py new file mode 100644 index 0000000000..e05c42ae50 --- /dev/null +++ b/pype/plugins/nuke/_publish_unused/integrate_staging_dir.py @@ -0,0 +1,27 @@ +import pyblish.api +import shutil +import os + + +class CopyStagingDir(pyblish.api.InstancePlugin): + """Copy data rendered into temp local directory + """ + + order = pyblish.api.IntegratorOrder - 2 + label = "Copy data from temp dir" + hosts = ["nuke", "nukeassist"] + families = ["render.local"] + + def process(self, instance): + temp_dir = instance.data.get("stagingDir") + output_dir = instance.data.get("outputDir") + + # copy data to correct dir + if not os.path.exists(output_dir): + os.makedirs(output_dir) + self.log.info("output dir has been created") + + for f in os.listdir(temp_dir): + self.log.info("copy file to correct destination: {}".format(f)) + shutil.copy(os.path.join(temp_dir, os.path.basename(f)), + os.path.join(output_dir, os.path.basename(f))) diff --git 
a/pype/plugins/nuke/publish/collect_families.py b/pype/plugins/nuke/publish/collect_families.py new file mode 100644 index 0000000000..e5f7529a41 --- /dev/null +++ b/pype/plugins/nuke/publish/collect_families.py @@ -0,0 +1,49 @@ +import pyblish.api + + +@pyblish.api.log +class CollectInstanceFamilies(pyblish.api.ContextPlugin): + """Collect families for all instances""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Define Families" + hosts = ["nuke", "nukeassist"] + + def process(self, context): + self.log.info('context.data["instances"]: {}'.format(context.data["instances"])) + for instance in context.data["instances"]: + + if "write" in instance.data["family"]: + if not instance.data["publish"]: + continue + + node = instance[0] + + if not node["render"].value(): + families = [ + "{}.frames".format( + instance.data["avalonKnob"]["families"]), + 'ftrack'] + # to ignore staging dir op in integrate + instance.data['transfer'] = False + else: + # dealing with local/farm rendering + if node["render_farm"].value(): + families = [ + "{}.farm".format( + instance.data["avalonKnob"]["families"]), + 'ftrack'] + else: + families = [ + "{}.local".format( + instance.data["avalonKnob"]["families"]), + 'ftrack'] + + instance.data.update({"families": families}) + + # Sort/grouped by family (preserving local index) + context[:] = sorted(context, key=self.sort_by_family) + + def sort_by_family(self, instance): + """Sort by family""" + return instance.data.get("families", instance.data.get("family")) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index 91f4fcaac8..14e12f8670 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -2,6 +2,7 @@ import os import nuke import pyblish.api +from avalon import io, api from pype.nuke.lib import get_avalon_knob_data @@ -9,11 +10,14 @@ from pype.nuke.lib import get_avalon_knob_data class 
CollectNukeInstances(pyblish.api.ContextPlugin): """Collect all nodes with Avalon knob.""" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder + 0.01 label = "Collect Instances" hosts = ["nuke", "nukeassist"] def process(self, context): + asset_data = io.find_one({"type": "asset", + "name": api.Session["AVALON_ASSET"]}) + self.log.debug("asset_data: {}".format(asset_data["data"])) instances = [] # creating instances per write node for node in nuke.allNodes(): @@ -44,10 +48,12 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): "label": node.name(), "name": node.name(), "subset": subset, - "families": [avalon_knob_data["families"]], "family": avalon_knob_data["family"], "avalonKnob": avalon_knob_data, - "publish": node.knob('publish') + "publish": node.knob('publish').value(), + "handles": int(asset_data["data"].get("handles", 0)), + "step": 1, + "fps": int(nuke.root()['fps'].value()) }) self.log.info("collected instance: {}".format(instance.data)) @@ -55,11 +61,4 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): context.data["instances"] = instances - # Sort/grouped by family (preserving local index) - context[:] = sorted(context, key=self.sort_by_family) - self.log.debug("context: {}".format(context)) - - def sort_by_family(self, instance): - """Sort by family""" - return instance.data.get("families", instance.data.get("family")) diff --git a/pype/plugins/nuke/publish/collect_script.py b/pype/plugins/nuke/publish/collect_script.py new file mode 100644 index 0000000000..a52ed84dbc --- /dev/null +++ b/pype/plugins/nuke/publish/collect_script.py @@ -0,0 +1,40 @@ +from avalon import api, io +import nuke +import pyblish.api +import os + + +class CollectScript(pyblish.api.ContextPlugin): + """Publish current script version.""" + + order = pyblish.api.CollectorOrder + 0.1 + label = "Collect Script to publish" + hosts = ['nuke'] + + def process(self, context): + asset_data = io.find_one({"type": "asset", + "name": 
api.Session["AVALON_ASSET"]}) + self.log.debug("asset_data: {}".format(asset_data["data"])) + + # creating instances per write node + file_path = nuke.root()['name'].value() + base_name = os.path.basename(file_path) + subset = base_name.split("_v")[0] + + # Create instance + instance = context.create_instance(subset) + + instance.data.update({ + "subset": subset, + "asset": os.environ["AVALON_ASSET"], + "label": base_name, + "name": base_name, + "subset": subset, + "family": "script", + "handles": int(asset_data["data"].get("handles", 0)), + "step": 1, + "fps": int(nuke.root()['fps'].value()) + + }) + self.log.info('Publishing script version') + context.data["instances"].append(instance) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index df1dfee129..8493e3423d 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -3,7 +3,7 @@ import tempfile import nuke import pyblish.api import logging -from avalon import io, api + log = logging.getLogger(__name__) @@ -17,10 +17,11 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): hosts = ["nuke", "nukeassist"] def process(self, context): - asset_data = io.find_one({"type": "asset", - "name": api.Session["AVALON_ASSET"]}) - self.log.debug("asset_data: {}".format(asset_data["data"])) for instance in context.data["instances"]: + + if not instance.data["publish"]: + continue + self.log.debug("checking instance: {}".format(instance)) node = instance[0] @@ -48,10 +49,9 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): output_dir = os.path.dirname(path) self.log.debug('output dir: {}'.format(output_dir)) - instance.data.update({"stagingDir": output_dir}) - # Include start and end render frame in label + # create label name = node.name() - + # Include start and end render frame in label label = "{0} ({1}-{2})".format( name, int(first_frame), @@ -59,55 +59,35 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): ) # 
preredered frames - if not node["render"].value(): - try: - families = [ - "{}.frames".format( - instance.data["avalonKnob"]["families"]), - 'ftrack' - ] - collected_frames = os.listdir(output_dir) - self.log.debug("collected_frames: {}".format(label)) - if "files" not in instance.data: - instance.data["files"] = list() - instance.data["files"].append(collected_frames) - instance.data['transfer'] = False - except Exception: - node["render"].setValue(True) - raise AttributeError( - "Files in `{}`. Needs to refresh the publishing".format(output_dir)) - else: - # dealing with local/farm rendering - if node["render_farm"].value(): - families = [ - "{}.farm".format(instance.data["avalonKnob"]["families"])] - else: - families = [ - "{}.local".format(instance.data["avalonKnob"]["families"]) - ] - # adding for local renderings - instance.data.update({"stagingDir": tempfile.mkdtemp().replace("\\", "/")}) + # collect frames by try + # collect families in next file + if "files" not in instance.data: + instance.data["files"] = list() + + try: + collected_frames = os.listdir(output_dir) + self.log.debug("collected_frames: {}".format(label)) + + instance.data["files"].append(collected_frames) + except Exception: + pass + + # adding stage dir for faster local renderings + staging_dir = tempfile.mkdtemp().replace("\\", "/") + instance.data.update({"stagingDir": staging_dir}) + self.log.debug('staging_dir: {}'.format(staging_dir)) - self.log.debug("checking for error: {}".format(label)) instance.data.update({ "path": path, "outputDir": output_dir, "ext": ext, "label": label, - "families": families, "startFrame": first_frame, "endFrame": last_frame, "outputType": output_type, "colorspace": node["colorspace"].value(), - "handles": int(asset_data["data"].get("handles", 0)), - "step": 1, - "fps": int(nuke.root()['fps'].value()) }) self.log.debug("instance.data: {}".format(instance.data)) self.log.debug("context: {}".format(context)) - - def sort_by_family(self, instance): - """Sort by 
family""" - return instance.data.get("families", instance.data.get("family")) diff --git a/pype/plugins/nuke/publish/extract_output_directory.py b/pype/plugins/nuke/publish/extract_output_directory.py index 36ddb35e30..d999d200de 100644 --- a/pype/plugins/nuke/publish/extract_output_directory.py +++ b/pype/plugins/nuke/publish/extract_output_directory.py @@ -6,7 +6,7 @@ import pyblish.api class ExtractOutputDirectory(pyblish.api.InstancePlugin): """Extracts the output path for any collection or single output_path.""" - order = pyblish.api.ExtractorOrder - 0.1 + order = pyblish.api.ExtractorOrder - 0.05 label = "Output Directory" optional = True @@ -16,9 +16,6 @@ class ExtractOutputDirectory(pyblish.api.InstancePlugin): path = None - if "collection" in instance.data.keys(): - path = instance.data["collection"].format() - if "output_path" in instance.data.keys(): path = instance.data["path"] diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/extract_render_local.py similarity index 79% rename from pype/plugins/nuke/publish/render_local.py rename to pype/plugins/nuke/publish/extract_render_local.py index 95d303991f..95733b1d85 100644 --- a/pype/plugins/nuke/publish/render_local.py +++ b/pype/plugins/nuke/publish/extract_render_local.py @@ -52,21 +52,8 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): int(last_frame) ) - # copy data to correct dir - if not os.path.exists(output_dir): - os.makedirs(output_dir) - self.log.info("output dir has been created") - - for f in os.listdir(temp_dir): - self.log.info(f) - shutil.copy(os.path.join(temp_dir, os.path.basename(f)), - os.path.join(output_dir, os.path.basename(f))) - # swap path back to publish path path = node['file'].value() node['file'].setValue(path.replace(temp_dir, output_dir)) - # swith to prerendered.frames - instance[0]["render"].setValue(False) - self.log.info('Finished render') diff --git a/pype/plugins/nuke/publish/script_save.py 
b/pype/plugins/nuke/publish/extract_script_save.py similarity index 83% rename from pype/plugins/nuke/publish/script_save.py rename to pype/plugins/nuke/publish/extract_script_save.py index 472742f464..0c8e561fd7 100644 --- a/pype/plugins/nuke/publish/script_save.py +++ b/pype/plugins/nuke/publish/extract_script_save.py @@ -6,7 +6,7 @@ class ExtractScriptSave(pyblish.api.Extractor): """ """ label = 'Script Save' - order = pyblish.api.Extractor.order - 0.45 + order = pyblish.api.Extractor.order - 0.1 hosts = ['nuke'] def process(self, instance): diff --git a/pype/plugins/nuke/publish/extract_write_next_render.py b/pype/plugins/nuke/publish/extract_write_next_render.py new file mode 100644 index 0000000000..d13e67a563 --- /dev/null +++ b/pype/plugins/nuke/publish/extract_write_next_render.py @@ -0,0 +1,23 @@ +import pyblish.api + + +class WriteToRender(pyblish.api.InstancePlugin): + """Swith Render knob on write instance to on, + so next time publish will be set to render + """ + + order = pyblish.api.ExtractorOrder + 0.1 + label = "Write to render next" + optional = True + hosts = ["nuke", "nukeassist"] + families = ["write"] + + def process(self, instance): + if [f for f in instance.data["families"] + if ".frames" in f]: + instance[0]["render"].setValue(True) + self.log.info("Swith write node render to `on`") + else: + # swith to + instance[0]["render"].setValue(False) + self.log.info("Swith write node render to `Off`") diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py index 6428d16748..9472cdf36b 100644 --- a/pype/plugins/nuke/publish/integrate_rendered_frames.py +++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -24,7 +24,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin): label = "Integrate Frames" order = pyblish.api.IntegratorOrder - families = ["render.frames", "still.frames", "prerender.frames"] + families = ["render.frames", "still.frames", "prerender.frames", + 
"render.local", "still.local", "prerender.local"] def process(self, instance): @@ -97,7 +98,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin): assumed_data = instance.data["assumedTemplateData"] assumed_version = assumed_data["version"] - if assumed_version != next_version: raise AttributeError("Assumed version 'v{0:03d}' does not match" "next version in database " @@ -112,11 +112,9 @@ class IntegrateFrames(pyblish.api.InstancePlugin): locations=[LOCATION], data=version_data) - self.log.debug("version: {}".format(version)) self.log.debug("Creating version ...") - version_id = io.insert_one(version).inserted_id - self.log.debug("version_id: {}".format(version_id)) + # Write to disk # _ # | | @@ -129,11 +127,10 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # root = api.registered_root() hierarchy = io.find_one({"type": 'asset', "name": ASSET})['data']['parents'] - if hierarchy: # hierarchy = os.path.sep.join(hierarchy) hierarchy = os.path.join(*hierarchy) - self.log.debug("hierarchy: {}".format(hierarchy)) + template_data = {"root": root, "project": {"name": PROJECT, "code": project['data']['code']}, @@ -199,6 +196,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # |_______| # fname = files + self.log.info("fname: {}".format(fname)) assert not os.path.isabs(fname), ( "Given file name is a full path" ) diff --git a/pype/plugins/nuke/publish/increment_script_version.py b/pype/plugins/nuke/publish/integrate_script_version.py similarity index 91% rename from pype/plugins/nuke/publish/increment_script_version.py rename to pype/plugins/nuke/publish/integrate_script_version.py index c04a0a92bc..c664cbb05d 100644 --- a/pype/plugins/nuke/publish/increment_script_version.py +++ b/pype/plugins/nuke/publish/integrate_script_version.py @@ -6,7 +6,7 @@ import pyblish.api class IncrementScriptVersion(pyblish.api.InstancePlugin): """Increment current script version.""" - order = pyblish.api.IntegratorOrder + 9 + order = pyblish.api.IntegratorOrder + 0.9 label = 
"Increment Current Script Version" optional = True hosts = ['nuke'] diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index 27f258d3b3..44b6825883 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -23,9 +23,9 @@ class RepairCollectionAction(pyblish.api.Action): class ValidateCollection(pyblish.api.InstancePlugin): """ Validates file output. """ - order = pyblish.api.ValidatorOrder - # optional = True - families = ['prerendered.frames'] + order = pyblish.api.ValidatorOrder + 0.1 + families = ["render.frames", "still.frames", "prerender.frames"] + label = "Check prerendered frames" hosts = ["nuke"] actions = [RepairCollectionAction] @@ -47,11 +47,12 @@ class ValidateCollection(pyblish.api.InstancePlugin): basename, ext = os.path.splitext(list(collections[0])[0]) assert all(ext == os.path.splitext(name)[1] - for name in collections[0]), self.log.info( + for name in collections[0]), self.log.info( "Files had varying suffixes" ) - assert not any(os.path.isabs(name) for name in collections[0]), self.log.info("some file name are absolute") + assert not any(os.path.isabs(name) + for name in collections[0]), self.log.info("some file name are absolute") self.log.info('frame_length: {}'.format(frame_length)) self.log.info('len(list(instance.data["files"])): {}'.format( diff --git a/pype/plugins/nuke/publish/validate_write_families.py b/pype/plugins/nuke/publish/validate_write_families.py new file mode 100644 index 0000000000..238c9bc2ee --- /dev/null +++ b/pype/plugins/nuke/publish/validate_write_families.py @@ -0,0 +1,38 @@ +import os +import pyblish.api +import clique + + +@pyblish.api.log +class RepairWriteFamiliesAction(pyblish.api.Action): + label = "Fix Write's render attributes" + on = "failed" + icon = "wrench" + + def process(self, context, plugin): + context[0][0]["render"].setValue(True) + self.log.info("Rendering toggled ON") + + 
+class ValidateWriteFamilies(pyblish.api.InstancePlugin): + """ Validates write families. """ + + order = pyblish.api.ValidatorOrder + label = "Check correct writes families" + hosts = ["nuke"] + families = ["write"] + actions = [RepairWriteFamiliesAction] + + def process(self, instance): + self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) + + if not [f for f in instance.data["families"] + if ".frames" in f]: + return + + assert instance.data["files"], self.log.info( + "`{}`: Swith `Render` on! \n" + "No available frames to add to database. \n" + "Use repair to render all frames".format(__name__)) + + self.log.info("Checked correct writes families") diff --git a/pype/plugins/nuke/publish/write_next_render.py b/pype/plugins/nuke/publish/write_next_render.py deleted file mode 100644 index 0a24b68f3f..0000000000 --- a/pype/plugins/nuke/publish/write_next_render.py +++ /dev/null @@ -1,17 +0,0 @@ -import pyblish.api - - -class WriteToRender(pyblish.api.InstancePlugin): - """Swith Render knob on write instance to on, - so next time publish will be set to render - """ - - order = pyblish.api.IntegratorOrder + 11 - label = "Write to render next" - optional = True - hosts = ["nuke", "nukeassist"] - families = ["render.frames", "still.frames", "prerender.frames"] - - def process(self, instance): - instance[0]["render"].setValue(True) - self.log.info("Swith write node render to `on`") From c2af511e0698a78e12d1ac5525e0860851bd9051 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Dec 2018 14:59:43 +0100 Subject: [PATCH 07/24] update publishing script with write render --- .../ftrack/integrate_ftrack_instances.py | 1 + pype/plugins/global/publish/integrate.py | 1 + pype/plugins/nuke/publish/collect_families.py | 31 +++++++++---------- pype/plugins/nuke/publish/collect_script.py | 24 ++++++++++++-- pype/plugins/nuke/publish/collect_writes.py | 3 +- pype/plugins/nuke/publish/extract_script.py | 21 +++++++++++++ 6 files changed, 60 insertions(+), 
21 deletions(-) create mode 100644 pype/plugins/nuke/publish/extract_script.py diff --git a/pype/plugins/ftrack/integrate_ftrack_instances.py b/pype/plugins/ftrack/integrate_ftrack_instances.py index 42370e3087..24cc43423c 100644 --- a/pype/plugins/ftrack/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/integrate_ftrack_instances.py @@ -24,6 +24,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'pointcache': 'cache', 'write': 'img', 'render': 'render', + 'script': 'comp', 'review': 'mov'} exclude = [] diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index 45a1a3fcea..4f50b9b3ef 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -36,6 +36,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "vrayproxy", "yetiRig", "yeticache", + "script", "review"] def process(self, instance): diff --git a/pype/plugins/nuke/publish/collect_families.py b/pype/plugins/nuke/publish/collect_families.py index e5f7529a41..a361ceceab 100644 --- a/pype/plugins/nuke/publish/collect_families.py +++ b/pype/plugins/nuke/publish/collect_families.py @@ -6,40 +6,37 @@ class CollectInstanceFamilies(pyblish.api.ContextPlugin): """Collect families for all instances""" order = pyblish.api.CollectorOrder + 0.2 - label = "Define Families" + label = "Collect Families" hosts = ["nuke", "nukeassist"] def process(self, context): - self.log.info('context.data["instances"]: {}'.format(context.data["instances"])) for instance in context.data["instances"]: + if not instance.data["publish"]: + continue + + # set for ftrack to accept + instance.data["families"] = ["ftrack"] + if "write" in instance.data["family"]: - if not instance.data["publish"]: - continue node = instance[0] if not node["render"].value(): - families = [ - "{}.frames".format( - instance.data["avalonKnob"]["families"]), - 'ftrack'] + families = "{}.frames".format( + instance.data["avalonKnob"]["families"]) # to ignore 
staging dir op in integrate instance.data['transfer'] = False else: # dealing with local/farm rendering if node["render_farm"].value(): - families = [ - "{}.farm".format( - instance.data["avalonKnob"]["families"]), - 'ftrack'] + families = "{}.farm".format( + instance.data["avalonKnob"]["families"]) else: - families = [ - "{}.local".format( - instance.data["avalonKnob"]["families"]), - 'ftrack'] + families = "{}.local".format( + instance.data["avalonKnob"]["families"]) - instance.data.update({"families": families}) + instance.data["families"].append(families) # Sort/grouped by family (preserving local index) context[:] = sorted(context, key=self.sort_by_family) diff --git a/pype/plugins/nuke/publish/collect_script.py b/pype/plugins/nuke/publish/collect_script.py index a52ed84dbc..1a933186de 100644 --- a/pype/plugins/nuke/publish/collect_script.py +++ b/pype/plugins/nuke/publish/collect_script.py @@ -2,6 +2,8 @@ from avalon import api, io import nuke import pyblish.api import os +import tempfile +from avalon.nuke.lib import add_publish_knob class CollectScript(pyblish.api.ContextPlugin): @@ -16,13 +18,26 @@ class CollectScript(pyblish.api.ContextPlugin): "name": api.Session["AVALON_ASSET"]}) self.log.debug("asset_data: {}".format(asset_data["data"])) + root = nuke.root() + add_publish_knob(root) + # creating instances per write node - file_path = nuke.root()['name'].value() + file_path = root['name'].value() base_name = os.path.basename(file_path) subset = base_name.split("_v")[0] + # Get frame range + first_frame = int(root["first_frame"].getValue()) + last_frame = int(root["last_frame"].getValue()) + # Create instance instance = context.create_instance(subset) + instance.add(root) + + # adding stage dir for faster local renderings + staging_dir = tempfile.mkdtemp().replace("\\", "/") + instance.data.update({"stagingDir": staging_dir}) + self.log.debug('staging_dir: {}'.format(staging_dir)) instance.data.update({ "subset": subset, @@ -30,11 +45,14 @@ class 
CollectScript(pyblish.api.ContextPlugin): "label": base_name, "name": base_name, "subset": subset, + "startFrame": first_frame, + "endFrame": last_frame, + "publish": root.knob('publish').value(), "family": "script", "handles": int(asset_data["data"].get("handles", 0)), "step": 1, - "fps": int(nuke.root()['fps'].value()) - + "fps": int(root['fps'].value()), + "files": base_name }) self.log.info('Publishing script version') context.data["instances"].append(instance) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 8493e3423d..96f14c7eaa 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -22,12 +22,13 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): if not instance.data["publish"]: continue - self.log.debug("checking instance: {}".format(instance)) node = instance[0] if node.Class() != "Write": continue + self.log.debug("checking instance: {}".format(instance)) + # Determine defined file type ext = node["file_type"].value() diff --git a/pype/plugins/nuke/publish/extract_script.py b/pype/plugins/nuke/publish/extract_script.py new file mode 100644 index 0000000000..573fb9598a --- /dev/null +++ b/pype/plugins/nuke/publish/extract_script.py @@ -0,0 +1,21 @@ +import nuke +import pyblish.api +import os + + +class ExtractScript(pyblish.api.InstancePlugin): + """Publish script + """ + label = 'Extract Script' + order = pyblish.api.ExtractorOrder - 0.05 + hosts = ['nuke'] + families = ["script"] + + def process(self, instance): + + self.log.info('Extracting script') + staging_dir = instance.data["stagingDir"] + file_name = instance.data["name"] + path = os.path.join(staging_dir, file_name) + + nuke.scriptSaveAs(path) From c6c4411fed1d7fca45c27bd19317356bc76c5b2f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Dec 2018 18:14:24 +0100 Subject: [PATCH 08/24] update validate families --- pype/maya/action.py | 2 +- pype/nuke/__init__.py | 1 + 
pype/nuke/actions.py | 58 +++++++++++++++++++ pype/nuke/lib.py | 1 - .../maya/publish/validate_joints_hidden.py | 2 +- pype/plugins/nuke/publish/collect_script.py | 18 +++--- .../nuke/publish/validate_collection.py | 3 + .../nuke/publish/validate_write_families.py | 39 ++++++++----- 8 files changed, 101 insertions(+), 23 deletions(-) create mode 100644 pype/nuke/actions.py diff --git a/pype/maya/action.py b/pype/maya/action.py index 6281a82409..2dcdb82dc9 100644 --- a/pype/maya/action.py +++ b/pype/maya/action.py @@ -125,4 +125,4 @@ class SelectInvalidAction(pyblish.api.Action): cmds.select(invalid, replace=True, noExpand=True) else: self.log.info("No invalid nodes found.") - cmds.select(deselect=True) \ No newline at end of file + cmds.select(deselect=True) diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index 8fc7cd3599..948645fd0d 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -87,6 +87,7 @@ def reload_config(): "app.api", "{}.api".format(AVALON_CONFIG), "{}.templates".format(AVALON_CONFIG), + "{}.nuke.actions".format(AVALON_CONFIG), "{}.nuke.templates".format(AVALON_CONFIG), "{}.nuke.menu".format(AVALON_CONFIG) ): diff --git a/pype/nuke/actions.py b/pype/nuke/actions.py new file mode 100644 index 0000000000..640e41a7de --- /dev/null +++ b/pype/nuke/actions.py @@ -0,0 +1,58 @@ +# absolute_import is needed to counter the `module has no cmds error` in Maya +from __future__ import absolute_import + +import pyblish.api + +from avalon.nuke.lib import ( + reset_selection, + select_nodes +) + +from ..action import get_errored_instances_from_context + + +class SelectInvalidAction(pyblish.api.Action): + """Select invalid nodes in Maya when plug-in failed. + + To retrieve the invalid nodes this assumes a static `get_invalid()` + method is available on the plugin. 
+ + """ + label = "Select invalid nodes" + on = "failed" # This action is only available on a failed plug-in + icon = "search" # Icon from Awesome Icon + + def process(self, context, plugin): + + try: + import nuke + except ImportError: + raise ImportError("Current host is not Nuke") + + errored_instances = get_errored_instances_from_context(context) + + # Apply pyblish.logic to get the instances for the plug-in + instances = pyblish.api.instances_by_plugin(errored_instances, plugin) + + # Get the invalid nodes for the plug-ins + self.log.info("Finding invalid nodes..") + invalid = list() + for instance in instances: + invalid_nodes = plugin.get_invalid(instance) + + if invalid_nodes: + if isinstance(invalid_nodes, (list, tuple)): + invalid.append(invalid_nodes[0]) + else: + self.log.warning("Plug-in returned to be invalid, " + "but has no selectable nodes.") + + # Ensure unique (process each node only once) + invalid = list(set(invalid)) + + if invalid: + self.log.info("Selecting invalid nodes: {}".format(invalid)) + reset_selection() + select_nodes(invalid) + else: + self.log.info("No invalid nodes found.") diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index f4b317e252..2f002ce130 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1,5 +1,4 @@ import sys -import os from collections import OrderedDict from pprint import pprint from avalon.vendor.Qt import QtGui diff --git a/pype/plugins/maya/publish/validate_joints_hidden.py b/pype/plugins/maya/publish/validate_joints_hidden.py index 4e35cd8c74..acc1dd07a7 100644 --- a/pype/plugins/maya/publish/validate_joints_hidden.py +++ b/pype/plugins/maya/publish/validate_joints_hidden.py @@ -19,7 +19,7 @@ class ValidateJointsHidden(pyblish.api.InstancePlugin): order = pype.api.ValidateContentsOrder hosts = ['maya'] - families = ["rig'] + families = ["rig"] category = 'rig' version = (0, 1, 0) label = "Joints Hidden" diff --git a/pype/plugins/nuke/publish/collect_script.py b/pype/plugins/nuke/publish/collect_script.py 
index 1a933186de..099217c8bb 100644 --- a/pype/plugins/nuke/publish/collect_script.py +++ b/pype/plugins/nuke/publish/collect_script.py @@ -3,7 +3,10 @@ import nuke import pyblish.api import os import tempfile -from avalon.nuke.lib import add_publish_knob +from avalon.nuke.lib import ( + add_publish_knob, + add_avalon_tab_knob +) class CollectScript(pyblish.api.ContextPlugin): @@ -16,9 +19,10 @@ class CollectScript(pyblish.api.ContextPlugin): def process(self, context): asset_data = io.find_one({"type": "asset", "name": api.Session["AVALON_ASSET"]}) - self.log.debug("asset_data: {}".format(asset_data["data"])) + self.log.info("asset_data: {}".format(asset_data["data"])) root = nuke.root() + add_avalon_tab_knob(root) add_publish_knob(root) # creating instances per write node @@ -37,18 +41,18 @@ class CollectScript(pyblish.api.ContextPlugin): # adding stage dir for faster local renderings staging_dir = tempfile.mkdtemp().replace("\\", "/") instance.data.update({"stagingDir": staging_dir}) - self.log.debug('staging_dir: {}'.format(staging_dir)) + self.log.info('nukescript: staging_dir: {}'.format(staging_dir)) instance.data.update({ - "subset": subset, - "asset": os.environ["AVALON_ASSET"], + "subset": os.getenv("AVALON_TASK", None), + "asset": os.getenv("AVALON_ASSET", None), "label": base_name, "name": base_name, - "subset": subset, "startFrame": first_frame, "endFrame": last_frame, "publish": root.knob('publish').value(), - "family": "script", + "family": "nukescript", + "representation": "nk", "handles": int(asset_data["data"].get("handles", 0)), "step": 1, "fps": int(root['fps'].value()), diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index 44b6825883..09498c58de 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -32,6 +32,9 @@ class ValidateCollection(pyblish.api.InstancePlugin): def process(self, instance): 
self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) + if not instance.data["files"]: + return + collections, remainder = clique.assemble(*instance.data['files']) self.log.info('collections: {}'.format(str(collections))) diff --git a/pype/plugins/nuke/publish/validate_write_families.py b/pype/plugins/nuke/publish/validate_write_families.py index 238c9bc2ee..1dfdbc06d5 100644 --- a/pype/plugins/nuke/publish/validate_write_families.py +++ b/pype/plugins/nuke/publish/validate_write_families.py @@ -1,19 +1,21 @@ -import os + import pyblish.api -import clique +import pype.api +import pype.nuke.actions -@pyblish.api.log class RepairWriteFamiliesAction(pyblish.api.Action): label = "Fix Write's render attributes" on = "failed" icon = "wrench" - def process(self, context, plugin): - context[0][0]["render"].setValue(True) + def process(self, instance, plugin): + self.log.info("instance {}".format(instance)) + instance["render"].setValue(True) self.log.info("Rendering toggled ON") +@pyblish.api.log class ValidateWriteFamilies(pyblish.api.InstancePlugin): """ Validates write families. """ @@ -21,18 +23,29 @@ class ValidateWriteFamilies(pyblish.api.InstancePlugin): label = "Check correct writes families" hosts = ["nuke"] families = ["write"] - actions = [RepairWriteFamiliesAction] - - def process(self, instance): - self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) + actions = [pype.nuke.actions.SelectInvalidAction, pype.api.RepairAction] + @staticmethod + def get_invalid(instance): if not [f for f in instance.data["families"] if ".frames" in f]: return - assert instance.data["files"], self.log.info( - "`{}`: Swith `Render` on! \n" - "No available frames to add to database. 
\n" - "Use repair to render all frames".format(__name__)) + if not instance.data["files"]: + return (instance) + + def process(self, instance): + self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) + invalid = self.get_invalid(instance) + + if invalid: + raise ValueError(str("`{}`: Switch `Render` on! " + "> {}".format(__name__, invalid))) self.log.info("Checked correct writes families") + + @classmethod + def repair(cls, instance): + cls.log.info("instance {}".format(instance)) + instance[0]["render"].setValue(True) + cls.log.info("Rendering toggled ON") From c608e1335fa9b700eab426caa5238ac34c398df8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 18 Dec 2018 12:59:49 +0100 Subject: [PATCH 09/24] implemented review [mov, thumbnail], upversioning script sinchronized with publishing write --- pype/plugins/ftrack/integrate_ftrack_api.py | 2 +- .../ftrack/integrate_ftrack_instances.py | 2 +- .../global/publish/collect_templates.py | 12 +- pype/plugins/global/publish/integrate.py | 3 +- .../_publish_unused/collect_render_target.py | 47 ----- .../extract_nuke_baked_colorspace.py | 107 ---------- pype/plugins/nuke/publish/collect_families.py | 14 +- pype/plugins/nuke/publish/collect_review.py | 27 +++ pype/plugins/nuke/publish/collect_script.py | 14 +- pype/plugins/nuke/publish/collect_writes.py | 7 +- pype/plugins/nuke/publish/extract_frames.py | 21 ++ .../nuke/publish/extract_render_local.py | 16 +- pype/plugins/nuke/publish/extract_review.py | 186 ++++++++++++++++++ pype/plugins/nuke/publish/extract_script.py | 29 ++- .../nuke/publish/integrate_rendered_frames.py | 30 ++- .../nuke/publish/integrate_script_version.py | 7 +- .../nuke/publish/validate_collection.py | 28 ++- 17 files changed, 326 insertions(+), 226 deletions(-) delete mode 100644 pype/plugins/nuke/_publish_unused/collect_render_target.py delete mode 100644 pype/plugins/nuke/_publish_unused/extract_nuke_baked_colorspace.py create mode 100644 
pype/plugins/nuke/publish/collect_review.py create mode 100644 pype/plugins/nuke/publish/extract_frames.py create mode 100644 pype/plugins/nuke/publish/extract_review.py diff --git a/pype/plugins/ftrack/integrate_ftrack_api.py b/pype/plugins/ftrack/integrate_ftrack_api.py index 279dfb9b54..ce827dba04 100644 --- a/pype/plugins/ftrack/integrate_ftrack_api.py +++ b/pype/plugins/ftrack/integrate_ftrack_api.py @@ -281,7 +281,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): # Inform user about no changes to the database. if (component_entity and not component_overwrite and - not new_component): + not new_component): data["component"] = component_entity self.log.info( "Found existing component, and no request to overwrite. " diff --git a/pype/plugins/ftrack/integrate_ftrack_instances.py b/pype/plugins/ftrack/integrate_ftrack_instances.py index 5e0c108442..4c68855fd3 100644 --- a/pype/plugins/ftrack/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/integrate_ftrack_instances.py @@ -24,7 +24,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'pointcache': 'cache', 'write': 'img', 'render': 'render', - 'script': 'comp', + 'nukescript': 'comp', 'review': 'mov'} exclude = [] diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index 48b6c448e3..fb6de894bd 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -1,7 +1,5 @@ -from app.api import ( - Templates -) +import pype.api as pype import pyblish.api @@ -13,8 +11,6 @@ class CollectTemplates(pyblish.api.ContextPlugin): label = "Collect Templates" def process(self, context): - """Inject the current working file""" - templates = Templates( - type=["anatomy"] - ) - context.data['anatomy'] = templates.anatomy + pype.load_data_from_templates() + context.data['anatomy'] = pype.Anatomy + self.log.info("Anatomy templates collected...") diff --git 
a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index 2dbdb13ced..0d3aca32aa 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -36,7 +36,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "vrayproxy", "yetiRig", "yeticache", - "script", + "nukescript", "review"] def process(self, instance): @@ -47,7 +47,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.integrate(instance) def register(self, instance): - # Required environment variables PROJECT = api.Session["AVALON_PROJECT"] ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"] diff --git a/pype/plugins/nuke/_publish_unused/collect_render_target.py b/pype/plugins/nuke/_publish_unused/collect_render_target.py deleted file mode 100644 index 86a38f26b6..0000000000 --- a/pype/plugins/nuke/_publish_unused/collect_render_target.py +++ /dev/null @@ -1,47 +0,0 @@ -import pyblish.api - - -class CollectNukeRenderMode(pyblish.api.InstancePlugin): - # TODO: rewrite docstring to nuke - """Collect current comp's render Mode - - Options: - local - deadline - - Note that this value is set for each comp separately. When you save the - comp this information will be stored in that file. If for some reason the - available tool does not visualize which render mode is set for the - current comp, please run the following line in the console (Py2) - - comp.GetData("rendermode") - - This will return the name of the current render mode as seen above under - Options. 
- - """ - - order = pyblish.api.CollectorOrder + 0.4 - label = "Collect Render Mode" - hosts = ["nuke"] - families = ["write", "render.local"] - - def process(self, instance): - """Collect all image sequence tools""" - options = ["local", "deadline"] - - node = instance[0] - - if bool(node["render_local"].getValue()): - rendermode = "local" - else: - rendermode = "deadline" - - assert rendermode in options, "Must be supported render mode" - - # Append family - instance.data["families"].remove("render") - family = "render.{0}".format(rendermode) - instance.data["families"].append(family) - - self.log.info("Render mode: {0}".format(rendermode)) diff --git a/pype/plugins/nuke/_publish_unused/extract_nuke_baked_colorspace.py b/pype/plugins/nuke/_publish_unused/extract_nuke_baked_colorspace.py deleted file mode 100644 index f2561bd7c5..0000000000 --- a/pype/plugins/nuke/_publish_unused/extract_nuke_baked_colorspace.py +++ /dev/null @@ -1,107 +0,0 @@ -import os -import tempfile -import shutil - -import nuke - -import pyblish.api - - -class ExtractNukeBakedColorspace(pyblish.api.InstancePlugin): - """Extracts movie with baked in luts - - V:\Remote Apps\ffmpeg\bin>ffmpeg -y -i - V:/FUGA/VFX_OUT/VFX_070010/v02/VFX_070010_comp_v02._baked.mov - -pix_fmt yuv420p - -crf 18 - -timecode 00:00:00:01 - V:/FUGA/VFX_OUT/VFX_070010/v02/VFX_070010_comp_v02..mov - - """ - - order = pyblish.api.ExtractorOrder - label = "Baked Colorspace" - optional = True - families = ["review"] - hosts = ["nuke"] - - def process(self, instance): - - if "collection" not in instance.data.keys(): - return - - # Store selection - selection = [i for i in nuke.allNodes() if i["selected"].getValue()] - - # Deselect all nodes to prevent external connections - [i["selected"].setValue(False) for i in nuke.allNodes()] - - temporary_nodes = [] - - # Create nodes - first_frame = min(instance.data["collection"].indexes) - last_frame = max(instance.data["collection"].indexes) - - temp_dir = tempfile.mkdtemp() - for f 
in instance.data["collection"]: - shutil.copy(f, os.path.join(temp_dir, os.path.basename(f))) - - node = previous_node = nuke.createNode("Read") - node["file"].setValue( - os.path.join(temp_dir, - os.path.basename(instance.data["collection"].format( - "{head}{padding}{tail}"))).replace("\\", "/")) - - node["first"].setValue(first_frame) - node["origfirst"].setValue(first_frame) - node["last"].setValue(last_frame) - node["origlast"].setValue(last_frame) - temporary_nodes.append(node) - - reformat_node = nuke.createNode("Reformat") - reformat_node["format"].setValue("HD_1080") - reformat_node["resize"].setValue("fit") - reformat_node["filter"].setValue("Lanczos6") - reformat_node["black_outside"].setValue(True) - reformat_node.setInput(0, previous_node) - previous_node = reformat_node - temporary_nodes.append(reformat_node) - - viewer_process_node = nuke.ViewerProcess.node() - dag_node = None - if viewer_process_node: - dag_node = nuke.createNode(viewer_process_node.Class()) - dag_node.setInput(0, previous_node) - previous_node = dag_node - temporary_nodes.append(dag_node) - # Copy viewer process values - excludedKnobs = ["name", "xpos", "ypos"] - for item in viewer_process_node.knobs().keys(): - if item not in excludedKnobs and item in dag_node.knobs(): - x1 = viewer_process_node[item] - x2 = dag_node[item] - x2.fromScript(x1.toScript(False)) - else: - self.log.warning("No viewer node found.") - - write_node = nuke.createNode("Write") - path = instance.data["collection"].format("{head}_baked.mov") - instance.data["baked_colorspace_movie"] = path - write_node["file"].setValue(path.replace("\\", "/")) - write_node["file_type"].setValue("mov") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - - # Render frames - nuke.execute(write_node.name(), int(first_frame), int(last_frame)) - - # Clean up - for node in temporary_nodes: - nuke.delete(node) - - shutil.rmtree(temp_dir) - - # Restore selection - 
[i["selected"].setValue(False) for i in nuke.allNodes()] - [i["selected"].setValue(True) for i in selection] diff --git a/pype/plugins/nuke/publish/collect_families.py b/pype/plugins/nuke/publish/collect_families.py index a361ceceab..226df3b168 100644 --- a/pype/plugins/nuke/publish/collect_families.py +++ b/pype/plugins/nuke/publish/collect_families.py @@ -23,20 +23,20 @@ class CollectInstanceFamilies(pyblish.api.ContextPlugin): node = instance[0] if not node["render"].value(): - families = "{}.frames".format( - instance.data["avalonKnob"]["families"]) + families = ["{}.frames".format( + instance.data["avalonKnob"]["families"])] # to ignore staging dir op in integrate instance.data['transfer'] = False else: # dealing with local/farm rendering if node["render_farm"].value(): - families = "{}.farm".format( - instance.data["avalonKnob"]["families"]) + families = ["{}.farm".format( + instance.data["avalonKnob"]["families"])] else: - families = "{}.local".format( - instance.data["avalonKnob"]["families"]) + families = ["{}.local".format( + instance.data["avalonKnob"]["families"])] - instance.data["families"].append(families) + instance.data["families"].extend(families) # Sort/grouped by family (preserving local index) context[:] = sorted(context, key=self.sort_by_family) diff --git a/pype/plugins/nuke/publish/collect_review.py b/pype/plugins/nuke/publish/collect_review.py new file mode 100644 index 0000000000..03f5437e86 --- /dev/null +++ b/pype/plugins/nuke/publish/collect_review.py @@ -0,0 +1,27 @@ +import pyblish.api + + +class CollectReview(pyblish.api.InstancePlugin): + """Collect review instance from rendered frames + """ + + order = pyblish.api.CollectorOrder + 0.3 + family = "review" + label = "Collect Review" + hosts = ["nuke"] + families = ["write"] + + family_targets = [".local", ".frames"] + + def process(self, instance): + families = [(f, search) for f in instance.data["families"] + for search in self.family_targets + if search in f][0] + + if families: + 
root_femilies = families[0].replace(families[1], "") + instance.data["families"].append(".".join([ + root_femilies, + self.family + ])) + self.log.info("Review collected: `{}`".format(instance)) diff --git a/pype/plugins/nuke/publish/collect_script.py b/pype/plugins/nuke/publish/collect_script.py index 099217c8bb..92557b2665 100644 --- a/pype/plugins/nuke/publish/collect_script.py +++ b/pype/plugins/nuke/publish/collect_script.py @@ -2,7 +2,6 @@ from avalon import api, io import nuke import pyblish.api import os -import tempfile from avalon.nuke.lib import ( add_publish_knob, add_avalon_tab_knob @@ -25,10 +24,11 @@ class CollectScript(pyblish.api.ContextPlugin): add_avalon_tab_knob(root) add_publish_knob(root) + family = "nukescript" # creating instances per write node file_path = root['name'].value() base_name = os.path.basename(file_path) - subset = base_name.split("_v")[0] + subset = "{0}_{1}".format(os.getenv("AVALON_TASK", None), family) # Get frame range first_frame = int(root["first_frame"].getValue()) @@ -38,25 +38,19 @@ class CollectScript(pyblish.api.ContextPlugin): instance = context.create_instance(subset) instance.add(root) - # adding stage dir for faster local renderings - staging_dir = tempfile.mkdtemp().replace("\\", "/") - instance.data.update({"stagingDir": staging_dir}) - self.log.info('nukescript: staging_dir: {}'.format(staging_dir)) - instance.data.update({ - "subset": os.getenv("AVALON_TASK", None), + "subset": subset, "asset": os.getenv("AVALON_ASSET", None), "label": base_name, "name": base_name, "startFrame": first_frame, "endFrame": last_frame, "publish": root.knob('publish').value(), - "family": "nukescript", + "family": family, "representation": "nk", "handles": int(asset_data["data"].get("handles", 0)), "step": 1, "fps": int(root['fps'].value()), - "files": base_name }) self.log.info('Publishing script version') context.data["instances"].append(instance) diff --git a/pype/plugins/nuke/publish/collect_writes.py 
b/pype/plugins/nuke/publish/collect_writes.py index 96f14c7eaa..dd3247ae8f 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -68,16 +68,11 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): try: collected_frames = os.listdir(output_dir) self.log.debug("collected_frames: {}".format(label)) - instance.data["files"].append(collected_frames) + except Exception: pass - # adding stage dir for faster local renderings - staging_dir = tempfile.mkdtemp().replace("\\", "/") - instance.data.update({"stagingDir": staging_dir}) - self.log.debug('staging_dir: {}'.format(staging_dir)) - instance.data.update({ "path": path, "outputDir": output_dir, diff --git a/pype/plugins/nuke/publish/extract_frames.py b/pype/plugins/nuke/publish/extract_frames.py new file mode 100644 index 0000000000..bdbcb75cea --- /dev/null +++ b/pype/plugins/nuke/publish/extract_frames.py @@ -0,0 +1,21 @@ +import pyblish + + +class ExtractFramesToIntegrate(pyblish.api.InstancePlugin): + """Extract rendered frames for integrator + """ + + order = pyblish.api.ExtractorOrder + label = "Extract rendered frames" + hosts = ["nuke"] + families = ["render.frames", "prerender.frames", "still.frames"] + + def process(self, instance): + + staging_dir = instance.data.get('stagingDir', None) + output_dir = instance.data.get('outputDir', None) + + if not staging_dir: + staging_dir = output_dir + instance.data['stagingDir'] = staging_dir + instance.data['transfer'] = False diff --git a/pype/plugins/nuke/publish/extract_render_local.py b/pype/plugins/nuke/publish/extract_render_local.py index 95733b1d85..6feab98786 100644 --- a/pype/plugins/nuke/publish/extract_render_local.py +++ b/pype/plugins/nuke/publish/extract_render_local.py @@ -1,10 +1,10 @@ import pyblish.api import nuke -import shutil import os +import pype -class NukeRenderLocal(pyblish.api.InstancePlugin): +class NukeRenderLocal(pype.api.Extractor): # TODO: rewrite docstring to nuke """Render the 
current Fusion composition locally. @@ -36,7 +36,7 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): node_subset_name = instance.data.get("name", None) # swap path to stageDir - temp_dir = instance.data.get("stagingDir") + temp_dir = self.staging_dir(instance).replace("\\", "/") output_dir = instance.data.get("outputDir") path = node['file'].value() node['file'].setValue(path.replace(output_dir, temp_dir)) @@ -56,4 +56,14 @@ class NukeRenderLocal(pyblish.api.InstancePlugin): path = node['file'].value() node['file'].setValue(path.replace(temp_dir, output_dir)) + if "files" not in instance.data: + instance.data["files"] = list() + + instance.data["files"].append(os.listdir(temp_dir)) + + self.log.info("Extracted instance '{0}' to: {1}".format( + instance.name, + output_dir + )) + self.log.info('Finished render') diff --git a/pype/plugins/nuke/publish/extract_review.py b/pype/plugins/nuke/publish/extract_review.py new file mode 100644 index 0000000000..6541774c3b --- /dev/null +++ b/pype/plugins/nuke/publish/extract_review.py @@ -0,0 +1,186 @@ +import os +import nuke +import pyblish.api +import pype + + +class ExtractDataForReview(pype.api.Extractor): + """Extracts movie and thumbnail with baked in luts + + must be run after extract_render_local.py + + """ + + order = pyblish.api.ExtractorOrder + 0.01 + label = "Data for review" + optional = True + + families = ["write"] + hosts = ["nuke"] + family_targets = [".local", ".review"] + + def process(self, instance): + + families = [f for f in instance.data["families"] + for search in self.family_targets + if search in f] + if not families: + return + + # Store selection + selection = [i for i in nuke.allNodes() if i["selected"].getValue()] + + # Deselect all nodes to prevent external connections + [i["selected"].setValue(False) for i in nuke.allNodes()] + + self.staging_dir(instance) + self.render_review_representation(instance, + representation="mov") + self.transcode_mov(instance) + 
self.render_review_representation(instance, + representation="jpeg") + # Restore selection + [i["selected"].setValue(False) for i in nuke.allNodes()] + [i["selected"].setValue(True) for i in selection] + + def transcode_mov(self, instance): + import subprocess + + collection = instance.data["collection"] + staging_dir = instance.data["stagingDir"] + file_name = collection.format("{head}mov") + + review_mov = os.path.join(staging_dir, file_name) + + if instance.data.get("baked_colorspace_movie"): + args = [ + "ffmpeg", "-y", + "-i", instance.data["baked_colorspace_movie"], + "-pix_fmt", "yuv420p", + "-crf", "18", + "-timecode", "00:00:00:01", + ] + + args.append(review_mov) + + self.log.debug("Executing args: {0}".format(args)) + + self.log.info("transcoding review mov: {0}".format(review_mov)) + p = subprocess.Popen( + args, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + stdin=subprocess.PIPE, + cwd=os.path.dirname(args[-1]) + ) + + output = p.communicate()[0] + + if p.returncode != 0: + raise ValueError(output) + + self.log.debug("Removing `{0}`...".format( + instance.data["baked_colorspace_movie"])) + os.remove(instance.data["baked_colorspace_movie"]) + + instance.data["files"].append(file_name) + + def render_review_representation(self, + instance, + representation="mov"): + + assert instance.data['files'], "Instance data files should't be empty!" 
+ + import clique + import nuke + temporary_nodes = [] + staging_dir = instance.data["stagingDir"] + + collection = instance.data.get("collection", None) + + if not collection: + collections, remainder = clique.assemble(*instance.data['files']) + collection = collections[0] + instance.data["collection"] = collection + + # Create nodes + first_frame = min(collection.indexes) + last_frame = max(collection.indexes) + + self.log.warning("first_frame: {}".format(first_frame)) + self.log.warning("last_frame: {}".format(last_frame)) + + node = previous_node = nuke.createNode("Read") + + node["file"].setValue( + os.path.join(staging_dir, + os.path.basename(collection.format( + "{head}{padding}{tail}"))).replace("\\", "/")) + + node["first"].setValue(first_frame) + node["origfirst"].setValue(first_frame) + node["last"].setValue(last_frame) + node["origlast"].setValue(last_frame) + temporary_nodes.append(node) + + reformat_node = nuke.createNode("Reformat") + reformat_node["format"].setValue("HD_1080") + reformat_node["resize"].setValue("fit") + reformat_node["filter"].setValue("Lanczos6") + reformat_node["black_outside"].setValue(True) + reformat_node.setInput(0, previous_node) + previous_node = reformat_node + temporary_nodes.append(reformat_node) + + viewer_process_node = nuke.ViewerProcess.node() + dag_node = None + if viewer_process_node: + dag_node = nuke.createNode(viewer_process_node.Class()) + dag_node.setInput(0, previous_node) + previous_node = dag_node + temporary_nodes.append(dag_node) + # Copy viewer process values + excludedKnobs = ["name", "xpos", "ypos"] + for item in viewer_process_node.knobs().keys(): + if item not in excludedKnobs and item in dag_node.knobs(): + x1 = viewer_process_node[item] + x2 = dag_node[item] + x2.fromScript(x1.toScript(False)) + else: + self.log.warning("No viewer node found.") + + # create write node + write_node = nuke.createNode("Write") + + if representation in "mov": + file = collection.format("{head}baked.mov") + path = 
os.path.join(staging_dir, file).replace("\\", "/") + instance.data["baked_colorspace_movie"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("mov") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + + elif representation in "jpeg": + file = collection.format("{head}jpeg") + path = os.path.join(staging_dir, file).replace("\\", "/") + instance.data["thumbnail"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("jpeg") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + + # retime for + first_frame = int(last_frame)/2 + last_frame = int(last_frame)/2 + # add into files for integration as representation + instance.data["files"].append(file) + + # Render frames + nuke.execute(write_node.name(), int(first_frame), int(last_frame)) + + # Clean up + for node in temporary_nodes: + nuke.delete(node) diff --git a/pype/plugins/nuke/publish/extract_script.py b/pype/plugins/nuke/publish/extract_script.py index 573fb9598a..f0ed438094 100644 --- a/pype/plugins/nuke/publish/extract_script.py +++ b/pype/plugins/nuke/publish/extract_script.py @@ -1,21 +1,34 @@ -import nuke + import pyblish.api import os +import pype +import shutil -class ExtractScript(pyblish.api.InstancePlugin): +class ExtractScript(pype.api.Extractor): """Publish script """ label = 'Extract Script' order = pyblish.api.ExtractorOrder - 0.05 + optional = True hosts = ['nuke'] - families = ["script"] + families = ["nukescript"] def process(self, instance): + self.log.debug("instance extracting: {}".format(instance.data)) + current_script = instance.context.data["currentFile"] - self.log.info('Extracting script') - staging_dir = instance.data["stagingDir"] - file_name = instance.data["name"] - path = os.path.join(staging_dir, file_name) + # Define extract output file path + dir_path = self.staging_dir(instance) + filename = 
"{0}".format(instance.data["name"]) + path = os.path.join(dir_path, filename) - nuke.scriptSaveAs(path) + self.log.info("Performing extraction..") + shutil.copy(current_script, path) + + if "files" not in instance.data: + instance.data["files"] = list() + + instance.data["files"].append(filename) + + self.log.info("Extracted instance '%s' to: %s" % (instance.name, path)) diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py index 9472cdf36b..6d1434a77f 100644 --- a/pype/plugins/nuke/publish/integrate_rendered_frames.py +++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -24,14 +24,21 @@ class IntegrateFrames(pyblish.api.InstancePlugin): label = "Integrate Frames" order = pyblish.api.IntegratorOrder - families = ["render.frames", "still.frames", "prerender.frames", - "render.local", "still.local", "prerender.local"] + family_targets = [".frames", ".local", ".review"] def process(self, instance): + families = [f for f in instance.data["families"] + for search in self.family_targets + if search in f] + + if not families: + return + self.register(instance) self.log.info("Integrating Asset in to the database ...") + self.log.info("instance.data: {}".format(instance.data)) if instance.data.get('transfer', True): self.integrate(instance) @@ -97,7 +104,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): self.log.info("Verifying version from assumed destination") assumed_data = instance.data["assumedTemplateData"] - assumed_version = assumed_data["version"] + assumed_version = assumed_data["VERSION"] if assumed_version != next_version: raise AttributeError("Assumed version 'v{0:03d}' does not match" "next version in database " @@ -149,7 +156,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # Each should be a single representation (as such, a single extension) representations = [] destination_list = [] - + self.log.debug("integrate_frames:instance.data[files]: {}".format( + 
instance.data["files"])) for files in instance.data["files"]: # Collection # _______ @@ -167,7 +175,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): dst_collection = [] for fname in collection: - + self.log.warning("fname: {}".format(fname)) filename, ext = os.path.splitext(fname) _, frame = os.path.splitext(filename) @@ -195,7 +203,12 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # | | # |_______| # + + if template_data.get("frame"): + template_data.pop("frame") + fname = files + self.log.info("fname: {}".format(fname)) assert not os.path.isabs(fname), ( "Given file name is a full path" @@ -205,9 +218,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin): template_data["representation"] = ext[1:] src = os.path.join(stagingdir, fname) + anatomy_filled = anatomy.format(template_data) dst = anatomy_filled.render.path template = anatomy.render.path + instance.data["transfers"].append([src, dst]) representation = { @@ -254,6 +269,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin): transfers = instance.data["transfers"] for src, dest in transfers: + src = os.path.normpath(src) + dest = os.path.normpath(dest) + if src in dest: + continue + self.log.info("Copying file .. 
{} -> {}".format(src, dest)) self.copy_file(src, dest) diff --git a/pype/plugins/nuke/publish/integrate_script_version.py b/pype/plugins/nuke/publish/integrate_script_version.py index c664cbb05d..aa37101af0 100644 --- a/pype/plugins/nuke/publish/integrate_script_version.py +++ b/pype/plugins/nuke/publish/integrate_script_version.py @@ -3,18 +3,17 @@ import nuke import pyblish.api -class IncrementScriptVersion(pyblish.api.InstancePlugin): +class IncrementScriptVersion(pyblish.api.ContextPlugin): """Increment current script version.""" order = pyblish.api.IntegratorOrder + 0.9 label = "Increment Current Script Version" optional = True hosts = ['nuke'] - families = ["render.frames"] + families = ["nukescript", "render.local", "render.frames"] - def process(self, instance): + def process(self, context): from pype.lib import version_up - context = instance.context path = context.data["currentFile"] nuke.scriptSaveAs(version_up(path)) self.log.info('Incrementing script version') diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index 09498c58de..54b3537055 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -38,28 +38,22 @@ class ValidateCollection(pyblish.api.InstancePlugin): collections, remainder = clique.assemble(*instance.data['files']) self.log.info('collections: {}'.format(str(collections))) + collection = collections[0] + frame_length = instance.data["endFrame"] \ - instance.data["startFrame"] + 1 if frame_length is not 1: - assert len(collections) == 1, self.log.info( - "There are multiple collections in the folder") - assert collections[0].is_contiguous(), self.log.info("Some frames appear to be missing") + assert len(collections) == 1, "There are multiple collections in the folder" + assert collection.is_contiguous(), "Some frames appear to be missing" - assert remainder is not None, self.log.info("There are some extra files in 
folder") - - basename, ext = os.path.splitext(list(collections[0])[0]) - assert all(ext == os.path.splitext(name)[1] - for name in collections[0]), self.log.info( - "Files had varying suffixes" - ) - - assert not any(os.path.isabs(name) - for name in collections[0]), self.log.info("some file name are absolute") + assert remainder is not None, "There are some extra files in folder" self.log.info('frame_length: {}'.format(frame_length)) - self.log.info('len(list(instance.data["files"])): {}'.format( - len(list(instance.data["files"][0])))) + self.log.info('len(collection.indexes): {}'.format( + len(collection.indexes))) - assert len(list(instance.data["files"][0])) is frame_length, self.log.info( - "{} missing frames. Use repair to render all frames".format(__name__)) + assert len( + collection.indexes + ) is frame_length, "{} missing frames. Use " + "repair to render all frames".format(__name__) From 6c3b49a49f5b2866f677d34816e816e500755b3a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 18 Dec 2018 16:12:13 +0100 Subject: [PATCH 10/24] collections into integrate render frames --- .../nuke/publish/integrate_rendered_frames.py | 45 ++++++++++++------- 1 file changed, 29 insertions(+), 16 deletions(-) diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py index 6d1434a77f..556b3845a6 100644 --- a/pype/plugins/nuke/publish/integrate_rendered_frames.py +++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -170,30 +170,43 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # if isinstance(files, list): - collection = files + src_collections, remainder = clique.assemble(files) + self.log.warning(src_collections) + src_collection = src_collections[0] # Assert that each member has identical suffix + src_head = src_collection.format("{head}") + src_tail = src_collection.format("{tail}") - dst_collection = [] - for fname in collection: - self.log.warning("fname: {}".format(fname)) - 
filename, ext = os.path.splitext(fname) - _, frame = os.path.splitext(filename) - - template_data["representation"] = ext[1:] - template_data["frame"] = frame[1:] - - src = os.path.join(stagingdir, fname) + test_dest_files = list() + for i in [1, 2]: + template_data["representation"] = src_tail[1:] + template_data["frame"] = src_collection.format( + "{padding}") % i + self.log.warning(template_data) anatomy_filled = anatomy.format(template_data) - dst = anatomy_filled.render.path + self.log.warning(anatomy_filled) + test_dest_files.append(anatomy_filled.render.path) + + self.log.warning(test_dest_files) + dst_collections, remainder = clique.assemble(test_dest_files) + self.log.warning(dst_collections) + dst_collection = dst_collections[0] + dst_head = dst_collection.format("{head}") + dst_tail = dst_collection.format("{tail}") + + for i in src_collection.indexes: + src_padding = src_collection.format("{padding}") % i + src_file_name = "{0}{1}{2}".format(src_head, src_padding, src_tail) + dst_padding = dst_collection.format("{padding}") % i + dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail) + + src = os.path.join(stagingdir, src_file_name) + self.log.warning(src_file_name) - dst_collection.append(dst) instance.data["transfers"].append([src, dst]) template = anatomy.render.path - collections, remainder = clique.assemble(dst_collection) - dst = collections[0].format('{head}{padding}{tail}') - else: # Single file # _______ From 96ebfb38ed708d763b6198059c1c37ab946b150d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Dec 2018 14:55:46 +0100 Subject: [PATCH 11/24] update integrate rendered frames --- pype/plugins/nuke/publish/integrate_rendered_frames.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py index 556b3845a6..d209dcb3ee 100644 --- a/pype/plugins/nuke/publish/integrate_rendered_frames.py +++ 
b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -201,7 +201,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin): dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail) src = os.path.join(stagingdir, src_file_name) - self.log.warning(src_file_name) + self.log.warning(src) + self.log.warning(dst) instance.data["transfers"].append([src, dst]) @@ -237,6 +238,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin): template = anatomy.render.path instance.data["transfers"].append([src, dst]) + self.log.warning(src) + self.log.warning(dst) representation = { "schema": "pype:representation-2.0", @@ -268,6 +271,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin): self.log.info("Registering {} items".format(len(representations))) + self.log.warning(representations) + io.insert_many(representations) def integrate(self, instance): @@ -284,6 +289,9 @@ class IntegrateFrames(pyblish.api.InstancePlugin): for src, dest in transfers: src = os.path.normpath(src) dest = os.path.normpath(dest) + self.log.warning("copying") + self.log.warning(src) + self.log.warning(dest) if src in dest: continue From a8836d658e953af485d0cf87db5e2305472b0b61 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Dec 2018 18:31:24 +0100 Subject: [PATCH 12/24] finishing review representation --- .../nuke/publish/extract_render_local.py | 3 ++- pype/plugins/nuke/publish/extract_review.py | 9 ++++--- .../nuke/publish/integrate_rendered_frames.py | 25 ++++--------------- 3 files changed, 13 insertions(+), 24 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_render_local.py b/pype/plugins/nuke/publish/extract_render_local.py index 6feab98786..5b53a42136 100644 --- a/pype/plugins/nuke/publish/extract_render_local.py +++ b/pype/plugins/nuke/publish/extract_render_local.py @@ -59,7 +59,7 @@ class NukeRenderLocal(pype.api.Extractor): if "files" not in instance.data: instance.data["files"] = list() - instance.data["files"].append(os.listdir(temp_dir)) + instance.data["files"] = 
[os.listdir(temp_dir)] self.log.info("Extracted instance '{0}' to: {1}".format( instance.name, @@ -67,3 +67,4 @@ class NukeRenderLocal(pype.api.Extractor): )) self.log.info('Finished render') + return diff --git a/pype/plugins/nuke/publish/extract_review.py b/pype/plugins/nuke/publish/extract_review.py index 6541774c3b..30de2039df 100644 --- a/pype/plugins/nuke/publish/extract_review.py +++ b/pype/plugins/nuke/publish/extract_review.py @@ -26,16 +26,18 @@ class ExtractDataForReview(pype.api.Extractor): if search in f] if not families: return - + self.log.debug("here:") # Store selection selection = [i for i in nuke.allNodes() if i["selected"].getValue()] - + self.log.debug("here:") # Deselect all nodes to prevent external connections [i["selected"].setValue(False) for i in nuke.allNodes()] - + self.log.debug("here:") + self.log.debug("creating staging dir:") self.staging_dir(instance) self.render_review_representation(instance, representation="mov") + self.log.debug("review mov:") self.transcode_mov(instance) self.render_review_representation(instance, representation="jpeg") @@ -98,6 +100,7 @@ class ExtractDataForReview(pype.api.Extractor): collection = instance.data.get("collection", None) + self.log.warning("instance.data['files']: {}".format(instance.data['files'])) if not collection: collections, remainder = clique.assemble(*instance.data['files']) collection = collections[0] diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py index d209dcb3ee..908955c873 100644 --- a/pype/plugins/nuke/publish/integrate_rendered_frames.py +++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -171,25 +171,20 @@ class IntegrateFrames(pyblish.api.InstancePlugin): if isinstance(files, list): src_collections, remainder = clique.assemble(files) - self.log.warning(src_collections) src_collection = src_collections[0] # Assert that each member has identical suffix src_head = 
src_collection.format("{head}") - src_tail = src_collection.format("{tail}") + src_tail = ext = src_collection.format("{tail}") test_dest_files = list() for i in [1, 2]: template_data["representation"] = src_tail[1:] template_data["frame"] = src_collection.format( "{padding}") % i - self.log.warning(template_data) anatomy_filled = anatomy.format(template_data) - self.log.warning(anatomy_filled) test_dest_files.append(anatomy_filled.render.path) - self.log.warning(test_dest_files) dst_collections, remainder = clique.assemble(test_dest_files) - self.log.warning(dst_collections) dst_collection = dst_collections[0] dst_head = dst_collection.format("{head}") dst_tail = dst_collection.format("{tail}") @@ -201,11 +196,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail) src = os.path.join(stagingdir, src_file_name) - self.log.warning(src) - self.log.warning(dst) - instance.data["transfers"].append([src, dst]) - template = anatomy.render.path else: @@ -218,12 +209,13 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # |_______| # - if template_data.get("frame"): - template_data.pop("frame") + template_data.pop("frame", None) + anatomy.pop("frame", None) fname = files self.log.info("fname: {}".format(fname)) + assert not os.path.isabs(fname), ( "Given file name is a full path" ) @@ -238,8 +230,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin): template = anatomy.render.path instance.data["transfers"].append([src, dst]) - self.log.warning(src) - self.log.warning(dst) representation = { "schema": "pype:representation-2.0", @@ -265,14 +255,12 @@ class IntegrateFrames(pyblish.api.InstancePlugin): "representation": ext[1:] } } + destination_list.append(dst) instance.data['destination_list'] = destination_list representations.append(representation) self.log.info("Registering {} items".format(len(representations))) - - self.log.warning(representations) - io.insert_many(representations) def integrate(self, 
instance): @@ -289,9 +277,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin): for src, dest in transfers: src = os.path.normpath(src) dest = os.path.normpath(dest) - self.log.warning("copying") - self.log.warning(src) - self.log.warning(dest) if src in dest: continue From dff2d8a959668e613f5d95835520d2511c662727 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 2 Jan 2019 16:23:40 +0100 Subject: [PATCH 13/24] remove legacy utilities --- pype/utils/__init__.py | 0 pype/utils/lib.py | 203 ----------------------------------------- 2 files changed, 203 deletions(-) delete mode 100644 pype/utils/__init__.py delete mode 100644 pype/utils/lib.py diff --git a/pype/utils/__init__.py b/pype/utils/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/utils/lib.py b/pype/utils/lib.py deleted file mode 100644 index 8b7be1a3fe..0000000000 --- a/pype/utils/lib.py +++ /dev/null @@ -1,203 +0,0 @@ -import re -import tempfile -import json -import os -import sys -import pyblish.api - -print 'pyblish_utils loaded' - - -def save_preset(path, preset): - """Save options to path""" - with open(path, "w") as f: - json.dump(preset, f) - - -def load_preset(path): - """Load options json from path""" - with open(path, "r") as f: - return json.load(f) - - -def temp_dir(context): - """Provide a temporary directory in which to store extracted files""" - extract_dir = context.data('extractDir') - - if not extract_dir: - extract_dir = tempfile.mkdtemp() - context.set_data('extractDir', value=extract_dir) - - return extract_dir - - -def version_get(string, prefix, suffix=None): - """Extract version information from filenames. 
Code from Foundry's nukescripts.version_get()""" - - if string is None: - raise ValueError, "Empty version string - no match" - - regex = "[/_.]" + prefix + "\d+" - matches = re.findall(regex, string, re.IGNORECASE) - if not len(matches): - msg = "No \"_" + prefix + "#\" found in \"" + string + "\"" - raise ValueError, msg - return (matches[-1:][0][1], re.search("\d+", matches[-1:][0]).group()) - - -def version_set(string, prefix, oldintval, newintval): - """Changes version information from filenames. Code from Foundry's nukescripts.version_set()""" - - regex = "[/_.]" + prefix + "\d+" - matches = re.findall(regex, string, re.IGNORECASE) - if not len(matches): - return "" - - # Filter to retain only version strings with matching numbers - matches = filter(lambda s: int(s[2:]) == oldintval, matches) - - # Replace all version strings with matching numbers - for match in matches: - # use expression instead of expr so 0 prefix does not make octal - fmt = "%%(#)0%dd" % (len(match) - 2) - newfullvalue = match[0] + prefix + str(fmt % {"#": newintval}) - string = re.sub(match, newfullvalue, string) - return string - - -def version_up(string): - - try: - (prefix, v) = version_get(string, 'v') - v = int(v) - file = version_set(string, prefix, v, v + 1) - except: - raise ValueError, 'Unable to version up File' - - return file - - -def open_folder(path): - """Provide a temporary directory in which to store extracted files""" - import subprocess - path = os.path.abspath(path) - if sys.platform == 'win32': - subprocess.Popen('explorer "%s"' % path) - elif sys.platform == 'darwin': # macOS - subprocess.Popen(['open', path]) - else: # linux - try: - subprocess.Popen(['xdg-open', path]) - except OSError: - raise OSError('unsupported xdg-open call??') - - -def filter_instances(context, plugin): - """Provide a temporary directory in which to store extracted files""" - # Get the errored instances - allInstances = [] - for result in context.data["results"]: - if (result["instance"] is 
not None and - result["instance"] not in allInstances): - allInstances.append(result["instance"]) - - # Apply pyblish.logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(allInstances, plugin) - - return instances - -def load_capture_preset(path): - import capture_gui - import capture - - path = path - preset = capture_gui.lib.load_json(path) - print preset - - options = dict() - - # CODEC - id = 'Codec' - for key in preset[id]: - options[str(key)] = preset[id][key] - - # GENERIC - id = 'Generic' - for key in preset[id]: - if key.startswith('isolate'): - pass - # options['isolate'] = preset[id][key] - else: - options[str(key)] = preset[id][key] - - # RESOLUTION - id = 'Resolution' - options['height'] = preset[id]['height'] - options['width'] = preset[id]['width'] - - # DISPLAY OPTIONS - id = 'Display Options' - disp_options = {} - for key in preset['Display Options']: - if key.startswith('background'): - disp_options[key] = preset['Display Options'][key] - else: - disp_options['displayGradient'] = True - - options['display_options'] = disp_options - - # VIEWPORT OPTIONS - temp_options = {} - id = 'Renderer' - for key in preset[id]: - temp_options[str(key)] = preset[id][key] - - temp_options2 = {} - id = 'Viewport Options' - light_options = {0: "default", - 1: 'all', - 2: 'selected', - 3: 'flat', - 4: 'nolights'} - for key in preset[id]: - if key == 'high_quality': - temp_options2['multiSampleEnable'] = True - temp_options2['multiSampleCount'] = 4 - temp_options2['textureMaxResolution'] = 512 - temp_options2['enableTextureMaxRes'] = True - - if key == 'alphaCut': - temp_options2['transparencyAlgorithm'] = 5 - temp_options2['transparencyQuality'] = 1 - - if key == 'headsUpDisplay': - temp_options['headsUpDisplay'] = True - - if key == 'displayLights': - temp_options[str(key)] = light_options[preset[id][key]] - else: - temp_options[str(key)] = preset[id][key] - - for key in ['override_viewport_options', 'high_quality', 'alphaCut']: 
- temp_options.pop(key, None) - - options['viewport_options'] = temp_options - options['viewport2_options'] = temp_options2 - - # use active sound track - scene = capture.parse_active_scene() - options['sound'] = scene['sound'] - cam_options = dict() - cam_options['overscan'] = 1.0 - cam_options['displayFieldChart'] = False - cam_options['displayFilmGate'] = False - cam_options['displayFilmOrigin'] = False - cam_options['displayFilmPivot'] = False - cam_options['displayGateMask'] = False - cam_options['displayResolution'] = False - cam_options['displaySafeAction'] = False - cam_options['displaySafeTitle'] = False - - # options['display_options'] = temp_options - - return options From fc0575d4a09622805b9085a7bc283fde74bae285 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 2 Jan 2019 18:18:38 +0100 Subject: [PATCH 14/24] remove unsused scripts from the menu --- pype/maya/menu.json | 799 +------------------------------------ pype/maya/menu_backup.json | 4 +- 2 files changed, 18 insertions(+), 785 deletions(-) diff --git a/pype/maya/menu.json b/pype/maya/menu.json index 21b6d0c74e..779109a169 100644 --- a/pype/maya/menu.json +++ b/pype/maya/menu.json @@ -7,9 +7,9 @@ }, { "type": "action", - "command": "$PYPE_SCRIPTS\\others\\show_current_scene_in_explorer.py", + "command": "$PYPE_SCRIPTS\\others\\open_current_folder.py", "sourcetype": "file", - "title": "# Explore current scene..", + "title": "Open working folder..", "tooltip": "Show current scene in Explorer" }, { @@ -25,47 +25,7 @@ { "type": "menu", "title": "# Modeling", - "items": [{ - "type": "action", - "command": "$PYPE_SCRIPTS\\modeling\\duplicate_normalized.py", - "sourcetype": "file", - "tags": ["modeling", - "duplicate", - "normalized"], - "title": "# # Duplicate Normalized", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\modeling\\transferUVs.py", - "sourcetype": "file", - "tags": ["modeling", - "transfer", - "uv"], - "title": "# Transfer UVs", - "tooltip": "" - }, - { - 
"type": "action", - "command": "$PYPE_SCRIPTS\\modeling\\mirrorSymmetry.py", - "sourcetype": "file", - "tags": ["modeling", - "mirror", - "symmetry"], - "title": "# Mirror Symmetry", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\modeling\\selectOutlineUI.py", - "sourcetype": "file", - "tags": ["modeling", - "select", - "outline", - "ui"], - "title": "# Select Outline UI", - "tooltip": "" - }, + "items": [ { "type": "action", "command": "$PYPE_SCRIPTS\\modeling\\polyDeleteOtherUVSets.py", @@ -77,17 +37,6 @@ "title": "# Polygon Delete Other UV Sets", "tooltip": "" }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\modeling\\polyCombineQuick.py", - "sourcetype": "file", - "tags": ["modeling", - "combine", - "polygon", - "quick"], - "title": "# Polygon Combine Quick", - "tooltip": "" - }, { "type": "action", "command": "$PYPE_SCRIPTS\\modeling\\separateMeshPerShader.py", @@ -108,16 +57,6 @@ "title": "# Polygon Detach and Separate", "tooltip": "" }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\modeling\\polyRelaxVerts.py", - "sourcetype": "file", - "tags": ["modeling", - "relax", - "verts"], - "title": "# Polygon Relax Vertices", - "tooltip": "" - }, { "type": "action", "command": "$PYPE_SCRIPTS\\modeling\\polySelectEveryNthEdgeUI.py", @@ -142,322 +81,16 @@ }, { "type": "menu", - "title": "# Rigging", - "items": [{ - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\addCurveBetween.py", - "sourcetype": "file", - "tags": ["rigging", - "addCurveBetween", - "file"], - "title": "# Add Curve Between" - }, + "title": "Rigging", + "items": [ { "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\averageSkinWeights.py", + "command": "$PYPE_SCRIPTS\\rigging\\advancedSkeleton.py", "sourcetype": "file", "tags": ["rigging", - "average", - "skin weights", + "autorigger", "advanced", "skeleton", "advancedskeleton", "file"], - "title": "# Average Skin Weights" - }, - { - "type": "action", - "command": 
"$PYPE_SCRIPTS\\rigging\\cbSmoothSkinWeightUI.py", - "sourcetype": "file", - "tags": ["rigging", - "cbSmoothSkinWeightUI", - "file"], - "title": "# CB Smooth Skin Weight UI" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\channelBoxManagerUI.py", - "sourcetype": "file", - "tags": ["rigging", - "channelBoxManagerUI", - "file"], - "title": "# Channel Box Manager UI" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\characterAutorigger.py", - "sourcetype": "file", - "tags": ["rigging", - "characterAutorigger", - "file"], - "title": "# Character Auto Rigger" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\connectUI.py", - "sourcetype": "file", - "tags": ["rigging", - "connectUI", - "file"], - "title": "# Connect UI" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\copySkinWeightsLocal.py", - "sourcetype": "file", - "tags": ["rigging", - "copySkinWeightsLocal", - "file"], - "title": "# Copy Skin Weights Local" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\createCenterLocator.py", - "sourcetype": "file", - "tags": ["rigging", - "createCenterLocator", - "file"], - "title": "# Create Center Locator" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\freezeTransformToGroup.py", - "sourcetype": "file", - "tags": ["rigging", - "freezeTransformToGroup", - "file"], - "title": "# Freeze Transform To Group" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\groupSelected.py", - "sourcetype": "file", - "tags": ["rigging", - "groupSelected", - "file"], - "title": "# Group Selected" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\ikHandlePoleVectorLocator.py", - "sourcetype": "file", - "tags": ["rigging", - "ikHandlePoleVectorLocator", - "file"], - "title": "# IK Handle Pole Vector Locator" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\jointOrientUI.py", - "sourcetype": "file", - "tags": ["rigging", - "jointOrientUI", - 
"file"], - "title": "# Joint Orient UI" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\jointsOnCurve.py", - "sourcetype": "file", - "tags": ["rigging", - "jointsOnCurve", - "file"], - "title": "# Joints On Curve" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\resetBindSelectedSkinJoints.py", - "sourcetype": "file", - "tags": ["rigging", - "resetBindSelectedSkinJoints", - "file"], - "title": "# Reset Bind Selected Skin Joints" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\selectSkinclusterJointsFromSelectedComponents.py", - "sourcetype": "file", - "tags": ["rigging", - "selectSkinclusterJointsFromSelectedComponents", - "file"], - "title": "# Select Skincluster Joints From Selected Components" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\selectSkinclusterJointsFromSelectedMesh.py", - "sourcetype": "file", - "tags": ["rigging", - "selectSkinclusterJointsFromSelectedMesh", - "file"], - "title": "# Select Skincluster Joints From Selected Mesh" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\setJointLabels.py", - "sourcetype": "file", - "tags": ["rigging", - "setJointLabels", - "file"], - "title": "# Set Joint Labels" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\setJointOrientationFromCurrentRotation.py", - "sourcetype": "file", - "tags": ["rigging", - "setJointOrientationFromCurrentRotation", - "file"], - "title": "# Set Joint Orientation From Current Rotation" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\setSelectedJointsOrientationZero.py", - "sourcetype": "file", - "tags": ["rigging", - "setSelectedJointsOrientationZero", - "file"], - "title": "# Set Selected Joints Orientation Zero" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\mirrorCurveShape.py", - "sourcetype": "file", - "tags": ["rigging", - "mirrorCurveShape", - "file"], - "title": "# Mirror Curve Shape" - }, - { - "type": "action", - "command": 
"$PYPE_SCRIPTS\\rigging\\setRotationOrderUI.py", - "sourcetype": "file", - "tags": ["rigging", - "setRotationOrderUI", - "file"], - "title": "# Set Rotation Order UI" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\paintItNowUI.py", - "sourcetype": "file", - "tags": ["rigging", - "paintItNowUI", - "file"], - "title": "# Paint It Now UI" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\parentScaleConstraint.py", - "sourcetype": "file", - "tags": ["rigging", - "parentScaleConstraint", - "file"], - "title": "# Parent Scale Constraint" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\quickSetWeightsUI.py", - "sourcetype": "file", - "tags": ["rigging", - "quickSetWeightsUI", - "file"], - "title": "# Quick Set Weights UI" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\rapidRig.py", - "sourcetype": "file", - "tags": ["rigging", - "rapidRig", - "file"], - "title": "# Rapid Rig" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\regenerate_blendshape_targets.py", - "sourcetype": "file", - "tags": ["rigging", - "regenerate_blendshape_targets", - "file"], - "title": "# Regenerate Blendshape Targets" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\removeRotationAxis.py", - "sourcetype": "file", - "tags": ["rigging", - "removeRotationAxis", - "file"], - "title": "# Remove Rotation Axis" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\resetBindSelectedMeshes.py", - "sourcetype": "file", - "tags": ["rigging", - "resetBindSelectedMeshes", - "file"], - "title": "# Reset Bind Selected Meshes" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\simpleControllerOnSelection.py", - "sourcetype": "file", - "tags": ["rigging", - "simpleControllerOnSelection", - "file"], - "title": "# Simple Controller On Selection" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\simpleControllerOnSelectionHierarchy.py", - "sourcetype": "file", - 
"tags": ["rigging", - "simpleControllerOnSelectionHierarchy", - "file"], - "title": "# Simple Controller On Selection Hierarchy" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\superRelativeCluster.py", - "sourcetype": "file", - "tags": ["rigging", - "superRelativeCluster", - "file"], - "title": "# Super Relative Cluster" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\tfSmoothSkinWeight.py", - "sourcetype": "file", - "tags": ["rigging", - "tfSmoothSkinWeight", - "file"], - "title": "# TF Smooth Skin Weight" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\toggleIntermediates.py", - "sourcetype": "file", - "tags": ["rigging", - "toggleIntermediates", - "file"], - "title": "# Toggle Intermediates" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\toggleSegmentScaleCompensate.py", - "sourcetype": "file", - "tags": ["rigging", - "toggleSegmentScaleCompensate", - "file"], - "title": "# Toggle Segment Scale Compensate" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\rigging\\toggleSkinclusterDeformNormals.py", - "sourcetype": "file", - "tags": ["rigging", - "toggleSkinclusterDeformNormals", - "file"], - "title": "# Toggle Skincluster Deform Normals" + "title": "Advanced Skeleton" }] }, { @@ -841,7 +474,7 @@ }, { "type": "menu", - "title": "# Animation", + "title": "Animation", "items": [{ "type": "menu", "title": "# Attributes", @@ -1060,10 +693,10 @@ }, { "sourcetype": "file", - "command": "$PYPE_SCRIPTS\\animation\\poseLibrary.py", + "command": "$PYPE_SCRIPTS\\animation\\animLibrary.py", "tags": ["animation", - "poseLibrary.py"], - "title": "# Pose Library", + "studiolibrary.py"], + "title": "Anim Library", "type": "action" }] }, @@ -1220,51 +853,6 @@ "title": "# Instancer To Objects Instances", "tooltip": "" }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\particles\\objectsToParticlesAndInstancerCleanSource.py", - "sourcetype": "file", - "tags": ["particles", - "objects", - 
"Particles", - "Instancer", - "Clean", - "Source"], - "title": "# Objects To Particles & Instancer - Clean Source", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\particles\\particleComponentsToLocators.py", - "sourcetype": "file", - "tags": ["particles", - "components", - "locators"], - "title": "# Particle Components To Locators", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\particles\\objectsToParticlesAndInstancer.py", - "sourcetype": "file", - "tags": ["particles", - "objects", - "particles", - "instancer"], - "title": "# Objects To Particles And Instancer", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\particles\\spawnParticlesOnMesh.py", - "sourcetype": "file", - "tags": ["particles", - "spawn", - "on", - "mesh"], - "title": "# Spawn Particles On Mesh", - "tooltip": "" - }, { "type": "action", "command": "$PYPE_SCRIPTS\\particles\\instancerToObjectsInstancesWithAnimation.py", @@ -1274,42 +862,6 @@ "title": "# Instancer To Objects Instances With Animation", "tooltip": "" }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\particles\\objectsToParticles.py", - "sourcetype": "file", - "tags": ["particles", - "objectsToParticles"], - "title": "# Objects To Particles", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\particles\\add_particle_cacheFile_attrs.py", - "sourcetype": "file", - "tags": ["particles", - "add_particle_cacheFile_attrs"], - "title": "# Add Particle CacheFile Attributes", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\particles\\mergeParticleSystems.py", - "sourcetype": "file", - "tags": ["particles", - "mergeParticleSystems"], - "title": "# Merge Particle Systems", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\particles\\particlesToLocators.py", - "sourcetype": "file", - "tags": ["particles", - "particlesToLocators"], - "title": "# Particles To Locators", - "tooltip": "" - }, { 
"type": "action", "command": "$PYPE_SCRIPTS\\particles\\instancerToObjectsWithAnimation.py", @@ -1318,61 +870,11 @@ "instancerToObjectsWithAnimation"], "title": "# Instancer To Objects With Animation", "tooltip": "" - }, - { - "type": "separator" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\particles\\mayaReplicateHoudiniTool.py", - "sourcetype": "file", - "tags": ["particles", - "houdini", - "houdiniTool", - "houdiniEngine"], - "title": "# Replicate Houdini Tool", - "tooltip": "" - }, - { - "type": "separator" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\particles\\clearInitialState.py", - "sourcetype": "file", - "tags": ["particles", - "clearInitialState"], - "title": "# Clear Initial State", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\particles\\killSelectedParticles.py", - "sourcetype": "file", - "tags": ["particles", - "killSelectedParticles"], - "title": "# Kill Selected Particles", - "tooltip": "" }] }, { "type": "menu", - "title": "# Yeti", - "items": [{ - "type": "action", - "command": "$PYPE_SCRIPTS\\yeti\\yeti_rig_manager.py", - "sourcetype": "file", - "tags": ["yeti", - "rig", - "fur", - "manager"], - "title": "# Open Yeti Rig Manager", - "tooltip": "" - }] -}, -{ - "type": "menu", - "title": "# Cleanup", + "title": "Cleanup", "items": [{ "type": "action", "command": "$PYPE_SCRIPTS\\cleanup\\repair_faulty_containers.py", @@ -1383,35 +885,6 @@ "title": "# Find and Repair Containers", "tooltip": "" }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\cleanup\\selectByType.py", - "sourcetype": "file", - "tags": ["cleanup", - "selectByType"], - "title": "# Select By Type", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\cleanup\\selectIntermediateObjects.py", - "sourcetype": "file", - "tags": ["cleanup", - "selectIntermediateObjects"], - "title": "# Select Intermediate Objects", - "tooltip": "" - }, - { - "type": "action", - "command": 
"$PYPE_SCRIPTS\\cleanup\\selectNonUniqueNames.py", - "sourcetype": "file", - "tags": ["cleanup", - "select", - "non unique", - "names"], - "title": "# Select Non Unique Names", - "tooltip": "" - }, { "type": "separator" }, @@ -1470,29 +943,9 @@ "title": "# Remove Unused Looks", "tooltip": "Remove all loaded yet unused Avalon look containers" }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\cleanup\\deleteGhostIntermediateObjects.py", - "sourcetype": "file", - "tags": ["cleanup", - "deleteGhostIntermediateObjects"], - "title": "# Delete Ghost Intermediate Objects", - "tooltip": "" - }, { "type": "separator" }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\cleanup\\resetViewportCache.py", - "sourcetype": "file", - "tags": ["cleanup", - "reset", - "viewport", - "cache"], - "title": "# Reset Viewport Cache", - "tooltip": "" - }, { "type": "action", "command": "$PYPE_SCRIPTS\\cleanup\\uniqifyNodeNames.py", @@ -1527,13 +980,13 @@ }, { "type": "action", - "command": "$PYPE_SCRIPTS\\cleanup\\colorbleedRename.py", + "command": "$PYPE_SCRIPTS\\cleanup\\ccRenameReplace.py", "sourcetype": "file", "tags": ["cleanup", "rename", "ui"], - "title": "# Colorbleed Renamer", - "tooltip": "Colorbleed Rename UI" + "title": "Renamer", + "tooltip": "Rename UI" }, { "type": "action", @@ -1543,225 +996,5 @@ "renameShapesToTransform"], "title": "# Rename Shapes To Transform", "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\cleanup\\reorderUI.py", - "sourcetype": "file", - "tags": ["cleanup", - "reorderUI"], - "title": "# Reorder UI", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\cleanup\\pastedCleaner.py", - "sourcetype": "file", - "tags": ["cleanup", - "pastedCleaner"], - "title": "# Pasted Cleaner", - "tooltip": "" - }] -}, -{ - "type": "menu", - "title": "# Others", - "items": [{ - "type": "menu", - "sourcetype": "file", - "title": "# Yeti", - "items": [{ - "type": "action", - "command": 
"$PYPE_SCRIPTS\\others\\yeti\\cache_selected_yeti_nodes.py", - "sourcetype": "file", - "tags": ["others", - "yeti", - "cache", - "selected"], - "title": "# Cache Selected Yeti Nodes", - "tooltip": "" - }] - }, - { - "type": "menu", - "title": "# Hair", - "tooltip": "", - "items": [{ - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\hair\\recolorHairCurrentCurve", - "sourcetype": "file", - "tags": ["others", - "selectSoftSelection"], - "title": "# Select Soft Selection", - "tooltip": "" - }] - }, - { - "type": "menu", - "command": "$PYPE_SCRIPTS\\others\\display", - "sourcetype": "file", - "tags": ["others", - "display"], - "title": "# Display", - "items": [{ - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\display\\wireframeSelectedObjects.py", - "sourcetype": "file", - "tags": ["others", - "wireframe", - "selected", - "objects"], - "title": "# Wireframe Selected Objects", - "tooltip": "" - }] - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\archiveSceneUI.py", - "sourcetype": "file", - "tags": ["others", - "archiveSceneUI"], - "title": "# Archive Scene UI", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\getSimilarMeshes.py", - "sourcetype": "file", - "tags": ["others", - "getSimilarMeshes"], - "title": "# Get Similar Meshes", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\createBoundingBoxEachSelected.py", - "sourcetype": "file", - "tags": ["others", - "createBoundingBoxEachSelected"], - "title": "# Create BoundingBox Each Selected", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\curveFromPositionEveryFrame.py", - "sourcetype": "file", - "tags": ["others", - "curveFromPositionEveryFrame"], - "title": "# Curve From Position", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\instanceLeafSmartTransform.py", - "sourcetype": "file", - "tags": ["others", - "instance", - "leaf", - "smart", - 
"transform"], - "title": "# Instance Leaf Smart Transform", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\instanceSmartTransform.py", - "sourcetype": "file", - "tags": ["others", - "instance", - "smart", - "transform"], - "title": "# Instance Smart Transform", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\randomizeUVShellsSelectedObjects.py", - "sourcetype": "file", - "tags": ["others", - "randomizeUVShellsSelectedObjects"], - "title": "# Randomize UV Shells", - "tooltip": "Select objects before running action" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\centerPivotGroup.py", - "sourcetype": "file", - "tags": ["others", - "centerPivotGroup"], - "title": "# Center Pivot Group", - "tooltip": "" - }, - { - "type": "separator" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\locatorsOnSelectedFaces.py", - "sourcetype": "file", - "tags": ["others", - "locatorsOnSelectedFaces"], - "title": "# Locators On Selected Faces", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\locatorsOnEdgeSelectionPrompt.py", - "sourcetype": "file", - "tags": ["others", - "locatorsOnEdgeSelectionPrompt"], - "title": "# Locators On Edge Selection Prompt", - "tooltip": "" - }, - { - "type": "separator" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\copyDeformers.py", - "sourcetype": "file", - "tags": ["others", - "copyDeformers"], - "title": "# Copy Deformers", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\selectInReferenceEditor.py", - "sourcetype": "file", - "tags": ["others", - "selectInReferenceEditor"], - "title": "# Select In Reference Editor", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\selectConstrainingObject.py", - "sourcetype": "file", - "tags": ["others", - "selectConstrainingObject"], - "title": "# Select Constraining Object", - "tooltip": "" - }, - 
{ - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\deformerSetRelationsUI.py", - "sourcetype": "file", - "tags": ["others", - "deformerSetRelationsUI"], - "title": "# Deformer Set Relations UI", - "tooltip": "" - }, - { - "type": "action", - "command": "$PYPE_SCRIPTS\\others\\recreateBaseNodesForAllLatticeNodes.py", - "sourcetype": "file", - "tags": ["others", - "recreate", - "base", - "nodes", - "lattice"], - "title": "# Recreate Base Nodes For Lattice Nodes", - "tooltip": "" }] }] diff --git a/pype/maya/menu_backup.json b/pype/maya/menu_backup.json index d16bb7e47f..e687c09b48 100644 --- a/pype/maya/menu_backup.json +++ b/pype/maya/menu_backup.json @@ -5,13 +5,13 @@ "title": "Version Up", "tooltip": "Incremental save with a specific format" }, -/* { +{ "type": "action", "command": "$PYPE_SCRIPTS\\others\\show_current_scene_in_explorer.py", "sourcetype": "file", "title": "Explore current scene..", "tooltip": "Show current scene in Explorer" -}, */ +}, { "type": "action", "command": "$PYPE_SCRIPTS\\avalon\\launch_manager.py", From 617e7aced9fa78cefe233e4395a0b76314e337d9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Jan 2019 12:31:53 +0100 Subject: [PATCH 15/24] Timer can be restarted. Timer duration is taken from config file in templates. 
--- pype/ftrack/actions/ftrack_action_handler.py | 1 + pype/ftrack/ftrack_run.py | 111 +++++++++++++++---- 2 files changed, 92 insertions(+), 20 deletions(-) diff --git a/pype/ftrack/actions/ftrack_action_handler.py b/pype/ftrack/actions/ftrack_action_handler.py index d147a2630b..3b4610de55 100644 --- a/pype/ftrack/actions/ftrack_action_handler.py +++ b/pype/ftrack/actions/ftrack_action_handler.py @@ -353,6 +353,7 @@ class AppAction(object): task = session.query('Task where id is {}'.format(entity['id'])).one() self.log.info('Starting timer for task: ' + task['name']) user.start_timer(task, force=True) + os.environ["FTRACK_LAST_TASK_ID"] = entity['id'] return { 'success': True, diff --git a/pype/ftrack/ftrack_run.py b/pype/ftrack/ftrack_run.py index 99ec2ec841..f084514c40 100644 --- a/pype/ftrack/ftrack_run.py +++ b/pype/ftrack/ftrack_run.py @@ -1,7 +1,6 @@ import sys import os -import argparse -import subprocess +import json import threading import time import ftrack_api @@ -168,7 +167,6 @@ class FtrackRunner: if self.bool_timer_event is False: self.start_timer_thread() - def start_timer_thread(self): if self.thread_timer is None: self.thread_timer = FtrackEventsThread(self) @@ -197,7 +195,7 @@ class FtrackRunner: def stop_countdown_thread(self): if self.thread_timer_coundown is not None: - self.thread_timer_coundown.runs=False + self.thread_timer_coundown.runs = False self.thread_timer_coundown.terminate() self.thread_timer_coundown.wait() self.thread_timer_coundown = None @@ -209,7 +207,8 @@ class FtrackRunner: # self.widget_timer.activateWindow() def change_count_widget(self, time): - self.widget_timer.lbl_rest_time.setText(str(time)) + str_time = str(time).replace(".0", "") + self.widget_timer.lbl_rest_time.setText(str_time) def timer_started(self): self.start_countdown_thread() @@ -225,22 +224,32 @@ class FtrackRunner: if self.thread_timer_coundown is not None: self.stop_countdown_thread() + def timer_restart(self): + if self.thread_timer is not None: + 
self.thread_timer.signal_restart_timer.emit() + + self.timer_started() + def timer_continue(self): if self.thread_timer_coundown is not None: self.thread_timer_coundown.signal_continue_timer.emit() + class FtrackEventsThread(QtCore.QThread): # Senders signal_timer_started = QtCore.Signal() signal_timer_stopped = QtCore.Signal() # Listeners signal_stop_timer = QtCore.Signal() + signal_restart_timer = QtCore.Signal() def __init__(self, parent): super(FtrackEventsThread, self).__init__() cred = credentials._get_credentials() self.username = cred['username'] self.signal_stop_timer.connect(self.ftrack_stop_timer) + self.signal_restart_timer.connect(self.ftrack_restart_timer) + self.user = None def run(self): self.timer_session = ftrack_api.Session(auto_connect_event_hub=True) @@ -248,6 +257,9 @@ class FtrackEventsThread(QtCore.QThread): 'topic=ftrack.update and source.user.username={}'.format(self.username), self.event_handler) + query = 'User where username is "{}"'.format(self.username) + self.user = self.timer_session.query(query).one() + self.timer_session.event_hub.wait() def event_handler(self, event): @@ -256,9 +268,10 @@ class FtrackEventsThread(QtCore.QThread): return except: return + new = event['data']['entities'][0]['changes']['start']['new'] old = event['data']['entities'][0]['changes']['start']['old'] - self.userId = event['source']['user']['id'] + if old is None and new is None: return elif old is None: @@ -268,12 +281,25 @@ class FtrackEventsThread(QtCore.QThread): def ftrack_stop_timer(self): try: - user = self.timer_session.query('User where id is ' + self.userId).one() - user.stop_timer() + self.user.stop_timer() self.timer_session.commit() except Exception as e: log.debug("Timer stop had issues: {}".format(e)) + def ftrack_restart_timer(self): + try: + last_task = None + if "FTRACK_LAST_TASK_ID" in os.environ: + task_id = os.environ["FTRACK_LAST_TASK_ID"] + query = 'Task where id is {}'.format(task_id) + last_task = 
self.timer_session.query(query).one() + + if (last_task is not None) and (self.user is not None): + self.user.start_timer(last_task) + self.timer_session.commit() + except Exception as e: + log.debug("Timer stop had issues: {}".format(e)) + class CountdownThread(QtCore.QThread): # Senders @@ -287,10 +313,12 @@ class CountdownThread(QtCore.QThread): def __init__(self, parent): super(CountdownThread, self).__init__() + self.runs = True self.over_line = False - self.count_length = 60*5 # 5 minutes - self.border_line = 31 + config_data = self.load_timer_values() + self.count_length = config_data['full_time']*60 + self.border_line = config_data['message_time']*60 + 1 self.reset_count() self.signal_reset_timer.connect(self.reset_count) self.signal_continue_timer.connect(self.continue_timer) @@ -335,6 +363,38 @@ class CountdownThread(QtCore.QThread): thread_keyboard.terminate() thread_keyboard.wait() + def load_timer_values(self): + templates = os.environ['PYPE_STUDIO_TEMPLATES'] + path_items = [templates, 'presets', 'ftrack', 'ftrack_config.json'] + filepath = os.path.sep.join(path_items) + data = dict() + try: + with open(filepath) as data_file: + json_dict = json.load(data_file) + data = json_dict['timer'] + except Exception as e: + msg = 'Loading "Ftrack Config file" Failed. Please check log for more information. Times are set to default.' 
+ self.log.warning("{} - {}".format(msg, str(e))) + + data = self.validate_timer_values(data) + + return data + + def validate_timer_values(self, data): + # default values + if 'full_time' not in data: + data['full_time'] = 15 + if 'message_time' not in data: + data['message_time'] = 0.5 + + # minimum values + if data['full_time'] < 2: + data['full_time'] = 2 + # message time is earlier that full time + if data['message_time'] > data['full_time']: + data['message_time'] = data['full_time'] - 0.5 + return data + class MouseThread(QtCore.QThread): signal_stop = QtCore.Signal() @@ -377,6 +437,7 @@ class KeyboardThread(QtCore.QThread): self.k_listener = keyboard.Listener(on_press=self.on_press) self.k_listener.start() + class StopTimer(QtWidgets.QWidget): SIZE_W = 300 @@ -419,31 +480,31 @@ class StopTimer(QtWidgets.QWidget): msg_info = "You didn't work for a long time." msg_question = "Would you like to stop Ftrack timer?" - msg_stopped = "Your Ftrack timer was stopped!" + msg_stopped = "Your Ftrack timer was stopped. Do you want to start again?" 
self.lbl_info = QtWidgets.QLabel(msg_info) self.lbl_info.setFont(self.font) self.lbl_info.setTextFormat(QtCore.Qt.RichText) self.lbl_info.setObjectName("lbl_info") - self.lbl_info.setWordWrap(True); + self.lbl_info.setWordWrap(True) self.lbl_question = QtWidgets.QLabel(msg_question) self.lbl_question.setFont(self.font) self.lbl_question.setTextFormat(QtCore.Qt.RichText) self.lbl_question.setObjectName("lbl_question") - self.lbl_question.setWordWrap(True); + self.lbl_question.setWordWrap(True) self.lbl_stopped = QtWidgets.QLabel(msg_stopped) self.lbl_stopped.setFont(self.font) self.lbl_stopped.setTextFormat(QtCore.Qt.RichText) self.lbl_stopped.setObjectName("lbl_stopped") - self.lbl_stopped.setWordWrap(True); + self.lbl_stopped.setWordWrap(True) self.lbl_rest_time = QtWidgets.QLabel("") self.lbl_rest_time.setFont(self.font) self.lbl_rest_time.setTextFormat(QtCore.Qt.RichText) self.lbl_rest_time.setObjectName("lbl_rest_time") - self.lbl_rest_time.setWordWrap(True); + self.lbl_rest_time.setWordWrap(True) self.lbl_rest_time.setAlignment(QtCore.Qt.AlignCenter) self.form.addRow(self.lbl_info) @@ -463,13 +524,18 @@ class StopTimer(QtWidgets.QWidget): self.btn_continue.setToolTip('Timer will continue') self.btn_continue.clicked.connect(self.continue_timer) - self.btn_ok = QtWidgets.QPushButton("OK") - self.btn_ok.setToolTip('Close window') - self.btn_ok.clicked.connect(self.close_widget) + self.btn_close = QtWidgets.QPushButton("Close") + self.btn_close.setToolTip('Close window') + self.btn_close.clicked.connect(self.close_widget) + + self.btn_restart = QtWidgets.QPushButton("Start timer") + self.btn_restart.setToolTip('Timer will be started again') + self.btn_restart.clicked.connect(self.restart_timer) self.group_btn.addWidget(self.btn_continue) self.group_btn.addWidget(self.btn_stop) - self.group_btn.addWidget(self.btn_ok) + self.group_btn.addWidget(self.btn_restart) + self.group_btn.addWidget(self.btn_close) self.main.addLayout(self.form) 
self.main.addLayout(self.group_btn) @@ -483,12 +549,17 @@ class StopTimer(QtWidgets.QWidget): self.btn_continue.setVisible(self.main_context) self.btn_stop.setVisible(self.main_context) - self.btn_ok.setVisible(not self.main_context) + self.btn_restart.setVisible(not self.main_context) + self.btn_close.setVisible(not self.main_context) def stop_timer(self): self.parent.timer_stop() self.close_widget() + def restart_timer(self): + self.parent.timer_restart() + self.close_widget() + def continue_timer(self): self.parent.timer_continue() self.close_widget() From 1aac16e9136747d55edc885a7a8f19d716a5e0a1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Jan 2019 14:14:46 +0100 Subject: [PATCH 16/24] Now doesn't matter how timer is started and when --- pype/ftrack/ftrack_run.py | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/pype/ftrack/ftrack_run.py b/pype/ftrack/ftrack_run.py index f084514c40..db42c501a7 100644 --- a/pype/ftrack/ftrack_run.py +++ b/pype/ftrack/ftrack_run.py @@ -250,6 +250,7 @@ class FtrackEventsThread(QtCore.QThread): self.signal_stop_timer.connect(self.ftrack_stop_timer) self.signal_restart_timer.connect(self.ftrack_restart_timer) self.user = None + self.last_task = None def run(self): self.timer_session = ftrack_api.Session(auto_connect_event_hub=True) @@ -257,8 +258,14 @@ class FtrackEventsThread(QtCore.QThread): 'topic=ftrack.update and source.user.username={}'.format(self.username), self.event_handler) - query = 'User where username is "{}"'.format(self.username) - self.user = self.timer_session.query(query).one() + user_query = 'User where username is "{}"'.format(self.username) + self.user = self.timer_session.query(user_query).one() + + timer_query = 'Timer where user.username is "{}"'.format(self.username) + timer = self.timer_session.query(timer_query).first() + if timer is not None: + self.last_task = timer['context'] + self.signal_timer_started.emit() self.timer_session.event_hub.wait() @@ 
-274,7 +281,13 @@ class FtrackEventsThread(QtCore.QThread): if old is None and new is None: return - elif old is None: + + timer_query = 'Timer where user.username is "{}"'.format(self.username) + timer = self.timer_session.query(timer_query).first() + if timer is not None: + self.last_task = timer['context'] + + if old is None: self.signal_timer_started.emit() elif new is None: self.signal_timer_stopped.emit() @@ -294,8 +307,8 @@ class FtrackEventsThread(QtCore.QThread): query = 'Task where id is {}'.format(task_id) last_task = self.timer_session.query(query).one() - if (last_task is not None) and (self.user is not None): - self.user.start_timer(last_task) + if (self.last_task is not None) and (self.user is not None): + self.user.start_timer(self.last_task) self.timer_session.commit() except Exception as e: log.debug("Timer stop had issues: {}".format(e)) @@ -374,7 +387,7 @@ class CountdownThread(QtCore.QThread): data = json_dict['timer'] except Exception as e: msg = 'Loading "Ftrack Config file" Failed. Please check log for more information. Times are set to default.' 
- self.log.warning("{} - {}".format(msg, str(e))) + log.warning("{} - {}".format(msg, str(e))) data = self.validate_timer_values(data) From 13bc5081dccb9f3ac408bb38f514148cfb8bf1a3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 3 Jan 2019 14:22:26 +0100 Subject: [PATCH 17/24] deleted TASK_ID_ environ --- pype/ftrack/actions/ftrack_action_handler.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pype/ftrack/actions/ftrack_action_handler.py b/pype/ftrack/actions/ftrack_action_handler.py index 3b4610de55..d147a2630b 100644 --- a/pype/ftrack/actions/ftrack_action_handler.py +++ b/pype/ftrack/actions/ftrack_action_handler.py @@ -353,7 +353,6 @@ class AppAction(object): task = session.query('Task where id is {}'.format(entity['id'])).one() self.log.info('Starting timer for task: ' + task['name']) user.start_timer(task, force=True) - os.environ["FTRACK_LAST_TASK_ID"] = entity['id'] return { 'success': True, From c2c2e86c85650764d686e0239bf39667b753d782 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 4 Jan 2019 13:45:39 +0100 Subject: [PATCH 18/24] normalising environment paths before passing to deadline --- pype/plugins/maya/publish/submit_deadline.py | 32 +++++++++----------- 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/pype/plugins/maya/publish/submit_deadline.py b/pype/plugins/maya/publish/submit_deadline.py index eacac445d8..5247311c97 100644 --- a/pype/plugins/maya/publish/submit_deadline.py +++ b/pype/plugins/maya/publish/submit_deadline.py @@ -228,6 +228,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): # have accesss to these paths, such as if slaves are # running Linux and the submitter is on Windows. 
"PYTHONPATH", + "PATH", "MTOA_EXTENSIONS_PATH", "MTOA_EXTENSIONS", @@ -254,33 +255,28 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): if path.lower().startswith('pype_'): environment[path] = os.environ[path] - PATHS = os.environ["PATH"].split(";") - environment["PATH"] = ";".join([p for p in PATHS - if p.startswith("P:")]) + environment["PATH"] = os.environ["PATH"] + clean_pythonpath = '' for path in environment['PYTHONPATH'].split(os.pathsep): - # self.log.debug('checking path for UTF: {}'.format(path)) try: path.decode('UTF-8', 'strict') - # path = path.lower().replace("k:/", r"\\kre-c01\\share\\").replace("p:/", r"\\kre-p01\\share\\") clean_pythonpath += path + os.pathsep except UnicodeDecodeError: self.log.debug('path contains non UTF characters') environment['PYTHONPATH'] = clean_pythonpath - for key in environment: - remapped_key = '' - list_paths = environment[key].split(os.pathsep) - if len(list_paths) > 1: - for path in list_paths: - path = path.replace("K:/", "\\\\kre-c01\\share\\").replace("P:/", "\\\\kre-p01\\share\\") - path = path.replace("K:\\", "\\\\kre-c01\\share\\").replace("P:\\", "\\\\kre-p01\\share\\") - remapped_key += path + os.pathsep - else: - path = list_paths[0].replace("K:/", "\\\\kre-c01\\share\\").replace("P:/", "\\\\kre-p01\\share\\") - path = path.replace("K:\\", "\\\\kre-c01\\share\\").replace("P:\\", "\\\\kre-p01\\share\\") - remapped_key = path - environment[key] = remapped_key + clean_path = '' + for path in environment['PATH'].split(os.pathsep): + clean_path += os.path.normpath(path) + os.pathsep + + environment['PATH'] = clean_path + + for path in environment: + environment[path] = environment[path].replace( + os.path.normpath(environment['PYPE_STUDIO_CORE_MOUNT']), + environment['PYPE_STUDIO_CORE']) + payload["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( From ee4a90baf892baf3359bb6c15cd529e5f550f3c7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 7 Jan 2019 17:05:42 +0100 Subject: 
[PATCH 19/24] Added show_interface to event handler. Sync to avalon modified, need add checking statuses of tasks --- pype/ftrack/events/event_sync_to_avalon.py | 137 +++++++++++++++------ pype/ftrack/events/ftrack_event_handler.py | 42 ++++--- 2 files changed, 126 insertions(+), 53 deletions(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 77b036880c..d1d6519a53 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -9,10 +9,10 @@ from avalon.vendor import toml from bson.objectid import ObjectId from pype.ftrack import ftrack_utils + class Sync_to_Avalon(BaseEvent): def launch(self, session, entities, event): - self.ca_mongoid = 'avalon_mongo_id' # If mongo_id textfield has changed: RETURN! # - infinite loop @@ -21,7 +21,7 @@ class Sync_to_Avalon(BaseEvent): if self.ca_mongoid in ent['keys']: return self.proj = None - + self.errors = [] # get project for entity in entities: try: @@ -32,10 +32,12 @@ class Sync_to_Avalon(BaseEvent): break # check if project is set to auto-sync - if (self.proj is None or + if ( + self.proj is None or 'avalon_auto_sync' not in self.proj['custom_attributes'] or - self.proj['custom_attributes']['avalon_auto_sync'] is False): - return + self.proj['custom_attributes']['avalon_auto_sync'] is False + ): + return # check if project have Custom Attribute 'avalon_mongo_id' if self.ca_mongoid not in self.proj['custom_attributes']: @@ -51,13 +53,18 @@ class Sync_to_Avalon(BaseEvent): # get avalon project if possible io.install() try: - self.avalon_project = io.find_one({"_id": ObjectId(self.projectId)}) + self.avalon_project = io.find_one({ + "_id": ObjectId(self.projectId) + }) except: self.avalon_project = None importEntities = [] if self.avalon_project is None: - self.avalon_project = io.find_one({"type": "project", "name": self.proj["full_name"]}) + self.avalon_project = io.find_one({ + "type": "project", + "name": 
self.proj["full_name"] + }) if self.avalon_project is None: importEntities.append(self.proj) else: @@ -69,9 +76,10 @@ class Sync_to_Avalon(BaseEvent): if entity.entity_type.lower() in ['task']: entity = entity['parent'] - try: - mongo_id = entity['custom_attributes'][self.ca_mongoid] - except: + if ( + 'custom_attributes' not in entity or + self.ca_mongoid not in entity['custom_attributes'] + ): message = "Custom attribute '{}' for '{}' is not created or don't have set permissions for API".format(self.ca_mongoid, entity.entity_type) self.log.warning(message) self.show_message(event, message, False) @@ -93,13 +101,25 @@ class Sync_to_Avalon(BaseEvent): except ValueError as ve: message = str(ve) - self.show_message(event, message, False) + items = [{ + 'label': 'Error', + 'type': 'textarea', + 'name': 'error', + 'value': message + }] + self.show_interface(event, items) self.log.warning(message) except Exception as e: message = str(e) ftrack_message = "SyncToAvalon event ended with unexpected error please check log file for more information." 
- self.show_message(event, ftrack_message, False) + items = [{ + 'label': 'Error', + 'type': 'textarea', + 'name': 'error', + 'value': ftrack_message + }] + self.show_interface(event, items) self.log.error(message) io.uninstall() @@ -122,21 +142,26 @@ class Sync_to_Avalon(BaseEvent): if self.avalon_project is None: inventory.save(name, config, template) - self.avalon_project = io.find_one({'type': 'project', 'name': name}) + self.avalon_project = io.find_one({'type': type, 'name': name}) elif self.avalon_project['name'] != name: - raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(self.avalon_project['name'], name)) + entity['name'] = self.avalon_project['name'] + session.commit() + + msg = 'You can\'t change name {} to {}, avalon wouldn\'t work properly!\nName was changed back!'.format(self.avalon_project['name'], name) + self.errors.append(msg) + return self.projectId = self.avalon_project['_id'] - data = ftrack_utils.get_data(self, entity, session,self.custom_attributes) + data = ftrack_utils.get_data(self, entity, session, self.custom_attributes) io.update_many( {"_id": ObjectId(self.projectId)}, - {'$set':{ - 'name':name, - 'config':config, - 'data':data, + {'$set': { + 'name': name, + 'config': config, + 'data': data, }}) entity['custom_attributes'][self.ca_mongoid] = str(self.projectId) @@ -146,7 +171,7 @@ class Sync_to_Avalon(BaseEvent): if self.avalon_project is None: self.importToAvalon(session, self.proj) - data = ftrack_utils.get_data(self, entity, session,self.custom_attributes) + data = ftrack_utils.get_data(self, entity, session, self.custom_attributes) # return if entity is silo if len(data['parents']) == 0: @@ -171,30 +196,64 @@ class Sync_to_Avalon(BaseEvent): if avalon_asset is None: mongo_id = inventory.create_asset(name, silo, data, ObjectId(self.projectId)) # Raise error if it seems to be different ent. 
with same name - elif (avalon_asset['data']['parents'] != data['parents'] or - avalon_asset['silo'] != silo): - raise ValueError('In Avalon DB already exists entity with name "{0}"'.format(name)) - elif avalon_asset['name'] != entity['name']: - raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please set name back'.format(avalon_asset['name'], name)) - elif avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']: - old_path = "/".join(avalon_asset['data']['parents']) - new_path = "/".join(data['parents']) - raise ValueError('You can\'t move with entities. Entity "{}" was moved from "{}" to "{}" , avalon DB won\'t work properly'.format(avalon_asset['name'], old_path, new_path)) + elif ( + avalon_asset['data']['parents'] != data['parents'] or + avalon_asset['silo'] != silo + ): + msg = 'In Avalon DB already exists entity with name "{0}"'.format(name) + self.errors.append(msg) + return + else: + if avalon_asset['name'] != entity['name']: + self.assetNamer(session, entity, avalon_asset) + if avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']: + old_path = "/".join(avalon_asset['data']['parents']) + new_path = "/".join(data['parents']) + msg = 'You can\'t move with entities. 
Entity "{}" was moved from "{}" to "{}" , avalon DB won\'t work properly'.format(avalon_asset['name'], old_path, new_path) + self.errors.append(msg) + + if len(self.errors) > 0: + return io.update_many( {"_id": ObjectId(mongo_id)}, - {'$set':{ - 'name':name, - 'silo':silo, - 'data':data, + {'$set': { + 'name': name, + 'silo': silo, + 'data': data, 'parent': ObjectId(self.projectId)}}) entity['custom_attributes'][self.ca_mongoid] = str(mongo_id) + def checkChilds(self, entity): + if entity['children']: + childs = entity['children'] + for child in childs: + if child.entity_type.lower() == 'task': + pass + else: + self.checkChilds() + + def assetNamer(self, session, entity, asset): + ability = True + if entity['children']: + childs = entity['children'] + for child in childs: + if child.entity_type.lower() == 'task': + pass + + if ability is True: + return + msg = 'You can\'t change name {} to {}, avalon wouldn\'t work properly!\nPlease create new entity.\nName was changed back!'.format(avalon_asset['name'], name) + entity['name'] = asset['name'] + session.commit() + self.errors.append(msg) + def setAvalonAttributes(self): self.custom_attributes = [] - all_avalon_attr = self.session.query('CustomAttributeGroup where name is "avalon"').one() + query = 'CustomAttributeGroup where name is "avalon"' + all_avalon_attr = self.session.query(query).one() for cust_attr in all_avalon_attr['custom_attribute_configurations']: if 'avalon_' not in cust_attr['key']: self.custom_attributes.append(cust_attr) @@ -210,10 +269,13 @@ class Sync_to_Avalon(BaseEvent): self.session, *args ) return - + def _translate_event(self, session, event): - exceptions = ['assetversion', 'job', 'user', 'reviewsessionobject', 'timer', 'socialfeed', 'timelog'] - _selection = event['data'].get('entities',[]) + exceptions = [ + 'assetversion', 'job', 'user', 'reviewsessionobject', 'timer', + 'socialfeed', 'timelog' + ] + _selection = event['data'].get('entities', []) _entities = list() for entity in 
_selection: @@ -227,6 +289,7 @@ class Sync_to_Avalon(BaseEvent): return [_entities, event] + def register(session, **kw): '''Register plugin. Called when used as an plugin.''' diff --git a/pype/ftrack/events/ftrack_event_handler.py b/pype/ftrack/events/ftrack_event_handler.py index e6d942af06..0cb53b74a9 100644 --- a/pype/ftrack/events/ftrack_event_handler.py +++ b/pype/ftrack/events/ftrack_event_handler.py @@ -1,19 +1,7 @@ # :coding: utf-8 # :copyright: Copyright (c) 2017 ftrack -import os -import logging -import getpass -# import platform import ftrack_api -import toml -from avalon import io, lib, pipeline -from avalon import session as sess -import acre - -from app.api import ( - Templates, - Logger -) +from app.api import Logger class BaseEvent(object): @@ -47,7 +35,7 @@ class BaseEvent(object): def _translate_event(self, session, event): '''Return *event* translated structure to be used with the API.''' - _selection = event['data'].get('entities',[]) + _selection = event['data'].get('entities', []) _entities = list() for entity in _selection: @@ -119,7 +107,7 @@ class BaseEvent(object): ''' raise NotImplementedError() - def show_message(self, event, input_message, result = False): + def show_message(self, event, input_message, result=False): """ Shows message to user who triggered event - event - just source of user id @@ -137,6 +125,8 @@ class BaseEvent(object): return user_id = event['source']['user']['id'] + target = 'applicationId=ftrack.client.web and user.id="{0}"'.format(user_id) + self.session.event_hub.publish( ftrack_api.event.base.Event( topic='ftrack.action.trigger-user-interface', @@ -145,7 +135,27 @@ class BaseEvent(object): success=result, message=message ), - target='applicationId=ftrack.client.web and user.id="{0}"'.format(user_id) + target=target + ), + on_error='ignore' + ) + + def show_interface(self, event, items): + """ + Shows interface to user who triggered event + - 'items' must be list containing Ftrack interface items + """ + user_id 
= event['source']['user']['id'] + target = 'applicationId=ftrack.client.web and user.id="{0}"'.format(user_id) + + self.session.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.action.trigger-user-interface', + data=dict( + type='widget', + items=items + ), + target=target ), on_error='ignore' ) From 15c391129655498fe04eff65de8a7b1bd1a5afdf Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 8 Jan 2019 11:32:44 +0100 Subject: [PATCH 20/24] hotfix / filter integrate rendered frames --- pype/plugins/global/publish/integrate_rendered_frames.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py index 50914a4339..bc5b138fb8 100644 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ b/pype/plugins/global/publish/integrate_rendered_frames.py @@ -24,6 +24,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin): label = "Integrate Frames" order = pyblish.api.IntegratorOrder + families = ["imagesequence", "render", "write", "source"] + family_targets = [".frames", ".local", ".review", "imagesequence", "render"] def process(self, instance): From 44e1d87b52332321c1b0903e68cf14ddb0c054da Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 8 Jan 2019 13:25:16 +0100 Subject: [PATCH 21/24] sync to avalon shows interface to user when error has happened so he can't continue to do another errors --- pype/ftrack/events/event_sync_to_avalon.py | 117 +++++++++++++++------ pype/ftrack/ftrack_utils.py | 17 +++ 2 files changed, 100 insertions(+), 34 deletions(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index d1d6519a53..0f2cb9d29f 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -10,6 +10,11 @@ from bson.objectid import ObjectId from pype.ftrack import ftrack_utils +class ExpectedError(Exception): + def __init__(self, 
*args, **kwargs): + super().__init__(self, *args, **kwargs) + + class Sync_to_Avalon(BaseEvent): def launch(self, session, entities, event): @@ -96,19 +101,21 @@ class Sync_to_Avalon(BaseEvent): io.install() try: for entity in importEntities: - self.importToAvalon(session, entity) + self.importToAvalon(session, event, entity) session.commit() - except ValueError as ve: - message = str(ve) - items = [{ - 'label': 'Error', - 'type': 'textarea', - 'name': 'error', - 'value': message - }] + except ExpectedError as ee: + items = [] + for error in self.errors: + info = { + 'label': 'Error', + 'type': 'textarea', + 'name': 'error', + 'value': error + } + items.append(info) + self.log.warning(error) self.show_interface(event, items) - self.log.warning(message) except Exception as e: message = str(e) @@ -126,7 +133,7 @@ class Sync_to_Avalon(BaseEvent): return - def importToAvalon(self, session, entity): + def importToAvalon(self, session, event, entity): if self.ca_mongoid not in entity['custom_attributes']: raise ValueError("Custom attribute '{}' for '{}' is not created or don't have set permissions for API".format(self.ca_mongoid, entity['name'])) @@ -169,12 +176,14 @@ class Sync_to_Avalon(BaseEvent): return if self.avalon_project is None: - self.importToAvalon(session, self.proj) + self.importToAvalon(session, event, self.proj) data = ftrack_utils.get_data(self, entity, session, self.custom_attributes) - # return if entity is silo + # only check name if entity is silo if len(data['parents']) == 0: + if self.checkSilo(entity, event, session) is False: + raise ExpectedError return else: silo = data['parents'][0] @@ -205,16 +214,20 @@ class Sync_to_Avalon(BaseEvent): return else: if avalon_asset['name'] != entity['name']: - self.assetNamer(session, entity, avalon_asset) + if self.checkChilds(entity) is False: + msg = 'You can\'t change name {} to {}, avalon wouldn\'t work properly!\n\nName was changed back!\n\nCreate new entity if you want to change 
name.'.format(avalon_asset['name'], entity['name']) + entity['name'] = avalon_asset['name'] + session.commit() + self.errors.append(msg) if avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']: old_path = "/".join(avalon_asset['data']['parents']) new_path = "/".join(data['parents']) - msg = 'You can\'t move with entities. Entity "{}" was moved from "{}" to "{}" , avalon DB won\'t work properly'.format(avalon_asset['name'], old_path, new_path) + msg = 'You can\'t move with entities.\nEntity "{}" was moved from "{}" to "{}"\n\nAvalon won\'t work properly, please move them back!'.format(avalon_asset['name'], old_path, new_path) self.errors.append(msg) if len(self.errors) > 0: - return + raise ExpectedError io.update_many( {"_id": ObjectId(mongo_id)}, @@ -227,28 +240,64 @@ class Sync_to_Avalon(BaseEvent): entity['custom_attributes'][self.ca_mongoid] = str(mongo_id) def checkChilds(self, entity): - if entity['children']: - childs = entity['children'] - for child in childs: - if child.entity_type.lower() == 'task': - pass + if (entity.entity_type.lower() != 'task' and 'children' not in entity): + return True + childs = entity['children'] + for child in childs: + if child.entity_type.lower() == 'task': + config = ftrack_utils.get_config_data() + if 'sync_to_avalon' in config: + config = config['sync_to_avalon'] + if 'statuses_name_change' in config: + available_statuses = config['statuses_name_change'] else: - self.checkChilds() + available_statuses = [] + ent_status = child['status']['name'].lower() + if ent_status not in available_statuses: + return False + # If not task go deeper + elif self.checkChilds(child) is False: + return False + # If everything is allright + return True - def assetNamer(self, session, entity, asset): - ability = True - if entity['children']: - childs = entity['children'] - for child in childs: - if child.entity_type.lower() == 'task': - pass + def checkSilo(self, entity, event, session): + changes = 
event['data']['entities'][0]['changes'] + if 'name' not in changes: + return True + new_name = changes['name']['new'] + old_name = changes['name']['old'] - if ability is True: - return - msg = 'You can\'t change name {} to {}, avalon wouldn\'t work properly!\nPlease create new entity.\nName was changed back!'.format(avalon_asset['name'], name) - entity['name'] = asset['name'] - session.commit() + if 'children' not in entity or len(entity['children']) < 1: + return True + + if self.checkChilds(entity) is True: + self.updateSilo(old_name, new_name) + return True + + new_found = 0 + old_found = 0 + for asset in io.find({'silo': new_name}): + new_found += 1 + for asset in io.find({'silo': old_name}): + old_found += 1 + + if new_found > 0 or old_found == 0: + return True + + # If any condition is possible, show error to user and change name back + msg = 'You can\'t change name {} to {}, avalon wouldn\'t work properly!\n\nName was changed back!\n\nCreate new entity if you want to change name.'.format(old_name, new_name) self.errors.append(msg) + entity['name'] = old_name + session.commit() + + return False + + def updateSilo(self, old, new): + io.update_many( + {'silo': old}, + {'$set': {'silo': new}} + ) def setAvalonAttributes(self): self.custom_attributes = [] diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py index caaeb6c707..2177b3f8c3 100644 --- a/pype/ftrack/ftrack_utils.py +++ b/pype/ftrack/ftrack_utils.py @@ -1,6 +1,7 @@ import os import sys import re +import json from pprint import * import ftrack_api @@ -13,6 +14,22 @@ from app.api import Logger log = Logger.getLogger(__name__) + +def get_config_data(): + templates = os.environ['PYPE_STUDIO_TEMPLATES'] + path_items = [templates, 'presets', 'ftrack', 'ftrack_config.json'] + filepath = os.path.sep.join(path_items) + data = dict() + try: + with open(filepath) as data_file: + data = json.load(data_file) + + except Exception as e: + msg = 'Loading "Ftrack Config file" Failed. 
Please check log for more information. Times are set to default.' + log.warning("{} - {}".format(msg, str(e))) + + return data + def get_data(parent, entity, session, custom_attributes): entity_type = entity.entity_type From e25c840a29fb96a72f611520b6773bce213c6c53 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 9 Jan 2019 19:39:38 +0100 Subject: [PATCH 22/24] add option to work with arnold standins and simple proxies --- pype/plugins/global/publish/integrate.py | 3 +- pype/plugins/maya/create/create_ass.py | 32 ++++ pype/plugins/maya/load/load_ass.py | 148 ++++++++++++++++++ pype/plugins/maya/publish/collect_ass.py | 35 +++++ pype/plugins/maya/publish/extract_ass.py | 47 ++++++ pype/plugins/maya/publish/extract_assproxy.py | 73 +++++++++ 6 files changed, 337 insertions(+), 1 deletion(-) create mode 100644 pype/plugins/maya/create/create_ass.py create mode 100644 pype/plugins/maya/load/load_ass.py create mode 100644 pype/plugins/maya/publish/collect_ass.py create mode 100644 pype/plugins/maya/publish/extract_ass.py create mode 100644 pype/plugins/maya/publish/extract_assproxy.py diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index 8a1d2224cb..b63c1693eb 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -37,7 +37,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "yeticache", "nukescript", "review", - "scene"] + "scene", + "ass"] def process(self, instance): diff --git a/pype/plugins/maya/create/create_ass.py b/pype/plugins/maya/create/create_ass.py new file mode 100644 index 0000000000..3423648c40 --- /dev/null +++ b/pype/plugins/maya/create/create_ass.py @@ -0,0 +1,32 @@ +from collections import OrderedDict + +import avalon.maya + +from maya import cmds + + +class CreateAss(avalon.maya.Creator): + """Arnold Archive""" + + name = "ass" + label = "Ass StandIn" + family = "ass" + icon = "cube" + + def process(self): + instance = super(CreateAss, 
self).process() + + data = OrderedDict(**self.data) + + nodes = list() + + if (self.options or {}).get("useSelection"): + nodes = cmds.ls(selection=True) + + cmds.sets(nodes, rm=instance) + + assContent = cmds.sets(name="content_SET") + assProxy = cmds.sets(name="proxy_SET", empty=True) + cmds.sets([assContent, assProxy], forceElement=instance) + + self.data = data diff --git a/pype/plugins/maya/load/load_ass.py b/pype/plugins/maya/load/load_ass.py new file mode 100644 index 0000000000..814639a4d9 --- /dev/null +++ b/pype/plugins/maya/load/load_ass.py @@ -0,0 +1,148 @@ +from avalon import api +import pype.maya.plugin +import os + + +class AssProxyLoader(pype.maya.plugin.ReferenceLoader): + """Load the Proxy""" + + families = ["ass"] + representations = ["ass"] + + label = "Reference .ASS standin with Proxy" + order = -10 + icon = "code-fork" + color = "orange" + + def process_reference(self, context, name, namespace, data): + + import maya.cmds as cmds + from avalon import maya + import pymel.core as pm + + with maya.maintained_selection(): + + groupName = "{}:{}".format(namespace, name) + path = self.fname + proxyPath = os.path.splitext(path)[0] + ".ma" + + nodes = cmds.file(proxyPath, + namespace=namespace, + reference=True, + returnNewNodes=True, + groupReference=True, + groupName=groupName) + + cmds.makeIdentity(groupName, apply=False, rotate=True, translate=True, scale=True) + + # Set attributes + proxyShape = pm.ls(nodes, type="mesh")[0] + proxyShape = pm.ls(nodes, type="mesh")[0] + + proxyShape.aiTranslator.set('procedural') + proxyShape.dso.set(path) + proxyShape.aiOverrideShaders.set(0) + + + self[:] = nodes + + return nodes + + def switch(self, container, representation): + self.update(container, representation) + + +class AssStandinLoader(api.Loader): + """Load .ASS file as standin""" + + families = ["ass"] + representations = ["ass"] + + label = "Load .ASS file as standin" + order = -5 + icon = "code-fork" + color = "orange" + + def load(self, context, 
name, namespace, data):
+
+        import maya.cmds as cmds
+        import avalon.maya.lib as lib
+        from avalon.maya.pipeline import containerise
+        import mtoa.ui.arnoldmenu
+        import pymel.core as pm
+
+
+        asset = context['asset']['name']
+        namespace = namespace or lib.unique_namespace(
+            asset + "_",
+            prefix="_" if asset[0].isdigit() else "",
+            suffix="_",
+        )
+
+        # cmds.loadPlugin("gpuCache", quiet=True)
+
+        # Root group
+        label = "{}:{}".format(namespace, name)
+        root = pm.group(name=label, empty=True)
+
+        # Create transform with shape
+        transform_name = label + "_ASS"
+        # transform = pm.createNode("transform", name=transform_name,
+        #                         parent=root)
+
+        standinShape = pm.PyNode(mtoa.ui.arnoldmenu.createStandIn())
+        standin = standinShape.getParent()
+        standin.rename(transform_name)
+
+        pm.parent(standin, root)
+
+        # Set the standin filepath
+        standinShape.dso.set(self.fname)
+
+
+        # Lock parenting of the transform and standin
+        cmds.lockNode([root, standin], lock=True)
+
+        nodes = [root, standin]
+        self[:] = nodes
+
+        return containerise(
+            name=name,
+            namespace=namespace,
+            nodes=nodes,
+            context=context,
+            loader=self.__class__.__name__)
+
+    def update(self, container, representation):
+
+        import pymel.core as pm
+
+        path = api.get_representation_path(representation)
+
+        # Update the standin
+        members = pm.sets(container['objectName'], query=True)
+        standins = pm.ls(members, type="AiStandIn", long=True)
+
+        assert len(standins) == 1, "This is a bug"
+
+        for standin in standins:
+            standin.dso.set(path)
+
+        container = pm.PyNode(container["objectName"])
+        container.representation.set(str(representation["_id"]))
+
+    def switch(self, container, representation):
+        self.update(container, representation)
+
+    def remove(self, container):
+        import maya.cmds as cmds
+        members = cmds.sets(container['objectName'], query=True)
+        cmds.lockNode(members, lock=False)
+        cmds.delete([container['objectName']] + members)
+
+        # Clean up the namespace
+        try:
+            
cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass diff --git a/pype/plugins/maya/publish/collect_ass.py b/pype/plugins/maya/publish/collect_ass.py new file mode 100644 index 0000000000..c0174e7026 --- /dev/null +++ b/pype/plugins/maya/publish/collect_ass.py @@ -0,0 +1,35 @@ +from maya import cmds +import pymel.core as pm + +import pyblish.api +import avalon.api + +class CollectAssData(pyblish.api.InstancePlugin): + """Collect Ass data + + """ + + order = pyblish.api.CollectorOrder + 0.2 + label = 'Collect Ass' + families = ["ass"] + + def process(self, instance): + + + context = instance.context + + objsets = instance.data['setMembers'] + + for objset in objsets: + members = cmds.sets(objset, query=True) + if members is None: + self.log.warning("Skipped empty instance: \"%s\" " % objset) + continue + if objset == "content_SET": + instance.data['setMembers'] = members + elif objset == "proxy_SET": + assert len(members) == 1, "You have multiple proxy meshes, please only use one" + instance.data['proxy'] = members + + + self.log.debug("data: {}".format(instance.data)) diff --git a/pype/plugins/maya/publish/extract_ass.py b/pype/plugins/maya/publish/extract_ass.py new file mode 100644 index 0000000000..14b548b928 --- /dev/null +++ b/pype/plugins/maya/publish/extract_ass.py @@ -0,0 +1,47 @@ +import os + +import avalon.maya +import pype.api + +from maya import cmds + + +class ExtractAssStandin(pype.api.Extractor): + """Extract the content of the instance to a ass file + + Things to pay attention to: + - If animation is toggled, are the frames correct + - + """ + + label = "Ass Standin (.ass)" + hosts = ["maya"] + families = ["ass"] + + def process(self, instance): + + staging_dir = self.staging_dir(instance) + file_name = "{}.ass".format(instance.name) + file_path = os.path.join(staging_dir, file_name) + + # Write out .ass file + self.log.info("Writing: '%s'" % file_path) + with 
avalon.maya.maintained_selection(): + self.log.info("Writing: {}".format(instance.data["setMembers"])) + cmds.select(instance.data["setMembers"], noExpand=True) + cmds.arnoldExportAss( filename=file_path, + selected=True, + asciiAss=True, + shadowLinks=True, + lightLinks=True, + boundingBox=True + ) + + + if "files" not in instance.data: + instance.data["files"] = list() + + instance.data["files"].append(file_name) + + self.log.info("Extracted instance '%s' to: %s" + % (instance.name, staging_dir)) diff --git a/pype/plugins/maya/publish/extract_assproxy.py b/pype/plugins/maya/publish/extract_assproxy.py new file mode 100644 index 0000000000..31f5e0393b --- /dev/null +++ b/pype/plugins/maya/publish/extract_assproxy.py @@ -0,0 +1,73 @@ +import os + +from maya import cmds +import contextlib + +import avalon.maya +import pype.api +import pype.maya.lib as lib + + +class ExtractModel(pype.api.Extractor): + """Extract proxy model as Maya Ascii to use as arnold standin + + + """ + + order = pype.api.Extractor.order + 0.2 + label = "Ass Proxy (Maya ASCII)" + hosts = ["maya"] + families = ["ass"] + + def process(self, instance): + + @contextlib.contextmanager + def unparent(root): + """Temporarily unparent `root`""" + parent = cmds.listRelatives(root, parent=True) + if parent: + cmds.parent(root, world=True) + yield + self.log.info("{} - {}".format(root, parent)) + cmds.parent(root, parent) + else: + yield + + + # Define extract output file path + stagingdir = self.staging_dir(instance) + filename = "{0}.ma".format(instance.name) + path = os.path.join(stagingdir, filename) + + # Perform extraction + self.log.info("Performing extraction..") + + # Get only the shape contents we need in such a way that we avoid + # taking along intermediateObjects + members = instance.data['proxy'] + members = cmds.ls(members, + dag=True, + transforms=True, + noIntermediate=True) + self.log.info(members) + + with avalon.maya.maintained_selection(): + with unparent(members[0]): + 
cmds.select(members, noExpand=True) + cmds.file(path, + force=True, + typ="mayaAscii", + exportSelected=True, + preserveReferences=False, + channels=False, + constraints=False, + expressions=False, + constructionHistory=False) + + + if "files" not in instance.data: + instance.data["files"] = list() + + instance.data["files"].append(filename) + + self.log.info("Extracted instance '%s' to: %s" % (instance.name, path)) From a0ffa586cebed54b283bc6a5c6299711117c2521 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 10 Jan 2019 12:42:32 +0100 Subject: [PATCH 23/24] hotfix / rename ass extractor --- pype/plugins/maya/publish/extract_assproxy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/maya/publish/extract_assproxy.py b/pype/plugins/maya/publish/extract_assproxy.py index 31f5e0393b..87e7b35799 100644 --- a/pype/plugins/maya/publish/extract_assproxy.py +++ b/pype/plugins/maya/publish/extract_assproxy.py @@ -8,7 +8,7 @@ import pype.api import pype.maya.lib as lib -class ExtractModel(pype.api.Extractor): +class ExtractAssProxy(pype.api.Extractor): """Extract proxy model as Maya Ascii to use as arnold standin From 6298e63e418444a469eb381fc878cc4abe4d898f Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 10 Jan 2019 13:44:52 +0100 Subject: [PATCH 24/24] hotfix / ass publishing invalidated looks publishing --- pype/plugins/maya/publish/validate_look_sets.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pype/plugins/maya/publish/validate_look_sets.py b/pype/plugins/maya/publish/validate_look_sets.py index 6ef333486d..1819602430 100644 --- a/pype/plugins/maya/publish/validate_look_sets.py +++ b/pype/plugins/maya/publish/validate_look_sets.py @@ -70,6 +70,13 @@ class ValidateLookSets(pyblish.api.InstancePlugin): # check if any objectSets are not present ion the relationships missing_sets = [s for s in sets if s not in relationships] + + for set in missing_sets: + if set.endswith("_SET"): + missing_sets.remove(set) + 
cls.log.info("Missing Sets " + "'{}'".format(missing_sets)) + if missing_sets: # A set of this node is not coming along, this is wrong! cls.log.error("Missing sets '{}' for node "