From 3fd9170e1fa061e0eb3f5b03a3c2640322ece093 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 12 Jan 2019 18:27:06 +0100 Subject: [PATCH 1/9] adding json export of all context and instances for debug, polishing integrate_render_frames --- pype/plugin.py | 5 +- .../publish/integrate_rendered_frames.py | 32 +++--- .../plugins/nuke/publish/extract_post_json.py | 107 ++++++++++++++++++ pype/plugins/nuke/publish/extract_review.py | 6 +- 4 files changed, 129 insertions(+), 21 deletions(-) create mode 100644 pype/plugins/nuke/publish/extract_post_json.py diff --git a/pype/plugin.py b/pype/plugin.py index 0ba1fe5ded..9f8e6f09fd 100644 --- a/pype/plugin.py +++ b/pype/plugin.py @@ -1,4 +1,5 @@ import tempfile +import os import pyblish.api ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05 @@ -28,7 +29,9 @@ class Extractor(pyblish.api.InstancePlugin): staging_dir = instance.data.get('stagingDir', None) if not staging_dir: - staging_dir = tempfile.mkdtemp(prefix="pyblish_tmp_") + staging_dir = os.path.normpath( + tempfile.mkdtemp(prefix="pyblish_tmp_") + ) instance.data['stagingDir'] = staging_dir return staging_dir diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py index bc5b138fb8..c363fa49bb 100644 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ b/pype/plugins/global/publish/integrate_rendered_frames.py @@ -39,8 +39,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin): self.register(instance) - self.log.info("Integrating Asset in to the database ...") - self.log.info("instance.data: {}".format(instance.data)) + # self.log.info("Integrating Asset in to the database ...") + # self.log.info("instance.data: {}".format(instance.data)) if instance.data.get('transfer', True): self.integrate(instance) @@ -158,8 +158,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # Each should be a single representation (as such, a single extension) representations = [] destination_list = [] - self.log.debug("integrate_frames:instance.data[files]: {}".format( - instance.data["files"])) + for files in instance.data["files"]: # Collection # _______ @@ -193,7 +192,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin): for i in src_collection.indexes: src_padding = src_collection.format("{padding}") % i - src_file_name = "{0}{1}{2}".format(src_head, src_padding, src_tail) + src_file_name = "{0}{1}{2}".format(src_head, + src_padding, src_tail) dst_padding = dst_collection.format("{padding}") % i dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail) @@ -244,17 +244,17 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # Imprint shortcut to context # for performance reasons. 
"context": { - "root": root, - "project": PROJECT, - "projectcode": project['data']['code'], - 'task': api.Session["AVALON_TASK"], - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "VERSION": version["name"], - "hierarchy": hierarchy, - "representation": ext[1:] + "root": root, + "project": PROJECT, + "projectcode": project['data']['code'], + 'task': api.Session["AVALON_TASK"], + "silo": asset['silo'], + "asset": ASSET, + "family": instance.data['family'], + "subset": subset["name"], + "VERSION": version["name"], + "hierarchy": hierarchy, + "representation": ext[1:] } } diff --git a/pype/plugins/nuke/publish/extract_post_json.py b/pype/plugins/nuke/publish/extract_post_json.py new file mode 100644 index 0000000000..6954abff3d --- /dev/null +++ b/pype/plugins/nuke/publish/extract_post_json.py @@ -0,0 +1,107 @@ +import os +import json +import datetime +import time + +import clique +import pyblish.api + + +class ExtractJSON(pyblish.api.ContextPlugin): + """ Extract all instances to a serialized json file. """ + + order = pyblish.api.IntegratorOrder + 1 + label = "Extract to JSON" + families = ["write"] + + def process(self, context): + workspace = os.path.join( + os.path.dirname(context.data["currentFile"]), "workspace", + "instances") + + if not os.path.exists(workspace): + os.makedirs(workspace) + + context_data = context.data.copy() + out_data = dict(self.serialize(context_data)) + + instances_data = [] + for instance in context: + + data = {} + for key, value in instance.data.items(): + if isinstance(value, clique.Collection): + value = value.format() + + try: + json.dumps(value) + data[key] = value + except KeyError: + msg = "\"{0}\"".format(value) + msg += " in instance.data[\"{0}\"]".format(key) + msg += " could not be serialized." + self.log.debug(msg) + + instances_data.append(data) + + out_data["instances"] = instances_data + + timestamp = datetime.datetime.fromtimestamp( + time.time()).strftime("%Y%m%d-%H%M%S") + filename = timestamp + "_instances.json" + + with open(os.path.join(workspace, filename), "w") as outfile: + outfile.write(json.dumps(out_data, indent=4, sort_keys=True)) + + def serialize(self, data): + """ + Convert all nested content to serialized objects + + Args: + data (dict): nested data + + Returns: + dict: nested data + """ + + def encoding_obj(value): + try: + value = str(value).replace("\\", "/") + # value = getattr(value, '__dict__', str(value)) + except Exception: + pass + return value + + for key, value in dict(data).items(): + if key in ["records", "instances", "results"]: + # escape all record objects + data[key] = None + continue + + if hasattr(value, '__module__'): + # only deals with module objects + if "plugins" in value.__module__: + # only dealing with plugin objects + data[key] = str(value.__module__) + else: + if ".lib." 
in value.__module__: + # will allow only anatomy dict + data[key] = self.serialize(value) + else: + data[key] = None + continue + continue + + if isinstance(value, dict): + # loops if dictionary + data[key] = self.serialize(value) + + if isinstance(value, (list or tuple)): + # loops if list or tuple + for i, item in enumerate(value): + value[i] = self.serialize(item) + data[key] = value + + data[key] = encoding_obj(value) + + return data diff --git a/pype/plugins/nuke/publish/extract_review.py b/pype/plugins/nuke/publish/extract_review.py index 30de2039df..9ff7097e61 100644 --- a/pype/plugins/nuke/publish/extract_review.py +++ b/pype/plugins/nuke/publish/extract_review.py @@ -35,6 +35,7 @@ class ExtractDataForReview(pype.api.Extractor): self.log.debug("here:") self.log.debug("creating staging dir:") self.staging_dir(instance) + self.render_review_representation(instance, representation="mov") self.log.debug("review mov:") @@ -52,7 +53,7 @@ class ExtractDataForReview(pype.api.Extractor): staging_dir = instance.data["stagingDir"] file_name = collection.format("{head}mov") - review_mov = os.path.join(staging_dir, file_name) + review_mov = os.path.join(staging_dir, file_name).replace("\\", "/") if instance.data.get("baked_colorspace_movie"): args = [ @@ -110,9 +111,6 @@ class ExtractDataForReview(pype.api.Extractor): first_frame = min(collection.indexes) last_frame = max(collection.indexes) - self.log.warning("first_frame: {}".format(first_frame)) - self.log.warning("last_frame: {}".format(last_frame)) - node = previous_node = nuke.createNode("Read") node["file"].setValue( From 0a844ee7093c0eb80274e6430dab8f6b4eee2571 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 12 Jan 2019 18:27:28 +0100 Subject: [PATCH 2/9] cosmetic changes --- pype/plugins/global/publish/integrate_rendered_frames.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py index c363fa49bb..1f14832492 100644 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ b/pype/plugins/global/publish/integrate_rendered_frames.py @@ -233,6 +233,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): instance.data["transfers"].append([src, dst]) + self.log.debug('ext[1:]: {}'.format(ext[1:])) representation = { "schema": "pype:representation-2.0", "type": "representation", From 11cf14e2829f0aafeba1359b5e8560cc68033e1c Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 18 Jan 2019 17:52:40 +0100 Subject: [PATCH 3/9] cleanup write publishing and loading --- .../validate_write_families.py | 32 ++++++++++--------- pype/plugins/nuke/load/load_sequence.py | 23 ++++++------- pype/plugins/nuke/publish/collect_families.py | 31 ++++++++---------- .../plugins/nuke/publish/collect_instances.py | 4 +-- pype/plugins/nuke/publish/collect_writes.py | 3 +- .../nuke/publish/validate_collection.py | 9 +++--- 6 files changed, 51 insertions(+), 51 deletions(-) rename pype/plugins/nuke/{publish => _publish_unused}/validate_write_families.py (57%) diff --git a/pype/plugins/nuke/publish/validate_write_families.py b/pype/plugins/nuke/_publish_unused/validate_write_families.py similarity index 57% rename from pype/plugins/nuke/publish/validate_write_families.py rename to pype/plugins/nuke/_publish_unused/validate_write_families.py index 1dfdbc06d5..087fb3be5e 100644 --- a/pype/plugins/nuke/publish/validate_write_families.py +++ b/pype/plugins/nuke/_publish_unused/validate_write_families.py @@ -20,29 +20,31 @@ class 
ValidateWriteFamilies(pyblish.api.InstancePlugin): """ Validates write families. """ order = pyblish.api.ValidatorOrder - label = "Check correct writes families" + label = "Valitade writes families" hosts = ["nuke"] families = ["write"] actions = [pype.nuke.actions.SelectInvalidAction, pype.api.RepairAction] - @staticmethod - def get_invalid(instance): - if not [f for f in instance.data["families"] - if ".frames" in f]: - return - - if not instance.data["files"]: - return (instance) + # @staticmethod + # def get_invalid(instance): + # for f in instance.data["families"]: + # if ".frames" in f: + # return + # + # if not instance.data["files"]: + # return (instance) def process(self, instance): self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) - invalid = self.get_invalid(instance) - if invalid: - raise ValueError(str("`{}`: Switch `Render` on! " - "> {}".format(__name__, invalid))) - - self.log.info("Checked correct writes families") + # if any(".frames" in f for f in instance.data["families"]): + # if not instance.data["files"]: + # raise ValueError("instance {} is set to publish frames\ + # but no files were collected, render the frames first or\ + # check 'render' checkbox onthe no to 'ON'".format(instance))) + # + # + # self.log.info("Checked correct writes families") @classmethod def repair(cls, instance): diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 1cd3688aaf..056867fd5f 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -36,13 +36,13 @@ def preserve_trim(node): if start_at_frame: node['frame_mode'].setValue("start at") node['frame'].setValue(str(script_start)) - log.info("start frame of reader was set to" + log.info("start frame of Read was set to" "{}".format(script_start)) if offset_frame: node['frame_mode'].setValue("offset") node['frame'].setValue(str((script_start + offset_frame))) - log.info("start frame of reader was set to" + log.info("start frame of Read was set to" "{}".format(script_start)) @@ -67,7 +67,7 @@ def loader_shift(node, frame, relative=True): if relative: node['frame_mode'].setValue("start at") - node['frame'].setValue(str(script_start)) + node['frame'].setValue(str(frame)) return int(script_start) @@ -75,7 +75,7 @@ def loader_shift(node, frame, relative=True): class LoadSequence(api.Loader): """Load image sequence into Nuke""" - families = ["write"] + families = ["write", "source"] representations = ["*"] label = "Load sequence" @@ -142,8 +142,9 @@ class LoadSequence(api.Loader): data_imprint = {} for k in add_keys: data_imprint.update({k: context["version"]['data'][k]}) + data_imprint.update({"objectName": read_name}) - containerise(r, + return containerise(r, name=name, namespace=namespace, context=context, @@ -168,9 +169,9 @@ class LoadSequence(api.Loader): update_container ) log.info("this i can see") - node = container["_tool"] - # TODO: prepare also for other readers img/geo/camera - assert node.Class() == "Reader", "Must be Reader" + node = nuke.toNode(container['objectName']) + # TODO: prepare also for other Read img/geo/camera + assert node.Class() == "Read", "Must be Read" root = api.get_representation_path(representation) file = ls_img_sequence(os.path.dirname(root), one=True) @@ -189,7 +190,7 @@ class LoadSequence(api.Loader): # Update the loader's path whilst preserving some values with preserve_trim(node): - node["file"] = file["path"] + node["file"].setValue(file["path"]) # Set the global in to the start frame of 
the sequence global_in_changed = loader_shift(node, start, relative=False) @@ -208,8 +209,8 @@ class LoadSequence(api.Loader): from avalon.nuke import viewer_update_and_undo_stop - node = container["_tool"] - assert node.Class() == "Reader", "Must be Reader" + node = nuke.toNode(container['objectName']) + assert node.Class() == "Read", "Must be Read" with viewer_update_and_undo_stop(): nuke.delete(node) diff --git a/pype/plugins/nuke/publish/collect_families.py b/pype/plugins/nuke/publish/collect_families.py index d0e61c349b..4fd09fe967 100644 --- a/pype/plugins/nuke/publish/collect_families.py +++ b/pype/plugins/nuke/publish/collect_families.py @@ -15,31 +15,28 @@ class CollectInstanceFamilies(pyblish.api.ContextPlugin): if "write" in instance.data["family"]: node = instance[0] - # set for ftrack to accept - instance.data["families"] = ["ftrack"] + families = [] + if instance.data.get('families'): + families.append(instance.data['families']) - if not node["render"].value(): - families = ["{}.frames".format( - instance.data["avalonKnob"]["families"])] - # to ignore staging dir op in integrate - instance.data['transfer'] = False - else: + # set for ftrack to accept + # instance.data["families"] = ["ftrack"] + + if node["render"].value(): # dealing with local/farm rendering if node["render_farm"].value(): - families = ["{}.farm".format( - instance.data["avalonKnob"]["families"])] + families.append("render.farm") else: - families = ["{}.local".format( - instance.data["avalonKnob"]["families"])] + families.append("render.local") + else: + families.append("render.frames") + # to ignore staging dir op in integrate + instance.data['transfer'] = False - instance.data["families"].extend(families) - - elif "source" in instance.data["family"]: - families = [] - families.append(instance.data["avalonKnob"]["families"]) instance.data["families"] = families + # Sort/grouped by family (preserving local index) context[:] = sorted(context, key=self.sort_by_family) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index 33e6d5a608..8a2bb06fff 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -56,8 +56,8 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): "fps": int(nuke.root()['fps'].value()) }) - if node.Class() == "Write": - instance.data["families"] = [avalon_knob_data["families"]] + # if node.Class() == "Write": + # instance.data["families"] = [avalon_knob_data["families"]] self.log.info("collected instance: {}".format(instance.data)) instances.append(instance) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index dd3247ae8f..89f78367a9 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -64,14 +64,13 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): # collect families in next file if "files" not in instance.data: instance.data["files"] = list() - try: collected_frames = os.listdir(output_dir) self.log.debug("collected_frames: {}".format(label)) instance.data["files"].append(collected_frames) except Exception: - pass + self.log.debug("couldn't collect frames: {}".format(label)) instance.data.update({ "path": path, diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index 54b3537055..a99e930661 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ 
b/pype/plugins/nuke/publish/validate_collection.py @@ -20,20 +20,19 @@ class RepairCollectionAction(pyblish.api.Action): self.log.info("Rendering toggled ON") -class ValidateCollection(pyblish.api.InstancePlugin): +class ValidatePrerenderedFrames(pyblish.api.InstancePlugin): """ Validates file output. """ order = pyblish.api.ValidatorOrder + 0.1 families = ["render.frames", "still.frames", "prerender.frames"] - label = "Check prerendered frames" + label = "Validate prerendered frame" hosts = ["nuke"] actions = [RepairCollectionAction] def process(self, instance): self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) - if not instance.data["files"]: - return + assert instance.data["files"], "No frames have been collected" collections, remainder = clique.assemble(*instance.data['files']) self.log.info('collections: {}'.format(str(collections))) @@ -57,3 +56,5 @@ class ValidateCollection(pyblish.api.InstancePlugin): collection.indexes ) is frame_length, "{} missing frames. Use " "repair to render all frames".format(__name__) + + instance.data['collection'] = collection From e1601664f194eaeb3a7104a194423a45599d6522 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 18 Jan 2019 17:58:39 +0100 Subject: [PATCH 4/9] json extraction is causing troubles in nuke. temporarily setting to just maya host --- pype/plugins/global/publish/collect_json.py | 1 + pype/plugins/global/publish/extract_json.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/collect_json.py b/pype/plugins/global/publish/collect_json.py index 1301bb2ee4..1cf79f4612 100644 --- a/pype/plugins/global/publish/collect_json.py +++ b/pype/plugins/global/publish/collect_json.py @@ -11,6 +11,7 @@ class CollectJSON(pyblish.api.ContextPlugin): label = "JSON" order = pyblish.api.CollectorOrder + hosts = ['maya'] def version_get(self, string, prefix): """ Extract version information from filenames. 
Code from Foundry"s diff --git a/pype/plugins/global/publish/extract_json.py b/pype/plugins/global/publish/extract_json.py index dc00fecb49..fa7f95147b 100644 --- a/pype/plugins/global/publish/extract_json.py +++ b/pype/plugins/global/publish/extract_json.py @@ -12,7 +12,7 @@ class ExtractJSON(pyblish.api.ContextPlugin): order = pyblish.api.IntegratorOrder label = "JSON" - hosts = ['nuke', 'maya'] + hosts = ['maya'] def process(self, context): From 3613a6061426371156d32508c3668a266d56b601 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 18 Jan 2019 18:20:19 +0100 Subject: [PATCH 5/9] review cleanup, changing family to "review" instead of "render.review" --- pype/nuke/lib.py | 6 +-- .../extract_write_next_render.py | 1 + pype/plugins/nuke/publish/collect_review.py | 12 +++-- pype/plugins/nuke/publish/extract_review.py | 53 +++++-------------- 4 files changed, 24 insertions(+), 48 deletions(-) rename pype/plugins/nuke/{publish => _load_unused}/extract_write_next_render.py (97%) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 2f002ce130..4322e55c00 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -35,9 +35,9 @@ def writes_version_sync(): for each in nuke.allNodes(): if each.Class() == 'Write': avalon_knob_data = get_avalon_knob_data(each) - if avalon_knob_data['families'] not in ["render"]: - log.info(avalon_knob_data['families']) - continue + # if avalon_knob_data['families'] not in ["render"]: + # log.info(avalon_knob_data['families']) + # continue try: node_file = each['file'].value() log.info("node_file: {}".format(node_file)) diff --git a/pype/plugins/nuke/publish/extract_write_next_render.py b/pype/plugins/nuke/_load_unused/extract_write_next_render.py similarity index 97% rename from pype/plugins/nuke/publish/extract_write_next_render.py rename to pype/plugins/nuke/_load_unused/extract_write_next_render.py index d13e67a563..40bfe59ec2 100644 --- a/pype/plugins/nuke/publish/extract_write_next_render.py +++ b/pype/plugins/nuke/_load_unused/extract_write_next_render.py @@ -13,6 +13,7 @@ class WriteToRender(pyblish.api.InstancePlugin): families = ["write"] def process(self, instance): + return if [f for f in instance.data["families"] if ".frames" in f]: instance[0]["render"].setValue(True) diff --git a/pype/plugins/nuke/publish/collect_review.py b/pype/plugins/nuke/publish/collect_review.py index 03f5437e86..c6a3173af1 100644 --- a/pype/plugins/nuke/publish/collect_review.py +++ b/pype/plugins/nuke/publish/collect_review.py @@ -14,14 +14,16 @@ class CollectReview(pyblish.api.InstancePlugin): family_targets = [".local", ".frames"] def process(self, instance): + pass families = [(f, search) for f in instance.data["families"] for search in self.family_targets if search in f][0] if families: - root_femilies = families[0].replace(families[1], "") - instance.data["families"].append(".".join([ - root_femilies, - self.family - ])) + root_families = families[0].replace(families[1], "") + # instance.data["families"].append(".".join([ + # root_families, + # self.family + # ])) + instance.data["families"].append("review") self.log.info("Review collected: `{}`".format(instance)) diff --git a/pype/plugins/nuke/publish/extract_review.py b/pype/plugins/nuke/publish/extract_review.py index 9ff7097e61..cf0864e09d 100644 --- a/pype/plugins/nuke/publish/extract_review.py +++ b/pype/plugins/nuke/publish/extract_review.py @@ -2,6 +2,7 @@ import os import nuke import pyblish.api import pype +from pype.vendor import ffmpeg class ExtractDataForReview(pype.api.Extractor): @@ -12,27 +13,18 @@ 
class ExtractDataForReview(pype.api.Extractor): """ order = pyblish.api.ExtractorOrder + 0.01 - label = "Data for review" + label = "Extract Review" optional = True - families = ["write"] + families = ["review"] hosts = ["nuke"] - family_targets = [".local", ".review"] def process(self, instance): - families = [f for f in instance.data["families"] - for search in self.family_targets - if search in f] - if not families: - return - self.log.debug("here:") # Store selection selection = [i for i in nuke.allNodes() if i["selected"].getValue()] - self.log.debug("here:") # Deselect all nodes to prevent external connections [i["selected"].setValue(False) for i in nuke.allNodes()] - self.log.debug("here:") self.log.debug("creating staging dir:") self.staging_dir(instance) @@ -55,32 +47,18 @@ class ExtractDataForReview(pype.api.Extractor): review_mov = os.path.join(staging_dir, file_name).replace("\\", "/") - if instance.data.get("baked_colorspace_movie"): - args = [ - "ffmpeg", "-y", - "-i", instance.data["baked_colorspace_movie"], - "-pix_fmt", "yuv420p", - "-crf", "18", - "-timecode", "00:00:00:01", - ] - - args.append(review_mov) - - self.log.debug("Executing args: {0}".format(args)) - self.log.info("transcoding review mov: {0}".format(review_mov)) - p = subprocess.Popen( - args, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - stdin=subprocess.PIPE, - cwd=os.path.dirname(args[-1]) - ) + if instance.data.get("baked_colorspace_movie"): + input_movie = instance.data["baked_colorspace_movie"] + out, err = ( + ffmpeg + .input(input_movie) + .output(review_mov, pix_fmt='yuv420p', crf=18, timecode="00:00:00:01") + .overwrite_output() + .run() + ) - output = p.communicate()[0] - if p.returncode != 0: - raise ValueError(output) self.log.debug("Removing `{0}`...".format( instance.data["baked_colorspace_movie"])) @@ -101,12 +79,6 @@ class ExtractDataForReview(pype.api.Extractor): collection = instance.data.get("collection", None) - self.log.warning("instance.data['files']: {}".format(instance.data['files'])) - if not collection: - collections, remainder = clique.assemble(*instance.data['files']) - collection = collections[0] - instance.data["collection"] = collection - # Create nodes first_frame = min(collection.indexes) last_frame = max(collection.indexes) @@ -156,6 +128,7 @@ class ExtractDataForReview(pype.api.Extractor): if representation in "mov": file = collection.format("{head}baked.mov") path = os.path.join(staging_dir, file).replace("\\", "/") + self.log.debug("Path: {}".format(path)) instance.data["baked_colorspace_movie"] = path write_node["file"].setValue(path) write_node["file_type"].setValue("mov") From e5efe62a81ae3c3557dc8f6f9f2125e3cf44d415 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 21 Jan 2019 13:44:24 +0100 Subject: [PATCH 6/9] fixing families in loader --- .../publish/integrate_rendered_frames.py | 15 ++++--- pype/plugins/nuke/load/load_sequence.py | 41 ++++++++----------- pype/plugins/nuke/publish/collect_review.py | 2 +- pype/plugins/nuke/publish/extract_review.py | 2 +- .../nuke/publish/integrate_script_version.py | 2 + 5 files changed, 30 insertions(+), 32 deletions(-) diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py index eb2450293c..325a9e3442 100644 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ b/pype/plugins/global/publish/integrate_rendered_frames.py @@ -199,7 +199,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin): src = os.path.join(stagingdir, 
src_file_name) instance.data["transfers"].append([src, dst]) - template = anatomy.render.path else: # Single file @@ -229,31 +228,35 @@ class IntegrateFrames(pyblish.api.InstancePlugin): anatomy_filled = anatomy.format(template_data) dst = anatomy_filled.render.path - template = anatomy.render.path instance.data["transfers"].append([src, dst]) + template_data["frame"] = "#####" + anatomy_filled = anatomy.format(template_data) + path_to_save = anatomy_filled.render.path + template = anatomy.render.fullpath self.log.debug('ext[1:]: {}'.format(ext[1:])) + representation = { "schema": "pype:representation-2.0", "type": "representation", "parent": version_id, "name": ext[1:], - "data": {'path': dst, 'template': template}, + "data": {'path': path_to_save, 'template': template}, "dependencies": instance.data.get("dependencies", "").split(), # Imprint shortcut to context # for performance reasons. "context": { "root": root, - "project": PROJECT, - "projectcode": project['data']['code'], + "project": {"name": PROJECT, + "code": project['data']['code']}, 'task': api.Session["AVALON_TASK"], "silo": asset['silo'], "asset": ASSET, "family": instance.data['family'], "subset": subset["name"], - "VERSION": version["name"], + "version": version["name"], "hierarchy": hierarchy, "representation": ext[1:] } diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 056867fd5f..23575a3f28 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -76,7 +76,7 @@ class LoadSequence(api.Loader): """Load image sequence into Nuke""" families = ["write", "source"] - representations = ["*"] + representations = ["exr"] label = "Load sequence" order = -10 @@ -86,44 +86,33 @@ class LoadSequence(api.Loader): def load(self, context, name, namespace, data): from avalon.nuke import ( containerise, - ls_img_sequence, viewer_update_and_undo_stop ) - for k, v in context.items(): - log.info("key: `{}`, value: {}\n".format(k, v)) + # for k, v in context.items(): + # log.info("key: `{}`, value: {}\n".format(k, v)) + + version = context['version'] + version_data = version.get("data", {}) + + first = version_data.get("startFrame", None) + last = version_data.get("endFrame", None) # Fallback to asset name when namespace is None if namespace is None: namespace = context['asset']['name'] - # Use the first file for now - # TODO: fix path fname - file = ls_img_sequence(os.path.dirname(self.fname), one=True) - log.info("file: {}\n".format(file)) + file = self.fname + log.info("file: {}\n".format(self.fname)) read_name = "Read_" + context["representation"]["context"]["subset"] + # Create the Loader with the filename path set with viewer_update_and_undo_stop(): # TODO: it might be universal read to img/geo/camera r = nuke.createNode( "Read", "name {}".format(read_name)) - r["file"].setValue(file['path']) - if len(file['frames']) is 1: - first = file['frames'][0][0] - last = file['frames'][0][1] - r["origfirst"].setValue(first) - r["first"].setValue(first) - r["origlast"].setValue(last) - r["last"].setValue(last) - else: - first = file['frames'][0][0] - last = file['frames'][:-1][1] - r["origfirst"].setValue(first) - r["first"].setValue(first) - r["origlast"].setValue(last) - r["last"].setValue(last) - log.warning("Missing frames in image sequence") + r["file"].setValue(self.fname) # Set colorspace defined in version data colorspace = context["version"]["data"].get("colorspace", None) @@ -134,6 +123,10 @@ class LoadSequence(api.Loader): start = 
context["version"]["data"].get("startFrame", None) if start is not None: loader_shift(r, start, relative=True) + r["origfirst"].setValue(first) + r["first"].setValue(first) + r["origlast"].setValue(last) + r["last"].setValue(last) # add additional metadata from the version to imprint to Avalon knob add_keys = ["startFrame", "endFrame", "handles", diff --git a/pype/plugins/nuke/publish/collect_review.py b/pype/plugins/nuke/publish/collect_review.py index c6a3173af1..f75c675b8f 100644 --- a/pype/plugins/nuke/publish/collect_review.py +++ b/pype/plugins/nuke/publish/collect_review.py @@ -25,5 +25,5 @@ class CollectReview(pyblish.api.InstancePlugin): # root_families, # self.family # ])) - instance.data["families"].append("review") + instance.data["families"].append("render.review") self.log.info("Review collected: `{}`".format(instance)) diff --git a/pype/plugins/nuke/publish/extract_review.py b/pype/plugins/nuke/publish/extract_review.py index cf0864e09d..e85185e919 100644 --- a/pype/plugins/nuke/publish/extract_review.py +++ b/pype/plugins/nuke/publish/extract_review.py @@ -16,7 +16,7 @@ class ExtractDataForReview(pype.api.Extractor): label = "Extract Review" optional = True - families = ["review"] + families = ["render.review"] hosts = ["nuke"] def process(self, instance): diff --git a/pype/plugins/nuke/publish/integrate_script_version.py b/pype/plugins/nuke/publish/integrate_script_version.py index aa37101af0..09dfeb4e7d 100644 --- a/pype/plugins/nuke/publish/integrate_script_version.py +++ b/pype/plugins/nuke/publish/integrate_script_version.py @@ -13,6 +13,8 @@ class IncrementScriptVersion(pyblish.api.ContextPlugin): families = ["nukescript", "render.local", "render.frames"] def process(self, context): + # return + # from pype.lib import version_up path = context.data["currentFile"] nuke.scriptSaveAs(version_up(path)) From 8d28c4af448b2d5859a55dba9f2934e7b4488124 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 6 Feb 2019 21:18:56 +0100 Subject: [PATCH 7/9] fix version checker, and other nuke fixes --- pype/api.py | 4 +-- pype/nuke/lib.py | 8 +++--- pype/plugins/global/publish/collect_json.py | 2 +- .../global/publish/collect_scene_version.py | 23 +++++++++++++++ pype/plugins/global/publish/extract_json.py | 1 + .../publish/integrate_rendered_frames.py | 5 +++- .../validate_write_families.py | 22 +++++++++------ pype/plugins/nuke/create/create_write.py | 28 +++++++++---------- pype/plugins/nuke/load/load_sequence.py | 2 +- .../nuke/publish/collect_current_file.py | 2 +- pype/plugins/nuke/publish/collect_writes.py | 6 ++++ ...version.py => increment_script_version.py} | 9 +++--- .../nuke/publish/validate_collection.py | 4 ++- .../nuke/publish/validate_version_match.py | 15 ++++++++++ pype/templates.py | 2 +- 15 files changed, 95 insertions(+), 38 deletions(-) create mode 100644 pype/plugins/global/publish/collect_scene_version.py rename pype/plugins/nuke/publish/{integrate_script_version.py => increment_script_version.py} (72%) create mode 100644 pype/plugins/nuke/publish/validate_version_match.py diff --git a/pype/api.py b/pype/api.py index fd4a1ca1d2..747ad425f8 100644 --- a/pype/api.py +++ b/pype/api.py @@ -33,7 +33,7 @@ from .templates import ( get_asset, get_task, set_avalon_workdir, - get_version_from_workfile, + get_version_from_path, get_workdir_template, set_hierarchy, set_project_code @@ -77,7 +77,7 @@ __all__ = [ "get_asset", "get_task", "set_avalon_workdir", - "get_version_from_workfile", + "get_version_from_path", "get_workdir_template", "modified_environ", 
"add_tool_to_environment", diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 183e545ddb..f5b385cfe0 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -35,14 +35,14 @@ def writes_version_sync(): for each in nuke.allNodes(): if each.Class() == 'Write': avalon_knob_data = get_avalon_knob_data(each) - # if avalon_knob_data['families'] not in ["render"]: - # log.info(avalon_knob_data['families']) - # continue + if avalon_knob_data['families'] not in ["render"]: + log.info(avalon_knob_data['families']) + continue try: node_file = each['file'].value() log.info("node_file: {}".format(node_file)) - node_version = pype.get_version_from_path(node_file, None) + node_version = pype.get_version_from_path(node_file) log.info("node_version: {}".format(node_version)) node_new_file = node_file.replace(node_version, new_version) diff --git a/pype/plugins/global/publish/collect_json.py b/pype/plugins/global/publish/collect_json.py index 1cf79f4612..ba5fc29d12 100644 --- a/pype/plugins/global/publish/collect_json.py +++ b/pype/plugins/global/publish/collect_json.py @@ -27,7 +27,7 @@ class CollectJSON(pyblish.api.ContextPlugin): return matches[-1:][0][1], re.search(r"\d+", matches[-1:][0]).group() def process(self, context): - current_file = context.data("currentFile") + current_file = context.data.get("currentFile", '') # Skip if current file is not a directory if not os.path.isdir(current_file): return diff --git a/pype/plugins/global/publish/collect_scene_version.py b/pype/plugins/global/publish/collect_scene_version.py new file mode 100644 index 0000000000..06bc8e3a53 --- /dev/null +++ b/pype/plugins/global/publish/collect_scene_version.py @@ -0,0 +1,23 @@ +import os +import pyblish.api +import os +import pype.api as pype + +class CollectSceneVersion(pyblish.api.ContextPlugin): + """Finds version in the filename or passes the one found in the context + Arguments: + version (int, optional): version number of the publish + """ + + order = pyblish.api.CollectorOrder + label = 'Collect Version' + + def process(self, context): + + filename = os.path.basename(context.data.get('currentFile')) + + rootVersion = pype.get_version_from_path(filename) + + context.data['version'] = rootVersion + + self.log.info('Scene Version: %s' % context.data('version')) diff --git a/pype/plugins/global/publish/extract_json.py b/pype/plugins/global/publish/extract_json.py index fa7f95147b..e218776638 100644 --- a/pype/plugins/global/publish/extract_json.py +++ b/pype/plugins/global/publish/extract_json.py @@ -25,6 +25,7 @@ class ExtractJSON(pyblish.api.ContextPlugin): output_data = [] for instance in context: + self.log.debug(instance['data']) data = {} for key, value in instance.data.iteritems(): diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py index 8f8766a297..08ddbddd99 100644 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ b/pype/plugins/global/publish/integrate_rendered_frames.py @@ -117,6 +117,9 @@ class IntegrateFrames(pyblish.api.InstancePlugin): "('v{1:03d}')".format(assumed_version, next_version)) + if instance.data.get('version'): + next_version = int(instance.data.get('version')) + self.log.debug("Next version: v{0:03d}".format(next_version)) version_data = self.create_version_data(context, instance) @@ -261,7 +264,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): "asset": ASSET, "family": instance.data['family'], "subset": subset["name"], - "VERSION": version["name"], + "version": version["name"], 
"hierarchy": hierarchy, "representation": ext[1:] } diff --git a/pype/plugins/nuke/_publish_unused/validate_write_families.py b/pype/plugins/nuke/_publish_unused/validate_write_families.py index 087fb3be5e..73f710867d 100644 --- a/pype/plugins/nuke/_publish_unused/validate_write_families.py +++ b/pype/plugins/nuke/_publish_unused/validate_write_families.py @@ -25,18 +25,24 @@ class ValidateWriteFamilies(pyblish.api.InstancePlugin): families = ["write"] actions = [pype.nuke.actions.SelectInvalidAction, pype.api.RepairAction] - # @staticmethod - # def get_invalid(instance): - # for f in instance.data["families"]: - # if ".frames" in f: - # return - # - # if not instance.data["files"]: - # return (instance) + @staticmethod + def get_invalid(self, instance): + if not [f for f in instance.data["families"] + if ".frames" in f]: + return + + if not instance.data.get('files'): + return (instance) def process(self, instance): self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) + invalid = self.get_invalid(self, instance) + + if invalid: + raise ValueError(str("`{}`: Switch `Render` on! " + "> {}".format(__name__, invalid))) + # if any(".frames" in f for f in instance.data["families"]): # if not instance.data["files"]: # raise ValueError("instance {} is set to publish frames\ diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index c1b492ac2e..af7462680e 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -25,7 +25,7 @@ class CrateWriteRender(avalon.nuke.Creator): name = "WriteRender" label = "Create Write Render" hosts = ["nuke"] - family = "{}_write".format(preset) + family = "write" families = preset icon = "sign-out" @@ -34,7 +34,7 @@ class CrateWriteRender(avalon.nuke.Creator): data = OrderedDict() - data["family"] = self.family.split("_")[1] + data["family"] = self.family data["families"] = self.families {data.update({k: v}) for k, v in self.data.items() @@ -44,15 +44,15 @@ class CrateWriteRender(avalon.nuke.Creator): def process(self): self.name = self.data["subset"] - family = self.family.split("_")[0] - node = self.family.split("_")[1] + family = self.family + node = 'write' instance = nuke.toNode(self.data["subset"]) if not instance: write_data = { "class": node, - "preset": family, + "preset": self.preset, "avalon": self.data } @@ -68,7 +68,7 @@ class CrateWritePrerender(avalon.nuke.Creator): name = "WritePrerender" label = "Create Write Prerender" hosts = ["nuke"] - family = "{}_write".format(preset) + family = "write" families = preset icon = "sign-out" @@ -89,13 +89,13 @@ class CrateWritePrerender(avalon.nuke.Creator): instance = nuke.toNode(self.data["subset"]) - family = self.family.split("_")[0] - node = self.family.split("_")[1] + family = self.family + node = 'write' if not instance: write_data = { "class": node, - "preset": family, + "preset": self.preset, "avalon": self.data } @@ -111,7 +111,7 @@ class CrateWriteStill(avalon.nuke.Creator): name = "WriteStill" label = "Create Write Still" hosts = ["nuke"] - family = "{}_write".format(preset) + family = "write" families = preset icon = "image" @@ -120,7 +120,7 @@ class CrateWriteStill(avalon.nuke.Creator): data = OrderedDict() - data["family"] = self.family.split("_")[1] + data["family"] = self.family data["families"] = self.families {data.update({k: v}) for k, v in self.data.items() @@ -132,14 +132,14 @@ class CrateWriteStill(avalon.nuke.Creator): instance = nuke.toNode(self.data["subset"]) - family = 
self.family.split("_")[0] - node = self.family.split("_")[1] + family = self.family + node = 'write' if not instance: write_data = { "frame_range": [nuke.frame(), nuke.frame()], "class": node, - "preset": family, + "preset": self.preset, "avalon": self.data } diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 23575a3f28..a4a591e657 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -76,7 +76,7 @@ class LoadSequence(api.Loader): """Load image sequence into Nuke""" families = ["write", "source"] - representations = ["exr"] + representations = ["exr", "dpx"] label = "Load sequence" order = -10 diff --git a/pype/plugins/nuke/publish/collect_current_file.py b/pype/plugins/nuke/publish/collect_current_file.py index 96ec44d9d6..35a0ef4c2a 100644 --- a/pype/plugins/nuke/publish/collect_current_file.py +++ b/pype/plugins/nuke/publish/collect_current_file.py @@ -4,7 +4,7 @@ import pyblish.api class SelectCurrentFile(pyblish.api.ContextPlugin): """Inject the current working file into context""" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.5 hosts = ["nuke"] def process(self, context): diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 89f78367a9..df9666b8ca 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -3,6 +3,7 @@ import tempfile import nuke import pyblish.api import logging +import pype.api as pype log = logging.getLogger(__name__) @@ -50,6 +51,11 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): output_dir = os.path.dirname(path) self.log.debug('output dir: {}'.format(output_dir)) + # get version + version = pype.get_version_from_path(path) + instance.data['version'] = version + self.log.debug('Write Version: %s' % instance.data('version')) + # create label name = node.name() # Include start and end render frame in label diff --git a/pype/plugins/nuke/publish/integrate_script_version.py b/pype/plugins/nuke/publish/increment_script_version.py similarity index 72% rename from pype/plugins/nuke/publish/integrate_script_version.py rename to pype/plugins/nuke/publish/increment_script_version.py index 09dfeb4e7d..77eab30a63 100644 --- a/pype/plugins/nuke/publish/integrate_script_version.py +++ b/pype/plugins/nuke/publish/increment_script_version.py @@ -7,14 +7,15 @@ class IncrementScriptVersion(pyblish.api.ContextPlugin): """Increment current script version.""" order = pyblish.api.IntegratorOrder + 0.9 - label = "Increment Current Script Version" + label = "Increment Script Version" optional = True hosts = ['nuke'] - families = ["nukescript", "render.local", "render.frames"] def process(self, context): - # return - # + + assert all(result["success"] for result in context.data["results"]), ( + "Atomicity not held, aborting.") + from pype.lib import version_up path = context.data["currentFile"] nuke.scriptSaveAs(version_up(path)) diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index a99e930661..c402927373 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -30,9 +30,11 @@ class ValidatePrerenderedFrames(pyblish.api.InstancePlugin): hosts = ["nuke"] actions = [RepairCollectionAction] + def process(self, instance): self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) - assert 
instance.data["files"], "No frames have been collected" + + assert instance.data.get('files'), "no frames were collected, you need to render them" collections, remainder = clique.assemble(*instance.data['files']) self.log.info('collections: {}'.format(str(collections))) diff --git a/pype/plugins/nuke/publish/validate_version_match.py b/pype/plugins/nuke/publish/validate_version_match.py new file mode 100644 index 0000000000..64646ea5dc --- /dev/null +++ b/pype/plugins/nuke/publish/validate_version_match.py @@ -0,0 +1,15 @@ +import pyblish.api + + +class ValidateVersionMatch(pyblish.api.InstancePlugin): + """Checks if write version matches workfile version""" + + label = "Validate Version Match" + order = pyblish.api.ValidatorOrder + hosts = ["nuke"] + families = ['render.frames'] + + def process(self, instance): + + assert instance.data['version'] == instance.context.data['version'], "\ + Version in write doesn't match version of the workfile" diff --git a/pype/templates.py b/pype/templates.py index cf246defb3..5fe41f6899 100644 --- a/pype/templates.py +++ b/pype/templates.py @@ -58,7 +58,7 @@ def reset_data_from_templates(): log.info("Data from templates were Unloaded...") -def get_version_from_workfile(file): +def get_version_from_path(file): """ Finds version number in file path string From e1493540c1b44b67ab03ed9f3bf99b39c1c26b5e Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 7 Feb 2019 21:38:29 +0100 Subject: [PATCH 8/9] fix review on local render --- pype/plugins/nuke/publish/collect_families.py | 47 ++++++++++--------- pype/plugins/nuke/publish/collect_writes.py | 1 + .../nuke/publish/extract_render_local.py | 7 +++ 3 files changed, 33 insertions(+), 22 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_families.py b/pype/plugins/nuke/publish/collect_families.py index 4fd09fe967..08ab90143d 100644 --- a/pype/plugins/nuke/publish/collect_families.py +++ b/pype/plugins/nuke/publish/collect_families.py @@ -2,43 +2,46 @@ import pyblish.api @pyblish.api.log -class CollectInstanceFamilies(pyblish.api.ContextPlugin): +class CollectInstanceFamilies(pyblish.api.InstancePlugin): """Collect families for all instances""" order = pyblish.api.CollectorOrder + 0.2 label = "Collect Families" hosts = ["nuke", "nukeassist"] + families = ['write'] - def process(self, context): - for instance in context.data["instances"]: + def process(self, instance): - if "write" in instance.data["family"]: - node = instance[0] + node = instance[0] - families = [] - if instance.data.get('families'): - families.append(instance.data['families']) + self.log.info('processing {}'.format(node)) - # set for ftrack to accept - # instance.data["families"] = ["ftrack"] + families = [] + if instance.data.get('families'): + families.append(instance.data['families']) - if node["render"].value(): - # dealing with local/farm rendering - if node["render_farm"].value(): - families.append("render.farm") - else: - families.append("render.local") - else: - families.append("render.frames") - # to ignore staging dir op in integrate - instance.data['transfer'] = False + # set for ftrack to accept + # instance.data["families"] = ["ftrack"] + + if node["render"].value(): + # dealing with local/farm rendering + if node["render_farm"].value(): + families.append("render.farm") + else: + families.append("render.local") + else: + families.append("render.frames") + # to ignore staging dir op in integrate + instance.data['transfer'] = False + + families.append('ftrack') - instance.data["families"] = families + 
instance.data["families"] = families # Sort/grouped by family (preserving local index) - context[:] = sorted(context, key=self.sort_by_family) + instance.context[:] = sorted(instance.context, key=self.sort_by_family) def sort_by_family(self, instance): """Sort by family""" diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index df9666b8ca..7ec2bbc09e 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -89,6 +89,7 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): "colorspace": node["colorspace"].value(), }) + self.log.debug("instance.data: {}".format(instance.data)) self.log.debug("context: {}".format(context)) diff --git a/pype/plugins/nuke/publish/extract_render_local.py b/pype/plugins/nuke/publish/extract_render_local.py index 5b53a42136..1f0a00273f 100644 --- a/pype/plugins/nuke/publish/extract_render_local.py +++ b/pype/plugins/nuke/publish/extract_render_local.py @@ -2,6 +2,7 @@ import pyblish.api import nuke import os import pype +import clique class NukeRenderLocal(pype.api.Extractor): @@ -66,5 +67,11 @@ class NukeRenderLocal(pype.api.Extractor): output_dir )) + collections, remainder = clique.assemble(*instance.data['files']) + self.log.info('collections: {}'.format(str(collections))) + + collection = collections[0] + instance.data['collection'] = collection + self.log.info('Finished render') return From 52a2b375982ee8c6efe4da5ad442c75f792b388b Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 7 Feb 2019 23:51:52 +0100 Subject: [PATCH 9/9] comment out nuke callback that were crashing session --- pype/plugins/ftrack/integrate_ftrack_instances.py | 2 +- pype/plugins/nuke/publish/collect_writes.py | 1 + setup/nuke/nuke_path/menu.py | 4 ++-- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pype/plugins/ftrack/integrate_ftrack_instances.py b/pype/plugins/ftrack/integrate_ftrack_instances.py index 491428a33d..48850d3460 100644 --- a/pype/plugins/ftrack/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/integrate_ftrack_instances.py @@ -67,7 +67,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'frameOut': int(instance.data["startFrame"]), 'frameRate': 25})} } - elif ext in [".jpg"]: + elif ext in [".jpg", ".jpeg"]: component_data = { "name": "thumbnail" # Default component name is "main". } diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 7ec2bbc09e..2a274201bb 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -90,6 +90,7 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): }) + self.log.debug("instance.data: {}".format(instance.data)) self.log.debug("context: {}".format(context)) diff --git a/setup/nuke/nuke_path/menu.py b/setup/nuke/nuke_path/menu.py index 3613bc99f2..45f44d0d11 100644 --- a/setup/nuke/nuke_path/menu.py +++ b/setup/nuke/nuke_path/menu.py @@ -6,7 +6,7 @@ from pype.api import Logger log = Logger.getLogger(__name__, "nuke") -nuke.addOnScriptSave(writes_version_sync) -nuke.addOnScriptSave(onScriptLoad) +# nuke.addOnScriptSave(writes_version_sync) +# nuke.addOnScriptSave(onScriptLoad) log.info('Automatic syncing of write file knob to script version')