diff --git a/pype/api.py b/pype/api.py index fd4a1ca1d2..747ad425f8 100644 --- a/pype/api.py +++ b/pype/api.py @@ -33,7 +33,7 @@ from .templates import ( get_asset, get_task, set_avalon_workdir, - get_version_from_workfile, + get_version_from_path, get_workdir_template, set_hierarchy, set_project_code @@ -77,7 +77,7 @@ __all__ = [ "get_asset", "get_task", "set_avalon_workdir", - "get_version_from_workfile", + "get_version_from_path", "get_workdir_template", "modified_environ", "add_tool_to_environment", diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index fa58e4f937..f5b385cfe0 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -42,7 +42,7 @@ def writes_version_sync(): node_file = each['file'].value() log.info("node_file: {}".format(node_file)) - node_version = pype.get_version_from_path(node_file, None) + node_version = pype.get_version_from_path(node_file) log.info("node_version: {}".format(node_version)) node_new_file = node_file.replace(node_version, new_version) diff --git a/pype/plugin.py b/pype/plugin.py index f88cd6e34b..cfcd814c92 100644 --- a/pype/plugin.py +++ b/pype/plugin.py @@ -1,4 +1,5 @@ import tempfile +import os import pyblish.api ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05 @@ -28,7 +29,9 @@ class Extractor(pyblish.api.InstancePlugin): staging_dir = instance.data.get('stagingDir', None) if not staging_dir: - staging_dir = tempfile.mkdtemp(prefix="pyblish_tmp_") + staging_dir = os.path.normpath( + tempfile.mkdtemp(prefix="pyblish_tmp_") + ) instance.data['stagingDir'] = staging_dir return staging_dir diff --git a/pype/plugins/ftrack/integrate_ftrack_instances.py b/pype/plugins/ftrack/integrate_ftrack_instances.py index 491428a33d..48850d3460 100644 --- a/pype/plugins/ftrack/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/integrate_ftrack_instances.py @@ -67,7 +67,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'frameOut': int(instance.data["startFrame"]), 'frameRate': 25})} } - elif ext in [".jpg"]: + elif ext in [".jpg", ".jpeg"]: component_data = { "name": "thumbnail" # Default component name is "main". } diff --git a/pype/plugins/global/publish/collect_json.py b/pype/plugins/global/publish/collect_json.py index 1301bb2ee4..ba5fc29d12 100644 --- a/pype/plugins/global/publish/collect_json.py +++ b/pype/plugins/global/publish/collect_json.py @@ -11,6 +11,7 @@ class CollectJSON(pyblish.api.ContextPlugin): label = "JSON" order = pyblish.api.CollectorOrder + hosts = ['maya'] def version_get(self, string, prefix): """ Extract version information from filenames. 
Code from Foundry"s @@ -26,7 +27,7 @@ class CollectJSON(pyblish.api.ContextPlugin): return matches[-1:][0][1], re.search(r"\d+", matches[-1:][0]).group() def process(self, context): - current_file = context.data("currentFile") + current_file = context.data.get("currentFile", '') # Skip if current file is not a directory if not os.path.isdir(current_file): return diff --git a/pype/plugins/global/publish/collect_scene_version.py b/pype/plugins/global/publish/collect_scene_version.py new file mode 100644 index 0000000000..06bc8e3a53 --- /dev/null +++ b/pype/plugins/global/publish/collect_scene_version.py @@ -0,0 +1,23 @@ +import os +import pyblish.api +import os +import pype.api as pype + +class CollectSceneVersion(pyblish.api.ContextPlugin): + """Finds version in the filename or passes the one found in the context + Arguments: + version (int, optional): version number of the publish + """ + + order = pyblish.api.CollectorOrder + label = 'Collect Version' + + def process(self, context): + + filename = os.path.basename(context.data.get('currentFile')) + + rootVersion = pype.get_version_from_path(filename) + + context.data['version'] = rootVersion + + self.log.info('Scene Version: %s' % context.data('version')) diff --git a/pype/plugins/global/publish/extract_json.py b/pype/plugins/global/publish/extract_json.py index dc00fecb49..e218776638 100644 --- a/pype/plugins/global/publish/extract_json.py +++ b/pype/plugins/global/publish/extract_json.py @@ -12,7 +12,7 @@ class ExtractJSON(pyblish.api.ContextPlugin): order = pyblish.api.IntegratorOrder label = "JSON" - hosts = ['nuke', 'maya'] + hosts = ['maya'] def process(self, context): @@ -25,6 +25,7 @@ class ExtractJSON(pyblish.api.ContextPlugin): output_data = [] for instance in context: + self.log.debug(instance['data']) data = {} for key, value in instance.data.iteritems(): diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py index 2ee28797e7..08ddbddd99 100644 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ b/pype/plugins/global/publish/integrate_rendered_frames.py @@ -43,8 +43,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin): self.register(instance) - self.log.info("Integrating Asset in to the database ...") - self.log.info("instance.data: {}".format(instance.data)) + # self.log.info("Integrating Asset in to the database ...") + # self.log.info("instance.data: {}".format(instance.data)) if instance.data.get('transfer', True): self.integrate(instance) @@ -117,6 +117,9 @@ class IntegrateFrames(pyblish.api.InstancePlugin): "('v{1:03d}')".format(assumed_version, next_version)) + if instance.data.get('version'): + next_version = int(instance.data.get('version')) + self.log.debug("Next version: v{0:03d}".format(next_version)) version_data = self.create_version_data(context, instance) @@ -163,8 +166,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # Each should be a single representation (as such, a single extension) representations = [] destination_list = [] - self.log.debug("integrate_frames:instance.data[files]: {}".format( - instance.data["files"])) + for files in instance.data["files"]: # Collection # _______ @@ -205,7 +207,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin): src = os.path.join(stagingdir, src_file_name) instance.data["transfers"].append([src, dst]) - template = anatomy.render.path else: # Single file @@ -235,30 +236,35 @@ class IntegrateFrames(pyblish.api.InstancePlugin): anatomy_filled = anatomy.format(template_data) 
dst = anatomy_filled.render.path - template = anatomy.render.path instance.data["transfers"].append([src, dst]) + template_data["frame"] = "#####" + anatomy_filled = anatomy.format(template_data) + path_to_save = anatomy_filled.render.path + template = anatomy.render.fullpath + self.log.debug('ext[1:]: {}'.format(ext[1:])) + representation = { "schema": "pype:representation-2.0", "type": "representation", "parent": version_id, "name": ext[1:], - "data": {'path': dst, 'template': template}, + "data": {'path': path_to_save, 'template': template}, "dependencies": instance.data.get("dependencies", "").split(), # Imprint shortcut to context # for performance reasons. "context": { "root": root, - "project": PROJECT, - "projectcode": project['data']['code'], + "project": {"name": PROJECT, + "code": project['data']['code']}, 'task': api.Session["AVALON_TASK"], "silo": asset['silo'], "asset": ASSET, "family": instance.data['family'], "subset": subset["name"], - "VERSION": version["name"], + "version": version["name"], "hierarchy": hierarchy, "representation": ext[1:] } diff --git a/pype/plugins/nuke/publish/extract_write_next_render.py b/pype/plugins/nuke/_load_unused/extract_write_next_render.py similarity index 97% rename from pype/plugins/nuke/publish/extract_write_next_render.py rename to pype/plugins/nuke/_load_unused/extract_write_next_render.py index d13e67a563..40bfe59ec2 100644 --- a/pype/plugins/nuke/publish/extract_write_next_render.py +++ b/pype/plugins/nuke/_load_unused/extract_write_next_render.py @@ -13,6 +13,7 @@ class WriteToRender(pyblish.api.InstancePlugin): families = ["write"] def process(self, instance): + return if [f for f in instance.data["families"] if ".frames" in f]: instance[0]["render"].setValue(True) diff --git a/pype/plugins/nuke/publish/validate_write_families.py b/pype/plugins/nuke/_publish_unused/validate_write_families.py similarity index 68% rename from pype/plugins/nuke/publish/validate_write_families.py rename to pype/plugins/nuke/_publish_unused/validate_write_families.py index 1dfdbc06d5..73f710867d 100644 --- a/pype/plugins/nuke/publish/validate_write_families.py +++ b/pype/plugins/nuke/_publish_unused/validate_write_families.py @@ -20,29 +20,37 @@ class ValidateWriteFamilies(pyblish.api.InstancePlugin): """ Validates write families. """ order = pyblish.api.ValidatorOrder - label = "Check correct writes families" + label = "Validate writes families" hosts = ["nuke"] families = ["write"] actions = [pype.nuke.actions.SelectInvalidAction, pype.api.RepairAction] @staticmethod - def get_invalid(instance): + def get_invalid(self, instance): if not [f for f in instance.data["families"] if ".frames" in f]: return - if not instance.data["files"]: + if not instance.data.get('files'): return (instance) def process(self, instance): self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) - invalid = self.get_invalid(instance) + + invalid = self.get_invalid(self, instance) if invalid: raise ValueError(str("`{}`: Switch `Render` on! 
" "> {}".format(__name__, invalid))) - self.log.info("Checked correct writes families") + # if any(".frames" in f for f in instance.data["families"]): + # if not instance.data["files"]: + # raise ValueError("instance {} is set to publish frames\ + # but no files were collected, render the frames first or\ + # check 'render' checkbox onthe no to 'ON'".format(instance))) + # + # + # self.log.info("Checked correct writes families") @classmethod def repair(cls, instance): diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index c1b492ac2e..af7462680e 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -25,7 +25,7 @@ class CrateWriteRender(avalon.nuke.Creator): name = "WriteRender" label = "Create Write Render" hosts = ["nuke"] - family = "{}_write".format(preset) + family = "write" families = preset icon = "sign-out" @@ -34,7 +34,7 @@ class CrateWriteRender(avalon.nuke.Creator): data = OrderedDict() - data["family"] = self.family.split("_")[1] + data["family"] = self.family data["families"] = self.families {data.update({k: v}) for k, v in self.data.items() @@ -44,15 +44,15 @@ class CrateWriteRender(avalon.nuke.Creator): def process(self): self.name = self.data["subset"] - family = self.family.split("_")[0] - node = self.family.split("_")[1] + family = self.family + node = 'write' instance = nuke.toNode(self.data["subset"]) if not instance: write_data = { "class": node, - "preset": family, + "preset": self.preset, "avalon": self.data } @@ -68,7 +68,7 @@ class CrateWritePrerender(avalon.nuke.Creator): name = "WritePrerender" label = "Create Write Prerender" hosts = ["nuke"] - family = "{}_write".format(preset) + family = "write" families = preset icon = "sign-out" @@ -89,13 +89,13 @@ class CrateWritePrerender(avalon.nuke.Creator): instance = nuke.toNode(self.data["subset"]) - family = self.family.split("_")[0] - node = self.family.split("_")[1] + family = self.family + node = 'write' if not instance: write_data = { "class": node, - "preset": family, + "preset": self.preset, "avalon": self.data } @@ -111,7 +111,7 @@ class CrateWriteStill(avalon.nuke.Creator): name = "WriteStill" label = "Create Write Still" hosts = ["nuke"] - family = "{}_write".format(preset) + family = "write" families = preset icon = "image" @@ -120,7 +120,7 @@ class CrateWriteStill(avalon.nuke.Creator): data = OrderedDict() - data["family"] = self.family.split("_")[1] + data["family"] = self.family data["families"] = self.families {data.update({k: v}) for k, v in self.data.items() @@ -132,14 +132,14 @@ class CrateWriteStill(avalon.nuke.Creator): instance = nuke.toNode(self.data["subset"]) - family = self.family.split("_")[0] - node = self.family.split("_")[1] + family = self.family + node = 'write' if not instance: write_data = { "frame_range": [nuke.frame(), nuke.frame()], "class": node, - "preset": family, + "preset": self.preset, "avalon": self.data } diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 1cd3688aaf..a4a591e657 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -36,13 +36,13 @@ def preserve_trim(node): if start_at_frame: node['frame_mode'].setValue("start at") node['frame'].setValue(str(script_start)) - log.info("start frame of reader was set to" + log.info("start frame of Read was set to" "{}".format(script_start)) if offset_frame: node['frame_mode'].setValue("offset") node['frame'].setValue(str((script_start + 
offset_frame))) - log.info("start frame of reader was set to" + log.info("start frame of Read was set to" "{}".format(script_start)) @@ -67,7 +67,7 @@ def loader_shift(node, frame, relative=True): if relative: node['frame_mode'].setValue("start at") - node['frame'].setValue(str(script_start)) + node['frame'].setValue(str(frame)) return int(script_start) @@ -75,8 +75,8 @@ def loader_shift(node, frame, relative=True): class LoadSequence(api.Loader): """Load image sequence into Nuke""" - families = ["write"] - representations = ["*"] + families = ["write", "source"] + representations = ["exr", "dpx"] label = "Load sequence" order = -10 @@ -86,44 +86,33 @@ class LoadSequence(api.Loader): def load(self, context, name, namespace, data): from avalon.nuke import ( containerise, - ls_img_sequence, viewer_update_and_undo_stop ) - for k, v in context.items(): - log.info("key: `{}`, value: {}\n".format(k, v)) + # for k, v in context.items(): + # log.info("key: `{}`, value: {}\n".format(k, v)) + + version = context['version'] + version_data = version.get("data", {}) + + first = version_data.get("startFrame", None) + last = version_data.get("endFrame", None) # Fallback to asset name when namespace is None if namespace is None: namespace = context['asset']['name'] - # Use the first file for now - # TODO: fix path fname - file = ls_img_sequence(os.path.dirname(self.fname), one=True) - log.info("file: {}\n".format(file)) + file = self.fname + log.info("file: {}\n".format(self.fname)) read_name = "Read_" + context["representation"]["context"]["subset"] + # Create the Loader with the filename path set with viewer_update_and_undo_stop(): # TODO: it might be universal read to img/geo/camera r = nuke.createNode( "Read", "name {}".format(read_name)) - r["file"].setValue(file['path']) - if len(file['frames']) is 1: - first = file['frames'][0][0] - last = file['frames'][0][1] - r["origfirst"].setValue(first) - r["first"].setValue(first) - r["origlast"].setValue(last) - r["last"].setValue(last) - else: - first = file['frames'][0][0] - last = file['frames'][:-1][1] - r["origfirst"].setValue(first) - r["first"].setValue(first) - r["origlast"].setValue(last) - r["last"].setValue(last) - log.warning("Missing frames in image sequence") + r["file"].setValue(self.fname) # Set colorspace defined in version data colorspace = context["version"]["data"].get("colorspace", None) @@ -134,6 +123,10 @@ class LoadSequence(api.Loader): start = context["version"]["data"].get("startFrame", None) if start is not None: loader_shift(r, start, relative=True) + r["origfirst"].setValue(first) + r["first"].setValue(first) + r["origlast"].setValue(last) + r["last"].setValue(last) # add additional metadata from the version to imprint to Avalon knob add_keys = ["startFrame", "endFrame", "handles", @@ -142,8 +135,9 @@ class LoadSequence(api.Loader): data_imprint = {} for k in add_keys: data_imprint.update({k: context["version"]['data'][k]}) + data_imprint.update({"objectName": read_name}) - containerise(r, + return containerise(r, name=name, namespace=namespace, context=context, @@ -168,9 +162,9 @@ class LoadSequence(api.Loader): update_container ) log.info("this i can see") - node = container["_tool"] - # TODO: prepare also for other readers img/geo/camera - assert node.Class() == "Reader", "Must be Reader" + node = nuke.toNode(container['objectName']) + # TODO: prepare also for other Read img/geo/camera + assert node.Class() == "Read", "Must be Read" root = api.get_representation_path(representation) file = 
ls_img_sequence(os.path.dirname(root), one=True) @@ -189,7 +183,7 @@ class LoadSequence(api.Loader): # Update the loader's path whilst preserving some values with preserve_trim(node): - node["file"] = file["path"] + node["file"].setValue(file["path"]) # Set the global in to the start frame of the sequence global_in_changed = loader_shift(node, start, relative=False) @@ -208,8 +202,8 @@ class LoadSequence(api.Loader): from avalon.nuke import viewer_update_and_undo_stop - node = container["_tool"] - assert node.Class() == "Reader", "Must be Reader" + node = nuke.toNode(container['objectName']) + assert node.Class() == "Read", "Must be Read" with viewer_update_and_undo_stop(): nuke.delete(node) diff --git a/pype/plugins/nuke/publish/collect_current_file.py b/pype/plugins/nuke/publish/collect_current_file.py index 96ec44d9d6..35a0ef4c2a 100644 --- a/pype/plugins/nuke/publish/collect_current_file.py +++ b/pype/plugins/nuke/publish/collect_current_file.py @@ -4,7 +4,7 @@ import pyblish.api class SelectCurrentFile(pyblish.api.ContextPlugin): """Inject the current working file into context""" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.5 hosts = ["nuke"] def process(self, context): diff --git a/pype/plugins/nuke/publish/collect_families.py b/pype/plugins/nuke/publish/collect_families.py index d0e61c349b..08ab90143d 100644 --- a/pype/plugins/nuke/publish/collect_families.py +++ b/pype/plugins/nuke/publish/collect_families.py @@ -2,46 +2,46 @@ import pyblish.api @pyblish.api.log -class CollectInstanceFamilies(pyblish.api.ContextPlugin): +class CollectInstanceFamilies(pyblish.api.InstancePlugin): """Collect families for all instances""" order = pyblish.api.CollectorOrder + 0.2 label = "Collect Families" hosts = ["nuke", "nukeassist"] + families = ['write'] - def process(self, context): - for instance in context.data["instances"]: + def process(self, instance): - if "write" in instance.data["family"]: - node = instance[0] + node = instance[0] - # set for ftrack to accept - instance.data["families"] = ["ftrack"] + self.log.info('processing {}'.format(node)) - if not node["render"].value(): - families = ["{}.frames".format( - instance.data["avalonKnob"]["families"])] - # to ignore staging dir op in integrate - instance.data['transfer'] = False - else: - # dealing with local/farm rendering - if node["render_farm"].value(): - families = ["{}.farm".format( - instance.data["avalonKnob"]["families"])] - else: - families = ["{}.local".format( - instance.data["avalonKnob"]["families"])] + families = [] + if instance.data.get('families'): + families.append(instance.data['families']) - instance.data["families"].extend(families) + # set for ftrack to accept + # instance.data["families"] = ["ftrack"] - elif "source" in instance.data["family"]: - families = [] - families.append(instance.data["avalonKnob"]["families"]) + if node["render"].value(): + # dealing with local/farm rendering + if node["render_farm"].value(): + families.append("render.farm") + else: + families.append("render.local") + else: + families.append("render.frames") + # to ignore staging dir op in integrate + instance.data['transfer'] = False + + families.append('ftrack') + + + instance.data["families"] = families - instance.data["families"] = families # Sort/grouped by family (preserving local index) - context[:] = sorted(context, key=self.sort_by_family) + instance.context[:] = sorted(instance.context, key=self.sort_by_family) def sort_by_family(self, instance): """Sort by family""" diff --git 
a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index 33e6d5a608..8a2bb06fff 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -56,8 +56,8 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): "fps": int(nuke.root()['fps'].value()) }) - if node.Class() == "Write": - instance.data["families"] = [avalon_knob_data["families"]] + # if node.Class() == "Write": + # instance.data["families"] = [avalon_knob_data["families"]] self.log.info("collected instance: {}".format(instance.data)) instances.append(instance) diff --git a/pype/plugins/nuke/publish/collect_review.py b/pype/plugins/nuke/publish/collect_review.py index 03f5437e86..f75c675b8f 100644 --- a/pype/plugins/nuke/publish/collect_review.py +++ b/pype/plugins/nuke/publish/collect_review.py @@ -14,14 +14,16 @@ class CollectReview(pyblish.api.InstancePlugin): family_targets = [".local", ".frames"] def process(self, instance): + pass families = [(f, search) for f in instance.data["families"] for search in self.family_targets if search in f][0] if families: - root_femilies = families[0].replace(families[1], "") - instance.data["families"].append(".".join([ - root_femilies, - self.family - ])) + root_families = families[0].replace(families[1], "") + # instance.data["families"].append(".".join([ + # root_families, + # self.family + # ])) + instance.data["families"].append("render.review") self.log.info("Review collected: `{}`".format(instance)) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index dd3247ae8f..2a274201bb 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -3,6 +3,7 @@ import tempfile import nuke import pyblish.api import logging +import pype.api as pype log = logging.getLogger(__name__) @@ -50,6 +51,11 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): output_dir = os.path.dirname(path) self.log.debug('output dir: {}'.format(output_dir)) + # get version + version = pype.get_version_from_path(path) + instance.data['version'] = version + self.log.debug('Write Version: %s' % instance.data['version']) + # create label name = node.name() # Include start and end render frame in label @@ -64,14 +70,13 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): # collect families in next file if "files" not in instance.data: instance.data["files"] = list() - try: collected_frames = os.listdir(output_dir) self.log.debug("collected_frames: {}".format(label)) instance.data["files"].append(collected_frames) except Exception: - pass + self.log.debug("couldn't collect frames: {}".format(label)) instance.data.update({ "path": path, @@ -84,6 +89,8 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): "colorspace": node["colorspace"].value(), }) + + self.log.debug("instance.data: {}".format(instance.data)) self.log.debug("context: {}".format(context)) diff --git a/pype/plugins/nuke/publish/extract_render_local.py b/pype/plugins/nuke/publish/extract_render_local.py index 5b53a42136..1f0a00273f 100644 --- a/pype/plugins/nuke/publish/extract_render_local.py +++ b/pype/plugins/nuke/publish/extract_render_local.py @@ -2,6 +2,7 @@ import pyblish.api import nuke import os import pype +import clique class NukeRenderLocal(pype.api.Extractor): @@ -66,5 +67,11 @@ class NukeRenderLocal(pype.api.Extractor): output_dir )) + collections, remainder = clique.assemble(*instance.data['files']) + self.log.info('collections: 
{}'.format(str(collections))) + + collection = collections[0] + instance.data['collection'] = collection + self.log.info('Finished render') return diff --git a/pype/plugins/nuke/publish/extract_review.py b/pype/plugins/nuke/publish/extract_review.py index 30de2039df..e85185e919 100644 --- a/pype/plugins/nuke/publish/extract_review.py +++ b/pype/plugins/nuke/publish/extract_review.py @@ -2,6 +2,7 @@ import os import nuke import pyblish.api import pype +from pype.vendor import ffmpeg class ExtractDataForReview(pype.api.Extractor): @@ -12,29 +13,21 @@ class ExtractDataForReview(pype.api.Extractor): """ order = pyblish.api.ExtractorOrder + 0.01 - label = "Data for review" + label = "Extract Review" optional = True - families = ["write"] + families = ["render.review"] hosts = ["nuke"] - family_targets = [".local", ".review"] def process(self, instance): - families = [f for f in instance.data["families"] - for search in self.family_targets - if search in f] - if not families: - return - self.log.debug("here:") # Store selection selection = [i for i in nuke.allNodes() if i["selected"].getValue()] - self.log.debug("here:") # Deselect all nodes to prevent external connections [i["selected"].setValue(False) for i in nuke.allNodes()] - self.log.debug("here:") self.log.debug("creating staging dir:") self.staging_dir(instance) + self.render_review_representation(instance, representation="mov") self.log.debug("review mov:") @@ -52,34 +45,20 @@ class ExtractDataForReview(pype.api.Extractor): staging_dir = instance.data["stagingDir"] file_name = collection.format("{head}mov") - review_mov = os.path.join(staging_dir, file_name) - - if instance.data.get("baked_colorspace_movie"): - args = [ - "ffmpeg", "-y", - "-i", instance.data["baked_colorspace_movie"], - "-pix_fmt", "yuv420p", - "-crf", "18", - "-timecode", "00:00:00:01", - ] - - args.append(review_mov) - - self.log.debug("Executing args: {0}".format(args)) + review_mov = os.path.join(staging_dir, file_name).replace("\\", "/") self.log.info("transcoding review mov: {0}".format(review_mov)) - p = subprocess.Popen( - args, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - stdin=subprocess.PIPE, - cwd=os.path.dirname(args[-1]) - ) + if instance.data.get("baked_colorspace_movie"): + input_movie = instance.data["baked_colorspace_movie"] + out, err = ( + ffmpeg + .input(input_movie) + .output(review_mov, pix_fmt='yuv420p', crf=18, timecode="00:00:00:01") + .overwrite_output() + .run() + ) - output = p.communicate()[0] - if p.returncode != 0: - raise ValueError(output) self.log.debug("Removing `{0}`...".format( instance.data["baked_colorspace_movie"])) @@ -100,19 +79,10 @@ class ExtractDataForReview(pype.api.Extractor): collection = instance.data.get("collection", None) - self.log.warning("instance.data['files']: {}".format(instance.data['files'])) - if not collection: - collections, remainder = clique.assemble(*instance.data['files']) - collection = collections[0] - instance.data["collection"] = collection - # Create nodes first_frame = min(collection.indexes) last_frame = max(collection.indexes) - self.log.warning("first_frame: {}".format(first_frame)) - self.log.warning("last_frame: {}".format(last_frame)) - node = previous_node = nuke.createNode("Read") node["file"].setValue( @@ -158,6 +128,7 @@ class ExtractDataForReview(pype.api.Extractor): if representation in "mov": file = collection.format("{head}baked.mov") path = os.path.join(staging_dir, file).replace("\\", "/") + self.log.debug("Path: {}".format(path)) instance.data["baked_colorspace_movie"] 
= path write_node["file"].setValue(path) write_node["file_type"].setValue("mov") diff --git a/pype/plugins/nuke/publish/integrate_script_version.py b/pype/plugins/nuke/publish/increment_script_version.py similarity index 72% rename from pype/plugins/nuke/publish/integrate_script_version.py rename to pype/plugins/nuke/publish/increment_script_version.py index aa37101af0..77eab30a63 100644 --- a/pype/plugins/nuke/publish/integrate_script_version.py +++ b/pype/plugins/nuke/publish/increment_script_version.py @@ -7,12 +7,15 @@ class IncrementScriptVersion(pyblish.api.ContextPlugin): """Increment current script version.""" order = pyblish.api.IntegratorOrder + 0.9 - label = "Increment Current Script Version" + label = "Increment Script Version" optional = True hosts = ['nuke'] - families = ["nukescript", "render.local", "render.frames"] def process(self, context): + + assert all(result["success"] for result in context.data["results"]), ( + "Atomicity not held, aborting.") + from pype.lib import version_up path = context.data["currentFile"] nuke.scriptSaveAs(version_up(path)) diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index 54b3537055..c402927373 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -20,20 +20,21 @@ class RepairCollectionAction(pyblish.api.Action): self.log.info("Rendering toggled ON") -class ValidateCollection(pyblish.api.InstancePlugin): +class ValidatePrerenderedFrames(pyblish.api.InstancePlugin): """ Validates file output. """ order = pyblish.api.ValidatorOrder + 0.1 families = ["render.frames", "still.frames", "prerender.frames"] - label = "Check prerendered frames" + label = "Validate prerendered frame" hosts = ["nuke"] actions = [RepairCollectionAction] + def process(self, instance): self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) - if not instance.data["files"]: - return + + assert instance.data.get('files'), "no frames were collected, you need to render them" collections, remainder = clique.assemble(*instance.data['files']) self.log.info('collections: {}'.format(str(collections))) @@ -57,3 +58,5 @@ class ValidateCollection(pyblish.api.InstancePlugin): collection.indexes ) is frame_length, "{} missing frames. 
Use " "repair to render all frames".format(__name__) + + instance.data['collection'] = collection diff --git a/pype/plugins/nuke/publish/validate_version_match.py b/pype/plugins/nuke/publish/validate_version_match.py new file mode 100644 index 0000000000..64646ea5dc --- /dev/null +++ b/pype/plugins/nuke/publish/validate_version_match.py @@ -0,0 +1,15 @@ +import pyblish.api + + +class ValidateVersionMatch(pyblish.api.InstancePlugin): + """Checks if write version matches workfile version""" + + label = "Validate Version Match" + order = pyblish.api.ValidatorOrder + hosts = ["nuke"] + families = ['render.frames'] + + def process(self, instance): + + assert instance.data['version'] == instance.context.data['version'], "\ + Version in write doesn't match version of the workfile" diff --git a/pype/templates.py b/pype/templates.py index cf246defb3..5fe41f6899 100644 --- a/pype/templates.py +++ b/pype/templates.py @@ -58,7 +58,7 @@ def reset_data_from_templates(): log.info("Data from templates were Unloaded...") -def get_version_from_workfile(file): +def get_version_from_path(file): """ Finds version number in file path string diff --git a/setup/nuke/nuke_path/menu.py b/setup/nuke/nuke_path/menu.py index 3613bc99f2..45f44d0d11 100644 --- a/setup/nuke/nuke_path/menu.py +++ b/setup/nuke/nuke_path/menu.py @@ -6,7 +6,7 @@ from pype.api import Logger log = Logger.getLogger(__name__, "nuke") -nuke.addOnScriptSave(writes_version_sync) -nuke.addOnScriptSave(onScriptLoad) +# nuke.addOnScriptSave(writes_version_sync) +# nuke.addOnScriptSave(onScriptLoad) log.info('Automatic syncing of write file knob to script version')