From bddda5a9a06fe2060933005a32bd51703db22e09 Mon Sep 17 00:00:00 2001
From: antirotor
Date: Thu, 20 Jun 2019 18:13:10 +0200
Subject: [PATCH] fix(nuke): simplified families

fix(nuke): render path simplified and set to work directory, leaving it to
the integrator to copy it into publish
chg(nuke): disabled version check in write path
fix(nuke): write version is now determined by script version
fix(pype): temporary removal of `5661d9e` commit adding Qt dependency to pype
fix: minor cleanups and fixes
---
 pype/__init__.py                              |  15 --
 pype/api.py                                   |   5 +-
 pype/nuke/lib.py                              |  18 ++-
 pype/plugin.py                                |   4 +
 .../publish/integrate_ftrack_instances.py     |   2 +-
 .../publish/collect_assumed_destination.py    | 148 ------------------
 .../plugins/global/publish/collect_presets.py |   2 +-
 pype/plugins/global/publish/integrate_new.py  |   2 -
 .../global/publish/submit_publish_job.py      |   1 -
 pype/plugins/nuke/create/create_write.py      | 108 ++++++-------
 pype/plugins/nuke/publish/collect_families.py |  50 ------
 .../plugins/nuke/publish/collect_instances.py |  17 +-
 pype/plugins/nuke/publish/collect_writes.py   |   2 +-
 .../nuke/publish/submit_nuke_deadline.py      |   6 +-
 ...lection.py => validate_rendered_frames.py} |  35 +++--
 setup/nuke/nuke_path/menu.py                  |   2 +-
 16 files changed, 120 insertions(+), 297 deletions(-)
 delete mode 100644 pype/plugins/global/publish/collect_assumed_destination.py
 delete mode 100644 pype/plugins/nuke/publish/collect_families.py
 rename pype/plugins/nuke/publish/{validate_collection.py => validate_rendered_frames.py} (53%)

diff --git a/pype/__init__.py b/pype/__init__.py
index 5a65e01776..44721c3eaf 100644
--- a/pype/__init__.py
+++ b/pype/__init__.py
@@ -2,7 +2,6 @@ import os
 
 from pyblish import api as pyblish
 from avalon import api as avalon
-from Qt import QtWidgets
 
 import logging
 log = logging.getLogger(__name__)
@@ -25,20 +24,6 @@ def install():
     pyblish.register_plugin_path(PUBLISH_PATH)
     avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
 
-    # pyblish-qml settings.
-    try:
-        __import__("pyblish_qml")
-    except ImportError as e:
-        log.error("Could not load pyblish-qml: %s " % e)
-    else:
-        from pyblish_qml import settings
-        app = QtWidgets.QApplication.instance()
-        screen_resolution = app.desktop().screenGeometry()
-        width, height = screen_resolution.width(), screen_resolution.height()
-        settings.WindowSize = (width / 3, height * 0.75)
-        settings.WindowPosition = (0, 0)
-
-
 def uninstall():
     log.info("Deregistering global plug-ins..")
     pyblish.deregister_plugin_path(PUBLISH_PATH)
diff --git a/pype/api.py b/pype/api.py
index 1ab7a91955..2227236fd3 100644
--- a/pype/api.py
+++ b/pype/api.py
@@ -5,7 +5,8 @@ from .plugin import (
     ValidatePipelineOrder,
     ValidateContentsOrder,
     ValidateSceneOrder,
-    ValidateMeshOrder
+    ValidateMeshOrder,
+    ValidationException
 )
 
 # temporary fix, might
@@ -62,6 +63,8 @@ __all__ = [
 
     "Logger",
 
+    "ValidationException",
+
     # contectual templates
     # get data to preloaded templates
     "load_data_from_templates",
diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 9bc390eaa5..4d96e6b772 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -24,6 +24,7 @@ for path in sys.path:
         log.info("_ removing from sys.path: `{}`".format(path))
         sys.path.remove(path)
 
+
 def onScriptLoad():
     if nuke.env['LINUX']:
         nuke.tcl('load ffmpegReader')
@@ -37,12 +38,12 @@ def checkInventoryVersions():
     """
     Actiual version idetifier of Loaded containers
 
-    Any time this function is run it will check all nodes and filter only Loader nodes for its version. 
It will get all versions from database - and check if the node is having actual version. If not then it will color it to red. - + Any time this function is run it will check all nodes and filter only + Loader nodes for its version. It will get all versions from database + and check if the node is having actual version. If not then it will color + it to red. """ - # get all Loader nodes by avalon attribute metadata for each in nuke.allNodes(): if each.Class() == 'Read': @@ -195,12 +196,17 @@ def create_write_node(name, data): except Exception as e: log.error("problem with resolving anatomy tepmlate: {}".format(e)) - fpath = str(anatomy_filled["render"]["path"]).replace("\\", "/") + # build file path to workfiles + fpath = str(anatomy_filled["work"]["folder"]).replace("\\", "/") + fpath = '{work}/renders/v{version}/{subset}.{frame}.{ext}'.format( + work=fpath, version=data["version"], subset=data["subset"], + frame=data["frame"], + ext=data["nuke_dataflow_writes"]["file_type"]) # create directory if not os.path.isdir(os.path.dirname(fpath)): log.info("path does not exist") - os.makedirs(os.path.dirname(fpath), 0766) + os.makedirs(os.path.dirname(fpath), 0o766) _data = OrderedDict({ "file": fpath diff --git a/pype/plugin.py b/pype/plugin.py index cfcd814c92..c77b9927e1 100644 --- a/pype/plugin.py +++ b/pype/plugin.py @@ -69,3 +69,7 @@ def contextplugin_should_run(plugin, context): return True return False + + +class ValidationException(Exception): + pass diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py index 3c8ab3d1d4..d351289dfe 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py @@ -22,9 +22,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'rig': 'rig', 'setdress': 'setdress', 'pointcache': 'cache', - 'write': 'img', 'render': 'render', 'nukescript': 'comp', + 'write': 'render', 'review': 'mov', 'plate': 'img' } diff --git a/pype/plugins/global/publish/collect_assumed_destination.py b/pype/plugins/global/publish/collect_assumed_destination.py deleted file mode 100644 index e91093f4db..0000000000 --- a/pype/plugins/global/publish/collect_assumed_destination.py +++ /dev/null @@ -1,148 +0,0 @@ -import os -import pyblish.api - -from avalon import io, api - - -class CollectAssumedDestination(pyblish.api.ContextPlugin): - """Generate the assumed destination path where the file will be stored""" - - label = "Collect Assumed Destination" - order = pyblish.api.CollectorOrder + 0.498 - exclude_families = ["plate"] - - def process(self, context): - - for instance in context: - if [ef for ef in self.exclude_families - if ef in instance.data["family"]]: - self.log.info("Ignoring instance: {}".format(instance)) - return - self.process_item(instance) - - def process_item(self, instance): - - self.create_destination_template(instance) - - template_data = instance.data["assumedTemplateData"] - - anatomy = instance.context.data['anatomy'] - # self.log.info(anatomy.anatomy()) - self.log.info(anatomy.templates) - # template = anatomy.publish.path - anatomy_filled = anatomy.format(template_data) - self.log.info(anatomy_filled) - mock_template = anatomy_filled["publish"]["path"] - - # For now assume resources end up in a "resources" folder in the - # published folder - mock_destination = os.path.join(os.path.dirname(mock_template), - "resources") - - # Clean the path - mock_destination = 
os.path.abspath(os.path.normpath(mock_destination)) - - # Define resource destination and transfers - resources = instance.data.get("resources", list()) - transfers = instance.data.get("transfers", list()) - for resource in resources: - - # Add destination to the resource - source_filename = os.path.basename(resource["source"]) - destination = os.path.join(mock_destination, source_filename) - - # Force forward slashes to fix issue with software unable - # to work correctly with backslashes in specific scenarios - # (e.g. escape characters in PLN-151 V-Ray UDIM) - destination = destination.replace("\\", "/") - - resource['destination'] = destination - - # Collect transfers for the individual files of the resource - # e.g. all individual files of a cache or UDIM textures. - files = resource['files'] - for fsrc in files: - fname = os.path.basename(fsrc) - fdest = os.path.join(mock_destination, fname) - transfers.append([fsrc, fdest]) - - instance.data["resources"] = resources - instance.data["transfers"] = transfers - - def create_destination_template(self, instance): - """Create a filepath based on the current data available - - Example template: - {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/ - {subset}.{representation} - Args: - instance: the instance to publish - - Returns: - file path (str) - """ - if [ef for ef in self.exclude_families - if instance.data["family"] in ef]: - return - - # get all the stuff from the database - subset_name = instance.data["subset"] - asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - - # FIXME: io is not initialized at this point for shell host - io.install() - project = io.find_one({"type": "project", - "name": project_name}, - projection={"config": True, "data": True}) - - template = project["config"]["template"]["publish"] - anatomy = instance.context.data['anatomy'] - - asset = io.find_one({"type": "asset", - "name": asset_name, - "parent": project["_id"]}) - - assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - silo = asset['silo'] - - subset = io.find_one({"type": "subset", - "name": subset_name, - "parent": asset["_id"]}) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one({"type": "version", - "parent": subset["_id"]}, - sort=[("name", -1)]) - - # if there is a subset there ought to be version - if version is not None: - version_number += int(version["name"]) - - hierarchy = asset['data']['parents'] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*hierarchy) - - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "silo": silo, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "version": version_number, - "hierarchy": hierarchy, - "representation": "TEMP"} - - instance.data["template"] = template - instance.data["assumedTemplateData"] = template_data - - # We take the parent folder of representation 'filepath' - instance.data["assumedDestination"] = os.path.dirname( - (anatomy.format(template_data))["publish"]["path"] - ) diff --git a/pype/plugins/global/publish/collect_presets.py b/pype/plugins/global/publish/collect_presets.py index 312b6b008a..7e0d3e2f4b 100644 --- a/pype/plugins/global/publish/collect_presets.py +++ b/pype/plugins/global/publish/collect_presets.py @@ -27,5 +27,5 @@ class 
CollectPresets(api.ContextPlugin): context.data["presets"] = presets - self.log.info(context.data["presets"]) + # self.log.info(context.data["presets"]) return diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 587add709e..f96fb240c9 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -60,8 +60,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "nukescript", "render", "rendersetup", - "render.farm", - "write", "rig", "plate", "look" diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index f3ce4f7ec5..75a80d0418 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -376,7 +376,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): rendered_logs.append(log) metadata["metadata"]["instance"]["_log"] = rendered_logs - pprint(metadata) with open(metadata_path, "w") as f: json.dump(metadata, f, indent=4, sort_keys=True) diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index b3c9117641..347a4a1fd2 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -19,7 +19,7 @@ def subset_to_families(subset, family, families): return "{}.{}".format(family, new_subset) -class CrateWriteRender(avalon.nuke.Creator): +class CreateWriteRender(avalon.nuke.Creator): # change this to template preset preset = "render" @@ -31,7 +31,7 @@ class CrateWriteRender(avalon.nuke.Creator): icon = "sign-out" def __init__(self, *args, **kwargs): - super(CrateWriteRender, self).__init__(*args, **kwargs) + super(CreateWriteRender, self).__init__(*args, **kwargs) data = OrderedDict() @@ -62,7 +62,7 @@ class CrateWriteRender(avalon.nuke.Creator): return -class CrateWritePrerender(avalon.nuke.Creator): +class CreateWritePrerender(avalon.nuke.Creator): # change this to template preset preset = "prerender" @@ -74,7 +74,7 @@ class CrateWritePrerender(avalon.nuke.Creator): icon = "sign-out" def __init__(self, *args, **kwargs): - super(CrateWritePrerender, self).__init__(*args, **kwargs) + super(CreateWritePrerender, self).__init__(*args, **kwargs) data = OrderedDict() @@ -89,8 +89,6 @@ class CrateWritePrerender(avalon.nuke.Creator): self.name = self.data["subset"] instance = nuke.toNode(self.data["subset"]) - - family = self.family node = 'write' if not instance: @@ -103,51 +101,53 @@ class CrateWritePrerender(avalon.nuke.Creator): create_write_node(self.data["subset"], write_data) return -# -# -# class CrateWriteStill(avalon.nuke.Creator): -# # change this to template preset -# preset = "still" -# -# name = "WriteStill" -# label = "Create Write Still" -# hosts = ["nuke"] -# family = "{}_write".format(preset) -# families = preset -# icon = "image" -# -# def __init__(self, *args, **kwargs): -# super(CrateWriteStill, self).__init__(*args, **kwargs) -# -# data = OrderedDict() -# -# data["family"] = self.family.split("_")[-1] -# data["families"] = self.families -# -# {data.update({k: v}) for k, v in self.data.items() -# if k not in data.keys()} -# self.data = data -# -# def process(self): -# self.name = self.data["subset"] -# -# node_name = self.data["subset"].replace( -# "_", "_f{}_".format(nuke.frame())) -# instance = nuke.toNode(self.data["subset"]) -# self.data["subset"] = node_name -# -# family = self.family -# node = 'write' -# -# if not instance: -# write_data = { -# 
"frame_range": [nuke.frame(), nuke.frame()], -# "class": node, -# "preset": self.preset, -# "avalon": self.data -# } -# -# nuke.createNode("FrameHold", "first_frame {}".format(nuke.frame())) -# create_write_node(node_name, write_data) -# -# return + + +""" +class CrateWriteStill(avalon.nuke.Creator): + # change this to template preset + preset = "still" + + name = "WriteStill" + label = "Create Write Still" + hosts = ["nuke"] + family = "{}_write".format(preset) + families = preset + icon = "image" + + def __init__(self, *args, **kwargs): + super(CrateWriteStill, self).__init__(*args, **kwargs) + + data = OrderedDict() + + data["family"] = self.family.split("_")[-1] + data["families"] = self.families + + {data.update({k: v}) for k, v in self.data.items() + if k not in data.keys()} + self.data = data + + def process(self): + self.name = self.data["subset"] + + node_name = self.data["subset"].replace( + "_", "_f{}_".format(nuke.frame())) + instance = nuke.toNode(self.data["subset"]) + self.data["subset"] = node_name + + family = self.family + node = 'write' + + if not instance: + write_data = { + "frame_range": [nuke.frame(), nuke.frame()], + "class": node, + "preset": self.preset, + "avalon": self.data + } + + nuke.createNode("FrameHold", "first_frame {}".format(nuke.frame())) + create_write_node(node_name, write_data) + + return +""" diff --git a/pype/plugins/nuke/publish/collect_families.py b/pype/plugins/nuke/publish/collect_families.py deleted file mode 100644 index 77388a9bd5..0000000000 --- a/pype/plugins/nuke/publish/collect_families.py +++ /dev/null @@ -1,50 +0,0 @@ -import pyblish.api -import nuke - -@pyblish.api.log -class CollectInstanceFamilies(pyblish.api.InstancePlugin): - """Collect families for all instances""" - - order = pyblish.api.CollectorOrder + 0.2 - label = "Collect Families" - hosts = ["nuke", "nukeassist"] - families = ['write'] - - def process(self, instance): - - # node = nuke.toNode(instance.data["name"]) - node = instance[0] - - self.log.info('processing {}'.format(node["name"].value())) - - families = [] - if instance.data.get('families'): - families += instance.data['families'] - - # set for ftrack to accept - # instance.data["families"] = ["ftrack"] - - if node["render"].value(): - self.log.info("flagged for render") - # dealing with local/farm rendering - if node["render_farm"].value(): - self.log.info("adding render farm family") - families.append("render.farm") - else: - self.log.info("adding render to local") - families.append("render.local") - else: - families.append("render.frames") - # to ignore staging dir op in integrate - instance.data['transfer'] = False - - families.append('ftrack') - - instance.data["families"] = families - - # Sort/grouped by family (preserving local index) - instance.context[:] = sorted(instance.context, key=self.sort_by_family) - - def sort_by_family(self, instance): - """Sort by family""" - return instance.data.get("families", instance.data.get("family")) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index e9db556a9f..c4570be530 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -40,27 +40,36 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): if avalon_knob_data["id"] != "pyblish.avalon.instance": continue - subset = avalon_knob_data.get("subset", None) or node["name"].value() + subset = avalon_knob_data.get( + "subset", None) or node["name"].value() # Create instance instance = 
context.create_instance(subset) instance.add(node) + family = "render.local" + if node["render"].value(): + self.log.info("flagged for render") + # dealing with local/farm rendering + if node["render_farm"].value(): + self.log.info("adding render farm family") + family = "render.farm" + instance.data['transfer'] = False + instance.data.update({ "subset": subset, "asset": os.environ["AVALON_ASSET"], "label": node.name(), "name": node.name(), "subset": subset, - "family": avalon_knob_data["family"], + "family": family, "avalonKnob": avalon_knob_data, "publish": node.knob('publish').value(), "step": 1, "fps": int(nuke.root()['fps'].value()) }) - # if node.Class() == "Write": - # instance.data["families"] = [avalon_knob_data["families"]] + self.log.info("collected instance: {}".format(instance.data)) instances.append(instance) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 68bc2fd5d4..080490e40f 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -50,7 +50,7 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): self.log.debug('output dir: {}'.format(output_dir)) # get version - version = pype.get_version_from_path(path) + version = pype.get_version_from_path(nuke.root().name()) instance.data['version'] = version self.log.debug('Write Version: %s' % instance.data('version')) diff --git a/pype/plugins/nuke/publish/submit_nuke_deadline.py b/pype/plugins/nuke/publish/submit_nuke_deadline.py index 37a5b632cb..3d854f66e9 100644 --- a/pype/plugins/nuke/publish/submit_nuke_deadline.py +++ b/pype/plugins/nuke/publish/submit_nuke_deadline.py @@ -6,6 +6,7 @@ import nuke from avalon import api from avalon.vendor import requests +import re import pyblish.api @@ -51,6 +52,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): dirname = os.path.join(workspace, "renders") deadline_user = context.data.get("deadlineUser", getpass.getuser()) jobname = "%s - %s" % (filename, instance.name) + ver = re.search(r"\d+\.\d+", context.data.get("hostVersion")) try: # Ensure render folder exists @@ -94,7 +96,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): # "OutputFilePrefix": render_variables["filename_prefix"], # Mandatory for Deadline - "Version": context.data.get("hostVersion"), + "Version": ver.group(), # Resolve relative references "ProjectPath": workspace, @@ -188,8 +190,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): raise Exception(response.text) # Store output dir for unified publisher (filesequence) - instance.data["outputDir"] = os.path.dirname(output_dir) instance.data["deadlineSubmissionJob"] = response.json() + instance.data["publishJobState"] = "Active" def preflight_check(self, instance): """Ensure the startFrame, endFrame and byFrameStep are integers""" diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_rendered_frames.py similarity index 53% rename from pype/plugins/nuke/publish/validate_collection.py rename to pype/plugins/nuke/publish/validate_rendered_frames.py index 1d0e1b260e..2a44eef1b5 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_rendered_frames.py @@ -1,5 +1,6 @@ import os import pyblish.api +from pype.api import ValidationException import clique @@ -20,21 +21,25 @@ class RepairCollectionAction(pyblish.api.Action): self.log.info("Rendering toggled ON") -class ValidatePrerenderedFrames(pyblish.api.InstancePlugin): +class 
ValidateRenderedFrames(pyblish.api.InstancePlugin):
     """ Validates file output. """
 
     order = pyblish.api.ValidatorOrder + 0.1
-    families = ["render.frames", "still.frames", "prerender.frames"]
+    families = ["render.local"]
 
-    label = "Validate prerendered frame"
-    hosts = ["nuke"]
+    label = "Validate rendered frame"
+    hosts = ["nuke", "nukestudio"]
     actions = [RepairCollectionAction]
 
     def process(self, instance):
 
         for repre in instance.data.get('representations'):
-            assert repre.get('files'), "no frames were collected, you need to render them"
+            if not repre.get('files'):
+                msg = ("no frames were collected, "
+                       "you need to render them")
+                self.log.error(msg)
+                raise ValidationException(msg)
 
             collections, remainder = clique.assemble(repre["files"])
             self.log.info('collections: {}'.format(str(collections)))
@@ -45,10 +50,20 @@
             - instance.data["startFrame"] + 1
 
         if frame_length != 1:
-            assert len(collections) == 1, "There are multiple collections in the folder"
-            assert collection.is_contiguous(), "Some frames appear to be missing"
+            if len(collections) != 1:
+                msg = "There are multiple collections in the folder"
+                self.log.error(msg)
+                raise ValidationException(msg)
 
-        assert remainder is not None, "There are some extra files in folder"
+            if not collection.is_contiguous():
+                msg = "Some frames appear to be missing"
+                self.log.error(msg)
+                raise ValidationException(msg)
+
+            if remainder is not None:
+                msg = "There are some extra files in folder"
+                self.log.error(msg)
+                raise ValidationException(msg)
 
         self.log.info('frame_length: {}'.format(frame_length))
         self.log.info('len(collection.indexes): {}'.format(
             len(collection.indexes)
 
         assert len(
             collection.indexes
-        ) is frame_length, "{} missing frames. Use "
-        "repair to render all frames".format(__name__)
+        ) is frame_length, ("{} missing frames. Use "
+                            "repair to render all frames").format(__name__)
 
         instance.data['collection'] = collection
diff --git a/setup/nuke/nuke_path/menu.py b/setup/nuke/nuke_path/menu.py
index 4982513b78..61922d55ac 100644
--- a/setup/nuke/nuke_path/menu.py
+++ b/setup/nuke/nuke_path/menu.py
@@ -11,7 +11,7 @@
 from pypeapp import Logger
 
 log = Logger().get_logger(__name__, "nuke")
 
-nuke.addOnScriptSave(writes_version_sync)
+# nuke.addOnScriptSave(writes_version_sync)
 nuke.addOnScriptSave(onScriptLoad)
 nuke.addOnScriptSave(checkInventoryVersions)
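
Illustration of the new render path (not part of the patch itself): the Write node now resolves its output under the Anatomy "work" folder instead of the publish/"render" template, and the rendered frames are expected to be copied into publish by the integrator afterwards. Below is a minimal sketch of the path construction added to pype/nuke/lib.py; the sample values are hypothetical and stand in for anatomy_filled["work"]["folder"] and the write node's data dict.

    # Sketch only -- sample values are made up; in the patch they come from
    # anatomy_filled["work"]["folder"] and the node data passed to
    # create_write_node().
    work_folder = "P:/projects/demo/shots/sh010/work/compositing"
    data = {
        "version": 3,                                  # derived from the script version
        "subset": "renderMain",
        "frame": "%04d",                               # frame padding token (assumed)
        "nuke_dataflow_writes": {"file_type": "exr"},
    }

    fpath = '{work}/renders/v{version}/{subset}.{frame}.{ext}'.format(
        work=work_folder.replace("\\", "/"),
        version=data["version"],
        subset=data["subset"],
        frame=data["frame"],
        ext=data["nuke_dataflow_writes"]["file_type"])

    print(fpath)
    # P:/projects/demo/shots/sh010/work/compositing/renders/v3/renderMain.%04d.exr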
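
Side note on the Deadline submission change (illustrative only): submit_nuke_deadline.py now sends just the numeric part of the host version, presumably because the Deadline "Version" field expects a bare major.minor number while Nuke reports a string such as "11.3v4". A small sketch of the regex the patch uses; the host_version value here is a hypothetical example.

    import re

    # Hypothetical hostVersion value in NUKE_VERSION_STRING style.
    host_version = "11.3v4"
    ver = re.search(r"\d+\.\d+", host_version)
    print(ver.group())  # -> 11.3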