From ae59f724a1a429e72ee3c6a3238e6a320f780815 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 31 Jan 2020 20:07:40 +0100 Subject: [PATCH 01/10] clean(nk): removing unneeded files --- .../_load_unused/extract_write_next_render.py | 24 ---- pype/plugins/nuke/_load_unused/load_backdrop | 0 .../_publish_unused/collect_active_viewer.py | 14 --- .../nuke/_publish_unused/extract_frames.py | 22 ---- .../_publish_unused/extract_nuke_write.py | 116 ------------------ .../nuke/_publish_unused/extract_script.py | 40 ------ .../_publish_unused/integrate_staging_dir.py | 27 ---- .../publish_image_sequences.py | 98 --------------- .../_publish_unused/validate_active_viewer.py | 24 ---- .../_publish_unused/validate_version_match.py | 36 ------ .../validate_write_families.py | 59 --------- .../nukestudio/publish/validate_version.py | 79 ------------ 12 files changed, 539 deletions(-) delete mode 100644 pype/plugins/nuke/_load_unused/extract_write_next_render.py delete mode 100644 pype/plugins/nuke/_load_unused/load_backdrop delete mode 100644 pype/plugins/nuke/_publish_unused/collect_active_viewer.py delete mode 100644 pype/plugins/nuke/_publish_unused/extract_frames.py delete mode 100644 pype/plugins/nuke/_publish_unused/extract_nuke_write.py delete mode 100644 pype/plugins/nuke/_publish_unused/extract_script.py delete mode 100644 pype/plugins/nuke/_publish_unused/integrate_staging_dir.py delete mode 100644 pype/plugins/nuke/_publish_unused/publish_image_sequences.py delete mode 100644 pype/plugins/nuke/_publish_unused/validate_active_viewer.py delete mode 100644 pype/plugins/nuke/_publish_unused/validate_version_match.py delete mode 100644 pype/plugins/nuke/_publish_unused/validate_write_families.py delete mode 100644 pype/plugins/nukestudio/publish/validate_version.py diff --git a/pype/plugins/nuke/_load_unused/extract_write_next_render.py b/pype/plugins/nuke/_load_unused/extract_write_next_render.py deleted file mode 100644 index 40bfe59ec2..0000000000 --- a/pype/plugins/nuke/_load_unused/extract_write_next_render.py +++ /dev/null @@ -1,24 +0,0 @@ -import pyblish.api - - -class WriteToRender(pyblish.api.InstancePlugin): - """Swith Render knob on write instance to on, - so next time publish will be set to render - """ - - order = pyblish.api.ExtractorOrder + 0.1 - label = "Write to render next" - optional = True - hosts = ["nuke", "nukeassist"] - families = ["write"] - - def process(self, instance): - return - if [f for f in instance.data["families"] - if ".frames" in f]: - instance[0]["render"].setValue(True) - self.log.info("Swith write node render to `on`") - else: - # swith to - instance[0]["render"].setValue(False) - self.log.info("Swith write node render to `Off`") diff --git a/pype/plugins/nuke/_load_unused/load_backdrop b/pype/plugins/nuke/_load_unused/load_backdrop deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/plugins/nuke/_publish_unused/collect_active_viewer.py b/pype/plugins/nuke/_publish_unused/collect_active_viewer.py deleted file mode 100644 index 5a6cc02b88..0000000000 --- a/pype/plugins/nuke/_publish_unused/collect_active_viewer.py +++ /dev/null @@ -1,14 +0,0 @@ -import pyblish.api -import nuke - - -class CollectActiveViewer(pyblish.api.ContextPlugin): - """Collect any active viewer from nodes - """ - - order = pyblish.api.CollectorOrder + 0.3 - label = "Collect Active Viewer" - hosts = ["nuke"] - - def process(self, context): - context.data["ActiveViewer"] = nuke.activeViewer() diff --git a/pype/plugins/nuke/_publish_unused/extract_frames.py 
b/pype/plugins/nuke/_publish_unused/extract_frames.py deleted file mode 100644 index b75f893802..0000000000 --- a/pype/plugins/nuke/_publish_unused/extract_frames.py +++ /dev/null @@ -1,22 +0,0 @@ -import pyblish - - -class ExtractFramesToIntegrate(pyblish.api.InstancePlugin): - """Extract rendered frames for integrator - """ - - order = pyblish.api.ExtractorOrder - label = "Extract rendered frames" - hosts = ["nuke"] - families = ["render"] - - def process(self, instance\ - return - - # staging_dir = instance.data.get('stagingDir', None) - # output_dir = instance.data.get('outputDir', None) - # - # if not staging_dir: - # staging_dir = output_dir - # instance.data['stagingDir'] = staging_dir - # # instance.data['transfer'] = False diff --git a/pype/plugins/nuke/_publish_unused/extract_nuke_write.py b/pype/plugins/nuke/_publish_unused/extract_nuke_write.py deleted file mode 100644 index 155b5cf56d..0000000000 --- a/pype/plugins/nuke/_publish_unused/extract_nuke_write.py +++ /dev/null @@ -1,116 +0,0 @@ -import os - -import nuke -import pyblish.api - - -class Extract(pyblish.api.InstancePlugin): - """Super class for write and writegeo extractors.""" - - order = pyblish.api.ExtractorOrder - optional = True - label = "Extract Nuke [super]" - hosts = ["nuke"] - match = pyblish.api.Subset - - # targets = ["process.local"] - - def execute(self, instance): - # Get frame range - node = instance[0] - first_frame = nuke.root()["first_frame"].value() - last_frame = nuke.root()["last_frame"].value() - - if node["use_limit"].value(): - first_frame = node["first"].value() - last_frame = node["last"].value() - - # Render frames - nuke.execute(node.name(), int(first_frame), int(last_frame)) - - -class ExtractNukeWrite(Extract): - """ Extract output from write nodes. 
""" - - families = ["write", "local"] - label = "Extract Write" - - def process(self, instance): - - self.execute(instance) - - # Validate output - for filename in list(instance.data["collection"]): - if not os.path.exists(filename): - instance.data["collection"].remove(filename) - self.log.warning("\"{0}\" didn't render.".format(filename)) - - -class ExtractNukeCache(Extract): - - label = "Cache" - families = ["cache", "local"] - - def process(self, instance): - - self.execute(instance) - - # Validate output - msg = "\"{0}\" didn't render.".format(instance.data["output_path"]) - assert os.path.exists(instance.data["output_path"]), msg - - -class ExtractNukeCamera(Extract): - - label = "Camera" - families = ["camera", "local"] - - def process(self, instance): - - node = instance[0] - node["writeGeometries"].setValue(False) - node["writePointClouds"].setValue(False) - node["writeAxes"].setValue(False) - - file_path = node["file"].getValue() - node["file"].setValue(instance.data["output_path"]) - - self.execute(instance) - - node["writeGeometries"].setValue(True) - node["writePointClouds"].setValue(True) - node["writeAxes"].setValue(True) - - node["file"].setValue(file_path) - - # Validate output - msg = "\"{0}\" didn't render.".format(instance.data["output_path"]) - assert os.path.exists(instance.data["output_path"]), msg - - -class ExtractNukeGeometry(Extract): - - label = "Geometry" - families = ["geometry", "local"] - - def process(self, instance): - - node = instance[0] - node["writeCameras"].setValue(False) - node["writePointClouds"].setValue(False) - node["writeAxes"].setValue(False) - - file_path = node["file"].getValue() - node["file"].setValue(instance.data["output_path"]) - - self.execute(instance) - - node["writeCameras"].setValue(True) - node["writePointClouds"].setValue(True) - node["writeAxes"].setValue(True) - - node["file"].setValue(file_path) - - # Validate output - msg = "\"{0}\" didn't render.".format(instance.data["output_path"]) - assert os.path.exists(instance.data["output_path"]), msg diff --git a/pype/plugins/nuke/_publish_unused/extract_script.py b/pype/plugins/nuke/_publish_unused/extract_script.py deleted file mode 100644 index 7d55ea0da4..0000000000 --- a/pype/plugins/nuke/_publish_unused/extract_script.py +++ /dev/null @@ -1,40 +0,0 @@ - -import pyblish.api -import os -import pype -import shutil - - -class ExtractScript(pype.api.Extractor): - """Publish script - """ - label = 'Extract Script' - order = pyblish.api.ExtractorOrder - 0.05 - optional = True - hosts = ['nuke'] - families = ["workfile"] - - def process(self, instance): - self.log.debug("instance extracting: {}".format(instance.data)) - current_script = instance.context.data["currentFile"] - - # Define extract output file path - stagingdir = self.staging_dir(instance) - filename = "{0}".format(instance.data["name"]) - path = os.path.join(stagingdir, filename) - - self.log.info("Performing extraction..") - shutil.copy(current_script, path) - - if "representations" not in instance.data: - instance.data["representations"] = list() - - representation = { - 'name': 'nk', - 'ext': '.nk', - 'files': filename, - "stagingDir": stagingdir, - } - instance.data["representations"].append(representation) - - self.log.info("Extracted instance '%s' to: %s" % (instance.name, path)) diff --git a/pype/plugins/nuke/_publish_unused/integrate_staging_dir.py b/pype/plugins/nuke/_publish_unused/integrate_staging_dir.py deleted file mode 100644 index e05c42ae50..0000000000 --- 
a/pype/plugins/nuke/_publish_unused/integrate_staging_dir.py +++ /dev/null @@ -1,27 +0,0 @@ -import pyblish.api -import shutil -import os - - -class CopyStagingDir(pyblish.api.InstancePlugin): - """Copy data rendered into temp local directory - """ - - order = pyblish.api.IntegratorOrder - 2 - label = "Copy data from temp dir" - hosts = ["nuke", "nukeassist"] - families = ["render.local"] - - def process(self, instance): - temp_dir = instance.data.get("stagingDir") - output_dir = instance.data.get("outputDir") - - # copy data to correct dir - if not os.path.exists(output_dir): - os.makedirs(output_dir) - self.log.info("output dir has been created") - - for f in os.listdir(temp_dir): - self.log.info("copy file to correct destination: {}".format(f)) - shutil.copy(os.path.join(temp_dir, os.path.basename(f)), - os.path.join(output_dir, os.path.basename(f))) diff --git a/pype/plugins/nuke/_publish_unused/publish_image_sequences.py b/pype/plugins/nuke/_publish_unused/publish_image_sequences.py deleted file mode 100644 index 34634dcc6b..0000000000 --- a/pype/plugins/nuke/_publish_unused/publish_image_sequences.py +++ /dev/null @@ -1,98 +0,0 @@ -import re -import os -import json -import subprocess - -import pyblish.api - -from pype.action import get_errored_plugins_from_data - - -def _get_script(): - """Get path to the image sequence script""" - - # todo: use a more elegant way to get the python script - - try: - from pype.fusion.scripts import publish_filesequence - except Exception: - raise RuntimeError("Expected module 'publish_imagesequence'" - "to be available") - - module_path = publish_filesequence.__file__ - if module_path.endswith(".pyc"): - module_path = module_path[:-len(".pyc")] + ".py" - - return module_path - - -class PublishImageSequence(pyblish.api.InstancePlugin): - """Publish the generated local image sequences.""" - - order = pyblish.api.IntegratorOrder - label = "Publish Rendered Image Sequence(s)" - hosts = ["fusion"] - families = ["saver.renderlocal"] - - def process(self, instance): - - # Skip this plug-in if the ExtractImageSequence failed - errored_plugins = get_errored_plugins_from_data(instance.context) - if any(plugin.__name__ == "FusionRenderLocal" for plugin in - errored_plugins): - raise RuntimeError("Fusion local render failed, " - "publishing images skipped.") - - subset = instance.data["subset"] - ext = instance.data["ext"] - - # Regex to match resulting renders - regex = "^{subset}.*[0-9]+{ext}+$".format(subset=re.escape(subset), - ext=re.escape(ext)) - - # The instance has most of the information already stored - metadata = { - "regex": regex, - "frameStart": instance.context.data["frameStart"], - "frameEnd": instance.context.data["frameEnd"], - "families": ["imagesequence"], - } - - # Write metadata and store the path in the instance - output_directory = instance.data["outputDir"] - path = os.path.join(output_directory, - "{}_metadata.json".format(subset)) - with open(path, "w") as f: - json.dump(metadata, f) - - assert os.path.isfile(path), ("Stored path is not a file for %s" - % instance.data["name"]) - - # Suppress any subprocess console - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - startupinfo.wShowWindow = subprocess.SW_HIDE - - process = subprocess.Popen(["python", _get_script(), - "--paths", path], - bufsize=1, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - startupinfo=startupinfo) - - while True: - output = process.stdout.readline() - # Break when there is no output or a return code has been 
given - if output == '' and process.poll() is not None: - process.stdout.close() - break - if output: - line = output.strip() - if line.startswith("ERROR"): - self.log.error(line) - else: - self.log.info(line) - - if process.returncode != 0: - raise RuntimeError("Process quit with non-zero " - "return code: {}".format(process.returncode)) diff --git a/pype/plugins/nuke/_publish_unused/validate_active_viewer.py b/pype/plugins/nuke/_publish_unused/validate_active_viewer.py deleted file mode 100644 index 618a7f1502..0000000000 --- a/pype/plugins/nuke/_publish_unused/validate_active_viewer.py +++ /dev/null @@ -1,24 +0,0 @@ -import pyblish.api -import nuke - - -class ValidateActiveViewer(pyblish.api.ContextPlugin): - """Validate presentse of the active viewer from nodes - """ - - order = pyblish.api.ValidatorOrder - label = "Validate Active Viewer" - hosts = ["nuke"] - - def process(self, context): - viewer_process_node = context.data.get("ViewerProcess") - - assert viewer_process_node, ( - "Missing active viewer process! Please click on output write node and push key number 1-9" - ) - active_viewer = context.data["ActiveViewer"] - active_input = active_viewer.activeInput() - - assert active_input is not None, ( - "Missing active viewer input! Please click on output write node and push key number 1-9" - ) diff --git a/pype/plugins/nuke/_publish_unused/validate_version_match.py b/pype/plugins/nuke/_publish_unused/validate_version_match.py deleted file mode 100644 index 1358d9a7b3..0000000000 --- a/pype/plugins/nuke/_publish_unused/validate_version_match.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -import pyblish.api -import pype.utils - - - -@pyblish.api.log -class RepairNukeWriteNodeVersionAction(pyblish.api.Action): - label = "Repair" - on = "failed" - icon = "wrench" - - def process(self, context, plugin): - import pype.nuke.lib as nukelib - instances = pype.utils.filter_instances(context, plugin) - - for instance in instances: - node = instance[0] - render_path = nukelib.get_render_path(node) - self.log.info("render_path: {}".format(render_path)) - node['file'].setValue(render_path.replace("\\", "/")) - - -class ValidateVersionMatch(pyblish.api.InstancePlugin): - """Checks if write version matches workfile version""" - - label = "Validate Version Match" - order = pyblish.api.ValidatorOrder - actions = [RepairNukeWriteNodeVersionAction] - hosts = ["nuke"] - families = ['write'] - - def process(self, instance): - - assert instance.data['version'] == instance.context.data['version'], "\ - Version in write doesn't match version of the workfile" diff --git a/pype/plugins/nuke/_publish_unused/validate_write_families.py b/pype/plugins/nuke/_publish_unused/validate_write_families.py deleted file mode 100644 index 73f710867d..0000000000 --- a/pype/plugins/nuke/_publish_unused/validate_write_families.py +++ /dev/null @@ -1,59 +0,0 @@ - -import pyblish.api -import pype.api -import pype.nuke.actions - - -class RepairWriteFamiliesAction(pyblish.api.Action): - label = "Fix Write's render attributes" - on = "failed" - icon = "wrench" - - def process(self, instance, plugin): - self.log.info("instance {}".format(instance)) - instance["render"].setValue(True) - self.log.info("Rendering toggled ON") - - -@pyblish.api.log -class ValidateWriteFamilies(pyblish.api.InstancePlugin): - """ Validates write families. 
""" - - order = pyblish.api.ValidatorOrder - label = "Valitade writes families" - hosts = ["nuke"] - families = ["write"] - actions = [pype.nuke.actions.SelectInvalidAction, pype.api.RepairAction] - - @staticmethod - def get_invalid(self, instance): - if not [f for f in instance.data["families"] - if ".frames" in f]: - return - - if not instance.data.get('files'): - return (instance) - - def process(self, instance): - self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) - - invalid = self.get_invalid(self, instance) - - if invalid: - raise ValueError(str("`{}`: Switch `Render` on! " - "> {}".format(__name__, invalid))) - - # if any(".frames" in f for f in instance.data["families"]): - # if not instance.data["files"]: - # raise ValueError("instance {} is set to publish frames\ - # but no files were collected, render the frames first or\ - # check 'render' checkbox onthe no to 'ON'".format(instance))) - # - # - # self.log.info("Checked correct writes families") - - @classmethod - def repair(cls, instance): - cls.log.info("instance {}".format(instance)) - instance[0]["render"].setValue(True) - cls.log.info("Rendering toggled ON") diff --git a/pype/plugins/nukestudio/publish/validate_version.py b/pype/plugins/nukestudio/publish/validate_version.py deleted file mode 100644 index ebb8f357f8..0000000000 --- a/pype/plugins/nukestudio/publish/validate_version.py +++ /dev/null @@ -1,79 +0,0 @@ -import pyblish -from avalon import io -from pype.action import get_errored_instances_from_context -import pype.api as pype - - -@pyblish.api.log -class RepairNukestudioVersionUp(pyblish.api.Action): - label = "Version Up Workfile" - on = "failed" - icon = "wrench" - - def process(self, context, plugin): - - errored_instances = get_errored_instances_from_context(context) - - # Apply pyblish logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(errored_instances, plugin) - - if instances: - project = context.data["activeProject"] - path = context.data.get("currentFile") - - new_path = pype.version_up(path) - - if project: - project.saveAs(new_path) - - self.log.info("Project workfile version was fixed") - - -class ValidateVersion(pyblish.api.InstancePlugin): - """Validate clip's versions. - - """ - - order = pyblish.api.ValidatorOrder - families = ["plate"] - label = "Validate Version" - actions = [RepairNukestudioVersionUp] - hosts = ["nukestudio"] - - def process(self, instance): - version = int(instance.data.get("version", 0)) - asset_name = instance.data.get("asset", None) - subset_name = instance.data.get("subset", None) - - assert version, "The file is missing version string! example: filename_v001.hrox `{}`" - - self.log.debug("Collected version: `{0}`".format(version)) - - found_v = 0 - try: - io.install() - project = io.find_one({"type": "project"}) - - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] - }) - - subset = io.find_one({ - "type": "subset", - "parent": asset["_id"], - "name": subset_name - }) - - version_db = io.find_one({ - 'type': 'version', - 'parent': subset["_id"], - 'name': version - }) or {} - found_v = version_db.get("name", 0) - self.log.debug("Found version: `{0}`".format(found_v)) - except Exception as e: - self.log.debug("Problem to get data from database for asset `{0}` subset `{1}`. 
Error: `{2}`".format(asset_name, subset_name, e)) - - assert (found_v != version), "Version must not be the same as in database `{0}`, Versions file: `{1}`, db: `{2}`".format(asset_name, version, found_v) From fb710bbef45ebebfcac50b9cf9dc69651e0ab9e1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 31 Jan 2020 18:58:51 +0100 Subject: [PATCH 02/10] fix(nuke): remove commented code --- pype/nuke/__init__.py | 36 ------------------------------------ 1 file changed, 36 deletions(-) diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index dfd61f4b39..f1f87e40c8 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -33,42 +33,6 @@ if os.getenv("PYBLISH_GUI", None): pyblish.register_gui(os.getenv("PYBLISH_GUI", None)) -# class NukeHandler(logging.Handler): -# ''' -# Nuke Handler - emits logs into nuke's script editor. -# warning will emit nuke.warning() -# critical and fatal would popup msg dialog to alert of the error. -# ''' -# -# def __init__(self): -# logging.Handler.__init__(self) -# self.set_name("Pype_Nuke_Handler") -# -# def emit(self, record): -# # Formated message: -# msg = self.format(record) -# -# if record.levelname.lower() in [ -# # "warning", -# "critical", -# "fatal", -# "error" -# ]: -# msg = self.format(record) -# nuke.message(msg) -# -# -# '''Adding Nuke Logging Handler''' -# log.info([handler.get_name() for handler in logging.root.handlers[:]]) -# nuke_handler = NukeHandler() -# if nuke_handler.get_name() \ -# not in [handler.get_name() -# for handler in logging.root.handlers[:]]: -# logging.getLogger().addHandler(nuke_handler) -# logging.getLogger().setLevel(logging.INFO) -# log.info([handler.get_name() for handler in logging.root.handlers[:]]) - - def reload_config(): """Attempt to reload pipeline at run-time. 
From f3bc7258df212d34d7882bdb6bf2ad662c87739d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Feb 2020 00:19:43 +0100 Subject: [PATCH 03/10] clean(nuke): commented code --- pype/plugins/nuke/publish/validate_script.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/pype/plugins/nuke/publish/validate_script.py b/pype/plugins/nuke/publish/validate_script.py index 307e3ade59..f7dd84d714 100644 --- a/pype/plugins/nuke/publish/validate_script.py +++ b/pype/plugins/nuke/publish/validate_script.py @@ -15,12 +15,6 @@ class ValidateScript(pyblish.api.InstancePlugin): def process(self, instance): ctx_data = instance.context.data asset_name = ctx_data["asset"] - - # asset = io.find_one({ - # "type": "asset", - # "name": asset_name - # }) - asset = lib.get_asset(asset_name) asset_data = asset["data"] From b68cbf4be0c41152a4008d2312626a0e6075cf4b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Feb 2020 16:59:22 +0100 Subject: [PATCH 04/10] fix(nuke): didn't create write node --- pype/nuke/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 2ed9f75513..6d6e7de1b2 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -374,7 +374,7 @@ def create_write_node(name, data, input=None, prenodes=None): now_node.setInput(0, prev_node) # imprinting group node - avalon.nuke.imprint(GN, data["avalon"], tab="Pype") + avalon.nuke.imprint(GN, data["avalon"]) divider = nuke.Text_Knob('') GN.addKnob(divider) From 979cad41dceeb2a9dffd62d82b55ccd5edef06d0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Feb 2020 16:59:59 +0100 Subject: [PATCH 05/10] fix(nks): didn't publish plates --- pype/plugins/nukestudio/publish/collect_plates.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pype/plugins/nukestudio/publish/collect_plates.py b/pype/plugins/nukestudio/publish/collect_plates.py index be448931c8..70f0f7407e 100644 --- a/pype/plugins/nukestudio/publish/collect_plates.py +++ b/pype/plugins/nukestudio/publish/collect_plates.py @@ -156,8 +156,9 @@ class CollectPlatesData(api.InstancePlugin): ext=ext ) - start_frame = source_first_frame + instance.data["sourceInH"] - duration = instance.data["sourceOutH"] - instance.data["sourceInH"] + start_frame = int(source_first_frame + instance.data["sourceInH"]) + duration = int( + instance.data["sourceOutH"] - instance.data["sourceInH"]) end_frame = start_frame + duration self.log.debug("start_frame: `{}`".format(start_frame)) self.log.debug("end_frame: `{}`".format(end_frame)) From d0f0129c2c585b4f493e8b0bbd68f24f317849eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Feb 2020 17:40:07 +0100 Subject: [PATCH 06/10] fix(nk): loaders use self log --- pype/plugins/nuke/load/load_sequence.py | 31 +++++++++++-------------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 76ff7d2cb6..db77c53aff 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -5,10 +5,6 @@ import contextlib from avalon import api, io from pype.nuke import presets -from pype.api import Logger - -log = Logger().get_logger(__name__, "nuke") - @contextlib.contextmanager def preserve_trim(node): @@ -35,14 +31,14 @@ def preserve_trim(node): if start_at_frame: node['frame_mode'].setValue("start at") node['frame'].setValue(str(script_start)) - log.info("start frame of Read was set to" - "{}".format(script_start)) + print("start frame of Read was set to" +
"{}".format(script_start)) if offset_frame: node['frame_mode'].setValue("offset") node['frame'].setValue(str((script_start + offset_frame))) - log.info("start frame of Read was set to" - "{}".format(script_start)) + print("start frame of Read was set to" + "{}".format(script_start)) def loader_shift(node, frame, relative=True): @@ -74,7 +70,7 @@ def loader_shift(node, frame, relative=True): class LoadSequence(api.Loader): """Load image sequence into Nuke""" - families = ["write", "source", "plate", "render"] + families = ["render2d", "source", "plate", "render"] representations = ["exr", "dpx", "jpg", "jpeg", "png"] label = "Load sequence" @@ -91,7 +87,7 @@ class LoadSequence(api.Loader): version = context['version'] version_data = version.get("data", {}) - log.info("version_data: {}\n".format(version_data)) + self.log.info("version_data: {}\n".format(version_data)) self.first_frame = int(nuke.root()["first_frame"].getValue()) self.handle_start = version_data.get("handleStart", 0) @@ -111,7 +107,7 @@ class LoadSequence(api.Loader): if not file: repr_id = context["representation"]["_id"] - log.warning( + self.log.warning( "Representation id `{}` is failing to load".format(repr_id)) return @@ -242,7 +238,7 @@ class LoadSequence(api.Loader): if not file: repr_id = representation["_id"] - log.warning( + self.log.warning( "Representation id `{}` is failing to load".format(repr_id)) return @@ -277,9 +273,10 @@ class LoadSequence(api.Loader): last = version_data.get("frameEnd") if first is None: - log.warning("Missing start frame for updated version" - "assuming starts at frame 0 for: " - "{} ({})".format(node['name'].value(), representation)) + self.log.warning("Missing start frame for updated version" + "assuming starts at frame 0 for: " + "{} ({})".format( + node['name'].value(), representation)) first = 0 first -= self.handle_start @@ -288,7 +285,7 @@ class LoadSequence(api.Loader): # Update the loader's path whilst preserving some values with preserve_trim(node): node["file"].setValue(file) - log.info("__ node['file']: {}".format(node["file"].value())) + self.log.info("__ node['file']: {}".format(node["file"].value())) # Set the global in to the start frame of the sequence loader_shift(node, first, relative=True) @@ -328,7 +325,7 @@ class LoadSequence(api.Loader): node, updated_dict ) - log.info("udated to version: {}".format(version.get("name"))) + self.log.info("udated to version: {}".format(version.get("name"))) def remove(self, container): From 1b1770dd638cd684a1f69c76c5fb06eef9739ad8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Feb 2020 17:40:37 +0100 Subject: [PATCH 07/10] fix(nuke): loader mov reads revew presets for family and representation --- pype/plugins/nuke/load/load_mov.py | 58 +++++++++++++++++++++--------- 1 file changed, 41 insertions(+), 17 deletions(-) diff --git a/pype/plugins/nuke/load/load_mov.py b/pype/plugins/nuke/load/load_mov.py index fccba4c573..77346a82a4 100644 --- a/pype/plugins/nuke/load/load_mov.py +++ b/pype/plugins/nuke/load/load_mov.py @@ -4,9 +4,7 @@ import contextlib from avalon import api, io from pype.nuke import presets - -from pype.api import Logger -log = Logger().get_logger(__name__, "nuke") +from pypeapp import config @contextlib.contextmanager @@ -34,14 +32,14 @@ def preserve_trim(node): if start_at_frame: node['frame_mode'].setValue("start at") node['frame'].setValue(str(script_start)) - log.info("start frame of Read was set to" - "{}".format(script_start)) + print("start frame of Read was set to" + "{}".format(script_start)) if 
offset_frame: node['frame_mode'].setValue("offset") node['frame'].setValue(str((script_start + offset_frame))) - log.info("start frame of Read was set to" - "{}".format(script_start)) + print("start frame of Read was set to" + "{}".format(script_start)) def loader_shift(node, frame, relative=True): @@ -70,11 +68,37 @@ def loader_shift(node, frame, relative=True): return int(script_start) +def add_review_presets_config(): + returning = { + "families": list(), + "representations": list() + } + review_presets = config.get_presets()["plugins"]["global"]["publish"].get( + "ExtractReview", {}) + + outputs = review_presets.get("outputs", {}) + # + for output, properties in outputs.items(): + returning["representations"].append(output) + returning["families"] += properties.get("families", []) + + return returning + + class LoadMov(api.Loader): """Load mov file into Nuke""" + presets = add_review_presets_config() + families = [ + "source", + "plate", + "render", + "review"] + presets["families"] - families = ["write", "source", "plate", "render", "review"] - representations = ["wipmov", "h264", "mov", "preview", "review", "mp4"] + representations = [ + "mov", + "preview", + "review", + "mp4"] + presets["representations"] label = "Load mov" order = -10 @@ -115,7 +139,7 @@ class LoadMov(api.Loader): if not file: repr_id = context["representation"]["_id"] - log.warning( + self.log.warning( "Representation id `{}` is failing to load".format(repr_id)) return @@ -211,7 +235,7 @@ class LoadMov(api.Loader): if not file: repr_id = representation["_id"] - log.warning( + self.log.warning( "Representation id `{}` is failing to load".format(repr_id)) return @@ -246,9 +270,10 @@ class LoadMov(api.Loader): colorspace = version_data.get("colorspace") if first is None: - log.warning("Missing start frame for updated version" - "assuming starts at frame 0 for: " - "{} ({})".format(node['name'].value(), representation)) + self.log.warning("Missing start frame for updated version" + "assuming starts at frame 0 for: " + "{} ({})".format( + node['name'].value(), representation)) first = 0 # fix handle start and end if none are available @@ -264,7 +289,7 @@ class LoadMov(api.Loader): # Update the loader's path whilst preserving some values with preserve_trim(node): node["file"].setValue(file) - log.info("__ node['file']: {}".format(node["file"].value())) + self.log.info("__ node['file']: {}".format(node["file"].value())) # Set the global in to the start frame of the sequence loader_shift(node, first, relative=True) @@ -290,7 +315,6 @@ class LoadMov(api.Loader): if preset_clrsp is not None: node["colorspace"].setValue(str(preset_clrsp)) - updated_dict = {} updated_dict.update({ "representation": str(representation["_id"]), @@ -316,7 +340,7 @@ class LoadMov(api.Loader): update_container( node, updated_dict ) - log.info("udated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version.get("name"))) def remove(self, container): From 3e6ce6c1644fbdd63deece2bb756b4705ab39f58 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 7 Feb 2020 20:48:38 +0100 Subject: [PATCH 08/10] feat(nuke): setting colorspace to write and Reads from presets --- pype/nuke/lib.py | 101 +++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 98 insertions(+), 3 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 6d6e7de1b2..a7f1b64eec 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -645,15 +645,105 @@ class WorkfileSettings(object): write_dict (dict): nuke write node as
dictionary ''' - # TODO: complete this function so any write node in # scene will have fixed colorspace following presets for the project if not isinstance(write_dict, dict): msg = "set_root_colorspace(): argument should be dictionary" - nuke.message(msg) log.error(msg) return - log.debug("__ set_writes_colorspace(): {}".format(write_dict)) + from avalon.nuke import get_avalon_knob_data + + for node in nuke.allNodes(): + + if node.Class() in ["Viewer", "Dot"]: + continue + + # get data from avalon knob + avalon_knob_data = get_avalon_knob_data(node, ["avalon:", "ak:"]) + + if not avalon_knob_data: + continue + + if avalon_knob_data["id"] != "pyblish.avalon.instance": + continue + + # establish families + families = [avalon_knob_data["family"]] + if avalon_knob_data.get("families"): + families.append(avalon_knob_data.get("families")) + + # except disabled nodes but exclude backdrops in test + for fmly, knob in write_dict.items(): + write = None + if (fmly in families): + # Add all nodes in group instances. + if node.Class() == "Group": + node.begin() + for x in nuke.allNodes(): + if x.Class() == "Write": + write = x + node.end() + elif node.Class() == "Write": + write = node + else: + log.warning("Wrong write node Class") + + write["colorspace"].setValue(str(knob["colorspace"])) + log.info( + "Setting `{0}` to `{1}`".format( + write.name(), + knob["colorspace"])) + + def set_reads_colorspace(self, reads): + """ Setting colorspace to Read nodes + + Looping through all read nodes and trying to set colorspace based on regex rules in presets + """ + changes = dict() + for n in nuke.allNodes(): + file = nuke.filename(n) + if not n.Class() == "Read": + continue + + # load nuke presets for Read's colorspace + read_clrs_presets = get_colorspace_preset().get( + "nuke", {}).get("read", {}) + + # check if any colorspace presets for read is matching + preset_clrsp = next((read_clrs_presets[k] + for k in read_clrs_presets + if bool(re.search(k, file))), + None) + log.debug(preset_clrsp) + if preset_clrsp is not None: + current = n["colorspace"].value() + future = str(preset_clrsp) + if current != future: + changes.update({ + n.name(): { + "from": current, + "to": future + } + }) + log.debug(changes) + if changes: + msg = "Read nodes are not set to correct colorspace:\n\n" + for nname, knobs in changes.items(): + msg += str(" - node: '{0}' is now '{1}' " + "but should be '{2}'\n").format( + nname, knobs["from"], knobs["to"] + ) + + msg += "\nWould you like to change it?"
+ + if nuke.ask(msg): + for nname, knobs in changes.items(): + n = nuke.toNode(nname) + n["colorspace"].setValue(knobs["to"]) + log.info( + "Setting `{0}` to `{1}`".format( + nname, + knobs["to"])) def set_colorspace(self): ''' Setting colorpace following presets @@ -671,6 +761,7 @@ class WorkfileSettings(object): msg = "set_colorspace(): missing `viewer` settings in template" nuke.message(msg) log.error(msg) + try: self.set_writes_colorspace(nuke_colorspace["write"]) except AttributeError: @@ -678,6 +769,10 @@ class WorkfileSettings(object): nuke.message(msg) log.error(msg) + reads = nuke_colorspace.get("read") + if reads: + self.set_reads_colorspace(reads) + try: for key in nuke_colorspace: log.debug("Preset's colorspace key: {}".format(key)) From 1a84b605a162ab381a459e9421b1d0e3e32677ab Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 7 Feb 2020 20:49:07 +0100 Subject: [PATCH 09/10] fix(nuke, nks): removing `handles` obsolete --- pype/plugins/nukestudio/publish/collect_clips.py | 1 - pype/plugins/nukestudio/publish/collect_effects.py | 12 +++++++++--- pype/plugins/nukestudio/publish/collect_handles.py | 4 ---- .../nukestudio/publish/collect_hierarchy_context.py | 1 - pype/plugins/nukestudio/publish/collect_plates.py | 1 - pype/plugins/nukestudio/publish/collect_reviews.py | 8 +++++--- .../nukestudio/publish/collect_tag_handles.py | 12 +++++++----- pype/plugins/nukestudio/publish/extract_effects.py | 11 +++++++---- 8 files changed, 28 insertions(+), 22 deletions(-) diff --git a/pype/plugins/nukestudio/publish/collect_clips.py b/pype/plugins/nukestudio/publish/collect_clips.py index 3759d50f6a..4525b4947f 100644 --- a/pype/plugins/nukestudio/publish/collect_clips.py +++ b/pype/plugins/nukestudio/publish/collect_clips.py @@ -105,7 +105,6 @@ class CollectClips(api.ContextPlugin): "asset": asset, "family": "clip", "families": [], - "handles": 0, "handleStart": projectdata.get("handleStart", 0), "handleEnd": projectdata.get("handleEnd", 0), "version": int(version)}) diff --git a/pype/plugins/nukestudio/publish/collect_effects.py b/pype/plugins/nukestudio/publish/collect_effects.py index 0aee0adf2e..55ff849c88 100644 --- a/pype/plugins/nukestudio/publish/collect_effects.py +++ b/pype/plugins/nukestudio/publish/collect_effects.py @@ -11,7 +11,9 @@ class CollectVideoTracksLuts(pyblish.api.InstancePlugin): def process(self, instance): - self.log.debug("Finding soft effect for subset: `{}`".format(instance.data.get("subset"))) + self.log.debug( + "Finding soft effect for subset: `{}`".format( + instance.data.get("subset"))) # taking active sequence subset = instance.data.get("subset") @@ -41,8 +43,12 @@ class CollectVideoTracksLuts(pyblish.api.InstancePlugin): if len(instance.data.get("effectTrackItems", {}).keys()) > 0: instance.data["families"] += ["lut"] - self.log.debug("effects.keys: {}".format(instance.data.get("effectTrackItems", {}).keys())) - self.log.debug("effects: {}".format(instance.data.get("effectTrackItems", {}))) + self.log.debug( + "effects.keys: {}".format( + instance.data.get("effectTrackItems", {}).keys())) + self.log.debug( + "effects: {}".format( + instance.data.get("effectTrackItems", {}))) def add_effect(self, instance, track_index, item): track = item.parentTrack().name() diff --git a/pype/plugins/nukestudio/publish/collect_handles.py b/pype/plugins/nukestudio/publish/collect_handles.py index 8da83e715b..28f502d846 100644 --- a/pype/plugins/nukestudio/publish/collect_handles.py +++ b/pype/plugins/nukestudio/publish/collect_handles.py @@ -24,7 +24,6 @@ class 
CollectClipHandles(api.ContextPlugin): continue # get handles - handles = int(instance.data["handles"]) handle_start = int(instance.data["handleStart"]) handle_end = int(instance.data["handleEnd"]) @@ -38,19 +37,16 @@ class CollectClipHandles(api.ContextPlugin): self.log.debug("Adding to shared assets: `{}`".format( instance.data["name"])) asset_shared.update({ - "handles": handles, "handleStart": handle_start, "handleEnd": handle_end }) - for instance in filtered_instances: if not instance.data.get("main") and not instance.data.get("handleTag"): self.log.debug("Synchronize handles on: `{}`".format( instance.data["name"])) name = instance.data["asset"] s_asset_data = assets_shared.get(name) - instance.data["handles"] = s_asset_data.get("handles", 0) instance.data["handleStart"] = s_asset_data.get( "handleStart", 0 ) diff --git a/pype/plugins/nukestudio/publish/collect_hierarchy_context.py b/pype/plugins/nukestudio/publish/collect_hierarchy_context.py index 5f29837d80..5085b9719e 100644 --- a/pype/plugins/nukestudio/publish/collect_hierarchy_context.py +++ b/pype/plugins/nukestudio/publish/collect_hierarchy_context.py @@ -263,7 +263,6 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin): # get custom attributes of the shot if instance.data.get("main"): in_info['custom_attributes'] = { - 'handles': int(instance.data.get('handles', 0)), "handleStart": handle_start, "handleEnd": handle_end, "frameStart": instance.data["frameStart"], diff --git a/pype/plugins/nukestudio/publish/collect_plates.py b/pype/plugins/nukestudio/publish/collect_plates.py index 70f0f7407e..b98eccce7f 100644 --- a/pype/plugins/nukestudio/publish/collect_plates.py +++ b/pype/plugins/nukestudio/publish/collect_plates.py @@ -134,7 +134,6 @@ class CollectPlatesData(api.InstancePlugin): # add to data of representation version_data.update({ - "handles": version_data['handleStart'], "colorspace": item.sourceMediaColourTransform(), "colorspaceScript": instance.context.data["colorspace"], "families": [f for f in families if 'ftrack' not in f], diff --git a/pype/plugins/nukestudio/publish/collect_reviews.py b/pype/plugins/nukestudio/publish/collect_reviews.py index f9032b2ca4..f223e5ca65 100644 --- a/pype/plugins/nukestudio/publish/collect_reviews.py +++ b/pype/plugins/nukestudio/publish/collect_reviews.py @@ -125,7 +125,7 @@ class CollectReviews(api.InstancePlugin): thumb_path, format='png' ) - + self.log.debug("__ sourceIn: `{}`".format(instance.data["sourceIn"])) self.log.debug("__ thumbnail: `{}`, frame: `{}`".format(thumbnail, thumb_frame)) @@ -145,7 +145,10 @@ class CollectReviews(api.InstancePlugin): item = instance.data["item"] transfer_data = [ - "handleStart", "handleEnd", "sourceIn", "sourceOut", "frameStart", "frameEnd", "sourceInH", "sourceOutH", "clipIn", "clipOut", "clipInH", "clipOutH", "asset", "track", "version" + "handleStart", "handleEnd", "sourceIn", "sourceOut", + "frameStart", "frameEnd", "sourceInH", "sourceOutH", + "clipIn", "clipOut", "clipInH", "clipOutH", "asset", + "track", "version" ] version_data = dict() @@ -154,7 +157,6 @@ class CollectReviews(api.InstancePlugin): # add to data of representation version_data.update({ - "handles": version_data['handleStart'], "colorspace": item.sourceMediaColourTransform(), "families": instance.data["families"], "subset": instance.data["subset"], diff --git a/pype/plugins/nukestudio/publish/collect_tag_handles.py b/pype/plugins/nukestudio/publish/collect_tag_handles.py index 929f5e3b68..a6a63faea9 100644 --- 
a/pype/plugins/nukestudio/publish/collect_tag_handles.py +++ b/pype/plugins/nukestudio/publish/collect_tag_handles.py @@ -38,7 +38,9 @@ class CollectClipTagHandles(api.ContextPlugin): # gets arguments if there are any t_args = t_metadata.get("tag.args", "") - assert t_args, self.log.error("Tag with Handles is missing Args. Use only handle start/end") + assert t_args, self.log.error( + "Tag with Handles is missing Args. " + "Use only handle start/end") t_args = json.loads(t_args.replace("'", "\"")) # add in start @@ -55,8 +57,8 @@ class CollectClipTagHandles(api.ContextPlugin): # adding handles to asset_shared on context if instance.data.get("handleEnd"): - assets_shared_a["handleEnd"] = instance.data["handleEnd"] + assets_shared_a[ + "handleEnd"] = instance.data["handleEnd"] if instance.data.get("handleStart"): - assets_shared_a["handleStart"] = instance.data["handleStart"] - if instance.data.get("handles"): - assets_shared_a["handles"] = instance.data["handles"] + assets_shared_a[ + "handleStart"] = instance.data["handleStart"] diff --git a/pype/plugins/nukestudio/publish/extract_effects.py b/pype/plugins/nukestudio/publish/extract_effects.py index 15d2a80a55..5e2721aa8e 100644 --- a/pype/plugins/nukestudio/publish/extract_effects.py +++ b/pype/plugins/nukestudio/publish/extract_effects.py @@ -6,6 +6,7 @@ import pyblish.api import tempfile from avalon import io, api + class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): """Collect video tracks effects into context.""" @@ -17,9 +18,12 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): item = instance.data["item"] effects = instance.data.get("effectTrackItems") - instance.data["families"] = [f for f in instance.data.get("families", []) if f not in ["lut"]] + instance.data["families"] = [f for f in instance.data.get( + "families", []) if f not in ["lut"]] - self.log.debug("___ instance.data[families]: `{}`".format(instance.data["families"])) + self.log.debug( + "__ instance.data[families]: `{}`".format( + instance.data["families"])) # skip any without effects if not effects: @@ -102,7 +106,6 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): # add to data of representation version_data.update({ - "handles": version_data['handleStart'], "colorspace": item.sourceMediaColourTransform(), "colorspaceScript": instance.context.data["colorspace"], "families": ["plate", "lut"], @@ -132,7 +135,7 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): def copy_linked_files(self, effect, dst_dir): for k, v in effect["node"].items(): - if k in "file" and v is not '': + if k in "file" and v != '': base_name = os.path.basename(v) dst = os.path.join(dst_dir, base_name).replace("\\", "/") From 2da3de670eda9c2cf9e6fc679ca19c291d1e8128 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 10 Feb 2020 22:44:29 +0100 Subject: [PATCH 10/10] add version --- pype/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/__init__.py b/pype/__init__.py index 91b72d7de5..89c653bf6f 100644 --- a/pype/__init__.py +++ b/pype/__init__.py @@ -9,7 +9,7 @@ from pypeapp import config import logging log = logging.getLogger(__name__) -__version__ = "2.3.0" +__version__ = "2.5.0" PACKAGE_DIR = os.path.dirname(__file__) PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")