From 16189b56918acebb51295b93f157f0d892e5497e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 2 Oct 2019 13:45:01 +0200 Subject: [PATCH 1/6] fix: some changes improving publishing and loading luts --- pype/plugins/global/publish/integrate_new.py | 2 + pype/plugins/nuke/load/load_luts.py | 322 +++++++++++++++++ pype/plugins/nuke/load/load_luts_ip.py | 335 ++++++++++++++++++ .../nuke/publish/validate_active_viewer.py | 24 ++ 4 files changed, 683 insertions(+) create mode 100644 pype/plugins/nuke/load/load_luts.py create mode 100644 pype/plugins/nuke/load/load_luts_ip.py create mode 100644 pype/plugins/nuke/publish/validate_active_viewer.py diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index d9e4f3f533..e87ee97087 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -429,6 +429,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): Returns: None """ + src = os.path.normpath(src) + dst = os.path.normpath(dst) self.log.debug("Copying file .. 
{} -> {}".format(src, dst)) dirname = os.path.dirname(dst) diff --git a/pype/plugins/nuke/load/load_luts.py b/pype/plugins/nuke/load/load_luts.py new file mode 100644 index 0000000000..4f7c19a588 --- /dev/null +++ b/pype/plugins/nuke/load/load_luts.py @@ -0,0 +1,322 @@ +from avalon import api, style, io +import nuke +import json +from collections import OrderedDict + + +class LoadLuts(api.Loader): + """Loading colorspace soft effect exported from nukestudio""" + + representations = ["lutJson"] + families = ["lut"] + + label = "Load Luts - nodes" + order = 0 + icon = "cc" + color = style.colors.light + ignore_attr = ["useLifetime"] + + def load(self, context, name, namespace, data): + """ + Loading function to get the soft effects to particular read node + + Arguments: + context (dict): context of version + name (str): name of the version + namespace (str): asset name + data (dict): compulsory attribute > not used + + Returns: + nuke node: containerised nuke node object + """ + # import dependencies + from avalon.nuke import containerise + + # get main variables + version = context['version'] + version_data = version.get("data", {}) + vname = version.get("name", None) + first = version_data.get("frameStart", None) + last = version_data.get("frameEnd", None) + workfile_first_frame = int(nuke.root()["first_frame"].getValue()) + namespace = namespace or context['asset']['name'] + colorspace = version_data.get("colorspace", None) + object_name = "{}_{}".format(name, namespace) + + # prepare data for imprinting + # add additional metadata from the version to imprint to Avalon knob + add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", + "source", "author", "fps"] + + data_imprint = {"frameStart": first, + "frameEnd": last, + "version": vname, + "colorspaceInput": colorspace, + "objectName": object_name} + + for k in add_keys: + data_imprint.update({k: version_data[k]}) + + # getting file path + file = self.fname.replace("\\", "/") + + # getting data from 
json file with unicode conversion + with open(file, "r") as f: + json_f = {self.byteify(key): self.byteify(value) + for key, value in json.load(f).iteritems()} + + # get correct order of nodes by positions on track and subtrack + nodes_order = self.reorder_nodes(json_f["effects"]) + + # adding nodes to node graph + # just in case we are in group lets jump out of it + nuke.endGroup() + + GN = nuke.createNode("Group") + + GN["name"].setValue(object_name) + + # adding content to the group node + with GN: + pre_node = nuke.createNode("Input") + pre_node["name"].setValue("rgb") + + for ef_name, ef_val in nodes_order.items(): + node = nuke.createNode(ef_val["class"]) + for k, v in ef_val["node"].items(): + if k in self.ignore_attr: + continue + if isinstance(v, list) and len(v) > 4: + node[k].setAnimated() + for i, value in enumerate(v): + if isinstance(value, list): + for ci, cv in enumerate(value): + node[k].setValueAt( + cv, + (workfile_first_frame + i), + ci) + else: + node[k].setValueAt( + value, + (workfile_first_frame + i)) + else: + node[k].setValue(v) + node.setInput(0, pre_node) + pre_node = node + + output = nuke.createNode("Output") + output.setInput(0, pre_node) + + # try to find parent read node + self.connect_read_node(GN, namespace, json_f["assignTo"]) + + GN["tile_color"].setValue(int("0x3469ffff", 16)) + + self.log.info("Loaded lut setup: `{}`".format(GN["name"].value())) + + return containerise( + node=GN, + name=name, + namespace=namespace, + context=context, + loader=self.__class__.__name__, + data=data_imprint) + + def update(self, container, representation): + """Update the Loader's path + + Nuke automatically tries to reset some variables when changing + the loader's path to a new file. 
These automatic changes are to its + inputs: + + """ + + from avalon.nuke import ( + update_container + ) + # get main variables + # Get version from io + version = io.find_one({ + "type": "version", + "_id": representation["parent"] + }) + # get corresponding node + GN = nuke.toNode(container['objectName']) + + file = api.get_representation_path(representation).replace("\\", "/") + name = container['name'] + version_data = version.get("data", {}) + vname = version.get("name", None) + first = version_data.get("frameStart", None) + last = version_data.get("frameEnd", None) + workfile_first_frame = int(nuke.root()["first_frame"].getValue()) + namespace = container['namespace'] + colorspace = version_data.get("colorspace", None) + object_name = "{}_{}".format(name, namespace) + + add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", + "source", "author", "fps"] + + data_imprint = {"representation": str(representation["_id"]), + "frameStart": first, + "frameEnd": last, + "version": vname, + "colorspaceInput": colorspace, + "objectName": object_name} + + for k in add_keys: + data_imprint.update({k: version_data[k]}) + + # Update the imprinted representation + update_container( + GN, + data_imprint + ) + + # getting data from json file with unicode conversion + with open(file, "r") as f: + json_f = {self.byteify(key): self.byteify(value) + for key, value in json.load(f).iteritems()} + + # get correct order of nodes by positions on track and subtrack + nodes_order = self.reorder_nodes(json_f["effects"]) + + # adding nodes to node graph + # just in case we are in group lets jump out of it + nuke.endGroup() + + # adding content to the group node + with GN: + # first remove all nodes + [nuke.delete(n) for n in nuke.allNodes()] + + # create input node + pre_node = nuke.createNode("Input") + pre_node["name"].setValue("rgb") + + for ef_name, ef_val in nodes_order.items(): + node = nuke.createNode(ef_val["class"]) + for k, v in ef_val["node"].items(): + if k in 
self.ignore_attr: + continue + if isinstance(v, list) and len(v) > 3: + node[k].setAnimated() + for i, value in enumerate(v): + if isinstance(value, list): + for ci, cv in enumerate(value): + node[k].setValueAt( + cv, + (workfile_first_frame + i), + ci) + else: + node[k].setValueAt( + value, + (workfile_first_frame + i)) + else: + node[k].setValue(v) + node.setInput(0, pre_node) + pre_node = node + + # create output node + output = nuke.createNode("Output") + output.setInput(0, pre_node) + + # try to find parent read node + self.connect_read_node(GN, namespace, json_f["assignTo"]) + + # get all versions in list + versions = io.find({ + "type": "version", + "parent": version["parent"] + }).distinct('name') + + max_version = max(versions) + + # change color of node + if version.get("name") not in [max_version]: + GN["tile_color"].setValue(int("0xd84f20ff", 16)) + else: + GN["tile_color"].setValue(int("0x3469ffff", 16)) + + self.log.info("udated to version: {}".format(version.get("name"))) + + def connect_read_node(self, group_node, asset, subset): + """ + Finds read node and selects it + + Arguments: + asset (str): asset name + + Returns: + nuke node: node is selected + None: if nothing found + """ + search_name = "{0}_{1}".format(asset, subset) + node = [n for n in nuke.allNodes() if search_name in n["name"].value()] + if len(node) > 0: + rn = node[0] + else: + rn = None + + # Parent read node has been found + # solving connections + if rn: + dep_nodes = rn.dependent() + + if len(dep_nodes) > 0: + for dn in dep_nodes: + dn.setInput(0, group_node) + + group_node.setInput(0, rn) + group_node.autoplace() + + def reorder_nodes(self, data): + new_order = OrderedDict() + trackNums = [v["trackIndex"] for k, v in data.items()] + subTrackNums = [v["subTrackIndex"] for k, v in data.items()] + + for trackIndex in range( + min(trackNums), max(trackNums) + 1): + for subTrackIndex in range( + min(subTrackNums), max(subTrackNums) + 1): + item = self.get_item(data, trackIndex, 
subTrackIndex) + if item is not {}: + new_order.update(item) + return new_order + + def get_item(self, data, trackIndex, subTrackIndex): + return {key: val for key, val in data.items() + if subTrackIndex == val["subTrackIndex"] + if trackIndex == val["trackIndex"]} + + def byteify(self, input): + """ + Converts unicode strings to strings + It goes trought all dictionary + + Arguments: + input (dict/str): input + + Returns: + dict: with fixed values and keys + + """ + + if isinstance(input, dict): + return {self.byteify(key): self.byteify(value) + for key, value in input.iteritems()} + elif isinstance(input, list): + return [self.byteify(element) for element in input] + elif isinstance(input, unicode): + return input.encode('utf-8') + else: + return input + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + from avalon.nuke import viewer_update_and_undo_stop + node = nuke.toNode(container['objectName']) + with viewer_update_and_undo_stop(): + nuke.delete(node) diff --git a/pype/plugins/nuke/load/load_luts_ip.py b/pype/plugins/nuke/load/load_luts_ip.py new file mode 100644 index 0000000000..b30f84cc42 --- /dev/null +++ b/pype/plugins/nuke/load/load_luts_ip.py @@ -0,0 +1,335 @@ +from avalon import api, style, io +import nuke +import json +from collections import OrderedDict +from pype.nuke import lib + +class LoadLutsInputProcess(api.Loader): + """Loading colorspace soft effect exported from nukestudio""" + + representations = ["lutJson"] + families = ["lut"] + + label = "Load Luts - Input Process" + order = 0 + icon = "eye" + color = style.colors.alert + ignore_attr = ["useLifetime"] + + def load(self, context, name, namespace, data): + """ + Loading function to get the soft effects to particular read node + + Arguments: + context (dict): context of version + name (str): name of the version + namespace (str): asset name + data (dict): compulsory attribute > not used + + Returns: + nuke node: 
containerised nuke node object + """ + # import dependencies + from avalon.nuke import containerise + + # get main variables + version = context['version'] + version_data = version.get("data", {}) + vname = version.get("name", None) + first = version_data.get("frameStart", None) + last = version_data.get("frameEnd", None) + workfile_first_frame = int(nuke.root()["first_frame"].getValue()) + namespace = namespace or context['asset']['name'] + colorspace = version_data.get("colorspace", None) + object_name = "{}_{}".format(name, namespace) + + # prepare data for imprinting + # add additional metadata from the version to imprint to Avalon knob + add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", + "source", "author", "fps"] + + data_imprint = {"frameStart": first, + "frameEnd": last, + "version": vname, + "colorspaceInput": colorspace, + "objectName": object_name} + + for k in add_keys: + data_imprint.update({k: version_data[k]}) + + # getting file path + file = self.fname.replace("\\", "/") + + # getting data from json file with unicode conversion + with open(file, "r") as f: + json_f = {self.byteify(key): self.byteify(value) + for key, value in json.load(f).iteritems()} + + # get correct order of nodes by positions on track and subtrack + nodes_order = self.reorder_nodes(json_f["effects"]) + + # adding nodes to node graph + # just in case we are in group lets jump out of it + nuke.endGroup() + + GN = nuke.createNode("Group") + + GN["name"].setValue(object_name) + + # adding content to the group node + with GN: + pre_node = nuke.createNode("Input") + pre_node["name"].setValue("rgb") + + for ef_name, ef_val in nodes_order.items(): + node = nuke.createNode(ef_val["class"]) + for k, v in ef_val["node"].items(): + if k in self.ignore_attr: + continue + if isinstance(v, list) and len(v) > 4: + node[k].setAnimated() + for i, value in enumerate(v): + if isinstance(value, list): + for ci, cv in enumerate(value): + node[k].setValueAt( + cv, + 
(workfile_first_frame + i), + ci) + else: + node[k].setValueAt( + value, + (workfile_first_frame + i)) + else: + node[k].setValue(v) + node.setInput(0, pre_node) + pre_node = node + + output = nuke.createNode("Output") + output.setInput(0, pre_node) + + # try to place it under Viewer1 + if not self.connect_active_viewer(GN): + nuke.delete(GN) + return + + GN["tile_color"].setValue(int("0x3469ffff", 16)) + + self.log.info("Loaded lut setup: `{}`".format(GN["name"].value())) + + return containerise( + node=GN, + name=name, + namespace=namespace, + context=context, + loader=self.__class__.__name__, + data=data_imprint) + + def update(self, container, representation): + """Update the Loader's path + + Nuke automatically tries to reset some variables when changing + the loader's path to a new file. These automatic changes are to its + inputs: + + """ + + from avalon.nuke import ( + update_container + ) + # get main variables + # Get version from io + version = io.find_one({ + "type": "version", + "_id": representation["parent"] + }) + # get corresponding node + GN = nuke.toNode(container['objectName']) + + file = api.get_representation_path(representation).replace("\\", "/") + name = container['name'] + version_data = version.get("data", {}) + vname = version.get("name", None) + first = version_data.get("frameStart", None) + last = version_data.get("frameEnd", None) + workfile_first_frame = int(nuke.root()["first_frame"].getValue()) + namespace = container['namespace'] + colorspace = version_data.get("colorspace", None) + object_name = "{}_{}".format(name, namespace) + + add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", + "source", "author", "fps"] + + data_imprint = {"representation": str(representation["_id"]), + "frameStart": first, + "frameEnd": last, + "version": vname, + "colorspaceInput": colorspace, + "objectName": object_name} + + for k in add_keys: + data_imprint.update({k: version_data[k]}) + + # Update the imprinted representation + 
update_container( + GN, + data_imprint + ) + + # getting data from json file with unicode conversion + with open(file, "r") as f: + json_f = {self.byteify(key): self.byteify(value) + for key, value in json.load(f).iteritems()} + + # get correct order of nodes by positions on track and subtrack + nodes_order = self.reorder_nodes(json_f["effects"]) + + # adding nodes to node graph + # just in case we are in group lets jump out of it + nuke.endGroup() + + # adding content to the group node + with GN: + # first remove all nodes + [nuke.delete(n) for n in nuke.allNodes()] + + # create input node + pre_node = nuke.createNode("Input") + pre_node["name"].setValue("rgb") + + for ef_name, ef_val in nodes_order.items(): + node = nuke.createNode(ef_val["class"]) + for k, v in ef_val["node"].items(): + if k in self.ignore_attr: + continue + if isinstance(v, list) and len(v) > 3: + node[k].setAnimated() + for i, value in enumerate(v): + if isinstance(value, list): + for ci, cv in enumerate(value): + node[k].setValueAt( + cv, + (workfile_first_frame + i), + ci) + else: + node[k].setValueAt( + value, + (workfile_first_frame + i)) + else: + node[k].setValue(v) + node.setInput(0, pre_node) + pre_node = node + + # create output node + output = nuke.createNode("Output") + output.setInput(0, pre_node) + + # try to place it under Viewer1 + if not self.connect_active_viewer(GN): + nuke.delete(GN) + return + + # get all versions in list + versions = io.find({ + "type": "version", + "parent": version["parent"] + }).distinct('name') + + max_version = max(versions) + + # change color of node + if version.get("name") not in [max_version]: + GN["tile_color"].setValue(int("0xd84f20ff", 16)) + else: + GN["tile_color"].setValue(int("0x3469ffff", 16)) + + self.log.info("udated to version: {}".format(version.get("name"))) + + def connect_active_viewer(self, group_node): + """ + Finds Active viewer and + place the node under it, also adds + name of group into Input Process of the viewer + + 
Arguments: + group_node (nuke node): nuke group node object + + """ + group_node_name = group_node["name"].value() + + viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()] + if len(viewer) > 0: + viewer = viewer[0] + else: + self.log.error("Please create Viewer node before you run this action again") + return None + + # get coordinates of Viewer1 + xpos = viewer["xpos"].value() + ypos = viewer["ypos"].value() + + ypos += 150 + + viewer["ypos"].setValue(ypos) + + # set coordinates to group node + group_node["xpos"].setValue(xpos) + group_node["ypos"].setValue(ypos + 50) + + # add group node name to Viewer Input Process + viewer["input_process_node"].setValue(group_node_name) + + # put backdrop under + lib.create_backdrop(label="Input Process", layer=2, nodes=[viewer, group_node], color="0x7c7faaff") + + return True + + def reorder_nodes(self, data): + new_order = OrderedDict() + trackNums = [v["trackIndex"] for k, v in data.items()] + subTrackNums = [v["subTrackIndex"] for k, v in data.items()] + + for trackIndex in range( + min(trackNums), max(trackNums) + 1): + for subTrackIndex in range( + min(subTrackNums), max(subTrackNums) + 1): + item = self.get_item(data, trackIndex, subTrackIndex) + if item is not {}: + new_order.update(item) + return new_order + + def get_item(self, data, trackIndex, subTrackIndex): + return {key: val for key, val in data.items() + if subTrackIndex == val["subTrackIndex"] + if trackIndex == val["trackIndex"]} + + def byteify(self, input): + """ + Converts unicode strings to strings + It goes trought all dictionary + + Arguments: + input (dict/str): input + + Returns: + dict: with fixed values and keys + + """ + + if isinstance(input, dict): + return {self.byteify(key): self.byteify(value) + for key, value in input.iteritems()} + elif isinstance(input, list): + return [self.byteify(element) for element in input] + elif isinstance(input, unicode): + return input.encode('utf-8') + else: + return input + + def switch(self, 
container, representation): + self.update(container, representation) + + def remove(self, container): + from avalon.nuke import viewer_update_and_undo_stop + node = nuke.toNode(container['objectName']) + with viewer_update_and_undo_stop(): + nuke.delete(node) diff --git a/pype/plugins/nuke/publish/validate_active_viewer.py b/pype/plugins/nuke/publish/validate_active_viewer.py new file mode 100644 index 0000000000..618a7f1502 --- /dev/null +++ b/pype/plugins/nuke/publish/validate_active_viewer.py @@ -0,0 +1,24 @@ +import pyblish.api +import nuke + + +class ValidateActiveViewer(pyblish.api.ContextPlugin): + """Validate presentse of the active viewer from nodes + """ + + order = pyblish.api.ValidatorOrder + label = "Validate Active Viewer" + hosts = ["nuke"] + + def process(self, context): + viewer_process_node = context.data.get("ViewerProcess") + + assert viewer_process_node, ( + "Missing active viewer process! Please click on output write node and push key number 1-9" + ) + active_viewer = context.data["ActiveViewer"] + active_input = active_viewer.activeInput() + + assert active_input is not None, ( + "Missing active viewer input! 
Please click on output write node and push key number 1-9" + ) From 2f06a65198a3f5d71aacbae934d3283cdb2d36ca Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 4 Oct 2019 16:41:36 +0200 Subject: [PATCH 2/6] impoving publishing of baked mov - adding feature to add input process node to baking - removing collec/validate active viewer process (not necessary) - output node added to render write node --- .../collect_active_viewer.py | 1 - .../validate_active_viewer.py | 0 .../nuke/publish/extract_ouput_node.py | 20 ++--- .../nuke/publish/extract_review_data.py | 73 ++++++++++++------- .../nuke/publish/validate_rendered_frames.py | 2 + 5 files changed, 57 insertions(+), 39 deletions(-) rename pype/plugins/nuke/{publish => _publish_unused}/collect_active_viewer.py (83%) rename pype/plugins/nuke/{publish => _publish_unused}/validate_active_viewer.py (100%) diff --git a/pype/plugins/nuke/publish/collect_active_viewer.py b/pype/plugins/nuke/_publish_unused/collect_active_viewer.py similarity index 83% rename from pype/plugins/nuke/publish/collect_active_viewer.py rename to pype/plugins/nuke/_publish_unused/collect_active_viewer.py index 5dc17d8768..5a6cc02b88 100644 --- a/pype/plugins/nuke/publish/collect_active_viewer.py +++ b/pype/plugins/nuke/_publish_unused/collect_active_viewer.py @@ -11,5 +11,4 @@ class CollectActiveViewer(pyblish.api.ContextPlugin): hosts = ["nuke"] def process(self, context): - context.data["ViewerProcess"] = nuke.ViewerProcess.node() context.data["ActiveViewer"] = nuke.activeViewer() diff --git a/pype/plugins/nuke/publish/validate_active_viewer.py b/pype/plugins/nuke/_publish_unused/validate_active_viewer.py similarity index 100% rename from pype/plugins/nuke/publish/validate_active_viewer.py rename to pype/plugins/nuke/_publish_unused/validate_active_viewer.py diff --git a/pype/plugins/nuke/publish/extract_ouput_node.py b/pype/plugins/nuke/publish/extract_ouput_node.py index 4d7533f010..a144761e5f 100644 --- 
a/pype/plugins/nuke/publish/extract_ouput_node.py +++ b/pype/plugins/nuke/publish/extract_ouput_node.py @@ -15,21 +15,17 @@ class CreateOutputNode(pyblish.api.ContextPlugin): def process(self, context): # capture selection state with maintained_selection(): - # deselect all allNodes - self.log.info(context.data["ActiveViewer"]) + active_node = [node for inst in context[:] + for node in inst[:] + if "ak:family" in node.knobs()] - active_viewer = context.data["ActiveViewer"] - active_input = active_viewer.activeInput() - active_node = active_viewer.node() - - - last_viewer_node = active_node.input(active_input) - - name = last_viewer_node.name() - self.log.info("Node name: {}".format(name)) + if active_node: + self.log.info(active_node) + active_node = active_node[0] + self.log.info(active_node) + active_node['selected'].setValue(True) # select only instance render node - last_viewer_node['selected'].setValue(True) output_node = nuke.createNode("Output") # deselect all and select the original selection diff --git a/pype/plugins/nuke/publish/extract_review_data.py b/pype/plugins/nuke/publish/extract_review_data.py index 40c3e37434..885fe99b3d 100644 --- a/pype/plugins/nuke/publish/extract_review_data.py +++ b/pype/plugins/nuke/publish/extract_review_data.py @@ -2,7 +2,7 @@ import os import nuke import pyblish.api import pype - +import copy class ExtractReviewData(pype.api.Extractor): """Extracts movie and thumbnail with baked in luts @@ -48,9 +48,9 @@ class ExtractReviewData(pype.api.Extractor): assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" 
- import nuke temporary_nodes = [] - stagingDir = instance.data['representations'][0]["stagingDir"].replace("\\", "/") + stagingDir = instance.data[ + 'representations'][0]["stagingDir"].replace("\\", "/") self.log.debug("StagingDir `{0}`...".format(stagingDir)) collection = instance.data.get("collection", None) @@ -70,16 +70,24 @@ class ExtractReviewData(pype.api.Extractor): first_frame = instance.data.get("frameStart", None) last_frame = instance.data.get("frameEnd", None) - node = previous_node = nuke.createNode("Read") + rnode = nuke.createNode("Read") - node["file"].setValue( + rnode["file"].setValue( os.path.join(stagingDir, fname).replace("\\", "/")) - node["first"].setValue(first_frame) - node["origfirst"].setValue(first_frame) - node["last"].setValue(last_frame) - node["origlast"].setValue(last_frame) - temporary_nodes.append(node) + rnode["first"].setValue(first_frame) + rnode["origfirst"].setValue(first_frame) + rnode["last"].setValue(last_frame) + rnode["origlast"].setValue(last_frame) + temporary_nodes.append(rnode) + previous_node = rnode + + # get input process and connect it to baking + ipn = self.get_view_process_node() + if ipn is not None: + ipn.setInput(0, previous_node) + previous_node = ipn + temporary_nodes.append(ipn) reformat_node = nuke.createNode("Reformat") @@ -95,22 +103,10 @@ class ExtractReviewData(pype.api.Extractor): previous_node = reformat_node temporary_nodes.append(reformat_node) - viewer_process_node = instance.context.data.get("ViewerProcess") - dag_node = None - if viewer_process_node: - dag_node = nuke.createNode(viewer_process_node.Class()) - dag_node.setInput(0, previous_node) - previous_node = dag_node - temporary_nodes.append(dag_node) - # Copy viewer process values - excludedKnobs = ["name", "xpos", "ypos"] - for item in viewer_process_node.knobs().keys(): - if item not in excludedKnobs and item in dag_node.knobs(): - x1 = viewer_process_node[item] - x2 = dag_node[item] - x2.fromScript(x1.toScript(False)) - else: - 
self.log.warning("No viewer node found.") + dag_node = nuke.createNode("OCIODisplay") + dag_node.setInput(0, previous_node) + previous_node = dag_node + temporary_nodes.append(dag_node) # create write node write_node = nuke.createNode("Write") @@ -164,3 +160,28 @@ class ExtractReviewData(pype.api.Extractor): # Clean up for node in temporary_nodes: nuke.delete(node) + + def get_view_process_node(self): + + # Select only the target node + if nuke.selectedNodes(): + [n.setSelected(False) for n in nuke.selectedNodes()] + + for v in [n for n in nuke.allNodes() + if "Viewer" in n.Class()]: + ip = v['input_process'].getValue() + ipn = v['input_process_node'].getValue() + if "VIEWER_INPUT" not in ipn and ip: + ipn_orig = nuke.toNode(ipn) + ipn_orig.setSelected(True) + + if ipn_orig: + nuke.nodeCopy('%clipboard%') + + [n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all + + nuke.nodePaste('%clipboard%') + + ipn = nuke.selectedNode() + + return ipn diff --git a/pype/plugins/nuke/publish/validate_rendered_frames.py b/pype/plugins/nuke/publish/validate_rendered_frames.py index 85cbe7b2c0..3887b5d5b7 100644 --- a/pype/plugins/nuke/publish/validate_rendered_frames.py +++ b/pype/plugins/nuke/publish/validate_rendered_frames.py @@ -81,3 +81,5 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): ).format(__name__) instance.data['collection'] = collection + + return From 185e3c29a7157e3264db06c344fa12c0329c30b1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 4 Oct 2019 16:45:45 +0200 Subject: [PATCH 3/6] fix: standalone publishing and image sequence had troubles --- pype/plugins/ftrack/publish/integrate_remove_components.py | 3 +++ pype/plugins/global/publish/integrate_new.py | 6 +++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/pype/plugins/ftrack/publish/integrate_remove_components.py b/pype/plugins/ftrack/publish/integrate_remove_components.py index a215ee1b97..bad50f7200 100644 --- 
a/pype/plugins/ftrack/publish/integrate_remove_components.py +++ b/pype/plugins/ftrack/publish/integrate_remove_components.py @@ -17,6 +17,9 @@ class IntegrateCleanComponentData(pyblish.api.InstancePlugin): for comp in instance.data['representations']: self.log.debug('component {}'.format(comp)) + + if "%" in comp['published_path'] or "#" in comp['published_path']: + continue if comp.get('thumbnail') or ("thumbnail" in comp.get('tags', [])): os.remove(comp['published_path']) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 6c89e22a83..61881b2a34 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -307,7 +307,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if repre.get("frameStart"): frame_start_padding = len(str( repre.get("frameEnd"))) - index_frame_start = repre.get("frameStart") + index_frame_start = int(repre.get("frameStart")) dst_padding_exp = src_padding_exp for i in src_collection.indexes: @@ -322,7 +322,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): dst_padding = dst_padding_exp % index_frame_start index_frame_start += 1 - dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail) + dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail).replace("..", ".") self.log.debug("destination: `{}`".format(dst)) src = os.path.join(stagingdir, src_file_name) self.log.debug("source: {}".format(src)) @@ -357,7 +357,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): src = os.path.join(stagingdir, fname) anatomy_filled = anatomy.format(template_data) dst = os.path.normpath( - anatomy_filled[template_name]["path"]) + anatomy_filled[template_name]["path"]).replace("..", ".") instance.data["transfers"].append([src, dst]) From 776e8922bcff08fe37453173dfaea82126a571aa Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 4 Oct 2019 16:50:18 +0200 Subject: [PATCH 4/6] fix: unnecessary import of module --- 
import os
import json
import enum
import collections
from inspect import signature
import socket
import http.server
from http import HTTPStatus
import socketserver

from Qt import QtCore

from pypeapp import config, Logger

log = Logger().get_logger("RestApiServer")


class RestMethods(enum.Enum):
    """HTTP methods the rest api server can route callbacks for."""

    GET = "GET"
    POST = "POST"
    PUT = "PUT"
    PATCH = "PATCH"
    DELETE = "DELETE"

    def __repr__(self):
        return str(self.value)

    def __eq__(self, other):
        # Allow comparing a member directly against a plain string,
        # e.g. `RestMethods.POST == "POST"`.
        if isinstance(other, str):
            return self.value == other
        # BUGFIX: the original returned `self == other` here, which called
        # __eq__ again and recursed infinitely when comparing two members.
        return super(RestMethods, self).__eq__(other)

    def __hash__(self):
        # Overriding __eq__ resets __hash__ to None, so restore the
        # default Enum hash to keep members usable as dict keys.
        return enum.Enum.__hash__(self)

    def __str__(self):
        return str(self.value)


class Handler(http.server.SimpleHTTPRequestHandler):
    """Dispatches incoming HTTP requests to callbacks registered on the
    owning server (see AdditionalArgsTCPServer.registered_callbacks).
    """

    def do_GET(self):
        self.process_request(RestMethods.GET)

    def do_POST(self):
        """Common code for POST.

        This trigger callbacks on specific paths.

        If request contain data and callback func has arg data are sent to
        callback too.

        Send back return values of callbacks.
        """
        self.process_request(RestMethods.POST)

    def process_request(self, rest_method):
        """Trigger every callback registered for the request path.

        Args:
            rest_method (RestMethods): Which method's callback table to use.
        """
        # BUGFIX: GET requests (and bodyless POSTs) carry no Content-Length
        # header; the original `int(self.headers["Content-Length"])` raised
        # TypeError on every such request.
        content_length = int(self.headers.get("Content-Length") or 0)
        in_data = None
        if content_length:
            in_data_str = self.rfile.read(content_length)
            if in_data_str:
                in_data = json.loads(in_data_str)

        registered_callbacks = self.server.registered_callbacks[rest_method]

        # Normalized, lower-cased path components of the requested URL.
        path_items = [part.lower() for part in self.path.split("/") if part]

        results = []
        for check_path, callbacks in registered_callbacks.items():
            if check_path.split("/") != path_items:
                continue
            log.debug(
                "Triggering callbacks for path \"{}\"".format(check_path)
            )
            for callback in callbacks:
                try:
                    # Only pass the payload if the callback can accept it.
                    params = signature(callback).parameters
                    if len(params) > 0 and in_data:
                        result = callback(in_data)
                    else:
                        result = callback()

                    if result:
                        results.append(result)
                except Exception:
                    log.error(
                        "Callback on path \"{}\" failed".format(check_path),
                        exc_info=True
                    )

        any_result = len(results) > 0
        self.send_response(HTTPStatus.OK)
        if any_result:
            self.send_header("Content-type", "application/json")
        self.end_headers()

        if not any_result:
            return

        if len(results) == 1:
            json_message = str(results[0])
        else:
            messages = {}
            # BUGFIX: the original never incremented `index`, so every
            # result after the first overwrote the "callback1" key.
            for index, result in enumerate(results, 1):
                if isinstance(result, str):
                    value = result
                else:
                    value = json.dumps(result)
                messages["callback{}".format(index)] = value

            json_message = json.dumps(messages)

        self.wfile.write(json_message.encode())


class AdditionalArgsTCPServer(socketserver.TCPServer):
    """TCPServer that exposes the callback registry to request handlers."""

    def __init__(self, registered_callbacks, *args, **kwargs):
        self.registered_callbacks = registered_callbacks
        super(AdditionalArgsTCPServer, self).__init__(*args, **kwargs)


class RestApiServer(QtCore.QThread):
    """ Listener for REST requests.

    It is possible to register callbacks for url paths.
    Be careful about crossreferencing to different QThreads it is not allowed.
    """

    def __init__(self):
        super(RestApiServer, self).__init__()
        # One callback table per HTTP method, keyed by normalized path.
        self.registered_callbacks = {
            method: collections.defaultdict(list) for method in RestMethods
        }

        self.qaction = None
        self.failed_icon = None
        self._is_running = False
        try:
            self.presets = config.get_presets().get(
                "services", {}).get(
                "rest_api", {}
            )
        except Exception:
            self.presets = {}
        # BUGFIX: presets may load successfully yet lack these keys, which
        # made find_port raise KeyError; always guarantee defaults.
        self.presets.setdefault("default_port", 8011)
        self.presets.setdefault("exclude_ports", [])

        self.port = self.find_port()

    def set_qaction(self, qaction, failed_icon):
        """Store the tray action + icon used to signal server failure."""
        self.qaction = qaction
        self.failed_icon = failed_icon

    def register_callback(self, path, callback, rest_method=RestMethods.POST):
        """Register `callback` to be triggered on requests to `path`.

        Args:
            path (str or list or set): URL path; sequences are joined
                with "/". Matching is case-insensitive.
            callback (callable): Called on matching requests; receives the
                JSON payload if it accepts an argument.
            rest_method (RestMethods or str): HTTP method, e.g. "post".
        """
        if isinstance(path, (list, set)):
            path = "/".join([part.lower() for part in path])
        elif isinstance(path, str):
            path = "/".join(
                [part.lower() for part in str(path).split("/") if part]
            )

        # BUGFIX: the original converted a string method to an upper-case
        # *string*, but `registered_callbacks` is keyed by RestMethods
        # members; a plain string key raised KeyError (str hash != Enum
        # hash). Convert to the actual enum member instead.
        if isinstance(rest_method, str):
            rest_method = RestMethods(str(rest_method).upper())

        if path in self.registered_callbacks[rest_method]:
            # Keep original behavior: duplicate registrations are skipped.
            log.warning(
                "Path \"{}\" has already registered callback.".format(path)
            )
        else:
            log.debug(
                "Registering callback for path \"{}\"".format(path)
            )
            self.registered_callbacks[rest_method][path].append(callback)

    def tray_start(self):
        self.start()

    @property
    def is_running(self):
        return self._is_running

    def stop(self):
        # The serve loop re-checks this flag after each handled request or
        # server timeout (see `run`), so shutdown may lag by ~1 second.
        self._is_running = False

    def run(self):
        self._is_running = True
        # BUGFIX: the original tested the dict itself, which always holds
        # 5 method keys and is therefore always truthy; test whether any
        # callback is actually registered. Also fixed the garbled message.
        if not any(self.registered_callbacks.values()):
            log.info("No registered callbacks for Rest Api server.")
            return

        try:
            log.debug(
                "Running Rest Api server on URL:"
                " \"http://localhost:{}\"".format(self.port)
            )
            with AdditionalArgsTCPServer(
                self.registered_callbacks,
                ("", self.port),
                Handler
            ) as httpd:
                # Wake up periodically so `stop()` takes effect even when
                # no requests are coming in.
                httpd.timeout = 1
                while self._is_running:
                    httpd.handle_request()
        except Exception:
            log.warning(
                "Rest Api Server service has failed", exc_info=True
            )
            self._is_running = False
            if self.qaction and self.failed_icon:
                self.qaction.setIcon(self.failed_icon)

    def find_port(self):
        """Find a free localhost port and export it as PYPE_REST_API_URL.

        Returns:
            int or None: The first usable port, or None if none was found.
        """
        start_port = self.presets["default_port"]
        exclude_ports = self.presets["exclude_ports"]
        found_port = None
        # port check takes time so it's lowered to 100 ports
        for port in range(start_port, start_port + 100):
            if port in exclude_ports:
                continue
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                # connect_ex != 0 means nothing is listening -> port free.
                if sock.connect_ex(("localhost", port)) != 0:
                    found_port = port
            if found_port is not None:
                break
        if found_port is None:
            return None
        os.environ["PYPE_REST_API_URL"] = "http://localhost:{}".format(
            found_port
        )
        return found_port
def process_modules(self, modules):
    """Hook Muster's login dialog into the Rest Api server, if present.

    Args:
        modules (dict): Mapping of tray module name to module instance.
    """
    rest_api_module = modules.get("RestApiServer")
    if rest_api_module is not None:
        rest_api_module.register_callback(
            "muster/show_login", self.show_login, "post"
        )