From f350d7694cf5c4d7c2598aa540db219cbf216403 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 4 Dec 2019 17:26:23 +0100 Subject: [PATCH 01/55] feat(global): print ffmpeg path --- pype/plugins/global/publish/validate_ffmpeg_installed.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pype/plugins/global/publish/validate_ffmpeg_installed.py b/pype/plugins/global/publish/validate_ffmpeg_installed.py index 6d5ffba1e1..df7c330e95 100644 --- a/pype/plugins/global/publish/validate_ffmpeg_installed.py +++ b/pype/plugins/global/publish/validate_ffmpeg_installed.py @@ -27,6 +27,8 @@ class ValidateFfmpegInstallef(pyblish.api.Validator): return True def process(self, instance): + self.log.info("ffmpeg path: `{}`".format( + os.environ.get("FFMPEG_PATH", ""))) if self.is_tool( os.path.join( os.environ.get("FFMPEG_PATH", ""), "ffmpeg")) is False: From 346e3e27bce2bc0dd71829a3a686857cdd1268d7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 4 Dec 2019 17:27:39 +0100 Subject: [PATCH 02/55] feat(nuke): extract review will generate lut file this will remove the nuke rendering of `*.baked..mov` --- .../nuke/publish/extract_review_data_lut.py | 185 ++++++++++++++++++ 1 file changed, 185 insertions(+) create mode 100644 pype/plugins/nuke/publish/extract_review_data_lut.py diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py new file mode 100644 index 0000000000..bba9544b13 --- /dev/null +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -0,0 +1,185 @@ +import os +import nuke +import pyblish.api +from avalon.nuke import lib as anlib +import pype + + +class ExtractReviewData(pype.api.Extractor): + """Extracts movie and thumbnail with baked in luts + + must be run after extract_render_local.py + + """ + + order = pyblish.api.ExtractorOrder + 0.01 + label = "Extract Review Data Lut" + + families = ["review"] + hosts = ["nuke"] + + def process(self, instance): + + self.log.debug("creating staging dir:") + self.staging_dir(instance) + + with anlib.maintained_selection(): + if "still" not in instance.data["families"]: + self.render_review_representation(instance, + representation="mov") + self.render_review_representation(instance, + representation="jpeg") + else: + self.render_review_representation(instance, representation="jpeg") + + def render_review_representation(self, + instance, + representation="mov"): + + assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" + + temporary_nodes = [] + stagingDir = instance.data[ + 'representations'][0]["stagingDir"].replace("\\", "/") + self.log.debug("StagingDir `{0}`...".format(stagingDir)) + + collection = instance.data.get("collection", None) + + if collection: + # get path + fname = os.path.basename(collection.format( + "{head}{padding}{tail}")) + fhead = collection.format("{head}") + + # get first and last frame + first_frame = min(collection.indexes) + last_frame = max(collection.indexes) + else: + fname = os.path.basename(instance.data.get("path", None)) + fhead = os.path.splitext(fname)[0] + "." 
+ first_frame = instance.data.get("frameStart", None) + last_frame = instance.data.get("frameEnd", None) + + rnode = nuke.createNode("Read") + + rnode["file"].setValue( + os.path.join(stagingDir, fname).replace("\\", "/")) + + rnode["first"].setValue(first_frame) + rnode["origfirst"].setValue(first_frame) + rnode["last"].setValue(last_frame) + rnode["origlast"].setValue(last_frame) + temporary_nodes.append(rnode) + previous_node = rnode + + # get input process and connect it to baking + ipn = self.get_view_process_node() + if ipn is not None: + ipn.setInput(0, previous_node) + previous_node = ipn + temporary_nodes.append(ipn) + + reformat_node = nuke.createNode("Reformat") + + ref_node = self.nodes.get("Reformat", None) + if ref_node: + for k, v in ref_node: + self.log.debug("k,v: {0}:{1}".format(k,v)) + if isinstance(v, unicode): + v = str(v) + reformat_node[k].setValue(v) + + reformat_node.setInput(0, previous_node) + previous_node = reformat_node + temporary_nodes.append(reformat_node) + + dag_node = nuke.createNode("OCIODisplay") + dag_node.setInput(0, previous_node) + previous_node = dag_node + temporary_nodes.append(dag_node) + + # create write node + write_node = nuke.createNode("Write") + + if representation in "mov": + file = fhead + "baked.mov" + name = "baked" + path = os.path.join(stagingDir, file).replace("\\", "/") + self.log.debug("Path: {}".format(path)) + instance.data["baked_colorspace_movie"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("mov") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + tags = ["review", "delete"] + + elif representation in "jpeg": + file = fhead + "jpeg" + name = "thumbnail" + path = os.path.join(stagingDir, file).replace("\\", "/") + instance.data["thumbnail"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("jpeg") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + tags = ["thumbnail"] + + # retime for + first_frame = int(last_frame) / 2 + last_frame = int(last_frame) / 2 + + repre = { + 'name': name, + 'ext': representation, + 'files': file, + "stagingDir": stagingDir, + "frameStart": first_frame, + "frameEnd": last_frame, + "anatomy_template": "render", + "tags": tags + } + instance.data["representations"].append(repre) + + # Render frames + nuke.execute(write_node.name(), int(first_frame), int(last_frame)) + + self.log.debug("representations: {}".format(instance.data["representations"])) + + # Clean up + for node in temporary_nodes: + nuke.delete(node) + + def get_view_process_node(self): + """ + Will get any active view process. 
+ + Arguments: + self (class): in object definition + + Returns: + nuke.Node: copy node of Input Process node + """ + + with anlib.maintained_selection(): + ipn_orig = None + for v in [n for n in nuke.allNodes() + if "Viewer" in n.Class()]: + ip = v['input_process'].getValue() + ipn = v['input_process_node'].getValue() + if "VIEWER_INPUT" not in ipn and ip: + ipn_orig = nuke.toNode(ipn) + ipn_orig.setSelected(True) + + if ipn_orig: + # copy selected to clipboard + nuke.nodeCopy('%clipboard%') + # reset selection + anlib.reset_selection() + # paste node and selection is on it only + nuke.nodePaste('%clipboard%') + # assign to variable + ipn = nuke.selectedNode() + + return ipn From 48e5c4fd26143d78903564679f6dce15f5239da3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 5 Dec 2019 10:36:08 +0100 Subject: [PATCH 03/55] feat(nuke): Lut exporter added to nuke.lib --- pype/nuke/lib.py | 169 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 169 insertions(+) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 157af9019d..960b65f769 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -6,6 +6,7 @@ from collections import OrderedDict from avalon import api, io, lib import avalon.nuke +from avalon.nuke import lib as anlib import pype.api as pype import nuke @@ -1190,3 +1191,171 @@ class BuildWorkfile(WorkfileSettings): def position_up(self, multiply=1): self.ypos -= (self.ypos_size * multiply) + self.ypos_gap + + +class Exporter_review_lut: + """ + Generator object for review lut from Nuke + + Args: + klass (pyblish.plugin): pyblish plugin parent + + + """ + _temp_nodes = [] + data = dict({ + "representations": list() + }) + + def __init__(self, + klass, + instance, + name=None, + ext=None, + lut_size=None, + lut_style=None): + + self.log = klass.log + self.instance = instance + + self.name = name or "baked_lut" + self.ext = ext or "cube" + self.lut_size = lut_size or 1024 + self.lut_style = lut_style or "linear" + + self.stagingDir = self.instance.data["stagingDir"] + self.collection = self.instance.data.get("collection", None) + + # set frame start / end and file name to self + self.get_file_info() + + self.log.info("File info was set...") + + self.file = self.fhead + self.name + ".{}".format(self.ext) + self.path = os.path.join(self.stagingDir, self.file).replace("\\", "/") + + def generate_lut(self): + # ---------- start nodes creation + + # CMSTestPattern + cms_node = nuke.createNode("CMSTestPattern") + cms_node["cube_size"].setValue(96) + # connect + self._temp_nodes.append(cms_node) + self.previous_node = cms_node + self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes)) + + # Node View Process + ipn = self.get_view_process_node() + if ipn is not None: + # connect + ipn.setInput(0, self.previous_node) + self._temp_nodes.append(ipn) + self.previous_node = ipn + self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) + + # OCIODisplay + dag_node = nuke.createNode("OCIODisplay") + # connect + dag_node.setInput(0, self.previous_node) + self._temp_nodes.append(dag_node) + self.previous_node = dag_node + self.log.debug("OCIODisplay... 
`{}`".format(self._temp_nodes)) + + # GenerateLUT + gen_lut_node = nuke.createNode("GenerateLUT") + gen_lut_node["file"].setValue(self.path) + gen_lut_node["file_type"].setValue(".{}".format(self.ext)) + gen_lut_node["lut1d"].setValue(self.lut_size) + gen_lut_node["style1d"].setValue(self.lut_style) + # connect + gen_lut_node.setInput(0, self.previous_node) + self._temp_nodes.append(gen_lut_node) + self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes)) + + # ---------- end nodes creation + + # Export lut file + nuke.execute( + gen_lut_node.name(), + int(self.first_frame), + int(self.first_frame)) + + self.log.info("Exported...") + + # ---------- generate representation data + self.get_representation_data() + + self.log.debug("Representation... `{}`".format(self.data)) + + # ---------- Clean up + for node in self._temp_nodes: + nuke.delete(node) + self.log.info("Deleted nodes...") + + return self.data + + def get_file_info(self): + if self.collection: + self.log.debug("Collection: `{}`".format(self.collection)) + # get path + self.fname = os.path.basename(self.collection.format( + "{head}{padding}{tail}")) + self.fhead = self.collection.format("{head}") + + # get first and last frame + self.first_frame = min(self.collection.indexes) + self.last_frame = max(self.collection.indexes) + else: + self.fname = os.path.basename(self.instance.data.get("path", None)) + self.fhead = os.path.splitext(self.fname)[0] + "." + self.first_frame = self.instance.data.get("frameStart", None) + self.last_frame = self.instance.data.get("frameEnd", None) + + if "#" in self.fhead: + self.fhead = self.fhead.replace("#", "")[:-1] + + def get_representation_data(self): + + repre = { + 'name': self.name, + 'ext': self.ext, + 'files': self.file, + "stagingDir": self.stagingDir, + "anatomy_template": "publish", + "tags": [self.name.replace("_", "-")] + } + + self.data["representations"].append(repre) + + def get_view_process_node(self): + """ + Will get any active view process. 
+ + Arguments: + self (class): in object definition + + Returns: + nuke.Node: copy node of Input Process node + """ + anlib.reset_selection() + ipn_orig = None + for v in [n for n in nuke.allNodes() + if "Viewer" in n.Class()]: + ip = v['input_process'].getValue() + ipn = v['input_process_node'].getValue() + if "VIEWER_INPUT" not in ipn and ip: + ipn_orig = nuke.toNode(ipn) + ipn_orig.setSelected(True) + + if ipn_orig: + # copy selected to clipboard + nuke.nodeCopy('%clipboard%') + # reset selection + anlib.reset_selection() + # paste node and selection is on it only + nuke.nodePaste('%clipboard%') + # assign to variable + ipn = nuke.selectedNode() + + return ipn From 49eadd8a2d8c28e73b9ef7ffc63fb12d3c87d1f6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 5 Dec 2019 10:36:53 +0100 Subject: [PATCH 04/55] feat(global): added lut filter to ffmpeg --- pype/plugins/global/publish/extract_review.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index de167710a5..3ff3241812 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -162,6 +162,13 @@ class ExtractReview(pyblish.api.InstancePlugin): # output filename output_args.append(full_output_path) + + lut_path = instance.data.get("lutPath") + if lut_path: + lut_arg = "-vf \"lut3d=file='{}'\"".format( + lut_path) + output_args.insert(0, lut_arg) + mov_args = [ os.path.join( os.environ.get( From bd5805301b3c234f50fe8901f5a3f6a59dd09dac Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 5 Dec 2019 10:37:32 +0100 Subject: [PATCH 05/55] feat(nuke): lut extractor added to nuke plugins --- .../nuke/publish/extract_review_data_lut.py | 182 +++--------------- 1 file changed, 26 insertions(+), 156 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py index bba9544b13..54013af11a 100644 --- a/pype/plugins/nuke/publish/extract_review_data_lut.py +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -1,185 +1,55 @@ import os -import nuke import pyblish.api from avalon.nuke import lib as anlib +from pype.nuke import lib as pnlib import pype +reload(pnlib) -class ExtractReviewData(pype.api.Extractor): +class ExtractReviewLutData(pype.api.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py """ - order = pyblish.api.ExtractorOrder + 0.01 + order = pyblish.api.ExtractorOrder + 0.005 label = "Extract Review Data Lut" families = ["review"] hosts = ["nuke"] def process(self, instance): + self.log.debug( + "_ representations: {}".format(instance.data["representations"])) - self.log.debug("creating staging dir:") - self.staging_dir(instance) + self.log.info("Creating staging dir...") - with anlib.maintained_selection(): - if "still" not in instance.data["families"]: - self.render_review_representation(instance, - representation="mov") - self.render_review_representation(instance, - representation="jpeg") - else: - self.render_review_representation(instance, representation="jpeg") - - def render_review_representation(self, - instance, - representation="mov"): - - assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" 
- - temporary_nodes = [] stagingDir = instance.data[ 'representations'][0]["stagingDir"].replace("\\", "/") - self.log.debug("StagingDir `{0}`...".format(stagingDir)) + instance.data["stagingDir"] = stagingDir - collection = instance.data.get("collection", None) + instance.data['representations'][0]["tags"] = ["review"] - if collection: - # get path - fname = os.path.basename(collection.format( - "{head}{padding}{tail}")) - fhead = collection.format("{head}") + self.log.info( + "StagingDir `{0}`...".format(instance.data["stagingDir"])) - # get first and last frame - first_frame = min(collection.indexes) - last_frame = max(collection.indexes) - else: - fname = os.path.basename(instance.data.get("path", None)) - fhead = os.path.splitext(fname)[0] + "." - first_frame = instance.data.get("frameStart", None) - last_frame = instance.data.get("frameEnd", None) - - rnode = nuke.createNode("Read") - - rnode["file"].setValue( - os.path.join(stagingDir, fname).replace("\\", "/")) - - rnode["first"].setValue(first_frame) - rnode["origfirst"].setValue(first_frame) - rnode["last"].setValue(last_frame) - rnode["origlast"].setValue(last_frame) - temporary_nodes.append(rnode) - previous_node = rnode - - # get input process and connect it to baking - ipn = self.get_view_process_node() - if ipn is not None: - ipn.setInput(0, previous_node) - previous_node = ipn - temporary_nodes.append(ipn) - - reformat_node = nuke.createNode("Reformat") - - ref_node = self.nodes.get("Reformat", None) - if ref_node: - for k, v in ref_node: - self.log.debug("k,v: {0}:{1}".format(k,v)) - if isinstance(v, unicode): - v = str(v) - reformat_node[k].setValue(v) - - reformat_node.setInput(0, previous_node) - previous_node = reformat_node - temporary_nodes.append(reformat_node) - - dag_node = nuke.createNode("OCIODisplay") - dag_node.setInput(0, previous_node) - previous_node = dag_node - temporary_nodes.append(dag_node) - - # create write node - write_node = nuke.createNode("Write") - - if representation in "mov": - file = fhead + "baked.mov" - name = "baked" - path = os.path.join(stagingDir, file).replace("\\", "/") - self.log.debug("Path: {}".format(path)) - instance.data["baked_colorspace_movie"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("mov") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["review", "delete"] - - elif representation in "jpeg": - file = fhead + "jpeg" - name = "thumbnail" - path = os.path.join(stagingDir, file).replace("\\", "/") - instance.data["thumbnail"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("jpeg") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["thumbnail"] - - # retime for - first_frame = int(last_frame) / 2 - last_frame = int(last_frame) / 2 - - repre = { - 'name': name, - 'ext': representation, - 'files': file, - "stagingDir": stagingDir, - "frameStart": first_frame, - "frameEnd": last_frame, - "anatomy_template": "render", - "tags": tags - } - instance.data["representations"].append(repre) - - # Render frames - nuke.execute(write_node.name(), int(first_frame), int(last_frame)) - - self.log.debug("representations: {}".format(instance.data["representations"])) - - # Clean up - for node in temporary_nodes: - nuke.delete(node) - - def get_view_process_node(self): - """ - Will get any active view process. 
- - Arguments: - self (class): in object definition - - Returns: - nuke.Node: copy node of Input Process node - """ + if "representations" not in instance.data: + instance.data["representations"] = [] with anlib.maintained_selection(): - ipn_orig = None - for v in [n for n in nuke.allNodes() - if "Viewer" in n.Class()]: - ip = v['input_process'].getValue() - ipn = v['input_process_node'].getValue() - if "VIEWER_INPUT" not in ipn and ip: - ipn_orig = nuke.toNode(ipn) - ipn_orig.setSelected(True) + exporter = pnlib.Exporter_review_lut( + self, instance + ) + data = exporter.generate_lut() - if ipn_orig: - # copy selected to clipboard - nuke.nodeCopy('%clipboard%') - # reset selection - anlib.reset_selection() - # paste node and selection is on it only - nuke.nodePaste('%clipboard%') - # assign to variable - ipn = nuke.selectedNode() + # assign to representations + instance.data["lutPath"] = os.path.join( + exporter.stagingDir, exporter.file).replace("\\", "/").replace( + "C:/", "C\\:/") + instance.data["representations"] += data["representations"] - return ipn + self.log.debug( + "_ lutPath: {}".format(instance.data["lutPath"])) + self.log.debug( + "_ representations: {}".format(instance.data["representations"])) From bf8a829eee011b9d5fecb8bb4781a2a44395f1bd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 5 Dec 2019 10:38:06 +0100 Subject: [PATCH 06/55] fix(nuke): review extractor fixed maintained selection --- .../nuke/publish/extract_review_data.py | 34 +++++++++---------- 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_review_data.py b/pype/plugins/nuke/publish/extract_review_data.py index 791b9d7969..9bb4f93582 100644 --- a/pype/plugins/nuke/publish/extract_review_data.py +++ b/pype/plugins/nuke/publish/extract_review_data.py @@ -1,5 +1,6 @@ import os import nuke +from avalon.nuke import lib as anlib import pyblish.api import pype @@ -18,28 +19,22 @@ class ExtractReviewData(pype.api.Extractor): def process(self, instance): - # Store selection - selection = [i for i in nuke.allNodes() if i["selected"].getValue()] - # Deselect all nodes to prevent external connections - [i["selected"].setValue(False) for i in nuke.allNodes()] - self.log.debug("creating staging dir:") - self.staging_dir(instance) + with anlib.maintained_selection(): + self.log.debug("creating staging dir:") + self.staging_dir(instance) - self.log.debug("instance: {}".format(instance)) - self.log.debug("instance.data[families]: {}".format( - instance.data["families"])) + self.log.debug("instance: {}".format(instance)) + self.log.debug("instance.data[families]: {}".format( + instance.data["families"])) - if "still" not in instance.data["families"]: - self.render_review_representation(instance, - representation="mov") - self.render_review_representation(instance, - representation="jpeg") - else: + # if "still" not in instance.data["families"]: + # self.render_review_representation(instance, + # representation="mov") + # self.render_review_representation(instance, + # representation="jpeg") + # else: self.render_review_representation(instance, representation="jpeg") - # Restore selection - [i["selected"].setValue(False) for i in nuke.allNodes()] - [i["selected"].setValue(True) for i in selection] def render_review_representation(self, instance, @@ -69,6 +64,9 @@ class ExtractReviewData(pype.api.Extractor): first_frame = instance.data.get("frameStart", None) last_frame = instance.data.get("frameEnd", None) + if "#" in fhead: + fhead = fhead.replace("#", "")[:-1] + rnode = 
nuke.createNode("Read") rnode["file"].setValue( From d3e36f13bc01b7234b492748653572bc5d9d3cb9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 5 Dec 2019 10:38:42 +0100 Subject: [PATCH 07/55] feat(global): adding debug prints to assumed destination --- pype/plugins/global/publish/collect_templates.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index b80ca4ae1b..9b0c03fdee 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -85,3 +85,6 @@ class CollectTemplates(pyblish.api.InstancePlugin): instance.data["assumedDestination"] = os.path.dirname( (anatomy.format(template_data))["publish"]["path"] ) + self.log.info("Assumed Destination has been created...") + self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"])) + self.log.debug("__ template: `{}`".format(instance.data["template"])) From 1361d12452223977a14cb623b1765457ca2ea54b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 07:59:19 +0100 Subject: [PATCH 08/55] feat(nuke): polishing the Lut Exporter --- pype/nuke/lib.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 960b65f769..6349f35bea 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1212,6 +1212,7 @@ class Exporter_review_lut: instance, name=None, ext=None, + cube_size=None, lut_size=None, lut_style=None): @@ -1220,6 +1221,7 @@ class Exporter_review_lut: self.name = name or "baked_lut" self.ext = ext or "cube" + self.cube_size = cube_size or 32 self.lut_size = lut_size or 1024 self.lut_style = lut_style or "linear" @@ -1239,7 +1241,7 @@ class Exporter_review_lut: # CMSTestPattern cms_node = nuke.createNode("CMSTestPattern") - cms_node["cube_size"].setValue(96) + cms_node["cube_size"].setValue(self.cube_size) # connect self._temp_nodes.append(cms_node) self.previous_node = cms_node From 43ed9443941a136227aa694e1fd7cc84985b7ce6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 08:00:15 +0100 Subject: [PATCH 09/55] feat(global): implementing Lut integration into Extract Review --- pype/plugins/global/publish/extract_review.py | 30 +++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 3ff3241812..59ef308f9a 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -165,9 +165,35 @@ class ExtractReview(pyblish.api.InstancePlugin): lut_path = instance.data.get("lutPath") if lut_path: - lut_arg = "-vf \"lut3d=file='{}'\"".format( + # removing Gama info as it is all baked in lut + gamma = next((g for g in input_args + if "-gamma" in g), None) + if gamma: + input_args.remove(gamma) + + # find all video format settings + vf_settings = [p for p in output_args + for v in ["-filter:v", "-vf"] + if v in p] + self.log.debug("_ vf_settings: `{}`".format(vf_settings)) + # remove them from output args list + for p in vf_settings: + self.log.debug("_ remove p: `{}`".format(p)) + output_args.remove(p) + self.log.debug("_ output_args: `{}`".format(output_args)) + # strip them from all flags + vf_fixed = [p.replace("-vf ", "").replace("-filter:v ", "") for p in vf_settings] + # create lut argument + lut_arg = "lut3d=file='{}',colormatrix=bt601:bt709".format( lut_path) - output_args.insert(0, lut_arg) + 
vf_fixed.insert(0, lut_arg) + # create new video filter setting + vf_back = "-vf " + ",".join(vf_fixed) + # add it to output_args + output_args.insert(0, vf_back) + self.log.info("Added Lut to ffmpeg command") + self.log.debug("_ output_args: `{}`".format(output_args)) + mov_args = [ os.path.join( From ec9f5b5e0405e0ba53b26d1d14adbcb78d680c20 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 08:00:35 +0100 Subject: [PATCH 10/55] feat(nuke): adding format data to instance --- pype/plugins/nuke/publish/collect_instances.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index 483f260295..cb98b8244d 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -23,6 +23,8 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): instances = [] # creating instances per write node + root = nuke.root() + self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes())) for node in nuke.allNodes(): @@ -61,7 +63,13 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): family = avalon_knob_data["family"] families = [avalon_knob_data["families"]] - + + # Get format + format = root['format'].value() + resolution_width = format.width() + resolution_height = format.height() + pixel_aspect = format.pixelAspect() + if node.Class() not in "Read": if node["render"].value(): self.log.info("flagged for render") @@ -87,7 +95,10 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): "avalonKnob": avalon_knob_data, "publish": node.knob('publish').value(), "step": 1, - "fps": nuke.root()['fps'].value() + "fps": nuke.root()['fps'].value(), + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height, + "pixelAspect": pixel_aspect, }) @@ -95,5 +106,4 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): instances.append(instance) context.data["instances"] = instances - self.log.debug("context: {}".format(context)) From 3c90a7984023889b30c2fc7b0233b02c8a6a3bb3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 13:58:31 +0100 Subject: [PATCH 11/55] feat(global): extr review pixel aspect rescale --- pype/plugins/global/publish/extract_review.py | 76 ++++++++++++++----- 1 file changed, 58 insertions(+), 18 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 59ef308f9a..fa02826527 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -151,7 +151,6 @@ class ExtractReview(pyblish.api.InstancePlugin): output_args.extend(profile.get('output', [])) # letter_box - # TODO: add to documentation lb = profile.get('letter_box', None) if lb: output_args.append( @@ -163,6 +162,18 @@ class ExtractReview(pyblish.api.InstancePlugin): # output filename output_args.append(full_output_path) + # scaling none square pixels and 1920 width + # scale=320:-2 # to auto count height with output to be multiple of 2 + if profile.get('reformat', False): + pixel_aspect = instance.data["pixelAspect"] + scaling_arg = "scale=1920:'ceil((1920/{})/2)*2':flags=lanczos,setsar=1".format( + pixel_aspect) + vf_back = self.add_video_filter_args( + output_args, scaling_arg) + # add it to output_args + output_args.insert(0, vf_back) + + # baking lut file application lut_path = instance.data.get("lutPath") if lut_path: # removing Gama info as it is all baked in lut @@ -171,24 +182,15 @@ class 
ExtractReview(pyblish.api.InstancePlugin): if gamma: input_args.remove(gamma) - # find all video format settings - vf_settings = [p for p in output_args - for v in ["-filter:v", "-vf"] - if v in p] - self.log.debug("_ vf_settings: `{}`".format(vf_settings)) - # remove them from output args list - for p in vf_settings: - self.log.debug("_ remove p: `{}`".format(p)) - output_args.remove(p) - self.log.debug("_ output_args: `{}`".format(output_args)) - # strip them from all flags - vf_fixed = [p.replace("-vf ", "").replace("-filter:v ", "") for p in vf_settings] # create lut argument - lut_arg = "lut3d=file='{}',colormatrix=bt601:bt709".format( - lut_path) - vf_fixed.insert(0, lut_arg) - # create new video filter setting - vf_back = "-vf " + ",".join(vf_fixed) + lut_arg = "lut3d=file='{}'".format( + lut_path.replace( + "\\", "/").replace(":/", "\\:/") + ) + lut_arg += ",colormatrix=bt601:bt709" + + vf_back = self.add_video_filter_args( + output_args, lut_arg) # add it to output_args output_args.insert(0, vf_back) self.log.info("Added Lut to ffmpeg command") @@ -240,3 +242,41 @@ class ExtractReview(pyblish.api.InstancePlugin): instance.data["representations"] = representations_new self.log.debug("Families Out: `{}`".format(instance.data["families"])) + + + def add_video_filter_args(self, args, inserting_arg): + """ + Fixing video filter argumets to be one long string + + Args: + args (list): list of string arguments + inserting_arg (str): string argument we want to add + (without flag `-vf`) + + Returns: + str: long joined argument to be added back to list of arguments + + """ + # find all video format settings + vf_settings = [p for p in args + for v in ["-filter:v", "-vf"] + if v in p] + self.log.debug("_ vf_settings: `{}`".format(vf_settings)) + + # remove them from output args list + for p in vf_settings: + self.log.debug("_ remove p: `{}`".format(p)) + args.remove(p) + self.log.debug("_ args: `{}`".format(args)) + + # strip them from all flags + vf_fixed = [p.replace("-vf ", "").replace("-filter:v ", "") + for p in vf_settings] + + self.log.debug("_ vf_fixed: `{}`".format(vf_fixed)) + vf_fixed.insert(0, inserting_arg) + self.log.debug("_ vf_fixed: `{}`".format(vf_fixed)) + # create new video filter setting + vf_back = "-vf " + ",".join(vf_fixed) + + return vf_back From b79e22464b02c0b3c4ecf7224bac69ae84270ac6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 13:59:35 +0100 Subject: [PATCH 12/55] fix(nuke: when writes collected family should be `write` otherwise validate frames is picking it up --- pype/plugins/nuke/publish/collect_writes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index ba8a0534b1..5484d971bf 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -99,7 +99,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "subset": instance.data["subset"], "fps": instance.context.data["fps"] } - + instance.data["family"] = "write" group_node = [x for x in instance if x.Class() == "Group"][0] deadlineChunkSize = 1 if "deadlineChunkSize" in group_node.knobs(): From 78f2da25cf81c494ee6f7d1979ddf98ab67784d6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 14:01:42 +0100 Subject: [PATCH 13/55] feat(nuke): extract thumbnail from before Review Data (mov, jpg) --- ...ct_review_data.py => extract_thumbnail.py} | 69 ++++++------------- 1 file changed, 22 insertions(+), 47 deletions(-) rename 
pype/plugins/nuke/publish/{extract_review_data.py => extract_thumbnail.py} (67%) diff --git a/pype/plugins/nuke/publish/extract_review_data.py b/pype/plugins/nuke/publish/extract_thumbnail.py similarity index 67% rename from pype/plugins/nuke/publish/extract_review_data.py rename to pype/plugins/nuke/publish/extract_thumbnail.py index 9bb4f93582..5740a90924 100644 --- a/pype/plugins/nuke/publish/extract_review_data.py +++ b/pype/plugins/nuke/publish/extract_thumbnail.py @@ -4,7 +4,7 @@ from avalon.nuke import lib as anlib import pyblish.api import pype -class ExtractReviewData(pype.api.Extractor): +class ExtractThumbnail(pype.api.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py @@ -12,7 +12,7 @@ class ExtractReviewData(pype.api.Extractor): """ order = pyblish.api.ExtractorOrder + 0.01 - label = "Extract Review Data" + label = "Extract Thumbnail" families = ["review"] hosts = ["nuke"] @@ -20,29 +20,19 @@ class ExtractReviewData(pype.api.Extractor): def process(self, instance): with anlib.maintained_selection(): - self.log.debug("creating staging dir:") - self.staging_dir(instance) - self.log.debug("instance: {}".format(instance)) self.log.debug("instance.data[families]: {}".format( instance.data["families"])) - # if "still" not in instance.data["families"]: - # self.render_review_representation(instance, - # representation="mov") - # self.render_review_representation(instance, - # representation="jpeg") - # else: - self.render_review_representation(instance, representation="jpeg") + self.render_thumbnail(instance) - def render_review_representation(self, - instance, - representation="mov"): + def render_thumbnail(self, instance): assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" 
temporary_nodes = [] + self.log.info("Getting staging dir...") stagingDir = instance.data[ 'representations'][0]["stagingDir"].replace("\\", "/") self.log.debug("StagingDir `{0}`...".format(stagingDir)) @@ -107,39 +97,24 @@ class ExtractReviewData(pype.api.Extractor): # create write node write_node = nuke.createNode("Write") + file = fhead + "jpeg" + name = "thumbnail" + path = os.path.join(stagingDir, file).replace("\\", "/") + instance.data["thumbnail"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("jpeg") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + tags = ["thumbnail"] - if representation in "mov": - file = fhead + "baked.mov" - name = "baked" - path = os.path.join(stagingDir, file).replace("\\", "/") - self.log.debug("Path: {}".format(path)) - instance.data["baked_colorspace_movie"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("mov") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["review", "delete"] - - elif representation in "jpeg": - file = fhead + "jpeg" - name = "thumbnail" - path = os.path.join(stagingDir, file).replace("\\", "/") - instance.data["thumbnail"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("jpeg") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["thumbnail"] - - # retime for - first_frame = int(last_frame) / 2 - last_frame = int(last_frame) / 2 + # retime for + first_frame = int(last_frame) / 2 + last_frame = int(last_frame) / 2 repre = { 'name': name, - 'ext': representation, + 'ext': "jpeg", 'files': file, "stagingDir": stagingDir, "frameStart": first_frame, @@ -154,9 +129,9 @@ class ExtractReviewData(pype.api.Extractor): self.log.debug("representations: {}".format(instance.data["representations"])) - # Clean up - for node in temporary_nodes: - nuke.delete(node) + # # Clean up + # for node in temporary_nodes: + # nuke.delete(node) def get_view_process_node(self): From 5754450b88e7cb5b894137d5daac0608df77ccfb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 14:02:37 +0100 Subject: [PATCH 14/55] fix(nuke): the path was only working with C: Also the replace was moved to Extract Review --- pype/plugins/nuke/publish/extract_review_data_lut.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py index 54013af11a..910b6ee19e 100644 --- a/pype/plugins/nuke/publish/extract_review_data_lut.py +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -45,8 +45,7 @@ class ExtractReviewLutData(pype.api.Extractor): # assign to representations instance.data["lutPath"] = os.path.join( - exporter.stagingDir, exporter.file).replace("\\", "/").replace( - "C:/", "C\\:/") + exporter.stagingDir, exporter.file).replace("\\", "/") instance.data["representations"] += data["representations"] self.log.debug( From f4a0b341ac5036945a9f47f0ebfe50cb030d564b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 14:03:16 +0100 Subject: [PATCH 15/55] fix(nuke): validator had wrong family --- pype/plugins/nuke/publish/validate_rendered_frames.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/nuke/publish/validate_rendered_frames.py b/pype/plugins/nuke/publish/validate_rendered_frames.py index e244a9b4b6..3887b5d5b7 
100644
--- a/pype/plugins/nuke/publish/validate_rendered_frames.py
+++ b/pype/plugins/nuke/publish/validate_rendered_frames.py
@@ -28,7 +28,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
     """ Validates file output. """

     order = pyblish.api.ValidatorOrder + 0.1
-    families = ["render.no"]
+    families = ["render"]

     label = "Validate rendered frame"
     hosts = ["nuke", "nukestudio"]

From 350dbda38ccf8d3ebf7c9322d5fa2e4a6b9cb000 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 14:41:50 +0100
Subject: [PATCH 16/55] feat(global): attaching conditions to preset tags
 instead of arguments

that is what they are for, isn't it? ;)
---
 pype/plugins/nuke/publish/extract_thumbnail.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py
index 5740a90924..126203603e 100644
--- a/pype/plugins/nuke/publish/extract_thumbnail.py
+++ 
b/pype/plugins/nuke/publish/extract_thumbnail.py @@ -80,7 +80,7 @@ class ExtractThumbnail(pype.api.Extractor): ref_node = self.nodes.get("Reformat", None) if ref_node: for k, v in ref_node: - self.log.debug("k,v: {0}:{1}".format(k,v)) + self.log.debug("k, v: {0}:{1}".format(k, v)) if isinstance(v, unicode): v = str(v) reformat_node[k].setValue(v) @@ -126,7 +126,8 @@ class ExtractThumbnail(pype.api.Extractor): # Render frames nuke.execute(write_node.name(), int(first_frame), int(last_frame)) - self.log.debug("representations: {}".format(instance.data["representations"])) + self.log.debug( + "representations: {}".format(instance.data["representations"])) # Clean up for node in temporary_nodes: From 305bd6a02181744668e0360f25ba70fb801b0f27 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 16:10:23 +0100 Subject: [PATCH 19/55] fix(global): not using correct preset tags fixing pixelAspect to by applied to letter box too --- pype/plugins/global/publish/extract_review.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index a002e1140d..9de4a966f3 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -31,7 +31,7 @@ class ExtractReview(pyblish.api.InstancePlugin): inst_data = instance.data fps = inst_data.get("fps") start_frame = inst_data.get("frameStart") - + pixel_aspect = instance.data["pixelAspect"] self.log.debug("Families In: `{}`".format(instance.data["families"])) # get representation and loop them @@ -147,13 +147,16 @@ class ExtractReview(pyblish.api.InstancePlugin): ) output_args = [] - output_args.extend(profile.get('codec', [])) + codec_args = profile.get('codec', []) + output_args.extend(codec_args) # preset's output data output_args.extend(profile.get('output', [])) # letter_box lb = profile.get('letter_box', None) if lb: + if "reformat" not in p_tags: + lb /= pixel_aspect output_args.append( "-filter:v drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb)) @@ -165,8 +168,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # scaling none square pixels and 1920 width # scale=320:-2 # to auto count height with output to be multiple of 2 - if "reformat" in tags: - pixel_aspect = instance.data["pixelAspect"] + if "reformat" in p_tags: scaling_arg = "scale=1920:'ceil((1920/{})/2)*2':flags=lanczos,setsar=1".format( pixel_aspect) vf_back = self.add_video_filter_args( @@ -176,7 +178,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # baking lut file application lut_path = instance.data.get("lutPath") - if lut_path and ("bake-lut" in tags): + if lut_path and ("bake-lut" in p_tags): # removing Gama info as it is all baked in lut gamma = next((g for g in input_args if "-gamma" in g), None) @@ -220,7 +222,7 @@ class ExtractReview(pyblish.api.InstancePlugin): 'files': repr_file, "tags": new_tags, "outputName": name, - "codec": profile.get('codec', []) + "codec": codec_args }) if repre_new.get('preview'): repre_new.pop("preview") From 9727a70722f11a41394f1dbdbedda6245623f23d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 19:00:42 +0100 Subject: [PATCH 20/55] feat(global): making collection filesequences for nuke --- .../global/publish/collect_filesequences.py | 20 ++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git 
a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index 39481e216b..d0ff5722a3 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -100,6 +100,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): label = "RenderedFrames" def process(self, context): + pixel_aspect = 1 + lut_path = None if os.environ.get("PYPE_PUBLISH_PATHS"): paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep) self.log.info("Collecting paths: {}".format(paths)) @@ -144,6 +146,12 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): self.log.info("setting session using metadata") api.Session.update(session) os.environ.update(session) + instance = metadata.get("instance") + if instance: + instance_family = instance.get("family") + pixel_aspect = instance.get("pixelAspect", 1) + lut_path = instance.get("lutPath", None) + else: # Search in directory @@ -181,6 +189,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): families.append("ftrack") if "review" not in families: families.append("review") + if "write" in instance_family: + families.append("write") for collection in collections: instance = context.create_instance(str(collection)) @@ -197,6 +207,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): start = data.get("frameStart", indices[0]) end = data.get("frameEnd", indices[-1]) + self.log.debug("Collected pixel_aspect:\n" + "{}".format(pixel_aspect)) + self.log.debug("type pixel_aspect:\n" + "{}".format(type(pixel_aspect))) + # root = os.path.normpath(root) # self.log.info("Source: {}}".format(data.get("source", ""))) @@ -212,8 +227,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "frameStart": start, "frameEnd": end, "fps": fps, - "source": data.get('source', '') + "source": data.get('source', ''), + "pixelAspect": pixel_aspect, }) + if lut_path: + instance.data.update({"lutPath": lut_path}) instance.append(collection) instance.context.data['fps'] = fps From 83f506d3eb94f830eea6a79be379a4f0d958d449 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 19:01:15 +0100 Subject: [PATCH 21/55] feat(nuke): removing families which are not needed anymore --- .../nuke/publish/extract_review_data_lut.py | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py index 910b6ee19e..dfc10952cd 100644 --- a/pype/plugins/nuke/publish/extract_review_data_lut.py +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -20,23 +20,23 @@ class ExtractReviewLutData(pype.api.Extractor): hosts = ["nuke"] def process(self, instance): - self.log.debug( - "_ representations: {}".format(instance.data["representations"])) - + families = instance.data["families"] self.log.info("Creating staging dir...") - - stagingDir = instance.data[ - 'representations'][0]["stagingDir"].replace("\\", "/") - instance.data["stagingDir"] = stagingDir - - instance.data['representations'][0]["tags"] = ["review"] + if "representations" in instance.data: + staging_dir = instance.data[ + "representations"][0]["stagingDir"].replace("\\", "/") + instance.data["stagingDir"] = staging_dir + instance.data["representations"][0]["tags"] = ["review"] + else: + instance.data["representations"] = [] + # get output path + render_path = instance.data['path'] + staging_dir = os.path.normpath(os.path.dirname(render_path)) + instance.data["stagingDir"] = 
staging_dir self.log.info( "StagingDir `{0}`...".format(instance.data["stagingDir"])) - if "representations" not in instance.data: - instance.data["representations"] = [] - with anlib.maintained_selection(): exporter = pnlib.Exporter_review_lut( self, instance @@ -48,6 +48,10 @@ class ExtractReviewLutData(pype.api.Extractor): exporter.stagingDir, exporter.file).replace("\\", "/") instance.data["representations"] += data["representations"] + if "render.farm" in families: + instance.data["families"].remove("review") + instance.data["families"].remove("ftrack") + self.log.debug( "_ lutPath: {}".format(instance.data["lutPath"])) self.log.debug( From 75145a4095b1ecaaa593488e6f82ecb1374cad19 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 19:01:43 +0100 Subject: [PATCH 22/55] feat(nuke): make thumbnail exporter available for render farm --- .../plugins/nuke/publish/extract_thumbnail.py | 50 ++++++++++++------- 1 file changed, 32 insertions(+), 18 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py index a58dad02f5..3886fda569 100644 --- a/pype/plugins/nuke/publish/extract_thumbnail.py +++ b/pype/plugins/nuke/publish/extract_thumbnail.py @@ -15,7 +15,7 @@ class ExtractThumbnail(pype.api.Extractor): order = pyblish.api.ExtractorOrder + 0.01 label = "Extract Thumbnail" - families = ["review"] + families = ["review", "render.farm"] hosts = ["nuke"] def process(self, instance): @@ -28,14 +28,24 @@ class ExtractThumbnail(pype.api.Extractor): self.render_thumbnail(instance) def render_thumbnail(self, instance): - assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" + node = instance[0] # group node + self.log.info("Creating staging dir...") + if "representations" not in instance.data: + staging_dir = instance.data[ + "representations"][0]["stagingDir"].replace("\\", "/") + instance.data["stagingDir"] = staging_dir + instance.data["representations"][0]["tags"] = ["review"] + else: + instance.data["representations"] = [] + # get output path + render_path = instance.data['path'] + staging_dir = os.path.normpath(os.path.dirname(render_path)) + instance.data["stagingDir"] = staging_dir + + self.log.info( + "StagingDir `{0}`...".format(instance.data["stagingDir"])) temporary_nodes = [] - self.log.info("Getting staging dir...") - stagingDir = instance.data[ - 'representations'][0]["stagingDir"].replace("\\", "/") - self.log.debug("StagingDir `{0}`...".format(stagingDir)) - collection = instance.data.get("collection", None) if collection: @@ -56,17 +66,21 @@ class ExtractThumbnail(pype.api.Extractor): if "#" in fhead: fhead = fhead.replace("#", "")[:-1] - rnode = nuke.createNode("Read") + path_render = os.path.join(staging_dir, fname).replace("\\", "/") + # check if file exist otherwise connect to write node + if os.path.isfile(path_render): + rnode = nuke.createNode("Read") - rnode["file"].setValue( - os.path.join(stagingDir, fname).replace("\\", "/")) + rnode["file"].setValue(path_render) - rnode["first"].setValue(first_frame) - rnode["origfirst"].setValue(first_frame) - rnode["last"].setValue(last_frame) - rnode["origlast"].setValue(last_frame) - temporary_nodes.append(rnode) - previous_node = rnode + rnode["first"].setValue(first_frame) + rnode["origfirst"].setValue(first_frame) + rnode["last"].setValue(last_frame) + rnode["origlast"].setValue(last_frame) + temporary_nodes.append(rnode) + previous_node = rnode + else: + previous_node = node # get input process and connect it to 
baking
        ipn = self.get_view_process_node()
@@ -98,7 +112,7 @@ class ExtractThumbnail(pype.api.Extractor):
        write_node = nuke.createNode("Write")
        file = fhead + "jpeg"
        name = "thumbnail"
-        path = os.path.join(stagingDir, file).replace("\\", "/")
+        path = os.path.join(staging_dir, file).replace("\\", "/")
        instance.data["thumbnail"] = path
        write_node["file"].setValue(path)
        write_node["file_type"].setValue("jpeg")
@@ -115,7 +129,7 @@ class ExtractThumbnail(pype.api.Extractor):
            'name': name,
            'ext': "jpeg",
            'files': file,
-            "stagingDir": stagingDir,
+            "stagingDir": staging_dir,
            "frameStart": first_frame,
            "frameEnd": last_frame,
            "anatomy_template": "render",

From 532e485e5d6588db1d4a0bfa9464da21fdc09cbc Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 19:01:59 +0100
Subject: [PATCH 23/55] feat(nuke): fixing deadline submitter
---
 .../nuke/publish/submit_nuke_deadline.py | 50 ++++++++-----------
 1 file changed, 22 insertions(+), 28 deletions(-)

diff --git a/pype/plugins/nuke/publish/submit_nuke_deadline.py b/pype/plugins/nuke/publish/submit_nuke_deadline.py
index 4044026b5e..d9207d2bfc 100644
--- a/pype/plugins/nuke/publish/submit_nuke_deadline.py
+++ b/pype/plugins/nuke/publish/submit_nuke_deadline.py
@@ -1,9 +1,7 @@
 import os
 import json
 import getpass
-
-import nuke
-
+
 from avalon import api
 from avalon.vendor import requests
 import re
@@ -27,40 +25,36 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):

     def process(self, instance):

-        node = None
-        for x in instance:
-            if x.Class() == "Write":
-                node = x
-
-        if node is None:
-            return
+        node = instance[0]
+        # for x in instance:
+        #     if x.Class() == "Write":
+        #         node = x
+        #
+        # if node is None:
+        #     return

         DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
                                            "http://localhost:8082")
         assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"

         context = instance.context
-        workspace = os.path.dirname(context.data["currentFile"])
-        filepath = None

-        # get path
-        path = nuke.filename(node)
-        output_dir = instance.data['outputDir']
+        # get output path
+        render_path = instance.data['path']
+        render_dir = os.path.normpath(os.path.dirname(render_path))

-        filepath = context.data["currentFile"]
+        script_path = context.data["currentFile"]

-        self.log.debug(filepath)
-
-        filename = os.path.basename(filepath)
+        script_name = os.path.basename(script_path)

         comment = context.data.get("comment", "")
-        dirname = os.path.join(workspace, "renders")
+
         deadline_user = context.data.get("deadlineUser", getpass.getuser())
-        jobname = "%s - %s" % (filename, instance.name)
+        jobname = "%s - %s" % (script_name, instance.name)

         ver = re.search(r"\d+\.\d+", context.data.get("hostVersion"))

         try:
             # Ensure render folder exists
-            os.makedirs(dirname)
+            os.makedirs(render_dir)
         except OSError:
             pass
@@ -71,7 +65,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
         payload = {
             "JobInfo": {
                 # Top-level group name
-                "BatchName": filename,
+                "BatchName": script_name,

                 # Job name, as seen in Monitor
                 "Name": jobname,
@@ -95,20 +89,20 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
             },
             "PluginInfo": {
                 # Input
-                "SceneFile": filepath,
+                "SceneFile": script_path,

                 # Output directory and filename
-                "OutputFilePath": dirname.replace("\\", "/"),
+                "OutputFilePath": render_dir.replace("\\", "/"),
                 # "OutputFilePrefix": render_variables["filename_prefix"],

                 # Mandatory for Deadline
                 "Version": ver.group(),

                 # Resolve relative references
-                "ProjectPath": workspace,
-
+                "ProjectPath": script_path,
+                "AWSAssetFile0": render_path,
                 # Only the specific write node is rendered.
-                "WriteNode": instance[0].name()
+                "WriteNode": node.name()
             },

             # Mandatory for Deadline, may be empty
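A quick illustration, not part of the series: the JobInfo/PluginInfo payload
assembled above is what Deadline's REST service consumes. A minimal sketch of
the submission step, assuming the plugin keeps using `requests` from
`avalon.vendor` and the `DEADLINE_REST_URL` shown in the diff; `/api/jobs` is
the standard Deadline Web Service endpoint, but the response handling below is
an assumption:

    # sketch only: POST the payload and surface a readable error on failure
    def submit_to_deadline(payload, deadline_rest_url):
        url = "{}/api/jobs".format(deadline_rest_url)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise RuntimeError(
                "Deadline submission failed: {}".format(response.text))
        # on success Deadline answers with the created job document as JSON
        return response.json()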
From ac875a9306257eb078f5f2d971ce53dbd1f1f834 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Fri, 6 Dec 2019 22:44:50 +0100
Subject: [PATCH 24/55] use automatic preset loading
---
 pype/plugins/global/publish/extract_review.py | 20 +++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index 9de4a966f3..96c01fffb2 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -22,16 +22,17 @@ class ExtractReview(pyblish.api.InstancePlugin):
     families = ["review"]
     hosts = ["nuke", "maya", "shell"]

+    outputs = {}
+    ext_filter = []
+
     def process(self, instance):
-        # adding plugin attributes from presets
-        publish_presets = config.get_presets()["plugins"]["global"]["publish"]
-        plugin_attrs = publish_presets[self.__class__.__name__]
-        output_profiles = plugin_attrs.get("outputs", {})
+
+        output_profiles = self.outputs or {}

         inst_data = instance.data
         fps = inst_data.get("fps")
         start_frame = inst_data.get("frameStart")
-        pixel_aspect = instance.data["pixelAspect"]
+        pixel_aspect = instance.data.get("pixelAspect") or 1
         self.log.debug("Families In: `{}`".format(instance.data["families"]))

         # get representation and loop them
@@ -40,7 +41,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
         # filter out mov and img sequences
         representations_new = representations[:]
         for repre in representations:
-            if repre['ext'] in plugin_attrs["ext_filter"]:
+            if repre['ext'] in self.ext_filter:
                 tags = repre.get("tags", [])

                 self.log.info("Try repre: {}".format(repre))
@@ -92,8 +93,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
                 self.log.info("p_tags: `{}`".format(p_tags))
                 # add families
                 [instance.data["families"].append(t)
-                    for t in p_tags
-                    if t not in instance.data["families"]]
+                 for t in p_tags
+                 if t not in instance.data["families"]]
+
                 # add to
                 [new_tags.append(t) for t in p_tags
                  if t not in new_tags]

From 0f50ead66153922a856c1ebf1623d88db97464a9 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Fri, 6 Dec 2019 22:47:54 +0100
Subject: [PATCH 25/55] remove unnecessary import
---
 pype/plugins/global/publish/extract_review.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index 96c01fffb2..7554c080a0 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -3,7 +3,6 @@ import os
 import pyblish.api
 import clique
 import pype.api
-from pypeapp import config
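For orientation, not part of the series: the two patches above swap
ExtractReview's explicit `config.get_presets()` lookup for `outputs` and
`ext_filter` class attributes that pype's preset loader fills in. A rough
sketch of the preset data this implies; the key names `codec`, `output`,
`tags` and `letter_box` come from the profile reads visible in the diffs,
while the concrete values and the `h264` profile name are purely illustrative:

    # hypothetical ExtractReview preset block (values are examples only)
    ExtractReview = {
        "ext_filter": ["mov", "mp4", "exr", "dpx"],
        "outputs": {
            "h264": {
                "codec": ["-codec:v libx264"],
                "output": ["-pix_fmt yuv420p"],
                "tags": ["review", "reformat"],
                "letter_box": 2.35
            }
        }
    }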
a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 9de4a966f3..936ae74c6f 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -32,6 +32,8 @@ class ExtractReview(pyblish.api.InstancePlugin): fps = inst_data.get("fps") start_frame = inst_data.get("frameStart") pixel_aspect = instance.data["pixelAspect"] + resolution_width = instance.data["resolutionWidth"] + resolution_height = instance.data["resolutionHeight"] self.log.debug("Families In: `{}`".format(instance.data["families"])) # get representation and loop them @@ -167,10 +169,9 @@ class ExtractReview(pyblish.api.InstancePlugin): output_args.append(full_output_path) # scaling none square pixels and 1920 width - # scale=320:-2 # to auto count height with output to be multiple of 2 if "reformat" in p_tags: scaling_arg = "scale=1920:'ceil((1920/{})/2)*2':flags=lanczos,setsar=1".format( - pixel_aspect) + (lb/pixel_aspect * (resolution_width / resolution_height))) vf_back = self.add_video_filter_args( output_args, scaling_arg) # add it to output_args From 8bf0874a4ad9368a21dde16be9759c906afde9ef Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 12:12:27 +0100 Subject: [PATCH 27/55] fix(global): LetterBox can be 0 rather then None --- pype/plugins/global/publish/extract_review.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 77b00d52ec..a3707f4e59 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -156,8 +156,8 @@ class ExtractReview(pyblish.api.InstancePlugin): output_args.extend(profile.get('output', [])) # letter_box - lb = profile.get('letter_box', None) - if lb: + lb = profile.get('letter_box', 0) + if lb is not 0: if "reformat" not in p_tags: lb /= pixel_aspect output_args.append( From 101c3bc190d2785e9d337b4f4e1d4b0b7945d000 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 13:25:34 +0100 Subject: [PATCH 28/55] fix(nuke): backward compatibility if avalon knob `ak:` --- pype/plugins/nuke/publish/collect_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index fbff28b282..3f3042ec46 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -34,7 +34,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): # get data from avalon knob self.log.debug("node[name]: {}".format(node['name'].value())) - avalon_knob_data = get_avalon_knob_data(node) + avalon_knob_data = get_avalon_knob_data(node, ["avalon:", "ak:"]) self.log.debug("avalon_knob_data: {}".format(avalon_knob_data)) From 1e76ec6fd48d0e0b303140eb5d335921014a6f91 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 14:57:10 +0100 Subject: [PATCH 29/55] fix(global): letter box was not applying on 1929x1080 --- pype/plugins/global/publish/extract_review.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index bf4682b26e..11b7b6ee8a 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -156,7 +156,7 @@ class ExtractReview(pyblish.api.InstancePlugin): lb = profile.get('letter_box', None) if lb: 
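                # (Reading of the expression below, an assumption rather than
                # anything stated in the patch: lb is the target letterbox
                # aspect ratio, each drawbox call paints one horizontal black
                # bar, and the bar height round((ih - iw*(1/lb))/2) is the
                # leftover after fitting an lb-aspect picture into the frame,
                # split between top and bottom.)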
output_args.append( - "-filter:v drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb)) + "-filter:v scale=1920x1080:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb)) # In case audio is longer than video. output_args.append("-shortest") From 2dd940078ebd23631fcf1eb662754a394f230c7d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 14:57:34 +0100 Subject: [PATCH 30/55] fix(global): missing coma --- pype/plugins/global/publish/integrate_new.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 9021a3f997..faade613f2 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -71,7 +71,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "yetiRig", "yeticache", "nukenodes", - "gizmo" + "gizmo", "source", "matchmove", "image" @@ -414,7 +414,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): } if sequence_repre and repre.get("frameStart"): - representation['context']['frame'] = src_padding_exp % repre.get("frameStart") + representation['context']['frame'] = src_padding_exp % int(repre.get("frameStart")) self.log.debug("__ representation: {}".format(representation)) destination_list.append(dst) From 399d9e8b7531b3202dab5c884af7118d57ce94b3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 14:58:14 +0100 Subject: [PATCH 31/55] fix(nuke): instance didnt detect other then `write` family --- pype/plugins/nuke/publish/collect_instances.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index fbff28b282..74cb0a8226 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -30,7 +30,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): continue except Exception as E: self.log.warning(E) - continue + # get data from avalon knob self.log.debug("node[name]: {}".format(node['name'].value())) @@ -84,10 +84,16 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): node.end() family = avalon_knob_data["family"] - families = [avalon_knob_data["families"]] + families = avalon_knob_data.get("families") + if families: + families = [families] + else: + families = [family] if node.Class() not in "Read": - if node["render"].value(): + if "render" not in node.knobs().keys(): + families.insert(0, family) + elif node["render"].value(): self.log.info("flagged for render") add_family = "render.local" # dealing with local/farm rendering From 5b89eca421f543e04bd9fcb518327fbe7251fe39 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 14:58:38 +0100 Subject: [PATCH 32/55] fix(nuke): families mishmash --- pype/plugins/nuke/publish/collect_writes.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index ba8a0534b1..cd3dd67bef 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -11,7 +11,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.1 label = "Collect Writes" hosts = 
["nuke", "nukeassist"] - families = ["render", "render.local", "render.farm"] + families = ["write"] def process(self, instance): @@ -95,7 +95,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "frameEnd": last_frame - handle_end, "version": int(version), "colorspace": node["colorspace"].value(), - "families": [instance.data["family"]] + instance.data["families"], + "families": [instance.data["family"]], "subset": instance.data["subset"], "fps": instance.context.data["fps"] } @@ -109,6 +109,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): if "deadlinePriority" in group_node.knobs(): deadlinePriority = group_node["deadlinePriority"].value() + families = [f for f in instance.data["families"] if "write" not in f] instance.data.update({ "versionData": version_data, "path": path, @@ -119,10 +120,13 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "frameStart": first_frame, "frameEnd": last_frame, "outputType": output_type, + "family": "write", + "families": families, "colorspace": node["colorspace"].value(), "deadlineChunkSize": deadlineChunkSize, "deadlinePriority": deadlinePriority, "subsetGroup": "renders" }) + self.log.debug("instance.data: {}".format(instance.data)) From ac1fab9bdd42242dc77374fc82ca61b20088b01a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 14:59:00 +0100 Subject: [PATCH 33/55] fix(nuke): path with hashes error --- pype/plugins/nuke/publish/extract_review_data.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/plugins/nuke/publish/extract_review_data.py b/pype/plugins/nuke/publish/extract_review_data.py index 791b9d7969..f63ca4d426 100644 --- a/pype/plugins/nuke/publish/extract_review_data.py +++ b/pype/plugins/nuke/publish/extract_review_data.py @@ -69,6 +69,9 @@ class ExtractReviewData(pype.api.Extractor): first_frame = instance.data.get("frameStart", None) last_frame = instance.data.get("frameEnd", None) + if "#" in fhead: + fhead = fhead.replace("#", "")[:-1] + rnode = nuke.createNode("Read") rnode["file"].setValue( From a60e02e7c0dea9372acd7d0032af051f41587681 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 14:59:22 +0100 Subject: [PATCH 34/55] fix(nuke): family mishmash --- pype/plugins/nuke/publish/validate_rendered_frames.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/nuke/publish/validate_rendered_frames.py b/pype/plugins/nuke/publish/validate_rendered_frames.py index e244a9b4b6..3887b5d5b7 100644 --- a/pype/plugins/nuke/publish/validate_rendered_frames.py +++ b/pype/plugins/nuke/publish/validate_rendered_frames.py @@ -28,7 +28,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): """ Validates file output. """ order = pyblish.api.ValidatorOrder + 0.1 - families = ["render.no"] + families = ["render"] label = "Validate rendered frame" hosts = ["nuke", "nukestudio"] From 482addd6439265ed71e7c3efb4e8d46470af77e3 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 9 Dec 2019 16:52:15 +0100 Subject: [PATCH 35/55] (maya) fixed correct colorspace for linearized textures during look extraction --- pype/plugins/maya/publish/extract_look.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index 5f3c1b33f3..ad43e02d21 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -231,11 +231,12 @@ class ExtractLook(pype.api.Extractor): # ensure after context it's still the original value. 
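            # (Context for the change below, per this commit's message: when
            # extraction linearized a texture, the published file is scene
            # linear, so its file node must be remapped to read "Raw" instead
            # of the original colorspace.)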
color_space_attr = resource["node"] + ".colorSpace" color_space = cmds.getAttr(color_space_attr) - + if files_metadata[source]["color_space"] == "raw": + # set colorpsace to raw if we linearized it + color_space = "Raw" # Remap file node filename to destination attr = resource["attribute"] remap[attr] = destinations[source] - remap[color_space_attr] = color_space self.log.info("Finished remapping destinations ...") @@ -310,6 +311,12 @@ class ExtractLook(pype.api.Extractor): # Source hash for the textures instance.data["sourceHashes"] = hashes + """ + self.log.info("Returning colorspaces to their original values ...") + for attr, value in remap.items(): + self.log.info(" - {}: {}".format(attr, value)) + cmds.setAttr(attr, value, type="string") + """ self.log.info("Extracted instance '%s' to: %s" % (instance.name, maya_path)) From 3469becf6e298744a4b035022699cc81a7a34da8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Dec 2019 17:46:32 +0100 Subject: [PATCH 36/55] remove `delete_asset_by_name` action which may cause issues on duplicated names or not syncrhonized entities --- .../actions/action_delete_asset_byname.py | 175 ------------------ 1 file changed, 175 deletions(-) delete mode 100644 pype/ftrack/actions/action_delete_asset_byname.py diff --git a/pype/ftrack/actions/action_delete_asset_byname.py b/pype/ftrack/actions/action_delete_asset_byname.py deleted file mode 100644 index c05c135991..0000000000 --- a/pype/ftrack/actions/action_delete_asset_byname.py +++ /dev/null @@ -1,175 +0,0 @@ -import os -import sys -import logging -import argparse -import ftrack_api -from pype.ftrack import BaseAction -from pype.ftrack.lib.io_nonsingleton import DbConnector - - -class AssetsRemover(BaseAction): - '''Edit meta data action.''' - - #: Action identifier. - identifier = 'remove.assets' - #: Action label. - label = "Pype Admin" - variant = '- Delete Assets by Name' - #: Action description. - description = 'Removes assets from Ftrack and Avalon db with all childs' - #: roles that are allowed to register this action - role_list = ['Pypeclub', 'Administrator'] - icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format( - os.environ.get('PYPE_STATICS_SERVER', '') - ) - #: Db - db = DbConnector() - - def discover(self, session, entities, event): - ''' Validation ''' - if len(entities) != 1: - return False - - valid = ["show", "task"] - entityType = event["data"]["selection"][0].get("entityType", "") - if entityType.lower() not in valid: - return False - - return True - - def interface(self, session, entities, event): - if not event['data'].get('values', {}): - title = 'Enter Asset names to delete' - - items = [] - for i in range(15): - - item = { - 'label': 'Asset {}'.format(i+1), - 'name': 'asset_{}'.format(i+1), - 'type': 'text', - 'value': '' - } - items.append(item) - - return { - 'items': items, - 'title': title - } - - def launch(self, session, entities, event): - entity = entities[0] - if entity.entity_type.lower() != 'Project': - project = entity['project'] - else: - project = entity - - if 'values' not in event['data']: - return - - values = event['data']['values'] - if len(values) <= 0: - return { - 'success': True, - 'message': 'No Assets to delete!' 
- } - - asset_names = [] - - for k, v in values.items(): - if v.replace(' ', '') != '': - asset_names.append(v) - - self.db.install() - self.db.Session['AVALON_PROJECT'] = project["full_name"] - - assets = self.find_assets(asset_names) - - all_ids = [] - for asset in assets: - all_ids.append(asset['_id']) - all_ids.extend(self.find_child(asset)) - - if len(all_ids) == 0: - self.db.uninstall() - return { - 'success': True, - 'message': 'None of assets' - } - - delete_query = {'_id': {'$in': all_ids}} - self.db.delete_many(delete_query) - - self.db.uninstall() - return { - 'success': True, - 'message': 'All assets were deleted!' - } - - def find_child(self, entity): - output = [] - id = entity['_id'] - visuals = [x for x in self.db.find({'data.visualParent': id})] - assert len(visuals) == 0, 'This asset has another asset as child' - childs = self.db.find({'parent': id}) - for child in childs: - output.append(child['_id']) - output.extend(self.find_child(child)) - return output - - def find_assets(self, asset_names): - assets = [] - for name in asset_names: - entity = self.db.find_one({ - 'type': 'asset', - 'name': name - }) - if entity is not None and entity not in assets: - assets.append(entity) - return assets - - -def register(session, plugins_presets={}): - '''Register plugin. Called when used as an plugin.''' - - AssetsRemover(session, plugins_presets).register() - - -def main(arguments=None): - '''Set up logging and register action.''' - if arguments is None: - arguments = [] - - parser = argparse.ArgumentParser() - # Allow setting of logging level from arguments. - loggingLevels = {} - for level in ( - logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, - logging.ERROR, logging.CRITICAL - ): - loggingLevels[logging.getLevelName(level).lower()] = level - - parser.add_argument( - '-v', '--verbosity', - help='Set the logging output verbosity.', - choices=loggingLevels.keys(), - default='info' - ) - namespace = parser.parse_args(arguments) - - # Set up basic logging - logging.basicConfig(level=loggingLevels[namespace.verbosity]) - - session = ftrack_api.Session() - - register(session) - - # Wait for events - logging.info( - 'Registered actions and listening for events. Use Ctrl-C to abort.' - ) - session.event_hub.wait() - - -if __name__ == '__main__': - raise SystemExit(main(sys.argv[1:])) From 5df3afd0bc7ec0b1b25d440e552bd8a95afdd64b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Dec 2019 17:48:57 +0100 Subject: [PATCH 37/55] delete asset is totally rewritten --- pype/ftrack/actions/action_delete_asset.py | 794 ++++++++++++++------- 1 file changed, 523 insertions(+), 271 deletions(-) diff --git a/pype/ftrack/actions/action_delete_asset.py b/pype/ftrack/actions/action_delete_asset.py index df760f7c21..7eb9126fca 100644 --- a/pype/ftrack/actions/action_delete_asset.py +++ b/pype/ftrack/actions/action_delete_asset.py @@ -1,354 +1,606 @@ import os -import sys -import logging +import collections +import uuid +from datetime import datetime +from queue import Queue + from bson.objectid import ObjectId -import argparse -import ftrack_api from pype.ftrack import BaseAction from pype.ftrack.lib.io_nonsingleton import DbConnector -class DeleteAsset(BaseAction): +class DeleteAssetSubset(BaseAction): '''Edit meta data action.''' #: Action identifier. - identifier = 'delete.asset' + identifier = "delete.asset.subset" #: Action label. - label = 'Delete Asset/Subsets' + label = "Delete Asset/Subsets" #: Action description. 
- description = 'Removes from Avalon with all childs and asset from Ftrack' - icon = '{}/ftrack/action_icons/DeleteAsset.svg'.format( - os.environ.get('PYPE_STATICS_SERVER', '') + description = "Removes from Avalon with all childs and asset from Ftrack" + icon = "{}/ftrack/action_icons/DeleteAsset.svg".format( + os.environ.get("PYPE_STATICS_SERVER", "") ) #: roles that are allowed to register this action - role_list = ['Pypeclub', 'Administrator'] - #: Db - db = DbConnector() + role_list = ["Pypeclub", "Administrator", "Project Manager"] + #: Db connection + dbcon = DbConnector() - value = None + splitter = {"type": "label", "value": "---"} + action_data_by_id = {} + asset_prefix = "asset:" + subset_prefix = "subset:" def discover(self, session, entities, event): - ''' Validation ''' - if len(entities) != 1: - return False + """ Validation """ + task_ids = [] + for ent_info in event["data"]["selection"]: + entType = ent_info.get("entityType", "") + if entType == "task": + task_ids.append(ent_info["entityId"]) - valid = ["task"] - entityType = event["data"]["selection"][0].get("entityType", "") - if entityType.lower() not in valid: - return False - - return True + for entity in entities: + ftrack_id = entity["id"] + if ftrack_id not in task_ids: + continue + if entity.entity_type.lower() != "task": + return True + return False def _launch(self, event): - self.reset_session() try: - self.db.install() args = self._translate_event( self.session, event ) + if "values" not in event["data"]: + self.dbcon.install() + return self._interface(self.session, *args) - interface = self._interface( - self.session, *args - ) - - confirmation = self.confirm_delete( - True, *args - ) - - if interface: - return interface - + confirmation = self.confirm_delete(*args) if confirmation: return confirmation + self.dbcon.install() response = self.launch( self.session, *args ) finally: - self.db.uninstall() + self.dbcon.uninstall() return self._handle_result( self.session, response, *args ) def interface(self, session, entities, event): - if not event['data'].get('values', {}): - self.attempt = 1 - items = [] - entity = entities[0] - title = 'Choose items to delete from "{}"'.format(entity['name']) - project = entity['project'] + self.show_message(event, "Preparing data...", True) + items = [] + title = "Choose items to delete" - self.db.Session['AVALON_PROJECT'] = project["full_name"] + # Filter selection and get ftrack ids + selection = event["data"].get("selection") or [] + ftrack_ids = [] + project_in_selection = False + for entity in selection: + entity_type = (entity.get("entityType") or "").lower() + if entity_type != "task": + if entity_type == "show": + project_in_selection = True + continue - av_entity = self.db.find_one({ - 'type': 'asset', - 'name': entity['name'] + ftrack_id = entity.get("entityId") + if not ftrack_id: + continue + + ftrack_ids.append(ftrack_id) + + if project_in_selection: + msg = "It is not possible to use this action on project entity." + self.show_message(event, msg, True) + + # Filter event even more (skip task entities) + # - task entities are not relevant for avalon + for entity in entities: + ftrack_id = entity["id"] + if ftrack_id not in ftrack_ids: + continue + + if entity.entity_type.lower() == "task": + ftrack_ids.remove(ftrack_id) + + if not ftrack_ids: + # It is bug if this happens! 
+ return { + "success": False, + "message": "Invalid selection for this action (Bug)" + } + + if entities[0].entity_type.lower() == "project": + project = entities[0] + else: + project = entities[0]["project"] + + project_name = project["full_name"] + self.dbcon.Session["AVALON_PROJECT"] = project_name + + selected_av_entities = self.dbcon.find({ + "type": "asset", + "data.ftrackId": {"$in": ftrack_ids} + }) + selected_av_entities = [ent for ent in selected_av_entities] + if not selected_av_entities: + return { + "success": False, + "message": "Didn't found entities in avalon" + } + + # Remove cached action older than 2 minutes + old_action_ids = [] + for id, data in self.action_data_by_id.items(): + created_at = data.get("created_at") + if not created_at: + old_action_ids.append(id) + continue + cur_time = datetime.now() + existing_in_sec = (created_at - cur_time).total_seconds() + if existing_in_sec > 60 * 2: + old_action_ids.append(id) + + for id in old_action_ids: + self.action_data_by_id.pop(id, None) + + # Store data for action id + action_id = str(uuid.uuid1()) + self.action_data_by_id[action_id] = { + "attempt": 1, + "created_at": datetime.now(), + "project_name": project_name, + "subset_ids_by_name": {}, + "subset_ids_by_parent": {} + } + + id_item = { + "type": "hidden", + "name": "action_id", + "value": action_id + } + + items.append(id_item) + asset_ids = [ent["_id"] for ent in selected_av_entities] + subsets_for_selection = self.dbcon.find({ + "type": "subset", + "parent": {"$in": asset_ids} + }) + + asset_ending = "" + if len(selected_av_entities) > 1: + asset_ending = "s" + + asset_title = { + "type": "label", + "value": "# Delete asset{}:".format(asset_ending) + } + asset_note = { + "type": "label", + "value": ( + "
NOTE: Action will delete checked entities"
+                " in Ftrack and Avalon with all children entities and"
+                " published content.
" + ) + } + + items.append(asset_title) + items.append(asset_note) + + asset_items = collections.defaultdict(list) + for asset in selected_av_entities: + ent_path_items = [project_name] + ent_path_items.extend(asset.get("data", {}).get("parents") or []) + ent_path_to_parent = "/".join(ent_path_items) + "/" + asset_items[ent_path_to_parent].append(asset) + + for asset_parent_path, assets in sorted(asset_items.items()): + items.append({ + "type": "label", + "value": "## - {}".format(asset_parent_path) }) - - if av_entity is None: - return { - 'success': False, - 'message': 'Didn\'t found assets in avalon' - } - - asset_label = { - 'type': 'label', - 'value': '## Delete whole asset: ##' - } - asset_item = { - 'label': av_entity['name'], - 'name': 'whole_asset', - 'type': 'boolean', - 'value': False - } - splitter = { - 'type': 'label', - 'value': '{}'.format(200*"-") - } - subset_label = { - 'type': 'label', - 'value': '## Subsets: ##' - } - if av_entity is not None: - items.append(asset_label) - items.append(asset_item) - items.append(splitter) - - all_subsets = self.db.find({ - 'type': 'subset', - 'parent': av_entity['_id'] + for asset in assets: + items.append({ + "label": asset["name"], + "name": "{}{}".format( + self.asset_prefix, str(asset["_id"]) + ), + "type": 'boolean', + "value": False }) - subset_items = [] - for subset in all_subsets: - item = { - 'label': subset['name'], - 'name': str(subset['_id']), - 'type': 'boolean', - 'value': False - } - subset_items.append(item) - if len(subset_items) > 0: - items.append(subset_label) - items.extend(subset_items) - else: - return { - 'success': False, - 'message': 'Didn\'t found assets in avalon' - } + subset_ids_by_name = collections.defaultdict(list) + subset_ids_by_parent = collections.defaultdict(list) + for subset in subsets_for_selection: + subset_id = subset["_id"] + name = subset["name"] + parent_id = subset["parent"] + subset_ids_by_name[name].append(subset_id) + subset_ids_by_parent[parent_id].append(subset_id) + if not subset_ids_by_name: return { - 'items': items, - 'title': title + "items": items, + "title": title } - def confirm_delete(self, first_attempt, entities, event): - if first_attempt is True: - if 'values' not in event['data']: - return + subset_ending = "" + if len(subset_ids_by_name.keys()) > 1: + subset_ending = "s" - values = event['data']['values'] + subset_title = { + "type": "label", + "value": "# Subset{} to delete:".format(subset_ending) + } + subset_note = { + "type": "label", + "value": ( + "
WARNING: Subset{} will be removed"
+                " for all selected entities.
" + ).format(subset_ending) + } - if len(values) <= 0: - return - if 'whole_asset' not in values: - return - else: - values = self.values + items.append(self.splitter) + items.append(subset_title) + items.append(subset_note) - title = 'Confirmation of deleting {}' - if values['whole_asset'] is True: - title = title.format( - 'whole asset {}'.format( - entities[0]['name'] - ) - ) - else: - subsets = [] - for key, value in values.items(): - if value is True: - subsets.append(key) - len_subsets = len(subsets) - if len_subsets == 0: + for name in subset_ids_by_name: + items.append({ + "label": "{}".format(name), + "name": "{}{}".format(self.subset_prefix, name), + "type": "boolean", + "value": False + }) + + self.action_data_by_id[action_id]["subset_ids_by_parent"] = ( + subset_ids_by_parent + ) + self.action_data_by_id[action_id]["subset_ids_by_name"] = ( + subset_ids_by_name + ) + + return { + "items": items, + "title": title + } + + def confirm_delete(self, entities, event): + values = event["data"]["values"] + action_id = values.get("action_id") + spec_data = self.action_data_by_id.get(action_id) + if not spec_data: + # it is a bug if this happens! + return { + "success": False, + "message": "Something bad has happened. Please try again." + } + + # Process Delete confirmation + delete_key = values.get("delete_key") + if delete_key: + delete_key = delete_key.lower().strip() + # Go to launch part if user entered `delete` + if delete_key == "delete": + return + # Skip whole process if user didn't enter any text + elif delete_key == "": + self.action_data_by_id.pop(action_id, None) return { - 'success': True, - 'message': 'Nothing was selected to delete' + "success": True, + "message": "Deleting cancelled (delete entry was empty)" } - elif len_subsets == 1: - title = title.format( - '{} subset'.format(len_subsets) - ) - else: - title = title.format( - '{} subsets'.format(len_subsets) - ) + # Get data to show again + to_delete = spec_data["to_delete"] + + else: + to_delete = collections.defaultdict(list) + for key, value in values.items(): + if not value: + continue + if key.startswith(self.asset_prefix): + _key = key.replace(self.asset_prefix, "") + to_delete["assets"].append(_key) + + elif key.startswith(self.subset_prefix): + _key = key.replace(self.subset_prefix, "") + to_delete["subsets"].append(_key) + + self.action_data_by_id[action_id]["to_delete"] = to_delete + + asset_to_delete = len(to_delete.get("assets") or []) > 0 + subset_to_delete = len(to_delete.get("subsets") or []) > 0 + + if not asset_to_delete and not subset_to_delete: + self.action_data_by_id.pop(action_id, None) + return { + "success": True, + "message": "Nothing was selected to delete" + } + + attempt = spec_data["attempt"] + if attempt > 3: + self.action_data_by_id.pop(action_id, None) + return { + "success": False, + "message": "You didn't enter \"DELETE\" properly 3 times!" 
+ } + + self.action_data_by_id[action_id]["attempt"] += 1 + + title = "Confirmation of deleting" + + if asset_to_delete: + asset_len = len(to_delete["assets"]) + asset_ending = "" + if asset_len > 1: + asset_ending = "s" + title += " {} Asset{}".format(asset_len, asset_ending) + if subset_to_delete: + title += " and" + + if subset_to_delete: + sub_len = len(to_delete["subsets"]) + type_ending = "" + sub_ending = "" + if sub_len == 1: + subset_ids_by_name = spec_data["subset_ids_by_name"] + if len(subset_ids_by_name[to_delete["subsets"][0]]) > 1: + sub_ending = "s" + + elif sub_len > 1: + type_ending = "s" + sub_ending = "s" + + title += " {} type{} of subset{}".format( + sub_len, type_ending, sub_ending + ) - self.values = values items = [] + id_item = {"type": "hidden", "name": "action_id", "value": action_id} delete_label = { 'type': 'label', 'value': '# Please enter "DELETE" to confirm #' } - delete_item = { - 'name': 'delete_key', - 'type': 'text', - 'value': '', - 'empty_text': 'Type Delete here...' + "name": "delete_key", + "type": "text", + "value": "", + "empty_text": "Type Delete here..." } + + items.append(id_item) items.append(delete_label) items.append(delete_item) return { - 'items': items, - 'title': title + "items": items, + "title": title } def launch(self, session, entities, event): - if 'values' not in event['data']: - return - - values = event['data']['values'] - if len(values) <= 0: - return - if 'delete_key' not in values: - return - - if values['delete_key'].lower() != 'delete': - if values['delete_key'].lower() == '': - return { - 'success': False, - 'message': 'Deleting cancelled' - } - if self.attempt < 3: - self.attempt += 1 - return_dict = self.confirm_delete(False, entities, event) - return_dict['title'] = '{} ({} attempt)'.format( - return_dict['title'], self.attempt - ) - return return_dict + self.show_message(event, "Processing...", True) + values = event["data"]["values"] + action_id = values.get("action_id") + spec_data = self.action_data_by_id.get(action_id) + if not spec_data: + # it is a bug if this happens! return { - 'success': False, - 'message': 'You didn\'t enter "DELETE" properly 3 times!' + "success": False, + "message": "Something bad has happened. Please try again." 
} - entity = entities[0] - project = entity['project'] + report_messages = collections.defaultdict(list) - self.db.Session['AVALON_PROJECT'] = project["full_name"] + project_name = spec_data["project_name"] + to_delete = spec_data["to_delete"] + self.dbcon.Session["AVALON_PROJECT"] = project_name - all_ids = [] - if self.values.get('whole_asset', False) is True: - av_entity = self.db.find_one({ - 'type': 'asset', - 'name': entity['name'] + assets_to_delete = to_delete.get("assets") or [] + subsets_to_delete = to_delete.get("subsets") or [] + + # Convert asset ids to ObjectId obj + assets_to_delete = [ObjectId(id) for id in assets_to_delete if id] + + subset_ids_by_parent = spec_data["subset_ids_by_parent"] + subset_ids_by_name = spec_data["subset_ids_by_name"] + + subset_ids_to_archive = [] + asset_ids_to_archive = [] + ftrack_ids_to_delete = [] + if len(assets_to_delete) > 0: + # Prepare data when deleting whole avalon asset + avalon_assets = self.dbcon.find({"type": "asset"}) + avalon_assets_by_parent = collections.defaultdict(list) + for asset in avalon_assets: + parent_id = asset["data"]["visualParent"] + avalon_assets_by_parent[parent_id].append(asset) + if asset["_id"] in assets_to_delete: + ftrack_id = asset["data"]["ftrackId"] + ftrack_ids_to_delete.append(ftrack_id) + + children_queue = Queue() + for mongo_id in assets_to_delete: + children_queue.put(mongo_id) + + while not children_queue.empty(): + mongo_id = children_queue.get() + if mongo_id in asset_ids_to_archive: + continue + + asset_ids_to_archive.append(mongo_id) + for subset_id in subset_ids_by_parent.get(mongo_id, []): + if subset_id not in subset_ids_to_archive: + subset_ids_to_archive.append(subset_id) + + children = avalon_assets_by_parent.get(mongo_id) + if not children: + continue + + for child in children: + child_id = child["_id"] + if child_id not in asset_ids_to_archive: + children_queue.put(child_id) + + # Prepare names of assets in ftrack and ids of subsets in mongo + asset_names_to_delete = [] + if len(subsets_to_delete) > 0: + for name in subsets_to_delete: + asset_names_to_delete.append(name) + for subset_id in subset_ids_by_name[name]: + if subset_id in subset_ids_to_archive: + continue + subset_ids_to_archive.append(subset_id) + + # Get ftrack ids of entities where will be delete only asset + not_deleted_entities_id = [] + ftrack_id_name_map = {} + if asset_names_to_delete: + for entity in entities: + ftrack_id = entity["id"] + ftrack_id_name_map[ftrack_id] = entity["name"] + if ftrack_id in ftrack_ids_to_delete: + continue + not_deleted_entities_id.append(ftrack_id) + + mongo_proc_txt = "MongoProcessing: " + ftrack_proc_txt = "Ftrack processing: " + if asset_ids_to_archive: + self.log.debug("{}Archivation of assets <{}>".format( + mongo_proc_txt, + ", ".join([str(id) for id in asset_ids_to_archive]) + )) + self.dbcon.update_many( + { + "_id": {"$in": asset_ids_to_archive}, + "type": "asset" + }, + {"$set": {"type": "archived_asset"}} + ) + + if subset_ids_to_archive: + self.log.debug("{}Archivation of subsets <{}>".format( + mongo_proc_txt, + ", ".join([str(id) for id in subset_ids_to_archive]) + )) + self.dbcon.update_many( + { + "_id": {"$in": subset_ids_to_archive}, + "type": "subset" + }, + {"$set": {"type": "archived_subset"}} + ) + + if ftrack_ids_to_delete: + self.log.debug("{}Deleting Ftrack Entities <{}>".format( + ftrack_proc_txt, ", ".join(ftrack_ids_to_delete) + )) + + joined_ids_to_delete = ", ".join( + ["\"{}\"".format(id) for id in ftrack_ids_to_delete] + ) + ftrack_ents_to_delete = 
self.session.query(
+                "select id, link from TypedContext where id in ({})".format(
+                    joined_ids_to_delete
+                )
+            ).all()
+            for entity in ftrack_ents_to_delete:
+                self.session.delete(entity)
+                try:
+                    self.session.commit()
+                except Exception:
+                    ent_path = "/".join(
+                        [ent["name"] for ent in entity["link"]]
+                    )
+                    msg = "Failed to delete entity"
+                    report_messages[msg].append(ent_path)
+                    self.session.rollback()
+                    self.log.warning(
+                        "{} <{}>".format(msg, ent_path),
+                        exc_info=True
+                    )
+
+        if not_deleted_entities_id:
+            joined_not_deleted = ", ".join([
+                "\"{}\"".format(ftrack_id)
+                for ftrack_id in not_deleted_entities_id
+            ])
+            joined_asset_names = ", ".join([
+                "\"{}\"".format(name)
+                for name in asset_names_to_delete
+            ])
+            # Find assets of selected entities with names of checked subsets
+            assets = self.session.query((
+                "select id from Asset where"
+                " context_id in ({}) and name in ({})"
+            ).format(joined_not_deleted, joined_asset_names)).all()
+
+            self.log.debug("{}Deleting Ftrack Assets <{}>".format(
+                ftrack_proc_txt,
+                ", ".join([asset["id"] for asset in assets])
+            ))
+            for asset in assets:
+                self.session.delete(asset)
+                try:
+                    self.session.commit()
+                except Exception:
+                    self.session.rollback()
+                    msg = "Failed to delete asset"
+                    report_messages[msg].append(asset["id"])
+                    self.log.warning(
+                        "{} <{}>".format(msg, asset["id"]),
+                        exc_info=True
+                    )
+
+        return self.report_handle(report_messages, project_name, event)
+
+    def report_handle(self, report_messages, project_name, event):
+        if not report_messages:
+            return {
+                "success": True,
+                "message": "Deletion was successful!"
+            }
+
+        title = "Delete report ({}):".format(project_name)
+        items = []
+        items.append({
+            "type": "label",
+            "value": "# Deleting was not completely successful"
+        })
+        items.append({
+            "type": "label",
+            "value": "
Check logs for more information
" + }) + for msg, _items in report_messages.items(): + if not _items or not msg: + continue + + items.append({ + "type": "label", + "value": "# {}".format(msg) }) - if av_entity is not None: - all_ids.append(av_entity['_id']) - all_ids.extend(self.find_child(av_entity)) + if isinstance(_items, str): + _items = [_items] + items.append({ + "type": "label", + "value": '
{}
'.format("
".join(_items)) + }) + items.append(self.splitter) - session.delete(entity) - session.commit() - else: - subset_names = [] - for key, value in self.values.items(): - if key == 'delete_key' or value is False: - continue - - entity_id = ObjectId(key) - av_entity = self.db.find_one({'_id': entity_id}) - subset_names.append(av_entity['name']) - if av_entity is None: - continue - all_ids.append(entity_id) - all_ids.extend(self.find_child(av_entity)) - - for ft_asset in entity['assets']: - if ft_asset['name'] in subset_names: - session.delete(ft_asset) - session.commit() - - if len(all_ids) == 0: - return { - 'success': True, - 'message': 'No entities to delete in avalon' - } - - delete_query = {'_id': {'$in': all_ids}} - self.db.delete_many(delete_query) + self.show_interface(items, title, event) return { - 'success': True, - 'message': 'All assets were deleted!' + "success": False, + "message": "Deleting finished. Read report messages." } - def find_child(self, entity): - output = [] - id = entity['_id'] - visuals = [x for x in self.db.find({'data.visualParent': id})] - assert len(visuals) == 0, 'This asset has another asset as child' - childs = self.db.find({'parent': id}) - for child in childs: - output.append(child['_id']) - output.extend(self.find_child(child)) - return output - - def find_assets(self, asset_names): - assets = [] - for name in asset_names: - entity = self.db.find_one({ - 'type': 'asset', - 'name': name - }) - if entity is not None and entity not in assets: - assets.append(entity) - return assets - def register(session, plugins_presets={}): '''Register plugin. Called when used as an plugin.''' - DeleteAsset(session, plugins_presets).register() - - -def main(arguments=None): - '''Set up logging and register action.''' - if arguments is None: - arguments = [] - - parser = argparse.ArgumentParser() - # Allow setting of logging level from arguments. - loggingLevels = {} - for level in ( - logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, - logging.ERROR, logging.CRITICAL - ): - loggingLevels[logging.getLevelName(level).lower()] = level - - parser.add_argument( - '-v', '--verbosity', - help='Set the logging output verbosity.', - choices=loggingLevels.keys(), - default='info' - ) - namespace = parser.parse_args(arguments) - - # Set up basic logging - logging.basicConfig(level=loggingLevels[namespace.verbosity]) - - session = ftrack_api.Session() - - register(session) - - # Wait for events - logging.info( - 'Registered actions and listening for events. Use Ctrl-C to abort.' 
- ) - session.event_hub.wait() - - -if __name__ == '__main__': - raise SystemExit(main(sys.argv[1:])) + DeleteAssetSubset(session, plugins_presets).register() From b2e7c60c28cb4917c7aca4f36a35f50b46a5ffdf Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 10 Dec 2019 02:02:57 +0100 Subject: [PATCH 38/55] fix(global): reformat now works on width and height --- pype/plugins/global/publish/extract_review.py | 36 ++++++++++++++++--- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index a3707f4e59..786df95fc1 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -1,5 +1,5 @@ import os - +import math import pyblish.api import clique import pype.api @@ -31,8 +31,8 @@ class ExtractReview(pyblish.api.InstancePlugin): inst_data = instance.data fps = inst_data.get("fps") start_frame = inst_data.get("frameStart") - resolution_height = instance.dataget("resolutionHeight", 1080) - resolution_width = instance.dataget("resolutionWidth", 1920) + resolution_height = instance.data.get("resolutionHeight", 1080) + resolution_width = instance.data.get("resolutionWidth", 1920) pixel_aspect = instance.data.get("pixelAspect", 1) self.log.debug("Families In: `{}`".format(instance.data["families"])) @@ -169,10 +169,36 @@ class ExtractReview(pyblish.api.InstancePlugin): # output filename output_args.append(full_output_path) + self.log.debug("__ pixel_aspect: `{}`".format(pixel_aspect)) + self.log.debug("__ resolution_width: `{}`".format(resolution_width)) + self.log.debug("__ resolution_height: `{}`".format(resolution_height)) # scaling none square pixels and 1920 width if "reformat" in p_tags: - scaling_arg = "scale=1920:'ceil((1920/{})/2)*2':flags=lanczos,setsar=1".format( - (lb/pixel_aspect * (resolution_width / resolution_height))) + width_scale = 1920 + width_half_pad = 0 + res_w = int(float(resolution_width) * pixel_aspect) + height_half_pad = int(( + (res_w - 1920) / ( + res_w * .01) * ( + 1080 * .01)) / 2 + ) + height_scale = 1080 - (height_half_pad * 2) + if height_scale > 1080: + height_half_pad = 0 + height_scale = 1080 + width_half_pad = (1920 - (float(resolution_width) * (1080 / float(resolution_height))) ) / 2 + width_scale = int(1920 - (width_half_pad * 2)) + + self.log.debug("__ width_scale: `{}`".format(width_scale)) + self.log.debug("__ width_half_pad: `{}`".format(width_half_pad)) + self.log.debug("__ height_scale: `{}`".format(height_scale)) + self.log.debug("__ height_half_pad: `{}`".format(height_half_pad)) + + + scaling_arg = "scale={0}x{1}:flags=lanczos,pad=1920:1080:{2}:{3}:black,setsar=1".format( + width_scale, height_scale, width_half_pad, height_half_pad + ) + vf_back = self.add_video_filter_args( output_args, scaling_arg) # add it to output_args From 3faef65e44338e9380a208d8b1eabbf6c2d38afd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 10 Dec 2019 02:04:05 +0100 Subject: [PATCH 39/55] fix(nuke): little bits --- pype/plugins/global/publish/integrate_new.py | 2 +- pype/plugins/nuke/publish/collect_writes.py | 3 ++- pype/plugins/nuke/publish/extract_thumbnail.py | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 9021a3f997..c723631679 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -414,7 +414,7 @@ class 
IntegrateAssetNew(pyblish.api.InstancePlugin): } if sequence_repre and repre.get("frameStart"): - representation['context']['frame'] = src_padding_exp % repre.get("frameStart") + representation['context']['frame'] = src_padding_exp % int(repre.get("frameStart")) self.log.debug("__ representation: {}".format(representation)) destination_list.append(dst) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 5484d971bf..c9c516c888 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -76,7 +76,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): } try: - collected_frames = os.listdir(output_dir) + collected_frames = [f for f in os.listdir(output_dir) + if ext in f] if collected_frames: representation['frameStart'] = "%0{}d".format( len(str(last_frame))) % first_frame diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py index 3886fda569..450bb39928 100644 --- a/pype/plugins/nuke/publish/extract_thumbnail.py +++ b/pype/plugins/nuke/publish/extract_thumbnail.py @@ -30,7 +30,7 @@ class ExtractThumbnail(pype.api.Extractor): def render_thumbnail(self, instance): node = instance[0] # group node self.log.info("Creating staging dir...") - if "representations" not in instance.data: + if "representations" in instance.data: staging_dir = instance.data[ "representations"][0]["stagingDir"].replace("\\", "/") instance.data["stagingDir"] = staging_dir From 37792d0829c9bf340ee5fa872a1da3809d115443 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Dec 2019 13:22:26 +0100 Subject: [PATCH 40/55] implemented tray_exit method to stop threads and clear signals on tray exit in idle manager service --- pype/services/idle_manager/idle_manager.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pype/services/idle_manager/idle_manager.py b/pype/services/idle_manager/idle_manager.py index 64cafcd193..86caa9ddd2 100644 --- a/pype/services/idle_manager/idle_manager.py +++ b/pype/services/idle_manager/idle_manager.py @@ -29,6 +29,13 @@ class IdleManager(QtCore.QThread): def tray_start(self): self.start() + def tray_exit(self): + self.stop() + try: + self.time_signals = {} + except Exception: + pass + def add_time_signal(self, emit_time, signal): """ If any module want to use IdleManager, need to use add_time_signal :param emit_time: time when signal will be emitted From 5a0eed0d76ab6554beeca6698b87ab6c7576e173 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Dec 2019 13:22:43 +0100 Subject: [PATCH 41/55] import cleanup in idle manager service --- pype/services/idle_manager/idle_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/services/idle_manager/idle_manager.py b/pype/services/idle_manager/idle_manager.py index 86caa9ddd2..0897245049 100644 --- a/pype/services/idle_manager/idle_manager.py +++ b/pype/services/idle_manager/idle_manager.py @@ -1,6 +1,6 @@ import time import collections -from Qt import QtCore, QtGui, QtWidgets +from Qt import QtCore from pynput import mouse, keyboard from pypeapp import Logger From 310c7672441c96207c746c4e3db2db7bae26599d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Dec 2019 19:06:16 +0100 Subject: [PATCH 42/55] added replaced svg images of trash and menu with pngs --- pype/standalonepublish/resources/menu.png | Bin 0 -> 1629 bytes pype/standalonepublish/resources/menu.svg | 12 --------- .../resources/menu_disabled.png | Bin 0 -> 1629 bytes 
.../resources/menu_hover.png | Bin 0 -> 1626 bytes .../resources/menu_pressed.png | Bin 0 -> 1626 bytes .../resources/menu_pressed_hover.png | Bin 0 -> 1488 bytes pype/standalonepublish/resources/trash.png | Bin 0 -> 1235 bytes pype/standalonepublish/resources/trash.svg | 23 ------------------ .../resources/trash_disabled.png | Bin 0 -> 1235 bytes .../resources/trash_hover.png | Bin 0 -> 1232 bytes .../resources/trash_pressed.png | Bin 0 -> 1232 bytes .../resources/trash_pressed_hover.png | Bin 0 -> 1094 bytes 12 files changed, 35 deletions(-) create mode 100644 pype/standalonepublish/resources/menu.png delete mode 100644 pype/standalonepublish/resources/menu.svg create mode 100644 pype/standalonepublish/resources/menu_disabled.png create mode 100644 pype/standalonepublish/resources/menu_hover.png create mode 100644 pype/standalonepublish/resources/menu_pressed.png create mode 100644 pype/standalonepublish/resources/menu_pressed_hover.png create mode 100644 pype/standalonepublish/resources/trash.png delete mode 100644 pype/standalonepublish/resources/trash.svg create mode 100644 pype/standalonepublish/resources/trash_disabled.png create mode 100644 pype/standalonepublish/resources/trash_hover.png create mode 100644 pype/standalonepublish/resources/trash_pressed.png create mode 100644 pype/standalonepublish/resources/trash_pressed_hover.png diff --git a/pype/standalonepublish/resources/menu.png b/pype/standalonepublish/resources/menu.png new file mode 100644 index 0000000000000000000000000000000000000000..da83b4524468dff7e927baac8927e655e004632b GIT binary patch literal 1629 zcmeAS@N?(olHy`uVBq!ia0vp^4j|0I3?%1nZ+yeRz~mX=6XFWw0zpGVLuhCykW5QU z1Cl^cSXc-oOG-+BWLa4mkSs4RFDNLetgI|9E(Vf7sj8}~($Z2O2~<#BT@4|DvK18- zKv^IOl!cH$DWDdh2|&d_5-1Ll0*V6_z-1v)5E3p0Q2-=?;t)fCdV#V)E|3HQWCajq zKtYHsgoFsfNw6l6E}$%&glNI49-<7*OrRhfzzqS4K-h58;3CLskd1;H0%yQgA{&BB z7ONVFVoaC74FNJBvdAKou>b%6&)f007MPL_mIV0)GcYnSv#_#paB^|;@bd8s2#bh{ ziAzXINz2H}D<~={tEj4LY3t}37#W+GnweYK*xK1UIyt+zdU$$y`}p|>1Obje8tLDtJkbuzhUF1&0Dwa*tL7l{sRXO9XWdJ_{meJ&zw7d;o_ysSFT;ZefQq| z2M-@VefIpt%hzw-zI*@S zB;xSi=|7`+6Ge`nf2-v!IBBg?blE0#EluxfDW_-YTx@b`aXBe;vs_nYiFfACy8+=} zexCnx;IPl#nbyYND{Ja!S)MMxSNv?x`|7?9HmB<;G3)X&UcSi4T%mW;Lr!&>f%SGd z)2jPV&KSM2&AD#fFzYI=<_)-)Vo%{QER>p7X^06Yt9e@NEAT`D^L{$Jra5 zHp;ej*XDj!xLmPRu4!@BNj8hTW!L-+1OHP;OtUVk-yn3;kU!I zjw>sJGPsVf5afKDx-zs$qRQP%>78O2XH%8OgxV`zsaH0-uM0?;yI_iU(BZn*EHgt+ zx^Ih6zu~pmB67Ldt;K0sN~hLs^IaHw=g8{Tr&ZMpeR5|vMDG(`E*5uLdB>`4y~}Sq za|jBX2wv20F)N(#%v(r$$*Jx4e>in%ZuJW|S9#z2=JT^Af7A`bIKw_E-}uga^t*Dd zl}og81WT}u>}%F^lfVZrBfkGllDxAjOK;NYk_CHiab-yT_+G?oJin2%UO${OZ|NZ$ z$EW*W$C^1Uiu}Lzl-|j2BFj(mDc)I{Q<$=-`}nh2Ytk(aPi?v}KW4M^_wDh|k0)(? 
z!SF;<-GZ_D_o+_{Pb7#ipF7<7FYs<#Qu5hvqANap(5+Uy;!^d!e@*MpAHGRn!Q&278>FVdQ&MBb@0KA_L4*&oF literal 0 HcmV?d00001 diff --git a/pype/standalonepublish/resources/menu.svg b/pype/standalonepublish/resources/menu.svg deleted file mode 100644 index ac1e728011..0000000000 --- a/pype/standalonepublish/resources/menu.svg +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - diff --git a/pype/standalonepublish/resources/menu_disabled.png b/pype/standalonepublish/resources/menu_disabled.png new file mode 100644 index 0000000000000000000000000000000000000000..e4758f0b199c554da8bd0932cea2d13b2d2bcced GIT binary patch literal 1629 zcmeAS@N?(olHy`uVBq!ia0vp^4j|0I3?%1nZ+yeRz~mX=6XFWwLO^I}sDp!pv9YnX zwl$>*?tM+1A$9=H})=E|6_zW(HJbYiny|Wd#&8H8lmYfeL`?fm}ONQ=qto zg@v7&nT?GNkPQ(BDzG&-2a*sGh#DIt5gQ8&ph{#BxMHAbK85AR7YLh)@hS0j3_P2CEjh zULfQD|NkdRQ@5x&}tZCZ=ZQRyMYF_Kr@@F0LM)Ufw=_{sBS3A)#U65z#TRafvCZY3UhRd4(mV zW#yGMb@dI6P0cN>Z5^Fm-95d1{SzimnmlFdjG43M&0nx^(c&dbmn~nha@FcJYu9hs zxM}m&Z98`D-n0L}!9z!m9y@;W)af(l&R@8A>GG9p*KgmwcmKh|$4{RDTZ?yo{GGGBQ`_o%E1XU1ngtUCy-X{*yCCuWWO!TQ|(R zo+`Og)6-LPCHtvYRSfsu$z&aMek`sbfA*GX^SR%0-YeR2iZjy8 zf0m>9gS?LKI_-DbUo-zc&79{vasR~oG66i>e?|V9dcbk^MyHLkZQZrGpA{}wER}0o zoOP1TB5&C>|3g(ztDlI=@fkQj_|FpFqO@Z9mL9|HcaLrT_#$4)>`;+mr-iG)!L?Jm zGw&?1d~yD9+`c|@e}R9!ck9J2oD^(T+WBdon$~2Iwwo69`ia)Di$y<4l?1FklWcz5 z#bwDnr>B<|^}k5icj=SPdKZ^x2Vyq1R2n$D6jkJJc1!r}Fs}QYjqd9LlIAX$;vICj?lsHIkdyA)BGhkqEw+eUE_Q2i zT9(qOb=!Ow#@;!yy7g&Q^+KQA*$vVAgqMrOT~^+)YFqE}+s+(20o?BcQQa`>I@fy!>dbw;qTi-pMwSV`&F}9hp>bc!pMHBst z*V^xXwkx@NQGchY@2}+h%dg%`;<N{%q*;I9GqO- zJiL7T0>UDqV&W2#QqnTA@(PMd$||brTG~3g21dpvre@|=Hnw*5j!w=lt{$FV-adZ* z0YSkbp<&?>(J`@ci7BaR=^0shg(anB<&`yc^$m?p%`L5M9i3g>J-vPX6DCfYJZ0*P znX~52U$Ah|;w4L$Enl&6)#^2C*KgRkY4g@?J9h2fv;V-sLr0DtJAU%i=`-ifU$}Vb z@|A1XZ{NLl|G~q@PoF)1@$&VXx9{G6`1s}9_a8rh{r>a!pRdp7O+e2uCV9KNF#c!K zy$8%IUp-wMLn03Eo&GbLH&NvH`L|l$f|J%NMVD<-*V6Q!mU4QQ&c!CD7MGJkH_LTZ zmUw6Gyc-by<>&c72M+t(ooQ|Sy|Si$mgVX4d&ST8ysz%-U~{^j60Ht%oTbk zJ>*oE8CY+ZGp)M+{ojDv*)dUD`N{1 zd+v!Ji)+ZAy=B^b?zf!xing5Mj5PC~dy0MGVc zk-w%MaGbr-X`^ggcWv%xh07I7<(d{}on*7fTXxO=P}S4wC*pE^2F?%uvxK)OtysRL z$8h`IV_QGIh?g=uRAks`;VN)&?Ue4!J4-BIoPQj*ug~0H;9u|Eda(;91zVMNewwGI zHCd$XrbWGeqIK+I(N9t(0c+1Bo8NYESu)S*>7_;eFB0}$`lPeo#pT(7n2jx!2F@-; z75SUp5`H^O>$tKqD1+&u|A$kT=2pLebCvhKZ$3Y3@<-h;j5F+$ z@{RAzN53oQTDe3kN3aCj$i8MxHwk?3GUEH+B*{Cgvh*gME?KbW7FUMUkMBji#`7CF z>-EDq^Ohd6aeTV}b*!1wqR9VSPwAcfCbIk_pW>aRIfW^Ux{p7bwIS-~DflZDy=`ZueHvME~No_Pd|$O733N-)ZXmEBXHNtM`(4?%Z8xTd{4a xwB^C(OIzLU%QpT`{}I;nzt6A0vgG*x_!Q-*UelL+F$LxZ22WQ%mvv4FO#m7ceop`Z literal 0 HcmV?d00001 diff --git a/pype/standalonepublish/resources/menu_pressed.png b/pype/standalonepublish/resources/menu_pressed.png new file mode 100644 index 0000000000000000000000000000000000000000..a5f931b2c4107986d389c1df0fe910cae0145d0d GIT binary patch literal 1626 zcmeAS@N?(olHy`uVBq!ia0vp^4j|0I3?%1nZ+yeRz~mO-6XFWw{s)6qt5)sbzyHjc zGgq%(1u~8uI|gK5ym%4FJ#ys8i4!M)-1FzppFDXI$T)ZI98d}C-@|vuDo&xj;5h9H1Obje8tLDtJkbuzhUF1&0Dwa*tL7l{sRXO z9XWdJ_{meJ&zw7d;o_ysSFT;ZefQq|2M-@VefIpt%hzw-zI*@SlFzi8#D>`p;&~JgEwON2BZ`8!c3^6kg@7?ixX=%gnJ500LuO3n|-;)3B^5^+~gm!&8 zp1tz0U+Lk`p11z3j4ep)xhH-st|5Q+mTB|3-*Vn7+H#6B(#(IBqxplpj_*3{ciLYw z|31x}=R9%$#QQP1jk0aswYi@aE>|p-Yg(LjlFcG-*){(|RZpv* zh|BRAI6wH$65gV;V)>RH!|ivEZT#V(u_Y*pI%X`Y(aWRbR;7WMjx*0GC4KS`AYtUZ%#e%r-m$vmg0mlpNENZ5Dj zlg@e8XnV|>jIMIE|}sSbhz#{%gm6I?%N{NZ+I=Xh+HmqYjIkZ(y4XZd>6*vIkLL- zX;t+?pWN9E(ffp#i^W}5-mz+1@ABKu9D>3of*18$%nBzw^A?g`a%%hiA5LAGTm1sg zRo?f$`TVTOA9cep&ah9)H@-6;{jQvAB=MH!N3%uKwlzjG^ z=!y>?bgLDwxKw@bU(XC+ z=Sb9bF>#kQr=mIs?JZFRdZ+xS2IM_AAQKEDFXlH>p5Q7%TxdW literal 0 HcmV?d00001 
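Note on the PNG state variants above and below: they typically differ from
the base icon only by tint. Should they ever need regenerating, a dimmed
"disabled" variant can be derived with Pillow; a sketch under the assumption
that Pillow is available, since the images committed here were authored by
hand:

    from PIL import Image, ImageEnhance

    def make_disabled_variant(src_path, dst_path, factor=0.5):
        # Dim the base icon; alpha is restored afterwards because
        # Brightness scales every band of an RGBA image, alpha included.
        icon = Image.open(src_path).convert("RGBA")
        dimmed = ImageEnhance.Brightness(icon).enhance(factor)
        dimmed.putalpha(icon.getchannel("A"))
        dimmed.save(dst_path)

    make_disabled_variant("menu.png", "menu_disabled.png")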
diff --git a/pype/standalonepublish/resources/menu_pressed_hover.png b/pype/standalonepublish/resources/menu_pressed_hover.png new file mode 100644 index 0000000000000000000000000000000000000000..51503add0fbc0c7352c764623632509e14dd669b GIT binary patch literal 1488 zcmV;>1uy!EP)^iQUvsuG)vNbl17y^pOCa!(l3&>NZKyx zu%sidy|<)3k_J23QIbYWswva6Mba`!FB@a_hvJ?Jz(C*`U~k#oYX;^6lYxF!=&c2& zxVyitOm`bF7Z{L*Pn8mkfLXvEkJwRQ8E`!i^683z3BYn^BgSsv0U!!xt5QN&0N;2- z4g-sUOLDM11b7KJ;@STi7?HK@n7~ZuaE#Z1i*hmEp2x+`c8tG(`?E9{VZZGD<2N-k zgO&5T18DVl%?tVZ#YXx7?|Mc)2I_+vNdcf9_{8J)4$wQ4@?O9Pp7P~D&mhVJ!tq?; z@q8~ePxsir=_!3G_~R82fWiKfW4w`yFR;*4Ha8&GOaVOY@qV^iCHHzto)641a{w=R z{O?XGJRI2Pgg*nyr=*=8Ku=(U^Pvs6tWrS(eCY)Lq=0{XC;)?j7U#?QioOTH4DZpo zH3y!V0=T{6R{^L4cDc@P=fp8{0Bf92JAm3Vb)4fm9wdi(eO>^DVyDUgv;8*Xb*huJ zO;UeJ^NcZ1Brwn@8wbd1f^=NcGGolDN(F%Vk{*+^N77JZ%z^R_kGc^$3|yFm4X}~? z@ohqmgDI^bAC9m4Pe7BTv65aj#>`Bpe7vM7Nn`@xePhgqBm%%vNi!t{}y-J z5XmMW!@Mo0ID3@1W5Wlh5Whhcp%6CxOcHR1);nV`?^1p)Z$JC6-1^8#PN>Zk!;n5 zRGJWV2gJVq9LW|WNu>!4pNi<%J3E_@gmqZyXU?=Z@3<>mf0fW~}S{hH+l7aC{-`asY6n=jRy7 z6(SmAz6UnRhQ;rZ^hH9&<0Va3zKM~QNyFl@zDYJLzPVis|0hN`M|L_}FKLl6=8*)- z8ztSDgirviN|+jZQPM+_c1Rj#j5%_;PJVK9NI0&=0=S$GlB4Av{q*?S964qR;6vxr znbYI0vtJ;QcaS4EfV(_@&R#1-`x2Q}x^SHUwl1K}`BGVlj1wH@34Tu3Et2}myOBea zDws@Zjj$RU`0Nq@dSJ_C!Or<_s#ejH-utsKFwe{YEb@5INvmwJr)+s=ejR`s;5Co$ zq7X`Z(!N-F1)aB8dZov6ZLpi=-lVuhKW;Tv02_@F^>jp9dnEwz|DCX9|ElNj2FFW>Z3etpN`CQV7o^o?`?B`0C}%M z|3$7JFx!0-d^D5Y#>p1oaRNlKL*AuYNe{{vB4)XDcB!N##+WqO>})}I+&tY8IQ{nd qfs*QE3$rC@ucUpFb}Q%Y^M3=%H32mG5YMLo0000loC1qBrq6+rRI%F5#6Vju~WEiEmruC4~E0g^yLpaLKO z3Iaud3LqpfhdNP5S4KCa5Y%PA&!BoL1y5QLRLnQ z{r~^}Jl8k+z!;rb666=mz{teR!p6?Q$;Hjf$1fl#BrGB-CLt{&E3c%Ys;;4_rLCi@ zZ)jp+X=P()@8st0;pye$>lY9d91Yt&-Rdys|!-6E|Kmp&7BYi6bcOR3B=9!q_%IacbmgDJh;&c9A1@ptbn$<-{6LV=V`Wv)qa0X?H@S*o&Rl)VuyYa#|I|4)t8w{q-Lb$XMBy|I&dUz z_2XV=gUMOhtIlTT@0ouf;N*p0`~Uv#eampt`R2d5nlhSiPu}{fapC%=Q@IP;o(X(7 zx=*0JF*|ix^b;$7neDA|PFrr(OkVA#kkq}WE!wZIQ{FANF>%L3mI6MzTTK2BT7?~0 z;%+k8CG_vCXD>J@7CEu$J^SgiZ&H8nbE=waTOsz~_5p9SzS2r{=u8x_y zynfd0$NMBLmtEuea#wHFhbyxWEz@-f{-_$myJaea>H&MkH&w6JPg2?0yjNS*Q&fC@ zr7Xj-2MsDO)$UAHdD|8*daR*nf%hblmVamV+HYskVDxGbdSK78L#QBRl1RhxNlW-< z%t)wfy2kYOsQk3t z1IO;j@0DojVt(gPRQE1~!6J0Yv+`2Lhc+jViG29;q?75@JN8SJ@fVWQLX*n>F_uY* s%-*Qnx-6nTamR!AxiwEV-G29jH(IMB%le*t6R2$RboFyt=akR{07G6oeE - - - - - - - - - - diff --git a/pype/standalonepublish/resources/trash_disabled.png b/pype/standalonepublish/resources/trash_disabled.png new file mode 100644 index 0000000000000000000000000000000000000000..06f5ae5276d22663eeceeb4099b87d86f8ecf9df GIT binary patch literal 1235 zcmeAS@N?(olHy`uVBq!ia0vp^4j|0I3?%1nZ+yeRz<4phC&U%Vg@Dk|PzMJGZEbB& zPfsAj($dn{*jP_b&)VAB+}s?<1`68P*Z{@N%*-5&jcsjht*op}O-+F!Km~TDra&%4 z0gwyS1r)ciu&@QQfl@#&L>4Fo1W*xka|i=SLKr}0))p2(1t9f6hJ^)01g-!iYheN5 zf*245mX>hs5D}09h%&GOI2ULdL?zHc2)%Gaz=CiEAgjR$t{B7s`vt^6HUTUS6oHrz z)CE@nVu1C)1wm?H2*C!cfr~@5AhjQiW^f>W3|F44i;a<(^pAUMBE$5frFF$8|JMDYy$_BI8%Fi!9Klr+K z);zI0$w7O&mp_iT@^ZWQ^xWU8&JLe0O5SfSd=T54?7(kuL&5VjTf=I-OV)l9tP^@qD?fx9Y={*@u?tIs|`I zjp5xgl|l7@J>#3ISL-LK>}=kvt?DT%KEG0y;n;%)m6vLFrmDPcix)lCP_)2%l1R(H zGkfi~vuH4SH3&VhXW1cC5Hd-m;rOH_d^2XG)~)>ZVV~cmEwy5w1TDdR(% zlgC6p{CU#J^y(e^rONmV$!VcU<^LGVq(o+KRBl}sQJ=Wu!Ta2rC!21+`@tKn)sba= SPreCMG(?(`x)jJ*w{G3CWy>H8APEF(*REZ@ zd^wN=0w8zInl%t|#flZk3?LUM1q2ZB)vH$nWmm3T2_%64$c3;W3^*4k2xkKs$SNV~ z(M$lUfdjY#AOixxR)Oq-m;*NrC<0Uf=Rz0|WkB{&0piollYuchvn0qbn1PXrnT3s= zgOiJ!mycgSP)JxrR7^rzMpj-)MO9rxQ%hS%SKrXY!qUpd&fdw*-NVz%$JZ|)C^#fE zJUS*eJ|QVNEj>H8pt!8Oy0)RYv#Ymn!oGpI?4{ 
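For comparison: Qt can carry all of these states in one QIcon instead of
swapping icons from event handlers the way the PngButton added in the next
patch does; a sketch of that alternative, not what this series implements:

    from Qt import QtGui

    def build_state_icon(base, hover, pressed, disabled):
        # QIcon picks the pixmap matching the widget's current mode;
        # Selected is only a rough stand-in for a "pressed" state.
        icon = QtGui.QIcon(base)
        icon.addPixmap(QtGui.QPixmap(hover), QtGui.QIcon.Active)
        icon.addPixmap(QtGui.QPixmap(pressed), QtGui.QIcon.Selected)
        icon.addPixmap(QtGui.QPixmap(disabled), QtGui.QIcon.Disabled)
        return icon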
z@OAC1d17~xgZ6eWe;jY+<#zGuxxZJP9X?%@yx&~-AhtKzf#2YUg6Cz%V!@FfFgX#f$#y3^3)=yH|*}PX<)l*b_ex)qKu?Gz*FV*f$Re9SMFM6z@Xo2@6 zk(PgF_S$b}(O~py5PD$GvO}mKWRgh3@kvYgX3R*fTlww7KEFv@Y^P;RSJAD>{#*5l z+04IE{M8zvWo7e1nRI97tYM5QX1d1o_Ne@{+ylq%$nTYC>0*B8P*nFWguxRW{xNrfe z0LTD}oIZW}?AfzGl|Tki4Ui2K1WG};5CuRIE)FpVr~oJeQ4eGQ6$1f~0bxVLA%bu< zm{JfCAPG?eB!ODcNMv0=DIkE0ps^ubkO~-ptAsNkGH6_wa)il11#m%HGnjs4%>>5i z%#t9#U_=Ke7wDj!Ug5t9B>e`0p&aU3R2@@wznL2&O%vrPN%w4c_ z)#^2C*KgdkdCS&q+js2RyZ`W!qsNY)I&=2o}GwI%AU|{0*ba4!cIQ({cdT_9#M5}*>7FXGk zhz$#poC5`XLyq*V5ZrxCE}Cazf-9?wiCK=P$BEDRe-+FR_i9%Ee9&WTIlt_F`8ng; zY2Ra4Hkid$et!A+!Pm93=84@&4%*wj{BgXMm)pgs=l))GcKCEr@_uvSgV^3=2Y!Pa z3ZAFg8dm%LHMM`>{CEDhIf@~1mnKWG(pV2QiQWS7vtv!1=+ zq*&y{ruXcp&%R0hz0av?u5E?bgWCtZg?kx)Y+f-nFhk(ow;%75v|M(L z=gVEaRUfX*KD12NA^4+e4DXhy45|n08Q)aBT0co;XY*cdRZmgz`IWK^#~w7Oyi~h0 zRpo74yy&rpq6OZQL|Xox*=xU@MT60+LFj=!%MPJ}kVzs9$0se}n=vD`ZsoTR`}`(t zv7MGNT}8Jh`)}1JW;6dv@mFhvmX*y5WzwCQvxYIMnCTkR+oST+at|E4BfnRorHlEU zLs8wk5C)6TCC|!B86Vo5JSOtt&y!B3SMS&_RmNXPP76&c|HoJ+B{F-Xa_h2)`otX% i-sjdl*>wBe58i04jx6hY@=d@r!r{^^}<`IdjNGca2VJUHJ=5(0*(LzTY-Cl3nR@j;HD75blo0nMSUgr z=^jzHE}`439?ogsO5wRO)Qiu7iTcm9lK>VKXx4-u?oQaNC6^Ysf z@JDof}kz8BS-i~4k02)znClWU#^ej;0B^cy3hM~cZkEF8-;wU|0?j*-=KTWALy_i;y5vuJeYF&B zePI&+PhRxv?z%2DtAI5tu(l-4Ga$8r)HDI4HjtVofYb(3(*%&(Kx&!*QX5E36F_PM zsc8a8Z6GyG0I3b6rU@Xmfz;Fyz#=doofj1-%yy|+BrANVl*$7LAqebgo&E}ZP$1n$ zHovCNSv?ZKz7RsXDi!sPZVDvSjcVIX73wAGWgRp_>Qx;yH{`tC-%cOX&5n_O za_1YCRJ8~p9Pf0&G8NpdI;Gz_$TsHAt1sPASF1Bo%9E83vP{(dk*@{y^123kFL$fG zV8Rl5v(otIa-P2E<{DjXoK#22HbKVKqTiD}tmc>OR3C$6n=QwpHi6qi2#0DLRo!j> z@_cO*&xNo{J)L^C5i{!3J(+wI!Y1`~^{8ofmCc*&3VA~vS?)ak14O|_7j^>z%K!iX M07*qoM6N<$fz>% literal 0 HcmV?d00001 From 69c1ae5990620b5e81e36e832cdea85418b89690 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Dec 2019 19:07:42 +0100 Subject: [PATCH 43/55] created png button with png factory --- .../widgets/widget_component_item.py | 171 +++++++++++++++++- 1 file changed, 170 insertions(+), 1 deletion(-) diff --git a/pype/standalonepublish/widgets/widget_component_item.py b/pype/standalonepublish/widgets/widget_component_item.py index 78287ccf37..f519164c6b 100644 --- a/pype/standalonepublish/widgets/widget_component_item.py +++ b/pype/standalonepublish/widgets/widget_component_item.py @@ -1,6 +1,5 @@ import os from . import QtCore, QtGui, QtWidgets -from . import SvgButton from . 
import get_resource
 from pypeapp import style
@@ -362,3 +361,173 @@ class LightingButton(QtWidgets.QPushButton):
         height = preview_font_metrics.height() + 5
         self.setMaximumWidth(width)
         self.setMaximumHeight(height)
+
+
+class PngFactory:
+    png_names = {
+        "trash": {
+            "normal": QtGui.QIcon(get_resource("trash.png")),
+            "hover": QtGui.QIcon(get_resource("trash_hover.png")),
+            "pressed": QtGui.QIcon(get_resource("trash_pressed.png")),
+            "pressed_hover": QtGui.QIcon(
+                get_resource("trash_pressed_hover.png")
+            ),
+            "disabled": QtGui.QIcon(get_resource("trash_disabled.png"))
+        },
+
+        "menu": {
+            "normal": QtGui.QIcon(get_resource("menu.png")),
+            "hover": QtGui.QIcon(get_resource("menu_hover.png")),
+            "pressed": QtGui.QIcon(get_resource("menu_pressed.png")),
+            "pressed_hover": QtGui.QIcon(
+                get_resource("menu_pressed_hover.png")
+            ),
+            "disabled": QtGui.QIcon(get_resource("menu_disabled.png"))
+        }
+    }
+
+
+class PngButton(QtWidgets.QPushButton):
+    png_button_style = """
+    QPushButton {
+        border: none;
+        background-color: transparent;
+        padding-top: 0px;
+        padding-bottom: 0px;
+        padding-left: 0px;
+        padding-right: 0px;
+    }
+    QPushButton:hover {}
+    QPushButton:pressed {}
+    QPushButton:disabled {}
+    QPushButton:checked {}
+    QPushButton:checked:hover {}
+    QPushButton:checked:pressed {}
+    """
+
+    def __init__(
+        self, name=None, path=None, hover_path=None, pressed_path=None,
+        hover_pressed_path=None, disabled_path=None,
+        size=None, *args, **kwargs
+    ):
+        self._hovered = False
+        self._pressed = False
+        super(PngButton, self).__init__(*args, **kwargs)
+        self.setStyleSheet(self.png_button_style)
+
+        png_dict = {}
+        if name:
+            png_dict = PngFactory.png_names.get(name) or {}
+            if not png_dict:
+                print((
+                    "WARNING: No icon set with name \"{}\" is registered"
+                    " in PngFactory!"
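+                    # The explicit *_path arguments handled below can still
+                    # supply icons even when the name lookup fails.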
+                ).format(name))
+
+        ico_normal = png_dict.get("normal")
+        ico_hover = png_dict.get("hover")
+        ico_pressed = png_dict.get("pressed")
+        ico_hover_pressed = png_dict.get("pressed_hover")
+        ico_disabled = png_dict.get("disabled")
+
+        if path:
+            ico_normal = QtGui.QIcon(path)
+        if hover_path:
+            ico_hover = QtGui.QIcon(hover_path)
+
+        if pressed_path:
+            ico_pressed = QtGui.QIcon(pressed_path)
+
+        if hover_pressed_path:
+            ico_hover_pressed = QtGui.QIcon(hover_pressed_path)
+
+        if disabled_path:
+            ico_disabled = QtGui.QIcon(disabled_path)
+
+        self.setIcon(ico_normal)
+        if size:
+            self.setIconSize(size)
+            self.setMaximumSize(size)
+
+        self.ico_normal = ico_normal
+        self.ico_hover = ico_hover
+        self.ico_pressed = ico_pressed
+        self.ico_hover_pressed = ico_hover_pressed
+        self.ico_disabled = ico_disabled
+
+    def setDisabled(self, in_bool):
+        super(PngButton, self).setDisabled(in_bool)
+        icon = self.ico_normal
+        if in_bool and self.ico_disabled:
+            icon = self.ico_disabled
+        self.setIcon(icon)
+
+    def enterEvent(self, event):
+        self._hovered = True
+        if not self.isEnabled():
+            return
+        icon = self.ico_normal
+        if self.ico_hover:
+            icon = self.ico_hover
+
+        if self._pressed and self.ico_hover_pressed:
+            icon = self.ico_hover_pressed
+
+        if self.icon() != icon:
+            self.setIcon(icon)
+
+    def mouseMoveEvent(self, event):
+        super(PngButton, self).mouseMoveEvent(event)
+        if self._pressed:
+            mouse_pos = event.pos()
+            hovering = self.rect().contains(mouse_pos)
+            if hovering and not self._hovered:
+                self.enterEvent(event)
+            elif not hovering and self._hovered:
+                self.leaveEvent(event)
+
+    def leaveEvent(self, event):
+        self._hovered = False
+        if not self.isEnabled():
+            return
+        icon = self.ico_normal
+        if self._pressed and self.ico_pressed:
+            icon = self.ico_pressed
+
+        if self.icon() != icon:
+            self.setIcon(icon)
+
+    def mousePressEvent(self, event):
+        self._pressed = True
+        if not self.isEnabled():
+            return
+        icon = self.ico_hover
+        if self.ico_pressed:
+            icon = self.ico_pressed
+
+        if self.ico_hover_pressed:
+            mouse_pos = event.pos()
+            if self.rect().contains(mouse_pos):
+                icon = self.ico_hover_pressed
+
+        if icon is None:
+            icon = self.ico_normal
+
+        if self.icon() != icon:
+            self.setIcon(icon)
+
+    def mouseReleaseEvent(self, event):
+        if not self.isEnabled():
+            return
+        if self._pressed:
+            self._pressed = False
+            mouse_pos = event.pos()
+            if self.rect().contains(mouse_pos):
+                self.clicked.emit()
+
+        icon = self.ico_normal
+        if self._hovered and self.ico_hover:
+            icon = self.ico_hover
+
+        if self.icon() != icon:
+            self.setIcon(icon)
From 43d5dbbe744ea08645b33466d3a32b0def1d6fe9 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 10 Dec 2019 19:08:02 +0100
Subject: [PATCH 44/55] replaced svg buttons with pngs

---
 .../widgets/widget_component_item.py          | 25 +++++++------------
 1 file changed, 9 insertions(+), 16 deletions(-)

diff --git a/pype/standalonepublish/widgets/widget_component_item.py b/pype/standalonepublish/widgets/widget_component_item.py
index f519164c6b..daf8f5d773 100644
--- a/pype/standalonepublish/widgets/widget_component_item.py
+++ b/pype/standalonepublish/widgets/widget_component_item.py
@@ -5,10 +5,6 @@ from pypeapp import style
 
 
 class ComponentItem(QtWidgets.QFrame):
-    C_NORMAL = '#777777'
-    C_HOVER = '#ffffff'
-    C_ACTIVE = '#4BB543'
-    C_ACTIVE_HOVER = '#4BF543'
 
     signal_remove = QtCore.Signal(object)
     signal_thumbnail = QtCore.Signal(object)
@@ -57,10 +53,8 @@ class ComponentItem(QtWidgets.QFrame):
         self.icon.setText("")
         self.icon.setScaledContents(True)
 
-        self.btn_action_menu = SvgButton(
-
get_resource('menu.svg'), 22, 22, - [self.C_NORMAL, self.C_HOVER], - frame_image_info, False + self.btn_action_menu = PngButton( + name="menu", size=QtCore.QSize(22, 22) ) self.action_menu = QtWidgets.QMenu() @@ -87,7 +81,9 @@ class ComponentItem(QtWidgets.QFrame): self.file_info.setStyleSheet('padding-left:3px;') - expanding_sizePolicy.setHeightForWidth(self.name.sizePolicy().hasHeightForWidth()) + expanding_sizePolicy.setHeightForWidth( + self.name.sizePolicy().hasHeightForWidth() + ) frame_name_repre = QtWidgets.QFrame(frame) @@ -103,7 +99,8 @@ class ComponentItem(QtWidgets.QFrame): layout.addWidget(self.ext, alignment=QtCore.Qt.AlignRight) frame_name_repre.setSizePolicy( - QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding + QtWidgets.QSizePolicy.MinimumExpanding, + QtWidgets.QSizePolicy.MinimumExpanding ) # Repre + icons @@ -155,12 +152,7 @@ class ComponentItem(QtWidgets.QFrame): layout_main.addWidget(frame_middle) - self.remove = SvgButton( - get_resource('trash.svg'), 22, 22, - [self.C_NORMAL, self.C_HOVER], - frame, False - ) - + self.remove = PngButton(name="trash", size=QtCore.QSize(22, 22)) layout_main.addWidget(self.remove) layout = QtWidgets.QVBoxLayout(self) @@ -350,6 +342,7 @@ class LightingButton(QtWidgets.QPushButton): color: #4BF543; } """ + def __init__(self, text, *args, **kwargs): super().__init__(text, *args, **kwargs) self.setStyleSheet(self.lightingbtnstyle) From f9035a20d6ab16bfababc0e80b79e64d59cd41e0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 10 Dec 2019 20:51:24 +0100 Subject: [PATCH 45/55] fix(nuke): selection of nodes for write render creator --- pype/plugins/nuke/create/create_write.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index 8a1f958f9e..042826d4d9 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -34,6 +34,7 @@ class CreateWriteRender(plugin.PypeCreator): data.update({k: v}) self.data = data + self.nodes = nuke.selectedNodes() self.log.info("self.data: '{}'".format(self.data)) def process(self): @@ -46,9 +47,9 @@ class CreateWriteRender(plugin.PypeCreator): # use selection if (self.options or {}).get("useSelection"): - nodes = nuke.selectedNodes() + nodes = self.nodes - assert len(nodes) == 1, self.log.error("Select only one node. The node you want to connect to, or tick off `Use selection`") + assert len(nodes) < 2, self.log.error("Select only one node. 
The node you want to connect to, or tick off `Use selection`") selected_node = nodes[0] inputs = [selected_node] From 8a14e5d544ade37068cda522628eaff068b411a5 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 11 Dec 2019 00:05:52 +0100 Subject: [PATCH 46/55] make sure we process texture when force copy and maketx is enabled --- pype/plugins/maya/publish/collect_look.py | 4 ---- pype/plugins/maya/publish/extract_look.py | 17 +++++++++++++---- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/pype/plugins/maya/publish/collect_look.py b/pype/plugins/maya/publish/collect_look.py index 17f8180fdf..7a5fea776c 100644 --- a/pype/plugins/maya/publish/collect_look.py +++ b/pype/plugins/maya/publish/collect_look.py @@ -219,10 +219,6 @@ class CollectLook(pyblish.api.InstancePlugin): with lib.renderlayer(instance.data["renderlayer"]): self.collect(instance) - # make ftrack publishable - self.maketx = instance.data.get('maketx', True) - instance.data['maketx'] = self.maketx - self.log.info('maketx: {}'.format(self.maketx)) def collect(self, instance): diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index ad43e02d21..5226f80f7a 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -74,6 +74,8 @@ def maketx(source, destination, *args): cmd.extend(args) cmd.extend(["-o", destination, source]) + cmd = " ".join(cmd) + CREATE_NO_WINDOW = 0x08000000 kwargs = dict(args=cmd, stderr=subprocess.STDOUT) @@ -183,6 +185,7 @@ class ExtractLook(pype.api.Extractor): transfers = list() hardlinks = list() hashes = dict() + forceCopy = instance.data.get("forceCopy", False) self.log.info(files) for filepath in files_metadata: @@ -195,20 +198,26 @@ class ExtractLook(pype.api.Extractor): files_metadata[filepath]["color_space"] = "raw" source, mode, hash = self._process_texture( - filepath, do_maketx, staging=dir_path, linearise=linearise + filepath, + do_maketx, + staging=dir_path, + linearise=linearise, + force=forceCopy ) destination = self.resource_destination(instance, source, do_maketx) # Force copy is specified. - if instance.data.get("forceCopy", False): + if forceCopy: mode = COPY if mode == COPY: transfers.append((source, destination)) + self.log.info('copying') elif mode == HARDLINK: hardlinks.append((source, destination)) + self.log.info('hardlinking') # Store the hashes from hash to destination to include in the # database @@ -337,7 +346,7 @@ class ExtractLook(pype.api.Extractor): instance.data["assumedDestination"], "resources", basename + ext ) - def _process_texture(self, filepath, do_maketx, staging, linearise): + def _process_texture(self, filepath, do_maketx, staging, linearise, force): """Process a single texture file on disk for publishing. This will: 1. 
Check whether it's already published, if so it will do hardlink @@ -359,7 +368,7 @@ class ExtractLook(pype.api.Extractor): # If source has been published before with the same settings, # then don't reprocess but hardlink from the original existing = find_paths_by_hash(texture_hash) - if existing: + if existing and not force: self.log.info("Found hash in database, preparing hardlink..") source = next((p for p in existing if os.path.exists(p)), None) if filepath: From 7b09e1c41e846c0ba1a0686653cdf5c3101c0c17 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 12:24:06 +0100 Subject: [PATCH 47/55] formatting --- pype/ftrack/ftrack_server/lib.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 748937c7bd..fd4c1fe7b9 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -49,7 +49,9 @@ def ftrack_events_mongo_settings(): def get_ftrack_event_mongo_info(): - host, port, database, username, password, collection, auth_db = ftrack_events_mongo_settings() + host, port, database, username, password, collection, auth_db = ( + ftrack_events_mongo_settings() + ) user_pass = "" if username and password: user_pass = "{}:{}@".format(username, password) From 4497a89f53198adfeb60d6ea663fd9e266f0ef5e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 12:24:36 +0100 Subject: [PATCH 48/55] Storer EventHub moved to lib --- pype/ftrack/ftrack_server/lib.py | 29 +++++++++++++++++++++ pype/ftrack/ftrack_server/session_storer.py | 27 ------------------- 2 files changed, 29 insertions(+), 27 deletions(-) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index fd4c1fe7b9..80c147d400 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -99,3 +99,32 @@ def check_ftrack_url(url, log_errors=True): print('DEBUG: Ftrack server {} is accessible.'.format(url)) return url + + +class StorerEventHub(ftrack_api.event.hub.EventHub): + def __init__(self, *args, **kwargs): + self.sock = kwargs.pop("sock") + super(StorerEventHub, self).__init__(*args, **kwargs) + + def _handle_packet(self, code, packet_identifier, path, data): + """Override `_handle_packet` which extend heartbeat""" + code_name = self._code_name_mapping[code] + if code_name == "heartbeat": + # Reply with heartbeat. + self.sock.sendall(b"storer") + return self._send_packet(self._code_name_mapping['heartbeat']) + + elif code_name == "connect": + event = ftrack_api.event.base.Event( + topic="pype.storer.started", + data={}, + source={ + "id": self.id, + "user": {"username": self._api_user} + } + ) + self._event_queue.put(event) + + return super(StorerEventHub, self)._handle_packet( + code, packet_identifier, path, data + ) diff --git a/pype/ftrack/ftrack_server/session_storer.py b/pype/ftrack/ftrack_server/session_storer.py index 0b44d7d3a1..29abf329f0 100644 --- a/pype/ftrack/ftrack_server/session_storer.py +++ b/pype/ftrack/ftrack_server/session_storer.py @@ -14,33 +14,6 @@ import ftrack_api.event from ftrack_api.logging import LazyLogMessage as L -class StorerEventHub(ftrack_api.event.hub.EventHub): - def __init__(self, *args, **kwargs): - self.sock = kwargs.pop("sock") - super(StorerEventHub, self).__init__(*args, **kwargs) - - def _handle_packet(self, code, packet_identifier, path, data): - """Override `_handle_packet` which extend heartbeat""" - code_name = self._code_name_mapping[code] - if code_name == "heartbeat": - # Reply with heartbeat. 
- self.sock.sendall(b"storer") - return self._send_packet(self._code_name_mapping['heartbeat']) - - elif code_name == "connect": - event = ftrack_api.event.base.Event( - topic="pype.storer.started", - data={}, - source={ - "id": self.id, - "user": {"username": self._api_user} - } - ) - self._event_queue.put(event) - - return super(StorerEventHub, self)._handle_packet( - code, packet_identifier, path, data - ) class StorerSession(ftrack_api.session.Session): From d714f5fb780c7c7db13e4610be385590e373778a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 12:25:25 +0100 Subject: [PATCH 49/55] Processor Eventub moved to lib --- pype/ftrack/ftrack_server/lib.py | 115 ++++++++++++++++++ .../ftrack/ftrack_server/session_processor.py | 110 ----------------- 2 files changed, 115 insertions(+), 110 deletions(-) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 80c147d400..091df72a98 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -128,3 +128,118 @@ class StorerEventHub(ftrack_api.event.hub.EventHub): return super(StorerEventHub, self)._handle_packet( code, packet_identifier, path, data ) + + +class ProcessEventHub(ftrack_api.event.hub.EventHub): + url, database, table_name = get_ftrack_event_mongo_info() + + is_table_created = False + pypelog = Logger().get_logger("Session Processor") + + def __init__(self, *args, **kwargs): + self.dbcon = DbConnector( + mongo_url=self.url, + database_name=self.database, + table_name=self.table_name + ) + self.sock = kwargs.pop("sock") + super(ProcessEventHub, self).__init__(*args, **kwargs) + + def prepare_dbcon(self): + try: + self.dbcon.install() + self.dbcon._database.list_collection_names() + except pymongo.errors.AutoReconnect: + self.pypelog.error( + "Mongo server \"{}\" is not responding, exiting.".format( + os.environ["AVALON_MONGO"] + ) + ) + sys.exit(0) + + except pymongo.errors.OperationFailure: + self.pypelog.error(( + "Error with Mongo access, probably permissions." + "Check if exist database with name \"{}\"" + " and collection \"{}\" inside." + ).format(self.database, self.table_name)) + self.sock.sendall(b"MongoError") + sys.exit(0) + + def wait(self, duration=None): + """Overriden wait + + Event are loaded from Mongo DB when queue is empty. Handled event is + set as processed in Mongo DB. + """ + started = time.time() + self.prepare_dbcon() + while True: + try: + event = self._event_queue.get(timeout=0.1) + except queue.Empty: + if not self.load_events(): + time.sleep(0.5) + else: + try: + self._handle(event) + self.dbcon.update_one( + {"id": event["id"]}, + {"$set": {"pype_data.is_processed": True}} + ) + except pymongo.errors.AutoReconnect: + self.pypelog.error(( + "Mongo server \"{}\" is not responding, exiting." + ).format(os.environ["AVALON_MONGO"])) + sys.exit(0) + # Additional special processing of events. 
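+            # The server sends "ftrack.meta.disconnected" when it drops the
+            # connection; breaking here lets the subprocess exit instead of
+            # waiting on a dead event hub.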
+ if event['topic'] == 'ftrack.meta.disconnected': + break + + if duration is not None: + if (time.time() - started) > duration: + break + + def load_events(self): + """Load not processed events sorted by stored date""" + ago_date = datetime.datetime.now() - datetime.timedelta(days=3) + result = self.dbcon.delete_many({ + "pype_data.stored": {"$lte": ago_date}, + "pype_data.is_processed": True + }) + + not_processed_events = self.dbcon.find( + {"pype_data.is_processed": False} + ).sort( + [("pype_data.stored", pymongo.ASCENDING)] + ) + + found = False + for event_data in not_processed_events: + new_event_data = { + k: v for k, v in event_data.items() + if k not in ["_id", "pype_data"] + } + try: + event = ftrack_api.event.base.Event(**new_event_data) + except Exception: + self.logger.exception(L( + 'Failed to convert payload into event: {0}', + event_data + )) + continue + found = True + self._event_queue.put(event) + + return found + + def _handle_packet(self, code, packet_identifier, path, data): + """Override `_handle_packet` which skip events and extend heartbeat""" + code_name = self._code_name_mapping[code] + if code_name == "event": + return + if code_name == "heartbeat": + self.sock.sendall(b"processor") + return self._send_packet(self._code_name_mapping["heartbeat"]) + + return super()._handle_packet(code, packet_identifier, path, data) diff --git a/pype/ftrack/ftrack_server/session_processor.py b/pype/ftrack/ftrack_server/session_processor.py index 133719bab4..a17f919969 100644 --- a/pype/ftrack/ftrack_server/session_processor.py +++ b/pype/ftrack/ftrack_server/session_processor.py @@ -24,116 +24,6 @@ from pypeapp import Logger log = Logger().get_logger("Session processor") -class ProcessEventHub(ftrack_api.event.hub.EventHub): - url, database, table_name = get_ftrack_event_mongo_info() - - is_table_created = False - - def __init__(self, *args, **kwargs): - self.dbcon = DbConnector( - mongo_url=self.url, - database_name=self.database, - table_name=self.table_name - ) - self.sock = kwargs.pop("sock") - super(ProcessEventHub, self).__init__(*args, **kwargs) - - def prepare_dbcon(self): - try: - self.dbcon.install() - self.dbcon._database.list_collection_names() - except pymongo.errors.AutoReconnect: - log.error("Mongo server \"{}\" is not responding, exiting.".format( - os.environ["AVALON_MONGO"] - )) - sys.exit(0) - - except pymongo.errors.OperationFailure: - log.error(( - "Error with Mongo access, probably permissions." - "Check if exist database with name \"{}\"" - " and collection \"{}\" inside." - ).format(self.database, self.table_name)) - self.sock.sendall(b"MongoError") - sys.exit(0) - - def wait(self, duration=None): - """Overriden wait - - Event are loaded from Mongo DB when queue is empty. Handled event is - set as processed in Mongo DB. - """ - started = time.time() - self.prepare_dbcon() - while True: - try: - event = self._event_queue.get(timeout=0.1) - except queue.Empty: - if not self.load_events(): - time.sleep(0.5) - else: - try: - self._handle(event) - self.dbcon.update_one( - {"id": event["id"]}, - {"$set": {"pype_data.is_processed": True}} - ) - except pymongo.errors.AutoReconnect: - log.error(( - "Mongo server \"{}\" is not responding, exiting." - ).format(os.environ["AVALON_MONGO"])) - sys.exit(0) - # Additional special processing of events. 
- if event['topic'] == 'ftrack.meta.disconnected': - break - - if duration is not None: - if (time.time() - started) > duration: - break - - def load_events(self): - """Load not processed events sorted by stored date""" - ago_date = datetime.datetime.now() - datetime.timedelta(days=3) - result = self.dbcon.delete_many({ - "pype_data.stored": {"$lte": ago_date}, - "pype_data.is_processed": True - }) - - not_processed_events = self.dbcon.find( - {"pype_data.is_processed": False} - ).sort( - [("pype_data.stored", pymongo.ASCENDING)] - ) - - found = False - for event_data in not_processed_events: - new_event_data = { - k: v for k, v in event_data.items() - if k not in ["_id", "pype_data"] - } - try: - event = ftrack_api.event.base.Event(**new_event_data) - except Exception: - self.logger.exception(L( - 'Failed to convert payload into event: {0}', - event_data - )) - continue - found = True - self._event_queue.put(event) - - return found - - def _handle_packet(self, code, packet_identifier, path, data): - """Override `_handle_packet` which skip events and extend heartbeat""" - code_name = self._code_name_mapping[code] - if code_name == "event": - return - if code_name == "heartbeat": - self.sock.sendall(b"processor") - return self._send_packet(self._code_name_mapping["heartbeat"]) - - return super()._handle_packet(code, packet_identifier, path, data) class ProcessSession(ftrack_api.session.Session): From edacecf04152ee36e2e6047747fbc69ba4dfc75f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 12:27:59 +0100 Subject: [PATCH 50/55] sessions from processor and storer moved to lib and added possiblity to set EventHub class in init --- pype/ftrack/ftrack_server/lib.py | 178 +++++++++++++ .../ftrack/ftrack_server/session_processor.py | 182 ------------- pype/ftrack/ftrack_server/session_storer.py | 242 ------------------ 3 files changed, 178 insertions(+), 424 deletions(-) delete mode 100644 pype/ftrack/ftrack_server/session_processor.py delete mode 100644 pype/ftrack/ftrack_server/session_storer.py diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 091df72a98..edd3cee09b 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -1,10 +1,32 @@ import os +import sys +import logging +import getpass +import atexit +import tempfile +import threading +import datetime +import time +import queue +import pymongo + import requests +import ftrack_api +import ftrack_api.session +import ftrack_api.cache +import ftrack_api.operation +import ftrack_api._centralized_storage_scenario +import ftrack_api.event +from ftrack_api.logging import LazyLogMessage as L try: from urllib.parse import urlparse, parse_qs except ImportError: from urlparse import urlparse, parse_qs +from pypeapp import Logger + +from pype.ftrack.lib.custom_db_connector import DbConnector + def ftrack_events_mongo_settings(): host = None @@ -243,3 +265,159 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub): return self._send_packet(self._code_name_mapping["heartbeat"]) return super()._handle_packet(code, packet_identifier, path, data) +class SocketSession(ftrack_api.session.Session): + '''An isolated session for interaction with an ftrack server.''' + def __init__( + self, server_url=None, api_key=None, api_user=None, auto_populate=True, + plugin_paths=None, cache=None, cache_key_maker=None, + auto_connect_event_hub=None, schema_cache_path=None, + plugin_arguments=None, sock=None, Eventhub=None + ): + super(ftrack_api.session.Session, self).__init__() + self.logger 
= logging.getLogger( + __name__ + '.' + self.__class__.__name__ + ) + self._closed = False + + if server_url is None: + server_url = os.environ.get('FTRACK_SERVER') + + if not server_url: + raise TypeError( + 'Required "server_url" not specified. Pass as argument or set ' + 'in environment variable FTRACK_SERVER.' + ) + + self._server_url = server_url + + if api_key is None: + api_key = os.environ.get( + 'FTRACK_API_KEY', + # Backwards compatibility + os.environ.get('FTRACK_APIKEY') + ) + + if not api_key: + raise TypeError( + 'Required "api_key" not specified. Pass as argument or set in ' + 'environment variable FTRACK_API_KEY.' + ) + + self._api_key = api_key + + if api_user is None: + api_user = os.environ.get('FTRACK_API_USER') + if not api_user: + try: + api_user = getpass.getuser() + except Exception: + pass + + if not api_user: + raise TypeError( + 'Required "api_user" not specified. Pass as argument, set in ' + 'environment variable FTRACK_API_USER or one of the standard ' + 'environment variables used by Python\'s getpass module.' + ) + + self._api_user = api_user + + # Currently pending operations. + self.recorded_operations = ftrack_api.operation.Operations() + self.record_operations = True + + self.cache_key_maker = cache_key_maker + if self.cache_key_maker is None: + self.cache_key_maker = ftrack_api.cache.StringKeyMaker() + + # Enforce always having a memory cache at top level so that the same + # in-memory instance is returned from session. + self.cache = ftrack_api.cache.LayeredCache([ + ftrack_api.cache.MemoryCache() + ]) + + if cache is not None: + if callable(cache): + cache = cache(self) + + if cache is not None: + self.cache.caches.append(cache) + + self._managed_request = None + self._request = requests.Session() + self._request.auth = ftrack_api.session.SessionAuthentication( + self._api_key, self._api_user + ) + + self.auto_populate = auto_populate + + # Fetch server information and in doing so also check credentials. + self._server_information = self._fetch_server_information() + + # Now check compatibility of server based on retrieved information. + self.check_server_compatibility() + + # Construct event hub and load plugins. + if Eventhub is None: + Eventhub = ftrack_api.event.hub.EventHub + self._event_hub = Eventhub( + self._server_url, + self._api_user, + self._api_key, + sock=sock + ) + + self._auto_connect_event_hub_thread = None + if auto_connect_event_hub in (None, True): + # Connect to event hub in background thread so as not to block main + # session usage waiting for event hub connection. + self._auto_connect_event_hub_thread = threading.Thread( + target=self._event_hub.connect + ) + self._auto_connect_event_hub_thread.daemon = True + self._auto_connect_event_hub_thread.start() + + # To help with migration from auto_connect_event_hub default changing + # from True to False. + self._event_hub._deprecation_warning_auto_connect = ( + auto_connect_event_hub is None + ) + + # Register to auto-close session on exit. + atexit.register(self.close) + + self._plugin_paths = plugin_paths + if self._plugin_paths is None: + self._plugin_paths = os.environ.get( + 'FTRACK_EVENT_PLUGIN_PATH', '' + ).split(os.pathsep) + + self._discover_plugins(plugin_arguments=plugin_arguments) + + # TODO: Make schemas read-only and non-mutable (or at least without + # rebuilding types)? 
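+        # Cache resolution order below: an explicit schema_cache_path wins,
+        # then FTRACK_API_SCHEMA_CACHE_PATH from the environment, then the
+        # system temp directory; passing False disables schema caching.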
+ if schema_cache_path is not False: + if schema_cache_path is None: + schema_cache_path = os.environ.get( + 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() + ) + + schema_cache_path = os.path.join( + schema_cache_path, 'ftrack_api_schema_cache.json' + ) + + self.schemas = self._load_schemas(schema_cache_path) + self.types = self._build_entity_type_classes(self.schemas) + + ftrack_api._centralized_storage_scenario.register(self) + + self._configure_locations() + self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.api.session.ready', + data=dict( + session=self + ) + ), + synchronous=True + ) diff --git a/pype/ftrack/ftrack_server/session_processor.py b/pype/ftrack/ftrack_server/session_processor.py deleted file mode 100644 index a17f919969..0000000000 --- a/pype/ftrack/ftrack_server/session_processor.py +++ /dev/null @@ -1,182 +0,0 @@ -import logging -import os -import atexit -import datetime -import tempfile -import threading -import time -import requests -import queue -import pymongo - -import ftrack_api -import ftrack_api.session -import ftrack_api.cache -import ftrack_api.operation -import ftrack_api._centralized_storage_scenario -import ftrack_api.event -from ftrack_api.logging import LazyLogMessage as L - -from pype.ftrack.lib.custom_db_connector import DbConnector -from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info -from pypeapp import Logger - -log = Logger().get_logger("Session processor") - - - - -class ProcessSession(ftrack_api.session.Session): - '''An isolated session for interaction with an ftrack server.''' - def __init__( - self, server_url=None, api_key=None, api_user=None, auto_populate=True, - plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=None, schema_cache_path=None, - plugin_arguments=None, sock=None - ): - super(ftrack_api.session.Session, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self._closed = False - - if server_url is None: - server_url = os.environ.get('FTRACK_SERVER') - - if not server_url: - raise TypeError( - 'Required "server_url" not specified. Pass as argument or set ' - 'in environment variable FTRACK_SERVER.' - ) - - self._server_url = server_url - - if api_key is None: - api_key = os.environ.get( - 'FTRACK_API_KEY', - # Backwards compatibility - os.environ.get('FTRACK_APIKEY') - ) - - if not api_key: - raise TypeError( - 'Required "api_key" not specified. Pass as argument or set in ' - 'environment variable FTRACK_API_KEY.' - ) - - self._api_key = api_key - - if api_user is None: - api_user = os.environ.get('FTRACK_API_USER') - if not api_user: - try: - api_user = getpass.getuser() - except Exception: - pass - - if not api_user: - raise TypeError( - 'Required "api_user" not specified. Pass as argument, set in ' - 'environment variable FTRACK_API_USER or one of the standard ' - 'environment variables used by Python\'s getpass module.' - ) - - self._api_user = api_user - - # Currently pending operations. - self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True - - self.cache_key_maker = cache_key_maker - if self.cache_key_maker is None: - self.cache_key_maker = ftrack_api.cache.StringKeyMaker() - - # Enforce always having a memory cache at top level so that the same - # in-memory instance is returned from session. 
- self.cache = ftrack_api.cache.LayeredCache([ - ftrack_api.cache.MemoryCache() - ]) - - if cache is not None: - if callable(cache): - cache = cache(self) - - if cache is not None: - self.cache.caches.append(cache) - - self._managed_request = None - self._request = requests.Session() - self._request.auth = ftrack_api.session.SessionAuthentication( - self._api_key, self._api_user - ) - - self.auto_populate = auto_populate - - # Fetch server information and in doing so also check credentials. - self._server_information = self._fetch_server_information() - - # Now check compatibility of server based on retrieved information. - self.check_server_compatibility() - - # Construct event hub and load plugins. - self._event_hub = ProcessEventHub( - self._server_url, - self._api_user, - self._api_key, - sock=sock - ) - - self._auto_connect_event_hub_thread = None - if auto_connect_event_hub in (None, True): - # Connect to event hub in background thread so as not to block main - # session usage waiting for event hub connection. - self._auto_connect_event_hub_thread = threading.Thread( - target=self._event_hub.connect - ) - self._auto_connect_event_hub_thread.daemon = True - self._auto_connect_event_hub_thread.start() - - # To help with migration from auto_connect_event_hub default changing - # from True to False. - self._event_hub._deprecation_warning_auto_connect = ( - auto_connect_event_hub is None - ) - - # Register to auto-close session on exit. - atexit.register(self.close) - - self._plugin_paths = plugin_paths - if self._plugin_paths is None: - self._plugin_paths = os.environ.get( - 'FTRACK_EVENT_PLUGIN_PATH', '' - ).split(os.pathsep) - - self._discover_plugins(plugin_arguments=plugin_arguments) - - # TODO: Make schemas read-only and non-mutable (or at least without - # rebuilding types)? - if schema_cache_path is not False: - if schema_cache_path is None: - schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() - ) - - schema_cache_path = os.path.join( - schema_cache_path, 'ftrack_api_schema_cache.json' - ) - - self.schemas = self._load_schemas(schema_cache_path) - self.types = self._build_entity_type_classes(self.schemas) - - ftrack_api._centralized_storage_scenario.register(self) - - self._configure_locations() - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.ready', - data=dict( - session=self - ) - ), - synchronous=True - ) diff --git a/pype/ftrack/ftrack_server/session_storer.py b/pype/ftrack/ftrack_server/session_storer.py deleted file mode 100644 index 29abf329f0..0000000000 --- a/pype/ftrack/ftrack_server/session_storer.py +++ /dev/null @@ -1,242 +0,0 @@ -import logging -import os -import atexit -import tempfile -import threading -import requests - -import ftrack_api -import ftrack_api.session -import ftrack_api.cache -import ftrack_api.operation -import ftrack_api._centralized_storage_scenario -import ftrack_api.event -from ftrack_api.logging import LazyLogMessage as L - - - - -class StorerSession(ftrack_api.session.Session): - '''An isolated session for interaction with an ftrack server.''' - def __init__( - self, server_url=None, api_key=None, api_user=None, auto_populate=True, - plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=None, schema_cache_path=None, - plugin_arguments=None, sock=None - ): - '''Initialise session. - - *server_url* should be the URL of the ftrack server to connect to - including any port number. 
If not specified attempt to look up from - :envvar:`FTRACK_SERVER`. - - *api_key* should be the API key to use for authentication whilst - *api_user* should be the username of the user in ftrack to record - operations against. If not specified, *api_key* should be retrieved - from :envvar:`FTRACK_API_KEY` and *api_user* from - :envvar:`FTRACK_API_USER`. - - If *auto_populate* is True (the default), then accessing entity - attributes will cause them to be automatically fetched from the server - if they are not already. This flag can be changed on the session - directly at any time. - - *plugin_paths* should be a list of paths to search for plugins. If not - specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`. - - *cache* should be an instance of a cache that fulfils the - :class:`ftrack_api.cache.Cache` interface and will be used as the cache - for the session. It can also be a callable that will be called with the - session instance as sole argument. The callable should return ``None`` - if a suitable cache could not be configured, but session instantiation - can continue safely. - - .. note:: - - The session will add the specified cache to a pre-configured layered - cache that specifies the top level cache as a - :class:`ftrack_api.cache.MemoryCache`. Therefore, it is unnecessary - to construct a separate memory cache for typical behaviour. Working - around this behaviour or removing the memory cache can lead to - unexpected behaviour. - - *cache_key_maker* should be an instance of a key maker that fulfils the - :class:`ftrack_api.cache.KeyMaker` interface and will be used to - generate keys for objects being stored in the *cache*. If not specified, - a :class:`~ftrack_api.cache.StringKeyMaker` will be used. - - If *auto_connect_event_hub* is True then embedded event hub will be - automatically connected to the event server and allow for publishing and - subscribing to **non-local** events. If False, then only publishing and - subscribing to **local** events will be possible until the hub is - manually connected using :meth:`EventHub.connect - `. - - .. note:: - - The event hub connection is performed in a background thread to - improve session startup time. If a registered plugin requires a - connected event hub then it should check the event hub connection - status explicitly. Subscribing to events does *not* require a - connected event hub. - - Enable schema caching by setting *schema_cache_path* to a folder path. - If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to - determine the path to store cache in. If the environment variable is - also not specified then a temporary directory will be used. Set to - `False` to disable schema caching entirely. - - *plugin_arguments* should be an optional mapping (dict) of keyword - arguments to pass to plugin register functions upon discovery. If a - discovered plugin has a signature that is incompatible with the passed - arguments, the discovery mechanism will attempt to reduce the passed - arguments to only those that the plugin accepts. Note that a warning - will be logged in this case. - - ''' - super(ftrack_api.session.Session, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self._closed = False - - if server_url is None: - server_url = os.environ.get('FTRACK_SERVER') - - if not server_url: - raise TypeError( - 'Required "server_url" not specified. Pass as argument or set ' - 'in environment variable FTRACK_SERVER.' 
- ) - - self._server_url = server_url - - if api_key is None: - api_key = os.environ.get( - 'FTRACK_API_KEY', - # Backwards compatibility - os.environ.get('FTRACK_APIKEY') - ) - - if not api_key: - raise TypeError( - 'Required "api_key" not specified. Pass as argument or set in ' - 'environment variable FTRACK_API_KEY.' - ) - - self._api_key = api_key - - if api_user is None: - api_user = os.environ.get('FTRACK_API_USER') - if not api_user: - try: - api_user = getpass.getuser() - except Exception: - pass - - if not api_user: - raise TypeError( - 'Required "api_user" not specified. Pass as argument, set in ' - 'environment variable FTRACK_API_USER or one of the standard ' - 'environment variables used by Python\'s getpass module.' - ) - - self._api_user = api_user - - # Currently pending operations. - self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True - - self.cache_key_maker = cache_key_maker - if self.cache_key_maker is None: - self.cache_key_maker = ftrack_api.cache.StringKeyMaker() - - # Enforce always having a memory cache at top level so that the same - # in-memory instance is returned from session. - self.cache = ftrack_api.cache.LayeredCache([ - ftrack_api.cache.MemoryCache() - ]) - - if cache is not None: - if callable(cache): - cache = cache(self) - - if cache is not None: - self.cache.caches.append(cache) - - self._managed_request = None - self._request = requests.Session() - self._request.auth = ftrack_api.session.SessionAuthentication( - self._api_key, self._api_user - ) - - self.auto_populate = auto_populate - - # Fetch server information and in doing so also check credentials. - self._server_information = self._fetch_server_information() - - # Now check compatibility of server based on retrieved information. - self.check_server_compatibility() - - # Construct event hub and load plugins. - self._event_hub = StorerEventHub( - self._server_url, - self._api_user, - self._api_key, - sock=sock - ) - - self._auto_connect_event_hub_thread = None - if auto_connect_event_hub in (None, True): - # Connect to event hub in background thread so as not to block main - # session usage waiting for event hub connection. - self._auto_connect_event_hub_thread = threading.Thread( - target=self._event_hub.connect - ) - self._auto_connect_event_hub_thread.daemon = True - self._auto_connect_event_hub_thread.start() - - # To help with migration from auto_connect_event_hub default changing - # from True to False. - self._event_hub._deprecation_warning_auto_connect = ( - auto_connect_event_hub is None - ) - - # Register to auto-close session on exit. - atexit.register(self.close) - - self._plugin_paths = plugin_paths - if self._plugin_paths is None: - self._plugin_paths = os.environ.get( - 'FTRACK_EVENT_PLUGIN_PATH', '' - ).split(os.pathsep) - - self._discover_plugins(plugin_arguments=plugin_arguments) - - # TODO: Make schemas read-only and non-mutable (or at least without - # rebuilding types)? 
- if schema_cache_path is not False: - if schema_cache_path is None: - schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() - ) - - schema_cache_path = os.path.join( - schema_cache_path, 'ftrack_api_schema_cache.json' - ) - - self.schemas = self._load_schemas(schema_cache_path) - self.types = self._build_entity_type_classes(self.schemas) - - ftrack_api._centralized_storage_scenario.register(self) - - self._configure_locations() - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.ready', - data=dict( - session=self - ) - ), - synchronous=True - ) From dd52e6594db97395f4ace8c39c0d873518f54d09 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 12:29:28 +0100 Subject: [PATCH 51/55] used moved session and event hubs in subprocesses --- .../ftrack_server/sub_event_processor.py | 13 ++++---- pype/ftrack/ftrack_server/sub_event_storer.py | 30 ++++++++++--------- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/pype/ftrack/ftrack_server/sub_event_processor.py b/pype/ftrack/ftrack_server/sub_event_processor.py index 6ada787223..9c971ca916 100644 --- a/pype/ftrack/ftrack_server/sub_event_processor.py +++ b/pype/ftrack/ftrack_server/sub_event_processor.py @@ -1,12 +1,9 @@ -import os import sys -import datetime import signal import socket -import pymongo from ftrack_server import FtrackServer -from pype.ftrack.ftrack_server.session_processor import ProcessSession +from pype.ftrack.ftrack_server.lib import SocketSession, ProcessEventHub from pypeapp import Logger log = Logger().get_logger("Event processor") @@ -24,12 +21,14 @@ def main(args): sock.sendall(b"CreatedProcess") try: - session = ProcessSession(auto_connect_event_hub=True, sock=sock) - server = FtrackServer('event') + session = SocketSession( + auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub + ) + server = FtrackServer("event") log.debug("Launched Ftrack Event processor") server.run_server(session) - except Exception as exc: + except Exception: log.error("Event server crashed. See traceback below", exc_info=True) finally: diff --git a/pype/ftrack/ftrack_server/sub_event_storer.py b/pype/ftrack/ftrack_server/sub_event_storer.py index 4828b10bfa..11cda0e487 100644 --- a/pype/ftrack/ftrack_server/sub_event_storer.py +++ b/pype/ftrack/ftrack_server/sub_event_storer.py @@ -7,22 +7,22 @@ import pymongo import ftrack_api from ftrack_server import FtrackServer -from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info +from pype.ftrack.ftrack_server.lib import ( + get_ftrack_event_mongo_info, + SocketSession, + StorerEventHub +) from pype.ftrack.lib.custom_db_connector import DbConnector -from session_storer import StorerSession from pypeapp import Logger log = Logger().get_logger("Event storer") + +class SessionFactory: + session = None + + url, database, table_name = get_ftrack_event_mongo_info() - - -class SessionClass: - def __init__(self): - self.session = None - - -session_obj = SessionClass() dbcon = DbConnector( mongo_url=url, database_name=database, @@ -75,7 +75,7 @@ def launch(event): def trigger_sync(event): - session = session_obj.session + session = SessionFactory.session if session is None: log.warning("Session is not set. 
Can't trigger Sync to avalon action.") return True @@ -93,7 +93,7 @@ def trigger_sync(event): "$set": {"pype_data.is_processed": True} } dbcon.update_many(query, set_dict) - + selections = [] for project in projects: if project["status"] != "active": @@ -154,8 +154,10 @@ def main(args): sock.sendall(b"CreatedStore") try: - session = StorerSession(auto_connect_event_hub=True, sock=sock) - session_obj.session = session + session = SocketSession( + auto_connect_event_hub=True, sock=sock, Eventhub=StorerEventHub + ) + SessionFactory.session = session register(session) server = FtrackServer("event") log.debug("Launched Ftrack Event storer") From 8d7f29c1bac8fc1b6cf66137e07771b42b582c1e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 12:32:03 +0100 Subject: [PATCH 52/55] fix in trigger sync method, now check event_hub id to not trigger sync on every connect event --- pype/ftrack/ftrack_server/sub_event_storer.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/ftrack/ftrack_server/sub_event_storer.py b/pype/ftrack/ftrack_server/sub_event_storer.py index 11cda0e487..dfe8e21654 100644 --- a/pype/ftrack/ftrack_server/sub_event_storer.py +++ b/pype/ftrack/ftrack_server/sub_event_storer.py @@ -76,6 +76,10 @@ def launch(event): def trigger_sync(event): session = SessionFactory.session + source_id = event.get("source", {}).get("id") + if not source_id or source_id != session.event_hub.id: + return + if session is None: log.warning("Session is not set. Can't trigger Sync to avalon action.") return True From f5c326aa568a3660c4a8e612e1704b613403577e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 12:33:22 +0100 Subject: [PATCH 53/55] formatting changes --- pype/ftrack/ftrack_server/socket_thread.py | 4 ++-- pype/ftrack/ftrack_server/sub_legacy_server.py | 9 +++++---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py index d0a2868743..3309f75cd7 100644 --- a/pype/ftrack/ftrack_server/socket_thread.py +++ b/pype/ftrack/ftrack_server/socket_thread.py @@ -1,7 +1,5 @@ import os -import sys import time -import signal import socket import threading import subprocess @@ -10,7 +8,9 @@ from pypeapp import Logger class SocketThread(threading.Thread): """Thread that checks suprocess of storer of processor of events""" + MAX_TIMEOUT = 35 + def __init__(self, name, port, filepath): super(SocketThread, self).__init__() self.log = Logger().get_logger("SocketThread", "Event Thread") diff --git a/pype/ftrack/ftrack_server/sub_legacy_server.py b/pype/ftrack/ftrack_server/sub_legacy_server.py index 31f38d0404..8b7bab5e2e 100644 --- a/pype/ftrack/ftrack_server/sub_legacy_server.py +++ b/pype/ftrack/ftrack_server/sub_legacy_server.py @@ -1,4 +1,3 @@ -import os import sys import time import datetime @@ -7,7 +6,6 @@ import threading from ftrack_server import FtrackServer import ftrack_api -from ftrack_api.event.hub import EventHub from pypeapp import Logger log = Logger().get_logger("Event Server Legacy") @@ -37,7 +35,10 @@ class TimerChecker(threading.Thread): if not self.session.event_hub.connected: if not connected: - if (datetime.datetime.now() - start).seconds > self.max_time_out: + if ( + (datetime.datetime.now() - start).seconds > + self.max_time_out + ): log.error(( "Exiting event server. Session was not connected" " to ftrack server in {} seconds." 
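For readers tracing how these pieces fit together: the subprocesses answer
every ftrack heartbeat over a local socket (the b"storer" and b"processor"
replies in the EventHub subclasses above), and SocketThread treats prolonged
silence as a dead child. A minimal sketch of the parent side of that
handshake — an illustrative reduction, not the actual SocketThread code:

    import socket

    # Listen on localhost; the chosen port is handed to the subprocess.
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.bind(("localhost", 0))
    server.listen(1)
    connection, _ = server.accept()
    # Mirrors SocketThread.MAX_TIMEOUT (35 seconds in the diff above).
    connection.settimeout(35)

    while True:
        try:
            data = connection.recv(16)
        except socket.timeout:
            break  # no heartbeat arrived in time: treat the child as dead
        if not data:
            break  # the subprocess closed its end of the socket
        # Each chunk is b"storer" or b"processor", sent from _handle_packet.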
@@ -61,7 +62,7 @@ def main(args):
 def main(args):
     check_thread = None
     try:
-        server = FtrackServer('event')
+        server = FtrackServer("event")
         session = ftrack_api.Session(auto_connect_event_hub=True)
 
         check_thread = TimerChecker(server, session)
From 6e2ea0c05bc63fbcb4272e11922c202b55d39b0f Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 11 Dec 2019 12:34:36 +0100
Subject: [PATCH 54/55] change session check in base event handler

---
 pype/ftrack/lib/ftrack_base_handler.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/lib/ftrack_base_handler.py b/pype/ftrack/lib/ftrack_base_handler.py
index 4b57452961..8329505ffb 100644
--- a/pype/ftrack/lib/ftrack_base_handler.py
+++ b/pype/ftrack/lib/ftrack_base_handler.py
@@ -2,7 +2,7 @@ import functools
 import time
 from pypeapp import Logger
 import ftrack_api
-from pype.ftrack.ftrack_server import session_processor
+from pype.ftrack.ftrack_server.lib import SocketSession
 
 
 class MissingPermision(Exception):
@@ -41,7 +41,7 @@ class BaseHandler(object):
         self.log = Logger().get_logger(self.__class__.__name__)
         if not(
             isinstance(session, ftrack_api.session.Session) or
-            isinstance(session, session_processor.ProcessSession)
+            isinstance(session, SocketSession)
         ):
             raise Exception((
                 "Session object entered with args is instance of \"{}\""
From df34ed8705bf62e611b0b7d9e1e3f725df178457 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 12 Dec 2019 17:44:40 +0100
Subject: [PATCH 55/55] refactored version to task event and added support for
 setting multiple statuses

---
 .../events/event_version_to_task_statuses.py  | 170 ++++++++++++------
 1 file changed, 113 insertions(+), 57 deletions(-)

diff --git a/pype/ftrack/events/event_version_to_task_statuses.py b/pype/ftrack/events/event_version_to_task_statuses.py
index cd83b819bc..1f5f1514d7 100644
--- a/pype/ftrack/events/event_version_to_task_statuses.py
+++ b/pype/ftrack/events/event_version_to_task_statuses.py
@@ -4,6 +4,7 @@ from pypeapp import config
 
 
 class VersionToTaskStatus(BaseEvent):
+    # Presets usage
     default_status_mapping = {}
 
     def launch(self, session, event):
@@ -11,69 +12,124 @@ class VersionToTaskStatus(BaseEvent):
 
         # start of event procedure ----------------------------------
         for entity in event['data'].get('entities', []):
-            # Filter non-assetversions
-            if (
-                entity['entityType'] == 'assetversion' and
-                'statusid' in (entity.get('keys') or [])
-            ):
+            # Filter AssetVersions
+            if entity["entityType"] != "assetversion":
+                continue
 
-                version = session.get('AssetVersion', entity['entityId'])
-                try:
-                    version_status = session.get(
-                        'Status', entity['changes']['statusid']['new']
-                    )
-                except Exception:
+            # Skip if statusid not in keys (in changes)
+            keys = entity.get("keys")
+            if not keys or "statusid" not in keys:
+                continue
+
+            # Get the new status id of the version from the change data
+            version_status_id = (
+                entity
+                .get("changes", {})
+                .get("statusid", {})
+                .get("new", {})
+            )
+
+            # Just check that `new` is set to any value
+            if not version_status_id:
+                continue
+
+            try:
+                version_status = session.get("Status", version_status_id)
+            except Exception:
+                self.log.warning(
+                    "Failed to query status with id [ {} ]".format(
+                        version_status_id
+                    ),
+                    exc_info=True
+                )
+                continue
+
+            if not version_status:
+                continue
+
+            version_status_orig = version_status["name"]
+
+            # Load status mapping from presets
+            status_mapping = (
+                config.get_presets()
+                .get("ftrack", {})
+                .get("ftrack_config", {})
+                .get("status_version_to_task")
+            ) or self.default_status_mapping
+
+            # Skip if mapping is empty
+            if
not status_mapping:
+                continue
+
+            # Lower version status name and check if it has a mapping
+            version_status = version_status_orig.lower()
+            new_status_names = status_mapping.get(version_status)
+            if not new_status_names:
+                continue
+
+            self.log.debug(
+                "Processing AssetVersion status change: [ {} ]".format(
+                    version_status_orig
+                )
+            )
+
+            # Backwards compatibility (convert string to list)
+            if isinstance(new_status_names, str):
+                new_status_names = [new_status_names]
+
+            # Lower all names from presets
+            new_status_names = [name.lower() for name in new_status_names]
+
+            # Get entities necessary for processing
+            version = session.get("AssetVersion", entity["entityId"])
+            task = version.get("task")
+            if not task:
+                continue
+
+            project_schema = task["project"]["project_schema"]
+            # Get all available statuses for Task
+            statuses = project_schema.get_statuses("Task", task["type_id"])
+            # Map lowered status names to their status objects
+            stat_names_low = {
+                status["name"].lower(): status for status in statuses
+            }
+
+            new_status = None
+            for status_name in new_status_names:
+                if status_name not in stat_names_low:
                     continue
-                task_status = version_status
-                task = version['task']
-                self.log.info('>>> version status: [ {} ]'.format(
-                    version_status['name']))
-                version_name_low = version_status['name'].lower()
+                # store object of found status
+                new_status = stat_names_low[status_name]
+                self.log.debug("Status to set: [ {} ]".format(
+                    new_status["name"]
+                ))
+                break
-                status_mapping = (
-                    config.get_presets()
-                    .get("ftrack", {})
-                    .get("ftrack_config", {})
-                    .get("status_version_to_task")
-                ) or self.default_status_mapping
+            # Skip if none of the status names were found for this entity
+            if not new_status:
+                self.log.warning(
+                    "None of the preset statuses could be set: {}".format(
+                        str(new_status_names)
+                    )
+                )
+                continue
-                status_to_set = status_mapping.get(version_name_low)
+            # Get full path to task for logging
+            ent_path = "/".join([ent["name"] for ent in task["link"]])
-                self.log.info(
-                    '>>> status to set: [ {} ]'.format(status_to_set))
-
-                if status_to_set is not None:
-                    query = 'Status where name is "{}"'.format(status_to_set)
-                    try:
-                        task_status = session.query(query).one()
-                    except Exception:
-                        self.log.info(
-                            '!!! status was not found in Ftrack [ {} ]'.format(
-                                status_to_set
-                            )
-                        )
-                        continue
-
-                # Proceed if the task status was set
-                if task_status is not None:
-                    # Get path to task
-                    path = task['name']
-                    for p in task['ancestors']:
-                        path = p['name'] + '/' + path
-
-                    # Setting task status
-                    try:
-                        task['status'] = task_status
-                        session.commit()
-                    except Exception as e:
-                        session.rollback()
-                        self.log.warning('!!! [ {} ] status couldnt be set:\
-                            [ {} ]'.format(path, e))
-                        session.rollback()
-                    else:
-                        self.log.info('>>> [ {} ] updated to [ {} ]'.format(
-                            path, task_status['name']))
+            # Setting task status
+            try:
+                task["status"] = new_status
+                session.commit()
+                self.log.debug("[ {} ] Status updated to [ {} ]".format(
+                    ent_path, new_status['name']
+                ))
+            except Exception:
+                session.rollback()
+                self.log.warning(
+                    "[ {} ] Status couldn't be set".format(ent_path),
+                    exc_info=True
+                )
 
 
 def register(session, plugins_presets):
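To illustrate the multi-status feature this last commit enables, here is a
hypothetical `status_version_to_task` preset (the status names are made up;
the real values live in each studio's pype presets). A version status maps
either to a single task-status name or, newly, to an ordered list of
candidates; names are lower-cased and the first one present in the project's
task workflow schema wins:

    {
        "ftrack": {
            "ftrack_config": {
                "status_version_to_task": {
                    "approved": "approved",
                    "render complete": ["to review", "pending review"]
                }
            }
        }
    }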