From 51957dd3aec5d09c83ffca921966229d4a382b3f Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Wed, 2 Mar 2022 15:37:53 +0700 Subject: [PATCH 01/69] Deformer node ids validation plugin for Maya --- .../validate_node_ids_deformer_transfer.py | 105 ++++++++++++++++++ 1 file changed, 105 insertions(+) create mode 100644 openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py new file mode 100644 index 0000000000..67b4aff136 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py @@ -0,0 +1,105 @@ +from maya import cmds + +import pyblish.api +import openpype.api +import openpype.hosts.maya.api.action +from openpype.hosts.maya.api import lib + + +class ValidateNodeIdsDeformerTransfer(pyblish.api.InstancePlugin): + """Validate if deformed shapes have related IDs to the original + shapes. + + When a deformer is applied in the scene on a mesh, + Maya creates a new "deformer" shape node for the mesh. + This new node does not get the original ID and later references + to the original node ID don't match. + + This validator checks whether the IDs are valid on all the shape + nodes in the instance. 
+ """ + + order = openpype.api.ValidateContentsOrder + families = ['rig'] + hosts = ['maya'] + label = 'Deformed shape ids transferred' + actions = [ + openpype.hosts.maya.api.action.SelectInvalidAction, + openpype.api.RepairAction + ] + + def process(self, instance): + """Process all the nodes in the instance""" + + # Ensure nodes with sibling share the same ID + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + "Shapes found that are considered 'Deformed'" + " with invalid object ids: {0}".format(invalid) + ) + + @classmethod + def get_invalid(cls, instance): + """Get all nodes which do not match the criteria""" + + shapes = cmds.ls(instance[:], + dag=True, + leaf=True, + shapes=True, + long=True, + noIntermediate=True) + + invalid = [] + for shape in shapes: + sibling_id = cls._get_id_from_sibling(shape) + if not sibling_id: + continue + + current_id = lib.get_id(shape) + if current_id != sibling_id: + invalid.append(shape) + + return invalid + + @classmethod + def _get_id_from_sibling(cls, node): + """In some cases, the history of the deformed shapes cannot be used + to get the original shape, as the relation with the orignal shape + has been lost. + The original shape can be found as a sibling of the deformed shape + (sharing same transform parent), which has the "intermediate object" + attribute set. + The ID of that shape node can then be transferred to the deformed + shape node. 
+ """ + + # Get long name + node = cmds.ls(node, long=True)[0] + + parent = cmds.listRelatives(node, parent=True, fullPath=True) + + # Get siblings of same type + node_type = cmds.nodeType(node) + similar_nodes = cmds.listRelatives(parent, type=node_type, fullPath=1) + # Exclude itself + similar_nodes = [x for x in similar_nodes if x != node] + + for similar_node in similar_nodes: + # Make sure it is an "intermediate object" + if cmds.getAttr(similar_node + ".io"): + _id = lib.get_id(similar_node) + if _id: + return _id + + @classmethod + def repair(cls, instance): + + for node in cls.get_invalid(instance): + # Get the original id from sibling + sibling_id = cls._get_id_from_sibling(node) + if not sibling_id: + cls.log.error("Could not find ID from sibling for '%s'", node) + continue + + lib.set_id(node, sibling_id, overwrite=True) From d714e52921ca6a36d2568e1a9e98fc7da8085662 Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Wed, 2 Mar 2022 18:47:07 +0700 Subject: [PATCH 02/69] Refactor to existing lib function + plugin --- openpype/hosts/maya/api/lib.py | 30 ++++- ...date_animation_out_set_related_node_ids.py | 4 +- .../validate_node_ids_deformed_shapes.py | 4 +- .../validate_node_ids_deformer_transfer.py | 105 ------------------ .../publish/validate_rig_out_set_node_ids.py | 16 +-- 5 files changed, 38 insertions(+), 121 deletions(-) delete mode 100644 openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 41528f20ba..2f7a09d4c4 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1751,18 +1751,24 @@ def remove_other_uv_sets(mesh): cmds.removeMultiInstance(attr, b=True) -def get_id_from_history(node): +def get_id_from_sibling(node, history_only=True): """Return first node id in the history chain that matches this node. The nodes in history must be of the exact same node type and must be parented under the same parent. 
+ If no matching node is found in history, the siblings of the node + are checked. Additionally to having the same parent, the sibling must + be marked as 'intermediate object'. + Args: - node (str): node to retrieve the + node (str): node to retrieve the history from + history_only (bool): also looks in node's siblings if True + and if nothing found in history Returns: - str or None: The id from the node in history or None when no id found - on any valid nodes in the history. + str or None: The id from the sibling node or None when no id found + on any valid nodes in the history or siblings. """ @@ -1791,6 +1797,22 @@ def get_id_from_history(node): if _id: return _id + if not history_only: + # Get siblings of same type + similar_nodes = cmds.listRelatives(parent, + type=node_type, + fullPath=True) + # Exclude itself + similar_nodes = [x for x in similar_nodes if x != node] + + for similar_node in similar_nodes: + # Check if "intermediate object" + if cmds.getAttr(similar_node + ".io"): + _id = get_id(similar_node) + if _id: + return _id + + # Project settings def set_scene_fps(fps, update=True): diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py index 00f0d38775..7c1c695237 100644 --- a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -65,7 +65,7 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): invalid.append(node) continue - history_id = lib.get_id_from_history(node) + history_id = lib.get_id_from_sibling(node) if history_id is not None and node_id != history_id: invalid.append(node) @@ -76,7 +76,7 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): for node in cls.get_invalid(instance): # Get the original id from history - history_id = lib.get_id_from_history(node) + history_id = 
lib.get_id_from_sibling(node) if not history_id: cls.log.error("Could not find ID in history for '%s'", node) continue diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py index a4d4d2bcc2..0324be9fc9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py @@ -48,7 +48,7 @@ class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin): invalid = [] for shape in shapes: - history_id = lib.get_id_from_history(shape) + history_id = lib.get_id_from_sibling(shape) if history_id: current_id = lib.get_id(shape) if current_id != history_id: @@ -61,7 +61,7 @@ class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin): for node in cls.get_invalid(instance): # Get the original id from history - history_id = lib.get_id_from_history(node) + history_id = lib.get_id_from_sibling(node) if not history_id: cls.log.error("Could not find ID in history for '%s'", node) continue diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py deleted file mode 100644 index 67b4aff136..0000000000 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py +++ /dev/null @@ -1,105 +0,0 @@ -from maya import cmds - -import pyblish.api -import openpype.api -import openpype.hosts.maya.api.action -from openpype.hosts.maya.api import lib - - -class ValidateNodeIdsDeformerTransfer(pyblish.api.InstancePlugin): - """Validate if deformed shapes have related IDs to the original - shapes. - - When a deformer is applied in the scene on a mesh, - Maya creates a new "deformer" shape node for the mesh. - This new node does not get the original ID and later references - to the original node ID don't match. 
- - This validator checks whether the IDs are valid on all the shape - nodes in the instance. - """ - - order = openpype.api.ValidateContentsOrder - families = ['rig'] - hosts = ['maya'] - label = 'Deformed shape ids transferred' - actions = [ - openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction - ] - - def process(self, instance): - """Process all the nodes in the instance""" - - # Ensure nodes with sibling share the same ID - invalid = self.get_invalid(instance) - if invalid: - raise RuntimeError( - "Shapes found that are considered 'Deformed'" - " with invalid object ids: {0}".format(invalid) - ) - - @classmethod - def get_invalid(cls, instance): - """Get all nodes which do not match the criteria""" - - shapes = cmds.ls(instance[:], - dag=True, - leaf=True, - shapes=True, - long=True, - noIntermediate=True) - - invalid = [] - for shape in shapes: - sibling_id = cls._get_id_from_sibling(shape) - if not sibling_id: - continue - - current_id = lib.get_id(shape) - if current_id != sibling_id: - invalid.append(shape) - - return invalid - - @classmethod - def _get_id_from_sibling(cls, node): - """In some cases, the history of the deformed shapes cannot be used - to get the original shape, as the relation with the orignal shape - has been lost. - The original shape can be found as a sibling of the deformed shape - (sharing same transform parent), which has the "intermediate object" - attribute set. - The ID of that shape node can then be transferred to the deformed - shape node. 
- """ - - # Get long name - node = cmds.ls(node, long=True)[0] - - parent = cmds.listRelatives(node, parent=True, fullPath=True) - - # Get siblings of same type - node_type = cmds.nodeType(node) - similar_nodes = cmds.listRelatives(parent, type=node_type, fullPath=1) - # Exclude itself - similar_nodes = [x for x in similar_nodes if x != node] - - for similar_node in similar_nodes: - # Make sure it is an "intermediate object" - if cmds.getAttr(similar_node + ".io"): - _id = lib.get_id(similar_node) - if _id: - return _id - - @classmethod - def repair(cls, instance): - - for node in cls.get_invalid(instance): - # Get the original id from sibling - sibling_id = cls._get_id_from_sibling(node) - if not sibling_id: - cls.log.error("Could not find ID from sibling for '%s'", node) - continue - - lib.set_id(node, sibling_id, overwrite=True) diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index e2090080f6..c1029366e8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -51,10 +51,10 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): noIntermediate=True) for shape in shapes: - history_id = lib.get_id_from_history(shape) - if history_id: + sibling_id = lib.get_id_from_sibling(shape, history_only=False) + if sibling_id: current_id = lib.get_id(shape) - if current_id != history_id: + if current_id != sibling_id: invalid.append(shape) return invalid @@ -63,10 +63,10 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): def repair(cls, instance): for node in cls.get_invalid(instance): - # Get the original id from history - history_id = lib.get_id_from_history(node) - if not history_id: - cls.log.error("Could not find ID in history for '%s'", node) + # Get the original id from sibling + sibling_id = lib.get_id_from_sibling(node, history_only=False) + if 
not sibling_id: + cls.log.error("Could not find ID in siblings for '%s'", node) continue - lib.set_id(node, history_id, overwrite=True) + lib.set_id(node, sibling_id, overwrite=True) From c95752be966e5f99f0ac25490a035a47c1264f1a Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Thu, 3 Mar 2022 14:57:35 +0700 Subject: [PATCH 03/69] 'history only' as plugin setting --- .../publish/validate_rig_out_set_node_ids.py | 9 +++++++-- .../schemas/schema_maya_publish.json | 20 +++++++++++++++++++ 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index c1029366e8..c272c5c485 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -24,6 +24,7 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): openpype.hosts.maya.api.action.SelectInvalidAction, openpype.api.RepairAction ] + allow_history_only = False def process(self, instance): """Process all meshes""" @@ -51,7 +52,9 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): noIntermediate=True) for shape in shapes: - sibling_id = lib.get_id_from_sibling(shape, history_only=False) + sibling_id = \ + lib.get_id_from_sibling(shape, + history_only=cls.allow_history_only) if sibling_id: current_id = lib.get_id(shape) if current_id != sibling_id: @@ -64,7 +67,9 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): for node in cls.get_invalid(instance): # Get the original id from sibling - sibling_id = lib.get_id_from_sibling(node, history_only=False) + sibling_id = \ + lib.get_id_from_sibling(node, + history_only=cls.allow_history_only) if not sibling_id: cls.log.error("Could not find ID in siblings for '%s'", node) continue diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json 
b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 7c9a5a6b46..0c82997cce 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -396,6 +396,26 @@ "label": "Validate Rig Controllers" } ] + }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "ValidateRigOutSetNodeIds", + "label": "Validate Rig Out Set Node Ids", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "allow_history_only", + "label": "Allow history only" + } + ] } ] }, From acd86c30914f1a2e52dcd3e2f3c9a63d9b3be7d7 Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Thu, 3 Mar 2022 16:25:38 +0700 Subject: [PATCH 04/69] Full attribute name for readability --- openpype/hosts/maya/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 2f7a09d4c4..c9e10c7041 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1807,7 +1807,7 @@ def get_id_from_sibling(node, history_only=True): for similar_node in similar_nodes: # Check if "intermediate object" - if cmds.getAttr(similar_node + ".io"): + if cmds.getAttr(similar_node + ".intermediateObject"): _id = get_id(similar_node) if _id: return _id From cd498441a6aa86d02cc2deb8be6c33102797e1ff Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Thu, 3 Mar 2022 16:55:40 +0700 Subject: [PATCH 05/69] Code style fix --- .../publish/validate_rig_out_set_node_ids.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index c272c5c485..ed1d36261a 100644 --- 
a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -52,9 +52,10 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): noIntermediate=True) for shape in shapes: - sibling_id = \ - lib.get_id_from_sibling(shape, - history_only=cls.allow_history_only) + sibling_id = lib.get_id_from_sibling( + shape, + history_only=cls.allow_history_only + ) if sibling_id: current_id = lib.get_id(shape) if current_id != sibling_id: @@ -67,9 +68,10 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): for node in cls.get_invalid(instance): # Get the original id from sibling - sibling_id = \ - lib.get_id_from_sibling(node, - history_only=cls.allow_history_only) + sibling_id = lib.get_id_from_sibling( + node, + history_only=cls.allow_history_only + ) if not sibling_id: cls.log.error("Could not find ID in siblings for '%s'", node) continue From 4ba40f2a175ec78ac8371dbf62ea2a3eeb61998b Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Thu, 3 Mar 2022 17:48:40 +0700 Subject: [PATCH 06/69] Exact type for siblings --- openpype/hosts/maya/api/lib.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index c9e10c7041..bbd7786b36 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1802,6 +1802,8 @@ def get_id_from_sibling(node, history_only=True): similar_nodes = cmds.listRelatives(parent, type=node_type, fullPath=True) + similar_nodes = cmds.ls(similar_nodes, exactType=node_type, long=True) + # Exclude itself similar_nodes = [x for x in similar_nodes if x != node] From 024a7220fe36f8a3df60347df10873aa01a1cd6b Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Thu, 3 Mar 2022 17:52:18 +0700 Subject: [PATCH 07/69] Plugin setting default value --- openpype/settings/defaults/project_settings/maya.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git 
a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index a756071106..b6fa3719ef 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -351,6 +351,10 @@ "optional": true, "active": true }, + "ValidateRigOutSetNodeIds": { + "enabled": true, + "allow_history_only": false + }, "ValidateCameraAttributes": { "enabled": false, "optional": true, From 3b0cee19ba2b3f9351e87c49f690950d86587b90 Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Thu, 3 Mar 2022 19:20:52 +0700 Subject: [PATCH 08/69] Adapted/corrected comment --- openpype/hosts/maya/api/lib.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index bbd7786b36..62de5a96eb 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1757,14 +1757,16 @@ def get_id_from_sibling(node, history_only=True): The nodes in history must be of the exact same node type and must be parented under the same parent. - If no matching node is found in history, the siblings of the node - are checked. Additionally to having the same parent, the sibling must - be marked as 'intermediate object'. + Optionally, if no matching node is found from the history, all the + siblings of the node that are of the same type are checked. + Additionally to having the same parent, the sibling must be marked as + 'intermediate object'. 
Args: node (str): node to retrieve the history from - history_only (bool): also looks in node's siblings if True - and if nothing found in history + history_only (bool): if True and if nothing found in history, + look for an 'intermediate object' in all the node's siblings + of same type Returns: str or None: The id from the sibling node or None when no id found From f724e0ca222bd27d6a202ab7814fca449569830a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Mar 2022 13:33:17 +0100 Subject: [PATCH 09/69] OP-2813 - fix for rendering single file from AE in DL Solves issue with rendering .mov or .avi file. Added test cae for collect_frames --- openpype/lib/delivery.py | 21 +++++-- .../plugins/publish/submit_publish_job.py | 1 + tests/unit/openpype/lib/test_delivery.py | 57 +++++++++++++++++++ 3 files changed, 75 insertions(+), 4 deletions(-) create mode 100644 tests/unit/openpype/lib/test_delivery.py diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index 9fc65aae8e..f1855d9550 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -13,18 +13,30 @@ def collect_frames(files): Uses clique as most precise solution Args: - files(list): list of source paths + files(list) or (set with single value): list of source paths Returns: (dict): {'/asset/subset_v001.0001.png': '0001', ....} """ collections, remainder = clique.assemble(files, minimum_items=1) + real_file_name = None + if len(files) == 1: + real_file_name = list(files)[0] + sources_and_frames = {} if collections: for collection in collections: src_head = collection.head src_tail = collection.tail + if src_head.endswith("_v"): + # print("Collection gathered incorrectly, not a sequence " + # "just a version found in {}".format(files)) + if len(collections) > 1: + continue + else: + return {real_file_name: None} + for index in collection.indexes: src_frame = collection.format("{padding}") % index src_file_name = "{}{}{}".format(src_head, src_frame, @@ -71,14 +83,15 @@ def 
path_from_representation(representation, anatomy): def copy_file(src_path, dst_path): """Hardlink file if possible(to save space), copy if not""" - from openpype.lib import create_hard_link # safer importing + from avalon.vendor import filelink # safer importing if os.path.exists(dst_path): return try: - create_hard_link( + filelink.create( src_path, - dst_path + dst_path, + filelink.HARDLINK ) except OSError: shutil.copyfile(src_path, dst_path) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 1de1c37575..964fe003fd 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -599,6 +599,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "files": os.path.basename(remainder), "stagingDir": os.path.dirname(remainder), } + representations.append(rep) if "render" in instance.get("families"): rep.update({ "fps": instance.get("fps"), diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py new file mode 100644 index 0000000000..affe14a89f --- /dev/null +++ b/tests/unit/openpype/lib/test_delivery.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +"""Test suite for delivery functions.""" +from openpype.lib.delivery import collect_frames + + +def test_collect_frames_multi_sequence(): + files = ["Asset_renderCompositingMain_v001.0000.png", + "Asset_renderCompositingMain_v001.0001.png", + "Asset_renderCompositingMain_v001.0002.png"] + ret = collect_frames(files) + + expected = { + "Asset_renderCompositingMain_v001.0000.png": "0000", + "Asset_renderCompositingMain_v001.0001.png": "0001", + "Asset_renderCompositingMain_v001.0002.png": "0002" + } + + print(ret) + assert ret == expected, "Not matching" + + +def test_collect_frames_single_sequence(): + files = ["Asset_renderCompositingMain_v001.0000.png"] + ret = collect_frames(files) + + 
expected = { + "Asset_renderCompositingMain_v001.0000.png": "0000" + } + + print(ret) + assert ret == expected, "Not matching" + + +def test_collect_frames_single_sequence_as_dict(): + files = {"Asset_renderCompositingMain_v001.0000.png"} + ret = collect_frames(files) + + expected = { + "Asset_renderCompositingMain_v001.0000.png": "0000" + } + + print(ret) + assert ret == expected, "Not matching" + + +def test_collect_frames_single_file(): + files = {"Asset_renderCompositingMain_v001.png"} + ret = collect_frames(files) + + expected = { + "Asset_renderCompositingMain_v001.png": None + } + + print(ret) + assert ret == expected, "Not matching" + + From 7ca997de92fd465d9c46b3473f3198a82dd84e2a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Mar 2022 14:14:47 +0100 Subject: [PATCH 10/69] OP-2813 - fix for rendering single file from AE in DL for sequence Solves issue with rendering single frame sequence, eg with 00000 in its file. --- .../publish/submit_aftereffects_deadline.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index 2918b54d4a..c499c14d40 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -6,6 +6,7 @@ import pyblish.api from avalon import api from openpype.lib import env_value_to_bool +from openpype.lib.delivery import collect_frames from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @@ -102,24 +103,18 @@ class AfterEffectsSubmitDeadline( def get_plugin_info(self): deadline_plugin_info = DeadlinePluginInfo() - context = self._instance.context - script_path = context.data["currentFile"] render_path = self._instance.data["expectedFiles"][0] - if 
len(self._instance.data["expectedFiles"]) > 1: + file_name, frame = list(collect_frames([render_path]).items())[0] + if frame: # replace frame ('000001') with Deadline's required '[#######]' # expects filename in format project_asset_subset_version.FRAME.ext render_dir = os.path.dirname(render_path) file_name = os.path.basename(render_path) - arr = file_name.split('.') - assert len(arr) == 3, \ - "Unable to parse frames from {}".format(file_name) - hashed = '[{}]'.format(len(arr[1]) * "#") - - render_path = os.path.join(render_dir, - '{}.{}.{}'.format(arr[0], hashed, - arr[2])) + hashed = '[{}]'.format(len(frame) * "#") + file_name = file_name.replace(frame, hashed) + render_path = os.path.join(render_dir, file_name) deadline_plugin_info.Comp = self._instance.data["comp_name"] deadline_plugin_info.Version = self._instance.data["app_version"] From 9de8504c4d89e800e4bff2b69376fe6f9f1f3eb2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Mar 2022 14:37:48 +0100 Subject: [PATCH 11/69] OP-2815 - Hound --- tests/unit/openpype/lib/test_delivery.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py index affe14a89f..7c2c92c101 100644 --- a/tests/unit/openpype/lib/test_delivery.py +++ b/tests/unit/openpype/lib/test_delivery.py @@ -54,4 +54,3 @@ def test_collect_frames_single_file(): print(ret) assert ret == expected, "Not matching" - From b46a7a538787e733f8d77a7cba89b7166bde133a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Mar 2022 10:49:48 +0100 Subject: [PATCH 12/69] OP-2813 - fix wrong merge --- openpype/lib/delivery.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index f1855d9550..5a69afd5aa 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -83,15 +83,14 @@ def path_from_representation(representation, anatomy): def copy_file(src_path, dst_path): """Hardlink file if 
possible(to save space), copy if not""" - from avalon.vendor import filelink # safer importing + from openpype.lib import create_hard_link # safer importing if os.path.exists(dst_path): return try: - filelink.create( + create_hard_link( src_path, - dst_path, - filelink.HARDLINK + dst_path ) except OSError: shutil.copyfile(src_path, dst_path) From 392963032732c8248b5c66d03b731d2ef5468237 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Mar 2022 12:06:59 +0100 Subject: [PATCH 13/69] OP-2813 - fix hardcoded value Updated regular expression to match version substring to be more generic. --- openpype/lib/delivery.py | 12 ++++--- tests/unit/openpype/lib/test_delivery.py | 40 ++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 4 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index 5a69afd5aa..ee21b01854 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -4,13 +4,18 @@ import shutil import glob import clique import collections +import re def collect_frames(files): """ Returns dict of source path and its frame, if from sequence - Uses clique as most precise solution + Uses clique as most precise solution, used when anatomy template that + created files is not known. + + Depends that version substring starts with 'v' with any number of + numeric characters after. 
Args: files(list) or (set with single value): list of source paths @@ -29,9 +34,8 @@ def collect_frames(files): src_head = collection.head src_tail = collection.tail - if src_head.endswith("_v"): - # print("Collection gathered incorrectly, not a sequence " - # "just a version found in {}".format(files)) + # version recognized as a collection + if re.match(".*([^a-zA-Z0-9]v%[0-9]+d).*", collection.format()): if len(collections) > 1: continue else: diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py index 7c2c92c101..1787286032 100644 --- a/tests/unit/openpype/lib/test_delivery.py +++ b/tests/unit/openpype/lib/test_delivery.py @@ -19,6 +19,22 @@ def test_collect_frames_multi_sequence(): assert ret == expected, "Not matching" +def test_collect_frames_multi_sequence_different_format(): + files = ["Asset.v001.renderCompositingMain.0000.png", + "Asset.v001.renderCompositingMain.0001.png", + "Asset.v001.renderCompositingMain.0002.png"] + ret = collect_frames(files) + + expected = { + "Asset.v001.renderCompositingMain.0000.png": "0000", + "Asset.v001.renderCompositingMain.0001.png": "0001", + "Asset.v001.renderCompositingMain.0002.png": "0002" + } + + print(ret) + assert ret == expected, "Not matching" + + def test_collect_frames_single_sequence(): files = ["Asset_renderCompositingMain_v001.0000.png"] ret = collect_frames(files) @@ -31,6 +47,30 @@ def test_collect_frames_single_sequence(): assert ret == expected, "Not matching" +def test_collect_frames_single_sequence_different_format(): + files = ["Asset.v001.renderCompositingMain_0000.png"] + ret = collect_frames(files) + + expected = { + "Asset.v001.renderCompositingMain_0000.png": "0000" + } + + print(ret) + assert ret == expected, "Not matching" + + +def test_collect_frames_single_sequence_withhout_version(): + files = ["pngv001.renderCompositingMain_0000.png"] + ret = collect_frames(files) + + expected = { + "pngv001.renderCompositingMain_0000.png": "0000" + } + + 
print(ret) + assert ret == expected, "Not matching" + + def test_collect_frames_single_sequence_as_dict(): files = {"Asset_renderCompositingMain_v001.0000.png"} ret = collect_frames(files) From a864b80862d91ace2d46e23aa1fbb10b8a6a7481 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 14 Mar 2022 15:53:41 +0100 Subject: [PATCH 14/69] flame: convert segment comment to attributes wip --- .../hosts/flame/plugins/publish/collect_timeline_instances.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 6424bce3bc..54ff543f21 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -16,6 +16,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): audio_track_items = [] + def _get_comment_attributes(self, segment): + comment = segment.comment.get_value() + def process(self, context): project = context.data["flameProject"] sequence = context.data["flameSequence"] @@ -26,6 +29,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # process all sellected with opfapi.maintained_segment_selection(sequence) as segments: for segment in segments: + comment_attributes = self._get_comment_attributes(segment) clip_data = opfapi.get_segment_attributes(segment) clip_name = clip_data["segment_name"] self.log.debug("clip_name: {}".format(clip_name)) From 34b44bec6306c807c3c652872d5b53b8838b0e11 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 14 Mar 2022 19:36:25 +0100 Subject: [PATCH 15/69] flame: resolving attributes from segment comments --- .../publish/collect_timeline_instances.py | 45 +++++++++++++++++-- 1 file changed, 42 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 
54ff543f21..9e6c7210fb 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -1,3 +1,4 @@ +import re import pyblish import openpype import openpype.hosts.flame.api as opfapi @@ -16,9 +17,6 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): audio_track_items = [] - def _get_comment_attributes(self, segment): - comment = segment.comment.get_value() - def process(self, context): project = context.data["flameProject"] sequence = context.data["flameSequence"] @@ -30,6 +28,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): with opfapi.maintained_segment_selection(sequence) as segments: for segment in segments: comment_attributes = self._get_comment_attributes(segment) + self.log.debug("_ comment_attributes: {}".format( + pformat(comment_attributes))) + clip_data = opfapi.get_segment_attributes(segment) clip_name = clip_data["segment_name"] self.log.debug("clip_name: {}".format(clip_name)) @@ -130,6 +131,44 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): if marker_data.get("reviewTrack") is not None: instance.data["reviewAudio"] = True + def _get_comment_attributes(self, segment): + comment = segment.comment.get_value() + + # first split comment by comma + split_comments = [] + if "," in comment: + split_comments.extend(iter(comment.split(","))) + elif ";" in comment: + split_comments.extend(iter(comment.split(";"))) + else: + split_comments.append(comment) + + # try to find attributes + attributes = {} + # search for `:` + for split in split_comments: + # make sure we ignore if not `:` in key + if ":" not in split: + continue + + # split to key and value + key, value = split.split(":") + + # condition for resolution in key + if "resolution" in key.lower(): + patern = re.compile(r"([0-9]+)") + res_goup = patern.findall(value) + + # check if axpect was also defined + # 1920x1080x1.5 + aspect = res_goup[2] if len(res_goup) > 2 else 1 + 
+ attributes["resolution"] = { + "width": int(res_goup[0]), + "height": int(res_goup[1]), + "pixelAspect": float(aspect) + } + def _get_head_tail(self, clip_data, first_frame): # calculate head and tail with forward compatibility head = clip_data.get("segment_head") From bd57a0fd56f76c71328020eeaa29aec294ea7efb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 14 Mar 2022 19:49:15 +0100 Subject: [PATCH 16/69] flame: add comment attributes to instance data --- .../plugins/publish/collect_timeline_instances.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 9e6c7210fb..dd44627021 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -106,6 +106,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # add resolution self._get_resolution_to_data(inst_data, context) + # add comment attributes if any + inst_data.update(comment_attributes) + # create instance instance = context.create_instance(**inst_data) @@ -163,11 +166,13 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # 1920x1080x1.5 aspect = res_goup[2] if len(res_goup) > 2 else 1 - attributes["resolution"] = { - "width": int(res_goup[0]), - "height": int(res_goup[1]), + attributes.update({ + "resolutionWidth": int(res_goup[0]), + "resolutionHeight": int(res_goup[1]), "pixelAspect": float(aspect) - } + }) + + return attributes def _get_head_tail(self, clip_data, first_frame): # calculate head and tail with forward compatibility From 420122b8c9ec5e3eeefe7f89e8627c06a30f6eed Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 14 Mar 2022 19:57:35 +0100 Subject: [PATCH 17/69] flame: fix regex to get float number too --- .../hosts/flame/plugins/publish/collect_timeline_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index dd44627021..f41f773802 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -159,7 +159,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # condition for resolution in key if "resolution" in key.lower(): - patern = re.compile(r"([0-9]+)") + patern = re.compile(r"([0-9\.]+)") res_goup = patern.findall(value) # check if axpect was also defined From 0a7cbeef6df772531270755c93120dcb4fa20fad Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 15 Mar 2022 14:17:24 +0100 Subject: [PATCH 18/69] flame: refactor to settings configurability --- .../publish/collect_timeline_instances.py | 103 +++++++++++++----- 1 file changed, 76 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index f41f773802..e54ff9a167 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -7,6 +7,10 @@ from openpype.hosts.flame.otio import flame_export # # developer reload modules from pprint import pformat +# constatns +NUM_PATERN = re.compile(r"([0-9\.]+)") +TXT_PATERN = re.compile(r"([a-zA-Z]+)") + class CollectTimelineInstances(pyblish.api.ContextPlugin): """Collect all Timeline segment selection.""" @@ -17,6 +21,16 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): audio_track_items = [] + # TODO: add to settings + # settings + xml_preset_attrs_from_comments = { + "width": "number", + "height": "number", + "pixelRatio": "number", + "resizeType": "string", + "resizeFilter": "string" + } + def process(self, context): project = context.data["flameProject"] sequence = 
context.data["flameSequence"] @@ -137,43 +151,78 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): def _get_comment_attributes(self, segment): comment = segment.comment.get_value() - # first split comment by comma - split_comments = [] - if "," in comment: - split_comments.extend(iter(comment.split(","))) - elif ";" in comment: - split_comments.extend(iter(comment.split(";"))) - else: - split_comments.append(comment) - # try to find attributes - attributes = {} + attributes = { + "pixelRatio": 1.00 + } # search for `:` - for split in split_comments: + for split in self._split_comments(comment): # make sure we ignore if not `:` in key if ":" not in split: continue - # split to key and value - key, value = split.split(":") + self._get_xml_preset_attrs( + attributes, split) - # condition for resolution in key - if "resolution" in key.lower(): - patern = re.compile(r"([0-9\.]+)") - res_goup = patern.findall(value) - - # check if axpect was also defined - # 1920x1080x1.5 - aspect = res_goup[2] if len(res_goup) > 2 else 1 - - attributes.update({ - "resolutionWidth": int(res_goup[0]), - "resolutionHeight": int(res_goup[1]), - "pixelAspect": float(aspect) - }) + if attributes.get("width"): + attributes["resolution"] = { + "resolutionWidth": attributes["width"], + "resolutionHeight": attributes["height"], + "pixelAspect": attributes["pixelRatio"] + } return attributes + def _get_xml_preset_attrs(self, attributes, split): + + # split to key and value + key, value = split.split(":") + + for a_name, a_type in self.xml_preset_attrs_from_comments.items(): + # exclude all not related attributes + if a_name.lower() not in key: + continue + + # get pattern defined by type + pattern = TXT_PATERN if "string" in a_type else NUM_PATERN + res_goup = pattern.findall(value) + + # raise if nothing is found as it is not correctly defined + if not res_goup: + raise ValueError(( + "Value for `{}` attribute is not " + "set correctly: `{}`").format(a_name, split)) + + 
attributes[a_name] = res_goup[0] + + # condition for resolution in key + if "resolution" in key.lower(): + res_goup = NUM_PATERN.findall(value) + # check if axpect was also defined + # 1920x1080x1.5 + aspect = res_goup[2] if len(res_goup) > 2 else 1 + + width = int(res_goup[0]) + height = int(res_goup[1]) + pixel_ratio = float(aspect) + attributes.update({ + "width": width, + "height": height, + "pixelRatio": pixel_ratio + }) + + def _split_comments(self, comment_string): + # first split comment by comma + split_comments = [] + if "," in comment_string: + split_comments.extend(iter(comment_string.split(","))) + elif ";" in comment_string: + split_comments.extend(iter(comment_string.split(";"))) + else: + split_comments.append(comment_string) + + return split_comments + def _get_head_tail(self, clip_data, first_frame): # calculate head and tail with forward compatibility head = clip_data.get("segment_head") From d408139bb7a9753e0892d648819af4db6093e9e9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 15 Mar 2022 14:26:08 +0100 Subject: [PATCH 19/69] flame: restructure data nesting for better absorption to instance data --- .../publish/collect_timeline_instances.py | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index e54ff9a167..72ad2cd1c3 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -153,7 +153,8 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # try to find attributes attributes = { - "pixelRatio": 1.00 + "xml_overrides": { + "pixelRatio": 1.00} } # search for `:` for split in self._split_comments(comment): @@ -164,12 +165,14 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): self._get_xml_preset_attrs( attributes, split) - if attributes.get("width"): - 
attributes["resolution"] = { - "resolutionWidth": attributes["width"], - "resolutionHeight": attributes["height"], - "pixelAspect": attributes["pixelRatio"] - } + # add xml overides resolution to instance data + xml_overrides = attributes["xml_overrides"] + if xml_overrides.get("width"): + attributes.update({ + "resolutionWidth": xml_overrides["width"], + "resolutionHeight": xml_overrides["height"], + "pixelAspect": xml_overrides["pixelRatio"] + }) return attributes @@ -193,7 +196,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): "Value for `{}` attribute is not " "set correctly: `{}`").format(a_name, split)) - attributes[a_name] = res_goup[0] + attributes["xml_overrides"][a_name] = res_goup[0] # condition for resolution in key if "resolution" in key.lower(): @@ -205,7 +208,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): width = int(res_goup[0]) height = int(res_goup[1]) pixel_ratio = float(aspect) - attributes.update({ + attributes["xml_overrides"].update({ "width": width, "height": height, "pixelRatio": pixel_ratio From 48ce34c58e960e458676bf215b21fb5416ad960d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 15 Mar 2022 14:26:47 +0100 Subject: [PATCH 20/69] flame: add xml_overrides to extracting profiles --- .../flame/plugins/publish/extract_subset_resources.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 5c3aed9672..194557e37a 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -1,9 +1,11 @@ import os from pprint import pformat from copy import deepcopy + import pyblish.api import openpype.api from openpype.hosts.flame import api as opfapi +from pprint import pformat class ExtractSubsetResources(openpype.api.Extractor): @@ -131,6 +133,12 @@ class 
ExtractSubsetResources(openpype.api.Extractor): "startFrame": frame_start }) + # add any xml overrides collected form segment.comment + modify_xml_data.update(instance.data["xml_overrides"]) + self.log.debug("__ modify_xml_data: {}".format(pformat( + modify_xml_data + ))) + # with maintained duplication loop all presets with opfapi.maintained_object_duplication( exporting_clip) as duplclip: From db7e9cc4aa6b4fd09496139d2fff878e3606312f Mon Sep 17 00:00:00 2001 From: 2-REC Date: Wed, 16 Mar 2022 12:13:47 +0700 Subject: [PATCH 21/69] Warning log if more than 1 shape id --- openpype/hosts/maya/api/lib.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 62de5a96eb..f49c0f689e 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1809,12 +1809,22 @@ def get_id_from_sibling(node, history_only=True): # Exclude itself similar_nodes = [x for x in similar_nodes if x != node] + first_id = None for similar_node in similar_nodes: # Check if "intermediate object" if cmds.getAttr(similar_node + ".intermediateObject"): _id = get_id(similar_node) if _id: - return _id + # Check if already found an id + if first_id: + log.warning(("Found more than 1 matching intermediate" + " shape for '{}'. 
Using id of first" + " found: '{}'".format(node, found_node))) + break + first_id = _id + found_node = similar_node + + return first_id From 802f8a482d6278f2184b0f38ad52fc56efb737cb Mon Sep 17 00:00:00 2001 From: 2-REC Date: Wed, 16 Mar 2022 12:16:33 +0700 Subject: [PATCH 22/69] Variable declaration --- openpype/hosts/maya/api/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index f49c0f689e..e2c07624e6 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1810,6 +1810,7 @@ def get_id_from_sibling(node, history_only=True): similar_nodes = [x for x in similar_nodes if x != node] first_id = None + found_node = None for similar_node in similar_nodes: # Check if "intermediate object" if cmds.getAttr(similar_node + ".intermediateObject"): From a11fe7a5503c993b53c72c16eb306d3447ead29a Mon Sep 17 00:00:00 2001 From: 2-REC Date: Wed, 16 Mar 2022 16:38:17 +0700 Subject: [PATCH 23/69] Fix to allow more than 1 shape with same ids --- openpype/hosts/maya/api/lib.py | 40 ++++++++++++++++++++++------------ 1 file changed, 26 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index b46eff5a4b..f0f6bb706f 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1989,23 +1989,35 @@ def get_id_from_sibling(node, history_only=True): # Exclude itself similar_nodes = [x for x in similar_nodes if x != node] - first_id = None - found_node = None + + # Get all unique ids from siblings in order since + # we consistently take the first one found + sibling_ids = OrderedDict() for similar_node in similar_nodes: # Check if "intermediate object" - if cmds.getAttr(similar_node + ".intermediateObject"): - _id = get_id(similar_node) - if _id: - # Check if already found an id - if first_id: - log.warning(("Found more than 1 matching intermediate" - " shape for '{}'. 
Using id of first" - " found: '{}'".format(node, found_node))) - break - first_id = _id - found_node = similar_node + if not cmds.getAttr(similar_node + ".intermediateObject"): + continue - return first_id + _id = get_id(similar_node) + if not _id: + continue + + if _id in sibling_ids: + sibling_ids[_id].append(similar_node) + else: + sibling_ids[_id] = [similar_node] + + if sibling_ids: + first_id, found_nodes = next(iter(sibling_ids.items())) + + # Log a warning if we've found multiple unique ids + if len(sibling_ids) > 1: + log.warning(("Found more than 1 intermediate shape with" + " unique id for '{}'. Using id of first" + " found: '{}'".format(node, found_nodes[0]))) + break + + return first_id From 9e4a3cf9504ebbb09c30d3c1d2bcd772eed1cf4d Mon Sep 17 00:00:00 2001 From: 2-REC Date: Wed, 16 Mar 2022 16:41:27 +0700 Subject: [PATCH 24/69] Distraction fix... --- openpype/hosts/maya/api/lib.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index f0f6bb706f..f7507d87c5 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1989,7 +1989,6 @@ def get_id_from_sibling(node, history_only=True): # Exclude itself similar_nodes = [x for x in similar_nodes if x != node] - # Get all unique ids from siblings in order since # we consistently take the first one found sibling_ids = OrderedDict() @@ -2015,7 +2014,6 @@ def get_id_from_sibling(node, history_only=True): log.warning(("Found more than 1 intermediate shape with" " unique id for '{}'. Using id of first" " found: '{}'".format(node, found_nodes[0]))) - break return first_id From 78ae6c1c86ab5a8accce4906bf1eabf87ba4a607 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 11:14:20 +0100 Subject: [PATCH 25/69] OP-2813 - fixed one too many frame after loaded clip in Nuke For 0-229 range it previously produced 229 - 0 + 1 = 230 (duration). last = 1 + 230 = 231 (should be 230). 
--- openpype/hosts/nuke/plugins/load/load_clip.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index a253ba4a9d..ce1693f700 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -97,7 +97,7 @@ class LoadClip(plugin.NukeLoader): last += self.handle_end if not is_sequence: - duration = last - first + 1 + duration = last - first first = 1 last = first + duration elif "#" not in file: @@ -212,7 +212,7 @@ class LoadClip(plugin.NukeLoader): last += self.handle_end if not is_sequence: - duration = last - first + 1 + duration = last - first first = 1 last = first + duration elif "#" not in file: From 8af535adba3303ae759638f9933cb68ec46517bb Mon Sep 17 00:00:00 2001 From: 2-REC Date: Wed, 16 Mar 2022 18:09:50 +0700 Subject: [PATCH 26/69] More adapted error message --- .../publish/validate_animation_out_set_related_node_ids.py | 4 ++-- .../maya/plugins/publish/validate_rig_out_set_node_ids.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py index 7c1c695237..05d63f1d56 100644 --- a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -32,8 +32,8 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): # if a deformer has been created on the shape invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Nodes found with non-related " - "asset IDs: {0}".format(invalid)) + raise RuntimeError("Nodes found with mismatching " + "IDs: {0}".format(invalid)) @classmethod def get_invalid(cls, instance): diff --git 
a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index ed1d36261a..cc3723a6e1 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -33,8 +33,8 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): # if a deformer has been created on the shape invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Nodes found with non-related " - "asset IDs: {0}".format(invalid)) + raise RuntimeError("Nodes found with mismatching " + "IDs: {0}".format(invalid)) @classmethod def get_invalid(cls, instance): From 87b44b4b14c989c7dce61492650fb54202c37ee0 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 13:48:50 +0100 Subject: [PATCH 27/69] OP-2813 - fix collect_frames when multiple version numbers in path Added new test case. --- openpype/lib/delivery.py | 8 +++----- tests/unit/openpype/lib/test_delivery.py | 12 ++++++++++++ 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index ee21b01854..b9f3f0b106 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -25,10 +25,11 @@ def collect_frames(files): collections, remainder = clique.assemble(files, minimum_items=1) real_file_name = None + sources_and_frames = {} if len(files) == 1: real_file_name = list(files)[0] + sources_and_frames[real_file_name] = None - sources_and_frames = {} if collections: for collection in collections: src_head = collection.head @@ -36,10 +37,7 @@ def collect_frames(files): # version recognized as a collection if re.match(".*([^a-zA-Z0-9]v%[0-9]+d).*", collection.format()): - if len(collections) > 1: - continue - else: - return {real_file_name: None} + continue for index in collection.indexes: src_frame = collection.format("{padding}") % index diff --git a/tests/unit/openpype/lib/test_delivery.py 
b/tests/unit/openpype/lib/test_delivery.py index 1787286032..de87f99d79 100644 --- a/tests/unit/openpype/lib/test_delivery.py +++ b/tests/unit/openpype/lib/test_delivery.py @@ -47,6 +47,18 @@ def test_collect_frames_single_sequence(): assert ret == expected, "Not matching" +def test_collect_frames_single_sequence_full_path(): + files = ['C:/test_project/assets/locations/Town/work/compositing\\renders\\aftereffects\\test_project_TestAsset_compositing_v001\\TestAsset_renderCompositingMain_v001.mov'] # noqa: E501 + ret = collect_frames(files) + + expected = { + 'C:/test_project/assets/locations/Town/work/compositing\\renders\\aftereffects\\test_project_TestAsset_compositing_v001\\TestAsset_renderCompositingMain_v001.mov': None # noqa: E501 + } + + print(ret) + assert ret == expected, "Not matching" + + def test_collect_frames_single_sequence_different_format(): files = ["Asset.v001.renderCompositingMain_0000.png"] ret = collect_frames(files) From 033eaa324ffec6dce7d5f44dcfe84464a20c961d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 15:10:38 +0100 Subject: [PATCH 28/69] nuke: imageio adding ocio config version 1.2 --- .../projects_schema/schemas/schema_anatomy_imageio.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index 3bec19c3d0..6532f2b6ce 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -195,6 +195,9 @@ { "aces_1.1": "aces_1.1" }, + { + "aces_1.1": "aces_1.2" + }, { "custom": "custom" } From d867b872a894986579709718e2894596ed9e527a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 16:55:51 +0100 Subject: [PATCH 29/69] flame: distribute better value types --- .../publish/collect_timeline_instances.py | 
18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 72ad2cd1c3..44c25f04a2 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -26,7 +26,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): xml_preset_attrs_from_comments = { "width": "number", "height": "number", - "pixelRatio": "number", + "pixelRatio": "float", "resizeType": "string", "resizeFilter": "string" } @@ -183,11 +183,14 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): for a_name, a_type in self.xml_preset_attrs_from_comments.items(): # exclude all not related attributes - if a_name.lower() not in key: + if a_name.lower() not in key.lower(): continue # get pattern defined by type - pattern = TXT_PATERN if "string" in a_type else NUM_PATERN + pattern = TXT_PATERN + if "number" in a_type or "float" in a_type: + pattern = NUM_PATERN + res_goup = pattern.findall(value) # raise if nothing is found as it is not correctly defined @@ -196,7 +199,14 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): "Value for `{}` attribute is not " "set correctly: `{}`").format(a_name, split)) - attributes["xml_overrides"][a_name] = res_goup[0] + if "string" in a_type: + _value = res_goup[0] + if "float" in a_type: + _value = float(res_goup[0]) + if "number" in a_type: + _value = int(res_goup[0]) + + attributes["xml_overrides"][a_name] = _value # condition for resolution in key if "resolution" in key.lower(): From d98d8905afb1ae3a28af03904adc6b4e57114fff Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 17:10:56 +0100 Subject: [PATCH 30/69] Flame: add ignoring toggle to settings parsed attributes from comments can be ignored now --- .../plugins/publish/extract_subset_resources.py | 14 +++++++++----- 
.../settings/defaults/project_settings/flame.json | 1 + .../projects_schema/schema_project_flame.json | 11 +++++++++++ 3 files changed, 21 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 2e3b84def8..ac50c7c980 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -25,6 +25,7 @@ class ExtractSubsetResources(openpype.api.Extractor): "xml_preset_file": "Jpeg (8-bit).xml", "xml_preset_dir": "", "export_type": "File Sequence", + "ignore_comment_attrs": True, "colorspace_out": "Output - sRGB", "representation_add_range": False, "representation_tags": ["thumbnail"] @@ -34,6 +35,7 @@ class ExtractSubsetResources(openpype.api.Extractor): "xml_preset_file": "Apple iPad (1920x1080).xml", "xml_preset_dir": "", "export_type": "Movie", + "ignore_comment_attrs": True, "colorspace_out": "Output - Rec.709", "representation_add_range": True, "representation_tags": [ @@ -104,6 +106,7 @@ class ExtractSubsetResources(openpype.api.Extractor): preset_dir = preset_config["xml_preset_dir"] export_type = preset_config["export_type"] repre_tags = preset_config["representation_tags"] + ignore_comment_attrs = preset_config["ignore_comment_attrs"] color_out = preset_config["colorspace_out"] # get frame range with handles for representation range @@ -133,11 +136,12 @@ class ExtractSubsetResources(openpype.api.Extractor): "startFrame": frame_start }) - # add any xml overrides collected form segment.comment - modify_xml_data.update(instance.data["xml_overrides"]) - self.log.debug("__ modify_xml_data: {}".format(pformat( - modify_xml_data - ))) + if not ignore_comment_attrs: + # add any xml overrides collected form segment.comment + modify_xml_data.update(instance.data["xml_overrides"]) + self.log.debug("__ modify_xml_data: {}".format(pformat( + modify_xml_data + ))) # 
with maintained duplication loop all presets with opfapi.maintained_object_duplication( diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index ef9c2b1041..c7188b10b5 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -28,6 +28,7 @@ "xml_preset_file": "OpenEXR (16-bit fp DWAA).xml", "xml_preset_dir": "", "export_type": "File Sequence", + "ignore_comment_attrs": false, "colorspace_out": "ACES - ACEScg", "representation_add_range": true, "representation_tags": [] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index 1f30b45981..e352f8b132 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -189,6 +189,17 @@ ] }, + { + "type": "separator" + }, + { + "type": "boolean", + "key": "ignore_comment_attrs", + "label": "Ignore attributes parsed from a segment comments" + }, + { + "type": "separator" + }, { "key": "colorspace_out", "label": "Output color (imageio)", From 4b83446230d54a804fd2a509a709abab463c44cc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 17:42:27 +0100 Subject: [PATCH 31/69] flame: moving logging outside of condition --- .../flame/plugins/publish/extract_subset_resources.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index ac50c7c980..d52669d955 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -139,9 +139,10 @@ class ExtractSubsetResources(openpype.api.Extractor): if not 
ignore_comment_attrs: # add any xml overrides collected form segment.comment modify_xml_data.update(instance.data["xml_overrides"]) - self.log.debug("__ modify_xml_data: {}".format(pformat( - modify_xml_data - ))) + + self.log.debug("__ modify_xml_data: {}".format(pformat( + modify_xml_data + ))) # with maintained duplication loop all presets with opfapi.maintained_object_duplication( From d0a79e31f5afb8dcdd5bbcf7d376b89c98d29456 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 17:44:01 +0100 Subject: [PATCH 32/69] hound and suggested changes --- .../hosts/flame/plugins/publish/collect_timeline_instances.py | 4 ++-- .../hosts/flame/plugins/publish/extract_subset_resources.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 44c25f04a2..c6793874c0 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -228,9 +228,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # first split comment by comma split_comments = [] if "," in comment_string: - split_comments.extend(iter(comment_string.split(","))) + split_comments.extend(comment_string.split(",")) elif ";" in comment_string: - split_comments.extend(iter(comment_string.split(";"))) + split_comments.extend(comment_string.split(";")) else: split_comments.append(comment_string) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index d52669d955..32f6b9508f 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -5,7 +5,6 @@ from copy import deepcopy import pyblish.api import openpype.api from openpype.hosts.flame import api as opfapi -from pprint 
import pformat class ExtractSubsetResources(openpype.api.Extractor): From 550f0603d4865da46e8878355208c0a4ff8f639d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 20:47:21 +0100 Subject: [PATCH 33/69] fixing ocio config name --- .../schemas/projects_schema/schemas/schema_anatomy_imageio.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index 6532f2b6ce..acfd4602df 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -196,7 +196,7 @@ "aces_1.1": "aces_1.1" }, { - "aces_1.1": "aces_1.2" + "aces_1.2": "aces_1.2" }, { "custom": "custom" From 3d426d1d8f90764d35b56316dd81522bb8e6e39d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 17 Mar 2022 00:14:00 +0100 Subject: [PATCH 34/69] Fix #2834 - ensure current state is correct when entering new group order --- openpype/tools/pyblish_pype/control.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/tools/pyblish_pype/control.py b/openpype/tools/pyblish_pype/control.py index 6f89952c22..f657936b79 100644 --- a/openpype/tools/pyblish_pype/control.py +++ b/openpype/tools/pyblish_pype/control.py @@ -389,6 +389,9 @@ class Controller(QtCore.QObject): new_current_group_order ) + # Force update to the current state + self._set_state_by_order() + if self.collect_state == 0: self.collect_state = 1 self._current_state = ( From d14d9eecc86f090bdc4478161da111688e06a581 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 12:44:30 +0100 Subject: [PATCH 35/69] added simple tooltips for settings entities --- openpype/settings/entities/base_entity.py | 4 ++++ openpype/settings/entities/schemas/README.md | 1 + openpype/tools/settings/settings/base.py | 3 +++ 3 
files changed, 8 insertions(+) diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index b5bc44640b..76700d605d 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -28,6 +28,10 @@ class BaseEntity: def __init__(self, schema_data, *args, **kwargs): self.schema_data = schema_data + tooltip = None + if schema_data: + tooltip = schema_data.get("tooltip") + self.tooltip = tooltip # Entity id self._id = uuid4() diff --git a/openpype/settings/entities/schemas/README.md b/openpype/settings/entities/schemas/README.md index dd7601c017..fbfd699937 100644 --- a/openpype/settings/entities/schemas/README.md +++ b/openpype/settings/entities/schemas/README.md @@ -14,6 +14,7 @@ - this keys is not allowed for all inputs as they may have not reason for that - key is validated, can be only once in hierarchy but is not required - currently there are `system settings` and `project settings` +- all entities can have set `"tooltip"` key with description which will be shown in UI ## Inner schema - GUI schemas are huge json files, to be able to split whole configuration into multiple schema there's type `schema` diff --git a/openpype/tools/settings/settings/base.py b/openpype/tools/settings/settings/base.py index 706e2fdcf0..bd48b3a966 100644 --- a/openpype/tools/settings/settings/base.py +++ b/openpype/tools/settings/settings/base.py @@ -30,6 +30,9 @@ class BaseWidget(QtWidgets.QWidget): if not self.entity.gui_type: self.entity.on_change_callbacks.append(self._on_entity_change) + if self.entity.tooltip: + self.setToolTip(self.entity.tooltip) + self.label_widget = None self.create_ui() From fdb880c5440568e1f5f1a8fdc539ae7ddcad15f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Thu, 17 Mar 2022 12:57:34 +0100 Subject: [PATCH 36/69] Update openpype/hosts/flame/plugins/publish/collect_timeline_instances.py Co-authored-by: Jakub Trllo 
<43494761+iLLiCiTiT@users.noreply.github.com> --- .../hosts/flame/plugins/publish/collect_timeline_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index c6793874c0..70340ad7a2 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -188,7 +188,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # get pattern defined by type pattern = TXT_PATERN - if "number" in a_type or "float" in a_type: + if a_type in ("number" , "float"): pattern = NUM_PATERN res_goup = pattern.findall(value) From 0ea4e0acd4f78899df9e2ba6932a11beb88283dc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 17 Mar 2022 12:59:30 +0100 Subject: [PATCH 37/69] improving gap detection in extract review --- openpype/plugins/publish/extract_review.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index cbe1924408..3ecea1f8bd 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -747,10 +747,14 @@ class ExtractReview(pyblish.api.InstancePlugin): collections = clique.assemble(files)[0] assert len(collections) == 1, "Multiple collections found." 
col = collections[0] - # do nothing if sequence is complete - if list(col.indexes)[0] == start_frame and \ - list(col.indexes)[-1] == end_frame and \ - col.is_contiguous(): + + # do nothing if no gap is found in input range + not_gap = True + for fr in range(start_frame, end_frame + 1): + if fr not in col.indexes: + not_gap = False + + if not_gap: return [] holes = col.holes() From 338aac4de6b0cc37a98e624726611fdd1af5a6e7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 13:05:53 +0100 Subject: [PATCH 38/69] ignore 'team' entities in process event --- openpype/modules/ftrack/lib/ftrack_event_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/lib/ftrack_event_handler.py b/openpype/modules/ftrack/lib/ftrack_event_handler.py index af565c5421..0a70b0e301 100644 --- a/openpype/modules/ftrack/lib/ftrack_event_handler.py +++ b/openpype/modules/ftrack/lib/ftrack_event_handler.py @@ -44,7 +44,7 @@ class BaseEvent(BaseHandler): return self._get_entities( event, session, - ignore=['socialfeed', 'socialnotification'] + ignore=['socialfeed', 'socialnotification', 'team'] ) def get_project_name_from_event(self, session, event, project_id): From 3420c68796a6d8aa6f6dc22c3584aed931c0662d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 13:06:12 +0100 Subject: [PATCH 39/69] use 'first' instead of 'one' when querying user and task --- .../ftrack/event_handlers_server/event_user_assigment.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py b/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py index efc1e76775..96243c8c36 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py +++ b/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py @@ -87,8 +87,8 @@ class UserAssigmentEvent(BaseEvent): if not user_id: return None, None - task = session.query('Task 
where id is "{}"'.format(task_id)).one() - user = session.query('User where id is "{}"'.format(user_id)).one() + task = session.query('Task where id is "{}"'.format(task_id)).first() + user = session.query('User where id is "{}"'.format(user_id)).first() return task, user From eaae7f4828ba68b1e4b11f688357a9bd13c46ec1 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Thu, 17 Mar 2022 13:33:11 +0000 Subject: [PATCH 40/69] [Automated] Bump version --- CHANGELOG.md | 11 ++++++++++- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7790894b7f..6a1da69f13 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,19 +1,28 @@ # Changelog -## [3.9.1-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.9.1-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...HEAD) +**🚀 Enhancements** + +- Nuke: ExtractReviewSlate can handle more codes and profiles [\#2879](https://github.com/pypeclub/OpenPype/pull/2879) +- Flame: sequence used for reference video [\#2869](https://github.com/pypeclub/OpenPype/pull/2869) + **🐛 Bug fixes** +- Pyblish Pype - ensure current state is correct when entering new group order [\#2899](https://github.com/pypeclub/OpenPype/pull/2899) +- SceneInventory: Fix import of load function [\#2894](https://github.com/pypeclub/OpenPype/pull/2894) - Harmony - fixed creator issue [\#2891](https://github.com/pypeclub/OpenPype/pull/2891) - General: Remove forgotten use of avalon Creator [\#2885](https://github.com/pypeclub/OpenPype/pull/2885) - General: Avoid circular import [\#2884](https://github.com/pypeclub/OpenPype/pull/2884) - Fixes for attaching loaded containers \(\#2837\) [\#2874](https://github.com/pypeclub/OpenPype/pull/2874) +- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826) **🔀 Refactored code** - General: Reduce style usage to 
OpenPype repository [\#2889](https://github.com/pypeclub/OpenPype/pull/2889) +- General: Move loader logic from avalon to openpype [\#2886](https://github.com/pypeclub/OpenPype/pull/2886) ## [3.9.0](https://github.com/pypeclub/OpenPype/tree/3.9.0) (2022-03-14) diff --git a/openpype/version.py b/openpype/version.py index 17e514642d..5eca7c1d90 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.1-nightly.1" +__version__ = "3.9.1-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index 128d1cd615..af448ed24c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.9.1-nightly.1" # OpenPype +version = "3.9.1-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From e208e7976d4f69207bedba5d55a0c925ac6e6b38 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Mar 2022 14:57:28 +0100 Subject: [PATCH 41/69] OP-2813 - fixed duplication of representations nuke.api.plugin.ExporterReview adds representation explicitly via publish_on_farm, so skip adding repre if already there. (Issue in ExtractBurnin other way.) ExporterReview should be probably refactored and publish_on_farm removed altogether. 
--- .../deadline/plugins/publish/submit_publish_job.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index b92fd2fe69..8c0d78cae5 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -601,13 +601,22 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "files": os.path.basename(remainder), "stagingDir": os.path.dirname(remainder), } - representations.append(rep) if "render" in instance.get("families"): rep.update({ "fps": instance.get("fps"), "tags": ["review"] }) - self._solve_families(instance, True) + self._solve_families(instance, True) + + already_there = False + for repre in instance.get("representations", []): + # might be added explicitly before by publish_on_farm + already_there = repre.get("files") == rep["files"] + if already_there: + break + self.log.debug("repre {} already_there".format(repre)) + if not already_there: + representations.append(rep) return representations From 72f84c52baf7fed7b31fd59a995880a5bf5a41b9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 15:06:24 +0100 Subject: [PATCH 42/69] handle missing ftrack id in more cases --- .../event_sync_to_avalon.py | 72 ++++++++++++++++--- 1 file changed, 64 insertions(+), 8 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py index eea6436b53..237bf9fd80 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -199,8 +199,10 @@ class SyncToAvalonEvent(BaseEvent): if proj: ftrack_id = proj["data"].get("ftrackId") if ftrack_id is None: - ftrack_id = self._update_project_ftrack_id() - 
proj["data"]["ftrackId"] = ftrack_id + self.handle_missing_ftrack_id(proj) + ftrack_id = proj["data"]["ftrackId"] + self._avalon_ents_by_ftrack_id[ftrack_id] = proj + self._avalon_ents_by_ftrack_id[ftrack_id] = proj for ent in ents: ftrack_id = ent["data"].get("ftrackId") @@ -209,15 +211,56 @@ class SyncToAvalonEvent(BaseEvent): self._avalon_ents_by_ftrack_id[ftrack_id] = ent return self._avalon_ents_by_ftrack_id - def _update_project_ftrack_id(self): - ftrack_id = self.cur_project["id"] + def handle_missing_ftrack_id(self, doc): + ftrack_id = doc["data"].get("ftrackId") + if ftrack_id is not None: + return + if doc["type"] == "project": + ftrack_id = self.cur_project["id"] + + self.dbcon.update_one( + {"type": "project"}, + {"$set": {"data.ftrackId": ftrack_id}} + ) + + doc["data"]["ftrackId"] = ftrack_id + return + + if doc["type"] != "asset": + return + + doc_parents = doc.get("data", {}).get("parents") + if doc_parents is None: + return + + entities = self.process_session.query(( + "select id, link from TypedContext" + " where project_id is \"{}\" and name is \"{}\"" + ).format(self.cur_project["id"], doc["name"])).all() + matching_entity = None + for entity in entities: + parents = [] + for item in entity["link"]: + if item["id"] == entity["id"]: + break + low_type = item["type"].lower() + if low_type == "typedcontext": + parents.append(item["name"]) + if doc_parents == parents: + matching_entity = entity + break + + if matching_entity is None: + return + + ftrack_id = matching_entity["id"] self.dbcon.update_one( - {"type": "project"}, + {"_id": doc["_id"]}, {"$set": {"data.ftrackId": ftrack_id}} ) - return ftrack_id + self._avalon_ents_by_ftrack_id[ftrack_id] = doc @property def avalon_subsets_by_parents(self): @@ -857,7 +900,14 @@ class SyncToAvalonEvent(BaseEvent): if vis_par is None: vis_par = proj["_id"] parent_ent = self.avalon_ents_by_id[vis_par] - parent_ftrack_id = parent_ent["data"]["ftrackId"] + + parent_ftrack_id = 
parent_ent["data"].get("ftrackId") + if parent_ftrack_id is None: + self.handle_missing_ftrack_id(parent_ent) + parent_ftrack_id = parent_ent["data"].get("ftrackId") + if parent_ftrack_id is None: + continue + parent_ftrack_ent = self.ftrack_ents_by_id.get( parent_ftrack_id ) @@ -2128,7 +2178,13 @@ class SyncToAvalonEvent(BaseEvent): vis_par = avalon_ent["parent"] parent_ent = self.avalon_ents_by_id[vis_par] - parent_ftrack_id = parent_ent["data"]["ftrackId"] + parent_ftrack_id = parent_ent["data"].get("ftrackId") + if parent_ftrack_id is None: + self.handle_missing_ftrack_id(parent_ent) + parent_ftrack_id = parent_ent["data"].get("ftrackId") + if parent_ftrack_id is None: + continue + if parent_ftrack_id not in entities_dict: entities_dict[parent_ftrack_id] = { "children": [], From 9cc9c1afcbcd28557d72cbd50984ef8990eff52b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 16:04:28 +0100 Subject: [PATCH 43/69] added settings for new action --- .../defaults/project_settings/ftrack.json | 5 ++++ .../schema_project_ftrack.json | 28 +++++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 01831efad1..89bb41a164 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -193,6 +193,11 @@ "Administrator" ] }, + "fill_workfile_attribute": { + "enabled": false, + "custom_attribute_key": "", + "role_list": [] + }, "seed_project": { "enabled": true, "role_list": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index 6d0e2693d4..cb59e9d67e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -589,6 +589,34 @@ } ] }, 
+ { + "type": "dict", + "key": "fill_workfile_attribute", + "label": "Fill workfile Custom attribute", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "label", + "label": "Custom attribute must be Text type added to Task entity type" + }, + { + "type": "text", + "key": "custom_attribute_key", + "label": "Custom attribute key" + }, + { + "type": "list", + "key": "role_list", + "label": "Roles", + "object_type": "text" + } + ] + }, { "type": "dict", "key": "seed_project", From 1cdbe4568ee7ea7c4d72b96e3f434e072973f05b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 16:06:50 +0100 Subject: [PATCH 44/69] initial commit of new action for filling workfile name in custom attribute --- .../action_fill_workfile_attr.py | 289 ++++++++++++++++++ 1 file changed, 289 insertions(+) create mode 100644 openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py new file mode 100644 index 0000000000..a72b29bdbe --- /dev/null +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -0,0 +1,289 @@ +import collections + +import ftrack_api + +from avalon.api import AvalonMongoDB +from openpype.api import get_project_settings +from openpype.lib import ( + get_workfile_template_key, + get_workdir_data, + Anatomy, + StringTemplate, +) +from openpype_modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib.avalon_sync import create_chunks + + +class FillWorkfileAttributeAction(BaseAction): + """Action fill work filename into custom attribute on tasks. + + Prerequirements are that the project is synchronized so it is possible to + access project anatomy and project/asset documents. Tasks that are not + synchronized are skipped too. 
+ """ + + identifier = "fill.workfile.attr" + label = "OpenPype Admin" + variant = "- Fill workfile attribute" + description = "Precalculate and fill workfile name into a custom attribute" + icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg") + + settings_key = "fill_workfile_attribute" + + def discover(self, session, entities, event): + """ Validate selection. """ + is_valid = False + for ent in event["data"]["selection"]: + # Ignore entities that are not tasks or projects + if ent["entityType"].lower() in ["show", "task"]: + is_valid = True + break + + if is_valid: + is_valid = self.valid_roles(session, entities, event) + return is_valid + + def launch(self, session, entities, event): + task_entities = [] + other_entities = [] + project_entity = None + project_selected = False + for entity in entities: + if project_entity is None: + project_entity = self.get_project_from_entity(entity) + + ent_type_low = entity.entity_type.lower() + if ent_type_low == "project": + project_selected = True + break + + elif ent_type_low == "task": + task_entities.append(entity) + else: + other_entities.append(entity) + + project_name = project_entity["full_name"] + project_settings = get_project_settings(project_name) + custom_attribute_key = ( + project_settings + .get("ftrack", {}) + .get("user_handlers", {}) + .get(self.settings_key, {}) + .get("custom_attribute_key") + ) + if not custom_attribute_key: + return { + "success": False, + "message": "Custom attribute key is not set in settings" + } + + task_obj_type = session.query( + "select id from ObjectType where name is \"Task\"" + ).one() + text_type = session.query( + "select id from CustomAttributeType where name is \"text\"" + ).one() + attr_conf = session.query( + ( + "select id, key from CustomAttributeConfiguration" + " where object_type_id is \"{}\"" + " and type_id is \"{}\"" + " and key is \"{}\"" + ).format( + task_obj_type["id"], text_type["id"], custom_attribute_key + ) + ).first() + if not attr_conf: 
+ return { + "success": False, + "message": ( + "Could not find Task (text) Custom attribute \"{}\"" + ).format(custom_attribute_key) + } + + dbcon = AvalonMongoDB() + dbcon.Session["AVALON_PROJECT"] = project_name + asset_docs = list(dbcon.find({"type": "asset"})) + if project_selected: + asset_docs_with_task_names = self._get_asset_docs_for_project( + session, project_entity, asset_docs + ) + + else: + asset_docs_with_task_names = self._get_tasks_for_selection( + session, other_entities, task_entities, asset_docs + ) + + host_name = "{host}" + project_doc = dbcon.find_one({"type": "project"}) + project_settings = get_project_settings(project_name) + anatomy = Anatomy(project_name) + templates_by_key = {} + + operations = [] + for asset_doc, task_entities in asset_docs_with_task_names: + for task_entity in task_entities: + workfile_data = get_workdir_data( + project_doc, asset_doc, task_entity["name"], host_name + ) + workfile_data["version"] = 1 + workfile_data["ext"] = "{ext}" + + task_type = workfile_data["task"]["type"] + template_key = get_workfile_template_key( + task_type, host_name, project_settings=project_settings + ) + if template_key in templates_by_key: + template = templates_by_key[template_key] + else: + template = StringTemplate( + anatomy.templates[template_key]["file"] + ) + templates_by_key[template_key] = template + + result = template.format(workfile_data) + if not result.solved: + # TODO report + pass + else: + table_values = collections.OrderedDict(( + ("configuration_id", attr_conf["id"]), + ("entity_id", task_entity["id"]) + )) + operations.append( + ftrack_api.operation.UpdateEntityOperation( + "ContextCustomAttributeValue", + table_values, + "value", + ftrack_api.symbol.NOT_SET, + str(result) + ) + ) + + if operations: + for sub_operations in create_chunks(operations, 50): + for op in sub_operations: + session.recorded_operations.push(op) + session.commit() + + return True + + def _get_asset_docs_for_project(self, session, 
project_entity, asset_docs): + asset_docs_task_names = collections.defaultdict(list) + for asset_doc in asset_docs: + asset_data = asset_doc["data"] + asset_tasks = asset_data.get("tasks") + ftrack_id = asset_data.get("ftrackId") + if not asset_tasks or not ftrack_id: + continue + asset_docs_task_names[ftrack_id].append( + (asset_doc, list(asset_tasks.keys())) + ) + + task_entities = session.query(( + "select id, name, parent_id from Task where project_id is {}" + ).format(project_entity["id"])).all() + task_entities_by_parent_id = collections.defaultdict(list) + for task_entity in task_entities: + parent_id = task_entity["parent_id"] + task_entities_by_parent_id[parent_id].append(task_entity) + + output = [] + for ftrack_id, items in asset_docs_task_names.items(): + for item in items: + asset_doc, task_names = item + valid_task_entities = [] + for task_entity in task_entities_by_parent_id[ftrack_id]: + if task_entity["name"] in task_names: + valid_task_entities.append(task_entity) + + if valid_task_entities: + output.append((asset_doc, valid_task_entities)) + + return output + + def _get_tasks_for_selection( + self, session, other_entities, task_entities, asset_docs + ): + all_tasks = object() + asset_docs_by_ftrack_id = {} + asset_docs_by_parent_id = collections.defaultdict(list) + for asset_doc in asset_docs: + asset_data = asset_doc["data"] + ftrack_id = asset_data.get("ftrackId") + parent_id = asset_data.get("visualParent") + asset_docs_by_parent_id[parent_id].append(asset_doc) + if ftrack_id: + asset_docs_by_ftrack_id[ftrack_id] = asset_doc + + missing_docs = set() + all_tasks_ids = set() + task_names_by_ftrack_id = collections.defaultdict(list) + for other_entity in other_entities: + ftrack_id = other_entity["id"] + if ftrack_id not in asset_docs_by_ftrack_id: + missing_docs.add(ftrack_id) + continue + all_tasks_ids.add(ftrack_id) + task_names_by_ftrack_id[ftrack_id] = all_tasks + + for task_entity in task_entities: + parent_id = task_entity["parent_id"] + 
if parent_id not in asset_docs_by_ftrack_id: + missing_docs.add(parent_id) + continue + + if all_tasks_ids not in all_tasks_ids: + task_names_by_ftrack_id[ftrack_id].append(task_entity["name"]) + + ftrack_ids = set() + asset_doc_with_task_names_by_id = collections.defaultdict(list) + for ftrack_id, task_names in task_names_by_ftrack_id.items(): + asset_doc = asset_docs_by_ftrack_id[ftrack_id] + asset_data = asset_doc["data"] + asset_tasks = asset_data.get("tasks") + if not asset_tasks: + # TODO add to report + continue + + if task_names is all_tasks: + task_names = list(asset_tasks.keys()) + else: + new_task_names = [] + for task_name in task_names: + if task_name in asset_tasks: + new_task_names.append(task_name) + else: + # TODO add report + pass + task_names = new_task_names + + if task_names: + ftrack_ids.add(ftrack_id) + asset_doc_with_task_names_by_id[ftrack_id].append( + (asset_doc, task_names) + ) + + task_entities = session.query(( + "select id, name, parent_id from Task where parent_id in ({})" + ).format(self.join_query_keys(ftrack_ids))).all() + task_entitiy_by_parent_id = collections.defaultdict(list) + for task_entity in task_entities: + parent_id = task_entity["parent_id"] + task_entitiy_by_parent_id[parent_id].append(task_entity) + + output = [] + for ftrack_id, items in asset_doc_with_task_names_by_id.items(): + for item in items: + asset_doc, task_names = item + valid_task_entities = [] + for task_entity in task_entitiy_by_parent_id[ftrack_id]: + if task_entity["name"] in task_names: + valid_task_entities.append(task_entity) + if valid_task_entities: + output.append((asset_doc, valid_task_entities)) + return output + + +def register(session): + FillWorkfileAttributeAction(session).register() From 395d567aa2d7285e204ca6fb35a2344e0d7f2f94 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Mar 2022 16:07:08 +0100 Subject: [PATCH 45/69] OP-2813 - fix wrong parsing when short label is used --- openpype/lib/delivery.py | 2 +- 
tests/unit/openpype/lib/test_delivery.py | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index b9f3f0b106..78d743003b 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -36,7 +36,7 @@ def collect_frames(files): src_tail = collection.tail # version recognized as a collection - if re.match(".*([^a-zA-Z0-9]v%[0-9]+d).*", collection.format()): + if re.match(".*([a-zA-Z0-9]%[0-9]+d).*", collection.format()): continue for index in collection.indexes: diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py index de87f99d79..871ea95df7 100644 --- a/tests/unit/openpype/lib/test_delivery.py +++ b/tests/unit/openpype/lib/test_delivery.py @@ -47,6 +47,30 @@ def test_collect_frames_single_sequence(): assert ret == expected, "Not matching" +def test_collect_frames_single_sequence_shot(): + files = ["testing_sh010_workfileCompositing_v001.aep"] + ret = collect_frames(files) + + expected = { + "testing_sh010_workfileCompositing_v001.aep": None + } + + print(ret) + assert ret == expected, "Not matching" + + +def test_collect_frames_single_sequence_shot_with_frame(): + files = ["testing_sh010_workfileCompositing_000_v001.aep"] + ret = collect_frames(files) + + expected = { + "testing_sh010_workfileCompositing_000_v001.aep": "000" + } + + print(ret) + assert ret == expected, "Not matching" + + def test_collect_frames_single_sequence_full_path(): files = ['C:/test_project/assets/locations/Town/work/compositing\\renders\\aftereffects\\test_project_TestAsset_compositing_v001\\TestAsset_renderCompositingMain_v001.mov'] # noqa: E501 ret = collect_frames(files) From 16f4ada2ad4772debe465231cfb60bf4c22b1f27 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 17:59:56 +0100 Subject: [PATCH 46/69] use 'roots' instead of 'roots_obj' --- openpype/pipeline/load/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index ae47cb9ce9..118f86a570 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -502,7 +502,7 @@ def get_representation_path_from_context(context): session_project = Session.get("AVALON_PROJECT") if project_doc and project_doc["name"] != session_project: anatomy = Anatomy(project_doc["name"]) - root = anatomy.roots_obj + root = anatomy.roots return get_representation_path(representation, root) From d080b17cce56f6d128d45b26f9224db9294dcd5f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Mar 2022 18:00:25 +0100 Subject: [PATCH 47/69] OP-2813 - fix wrong logging --- .../modules/deadline/plugins/publish/submit_publish_job.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 8c0d78cae5..06505b4b47 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -613,8 +613,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # might be added explicitly before by publish_on_farm already_there = repre.get("files") == rep["files"] if already_there: + self.log.debug("repre {} already_there".format(repre)) break - self.log.debug("repre {} already_there".format(repre)) + if not already_there: representations.append(rep) From cd65332942ee14e9b47ee0608e24f5ae8c189aff Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 18:47:51 +0100 Subject: [PATCH 48/69] fixed filling of ftrack id --- .../event_sync_to_avalon.py | 26 +++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py index 237bf9fd80..46c333c4c4 100644 --- 
a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -212,6 +212,9 @@ class SyncToAvalonEvent(BaseEvent): return self._avalon_ents_by_ftrack_id def handle_missing_ftrack_id(self, doc): + # TODO handling of missing ftrack id is primarily issue of editorial + # publishing it would be better to find out what causes that + # ftrack id is removed during the publishing ftrack_id = doc["data"].get("ftrackId") if ftrack_id is not None: return @@ -221,10 +224,17 @@ class SyncToAvalonEvent(BaseEvent): self.dbcon.update_one( {"type": "project"}, - {"$set": {"data.ftrackId": ftrack_id}} + {"$set": { + "data.ftrackId": ftrack_id, + "data.entityType": self.cur_project.entity_type + }} ) doc["data"]["ftrackId"] = ftrack_id + doc["data"]["entityType"] = self.cur_project.entity_type + self.log.info("Updated ftrack id of project \"{}\"".format( + self.cur_project["full_name"] + )) return if doc["type"] != "asset": @@ -238,6 +248,7 @@ class SyncToAvalonEvent(BaseEvent): "select id, link from TypedContext" " where project_id is \"{}\" and name is \"{}\"" ).format(self.cur_project["id"], doc["name"])).all() + self.log.info("Entities: {}".format(str(entities))) matching_entity = None for entity in entities: parents = [] @@ -257,9 +268,20 @@ class SyncToAvalonEvent(BaseEvent): ftrack_id = matching_entity["id"] self.dbcon.update_one( {"_id": doc["_id"]}, - {"$set": {"data.ftrackId": ftrack_id}} + {"$set": { + "data.ftrackId": ftrack_id, + "data.entityType": matching_entity.entity_type + }} ) + doc["data"]["ftrackId"] = ftrack_id + doc["data"]["entityType"] = matching_entity.entity_type + entity_path_items = [] + for item in entity["link"]: + entity_path_items.append(item["name"]) + self.log.info("Updated ftrack id of entity \"{}\"".format( + "/".join(entity_path_items) + )) self._avalon_ents_by_ftrack_id[ftrack_id] = doc @property From 4dd95fba6842d2b4c4556e2465cb2ef00f70cb1f Mon Sep 17 
00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 19:22:35 +0100 Subject: [PATCH 49/69] added job and report messages --- .../action_fill_workfile_attr.py | 319 ++++++++++++++---- 1 file changed, 262 insertions(+), 57 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py index a72b29bdbe..77f18c49c1 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -1,4 +1,9 @@ +import os +import sys +import json import collections +import tempfile +import datetime import ftrack_api @@ -13,6 +18,8 @@ from openpype.lib import ( from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype_modules.ftrack.lib.avalon_sync import create_chunks +NOT_SYNCHRONIZED_TITLE = "Not synchronized" + class FillWorkfileAttributeAction(BaseAction): """Action fill work filename into custom attribute on tasks. @@ -44,24 +51,24 @@ class FillWorkfileAttributeAction(BaseAction): return is_valid def launch(self, session, entities, event): - task_entities = [] - other_entities = [] + # Separate entities and get project entity project_entity = None - project_selected = False for entity in entities: if project_entity is None: project_entity = self.get_project_from_entity(entity) - - ent_type_low = entity.entity_type.lower() - if ent_type_low == "project": - project_selected = True break - elif ent_type_low == "task": - task_entities.append(entity) - else: - other_entities.append(entity) + if not project_entity: + return { + "message": ( + "Couldn't find project entity." + " Could be an issue with permissions." + ), + "success": False + } + # Get project settings and check if custom attribute where workfile + # should be set is defined. 
project_name = project_entity["full_name"] project_settings = get_project_settings(project_name) custom_attribute_key = ( @@ -77,12 +84,16 @@ class FillWorkfileAttributeAction(BaseAction): "message": "Custom attribute key is not set in settings" } + # Try to find the custom attribute + # - get Task type object id task_obj_type = session.query( "select id from ObjectType where name is \"Task\"" ).one() + # - get text custom attribute type text_type = session.query( "select id from CustomAttributeType where name is \"text\"" ).one() + # - find the attribute attr_conf = session.query( ( "select id, key from CustomAttributeConfiguration" @@ -101,33 +112,184 @@ class FillWorkfileAttributeAction(BaseAction): ).format(custom_attribute_key) } + # Store report information + report = collections.defaultdict(list) + user_entity = session.query( + "User where id is {}".format(event["source"]["user"]["id"]) + ).one() + job_entity = session.create("Job", { + "user": user_entity, + "status": "running", + "data": json.dumps({ + "description": "(0/3) Fill of workfiles started" + }) + }) + session.commit() + + try: + self.in_job_process( + session, + entities, + job_entity, + project_entity, + project_settings, + attr_conf, + report + ) + except Exception: + self.log.error( + "Fill of workfiles to custom attribute failed", exc_info=True + ) + session.rollback() + + description = "Fill of workfiles Failed (Download traceback)" + self.add_traceback_to_job( + job_entity, session, sys.exc_info(), description + ) + return { + "message": ( + "Fill of workfiles failed." + " Check job for more information" + ), + "success": False + } + + job_entity["status"] = "done" + job_entity["data"] = json.dumps({ + "description": "Fill of workfiles completed." 
+ }) + session.commit() + if report: + temp_obj = tempfile.NamedTemporaryFile( + mode="w", + prefix="openpype_ftrack_", + suffix=".json", + delete=False + ) + temp_obj.close() + temp_filepath = temp_obj.name + with open(temp_filepath, "w") as temp_file: + json.dump(report, temp_file) + + component_name = "{}_{}".format( + "FillWorkfilesReport", + datetime.datetime.now().strftime("%y-%m-%d-%H%M") + ) + self.add_file_component_to_job( + job_entity, session, temp_filepath, component_name + ) + # Delete temp file + os.remove(temp_filepath) + self._show_report(event, report, project_name) + return { + "message": ( + "Fill of workfiles finished with few issues." + " Check job for more information" + ), + "success": True + } + + return { + "success": True, + "message": "Finished with filling of work filenames" + } + + def _show_report(self, event, report, project_name): + items = [] + title = "Fill workfiles report ({}):".format(project_name) + + for subtitle, lines in report.items(): + if items: + items.append({ + "type": "label", + "value": "---" + }) + items.append({ + "type": "label", + "value": "# {}".format(subtitle) + }) + items.append({ + "type": "label", + "value": '

{}

'.format("
".join(lines)) + }) + + self.show_interface( + items=items, + title=title, + event=event + ) + + def in_job_process( + self, + session, + entities, + job_entity, + project_entity, + project_settings, + attr_conf, + report + ): + task_entities = [] + other_entities = [] + project_selected = False + for entity in entities: + ent_type_low = entity.entity_type.lower() + if ent_type_low == "project": + project_selected = True + break + + elif ent_type_low == "task": + task_entities.append(entity) + else: + other_entities.append(entity) + + project_name = project_entity["full_name"] + + # Find matchin asset documents and map them by ftrack task entities + # - result stored to 'asset_docs_with_task_entities' is list with + # tuple `(asset document, [task entitis, ...])` dbcon = AvalonMongoDB() dbcon.Session["AVALON_PROJECT"] = project_name + # Quety all asset documents asset_docs = list(dbcon.find({"type": "asset"})) + job_entity["data"] = json.dumps({ + "description": "(1/3) Asset documents queried." + }) + session.commit() + + # When project is selected then we can query whole project if project_selected: - asset_docs_with_task_names = self._get_asset_docs_for_project( - session, project_entity, asset_docs + asset_docs_with_task_entities = self._get_asset_docs_for_project( + session, project_entity, asset_docs, report ) else: - asset_docs_with_task_names = self._get_tasks_for_selection( - session, other_entities, task_entities, asset_docs + asset_docs_with_task_entities = self._get_tasks_for_selection( + session, other_entities, task_entities, asset_docs, report ) + job_entity["data"] = json.dumps({ + "description": "(2/3) Queried related task entities." 
+ }) + session.commit() + + # Keep placeholders in the template unfilled host_name = "{host}" + extension = "{ext}" project_doc = dbcon.find_one({"type": "project"}) project_settings = get_project_settings(project_name) anatomy = Anatomy(project_name) templates_by_key = {} operations = [] - for asset_doc, task_entities in asset_docs_with_task_names: + for asset_doc, task_entities in asset_docs_with_task_entities: for task_entity in task_entities: workfile_data = get_workdir_data( project_doc, asset_doc, task_entity["name"], host_name ) + # Use version 1 for each workfile workfile_data["version"] = 1 - workfile_data["ext"] = "{ext}" + workfile_data["ext"] = extension task_type = workfile_data["task"]["type"] template_key = get_workfile_template_key( @@ -166,22 +328,40 @@ class FillWorkfileAttributeAction(BaseAction): session.recorded_operations.push(op) session.commit() - return True + job_entity["data"] = json.dumps({ + "description": "(3/3) Set custom attribute values." + }) + session.commit() + + def _get_entity_path(self, entity): + path_items = [] + for item in entity["link"]: + if item["type"].lower() != "project": + path_items.append(item["name"]) + return "/".join(path_items) + + def _get_asset_docs_for_project( + self, session, project_entity, asset_docs, report + ): + asset_docs_task_names = {} - def _get_asset_docs_for_project(self, session, project_entity, asset_docs): - asset_docs_task_names = collections.defaultdict(list) for asset_doc in asset_docs: asset_data = asset_doc["data"] - asset_tasks = asset_data.get("tasks") ftrack_id = asset_data.get("ftrackId") - if not asset_tasks or not ftrack_id: + if not ftrack_id: + hierarchy = list(asset_data.get("parents") or []) + hierarchy.append(asset_doc["name"]) + path = "/".join(hierarchy) + report[NOT_SYNCHRONIZED_TITLE].append(path) continue - asset_docs_task_names[ftrack_id].append( - (asset_doc, list(asset_tasks.keys())) + + asset_tasks = asset_data.get("tasks") or {} + asset_docs_task_names[ftrack_id] = 
( + asset_doc, list(asset_tasks.keys()) ) task_entities = session.query(( - "select id, name, parent_id from Task where project_id is {}" + "select id, name, parent_id, link from Task where project_id is {}" ).format(project_entity["id"])).all() task_entities_by_parent_id = collections.defaultdict(list) for task_entity in task_entities: @@ -189,21 +369,23 @@ class FillWorkfileAttributeAction(BaseAction): task_entities_by_parent_id[parent_id].append(task_entity) output = [] - for ftrack_id, items in asset_docs_task_names.items(): - for item in items: - asset_doc, task_names = item - valid_task_entities = [] - for task_entity in task_entities_by_parent_id[ftrack_id]: - if task_entity["name"] in task_names: - valid_task_entities.append(task_entity) + for ftrack_id, item in asset_docs_task_names.items(): + asset_doc, task_names = item + valid_task_entities = [] + for task_entity in task_entities_by_parent_id[ftrack_id]: + if task_entity["name"] in task_names: + valid_task_entities.append(task_entity) + else: + path = self._get_entity_path(task_entity) + report[NOT_SYNCHRONIZED_TITLE].append(path) - if valid_task_entities: - output.append((asset_doc, valid_task_entities)) + if valid_task_entities: + output.append((asset_doc, valid_task_entities)) return output def _get_tasks_for_selection( - self, session, other_entities, task_entities, asset_docs + self, session, other_entities, task_entities, asset_docs, report ): all_tasks = object() asset_docs_by_ftrack_id = {} @@ -216,13 +398,13 @@ class FillWorkfileAttributeAction(BaseAction): if ftrack_id: asset_docs_by_ftrack_id[ftrack_id] = asset_doc - missing_docs = set() + missing_doc_ftrack_ids = {} all_tasks_ids = set() task_names_by_ftrack_id = collections.defaultdict(list) for other_entity in other_entities: ftrack_id = other_entity["id"] if ftrack_id not in asset_docs_by_ftrack_id: - missing_docs.add(ftrack_id) + missing_doc_ftrack_ids[ftrack_id] = None continue all_tasks_ids.add(ftrack_id) 
task_names_by_ftrack_id[ftrack_id] = all_tasks @@ -230,21 +412,18 @@ class FillWorkfileAttributeAction(BaseAction): for task_entity in task_entities: parent_id = task_entity["parent_id"] if parent_id not in asset_docs_by_ftrack_id: - missing_docs.add(parent_id) + missing_doc_ftrack_ids[parent_id] = None continue if all_tasks_ids not in all_tasks_ids: task_names_by_ftrack_id[ftrack_id].append(task_entity["name"]) ftrack_ids = set() - asset_doc_with_task_names_by_id = collections.defaultdict(list) + asset_doc_with_task_names_by_id = {} for ftrack_id, task_names in task_names_by_ftrack_id.items(): asset_doc = asset_docs_by_ftrack_id[ftrack_id] asset_data = asset_doc["data"] - asset_tasks = asset_data.get("tasks") - if not asset_tasks: - # TODO add to report - continue + asset_tasks = asset_data.get("tasks") or {} if task_names is all_tasks: task_names = list(asset_tasks.keys()) @@ -253,15 +432,19 @@ class FillWorkfileAttributeAction(BaseAction): for task_name in task_names: if task_name in asset_tasks: new_task_names.append(task_name) - else: - # TODO add report - pass + continue + + if ftrack_id not in missing_doc_ftrack_ids: + missing_doc_ftrack_ids[ftrack_id] = [] + if missing_doc_ftrack_ids[ftrack_id] is not None: + missing_doc_ftrack_ids[ftrack_id].append(task_name) + task_names = new_task_names if task_names: ftrack_ids.add(ftrack_id) - asset_doc_with_task_names_by_id[ftrack_id].append( - (asset_doc, task_names) + asset_doc_with_task_names_by_id[ftrack_id] = ( + asset_doc, task_names ) task_entities = session.query(( @@ -273,15 +456,37 @@ class FillWorkfileAttributeAction(BaseAction): task_entitiy_by_parent_id[parent_id].append(task_entity) output = [] - for ftrack_id, items in asset_doc_with_task_names_by_id.items(): - for item in items: - asset_doc, task_names = item - valid_task_entities = [] - for task_entity in task_entitiy_by_parent_id[ftrack_id]: - if task_entity["name"] in task_names: - valid_task_entities.append(task_entity) - if valid_task_entities: - 
output.append((asset_doc, valid_task_entities)) + for ftrack_id, item in asset_doc_with_task_names_by_id.items(): + asset_doc, task_names = item + valid_task_entities = [] + for task_entity in task_entitiy_by_parent_id[ftrack_id]: + if task_entity["name"] in task_names: + valid_task_entities.append(task_entity) + else: + if ftrack_id not in missing_doc_ftrack_ids: + missing_doc_ftrack_ids[ftrack_id] = [] + if missing_doc_ftrack_ids[ftrack_id] is not None: + missing_doc_ftrack_ids[ftrack_id].append(task_name) + if valid_task_entities: + output.append((asset_doc, valid_task_entities)) + + # Store report information about not synchronized entities + if missing_doc_ftrack_ids: + missing_entities = session.query( + "select id, link from TypedContext where id in ({})".format( + self.join_query_keys(missing_doc_ftrack_ids.keys()) + ) + ).all() + for missing_entity in missing_entities: + path = self._get_entity_path(missing_entity) + task_names = missing_doc_ftrack_ids[missing_entity["id"]] + if task_names is None: + report[NOT_SYNCHRONIZED_TITLE].append(path) + else: + for task_name in task_names: + task_path = "/".join([path, task_name]) + report[NOT_SYNCHRONIZED_TITLE].append(task_path) + return output From fe8caa3b3aef5b78bf76fe7ff8fce5c37b92227a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 19:24:11 +0100 Subject: [PATCH 50/69] fix app key --- .../ftrack/event_handlers_user/action_fill_workfile_attr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py index 77f18c49c1..3888379e04 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -274,7 +274,7 @@ class FillWorkfileAttributeAction(BaseAction): session.commit() # Keep placeholders in the template unfilled - host_name = 
"{host}" + host_name = "{app}" extension = "{ext}" project_doc = dbcon.find_one({"type": "project"}) project_settings = get_project_settings(project_name) From d3dc406b905f0554e867e5447e2f71ec8de85862 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 19:27:05 +0100 Subject: [PATCH 51/69] use get_workdir_data in wokrfiles tool --- openpype/tools/workfiles/app.py | 36 ++++++--------------------------- 1 file changed, 6 insertions(+), 30 deletions(-) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 63958ac57b..da5524331a 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -27,7 +27,7 @@ from openpype.lib import ( save_workfile_data_to_doc, get_workfile_template_key, create_workdir_extra_folders, - get_system_general_anatomy_data + get_workdir_data ) from openpype.lib.avalon_context import ( update_current_task, @@ -48,6 +48,7 @@ def build_workfile_data(session): # Set work file data for template formatting asset_name = session["AVALON_ASSET"] task_name = session["AVALON_TASK"] + host_name = session["AVALON_APP"] project_doc = io.find_one( {"type": "project"}, { @@ -63,42 +64,17 @@ def build_workfile_data(session): "name": asset_name }, { + "name": True, "data.tasks": True, "data.parents": True } ) - - task_type = asset_doc["data"]["tasks"].get(task_name, {}).get("type") - - project_task_types = project_doc["config"]["tasks"] - task_short = project_task_types.get(task_type, {}).get("short_name") - - asset_parents = asset_doc["data"]["parents"] - parent_name = project_doc["name"] - if asset_parents: - parent_name = asset_parents[-1] - - data = { - "project": { - "name": project_doc["name"], - "code": project_doc["data"].get("code") - }, - "asset": asset_name, - "task": { - "name": task_name, - "type": task_type, - "short": task_short, - }, - "parent": parent_name, + data = get_workdir_data(project_doc, asset_doc, task_name, host_name) + data.update({ "version": 1, - "user": 
getpass.getuser(), "comment": "", "ext": None - } - - # add system general settings anatomy data - system_general_data = get_system_general_anatomy_data() - data.update(system_general_data) + }) return data From cbb7db98f7a917bf30a0159a6f3ae548a6a8a906 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 19:36:50 +0100 Subject: [PATCH 52/69] OPENPYPE_DEBUG can be set to 1 to log debug messages --- openpype/cli.py | 14 +++++++------- openpype/lib/log.py | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index 155e07dea3..cbeb7fef9b 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -101,7 +101,7 @@ def eventserver(debug, on linux and window service). """ if debug: - os.environ['OPENPYPE_DEBUG'] = "3" + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands().launch_eventservercli( ftrack_url, @@ -128,7 +128,7 @@ def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None): Expect "pype.club" user created on Ftrack. """ if debug: - os.environ['OPENPYPE_DEBUG'] = "3" + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands().launch_webpublisher_webservercli( upload_dir=upload_dir, @@ -176,7 +176,7 @@ def publish(debug, paths, targets, gui): More than one path is allowed. """ if debug: - os.environ['OPENPYPE_DEBUG'] = '3' + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands.publish(list(paths), targets, gui) @@ -195,7 +195,7 @@ def remotepublishfromapp(debug, project, path, host, user=None, targets=None): More than one path is allowed. """ if debug: - os.environ['OPENPYPE_DEBUG'] = '3' + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands.remotepublishfromapp( project, path, host, user, targets=targets ) @@ -215,7 +215,7 @@ def remotepublish(debug, project, path, user=None, targets=None): More than one path is allowed. 
""" if debug: - os.environ['OPENPYPE_DEBUG'] = '3' + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands.remotepublish(project, path, user, targets=targets) @@ -240,7 +240,7 @@ def texturecopy(debug, project, asset, path): Nothing is written to database. """ if debug: - os.environ['OPENPYPE_DEBUG'] = '3' + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands().texture_copy(project, asset, path) @@ -409,7 +409,7 @@ def syncserver(debug, active_site): var OPENPYPE_LOCAL_ID set to 'active_site'. """ if debug: - os.environ['OPENPYPE_DEBUG'] = '3' + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands().syncserver(active_site) diff --git a/openpype/lib/log.py b/openpype/lib/log.py index a42faef008..98a3bae8e6 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -227,7 +227,7 @@ class PypeLogger: logger = logging.getLogger(name or "__main__") - if cls.pype_debug > 1: + if cls.pype_debug > 0: logger.setLevel(logging.DEBUG) else: logger.setLevel(logging.INFO) From c1200c16d5900d3b23af6406b43ced45385e58cd Mon Sep 17 00:00:00 2001 From: OpenPype Date: Thu, 17 Mar 2022 23:00:09 +0000 Subject: [PATCH 53/69] [Automated] Bump version --- CHANGELOG.md | 11 ++++++++++- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a1da69f13..78ebf8f164 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,16 +1,21 @@ # Changelog -## [3.9.1-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.9.1-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...HEAD) **🚀 Enhancements** +- General: Change how OPENPYPE\_DEBUG value is handled [\#2907](https://github.com/pypeclub/OpenPype/pull/2907) +- nuke: imageio adding ocio config version 1.2 [\#2897](https://github.com/pypeclub/OpenPype/pull/2897) +- Flame: support for comment with xml attribute overrides [\#2892](https://github.com/pypeclub/OpenPype/pull/2892) - Nuke: 
ExtractReviewSlate can handle more codes and profiles [\#2879](https://github.com/pypeclub/OpenPype/pull/2879) - Flame: sequence used for reference video [\#2869](https://github.com/pypeclub/OpenPype/pull/2869) **🐛 Bug fixes** +- General: Fix use of Anatomy roots [\#2904](https://github.com/pypeclub/OpenPype/pull/2904) +- Fixing gap detection in extract review [\#2902](https://github.com/pypeclub/OpenPype/pull/2902) - Pyblish Pype - ensure current state is correct when entering new group order [\#2899](https://github.com/pypeclub/OpenPype/pull/2899) - SceneInventory: Fix import of load function [\#2894](https://github.com/pypeclub/OpenPype/pull/2894) - Harmony - fixed creator issue [\#2891](https://github.com/pypeclub/OpenPype/pull/2891) @@ -32,6 +37,10 @@ - AssetCreator: Remove the tool [\#2845](https://github.com/pypeclub/OpenPype/pull/2845) +### 📖 Documentation + +- Documentation: Change Photoshop & AfterEffects plugin path [\#2878](https://github.com/pypeclub/OpenPype/pull/2878) + **🚀 Enhancements** - General: Subset name filtering in ExtractReview outpus [\#2872](https://github.com/pypeclub/OpenPype/pull/2872) diff --git a/openpype/version.py b/openpype/version.py index 5eca7c1d90..a62afd1953 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.1-nightly.2" +__version__ = "3.9.1-nightly.3" diff --git a/pyproject.toml b/pyproject.toml index af448ed24c..71c0af0b4f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.9.1-nightly.2" # OpenPype +version = "3.9.1-nightly.3" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 10d9b42c74ff84e747e3f61c97499f29f33fb45c Mon Sep 17 00:00:00 2001 From: OpenPype Date: Thu, 17 Mar 2022 23:40:09 +0000 Subject: [PATCH 54/69] [Automated] Release --- CHANGELOG.md | 4 ++-- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 78ebf8f164..f3c7820d8f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ # Changelog -## [3.9.1-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-17) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...3.9.1) **🚀 Enhancements** diff --git a/openpype/version.py b/openpype/version.py index a62afd1953..1ef25e3f48 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.1-nightly.3" +__version__ = "3.9.1" diff --git a/pyproject.toml b/pyproject.toml index 71c0af0b4f..7c09495a99 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.9.1-nightly.3" # OpenPype +version = "3.9.1" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From a912c4db80729ab2b87c4b6c5c07403254e82cba Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 18 Mar 2022 09:45:04 +0100 Subject: [PATCH 55/69] update avalon-core --- repos/avalon-core | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/avalon-core b/repos/avalon-core index 7753d15507..64491fbbcf 160000 --- a/repos/avalon-core +++ b/repos/avalon-core @@ -1 +1 @@ -Subproject commit 7753d15507afadc143b7d49db8fcfaa6a29fed91 +Subproject commit 64491fbbcf89ba2a0b3a20d67d7486c6142232b3 From 5d25de8997c46c46ac2c6bdfeb36cf9e032266ec Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Mar 2022 11:00:26 +0100 Subject: [PATCH 56/69] OP-2813 - added documentation how to run test file in IDE --- tests/README.md | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/tests/README.md b/tests/README.md index bb1cdbdef8..d0b537d425 100644 --- a/tests/README.md +++ b/tests/README.md @@ -21,3 +21,27 @@ Specific location could be provided to this command as an argument, either as ab (eg. `python ${OPENPYPE_ROOT}/start.py start.py runtests ../tests/integration`) will trigger only tests in `integration` folder. See `${OPENPYPE_ROOT}/cli.py:runtests` for other arguments. + +Run in IDE: +----------- +If you would prefer to run/debug single file dirrectly in IDE of your choice, you might encounter issues with imports. +It would manifest like `KeyError: 'OPENPYPE_DATABASE_NAME'`. That means you are importing module that depends on OP to be running, eg. all expected variables are set. + +In some cases your tests might be so localized, that you don't care about all env vars to be set properly. 
+In that case you might add this dummy configuration BEFORE any imports in your test file +``` +import os +os.environ["AVALON_MONGO"] = "mongodb://localhost:27017" +os.environ["OPENPYPE_MONGO"] = "mongodb://localhost:27017" +os.environ["AVALON_DB"] = "avalon" +os.environ["OPENPYPE_DATABASE_NAME"] = "openpype" +os.environ["AVALON_TIMEOUT"] = '3000' +os.environ["OPENPYPE_DEBUG"] = "3" +os.environ["AVALON_CONFIG"] = "pype" +os.environ["AVALON_ASSET"] = "Asset" +os.environ["AVALON_PROJECT"] = "test_project" +``` +(AVALON_ASSET and AVALON_PROJECT values should exist in your environment) + +This might be enough to run your test file separately. Do not commit this skeleton though. +Use only when you know what you are doing! \ No newline at end of file From c83202a023484aaa6a9a64aaab63ad879d71ded7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Mar 2022 11:13:10 +0100 Subject: [PATCH 57/69] OP-2813 - changed logic of parsing frames from names Adhering to clique standard FRAMES patter, eg pattern is separated by . It seems that this is most widely used (according to Discord). --- openpype/lib/delivery.py | 18 ++++---------- tests/unit/openpype/lib/test_delivery.py | 30 +++++++++++++++++++++--- 2 files changed, 32 insertions(+), 16 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index 78d743003b..03abe5802c 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -4,7 +4,6 @@ import shutil import glob import clique import collections -import re def collect_frames(files): @@ -14,31 +13,24 @@ def collect_frames(files): Uses clique as most precise solution, used when anatomy template that created files is not known. - Depends that version substring starts with 'v' with any number of - numeric characters after. + Assumption is that frames are separated by '.', negative frames are not + allowed. 
Args: files(list) or (set with single value): list of source paths Returns: (dict): {'/asset/subset_v001.0001.png': '0001', ....} """ - collections, remainder = clique.assemble(files, minimum_items=1) + patterns = [clique.PATTERNS["frames"]] + collections, remainder = clique.assemble(files, minimum_items=1, + patterns=patterns) - real_file_name = None sources_and_frames = {} - if len(files) == 1: - real_file_name = list(files)[0] - sources_and_frames[real_file_name] = None - if collections: for collection in collections: src_head = collection.head src_tail = collection.tail - # version recognized as a collection - if re.match(".*([a-zA-Z0-9]%[0-9]+d).*", collection.format()): - continue - for index in collection.indexes: src_frame = collection.format("{padding}") % index src_file_name = "{}{}{}".format(src_head, src_frame, diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py index 871ea95df7..04a71655e3 100644 --- a/tests/unit/openpype/lib/test_delivery.py +++ b/tests/unit/openpype/lib/test_delivery.py @@ -47,6 +47,18 @@ def test_collect_frames_single_sequence(): assert ret == expected, "Not matching" +def test_collect_frames_single_sequence_negative(): + files = ["Asset_renderCompositingMain_v001.-0000.png"] + ret = collect_frames(files) + + expected = { + "Asset_renderCompositingMain_v001.-0000.png": None + } + + print(ret) + assert ret == expected, "Not matching" + + def test_collect_frames_single_sequence_shot(): files = ["testing_sh010_workfileCompositing_v001.aep"] ret = collect_frames(files) @@ -59,12 +71,24 @@ def test_collect_frames_single_sequence_shot(): assert ret == expected, "Not matching" +def test_collect_frames_single_sequence_numbers(): + files = ["PRJ_204_430_0005_renderLayoutMain_v001.0001.exr"] + ret = collect_frames(files) + + expected = { + "PRJ_204_430_0005_renderLayoutMain_v001.0001.exr": "0001" + } + + print(ret) + assert ret == expected, "Not matching" + + def 
test_collect_frames_single_sequence_shot_with_frame(): files = ["testing_sh010_workfileCompositing_000_v001.aep"] ret = collect_frames(files) expected = { - "testing_sh010_workfileCompositing_000_v001.aep": "000" + "testing_sh010_workfileCompositing_000_v001.aep": None } print(ret) @@ -88,7 +112,7 @@ def test_collect_frames_single_sequence_different_format(): ret = collect_frames(files) expected = { - "Asset.v001.renderCompositingMain_0000.png": "0000" + "Asset.v001.renderCompositingMain_0000.png": None } print(ret) @@ -100,7 +124,7 @@ def test_collect_frames_single_sequence_withhout_version(): ret = collect_frames(files) expected = { - "pngv001.renderCompositingMain_0000.png": "0000" + "pngv001.renderCompositingMain_0000.png": None } print(ret) From 55087ec5b849eb27496ea72c06fbdf5f55cb057d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Mar 2022 12:27:15 +0100 Subject: [PATCH 58/69] OP-2813 - fix typo --- tests/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/README.md b/tests/README.md index d0b537d425..69828cdbc2 100644 --- a/tests/README.md +++ b/tests/README.md @@ -24,7 +24,7 @@ See `${OPENPYPE_ROOT}/cli.py:runtests` for other arguments. Run in IDE: ----------- -If you would prefer to run/debug single file dirrectly in IDE of your choice, you might encounter issues with imports. +If you prefer to run/debug single file directly in IDE of your choice, you might encounter issues with imports. It would manifest like `KeyError: 'OPENPYPE_DATABASE_NAME'`. That means you are importing module that depends on OP to be running, eg. all expected variables are set. In some cases your tests might be so localized, that you don't care about all env vars to be set properly. 
From 6eaf7017eb66d85ca0089a84dbd63ebd874cf9f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 14:20:20 +0100 Subject: [PATCH 59/69] replaced 'format_template_with_optional_keys' with 'StringTemplate' --- .../plugins/publish/collect_texture.py | 17 ++++++++++------- .../tvpaint/plugins/load/load_workfile.py | 18 ++++++++++-------- openpype/lib/delivery.py | 15 ++++++++------- .../action_delete_old_versions.py | 14 ++++++-------- openpype/pipeline/load/utils.py | 9 +++++---- openpype/plugins/publish/integrate_new.py | 12 +++++++----- 6 files changed, 46 insertions(+), 39 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_texture.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_texture.py index ea0b6cdf41..c1c48ec72d 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_texture.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_texture.py @@ -3,9 +3,10 @@ import re import pyblish.api import json -from avalon.api import format_template_with_optional_keys - -from openpype.lib import prepare_template_data +from openpype.lib import ( + prepare_template_data, + StringTemplate, +) class CollectTextures(pyblish.api.ContextPlugin): @@ -110,8 +111,9 @@ class CollectTextures(pyblish.api.ContextPlugin): formatting_data.update(explicit_data) fill_pairs = prepare_template_data(formatting_data) - workfile_subset = format_template_with_optional_keys( - fill_pairs, self.workfile_subset_template) + workfile_subset = StringTemplate.format_strict_template( + self.workfile_subset_template, fill_pairs + ) asset_build = self._get_asset_build( repre_file, @@ -201,8 +203,9 @@ class CollectTextures(pyblish.api.ContextPlugin): formatting_data.update(explicit_data) fill_pairs = prepare_template_data(formatting_data) - subset = format_template_with_optional_keys( - fill_pairs, self.texture_subset_template) + subset = StringTemplate.format_strict_template( + self.texture_subset_template, 
fill_pairs + ) asset_build = self._get_asset_build( repre_file, diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index 33e2a76cc9..11219320ca 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -4,7 +4,8 @@ import os from avalon import api, io from openpype.lib import ( get_workfile_template_key_from_context, - get_workdir_data + get_workdir_data, + StringTemplate, ) from openpype.api import Anatomy from openpype.hosts.tvpaint.api import lib, pipeline, plugin @@ -69,7 +70,7 @@ class LoadWorkfile(plugin.Loader): data["root"] = anatomy.roots data["user"] = getpass.getuser() - template = anatomy.templates[template_key]["file"] + file_template = anatomy.templates[template_key]["file"] # Define saving file extension if current_file: @@ -81,11 +82,12 @@ class LoadWorkfile(plugin.Loader): data["ext"] = extension - work_root = api.format_template_with_optional_keys( - data, anatomy.templates[template_key]["folder"] + folder_template = anatomy.templates[template_key]["folder"] + work_root = StringTemplate.format_strict_template( + folder_template, data ) version = api.last_workfile_with_version( - work_root, template, data, host.file_extensions() + work_root, file_template, data, host.file_extensions() )[1] if version is None: @@ -95,8 +97,8 @@ class LoadWorkfile(plugin.Loader): data["version"] = version - path = os.path.join( - work_root, - api.format_template_with_optional_keys(data, template) + filename = StringTemplate.format_strict_template( + file_template, data ) + path = os.path.join(work_root, filename) host.save_file(path) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index 03abe5802c..ffcfe9fa4d 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -5,6 +5,11 @@ import glob import clique import collections +from .path_templates import ( + StringTemplate, + TemplateUnsolved, +) + 
 def collect_frames(files):
     """
@@ -52,8 +57,6 @@ def sizeof_fmt(num, suffix='B'):
 
 
 def path_from_representation(representation, anatomy):
-    from avalon import pipeline  # safer importing
-
     try:
         template = representation["data"]["template"]
 
@@ -63,12 +66,10 @@
     try:
         context = representation["context"]
         context["root"] = anatomy.roots
-        path = pipeline.format_template_with_optional_keys(
-            context, template
-        )
-        path = os.path.normpath(path.replace("/", "\\"))
+        path = StringTemplate.format_strict_template(template, context)
+        return os.path.normpath(path)
 
-    except KeyError:
+    except TemplateUnsolved:
         # Template references unavailable data
         return None
 
diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py
index c66d1819ac..1b694e25f1 100644
--- a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py
+++ b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py
@@ -5,11 +5,11 @@ import uuid
 
 import clique
 from pymongo import UpdateOne
 
-from openpype_modules.ftrack.lib import BaseAction, statics_icon
 from avalon.api import AvalonMongoDB
-from openpype.api import Anatomy
-import avalon.pipeline
+from openpype.api import Anatomy
+from openpype.lib import StringTemplate, TemplateUnsolved
+from openpype_modules.ftrack.lib import BaseAction, statics_icon
 
 
 class DeleteOldVersions(BaseAction):
@@ -563,18 +563,16 @@
         try:
             context = representation["context"]
             context["root"] = anatomy.roots
-            path = avalon.pipeline.format_template_with_optional_keys(
-                context, template
-            )
+            path = StringTemplate.format_strict_template(template, context)
             if "frame" in context:
                 context["frame"] = self.sequence_splitter
                 sequence_path = os.path.normpath(
-                    avalon.pipeline.format_template_with_optional_keys(
-                        context, template
+                    StringTemplate.format_strict_template(
+                        template, context
                     )
                 )
 
- except KeyError: + except (KeyError, TemplateUnsolved): # Template references unavailable data return (None, None) diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 118f86a570..6d32c11cd7 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -525,7 +525,7 @@ def get_representation_path(representation, root=None, dbcon=None): """ - from openpype.lib import StringTemplate + from openpype.lib import StringTemplate, TemplateUnsolved if dbcon is None: dbcon = io @@ -542,13 +542,14 @@ def get_representation_path(representation, root=None, dbcon=None): try: context = representation["context"] context["root"] = root - template_obj = StringTemplate(template) - path = str(template_obj.format(context)) + path = StringTemplate.format_strict_template( + template, context + ) # Force replacing backslashes with forward slashed if not on # windows if platform.system().lower() != "windows": path = path.replace("\\", "/") - except KeyError: + except (TemplateUnsolved, KeyError): # Template references unavailable data return None diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index e8dab089af..6ca6125cb2 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -12,14 +12,15 @@ import shutil from pymongo import DeleteOne, InsertOne import pyblish.api from avalon import io -from avalon.api import format_template_with_optional_keys import openpype.api from datetime import datetime # from pype.modules import ModulesManager from openpype.lib.profiles_filtering import filter_profiles from openpype.lib import ( prepare_template_data, - create_hard_link + create_hard_link, + StringTemplate, + TemplateUnsolved ) # this is needed until speedcopy for linux is fixed @@ -854,9 +855,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): fill_pairs = prepare_template_data(fill_pairs) try: - filled_template = \ - 
format_template_with_optional_keys(fill_pairs, template) - except KeyError: + filled_template = StringTemplate.format_strict_template( + template, fill_pairs + ) + except (KeyError, TemplateUnsolved): keys = [] if fill_pairs: keys = fill_pairs.keys() From e961144969dccb207d6d7e7e2d270a7b5b45fbec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 14:55:53 +0100 Subject: [PATCH 60/69] moved functions to get last workfile into avalon context lib functions --- openpype/lib/avalon_context.py | 126 ++++++++++++++++++++++++++++++++- 1 file changed, 123 insertions(+), 3 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 26beba41ee..0b1d09908c 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -16,6 +16,7 @@ from openpype.settings import ( from .anatomy import Anatomy from .profiles_filtering import filter_profiles from .events import emit_event +from .path_templates import StringTemplate # avalon module is not imported at the top # - may not be in path at the time of pype.lib initialization @@ -1735,8 +1736,6 @@ def get_custom_workfile_template_by_context( context. (Existence of formatted path is not validated.) """ - from openpype.lib import filter_profiles - if anatomy is None: anatomy = Anatomy(project_doc["name"]) @@ -1759,7 +1758,9 @@ def get_custom_workfile_template_by_context( # there are some anatomy template strings if matching_item: template = matching_item["path"][platform.system().lower()] - return template.format(**anatomy_context_data) + return StringTemplate.format_strict_template( + template, anatomy_context_data + ) return None @@ -1847,3 +1848,122 @@ def get_custom_workfile_template(template_profiles): io.Session["AVALON_TASK"], io ) + + +def get_last_workfile_with_version( + workdir, file_template, fill_data, extensions +): + """Return last workfile version. + + Args: + workdir(str): Path to dir where workfiles are stored. 
+ file_template(str): Template of file name. + fill_data(dict): Data for filling template. + extensions(list, tuple): All allowed file extensions of workfile. + + Returns: + tuple: Last workfile with version if there is any otherwise + returns (None, None). + """ + if not os.path.exists(workdir): + return None, None + + # Fast match on extension + filenames = [ + filename + for filename in os.listdir(workdir) + if os.path.splitext(filename)[1] in extensions + ] + + # Build template without optionals, version to digits only regex + # and comment to any definable value. + _ext = [] + for ext in extensions: + if not ext.startswith("."): + ext = "." + ext + # Escape dot for regex + ext = "\\" + ext + _ext.append(ext) + ext_expression = "(?:" + "|".join(_ext) + ")" + + # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end + file_template = re.sub(r"\.?{ext}", ext_expression, file_template) + # Replace optional keys with optional content regex + file_template = re.sub(r"<.*?>", r".*?", file_template) + # Replace `{version}` with group regex + file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) + file_template = re.sub(r"{comment.*?}", r".+?", file_template) + filename = StringTemplate.format_strict_template(file_template, fill_data) + + # Match with ignore case on Windows due to the Windows + # OS not being case-sensitive. This avoids later running + # into the error that the file did exist if it existed + # with a different upper/lower-case. 
+ kwargs = {} + if platform.system().lower() == "windows": + kwargs["flags"] = re.IGNORECASE + + # Get highest version among existing matching files + version = None + output_filenames = [] + for filename in sorted(filenames): + match = re.match(file_template, filename, **kwargs) + if not match: + continue + + file_version = int(match.group(1)) + if version is None or file_version > version: + output_filenames[:] = [] + version = file_version + + if file_version == version: + output_filenames.append(filename) + + output_filename = None + if output_filenames: + if len(output_filenames) == 1: + output_filename = output_filenames[0] + else: + last_time = None + for _output_filename in output_filenames: + full_path = os.path.join(workdir, _output_filename) + mod_time = os.path.getmtime(full_path) + if last_time is None or last_time < mod_time: + output_filename = _output_filename + last_time = mod_time + + return output_filename, version + + +def get_last_workfile( + workdir, file_template, fill_data, extensions, full_path=False +): + """Return last workfile filename. + + Returns file with version 1 if there is not workfile yet. + + Args: + workdir(str): Path to dir where workfiles are stored. + file_template(str): Template of file name. + fill_data(dict): Data for filling template. + extensions(list, tuple): All allowed file extensions of workfile. + full_path(bool): Full path to file is returned if set to True. + + Returns: + str: Last or first workfile as filename of full path to filename. 
+ """ + filename, version = get_last_workfile_with_version( + workdir, file_template, fill_data, extensions + ) + if filename is None: + data = copy.deepcopy(fill_data) + data["version"] = 1 + data.pop("comment", None) + if not data.get("ext"): + data["ext"] = extensions[0] + filename = StringTemplate.format_strict_template(file_template, data) + + if full_path: + return os.path.normpath(os.path.join(workdir, filename)) + + return filename From 65bc619bcb1238ef917060c46e31f771dec6d9c7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 14:57:02 +0100 Subject: [PATCH 61/69] use moved workfile functions --- openpype/hosts/tvpaint/plugins/load/load_workfile.py | 7 +++---- openpype/lib/__init__.py | 4 ++++ openpype/lib/applications.py | 5 +++-- openpype/tools/workfiles/app.py | 8 ++++---- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index 11219320ca..d224cfc390 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -1,11 +1,11 @@ -import getpass import os from avalon import api, io from openpype.lib import ( + StringTemplate, get_workfile_template_key_from_context, get_workdir_data, - StringTemplate, + get_last_workfile_with_version, ) from openpype.api import Anatomy from openpype.hosts.tvpaint.api import lib, pipeline, plugin @@ -68,7 +68,6 @@ class LoadWorkfile(plugin.Loader): data = get_workdir_data(project_doc, asset_doc, task_name, host_name) data["root"] = anatomy.roots - data["user"] = getpass.getuser() file_template = anatomy.templates[template_key]["file"] @@ -86,7 +85,7 @@ class LoadWorkfile(plugin.Loader): work_root = StringTemplate.format_strict_template( folder_template, data ) - version = api.last_workfile_with_version( + version = get_last_workfile_with_version( work_root, file_template, data, host.file_extensions() )[1] diff --git 
a/openpype/lib/__init__.py b/openpype/lib/__init__.py index b8502ae718..1ebafbb2d2 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -114,6 +114,8 @@ from .avalon_context import ( get_workdir_data, get_workdir, get_workdir_with_workdir_data, + get_last_workfile_with_version, + get_last_workfile, create_workfile_doc, save_workfile_data_to_doc, @@ -263,6 +265,8 @@ __all__ = [ "get_workdir_data", "get_workdir", "get_workdir_with_workdir_data", + "get_last_workfile_with_version", + "get_last_workfile", "create_workfile_doc", "save_workfile_data_to_doc", diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index ef175ac89a..557c016d74 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -28,7 +28,8 @@ from .local_settings import get_openpype_username from .avalon_context import ( get_workdir_data, get_workdir_with_workdir_data, - get_workfile_template_key + get_workfile_template_key, + get_last_workfile ) from .python_module_tools import ( @@ -1609,7 +1610,7 @@ def _prepare_last_workfile(data, workdir): "ext": extensions[0] }) - last_workfile_path = avalon.api.last_workfile( + last_workfile_path = get_last_workfile( workdir, file_template, workdir_data, extensions, True ) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index da5524331a..713992bc4b 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -2,7 +2,6 @@ import sys import os import re import copy -import getpass import shutil import logging import datetime @@ -27,7 +26,8 @@ from openpype.lib import ( save_workfile_data_to_doc, get_workfile_template_key, create_workdir_extra_folders, - get_workdir_data + get_workdir_data, + get_last_workfile_with_version ) from openpype.lib.avalon_context import ( update_current_task, @@ -441,7 +441,7 @@ class NameWindow(QtWidgets.QDialog): data["ext"] = data["ext"][1:] - version = api.last_workfile_with_version( + version = 
get_last_workfile_with_version( self.root, template, data, extensions )[1] @@ -469,7 +469,7 @@ class NameWindow(QtWidgets.QDialog): # Log warning if idx == 0: log.warning(( - "BUG: Function `last_workfile_with_version` " + "BUG: Function `get_last_workfile_with_version` " "didn't return last version." )) # Raise exception if even 100 version fallback didn't help From 4a8a7b86889d4af5dd9662b1331320a89c674c94 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 15:34:30 +0100 Subject: [PATCH 62/69] add headless argument --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 82c2494e7a..eeb1f7744c 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -46,6 +46,7 @@ def inject_openpype_environment(deadlinePlugin): args = [ openpype_app, + "--headless", 'extractenvironments', export_url ] From 10c7fb21e48cdb281102068a6c3e2acf49feb1af Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 15:34:42 +0100 Subject: [PATCH 63/69] use headless in submit publish job --- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 06505b4b47..fad4d14ea0 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -236,6 +236,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): environment["OPENPYPE_MONGO"] = mongo_url args = [ + "--headless", 'publish', roothless_metadata_path, "--targets", "deadline", From 4f643a2928bc2c49f96c5724d10e63cff254ce7b Mon 
Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 18 Mar 2022 16:46:40 +0100 Subject: [PATCH 64/69] Only raise minor version if `Bump Minor` label is found --- tools/ci_tools.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tools/ci_tools.py b/tools/ci_tools.py index aeb367af38..3e1e3d8d02 100644 --- a/tools/ci_tools.py +++ b/tools/ci_tools.py @@ -8,8 +8,12 @@ import os def get_release_type_github(Log, github_token): # print(Log) - minor_labels = ["type: feature", "type: deprecated"] - patch_labels = ["type: enhancement", "type: bug"] + minor_labels = ["Bump Minor"] + # patch_labels = [ + # "type: enhancement", + # "type: bug", + # "type: deprecated", + # "type: Feature"] g = Github(github_token) repo = g.get_repo("pypeclub/OpenPype") @@ -28,9 +32,12 @@ def get_release_type_github(Log, github_token): if any(label in labels for label in minor_labels): return "minor" - - if any(label in labels for label in patch_labels): + else return "patch" + + #TODO: if all is working fine, this part can be cleaned up eventually + # if any(label in labels for label in patch_labels): + # return "patch" return None From f804b5f7e193e174c2cee885d14d3459ae52fbd9 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 18 Mar 2022 16:56:36 +0100 Subject: [PATCH 65/69] fix typo --- tools/ci_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/ci_tools.py b/tools/ci_tools.py index 3e1e3d8d02..5a28d3fd66 100644 --- a/tools/ci_tools.py +++ b/tools/ci_tools.py @@ -32,7 +32,7 @@ def get_release_type_github(Log, github_token): if any(label in labels for label in minor_labels): return "minor" - else + else: return "patch" #TODO: if all is working fine, this part can be cleaned up eventually From 952fc093682685a1893a0cb7615eb2e6ab197071 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 18 Mar 2022 16:57:07 +0100 Subject: [PATCH 66/69] fix hound nitpicking --- tools/ci_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/tools/ci_tools.py b/tools/ci_tools.py index 5a28d3fd66..4c59cd6af6 100644 --- a/tools/ci_tools.py +++ b/tools/ci_tools.py @@ -35,7 +35,7 @@ def get_release_type_github(Log, github_token): else: return "patch" - #TODO: if all is working fine, this part can be cleaned up eventually + # TODO: if all is working fine, this part can be cleaned up eventually # if any(label in labels for label in patch_labels): # return "patch" From 32bf6cb3e0d2f44a9378fbba1954fe99d6338fe1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 18:02:34 +0100 Subject: [PATCH 67/69] fix last workfile --- openpype/lib/avalon_context.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 0b1d09908c..8e9fff5f67 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1893,7 +1893,9 @@ def get_last_workfile_with_version( # Replace `{version}` with group regex file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) file_template = re.sub(r"{comment.*?}", r".+?", file_template) - filename = StringTemplate.format_strict_template(file_template, fill_data) + file_template = StringTemplate.format_strict_template( + file_template, fill_data + ) # Match with ignore case on Windows due to the Windows # OS not being case-sensitive. 
This avoids later running From 1b5ca6a86ef977f45f501d8a3571c4398915b740 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 19 Mar 2022 03:35:01 +0000 Subject: [PATCH 68/69] [Automated] Bump version --- CHANGELOG.md | 27 +++++++++++++++++++++++---- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 25 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f3c7820d8f..f20276cbd7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,28 @@ # Changelog -## [3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-17) +## [3.9.2-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...3.9.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.1...HEAD) + +**🚀 Enhancements** + +- CI: change the version bump logic [\#2919](https://github.com/pypeclub/OpenPype/pull/2919) +- Deadline: Add headless argument [\#2916](https://github.com/pypeclub/OpenPype/pull/2916) +- Ftrack: Fill workfile in custom attribute [\#2906](https://github.com/pypeclub/OpenPype/pull/2906) +- Settings UI: Add simple tooltips for settings entities [\#2901](https://github.com/pypeclub/OpenPype/pull/2901) + +**🐛 Bug fixes** + +- Ftrack: Missing Ftrack id after editorial publish [\#2905](https://github.com/pypeclub/OpenPype/pull/2905) +- AfterEffects: Fix rendering for single frame in DL [\#2875](https://github.com/pypeclub/OpenPype/pull/2875) + +**🔀 Refactored code** + +- General: Move formatting and workfile functions [\#2914](https://github.com/pypeclub/OpenPype/pull/2914) + +## [3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-18) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.1-nightly.3...3.9.1) **🚀 Enhancements** @@ -22,7 +42,6 @@ - General: Remove forgotten use of avalon Creator [\#2885](https://github.com/pypeclub/OpenPype/pull/2885) - General: Avoid circular import [\#2884](https://github.com/pypeclub/OpenPype/pull/2884) - 
Fixes for attaching loaded containers \(\#2837\) [\#2874](https://github.com/pypeclub/OpenPype/pull/2874) -- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826) **🔀 Refactored code** @@ -75,6 +94,7 @@ - Maya: Stop creation of reviews for Cryptomattes [\#2832](https://github.com/pypeclub/OpenPype/pull/2832) - Deadline: Remove recreated event [\#2828](https://github.com/pypeclub/OpenPype/pull/2828) - Deadline: Added missing events folder [\#2827](https://github.com/pypeclub/OpenPype/pull/2827) +- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826) - Settings: Missing document with OP versions may break start of OpenPype [\#2825](https://github.com/pypeclub/OpenPype/pull/2825) - Deadline: more detailed temp file name for environment json [\#2824](https://github.com/pypeclub/OpenPype/pull/2824) - General: Host name was formed from obsolete code [\#2821](https://github.com/pypeclub/OpenPype/pull/2821) @@ -92,7 +112,6 @@ - General: Move change context functions [\#2839](https://github.com/pypeclub/OpenPype/pull/2839) - Tools: Don't use avalon tools code [\#2829](https://github.com/pypeclub/OpenPype/pull/2829) - Move Unreal Implementation to OpenPype [\#2823](https://github.com/pypeclub/OpenPype/pull/2823) -- General: Extract template formatting from anatomy [\#2766](https://github.com/pypeclub/OpenPype/pull/2766) ## [3.8.2](https://github.com/pypeclub/OpenPype/tree/3.8.2) (2022-02-07) diff --git a/openpype/version.py b/openpype/version.py index 1ef25e3f48..2390309e76 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.1" +__version__ = "3.9.2-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 7c09495a99..90e264d456 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.9.1" # OpenPype +version = 
"3.9.2-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 9d98d5ea2e579c704a92b5c68c0f07edd49005d7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 09:50:34 +0100 Subject: [PATCH 69/69] fix import of 'register_event_callback' --- openpype/hosts/hiero/api/events.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/hiero/api/events.py b/openpype/hosts/hiero/api/events.py index 9439199933..7fab3edfc8 100644 --- a/openpype/hosts/hiero/api/events.py +++ b/openpype/hosts/hiero/api/events.py @@ -1,12 +1,12 @@ import os import hiero.core.events from openpype.api import Logger +from openpype.lib import register_event_callback from .lib import ( sync_avalon_data_to_workfile, launch_workfiles_app, selection_changed_timeline, before_project_save, - register_event_callback ) from .tags import add_tags_to_workfile from .menu import update_menu_task_label