From 51957dd3aec5d09c83ffca921966229d4a382b3f Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Wed, 2 Mar 2022 15:37:53 +0700 Subject: [PATCH 001/196] Deformer node ids validation plugin for Maya --- .../validate_node_ids_deformer_transfer.py | 105 ++++++++++++++++++ 1 file changed, 105 insertions(+) create mode 100644 openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py new file mode 100644 index 0000000000..67b4aff136 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py @@ -0,0 +1,105 @@ +from maya import cmds + +import pyblish.api +import openpype.api +import openpype.hosts.maya.api.action +from openpype.hosts.maya.api import lib + + +class ValidateNodeIdsDeformerTransfer(pyblish.api.InstancePlugin): + """Validate if deformed shapes have related IDs to the original + shapes. + + When a deformer is applied in the scene on a mesh, + Maya creates a new "deformer" shape node for the mesh. + This new node does not get the original ID and later references + to the original node ID don't match. + + This validator checks whether the IDs are valid on all the shape + nodes in the instance. + """ + + order = openpype.api.ValidateContentsOrder + families = ['rig'] + hosts = ['maya'] + label = 'Deformed shape ids transferred' + actions = [ + openpype.hosts.maya.api.action.SelectInvalidAction, + openpype.api.RepairAction + ] + + def process(self, instance): + """Process all the nodes in the instance""" + + # Ensure nodes with sibling share the same ID + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + "Shapes found that are considered 'Deformed'" + " with invalid object ids: {0}".format(invalid) + ) + + @classmethod + def get_invalid(cls, instance): + """Get all nodes which do not match the criteria""" + + shapes = cmds.ls(instance[:], + dag=True, + leaf=True, + shapes=True, + long=True, + noIntermediate=True) + + invalid = [] + for shape in shapes: + sibling_id = cls._get_id_from_sibling(shape) + if not sibling_id: + continue + + current_id = lib.get_id(shape) + if current_id != sibling_id: + invalid.append(shape) + + return invalid + + @classmethod + def _get_id_from_sibling(cls, node): + """In some cases, the history of the deformed shapes cannot be used + to get the original shape, as the relation with the orignal shape + has been lost. + The original shape can be found as a sibling of the deformed shape + (sharing same transform parent), which has the "intermediate object" + attribute set. + The ID of that shape node can then be transferred to the deformed + shape node. 
+ """ + + # Get long name + node = cmds.ls(node, long=True)[0] + + parent = cmds.listRelatives(node, parent=True, fullPath=True) + + # Get siblings of same type + node_type = cmds.nodeType(node) + similar_nodes = cmds.listRelatives(parent, type=node_type, fullPath=1) + # Exclude itself + similar_nodes = [x for x in similar_nodes if x != node] + + for similar_node in similar_nodes: + # Make sure it is an "intermediate object" + if cmds.getAttr(similar_node + ".io"): + _id = lib.get_id(similar_node) + if _id: + return _id + + @classmethod + def repair(cls, instance): + + for node in cls.get_invalid(instance): + # Get the original id from sibling + sibling_id = cls._get_id_from_sibling(node) + if not sibling_id: + cls.log.error("Could not find ID from sibling for '%s'", node) + continue + + lib.set_id(node, sibling_id, overwrite=True) From d714e52921ca6a36d2568e1a9e98fc7da8085662 Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Wed, 2 Mar 2022 18:47:07 +0700 Subject: [PATCH 002/196] Refactor to existing lib function + plugin --- openpype/hosts/maya/api/lib.py | 30 ++++- ...date_animation_out_set_related_node_ids.py | 4 +- .../validate_node_ids_deformed_shapes.py | 4 +- .../validate_node_ids_deformer_transfer.py | 105 ------------------ .../publish/validate_rig_out_set_node_ids.py | 16 +-- 5 files changed, 38 insertions(+), 121 deletions(-) delete mode 100644 openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 41528f20ba..2f7a09d4c4 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1751,18 +1751,24 @@ def remove_other_uv_sets(mesh): cmds.removeMultiInstance(attr, b=True) -def get_id_from_history(node): +def get_id_from_sibling(node, history_only=True): """Return first node id in the history chain that matches this node. The nodes in history must be of the exact same node type and must be parented under the same parent. + If no matching node is found in history, the siblings of the node + are checked. Additionally to having the same parent, the sibling must + be marked as 'intermediate object'. + Args: - node (str): node to retrieve the + node (str): node to retrieve the history from + history_only (bool): also looks in node's siblings if True + and if nothing found in history Returns: - str or None: The id from the node in history or None when no id found - on any valid nodes in the history. + str or None: The id from the sibling node or None when no id found + on any valid nodes in the history or siblings. 
""" @@ -1791,6 +1797,22 @@ def get_id_from_history(node): if _id: return _id + if not history_only: + # Get siblings of same type + similar_nodes = cmds.listRelatives(parent, + type=node_type, + fullPath=True) + # Exclude itself + similar_nodes = [x for x in similar_nodes if x != node] + + for similar_node in similar_nodes: + # Check if "intermediate object" + if cmds.getAttr(similar_node + ".io"): + _id = get_id(similar_node) + if _id: + return _id + + # Project settings def set_scene_fps(fps, update=True): diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py index 00f0d38775..7c1c695237 100644 --- a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -65,7 +65,7 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): invalid.append(node) continue - history_id = lib.get_id_from_history(node) + history_id = lib.get_id_from_sibling(node) if history_id is not None and node_id != history_id: invalid.append(node) @@ -76,7 +76,7 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): for node in cls.get_invalid(instance): # Get the original id from history - history_id = lib.get_id_from_history(node) + history_id = lib.get_id_from_sibling(node) if not history_id: cls.log.error("Could not find ID in history for '%s'", node) continue diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py index a4d4d2bcc2..0324be9fc9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py @@ -48,7 +48,7 @@ class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin): invalid = [] for shape in shapes: - history_id = lib.get_id_from_history(shape) + history_id = lib.get_id_from_sibling(shape) if history_id: current_id = lib.get_id(shape) if current_id != history_id: @@ -61,7 +61,7 @@ class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin): for node in cls.get_invalid(instance): # Get the original id from history - history_id = lib.get_id_from_history(node) + history_id = lib.get_id_from_sibling(node) if not history_id: cls.log.error("Could not find ID in history for '%s'", node) continue diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py deleted file mode 100644 index 67b4aff136..0000000000 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_deformer_transfer.py +++ /dev/null @@ -1,105 +0,0 @@ -from maya import cmds - -import pyblish.api -import openpype.api -import openpype.hosts.maya.api.action -from openpype.hosts.maya.api import lib - - -class ValidateNodeIdsDeformerTransfer(pyblish.api.InstancePlugin): - """Validate if deformed shapes have related IDs to the original - shapes. - - When a deformer is applied in the scene on a mesh, - Maya creates a new "deformer" shape node for the mesh. - This new node does not get the original ID and later references - to the original node ID don't match. - - This validator checks whether the IDs are valid on all the shape - nodes in the instance. 
- """ - - order = openpype.api.ValidateContentsOrder - families = ['rig'] - hosts = ['maya'] - label = 'Deformed shape ids transferred' - actions = [ - openpype.hosts.maya.api.action.SelectInvalidAction, - openpype.api.RepairAction - ] - - def process(self, instance): - """Process all the nodes in the instance""" - - # Ensure nodes with sibling share the same ID - invalid = self.get_invalid(instance) - if invalid: - raise RuntimeError( - "Shapes found that are considered 'Deformed'" - " with invalid object ids: {0}".format(invalid) - ) - - @classmethod - def get_invalid(cls, instance): - """Get all nodes which do not match the criteria""" - - shapes = cmds.ls(instance[:], - dag=True, - leaf=True, - shapes=True, - long=True, - noIntermediate=True) - - invalid = [] - for shape in shapes: - sibling_id = cls._get_id_from_sibling(shape) - if not sibling_id: - continue - - current_id = lib.get_id(shape) - if current_id != sibling_id: - invalid.append(shape) - - return invalid - - @classmethod - def _get_id_from_sibling(cls, node): - """In some cases, the history of the deformed shapes cannot be used - to get the original shape, as the relation with the orignal shape - has been lost. - The original shape can be found as a sibling of the deformed shape - (sharing same transform parent), which has the "intermediate object" - attribute set. - The ID of that shape node can then be transferred to the deformed - shape node. - """ - - # Get long name - node = cmds.ls(node, long=True)[0] - - parent = cmds.listRelatives(node, parent=True, fullPath=True) - - # Get siblings of same type - node_type = cmds.nodeType(node) - similar_nodes = cmds.listRelatives(parent, type=node_type, fullPath=1) - # Exclude itself - similar_nodes = [x for x in similar_nodes if x != node] - - for similar_node in similar_nodes: - # Make sure it is an "intermediate object" - if cmds.getAttr(similar_node + ".io"): - _id = lib.get_id(similar_node) - if _id: - return _id - - @classmethod - def repair(cls, instance): - - for node in cls.get_invalid(instance): - # Get the original id from sibling - sibling_id = cls._get_id_from_sibling(node) - if not sibling_id: - cls.log.error("Could not find ID from sibling for '%s'", node) - continue - - lib.set_id(node, sibling_id, overwrite=True) diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index e2090080f6..c1029366e8 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -51,10 +51,10 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): noIntermediate=True) for shape in shapes: - history_id = lib.get_id_from_history(shape) - if history_id: + sibling_id = lib.get_id_from_sibling(shape, history_only=False) + if sibling_id: current_id = lib.get_id(shape) - if current_id != history_id: + if current_id != sibling_id: invalid.append(shape) return invalid @@ -63,10 +63,10 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): def repair(cls, instance): for node in cls.get_invalid(instance): - # Get the original id from history - history_id = lib.get_id_from_history(node) - if not history_id: - cls.log.error("Could not find ID in history for '%s'", node) + # Get the original id from sibling + sibling_id = lib.get_id_from_sibling(node, history_only=False) + if not sibling_id: + cls.log.error("Could not find ID in siblings for '%s'", node) continue - lib.set_id(node, history_id, 
overwrite=True) + lib.set_id(node, sibling_id, overwrite=True) From c95752be966e5f99f0ac25490a035a47c1264f1a Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Thu, 3 Mar 2022 14:57:35 +0700 Subject: [PATCH 003/196] 'history only' as plugin setting --- .../publish/validate_rig_out_set_node_ids.py | 9 +++++++-- .../schemas/schema_maya_publish.json | 20 +++++++++++++++++++ 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index c1029366e8..c272c5c485 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -24,6 +24,7 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): openpype.hosts.maya.api.action.SelectInvalidAction, openpype.api.RepairAction ] + allow_history_only = False def process(self, instance): """Process all meshes""" @@ -51,7 +52,9 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): noIntermediate=True) for shape in shapes: - sibling_id = lib.get_id_from_sibling(shape, history_only=False) + sibling_id = \ + lib.get_id_from_sibling(shape, + history_only=cls.allow_history_only) if sibling_id: current_id = lib.get_id(shape) if current_id != sibling_id: @@ -64,7 +67,9 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): for node in cls.get_invalid(instance): # Get the original id from sibling - sibling_id = lib.get_id_from_sibling(node, history_only=False) + sibling_id = \ + lib.get_id_from_sibling(node, + history_only=cls.allow_history_only) if not sibling_id: cls.log.error("Could not find ID in siblings for '%s'", node) continue diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 7c9a5a6b46..0c82997cce 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -396,6 +396,26 @@ "label": "Validate Rig Controllers" } ] + }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "ValidateRigOutSetNodeIds", + "label": "Validate Rig Out Set Node Ids", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "allow_history_only", + "label": "Allow history only" + } + ] } ] }, From acd86c30914f1a2e52dcd3e2f3c9a63d9b3be7d7 Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Thu, 3 Mar 2022 16:25:38 +0700 Subject: [PATCH 004/196] Full attribute name for readability --- openpype/hosts/maya/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 2f7a09d4c4..c9e10c7041 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1807,7 +1807,7 @@ def get_id_from_sibling(node, history_only=True): for similar_node in similar_nodes: # Check if "intermediate object" - if cmds.getAttr(similar_node + ".io"): + if cmds.getAttr(similar_node + ".intermediateObject"): _id = get_id(similar_node) if _id: return _id From cd498441a6aa86d02cc2deb8be6c33102797e1ff Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Thu, 3 Mar 2022 16:55:40 +0700 Subject: [PATCH 005/196] Code style fix --- .../publish/validate_rig_out_set_node_ids.py | 14 
++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index c272c5c485..ed1d36261a 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -52,9 +52,10 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): noIntermediate=True) for shape in shapes: - sibling_id = \ - lib.get_id_from_sibling(shape, - history_only=cls.allow_history_only) + sibling_id = lib.get_id_from_sibling( + shape, + history_only=cls.allow_history_only + ) if sibling_id: current_id = lib.get_id(shape) if current_id != sibling_id: @@ -67,9 +68,10 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): for node in cls.get_invalid(instance): # Get the original id from sibling - sibling_id = \ - lib.get_id_from_sibling(node, - history_only=cls.allow_history_only) + sibling_id = lib.get_id_from_sibling( + node, + history_only=cls.allow_history_only + ) if not sibling_id: cls.log.error("Could not find ID in siblings for '%s'", node) continue From 4ba40f2a175ec78ac8371dbf62ea2a3eeb61998b Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Thu, 3 Mar 2022 17:48:40 +0700 Subject: [PATCH 006/196] Exact type for siblings --- openpype/hosts/maya/api/lib.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index c9e10c7041..bbd7786b36 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1802,6 +1802,8 @@ def get_id_from_sibling(node, history_only=True): similar_nodes = cmds.listRelatives(parent, type=node_type, fullPath=True) + similar_nodes = cmds.ls(similar_nodes, exactType=node_type, long=True) + # Exclude itself similar_nodes = [x for x in similar_nodes if x != node] From 024a7220fe36f8a3df60347df10873aa01a1cd6b Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Thu, 3 Mar 2022 17:52:18 +0700 Subject: [PATCH 007/196] Plugin setting default value --- openpype/settings/defaults/project_settings/maya.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index a756071106..b6fa3719ef 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -351,6 +351,10 @@ "optional": true, "active": true }, + "ValidateRigOutSetNodeIds": { + "enabled": true, + "allow_history_only": false + }, "ValidateCameraAttributes": { "enabled": false, "optional": true, From 3b0cee19ba2b3f9351e87c49f690950d86587b90 Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Thu, 3 Mar 2022 19:20:52 +0700 Subject: [PATCH 008/196] Adapted/corrected comment --- openpype/hosts/maya/api/lib.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index bbd7786b36..62de5a96eb 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1757,14 +1757,16 @@ def get_id_from_sibling(node, history_only=True): The nodes in history must be of the exact same node type and must be parented under the same parent. - If no matching node is found in history, the siblings of the node - are checked. Additionally to having the same parent, the sibling must - be marked as 'intermediate object'. 
+    Optionally, if no matching node is found in the history, all the
+    siblings of the node that are of the same type are checked.
+    In addition to having the same parent, the sibling must be marked
+    as 'intermediate object'.
 
     Args:
         node (str): node to retrieve the history from
-        history_only (bool): also looks in node's siblings if True
-            and if nothing found in history
+        history_only (bool): if False and nothing is found in the history,
+            look for an 'intermediate object' in all the node's siblings
+            of the same type
 
     Returns:
         str or None: The id from the sibling node or None when no id found

From b86201546fbd498c966ccda59d659b9f61ca8da6 Mon Sep 17 00:00:00 2001
From: Bo Zhou
Date: Fri, 4 Mar 2022 18:16:19 +0900
Subject: [PATCH 009/196] create dummy creator plugin for multiverse usd

---
 .../hosts/maya/plugins/create/create_multiverse_usd.py | 10 ++++++++++
 1 file changed, 10 insertions(+)
 create mode 100644 openpype/hosts/maya/plugins/create/create_multiverse_usd.py

diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py
new file mode 100644
index 0000000000..2dc57823f1
--- /dev/null
+++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py
@@ -0,0 +1,10 @@
+from openpype.hosts.maya.api import plugin
+
+
+class CreateMultiverseUsd(plugin.Creator):
+    """Multiverse USD data"""
+
+    name = "usd"
+    label = "Multiverse USD"
+    family = "usd"
+    icon = "cubes"

From 2cb1925790555c7bea45e343ebce7ac25cd7d664 Mon Sep 17 00:00:00 2001
From: Bo Zhou
Date: Fri, 4 Mar 2022 18:16:48 +0900
Subject: [PATCH 010/196] create first version of multiverse usd extractor

---
 .../plugins/publish/extract_multiverse_usd.py | 54 +++++++++++++++++++
 1 file changed, 54 insertions(+)
 create mode 100644 openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py

diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py
new file mode 100644
index 0000000000..72b1dcbbe5
--- /dev/null
+++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py
@@ -0,0 +1,54 @@
+import os
+
+import avalon.maya
+import openpype.api
+
+from maya import cmds
+
+
+class ExtractMultiverseUsd(openpype.api.Extractor):
+    """Extractor for USD by Multiverse."""
+
+    label = "Extract Multiverse USD"
+    hosts = ["maya"]
+    families = ["usd"]
+
+    def process(self, instance):
+        # Load the plugin first
+        cmds.loadPlugin("MultiverseForMaya", quiet=True)
+
+        # Define output file path
+        staging_dir = self.staging_dir(instance)
+        file_name = "{}.usd".format(instance.name)
+        file_path = os.path.join(staging_dir, file_name)
+        file_path = file_path.replace('\\', '/')
+
+        # Perform extraction
+        self.log.info("Performing extraction ...")
+
+        with avalon.maya.maintained_selection():
+            members = instance.data("setMembers")
+            members = cmds.ls(members,
+                              dag=True,
+                              shapes=True,
+                              type=("mesh"),
+                              noIntermediate=True,
+                              long=True)
+
+            # TODO: Deal with asset, composition, override with options.
+ import multiverse + options = multiverse.AssetWriteOptions() + multiverse.WriteAsset(file_path, members, options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'usd', + 'ext': 'usd', + 'files': file_name, + "stagingDir": staging_dir + } + instance.data["representations"].append(representation) + + self.log.info("Extracted {} to {}".format(instance, file_path)) From d155f6024903070ebbcfd942e89dff49541a59e7 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Tue, 8 Mar 2022 12:11:40 +0900 Subject: [PATCH 011/196] fill attributes to creator CreateMultiverseUsd --- .../plugins/create/create_multiverse_usd.py | 48 ++++++++++++++++++- 1 file changed, 47 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py index 2dc57823f1..fcc6ce231d 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py @@ -1,4 +1,4 @@ -from openpype.hosts.maya.api import plugin +from openpype.hosts.maya.api import plugin, lib class CreateMultiverseUsd(plugin.Creator): @@ -8,3 +8,49 @@ class CreateMultiverseUsd(plugin.Creator): label = "Multiverse USD" family = "usd" icon = "cubes" + + def __init__(self, *args, **kwargs): + super(CreateMultiverseUsd, self).__init__(*args, **kwargs) + + self.data["stripNamespaces"] = False + self.data["mergeTransformAndShape"] = False + self.data["writeAncestors"] = True + self.data["flattenParentXforms"] = False + self.data["writeSparseOverrides"] = False + self.data["useMetaPrimPath"] = False + self.data["customRootPath"] = '' + self.data["customAttributes"] = '' + self.data["nodeTypesToIgnore"] = '' + self.data["writeMeshes"] = True + self.data["writeCurves"] = True + self.data["writeParticles"] = True + self.data["writeCameras"] = False + self.data["writeLights"] = False + self.data["writeJoints"] = False + self.data["writeCollections"] = False + self.data["writePositions"] = True + self.data["writeNormals"] = True + self.data["writeUVs"] = True + self.data["writeColorSets"] = False + self.data["writeTangents"] = False + self.data["writeRefPositions"] = False + self.data["writeBlendShapes"] = False + self.data["writeDisplayColor"] = False + self.data["writeSkinWeights"] = False + self.data["writeMaterialAssignment"] = False + self.data["writeHardwareShader"] = False + self.data["writeShadingNetworks"] = False + self.data["writeTransformMatrix"] = True + self.data["writeUsdAttributes"] = False + self.data["timeVaryingTopology"] = False + self.data["customMaterialNamespace"] = '' + + animation_data = lib.collect_animation_data() + + self.data["writeTimeRange"] = False + self.data["timeRangeStart"] = animation_data["frameStart"] + self.data["timeRangeEnd"] = animation_data["frameEnd"] + self.data["timeRangeIncrement"] = animation_data["step"] + self.data["timeRangeNumTimeSamples"] = 0 + self.data["timeRangeSamplesSpan"] = 0.0 + self.data["timeRangeFramesPerSecond"] = 24.0 From 1c58a3d1dfbbd2122a83607aebb574ba328f717d Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Tue, 8 Mar 2022 13:00:00 +0900 Subject: [PATCH 012/196] created load plugin to read USD file by multiverse --- .../maya/plugins/load/load_multiverse_usd.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 openpype/hosts/maya/plugins/load/load_multiverse_usd.py diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py 
b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py
new file mode 100644
index 0000000000..b46dbdc56b
--- /dev/null
+++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py
@@ -0,0 +1,45 @@
+from avalon import api
+
+class MultiverseUsdLoader(api.Loader):
+    """Load the USD by Multiverse"""
+
+    families = ["usd"]
+    representations = ["usd", "usda", "usdc", "usdz", "abc"]
+
+    label = "Read USD by Multiverse"
+    order = -10
+    icon = "code-fork"
+    color = "orange"
+    
+    def load(self, context, name=None, namespace=None, options=None):
+
+        import maya.cmds as cmds
+        from openpype.hosts.maya.api.pipeline import containerise
+        from openpype.hosts.maya.api.lib import unique_namespace
+
+        asset = context['asset']['name']
+        namespace = namespace or unique_namespace(
+            asset + "_",
+            prefix="_" if asset[0].isdigit() else "",
+            suffix="_",
+        )
+
+        cmds.loadPlugin("MultiverseForMaya", quiet=True)
+
+        # Root group
+        label = "{}:{}".format(namespace, name)
+        root = cmds.group(name=label, empty=True)
+
+        # Create shape and move it under root
+        import multiverse
+        shape = multiverse.CreateUsdCompound(self.fname)
+        cmds.parent(shape, root)
+
+    def update(self, container, representation):
+        pass
+
+    def switch(self, container, representation):
+        self.update(container, representation)
+
+    def remove(self, container):
+        pass

From 9f83eec3244ce83a43a5c2240f2f989427d7a2c3 Mon Sep 17 00:00:00 2001
From: Bo Zhou
Date: Wed, 9 Mar 2022 10:21:26 +0900
Subject: [PATCH 013/196] improved data for creator of multiverse usd on
 animated parameters

---
 .../maya/plugins/create/create_multiverse_usd.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py
index fcc6ce231d..b7f892deb4 100644
--- a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py
+++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py
@@ -45,12 +45,14 @@ class CreateMultiverseUsd(plugin.Creator):
         self.data["timeVaryingTopology"] = False
         self.data["customMaterialNamespace"] = ''
 
-        animation_data = lib.collect_animation_data()
-
+        # The attributes below are about animated cache.
self.data["writeTimeRange"] = False + self.data["timeRangeNumTimeSamples"] = 0 + self.data["timeRangeSamplesSpan"] = 0.0 + + animation_data = lib.collect_animation_data(True) + self.data["timeRangeStart"] = animation_data["frameStart"] self.data["timeRangeEnd"] = animation_data["frameEnd"] self.data["timeRangeIncrement"] = animation_data["step"] - self.data["timeRangeNumTimeSamples"] = 0 - self.data["timeRangeSamplesSpan"] = 0.0 - self.data["timeRangeFramesPerSecond"] = 24.0 + self.data["timeRangeFramesPerSecond"] = animation_data["fps"] From 2adbe78122c10b159341b78d2e542b4f455658d5 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Wed, 9 Mar 2022 10:27:51 +0900 Subject: [PATCH 014/196] implemented load method of multiverse usd loader --- .../maya/plugins/load/load_multiverse_usd.py | 26 ++++++++++++++++--- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index b46dbdc56b..4e9952b1a1 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -1,5 +1,9 @@ +# -*- coding: utf-8 -*- from avalon import api +import maya.cmds as cmds + + class MultiverseUsdLoader(api.Loader): """Load the USD by Multiverse""" @@ -10,10 +14,9 @@ class MultiverseUsdLoader(api.Loader): order = -10 icon = "code-fork" color = "orange" - + def load(self, context, name=None, namespace=None, options=None): - import maya.cmds as cmds from openpype.hosts.maya.api.pipeline import containerise from openpype.hosts.maya.api.lib import unique_namespace @@ -36,10 +39,25 @@ class MultiverseUsdLoader(api.Loader): cmds.parent(shape, root) def update(self, container, representation): - pass + + path = api.get_representation_path(representation) + + # Update the shape + members = cmds.sets(container['objectName'], query=True) + shapes = cmds.ls(members, type="mvUsdPackedShape", long=True) + + assert len(shapes) == 1, "This is a bug" + + import multiverse + for shape in shapes: + multiverse.SetUsdCompoundAssetPaths(shape, [path]) + + cmds.setAttr(container["objectName"] + ".representation", + str(representation["_id"]), + type="string") def switch(self, container, representation): self.update(container, representation) def remove(self, container): - pass + pass From d6a0d2cd2e4a9ff2ec915eb8c25a9d38ee34286d Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Wed, 9 Mar 2022 10:34:25 +0900 Subject: [PATCH 015/196] use time options for multiverse usd extractor --- .../plugins/publish/extract_multiverse_usd.py | 137 +++++++++++++++++- 1 file changed, 134 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 72b1dcbbe5..a45ebf2f9b 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -13,6 +13,110 @@ class ExtractMultiverseUsd(openpype.api.Extractor): hosts = ["maya"] families = ["usd"] + @property + def options(self): + """Overridable options for Multiverse USD Export + + Given in the following format + - {NAME: EXPECTED TYPE} + + If the overridden option's type does not match, + the option is not included and a warning is logged. 
+ + """ + + return { + "stripNamespaces": bool, + "mergeTransformAndShape": bool, + "writeAncestors": bool, + "flattenParentXforms": bool, + "writeSparseOverrides": bool, + "useMetaPrimPath": bool, + "customRootPath": str, + "customAttributes": str, + "nodeTypesToIgnore": str, + "writeMeshes": bool, + "writeCurves": bool, + "writeParticles": bool, + "writeCameras": bool, + "writeLights": bool, + "writeJoints": bool, + "writeCollections": bool, + "writePositions": bool, + "writeNormals": bool, + "writeUVs": bool, + "writeColorSets": bool, + "writeTangents": bool, + "writeRefPositions": bool, + "writeBlendShapes": bool, + "writeDisplayColor": bool, + "writeSkinWeights": bool, + "writeMaterialAssignment": bool, + "writeHardwareShader": bool, + "writeShadingNetworks": bool, + "writeTransformMatrix": bool, + "writeUsdAttributes": bool, + "timeVaryingTopology": bool, + "customMaterialNamespace": str, + "writeTimeRange": bool, + "timeRangeStart": int, + "timeRangeEnd": int, + "timeRangeIncrement": int, + "timeRangeNumTimeSamples": int, + "timeRangeSamplesSpan": float, + "timeRangeFramesPerSecond": float + } + + @property + def default_options(self): + """The default options for Multiverse USD extraction.""" + start_frame = int(cmds.playbackOptions(query=True, + animationStartTime=True)) + end_frame = int(cmds.playbackOptions(query=True, + animationEndTime=True)) + + return { + "stripNamespaces": False, + "mergeTransformAndShape": False, + "writeAncestors": True, + "flattenParentXforms": False, + "writeSparseOverrides": False, + "useMetaPrimPath": False, + "customRootPath": '', + "customAttributes": '', + "nodeTypesToIgnore": '', + "writeMeshes": True, + "writeCurves": True, + "writeParticles": True, + "writeCameras": False, + "writeLights": False, + "writeJoints": False, + "writeCollections": False, + "writePositions": True, + "writeNormals": True, + "writeUVs": True, + "writeColorSets": False, + "writeTangents": False, + "writeRefPositions": False, + "writeBlendShapes": False, + "writeDisplayColor": False, + "writeSkinWeights": False, + "writeMaterialAssignment": False, + "writeHardwareShader": False, + "writeShadingNetworks": False, + "writeTransformMatrix": True, + "writeUsdAttributes": False, + "timeVaryingTopology": False, + "customMaterialNamespace": '', + "writeTimeRange": False, + "timeRangeStart": 1, + "timeRangeEnd": 1, + "timeRangeIncrement": 1, + "timeRangeNumTimeSamples": 0, + "timeRangeSamplesSpan": 0.0, + "timeRangeFramesPerSecond": 24.0 + } + def process(self, instance): # Load plugin firstly cmds.loadPlugin("MultiverseForMaya", quiet=True) @@ -23,6 +127,10 @@ class ExtractMultiverseUsd(openpype.api.Extractor): file_path = os.path.join(staging_dir, file_name) file_path = file_path.replace('\\', '/') + # Parse export options + options = self.default_options + self.log.info("Export options: {0}".format(options)) + # Perform extraction self.log.info("Performing extraction ...") @@ -34,11 +142,33 @@ class ExtractMultiverseUsd(openpype.api.Extractor): type=("mesh"), noIntermediate=True, long=True) + self.log.info('Collected object {}'.format(members)) # TODO: Deal with asset, composition, overide with options. 
import multiverse - options = multiverse.AssetWriteOptions() - multiverse.WriteAsset(file_path, members, options) + + time_opts = None + if options["writeTimeRange"]: + time_opts = multiverse.TimeOptions() + + time_opts.writeTimeRange = True + + time_range_start = options["timeRangeStart"] + time_range_end = options["timeRangeEnd"] + time_opts.frameRange = (time_range_start, time_range_end) + + time_opts.frameIncrement = options["timeRangeIncrement"] + time_opts.numTimeSamples = options["timeRangeNumTimeSamples"] + time_opts.timeSamplesSpan = options["timeRangeSamplesSpan"] + time_opts.framePerSecond = options["timeRangeFramesPerSecond"] + + asset_write_opts = multiverse.AssetWriteOptions(time_opts) + for (k, v) in options.iteritems(): + if k == "writeTimeRange" or k.startswith("timeRange"): + continue + setattr(asset_write_opts, k, v) + + multiverse.WriteAsset(file_path, members, asset_write_opts) if "representations" not in instance.data: instance.data["representations"] = [] @@ -51,4 +181,5 @@ class ExtractMultiverseUsd(openpype.api.Extractor): } instance.data["representations"].append(representation) - self.log.info("Extracted {} to {}".format(instance, file_path)) + self.log.info("Extracted instance {} to {}".format( + instance.name, file_path)) From 9418a496c8cb96b18644bc9da08904cf4602b053 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 10 Mar 2022 16:32:38 +0900 Subject: [PATCH 016/196] improved multiverse usd loader --- .../maya/plugins/load/load_multiverse_usd.py | 23 ++++++++++++++----- 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index 4e9952b1a1..d5006cccb7 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -30,13 +30,24 @@ class MultiverseUsdLoader(api.Loader): cmds.loadPlugin("MultiverseForMaya", quiet=True) # Root group - label = "{}:{}".format(namespace, name) - root = cmds.group(name=label, empty=True) + rootName = "{}:{}".format(namespace, name) + root = cmds.group(name=rootName, empty=True) - # Create shape and move it under root + # Create shape with transform and move it under root import multiverse - shape = multiverse.CreateUsdCompound(self.fname) - cmds.parent(shape, root) + transform = multiverse.CreateUsdCompound(self.fname) + cmds.parent(transform, root) + + # Rename transform + nodes = [root, transform] + self[:] = nodes + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) def update(self, container, representation): @@ -44,7 +55,7 @@ class MultiverseUsdLoader(api.Loader): # Update the shape members = cmds.sets(container['objectName'], query=True) - shapes = cmds.ls(members, type="mvUsdPackedShape", long=True) + shapes = cmds.ls(members, type="mvUsdCompoundShape", long=True) assert len(shapes) == 1, "This is a bug" From 310c572e65e5505996efc0fd8d1aa7307a385267 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Fri, 11 Mar 2022 11:40:32 +0900 Subject: [PATCH 017/196] added creator for multiverse usd override --- .../create/create_multiverse_usd_over.py | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py new file mode 100644 index 0000000000..522487e99b 
--- /dev/null +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py @@ -0,0 +1,35 @@ +from openpype.hosts.maya.api import plugin, lib + + +class CreateMultiverseUsdOver(plugin.Creator): + """Multiverse USD data""" + + name = "usd" + label = "Multiverse USD" + family = "usd" + icon = "cubes" + + def __init__(self, *args, **kwargs): + super(CreateMultiverseUsdOver, self).__init__(*args, **kwargs) + + self.data["writeAll"] = False + self.data["writeTransforms"] = True + self.data["writeVisibility"] = True + self.data["writeAttributes"] = True + self.data["writeMaterials"] = True + self.data["writeVariants"] = True + self.data["writeVariantsDefinition"] = True + self.data["writeActiveState"] = True + self.data["writeNamespaces"] = False + + # The attributes below are about animated cache. + self.data["writeTimeRange"] = True + self.data["timeRangeNumTimeSamples"] = 0 + self.data["timeRangeSamplesSpan"] = 0.0 + + animation_data = lib.collect_animation_data(True) + + self.data["timeRangeStart"] = animation_data["frameStart"] + self.data["timeRangeEnd"] = animation_data["frameEnd"] + self.data["timeRangeIncrement"] = animation_data["step"] + self.data["timeRangeFramesPerSecond"] = animation_data["fps"] From 8ab9a146525219ff6b73483c2ead4e418b2f2933 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Fri, 11 Mar 2022 12:15:42 +0900 Subject: [PATCH 018/196] Changed name, label family of multiverse usd override creator --- .../hosts/maya/plugins/create/create_multiverse_usd_over.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py index 522487e99b..3bb563d20c 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py @@ -4,9 +4,9 @@ from openpype.hosts.maya.api import plugin, lib class CreateMultiverseUsdOver(plugin.Creator): """Multiverse USD data""" - name = "usd" - label = "Multiverse USD" - family = "usd" + name = "usdOverrideMain" + label = "Multiverse USD Override" + family = "usd_override" icon = "cubes" def __init__(self, *args, **kwargs): From ab4849ab968203e00db2cc4503f92a9faf83be82 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Fri, 11 Mar 2022 12:16:48 +0900 Subject: [PATCH 019/196] Changed name and writeTimeRange of multiverse usd creator --- openpype/hosts/maya/plugins/create/create_multiverse_usd.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py index b7f892deb4..6851e0f6bc 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py @@ -4,7 +4,7 @@ from openpype.hosts.maya.api import plugin, lib class CreateMultiverseUsd(plugin.Creator): """Multiverse USD data""" - name = "usd" + name = "usdMain" label = "Multiverse USD" family = "usd" icon = "cubes" @@ -46,7 +46,7 @@ class CreateMultiverseUsd(plugin.Creator): self.data["customMaterialNamespace"] = '' # The attributes below are about animated cache. 
- self.data["writeTimeRange"] = False + self.data["writeTimeRange"] = True self.data["timeRangeNumTimeSamples"] = 0 self.data["timeRangeSamplesSpan"] = 0.0 From 3151784b1826392228776901abd0f93f8a94da79 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Fri, 11 Mar 2022 12:18:14 +0900 Subject: [PATCH 020/196] Updated multiverse usd extracor for default option values --- .../hosts/maya/plugins/publish/extract_multiverse_usd.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index a45ebf2f9b..565fbd1ee3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -108,9 +108,9 @@ class ExtractMultiverseUsd(openpype.api.Extractor): "writeUsdAttributes": False, "timeVaryingTopology": False, "customMaterialNamespace": '', - "writeTimeRange": False, - "timeRangeStart": 1, - "timeRangeEnd": 1, + "writeTimeRange": True, + "timeRangeStart": start_frame, + "timeRangeEnd": end_frame, "timeRangeIncrement": 1, "timeRangeNumTimeSamples": 0, "timeRangeSamplesSpan": 0.0, From b36e3127dc50314cf7adc72749ebab2bbbc40451 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Fri, 11 Mar 2022 12:20:10 +0900 Subject: [PATCH 021/196] add multiverse usd override extractor plugin --- .../publish/extract_multiverse_usd_over.py | 141 ++++++++++++++++++ 1 file changed, 141 insertions(+) create mode 100644 openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py new file mode 100644 index 0000000000..f1b9ca88f0 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py @@ -0,0 +1,141 @@ +import os + +import avalon.maya +import openpype.api + +from maya import cmds + + +class ExtractMultiverseUsdOverride(openpype.api.Extractor): + """Extractor for USD Override by Multiverse.""" + + label = "Extract Multiverse USD Override" + hosts = ["maya"] + families = ["usd_override"] + + @property + def options(self): + """Overridable options for Multiverse USD Export + + Given in the following format + - {NAME: EXPECTED TYPE} + + If the overridden option's type does not match, + the option is not included and a warning is logged. 
+ + """ + + return { + "writeAll": bool, + "writeTransforms": bool, + "writeVisibility": bool, + "writeAttributes": bool, + "writeMaterials": bool, + "writeVariants": bool, + "writeVariantsDefinition": bool, + "writeActiveState": bool, + "writeNamespaces": bool, + "writeTimeRange": bool, + "timeRangeStart": int, + "timeRangeEnd": int, + "timeRangeIncrement": int, + "timeRangeNumTimeSamples": int, + "timeRangeSamplesSpan": float, + "timeRangeFramesPerSecond": float + } + + @property + def default_options(self): + """The default options for Multiverse USD extraction.""" + start_frame = int(cmds.playbackOptions(query=True, + animationStartTime=True)) + end_frame = int(cmds.playbackOptions(query=True, + animationEndTime=True)) + + return { + "writeAll": False, + "writeTransforms": True, + "writeVisibility": True, + "writeAttributes": True, + "writeMaterials": True, + "writeVariants": True, + "writeVariantsDefinition": True, + "writeActiveState": True, + "writeNamespaces": False, + "writeTimeRange": True, + "timeRangeStart": start_frame, + "timeRangeEnd": end_frame, + "timeRangeIncrement": 1, + "timeRangeNumTimeSamples": 0, + "timeRangeSamplesSpan": 0.0, + "timeRangeFramesPerSecond": 24.0 + } + + def process(self, instance): + # Load plugin firstly + cmds.loadPlugin("MultiverseForMaya", quiet=True) + + # Define output file path + staging_dir = self.staging_dir(instance) + file_name = "{}.usda".format(instance.name) + file_path = os.path.join(staging_dir, file_name) + file_path = file_path.replace('\\', '/') + + # Parse export options + options = self.default_options + self.log.info("Export options: {0}".format(options)) + + # Perform extraction + self.log.info("Performing extraction ...") + + with avalon.maya.maintained_selection(): + members = instance.data("setMembers") + members = cmds.ls(members, + dag=True, + shapes=True, + type=("mvUsdCompoundShape"), + noIntermediate=True, + long=True) + self.log.info('Collected object {}'.format(members)) + + # TODO: Deal with asset, composition, overide with options. 
+ import multiverse + + time_opts = None + if options["writeTimeRange"]: + time_opts = multiverse.TimeOptions() + + time_opts.writeTimeRange = True + + time_range_start = options["timeRangeStart"] + time_range_end = options["timeRangeEnd"] + time_opts.frameRange = (time_range_start, time_range_end) + + time_opts.frameIncrement = options["timeRangeIncrement"] + time_opts.numTimeSamples = options["timeRangeNumTimeSamples"] + time_opts.timeSamplesSpan = options["timeRangeSamplesSpan"] + time_opts.framePerSecond = options["timeRangeFramesPerSecond"] + + over_write_opts = multiverse.OverridesWriteOptions() + for (k, v) in options.iteritems(): + if k == "writeTimeRange" or k.startswith("timeRange"): + continue + setattr(over_write_opts, k, v) + over_write_opts.timeOptions = time_opts + + for member in members: + multiverse.WriteOverrides(file_path, member, over_write_opts) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'usda', + 'ext': 'usda', + 'files': file_name, + "stagingDir": staging_dir + } + instance.data["representations"].append(representation) + + self.log.info("Extracted instance {} to {}".format( + instance.name, file_path)) From bf5f5365b1dc90f51cdf5200420d3a0e6954317d Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Fri, 11 Mar 2022 12:22:07 +0900 Subject: [PATCH 022/196] added new family usd_override --- openpype/plugins/publish/integrate_new.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index e8dab089af..fc98327f2d 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -103,7 +103,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "effect", "xgen", "hda", - "usd" + "usd", + "usd_override" ] exclude_families = ["clip"] db_representation_context_keys = [ From f724e0ca222bd27d6a202ab7814fca449569830a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Mar 2022 13:33:17 +0100 Subject: [PATCH 023/196] OP-2813 - fix for rendering single file from AE in DL Solves issue with rendering .mov or .avi file. 
Added test cae for collect_frames --- openpype/lib/delivery.py | 21 +++++-- .../plugins/publish/submit_publish_job.py | 1 + tests/unit/openpype/lib/test_delivery.py | 57 +++++++++++++++++++ 3 files changed, 75 insertions(+), 4 deletions(-) create mode 100644 tests/unit/openpype/lib/test_delivery.py diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index 9fc65aae8e..f1855d9550 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -13,18 +13,30 @@ def collect_frames(files): Uses clique as most precise solution Args: - files(list): list of source paths + files(list) or (set with single value): list of source paths Returns: (dict): {'/asset/subset_v001.0001.png': '0001', ....} """ collections, remainder = clique.assemble(files, minimum_items=1) + real_file_name = None + if len(files) == 1: + real_file_name = list(files)[0] + sources_and_frames = {} if collections: for collection in collections: src_head = collection.head src_tail = collection.tail + if src_head.endswith("_v"): + # print("Collection gathered incorrectly, not a sequence " + # "just a version found in {}".format(files)) + if len(collections) > 1: + continue + else: + return {real_file_name: None} + for index in collection.indexes: src_frame = collection.format("{padding}") % index src_file_name = "{}{}{}".format(src_head, src_frame, @@ -71,14 +83,15 @@ def path_from_representation(representation, anatomy): def copy_file(src_path, dst_path): """Hardlink file if possible(to save space), copy if not""" - from openpype.lib import create_hard_link # safer importing + from avalon.vendor import filelink # safer importing if os.path.exists(dst_path): return try: - create_hard_link( + filelink.create( src_path, - dst_path + dst_path, + filelink.HARDLINK ) except OSError: shutil.copyfile(src_path, dst_path) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 1de1c37575..964fe003fd 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -599,6 +599,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "files": os.path.basename(remainder), "stagingDir": os.path.dirname(remainder), } + representations.append(rep) if "render" in instance.get("families"): rep.update({ "fps": instance.get("fps"), diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py new file mode 100644 index 0000000000..affe14a89f --- /dev/null +++ b/tests/unit/openpype/lib/test_delivery.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +"""Test suite for delivery functions.""" +from openpype.lib.delivery import collect_frames + + +def test_collect_frames_multi_sequence(): + files = ["Asset_renderCompositingMain_v001.0000.png", + "Asset_renderCompositingMain_v001.0001.png", + "Asset_renderCompositingMain_v001.0002.png"] + ret = collect_frames(files) + + expected = { + "Asset_renderCompositingMain_v001.0000.png": "0000", + "Asset_renderCompositingMain_v001.0001.png": "0001", + "Asset_renderCompositingMain_v001.0002.png": "0002" + } + + print(ret) + assert ret == expected, "Not matching" + + +def test_collect_frames_single_sequence(): + files = ["Asset_renderCompositingMain_v001.0000.png"] + ret = collect_frames(files) + + expected = { + "Asset_renderCompositingMain_v001.0000.png": "0000" + } + + print(ret) + assert ret == expected, "Not matching" + + +def test_collect_frames_single_sequence_as_dict(): + files 
= {"Asset_renderCompositingMain_v001.0000.png"} + ret = collect_frames(files) + + expected = { + "Asset_renderCompositingMain_v001.0000.png": "0000" + } + + print(ret) + assert ret == expected, "Not matching" + + +def test_collect_frames_single_file(): + files = {"Asset_renderCompositingMain_v001.png"} + ret = collect_frames(files) + + expected = { + "Asset_renderCompositingMain_v001.png": None + } + + print(ret) + assert ret == expected, "Not matching" + + From 7ca997de92fd465d9c46b3473f3198a82dd84e2a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Mar 2022 14:14:47 +0100 Subject: [PATCH 024/196] OP-2813 - fix for rendering single file from AE in DL for sequence Solves issue with rendering single frame sequence, eg with 00000 in its file. --- .../publish/submit_aftereffects_deadline.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index 2918b54d4a..c499c14d40 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -6,6 +6,7 @@ import pyblish.api from avalon import api from openpype.lib import env_value_to_bool +from openpype.lib.delivery import collect_frames from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @@ -102,24 +103,18 @@ class AfterEffectsSubmitDeadline( def get_plugin_info(self): deadline_plugin_info = DeadlinePluginInfo() - context = self._instance.context - script_path = context.data["currentFile"] render_path = self._instance.data["expectedFiles"][0] - if len(self._instance.data["expectedFiles"]) > 1: + file_name, frame = list(collect_frames([render_path]).items())[0] + if frame: # replace frame ('000001') with Deadline's required '[#######]' # expects filename in format project_asset_subset_version.FRAME.ext render_dir = os.path.dirname(render_path) file_name = os.path.basename(render_path) - arr = file_name.split('.') - assert len(arr) == 3, \ - "Unable to parse frames from {}".format(file_name) - hashed = '[{}]'.format(len(arr[1]) * "#") - - render_path = os.path.join(render_dir, - '{}.{}.{}'.format(arr[0], hashed, - arr[2])) + hashed = '[{}]'.format(len(frame) * "#") + file_name = file_name.replace(frame, hashed) + render_path = os.path.join(render_dir, file_name) deadline_plugin_info.Comp = self._instance.data["comp_name"] deadline_plugin_info.Version = self._instance.data["app_version"] From 9de8504c4d89e800e4bff2b69376fe6f9f1f3eb2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Mar 2022 14:37:48 +0100 Subject: [PATCH 025/196] OP-2815 - Hound --- tests/unit/openpype/lib/test_delivery.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py index affe14a89f..7c2c92c101 100644 --- a/tests/unit/openpype/lib/test_delivery.py +++ b/tests/unit/openpype/lib/test_delivery.py @@ -54,4 +54,3 @@ def test_collect_frames_single_file(): print(ret) assert ret == expected, "Not matching" - From 28f57a045b714d01113d3efc5317a5c02df63369 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Mon, 14 Mar 2022 12:13:51 +0900 Subject: [PATCH 026/196] added new creator for multiverse usd composition --- .../create/create_multiverse_usd_comp.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 
openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py new file mode 100644 index 0000000000..397f31d577 --- /dev/null +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py @@ -0,0 +1,30 @@ +from openpype.hosts.maya.api import plugin, lib + + +class CreateMultiverseUsdComp(plugin.Creator): + """Create Multiverse USD Composition""" + + name = "usdOverrideMain" + label = "Multiverse USD Override" + family = "usd_override" + icon = "cubes" + + def __init__(self, *args, **kwargs): + super(CreateMultiverseUsdComp, self).__init__(*args, **kwargs) + + self.data["stripNamespaces"] = False + self.data["mergeTransformAndShape"] = False + self.data["flattenContent"] = False + self.data["writePendingOverrides"] = False + + # The attributes below are about animated cache. + self.data["writeTimeRange"] = True + self.data["timeRangeNumTimeSamples"] = 0 + self.data["timeRangeSamplesSpan"] = 0.0 + + animation_data = lib.collect_animation_data(True) + + self.data["timeRangeStart"] = animation_data["frameStart"] + self.data["timeRangeEnd"] = animation_data["frameEnd"] + self.data["timeRangeIncrement"] = animation_data["step"] + self.data["timeRangeFramesPerSecond"] = animation_data["fps"] From 9f54faed1c989ac6fa1b1cb904daa0f37fd24e43 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Mon, 14 Mar 2022 12:37:09 +0900 Subject: [PATCH 027/196] fixed label and family for multiverse usd composition creator --- .../hosts/maya/plugins/create/create_multiverse_usd_comp.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py index 397f31d577..2f57ccec6c 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py @@ -5,8 +5,8 @@ class CreateMultiverseUsdComp(plugin.Creator): """Create Multiverse USD Composition""" name = "usdOverrideMain" - label = "Multiverse USD Override" - family = "usd_override" + label = "Multiverse USD Composition" + family = "usdComposition" icon = "cubes" def __init__(self, *args, **kwargs): From 86d51270e22650effeff64e16f6943b67c622b6f Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Mon, 14 Mar 2022 14:04:58 +0900 Subject: [PATCH 028/196] declare more families for multiverse usd loader --- openpype/hosts/maya/plugins/load/load_multiverse_usd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index d5006cccb7..7214e1cbb6 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -7,7 +7,7 @@ import maya.cmds as cmds class MultiverseUsdLoader(api.Loader): """Load the USD by Multiverse""" - families = ["usd"] + families = ["model", "usd", "usdComposition", "usd_override"] representations = ["usd", "usda", "usdc", "usdz", "abc"] label = "Read USD by Multiverse" From 82929bba785bd1966038002ae63a69bde566f378 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Mon, 14 Mar 2022 14:06:26 +0900 Subject: [PATCH 029/196] renamed family usd_override to usdOverride --- .../hosts/maya/plugins/create/create_multiverse_usd_over.py | 2 +- openpype/hosts/maya/plugins/load/load_multiverse_usd.py | 2 +- 
.../hosts/maya/plugins/publish/extract_multiverse_usd_over.py | 2 +- openpype/plugins/publish/integrate_new.py | 3 ++- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py index 3bb563d20c..bdec96c2ff 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py @@ -6,7 +6,7 @@ class CreateMultiverseUsdOver(plugin.Creator): name = "usdOverrideMain" label = "Multiverse USD Override" - family = "usd_override" + family = "usdOverride" icon = "cubes" def __init__(self, *args, **kwargs): diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index 7214e1cbb6..3370033141 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -7,7 +7,7 @@ import maya.cmds as cmds class MultiverseUsdLoader(api.Loader): """Load the USD by Multiverse""" - families = ["model", "usd", "usdComposition", "usd_override"] + families = ["model", "usd", "usdComposition", "usdOverride"] representations = ["usd", "usda", "usdc", "usdz", "abc"] label = "Read USD by Multiverse" diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py index f1b9ca88f0..e0e65d83d1 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py @@ -11,7 +11,7 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): label = "Extract Multiverse USD Override" hosts = ["maya"] - families = ["usd_override"] + families = ["usdOverride"] @property def options(self): diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index fc98327f2d..4118583787 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -104,7 +104,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "xgen", "hda", "usd", - "usd_override" + "usdComposition", + "usdOverride" ] exclude_families = ["clip"] db_representation_context_keys = [ From b46a7a538787e733f8d77a7cba89b7166bde133a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Mar 2022 10:49:48 +0100 Subject: [PATCH 030/196] OP-2813 - fix wrong merge --- openpype/lib/delivery.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index f1855d9550..5a69afd5aa 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -83,15 +83,14 @@ def path_from_representation(representation, anatomy): def copy_file(src_path, dst_path): """Hardlink file if possible(to save space), copy if not""" - from avalon.vendor import filelink # safer importing + from openpype.lib import create_hard_link # safer importing if os.path.exists(dst_path): return try: - filelink.create( + create_hard_link( src_path, - dst_path, - filelink.HARDLINK + dst_path ) except OSError: shutil.copyfile(src_path, dst_path) From 392963032732c8248b5c66d03b731d2ef5468237 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Mar 2022 12:06:59 +0100 Subject: [PATCH 031/196] OP-2813 - fix hardcoded value Updated regular expression to match version substring to be more generic. 
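For context, a minimal sketch of the case the new expression guards against (illustrative file name; it assumes clique's default format string, so this is an approximation and not part of the patch itself): when a name carries no frame number, the version token is often the only digit run, and clique then reports a one-item "collection" whose padding sits right after a "v". Detecting that shape lets collect_frames treat the file as a single file (frame None) instead of a sequence.

    import re
    import clique

    # Hypothetical single file without a frame number - only "001" is numeric.
    files = ["Asset_renderCompositingMain_v001.mov"]
    collections, _remainder = clique.assemble(files, minimum_items=1)
    col = collections[0]

    # clique reads the version digits as an index, so the formatted pattern
    # comes out roughly as "Asset_renderCompositingMain_v%03d.mov [1]".
    pattern = col.format()

    # Padding preceded by a non-alphanumeric separator plus "v" marks a version,
    # not a frame, so such a collection is skipped by collect_frames().
    print(bool(re.match(".*([^a-zA-Z0-9]v%[0-9]+d).*", pattern)))  # expected: True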
--- openpype/lib/delivery.py | 12 ++++--- tests/unit/openpype/lib/test_delivery.py | 40 ++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 4 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index 5a69afd5aa..ee21b01854 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -4,13 +4,18 @@ import shutil import glob import clique import collections +import re def collect_frames(files): """ Returns dict of source path and its frame, if from sequence - Uses clique as most precise solution + Uses clique as most precise solution, used when anatomy template that + created files is not known. + + Depends that version substring starts with 'v' with any number of + numeric characters after. Args: files(list) or (set with single value): list of source paths @@ -29,9 +34,8 @@ def collect_frames(files): src_head = collection.head src_tail = collection.tail - if src_head.endswith("_v"): - # print("Collection gathered incorrectly, not a sequence " - # "just a version found in {}".format(files)) + # version recognized as a collection + if re.match(".*([^a-zA-Z0-9]v%[0-9]+d).*", collection.format()): if len(collections) > 1: continue else: diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py index 7c2c92c101..1787286032 100644 --- a/tests/unit/openpype/lib/test_delivery.py +++ b/tests/unit/openpype/lib/test_delivery.py @@ -19,6 +19,22 @@ def test_collect_frames_multi_sequence(): assert ret == expected, "Not matching" +def test_collect_frames_multi_sequence_different_format(): + files = ["Asset.v001.renderCompositingMain.0000.png", + "Asset.v001.renderCompositingMain.0001.png", + "Asset.v001.renderCompositingMain.0002.png"] + ret = collect_frames(files) + + expected = { + "Asset.v001.renderCompositingMain.0000.png": "0000", + "Asset.v001.renderCompositingMain.0001.png": "0001", + "Asset.v001.renderCompositingMain.0002.png": "0002" + } + + print(ret) + assert ret == expected, "Not matching" + + def test_collect_frames_single_sequence(): files = ["Asset_renderCompositingMain_v001.0000.png"] ret = collect_frames(files) @@ -31,6 +47,30 @@ def test_collect_frames_single_sequence(): assert ret == expected, "Not matching" +def test_collect_frames_single_sequence_different_format(): + files = ["Asset.v001.renderCompositingMain_0000.png"] + ret = collect_frames(files) + + expected = { + "Asset.v001.renderCompositingMain_0000.png": "0000" + } + + print(ret) + assert ret == expected, "Not matching" + + +def test_collect_frames_single_sequence_withhout_version(): + files = ["pngv001.renderCompositingMain_0000.png"] + ret = collect_frames(files) + + expected = { + "pngv001.renderCompositingMain_0000.png": "0000" + } + + print(ret) + assert ret == expected, "Not matching" + + def test_collect_frames_single_sequence_as_dict(): files = {"Asset_renderCompositingMain_v001.0000.png"} ret = collect_frames(files) From a864b80862d91ace2d46e23aa1fbb10b8a6a7481 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 14 Mar 2022 15:53:41 +0100 Subject: [PATCH 032/196] flame: convert segment comment to attributes wip --- .../hosts/flame/plugins/publish/collect_timeline_instances.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 6424bce3bc..54ff543f21 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ 
b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -16,6 +16,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): audio_track_items = [] + def _get_comment_attributes(self, segment): + comment = segment.comment.get_value() + def process(self, context): project = context.data["flameProject"] sequence = context.data["flameSequence"] @@ -26,6 +29,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # process all sellected with opfapi.maintained_segment_selection(sequence) as segments: for segment in segments: + comment_attributes = self._get_comment_attributes(segment) clip_data = opfapi.get_segment_attributes(segment) clip_name = clip_data["segment_name"] self.log.debug("clip_name: {}".format(clip_name)) From 34b44bec6306c807c3c652872d5b53b8838b0e11 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 14 Mar 2022 19:36:25 +0100 Subject: [PATCH 033/196] flame: resolving attributes from segment comments --- .../publish/collect_timeline_instances.py | 45 +++++++++++++++++-- 1 file changed, 42 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 54ff543f21..9e6c7210fb 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -1,3 +1,4 @@ +import re import pyblish import openpype import openpype.hosts.flame.api as opfapi @@ -16,9 +17,6 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): audio_track_items = [] - def _get_comment_attributes(self, segment): - comment = segment.comment.get_value() - def process(self, context): project = context.data["flameProject"] sequence = context.data["flameSequence"] @@ -30,6 +28,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): with opfapi.maintained_segment_selection(sequence) as segments: for segment in segments: comment_attributes = self._get_comment_attributes(segment) + self.log.debug("_ comment_attributes: {}".format( + pformat(comment_attributes))) + clip_data = opfapi.get_segment_attributes(segment) clip_name = clip_data["segment_name"] self.log.debug("clip_name: {}".format(clip_name)) @@ -130,6 +131,44 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): if marker_data.get("reviewTrack") is not None: instance.data["reviewAudio"] = True + def _get_comment_attributes(self, segment): + comment = segment.comment.get_value() + + # first split comment by comma + split_comments = [] + if "," in comment: + split_comments.extend(iter(comment.split(","))) + elif ";" in comment: + split_comments.extend(iter(comment.split(";"))) + else: + split_comments.append(comment) + + # try to find attributes + attributes = {} + # search for `:` + for split in split_comments: + # make sure we ignore if not `:` in key + if ":" not in split: + continue + + # split to key and value + key, value = split.split(":") + + # condition for resolution in key + if "resolution" in key.lower(): + patern = re.compile(r"([0-9]+)") + res_goup = patern.findall(value) + + # check if axpect was also defined + # 1920x1080x1.5 + aspect = res_goup[2] if len(res_goup) > 2 else 1 + + attributes["resolution"] = { + "width": int(res_goup[0]), + "height": int(res_goup[1]), + "pixelAspect": float(aspect) + } + def _get_head_tail(self, clip_data, first_frame): # calculate head and tail with forward compatibility head = clip_data.get("segment_head") From bd57a0fd56f76c71328020eeaa29aec294ea7efb Mon Sep 17 
00:00:00 2001 From: Jakub Jezek Date: Mon, 14 Mar 2022 19:49:15 +0100 Subject: [PATCH 034/196] flame: add comment attributes to instance data --- .../plugins/publish/collect_timeline_instances.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 9e6c7210fb..dd44627021 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -106,6 +106,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # add resolution self._get_resolution_to_data(inst_data, context) + # add comment attributes if any + inst_data.update(comment_attributes) + # create instance instance = context.create_instance(**inst_data) @@ -163,11 +166,13 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # 1920x1080x1.5 aspect = res_goup[2] if len(res_goup) > 2 else 1 - attributes["resolution"] = { - "width": int(res_goup[0]), - "height": int(res_goup[1]), + attributes.update({ + "resolutionWidth": int(res_goup[0]), + "resolutionHeight": int(res_goup[1]), "pixelAspect": float(aspect) - } + }) + + return attributes def _get_head_tail(self, clip_data, first_frame): # calculate head and tail with forward compatibility From 420122b8c9ec5e3eeefe7f89e8627c06a30f6eed Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 14 Mar 2022 19:57:35 +0100 Subject: [PATCH 035/196] flame: fix regex to get float number too --- .../hosts/flame/plugins/publish/collect_timeline_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index dd44627021..f41f773802 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -159,7 +159,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # condition for resolution in key if "resolution" in key.lower(): - patern = re.compile(r"([0-9]+)") + patern = re.compile(r"([0-9\.]+)") res_goup = patern.findall(value) # check if axpect was also defined From 0a7cbeef6df772531270755c93120dcb4fa20fad Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 15 Mar 2022 14:17:24 +0100 Subject: [PATCH 036/196] flame: refactor to settings configurability --- .../publish/collect_timeline_instances.py | 103 +++++++++++++----- 1 file changed, 76 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index f41f773802..e54ff9a167 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -7,6 +7,10 @@ from openpype.hosts.flame.otio import flame_export # # developer reload modules from pprint import pformat +# constatns +NUM_PATERN = re.compile(r"([0-9\.]+)") +TXT_PATERN = re.compile(r"([a-zA-Z]+)") + class CollectTimelineInstances(pyblish.api.ContextPlugin): """Collect all Timeline segment selection.""" @@ -17,6 +21,16 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): audio_track_items = [] + # TODO: add to settings + # settings + xml_preset_attrs_from_comments = { + "width": "number", + "height": "number", + "pixelRatio": "number", + "resizeType": "string", + 
"resizeFilter": "string" + } + def process(self, context): project = context.data["flameProject"] sequence = context.data["flameSequence"] @@ -137,43 +151,78 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): def _get_comment_attributes(self, segment): comment = segment.comment.get_value() - # first split comment by comma - split_comments = [] - if "," in comment: - split_comments.extend(iter(comment.split(","))) - elif ";" in comment: - split_comments.extend(iter(comment.split(";"))) - else: - split_comments.append(comment) - # try to find attributes - attributes = {} + attributes = { + "pixelRatio": 1.00 + } # search for `:` - for split in split_comments: + for split in self._split_comments(comment): # make sure we ignore if not `:` in key if ":" not in split: continue - # split to key and value - key, value = split.split(":") + self._get_xml_preset_attrs( + attributes, split) - # condition for resolution in key - if "resolution" in key.lower(): - patern = re.compile(r"([0-9\.]+)") - res_goup = patern.findall(value) - - # check if axpect was also defined - # 1920x1080x1.5 - aspect = res_goup[2] if len(res_goup) > 2 else 1 - - attributes.update({ - "resolutionWidth": int(res_goup[0]), - "resolutionHeight": int(res_goup[1]), - "pixelAspect": float(aspect) - }) + if attributes.get("width"): + attributes["resolution"] = { + "resolutionWidth": attributes["width"], + "resolutionHeight": attributes["height"], + "pixelAspect": attributes["pixelRatio"] + } return attributes + def _get_xml_preset_attrs(self, attributes, split): + + # split to key and value + key, value = split.split(":") + + for a_name, a_type in self.xml_preset_attrs_from_comments.items(): + # exclude all not related attributes + if a_name.lower() not in key: + continue + + # get pattern defined by type + pattern = TXT_PATERN if "string" in a_type else NUM_PATERN + res_goup = pattern.findall(value) + + # raise if nothing is found as it is not correctly defined + if not res_goup: + raise ValueError(( + "Value for `{}` attribute is not " + "set correctly: `{}`").format(a_name, split)) + + attributes[a_name] = res_goup[0] + + # condition for resolution in key + if "resolution" in key.lower(): + res_goup = NUM_PATERN.findall(value) + # check if axpect was also defined + # 1920x1080x1.5 + aspect = res_goup[2] if len(res_goup) > 2 else 1 + + width = int(res_goup[0]) + height = int(res_goup[1]) + pixel_ratio = float(aspect) + attributes.update({ + "width": width, + "height": height, + "pixelRatio": pixel_ratio + }) + + def _split_comments(self, comment_string): + # first split comment by comma + split_comments = [] + if "," in comment_string: + split_comments.extend(iter(comment_string.split(","))) + elif ";" in comment_string: + split_comments.extend(iter(comment_string.split(";"))) + else: + split_comments.append(comment_string) + + return split_comments + def _get_head_tail(self, clip_data, first_frame): # calculate head and tail with forward compatibility head = clip_data.get("segment_head") From d408139bb7a9753e0892d648819af4db6093e9e9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 15 Mar 2022 14:26:08 +0100 Subject: [PATCH 037/196] flame: restructure data nesting for better absorption to instance data --- .../publish/collect_timeline_instances.py | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index e54ff9a167..72ad2cd1c3 100644 --- 
a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -153,7 +153,8 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # try to find attributes attributes = { - "pixelRatio": 1.00 + "xml_overrides": { + "pixelRatio": 1.00} } # search for `:` for split in self._split_comments(comment): @@ -164,12 +165,14 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): self._get_xml_preset_attrs( attributes, split) - if attributes.get("width"): - attributes["resolution"] = { - "resolutionWidth": attributes["width"], - "resolutionHeight": attributes["height"], - "pixelAspect": attributes["pixelRatio"] - } + # add xml overides resolution to instance data + xml_overrides = attributes["xml_overrides"] + if xml_overrides.get("width"): + attributes.update({ + "resolutionWidth": xml_overrides["width"], + "resolutionHeight": xml_overrides["height"], + "pixelAspect": xml_overrides["pixelRatio"] + }) return attributes @@ -193,7 +196,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): "Value for `{}` attribute is not " "set correctly: `{}`").format(a_name, split)) - attributes[a_name] = res_goup[0] + attributes["xml_overrides"][a_name] = res_goup[0] # condition for resolution in key if "resolution" in key.lower(): @@ -205,7 +208,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): width = int(res_goup[0]) height = int(res_goup[1]) pixel_ratio = float(aspect) - attributes.update({ + attributes["xml_overrides"].update({ "width": width, "height": height, "pixelRatio": pixel_ratio From 48ce34c58e960e458676bf215b21fb5416ad960d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 15 Mar 2022 14:26:47 +0100 Subject: [PATCH 038/196] flame: add xml_overrides to extracting profiles --- .../flame/plugins/publish/extract_subset_resources.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 5c3aed9672..194557e37a 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -1,9 +1,11 @@ import os from pprint import pformat from copy import deepcopy + import pyblish.api import openpype.api from openpype.hosts.flame import api as opfapi +from pprint import pformat class ExtractSubsetResources(openpype.api.Extractor): @@ -131,6 +133,12 @@ class ExtractSubsetResources(openpype.api.Extractor): "startFrame": frame_start }) + # add any xml overrides collected form segment.comment + modify_xml_data.update(instance.data["xml_overrides"]) + self.log.debug("__ modify_xml_data: {}".format(pformat( + modify_xml_data + ))) + # with maintained duplication loop all presets with opfapi.maintained_object_duplication( exporting_clip) as duplclip: From db7e9cc4aa6b4fd09496139d2fff878e3606312f Mon Sep 17 00:00:00 2001 From: 2-REC Date: Wed, 16 Mar 2022 12:13:47 +0700 Subject: [PATCH 039/196] Warning log if more than 1 shape id --- openpype/hosts/maya/api/lib.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 62de5a96eb..f49c0f689e 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1809,12 +1809,22 @@ def get_id_from_sibling(node, history_only=True): # Exclude itself similar_nodes = [x for x in similar_nodes if x != node] + first_id = None for 
similar_node in similar_nodes: # Check if "intermediate object" if cmds.getAttr(similar_node + ".intermediateObject"): _id = get_id(similar_node) if _id: - return _id + # Check if already found an id + if first_id: + log.warning(("Found more than 1 matching intermediate" + " shape for '{}'. Using id of first" + " found: '{}'".format(node, found_node))) + break + first_id = _id + found_node = similar_node + + return first_id From 802f8a482d6278f2184b0f38ad52fc56efb737cb Mon Sep 17 00:00:00 2001 From: 2-REC Date: Wed, 16 Mar 2022 12:16:33 +0700 Subject: [PATCH 040/196] Variable declaration --- openpype/hosts/maya/api/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index f49c0f689e..e2c07624e6 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1810,6 +1810,7 @@ def get_id_from_sibling(node, history_only=True): similar_nodes = [x for x in similar_nodes if x != node] first_id = None + found_node = None for similar_node in similar_nodes: # Check if "intermediate object" if cmds.getAttr(similar_node + ".intermediateObject"): From a11fe7a5503c993b53c72c16eb306d3447ead29a Mon Sep 17 00:00:00 2001 From: 2-REC Date: Wed, 16 Mar 2022 16:38:17 +0700 Subject: [PATCH 041/196] Fix to allow more than 1 shape with same ids --- openpype/hosts/maya/api/lib.py | 40 ++++++++++++++++++++++------------ 1 file changed, 26 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index b46eff5a4b..f0f6bb706f 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1989,23 +1989,35 @@ def get_id_from_sibling(node, history_only=True): # Exclude itself similar_nodes = [x for x in similar_nodes if x != node] - first_id = None - found_node = None + + # Get all unique ids from siblings in order since + # we consistently take the first one found + sibling_ids = OrderedDict() for similar_node in similar_nodes: # Check if "intermediate object" - if cmds.getAttr(similar_node + ".intermediateObject"): - _id = get_id(similar_node) - if _id: - # Check if already found an id - if first_id: - log.warning(("Found more than 1 matching intermediate" - " shape for '{}'. Using id of first" - " found: '{}'".format(node, found_node))) - break - first_id = _id - found_node = similar_node + if not cmds.getAttr(similar_node + ".intermediateObject"): + continue - return first_id + _id = get_id(similar_node) + if not _id: + continue + + if _id in sibling_ids: + sibling_ids[_id].append(similar_node) + else: + sibling_ids[_id] = [similar_node] + + if sibling_ids: + first_id, found_nodes = next(iter(sibling_ids.items())) + + # Log a warning if we've found multiple unique ids + if len(sibling_ids) > 1: + log.warning(("Found more than 1 intermediate shape with" + " unique id for '{}'. Using id of first" + " found: '{}'".format(node, found_nodes[0]))) + break + + return first_id From 9e4a3cf9504ebbb09c30d3c1d2bcd772eed1cf4d Mon Sep 17 00:00:00 2001 From: 2-REC Date: Wed, 16 Mar 2022 16:41:27 +0700 Subject: [PATCH 042/196] Distraction fix... 
--- openpype/hosts/maya/api/lib.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index f0f6bb706f..f7507d87c5 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1989,7 +1989,6 @@ def get_id_from_sibling(node, history_only=True): # Exclude itself similar_nodes = [x for x in similar_nodes if x != node] - # Get all unique ids from siblings in order since # we consistently take the first one found sibling_ids = OrderedDict() @@ -2015,7 +2014,6 @@ def get_id_from_sibling(node, history_only=True): log.warning(("Found more than 1 intermediate shape with" " unique id for '{}'. Using id of first" " found: '{}'".format(node, found_nodes[0]))) - break return first_id From 78ae6c1c86ab5a8accce4906bf1eabf87ba4a607 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 11:14:20 +0100 Subject: [PATCH 043/196] OP-2813 - fixed one too many frame after loaded clip in Nuke For 0-229 range it previously produced 229 - 0 + 1 = 230 (duration). last = 1 + 230 = 231 (should be 230). --- openpype/hosts/nuke/plugins/load/load_clip.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index a253ba4a9d..ce1693f700 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -97,7 +97,7 @@ class LoadClip(plugin.NukeLoader): last += self.handle_end if not is_sequence: - duration = last - first + 1 + duration = last - first first = 1 last = first + duration elif "#" not in file: @@ -212,7 +212,7 @@ class LoadClip(plugin.NukeLoader): last += self.handle_end if not is_sequence: - duration = last - first + 1 + duration = last - first first = 1 last = first + duration elif "#" not in file: From 8af535adba3303ae759638f9933cb68ec46517bb Mon Sep 17 00:00:00 2001 From: 2-REC Date: Wed, 16 Mar 2022 18:09:50 +0700 Subject: [PATCH 044/196] More adapted error message --- .../publish/validate_animation_out_set_related_node_ids.py | 4 ++-- .../maya/plugins/publish/validate_rig_out_set_node_ids.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py index 7c1c695237..05d63f1d56 100644 --- a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -32,8 +32,8 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): # if a deformer has been created on the shape invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Nodes found with non-related " - "asset IDs: {0}".format(invalid)) + raise RuntimeError("Nodes found with mismatching " + "IDs: {0}".format(invalid)) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index ed1d36261a..cc3723a6e1 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -33,8 +33,8 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): # if a deformer has been created on the shape invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Nodes found 
with non-related " - "asset IDs: {0}".format(invalid)) + raise RuntimeError("Nodes found with mismatching " + "IDs: {0}".format(invalid)) @classmethod def get_invalid(cls, instance): From 87b44b4b14c989c7dce61492650fb54202c37ee0 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 13:48:50 +0100 Subject: [PATCH 045/196] OP-2813 - fix collect_frames when multiple version numbers in path Added new test case. --- openpype/lib/delivery.py | 8 +++----- tests/unit/openpype/lib/test_delivery.py | 12 ++++++++++++ 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index ee21b01854..b9f3f0b106 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -25,10 +25,11 @@ def collect_frames(files): collections, remainder = clique.assemble(files, minimum_items=1) real_file_name = None + sources_and_frames = {} if len(files) == 1: real_file_name = list(files)[0] + sources_and_frames[real_file_name] = None - sources_and_frames = {} if collections: for collection in collections: src_head = collection.head @@ -36,10 +37,7 @@ def collect_frames(files): # version recognized as a collection if re.match(".*([^a-zA-Z0-9]v%[0-9]+d).*", collection.format()): - if len(collections) > 1: - continue - else: - return {real_file_name: None} + continue for index in collection.indexes: src_frame = collection.format("{padding}") % index diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py index 1787286032..de87f99d79 100644 --- a/tests/unit/openpype/lib/test_delivery.py +++ b/tests/unit/openpype/lib/test_delivery.py @@ -47,6 +47,18 @@ def test_collect_frames_single_sequence(): assert ret == expected, "Not matching" +def test_collect_frames_single_sequence_full_path(): + files = ['C:/test_project/assets/locations/Town/work/compositing\\renders\\aftereffects\\test_project_TestAsset_compositing_v001\\TestAsset_renderCompositingMain_v001.mov'] # noqa: E501 + ret = collect_frames(files) + + expected = { + 'C:/test_project/assets/locations/Town/work/compositing\\renders\\aftereffects\\test_project_TestAsset_compositing_v001\\TestAsset_renderCompositingMain_v001.mov': None # noqa: E501 + } + + print(ret) + assert ret == expected, "Not matching" + + def test_collect_frames_single_sequence_different_format(): files = ["Asset.v001.renderCompositingMain_0000.png"] ret = collect_frames(files) From 033eaa324ffec6dce7d5f44dcfe84464a20c961d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 15:10:38 +0100 Subject: [PATCH 046/196] nuke: imageio adding ocio config version 1.2 --- .../projects_schema/schemas/schema_anatomy_imageio.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index 3bec19c3d0..6532f2b6ce 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -195,6 +195,9 @@ { "aces_1.1": "aces_1.1" }, + { + "aces_1.1": "aces_1.2" + }, { "custom": "custom" } From d867b872a894986579709718e2894596ed9e527a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 16:55:51 +0100 Subject: [PATCH 047/196] flame: distribute better value types --- .../publish/collect_timeline_instances.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git 
a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 72ad2cd1c3..44c25f04a2 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -26,7 +26,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): xml_preset_attrs_from_comments = { "width": "number", "height": "number", - "pixelRatio": "number", + "pixelRatio": "float", "resizeType": "string", "resizeFilter": "string" } @@ -183,11 +183,14 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): for a_name, a_type in self.xml_preset_attrs_from_comments.items(): # exclude all not related attributes - if a_name.lower() not in key: + if a_name.lower() not in key.lower(): continue # get pattern defined by type - pattern = TXT_PATERN if "string" in a_type else NUM_PATERN + pattern = TXT_PATERN + if "number" in a_type or "float" in a_type: + pattern = NUM_PATERN + res_goup = pattern.findall(value) # raise if nothing is found as it is not correctly defined @@ -196,7 +199,14 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): "Value for `{}` attribute is not " "set correctly: `{}`").format(a_name, split)) - attributes["xml_overrides"][a_name] = res_goup[0] + if "string" in a_type: + _value = res_goup[0] + if "float" in a_type: + _value = float(res_goup[0]) + if "number" in a_type: + _value = int(res_goup[0]) + + attributes["xml_overrides"][a_name] = _value # condition for resolution in key if "resolution" in key.lower(): From d98d8905afb1ae3a28af03904adc6b4e57114fff Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 17:10:56 +0100 Subject: [PATCH 048/196] Flame: add ignoring toggle to settings parsed attributes from comments can be ignored now --- .../plugins/publish/extract_subset_resources.py | 14 +++++++++----- .../settings/defaults/project_settings/flame.json | 1 + .../projects_schema/schema_project_flame.json | 11 +++++++++++ 3 files changed, 21 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 2e3b84def8..ac50c7c980 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -25,6 +25,7 @@ class ExtractSubsetResources(openpype.api.Extractor): "xml_preset_file": "Jpeg (8-bit).xml", "xml_preset_dir": "", "export_type": "File Sequence", + "ignore_comment_attrs": True, "colorspace_out": "Output - sRGB", "representation_add_range": False, "representation_tags": ["thumbnail"] @@ -34,6 +35,7 @@ class ExtractSubsetResources(openpype.api.Extractor): "xml_preset_file": "Apple iPad (1920x1080).xml", "xml_preset_dir": "", "export_type": "Movie", + "ignore_comment_attrs": True, "colorspace_out": "Output - Rec.709", "representation_add_range": True, "representation_tags": [ @@ -104,6 +106,7 @@ class ExtractSubsetResources(openpype.api.Extractor): preset_dir = preset_config["xml_preset_dir"] export_type = preset_config["export_type"] repre_tags = preset_config["representation_tags"] + ignore_comment_attrs = preset_config["ignore_comment_attrs"] color_out = preset_config["colorspace_out"] # get frame range with handles for representation range @@ -133,11 +136,12 @@ class ExtractSubsetResources(openpype.api.Extractor): "startFrame": frame_start }) - # add any xml overrides collected form segment.comment - 
modify_xml_data.update(instance.data["xml_overrides"]) - self.log.debug("__ modify_xml_data: {}".format(pformat( - modify_xml_data - ))) + if not ignore_comment_attrs: + # add any xml overrides collected form segment.comment + modify_xml_data.update(instance.data["xml_overrides"]) + self.log.debug("__ modify_xml_data: {}".format(pformat( + modify_xml_data + ))) # with maintained duplication loop all presets with opfapi.maintained_object_duplication( diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index ef9c2b1041..c7188b10b5 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -28,6 +28,7 @@ "xml_preset_file": "OpenEXR (16-bit fp DWAA).xml", "xml_preset_dir": "", "export_type": "File Sequence", + "ignore_comment_attrs": false, "colorspace_out": "ACES - ACEScg", "representation_add_range": true, "representation_tags": [] diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index 1f30b45981..e352f8b132 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -189,6 +189,17 @@ ] }, + { + "type": "separator" + }, + { + "type": "boolean", + "key": "ignore_comment_attrs", + "label": "Ignore attributes parsed from a segment comments" + }, + { + "type": "separator" + }, { "key": "colorspace_out", "label": "Output color (imageio)", From 4b83446230d54a804fd2a509a709abab463c44cc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 17:42:27 +0100 Subject: [PATCH 049/196] flame: moving logging outside of condition --- .../flame/plugins/publish/extract_subset_resources.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index ac50c7c980..d52669d955 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -139,9 +139,10 @@ class ExtractSubsetResources(openpype.api.Extractor): if not ignore_comment_attrs: # add any xml overrides collected form segment.comment modify_xml_data.update(instance.data["xml_overrides"]) - self.log.debug("__ modify_xml_data: {}".format(pformat( - modify_xml_data - ))) + + self.log.debug("__ modify_xml_data: {}".format(pformat( + modify_xml_data + ))) # with maintained duplication loop all presets with opfapi.maintained_object_duplication( From d0a79e31f5afb8dcdd5bbcf7d376b89c98d29456 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 17:44:01 +0100 Subject: [PATCH 050/196] hound and suggested changes --- .../hosts/flame/plugins/publish/collect_timeline_instances.py | 4 ++-- .../hosts/flame/plugins/publish/extract_subset_resources.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 44c25f04a2..c6793874c0 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -228,9 +228,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # first split comment by comma 
split_comments = [] if "," in comment_string: - split_comments.extend(iter(comment_string.split(","))) + split_comments.extend(comment_string.split(",")) elif ";" in comment_string: - split_comments.extend(iter(comment_string.split(";"))) + split_comments.extend(comment_string.split(";")) else: split_comments.append(comment_string) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index d52669d955..32f6b9508f 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -5,7 +5,6 @@ from copy import deepcopy import pyblish.api import openpype.api from openpype.hosts.flame import api as opfapi -from pprint import pformat class ExtractSubsetResources(openpype.api.Extractor): From 550f0603d4865da46e8878355208c0a4ff8f639d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Mar 2022 20:47:21 +0100 Subject: [PATCH 051/196] fixing ocio config name --- .../schemas/projects_schema/schemas/schema_anatomy_imageio.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index 6532f2b6ce..acfd4602df 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -196,7 +196,7 @@ "aces_1.1": "aces_1.1" }, { - "aces_1.1": "aces_1.2" + "aces_1.2": "aces_1.2" }, { "custom": "custom" From 99288ee03aa06613734c7feac627281bb94cb938 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 17 Mar 2022 17:29:54 +0900 Subject: [PATCH 052/196] added multiverse usd composition extractor --- .../publish/extract_multiverse_usd_comp.py | 129 ++++++++++++++++++ 1 file changed, 129 insertions(+) create mode 100644 openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py new file mode 100644 index 0000000000..f35096e516 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py @@ -0,0 +1,129 @@ +import os + +import avalon.maya +import openpype.api + +from maya import cmds + + +class ExtractMultiverseUsdComposition(openpype.api.Extractor): + """Extractor of Multiverse USD Composition.""" + + label = "Extract Multiverse USD Composition" + hosts = ["maya"] + families = ["usdComposition"] + + @property + def options(self): + """Overridable options for Multiverse USD Export + + Given in the following format + - {NAME: EXPECTED TYPE} + + If the overridden option's type does not match, + the option is not included and a warning is logged. 
+ + """ + + return { + "stripNamespaces": bool, + "mergeTransformAndShape": bool, + "flattenContent": bool, + "writePendingOverrides": bool, + "writeTimeRange": bool, + "timeRangeStart": int, + "timeRangeEnd": int, + "timeRangeIncrement": int, + "timeRangeNumTimeSamples": int, + "timeRangeSamplesSpan": float, + "timeRangeFramesPerSecond": float + } + + @property + def default_options(self): + """The default options for Multiverse USD extraction.""" + start_frame = int(cmds.playbackOptions(query=True, + animationStartTime=True)) + end_frame = int(cmds.playbackOptions(query=True, + animationEndTime=True)) + + return { + "stripNamespaces": False, + "mergeTransformAndShape": False, + "flattenContent": False, + "writePendingOverrides": False, + "writeTimeRange": True, + "timeRangeStart": start_frame, + "timeRangeEnd": end_frame, + "timeRangeIncrement": 1, + "timeRangeNumTimeSamples": 0, + "timeRangeSamplesSpan": 0.0, + "timeRangeFramesPerSecond": 24.0 + } + + def process(self, instance): + # Load plugin firstly + cmds.loadPlugin("MultiverseForMaya", quiet=True) + + # Define output file path + staging_dir = self.staging_dir(instance) + file_name = "{}.usda".format(instance.name) + file_path = os.path.join(staging_dir, file_name) + file_path = file_path.replace('\\', '/') + + # Parse export options + options = self.default_options + self.log.info("Export options: {0}".format(options)) + + # Perform extraction + self.log.info("Performing extraction ...") + + with avalon.maya.maintained_selection(): + members = instance.data("setMembers") + members = cmds.ls(members, + dag=True, + shapes=True, + type=("mvUsdCompoundShape"), + noIntermediate=True, + long=True) + self.log.info('Collected object {}'.format(members)) + + # TODO: Deal with asset, composition, overide with options. 
+ import multiverse + + time_opts = None + if options["writeTimeRange"]: + time_opts = multiverse.TimeOptions() + + time_opts.writeTimeRange = True + + time_range_start = options["timeRangeStart"] + time_range_end = options["timeRangeEnd"] + time_opts.frameRange = (time_range_start, time_range_end) + + time_opts.frameIncrement = options["timeRangeIncrement"] + time_opts.numTimeSamples = options["timeRangeNumTimeSamples"] + time_opts.timeSamplesSpan = options["timeRangeSamplesSpan"] + time_opts.framePerSecond = options["timeRangeFramesPerSecond"] + + comp_write_opts = multiverse.CompositionWriteOptions() + for (k, v) in options.iteritems(): + if k == "writeTimeRange" or k.startswith("timeRange"): + continue + setattr(comp_write_opts, k, v) + comp_write_opts.timeOptions = time_opts + multiverse.WriteComposition(file_path, members, comp_write_opts) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'usda', + 'ext': 'usda', + 'files': file_name, + "stagingDir": staging_dir + } + instance.data["representations"].append(representation) + + self.log.info("Extracted instance {} to {}".format( + instance.name, file_path)) From d14d9eecc86f090bdc4478161da111688e06a581 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 12:44:30 +0100 Subject: [PATCH 053/196] added simple tooltips for settings entities --- openpype/settings/entities/base_entity.py | 4 ++++ openpype/settings/entities/schemas/README.md | 1 + openpype/tools/settings/settings/base.py | 3 +++ 3 files changed, 8 insertions(+) diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index b5bc44640b..76700d605d 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -28,6 +28,10 @@ class BaseEntity: def __init__(self, schema_data, *args, **kwargs): self.schema_data = schema_data + tooltip = None + if schema_data: + tooltip = schema_data.get("tooltip") + self.tooltip = tooltip # Entity id self._id = uuid4() diff --git a/openpype/settings/entities/schemas/README.md b/openpype/settings/entities/schemas/README.md index dd7601c017..fbfd699937 100644 --- a/openpype/settings/entities/schemas/README.md +++ b/openpype/settings/entities/schemas/README.md @@ -14,6 +14,7 @@ - this keys is not allowed for all inputs as they may have not reason for that - key is validated, can be only once in hierarchy but is not required - currently there are `system settings` and `project settings` +- all entities can have set `"tooltip"` key with description which will be shown in UI ## Inner schema - GUI schemas are huge json files, to be able to split whole configuration into multiple schema there's type `schema` diff --git a/openpype/tools/settings/settings/base.py b/openpype/tools/settings/settings/base.py index 706e2fdcf0..bd48b3a966 100644 --- a/openpype/tools/settings/settings/base.py +++ b/openpype/tools/settings/settings/base.py @@ -30,6 +30,9 @@ class BaseWidget(QtWidgets.QWidget): if not self.entity.gui_type: self.entity.on_change_callbacks.append(self._on_entity_change) + if self.entity.tooltip: + self.setToolTip(self.entity.tooltip) + self.label_widget = None self.create_ui() From fdb880c5440568e1f5f1a8fdc539ae7ddcad15f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Thu, 17 Mar 2022 12:57:34 +0100 Subject: [PATCH 054/196] Update openpype/hosts/flame/plugins/publish/collect_timeline_instances.py Co-authored-by: Jakub Trllo 
<43494761+iLLiCiTiT@users.noreply.github.com> --- .../hosts/flame/plugins/publish/collect_timeline_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index c6793874c0..70340ad7a2 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -188,7 +188,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # get pattern defined by type pattern = TXT_PATERN - if "number" in a_type or "float" in a_type: + if a_type in ("number" , "float"): pattern = NUM_PATERN res_goup = pattern.findall(value) From 0ea4e0acd4f78899df9e2ba6932a11beb88283dc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 17 Mar 2022 12:59:30 +0100 Subject: [PATCH 055/196] improving gap detection in extract review --- openpype/plugins/publish/extract_review.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index cbe1924408..3ecea1f8bd 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -747,10 +747,14 @@ class ExtractReview(pyblish.api.InstancePlugin): collections = clique.assemble(files)[0] assert len(collections) == 1, "Multiple collections found." col = collections[0] - # do nothing if sequence is complete - if list(col.indexes)[0] == start_frame and \ - list(col.indexes)[-1] == end_frame and \ - col.is_contiguous(): + + # do nothing if no gap is found in input range + not_gap = True + for fr in range(start_frame, end_frame + 1): + if fr not in col.indexes: + not_gap = False + + if not_gap: return [] holes = col.holes() From 338aac4de6b0cc37a98e624726611fdd1af5a6e7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 13:05:53 +0100 Subject: [PATCH 056/196] ignore 'team' entities in process event --- openpype/modules/ftrack/lib/ftrack_event_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/lib/ftrack_event_handler.py b/openpype/modules/ftrack/lib/ftrack_event_handler.py index af565c5421..0a70b0e301 100644 --- a/openpype/modules/ftrack/lib/ftrack_event_handler.py +++ b/openpype/modules/ftrack/lib/ftrack_event_handler.py @@ -44,7 +44,7 @@ class BaseEvent(BaseHandler): return self._get_entities( event, session, - ignore=['socialfeed', 'socialnotification'] + ignore=['socialfeed', 'socialnotification', 'team'] ) def get_project_name_from_event(self, session, event, project_id): From 3420c68796a6d8aa6f6dc22c3584aed931c0662d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 13:06:12 +0100 Subject: [PATCH 057/196] use 'first' instead of 'one' when querying user and task --- .../ftrack/event_handlers_server/event_user_assigment.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py b/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py index efc1e76775..96243c8c36 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py +++ b/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py @@ -87,8 +87,8 @@ class UserAssigmentEvent(BaseEvent): if not user_id: return None, None - task = session.query('Task where id is "{}"'.format(task_id)).one() - user = session.query('User where 
id is "{}"'.format(user_id)).one() + task = session.query('Task where id is "{}"'.format(task_id)).first() + user = session.query('User where id is "{}"'.format(user_id)).first() return task, user From eaae7f4828ba68b1e4b11f688357a9bd13c46ec1 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Thu, 17 Mar 2022 13:33:11 +0000 Subject: [PATCH 058/196] [Automated] Bump version --- CHANGELOG.md | 11 ++++++++++- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7790894b7f..6a1da69f13 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,19 +1,28 @@ # Changelog -## [3.9.1-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.9.1-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...HEAD) +**🚀 Enhancements** + +- Nuke: ExtractReviewSlate can handle more codes and profiles [\#2879](https://github.com/pypeclub/OpenPype/pull/2879) +- Flame: sequence used for reference video [\#2869](https://github.com/pypeclub/OpenPype/pull/2869) + **🐛 Bug fixes** +- Pyblish Pype - ensure current state is correct when entering new group order [\#2899](https://github.com/pypeclub/OpenPype/pull/2899) +- SceneInventory: Fix import of load function [\#2894](https://github.com/pypeclub/OpenPype/pull/2894) - Harmony - fixed creator issue [\#2891](https://github.com/pypeclub/OpenPype/pull/2891) - General: Remove forgotten use of avalon Creator [\#2885](https://github.com/pypeclub/OpenPype/pull/2885) - General: Avoid circular import [\#2884](https://github.com/pypeclub/OpenPype/pull/2884) - Fixes for attaching loaded containers \(\#2837\) [\#2874](https://github.com/pypeclub/OpenPype/pull/2874) +- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826) **🔀 Refactored code** - General: Reduce style usage to OpenPype repository [\#2889](https://github.com/pypeclub/OpenPype/pull/2889) +- General: Move loader logic from avalon to openpype [\#2886](https://github.com/pypeclub/OpenPype/pull/2886) ## [3.9.0](https://github.com/pypeclub/OpenPype/tree/3.9.0) (2022-03-14) diff --git a/openpype/version.py b/openpype/version.py index 17e514642d..5eca7c1d90 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.1-nightly.1" +__version__ = "3.9.1-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index 128d1cd615..af448ed24c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.9.1-nightly.1" # OpenPype +version = "3.9.1-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From e208e7976d4f69207bedba5d55a0c925ac6e6b38 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Mar 2022 14:57:28 +0100 Subject: [PATCH 059/196] OP-2813 - fixed duplication of representations nuke.api.plugin.ExporterReview adds representation explicitly via publish_on_farm, so skip adding repre if already there. (Issue in ExtractBurnin other way.) ExporterReview should be probably refactored and publish_on_farm removed altogether. 
--- .../deadline/plugins/publish/submit_publish_job.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index b92fd2fe69..8c0d78cae5 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -601,13 +601,22 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "files": os.path.basename(remainder), "stagingDir": os.path.dirname(remainder), } - representations.append(rep) if "render" in instance.get("families"): rep.update({ "fps": instance.get("fps"), "tags": ["review"] }) - self._solve_families(instance, True) + self._solve_families(instance, True) + + already_there = False + for repre in instance.get("representations", []): + # might be added explicitly before by publish_on_farm + already_there = repre.get("files") == rep["files"] + if already_there: + break + self.log.debug("repre {} already_there".format(repre)) + if not already_there: + representations.append(rep) return representations From 72f84c52baf7fed7b31fd59a995880a5bf5a41b9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 15:06:24 +0100 Subject: [PATCH 060/196] handle missing ftrack id in more cases --- .../event_sync_to_avalon.py | 72 ++++++++++++++++--- 1 file changed, 64 insertions(+), 8 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py index eea6436b53..237bf9fd80 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -199,8 +199,10 @@ class SyncToAvalonEvent(BaseEvent): if proj: ftrack_id = proj["data"].get("ftrackId") if ftrack_id is None: - ftrack_id = self._update_project_ftrack_id() - proj["data"]["ftrackId"] = ftrack_id + self.handle_missing_ftrack_id(proj) + ftrack_id = proj["data"]["ftrackId"] + self._avalon_ents_by_ftrack_id[ftrack_id] = proj + self._avalon_ents_by_ftrack_id[ftrack_id] = proj for ent in ents: ftrack_id = ent["data"].get("ftrackId") @@ -209,15 +211,56 @@ class SyncToAvalonEvent(BaseEvent): self._avalon_ents_by_ftrack_id[ftrack_id] = ent return self._avalon_ents_by_ftrack_id - def _update_project_ftrack_id(self): - ftrack_id = self.cur_project["id"] + def handle_missing_ftrack_id(self, doc): + ftrack_id = doc["data"].get("ftrackId") + if ftrack_id is not None: + return + if doc["type"] == "project": + ftrack_id = self.cur_project["id"] + + self.dbcon.update_one( + {"type": "project"}, + {"$set": {"data.ftrackId": ftrack_id}} + ) + + doc["data"]["ftrackId"] = ftrack_id + return + + if doc["type"] != "asset": + return + + doc_parents = doc.get("data", {}).get("parents") + if doc_parents is None: + return + + entities = self.process_session.query(( + "select id, link from TypedContext" + " where project_id is \"{}\" and name is \"{}\"" + ).format(self.cur_project["id"], doc["name"])).all() + matching_entity = None + for entity in entities: + parents = [] + for item in entity["link"]: + if item["id"] == entity["id"]: + break + low_type = item["type"].lower() + if low_type == "typedcontext": + parents.append(item["name"]) + if doc_parents == parents: + matching_entity = entity + break + + if matching_entity is None: + return + + ftrack_id = matching_entity["id"] self.dbcon.update_one( - {"type": "project"}, + 
{"_id": doc["_id"]}, {"$set": {"data.ftrackId": ftrack_id}} ) - return ftrack_id + self._avalon_ents_by_ftrack_id[ftrack_id] = doc @property def avalon_subsets_by_parents(self): @@ -857,7 +900,14 @@ class SyncToAvalonEvent(BaseEvent): if vis_par is None: vis_par = proj["_id"] parent_ent = self.avalon_ents_by_id[vis_par] - parent_ftrack_id = parent_ent["data"]["ftrackId"] + + parent_ftrack_id = parent_ent["data"].get("ftrackId") + if parent_ftrack_id is None: + self.handle_missing_ftrack_id(parent_ent) + parent_ftrack_id = parent_ent["data"].get("ftrackId") + if parent_ftrack_id is None: + continue + parent_ftrack_ent = self.ftrack_ents_by_id.get( parent_ftrack_id ) @@ -2128,7 +2178,13 @@ class SyncToAvalonEvent(BaseEvent): vis_par = avalon_ent["parent"] parent_ent = self.avalon_ents_by_id[vis_par] - parent_ftrack_id = parent_ent["data"]["ftrackId"] + parent_ftrack_id = parent_ent["data"].get("ftrackId") + if parent_ftrack_id is None: + self.handle_missing_ftrack_id(parent_ent) + parent_ftrack_id = parent_ent["data"].get("ftrackId") + if parent_ftrack_id is None: + continue + if parent_ftrack_id not in entities_dict: entities_dict[parent_ftrack_id] = { "children": [], From 9cc9c1afcbcd28557d72cbd50984ef8990eff52b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 16:04:28 +0100 Subject: [PATCH 061/196] added settings for new action --- .../defaults/project_settings/ftrack.json | 5 ++++ .../schema_project_ftrack.json | 28 +++++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 01831efad1..89bb41a164 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -193,6 +193,11 @@ "Administrator" ] }, + "fill_workfile_attribute": { + "enabled": false, + "custom_attribute_key": "", + "role_list": [] + }, "seed_project": { "enabled": true, "role_list": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index 6d0e2693d4..cb59e9d67e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -589,6 +589,34 @@ } ] }, + { + "type": "dict", + "key": "fill_workfile_attribute", + "label": "Fill workfile Custom attribute", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "label", + "label": "Custom attribute must be Text type added to Task entity type" + }, + { + "type": "text", + "key": "custom_attribute_key", + "label": "Custom attribute key" + }, + { + "type": "list", + "key": "role_list", + "label": "Roles", + "object_type": "text" + } + ] + }, { "type": "dict", "key": "seed_project", From 1cdbe4568ee7ea7c4d72b96e3f434e072973f05b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 16:06:50 +0100 Subject: [PATCH 062/196] initial commit of new action for filling workfile name in custom attribute --- .../action_fill_workfile_attr.py | 289 ++++++++++++++++++ 1 file changed, 289 insertions(+) create mode 100644 openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py new file mode 100644 
index 0000000000..a72b29bdbe --- /dev/null +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -0,0 +1,289 @@ +import collections + +import ftrack_api + +from avalon.api import AvalonMongoDB +from openpype.api import get_project_settings +from openpype.lib import ( + get_workfile_template_key, + get_workdir_data, + Anatomy, + StringTemplate, +) +from openpype_modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib.avalon_sync import create_chunks + + +class FillWorkfileAttributeAction(BaseAction): + """Action fill work filename into custom attribute on tasks. + + Prerequirements are that the project is synchronized so it is possible to + access project anatomy and project/asset documents. Tasks that are not + synchronized are skipped too. + """ + + identifier = "fill.workfile.attr" + label = "OpenPype Admin" + variant = "- Fill workfile attribute" + description = "Precalculate and fill workfile name into a custom attribute" + icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg") + + settings_key = "fill_workfile_attribute" + + def discover(self, session, entities, event): + """ Validate selection. """ + is_valid = False + for ent in event["data"]["selection"]: + # Ignore entities that are not tasks or projects + if ent["entityType"].lower() in ["show", "task"]: + is_valid = True + break + + if is_valid: + is_valid = self.valid_roles(session, entities, event) + return is_valid + + def launch(self, session, entities, event): + task_entities = [] + other_entities = [] + project_entity = None + project_selected = False + for entity in entities: + if project_entity is None: + project_entity = self.get_project_from_entity(entity) + + ent_type_low = entity.entity_type.lower() + if ent_type_low == "project": + project_selected = True + break + + elif ent_type_low == "task": + task_entities.append(entity) + else: + other_entities.append(entity) + + project_name = project_entity["full_name"] + project_settings = get_project_settings(project_name) + custom_attribute_key = ( + project_settings + .get("ftrack", {}) + .get("user_handlers", {}) + .get(self.settings_key, {}) + .get("custom_attribute_key") + ) + if not custom_attribute_key: + return { + "success": False, + "message": "Custom attribute key is not set in settings" + } + + task_obj_type = session.query( + "select id from ObjectType where name is \"Task\"" + ).one() + text_type = session.query( + "select id from CustomAttributeType where name is \"text\"" + ).one() + attr_conf = session.query( + ( + "select id, key from CustomAttributeConfiguration" + " where object_type_id is \"{}\"" + " and type_id is \"{}\"" + " and key is \"{}\"" + ).format( + task_obj_type["id"], text_type["id"], custom_attribute_key + ) + ).first() + if not attr_conf: + return { + "success": False, + "message": ( + "Could not find Task (text) Custom attribute \"{}\"" + ).format(custom_attribute_key) + } + + dbcon = AvalonMongoDB() + dbcon.Session["AVALON_PROJECT"] = project_name + asset_docs = list(dbcon.find({"type": "asset"})) + if project_selected: + asset_docs_with_task_names = self._get_asset_docs_for_project( + session, project_entity, asset_docs + ) + + else: + asset_docs_with_task_names = self._get_tasks_for_selection( + session, other_entities, task_entities, asset_docs + ) + + host_name = "{host}" + project_doc = dbcon.find_one({"type": "project"}) + project_settings = get_project_settings(project_name) + anatomy = Anatomy(project_name) + templates_by_key = {} + + operations = [] + for 
asset_doc, task_entities in asset_docs_with_task_names: + for task_entity in task_entities: + workfile_data = get_workdir_data( + project_doc, asset_doc, task_entity["name"], host_name + ) + workfile_data["version"] = 1 + workfile_data["ext"] = "{ext}" + + task_type = workfile_data["task"]["type"] + template_key = get_workfile_template_key( + task_type, host_name, project_settings=project_settings + ) + if template_key in templates_by_key: + template = templates_by_key[template_key] + else: + template = StringTemplate( + anatomy.templates[template_key]["file"] + ) + templates_by_key[template_key] = template + + result = template.format(workfile_data) + if not result.solved: + # TODO report + pass + else: + table_values = collections.OrderedDict(( + ("configuration_id", attr_conf["id"]), + ("entity_id", task_entity["id"]) + )) + operations.append( + ftrack_api.operation.UpdateEntityOperation( + "ContextCustomAttributeValue", + table_values, + "value", + ftrack_api.symbol.NOT_SET, + str(result) + ) + ) + + if operations: + for sub_operations in create_chunks(operations, 50): + for op in sub_operations: + session.recorded_operations.push(op) + session.commit() + + return True + + def _get_asset_docs_for_project(self, session, project_entity, asset_docs): + asset_docs_task_names = collections.defaultdict(list) + for asset_doc in asset_docs: + asset_data = asset_doc["data"] + asset_tasks = asset_data.get("tasks") + ftrack_id = asset_data.get("ftrackId") + if not asset_tasks or not ftrack_id: + continue + asset_docs_task_names[ftrack_id].append( + (asset_doc, list(asset_tasks.keys())) + ) + + task_entities = session.query(( + "select id, name, parent_id from Task where project_id is {}" + ).format(project_entity["id"])).all() + task_entities_by_parent_id = collections.defaultdict(list) + for task_entity in task_entities: + parent_id = task_entity["parent_id"] + task_entities_by_parent_id[parent_id].append(task_entity) + + output = [] + for ftrack_id, items in asset_docs_task_names.items(): + for item in items: + asset_doc, task_names = item + valid_task_entities = [] + for task_entity in task_entities_by_parent_id[ftrack_id]: + if task_entity["name"] in task_names: + valid_task_entities.append(task_entity) + + if valid_task_entities: + output.append((asset_doc, valid_task_entities)) + + return output + + def _get_tasks_for_selection( + self, session, other_entities, task_entities, asset_docs + ): + all_tasks = object() + asset_docs_by_ftrack_id = {} + asset_docs_by_parent_id = collections.defaultdict(list) + for asset_doc in asset_docs: + asset_data = asset_doc["data"] + ftrack_id = asset_data.get("ftrackId") + parent_id = asset_data.get("visualParent") + asset_docs_by_parent_id[parent_id].append(asset_doc) + if ftrack_id: + asset_docs_by_ftrack_id[ftrack_id] = asset_doc + + missing_docs = set() + all_tasks_ids = set() + task_names_by_ftrack_id = collections.defaultdict(list) + for other_entity in other_entities: + ftrack_id = other_entity["id"] + if ftrack_id not in asset_docs_by_ftrack_id: + missing_docs.add(ftrack_id) + continue + all_tasks_ids.add(ftrack_id) + task_names_by_ftrack_id[ftrack_id] = all_tasks + + for task_entity in task_entities: + parent_id = task_entity["parent_id"] + if parent_id not in asset_docs_by_ftrack_id: + missing_docs.add(parent_id) + continue + + if all_tasks_ids not in all_tasks_ids: + task_names_by_ftrack_id[ftrack_id].append(task_entity["name"]) + + ftrack_ids = set() + asset_doc_with_task_names_by_id = collections.defaultdict(list) + for ftrack_id, task_names in 
task_names_by_ftrack_id.items(): + asset_doc = asset_docs_by_ftrack_id[ftrack_id] + asset_data = asset_doc["data"] + asset_tasks = asset_data.get("tasks") + if not asset_tasks: + # TODO add to report + continue + + if task_names is all_tasks: + task_names = list(asset_tasks.keys()) + else: + new_task_names = [] + for task_name in task_names: + if task_name in asset_tasks: + new_task_names.append(task_name) + else: + # TODO add report + pass + task_names = new_task_names + + if task_names: + ftrack_ids.add(ftrack_id) + asset_doc_with_task_names_by_id[ftrack_id].append( + (asset_doc, task_names) + ) + + task_entities = session.query(( + "select id, name, parent_id from Task where parent_id in ({})" + ).format(self.join_query_keys(ftrack_ids))).all() + task_entitiy_by_parent_id = collections.defaultdict(list) + for task_entity in task_entities: + parent_id = task_entity["parent_id"] + task_entitiy_by_parent_id[parent_id].append(task_entity) + + output = [] + for ftrack_id, items in asset_doc_with_task_names_by_id.items(): + for item in items: + asset_doc, task_names = item + valid_task_entities = [] + for task_entity in task_entitiy_by_parent_id[ftrack_id]: + if task_entity["name"] in task_names: + valid_task_entities.append(task_entity) + if valid_task_entities: + output.append((asset_doc, valid_task_entities)) + return output + + +def register(session): + FillWorkfileAttributeAction(session).register() From 395d567aa2d7285e204ca6fb35a2344e0d7f2f94 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Mar 2022 16:07:08 +0100 Subject: [PATCH 063/196] OP-2813 - fix wrong parsing when short label is used --- openpype/lib/delivery.py | 2 +- tests/unit/openpype/lib/test_delivery.py | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index b9f3f0b106..78d743003b 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -36,7 +36,7 @@ def collect_frames(files): src_tail = collection.tail # version recognized as a collection - if re.match(".*([^a-zA-Z0-9]v%[0-9]+d).*", collection.format()): + if re.match(".*([a-zA-Z0-9]%[0-9]+d).*", collection.format()): continue for index in collection.indexes: diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py index de87f99d79..871ea95df7 100644 --- a/tests/unit/openpype/lib/test_delivery.py +++ b/tests/unit/openpype/lib/test_delivery.py @@ -47,6 +47,30 @@ def test_collect_frames_single_sequence(): assert ret == expected, "Not matching" +def test_collect_frames_single_sequence_shot(): + files = ["testing_sh010_workfileCompositing_v001.aep"] + ret = collect_frames(files) + + expected = { + "testing_sh010_workfileCompositing_v001.aep": None + } + + print(ret) + assert ret == expected, "Not matching" + + +def test_collect_frames_single_sequence_shot_with_frame(): + files = ["testing_sh010_workfileCompositing_000_v001.aep"] + ret = collect_frames(files) + + expected = { + "testing_sh010_workfileCompositing_000_v001.aep": "000" + } + + print(ret) + assert ret == expected, "Not matching" + + def test_collect_frames_single_sequence_full_path(): files = ['C:/test_project/assets/locations/Town/work/compositing\\renders\\aftereffects\\test_project_TestAsset_compositing_v001\\TestAsset_renderCompositingMain_v001.mov'] # noqa: E501 ret = collect_frames(files) From c2b37588a647514f76cd6626fb239b9c0d21a203 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 17 Mar 2022 17:24:15 +0100 Subject: [PATCH 064/196] nuke: making 
better readability --- .../publish/extract_review_data_mov.py | 37 ++++--------------- 1 file changed, 8 insertions(+), 29 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index 544b9e04da..f5bb03fc69 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -51,39 +51,18 @@ class ExtractReviewDataMov(openpype.api.Extractor): f_subsets = o_data["filter"]["sebsets"] # test if family found in context - test_families = any([ - # first if exact family set is matching - # make sure only interesetion of list is correct - bool(set(families).intersection(f_families)), - # and if famiies are set at all - # if not then return True because we want this preset - # to be active if nothig is set - bool(not f_families) - ]) + # using intersection to make sure all defined + # families are present in combinantion + test_families = not f_families or any( + set(families).intersection(f_families)) # test task types from filter - test_task_types = any([ - # check if actual task type is defined in task types - # set in preset's filter - bool(task_type in f_task_types), - # and if taskTypes are defined in preset filter - # if not then return True, because we want this filter - # to be active if no taskType is set - bool(not f_task_types) - ]) + test_task_types = not f_task_types or any( + task_type in f_task_types) # test subsets from filter - test_subsets = any([ - # check if any of subset filter inputs - # converted to regex patern is not found in subset - # we keep strict case sensitivity - bool(next(( - s for s in f_subsets - if re.search(re.compile(s), subset) - ), None)), - # but if no subsets were set then make this acuntable too - bool(not f_subsets) - ]) + test_subsets = not f_subsets or any( + re.search(s, subset) for s in f_subsets) # we need all filters to be positive for this # preset to be activated From 16f4ada2ad4772debe465231cfb60bf4c22b1f27 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 17:59:56 +0100 Subject: [PATCH 065/196] use 'roots' instead of 'roots_obj' --- openpype/pipeline/load/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index ae47cb9ce9..118f86a570 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -502,7 +502,7 @@ def get_representation_path_from_context(context): session_project = Session.get("AVALON_PROJECT") if project_doc and project_doc["name"] != session_project: anatomy = Anatomy(project_doc["name"]) - root = anatomy.roots_obj + root = anatomy.roots return get_representation_path(representation, root) From d080b17cce56f6d128d45b26f9224db9294dcd5f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Mar 2022 18:00:25 +0100 Subject: [PATCH 066/196] OP-2813 - fix wrong logging --- .../modules/deadline/plugins/publish/submit_publish_job.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 8c0d78cae5..06505b4b47 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -613,8 +613,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # might be added explicitly before by publish_on_farm 
already_there = repre.get("files") == rep["files"] if already_there: + self.log.debug("repre {} already_there".format(repre)) break - self.log.debug("repre {} already_there".format(repre)) + if not already_there: representations.append(rep) From cd65332942ee14e9b47ee0608e24f5ae8c189aff Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 18:47:51 +0100 Subject: [PATCH 067/196] fixed filling of ftrack id --- .../event_sync_to_avalon.py | 26 +++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py index 237bf9fd80..46c333c4c4 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -212,6 +212,9 @@ class SyncToAvalonEvent(BaseEvent): return self._avalon_ents_by_ftrack_id def handle_missing_ftrack_id(self, doc): + # TODO handling of missing ftrack id is primarily issue of editorial + # publishing it would be better to find out what causes that + # ftrack id is removed during the publishing ftrack_id = doc["data"].get("ftrackId") if ftrack_id is not None: return @@ -221,10 +224,17 @@ class SyncToAvalonEvent(BaseEvent): self.dbcon.update_one( {"type": "project"}, - {"$set": {"data.ftrackId": ftrack_id}} + {"$set": { + "data.ftrackId": ftrack_id, + "data.entityType": self.cur_project.entity_type + }} ) doc["data"]["ftrackId"] = ftrack_id + doc["data"]["entityType"] = self.cur_project.entity_type + self.log.info("Updated ftrack id of project \"{}\"".format( + self.cur_project["full_name"] + )) return if doc["type"] != "asset": @@ -238,6 +248,7 @@ class SyncToAvalonEvent(BaseEvent): "select id, link from TypedContext" " where project_id is \"{}\" and name is \"{}\"" ).format(self.cur_project["id"], doc["name"])).all() + self.log.info("Entities: {}".format(str(entities))) matching_entity = None for entity in entities: parents = [] @@ -257,9 +268,20 @@ class SyncToAvalonEvent(BaseEvent): ftrack_id = matching_entity["id"] self.dbcon.update_one( {"_id": doc["_id"]}, - {"$set": {"data.ftrackId": ftrack_id}} + {"$set": { + "data.ftrackId": ftrack_id, + "data.entityType": matching_entity.entity_type + }} ) + doc["data"]["ftrackId"] = ftrack_id + doc["data"]["entityType"] = matching_entity.entity_type + entity_path_items = [] + for item in entity["link"]: + entity_path_items.append(item["name"]) + self.log.info("Updated ftrack id of entity \"{}\"".format( + "/".join(entity_path_items) + )) self._avalon_ents_by_ftrack_id[ftrack_id] = doc @property From 4dd95fba6842d2b4c4556e2465cb2ef00f70cb1f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 19:22:35 +0100 Subject: [PATCH 068/196] added job and report messages --- .../action_fill_workfile_attr.py | 319 ++++++++++++++---- 1 file changed, 262 insertions(+), 57 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py index a72b29bdbe..77f18c49c1 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -1,4 +1,9 @@ +import os +import sys +import json import collections +import tempfile +import datetime import ftrack_api @@ -13,6 +18,8 @@ from openpype.lib import ( from openpype_modules.ftrack.lib import BaseAction, statics_icon from 
openpype_modules.ftrack.lib.avalon_sync import create_chunks +NOT_SYNCHRONIZED_TITLE = "Not synchronized" + class FillWorkfileAttributeAction(BaseAction): """Action fill work filename into custom attribute on tasks. @@ -44,24 +51,24 @@ class FillWorkfileAttributeAction(BaseAction): return is_valid def launch(self, session, entities, event): - task_entities = [] - other_entities = [] + # Separate entities and get project entity project_entity = None - project_selected = False for entity in entities: if project_entity is None: project_entity = self.get_project_from_entity(entity) - - ent_type_low = entity.entity_type.lower() - if ent_type_low == "project": - project_selected = True break - elif ent_type_low == "task": - task_entities.append(entity) - else: - other_entities.append(entity) + if not project_entity: + return { + "message": ( + "Couldn't find project entity." + " Could be an issue with permissions." + ), + "success": False + } + # Get project settings and check if custom attribute where workfile + # should be set is defined. project_name = project_entity["full_name"] project_settings = get_project_settings(project_name) custom_attribute_key = ( @@ -77,12 +84,16 @@ class FillWorkfileAttributeAction(BaseAction): "message": "Custom attribute key is not set in settings" } + # Try to find the custom attribute + # - get Task type object id task_obj_type = session.query( "select id from ObjectType where name is \"Task\"" ).one() + # - get text custom attribute type text_type = session.query( "select id from CustomAttributeType where name is \"text\"" ).one() + # - find the attribute attr_conf = session.query( ( "select id, key from CustomAttributeConfiguration" @@ -101,33 +112,184 @@ class FillWorkfileAttributeAction(BaseAction): ).format(custom_attribute_key) } + # Store report information + report = collections.defaultdict(list) + user_entity = session.query( + "User where id is {}".format(event["source"]["user"]["id"]) + ).one() + job_entity = session.create("Job", { + "user": user_entity, + "status": "running", + "data": json.dumps({ + "description": "(0/3) Fill of workfiles started" + }) + }) + session.commit() + + try: + self.in_job_process( + session, + entities, + job_entity, + project_entity, + project_settings, + attr_conf, + report + ) + except Exception: + self.log.error( + "Fill of workfiles to custom attribute failed", exc_info=True + ) + session.rollback() + + description = "Fill of workfiles Failed (Download traceback)" + self.add_traceback_to_job( + job_entity, session, sys.exc_info(), description + ) + return { + "message": ( + "Fill of workfiles failed." + " Check job for more information" + ), + "success": False + } + + job_entity["status"] = "done" + job_entity["data"] = json.dumps({ + "description": "Fill of workfiles completed." + }) + session.commit() + if report: + temp_obj = tempfile.NamedTemporaryFile( + mode="w", + prefix="openpype_ftrack_", + suffix=".json", + delete=False + ) + temp_obj.close() + temp_filepath = temp_obj.name + with open(temp_filepath, "w") as temp_file: + json.dump(report, temp_file) + + component_name = "{}_{}".format( + "FillWorkfilesReport", + datetime.datetime.now().strftime("%y-%m-%d-%H%M") + ) + self.add_file_component_to_job( + job_entity, session, temp_filepath, component_name + ) + # Delete temp file + os.remove(temp_filepath) + self._show_report(event, report, project_name) + return { + "message": ( + "Fill of workfiles finished with few issues." 
+ " Check job for more information" + ), + "success": True + } + + return { + "success": True, + "message": "Finished with filling of work filenames" + } + + def _show_report(self, event, report, project_name): + items = [] + title = "Fill workfiles report ({}):".format(project_name) + + for subtitle, lines in report.items(): + if items: + items.append({ + "type": "label", + "value": "---" + }) + items.append({ + "type": "label", + "value": "# {}".format(subtitle) + }) + items.append({ + "type": "label", + "value": '

<p>{}</p>'.format("<br>
".join(lines)) + }) + + self.show_interface( + items=items, + title=title, + event=event + ) + + def in_job_process( + self, + session, + entities, + job_entity, + project_entity, + project_settings, + attr_conf, + report + ): + task_entities = [] + other_entities = [] + project_selected = False + for entity in entities: + ent_type_low = entity.entity_type.lower() + if ent_type_low == "project": + project_selected = True + break + + elif ent_type_low == "task": + task_entities.append(entity) + else: + other_entities.append(entity) + + project_name = project_entity["full_name"] + + # Find matchin asset documents and map them by ftrack task entities + # - result stored to 'asset_docs_with_task_entities' is list with + # tuple `(asset document, [task entitis, ...])` dbcon = AvalonMongoDB() dbcon.Session["AVALON_PROJECT"] = project_name + # Quety all asset documents asset_docs = list(dbcon.find({"type": "asset"})) + job_entity["data"] = json.dumps({ + "description": "(1/3) Asset documents queried." + }) + session.commit() + + # When project is selected then we can query whole project if project_selected: - asset_docs_with_task_names = self._get_asset_docs_for_project( - session, project_entity, asset_docs + asset_docs_with_task_entities = self._get_asset_docs_for_project( + session, project_entity, asset_docs, report ) else: - asset_docs_with_task_names = self._get_tasks_for_selection( - session, other_entities, task_entities, asset_docs + asset_docs_with_task_entities = self._get_tasks_for_selection( + session, other_entities, task_entities, asset_docs, report ) + job_entity["data"] = json.dumps({ + "description": "(2/3) Queried related task entities." + }) + session.commit() + + # Keep placeholders in the template unfilled host_name = "{host}" + extension = "{ext}" project_doc = dbcon.find_one({"type": "project"}) project_settings = get_project_settings(project_name) anatomy = Anatomy(project_name) templates_by_key = {} operations = [] - for asset_doc, task_entities in asset_docs_with_task_names: + for asset_doc, task_entities in asset_docs_with_task_entities: for task_entity in task_entities: workfile_data = get_workdir_data( project_doc, asset_doc, task_entity["name"], host_name ) + # Use version 1 for each workfile workfile_data["version"] = 1 - workfile_data["ext"] = "{ext}" + workfile_data["ext"] = extension task_type = workfile_data["task"]["type"] template_key = get_workfile_template_key( @@ -166,22 +328,40 @@ class FillWorkfileAttributeAction(BaseAction): session.recorded_operations.push(op) session.commit() - return True + job_entity["data"] = json.dumps({ + "description": "(3/3) Set custom attribute values." 
+ }) + session.commit() + + def _get_entity_path(self, entity): + path_items = [] + for item in entity["link"]: + if item["type"].lower() != "project": + path_items.append(item["name"]) + return "/".join(path_items) + + def _get_asset_docs_for_project( + self, session, project_entity, asset_docs, report + ): + asset_docs_task_names = {} - def _get_asset_docs_for_project(self, session, project_entity, asset_docs): - asset_docs_task_names = collections.defaultdict(list) for asset_doc in asset_docs: asset_data = asset_doc["data"] - asset_tasks = asset_data.get("tasks") ftrack_id = asset_data.get("ftrackId") - if not asset_tasks or not ftrack_id: + if not ftrack_id: + hierarchy = list(asset_data.get("parents") or []) + hierarchy.append(asset_doc["name"]) + path = "/".join(hierarchy) + report[NOT_SYNCHRONIZED_TITLE].append(path) continue - asset_docs_task_names[ftrack_id].append( - (asset_doc, list(asset_tasks.keys())) + + asset_tasks = asset_data.get("tasks") or {} + asset_docs_task_names[ftrack_id] = ( + asset_doc, list(asset_tasks.keys()) ) task_entities = session.query(( - "select id, name, parent_id from Task where project_id is {}" + "select id, name, parent_id, link from Task where project_id is {}" ).format(project_entity["id"])).all() task_entities_by_parent_id = collections.defaultdict(list) for task_entity in task_entities: @@ -189,21 +369,23 @@ class FillWorkfileAttributeAction(BaseAction): task_entities_by_parent_id[parent_id].append(task_entity) output = [] - for ftrack_id, items in asset_docs_task_names.items(): - for item in items: - asset_doc, task_names = item - valid_task_entities = [] - for task_entity in task_entities_by_parent_id[ftrack_id]: - if task_entity["name"] in task_names: - valid_task_entities.append(task_entity) + for ftrack_id, item in asset_docs_task_names.items(): + asset_doc, task_names = item + valid_task_entities = [] + for task_entity in task_entities_by_parent_id[ftrack_id]: + if task_entity["name"] in task_names: + valid_task_entities.append(task_entity) + else: + path = self._get_entity_path(task_entity) + report[NOT_SYNCHRONIZED_TITLE].append(path) - if valid_task_entities: - output.append((asset_doc, valid_task_entities)) + if valid_task_entities: + output.append((asset_doc, valid_task_entities)) return output def _get_tasks_for_selection( - self, session, other_entities, task_entities, asset_docs + self, session, other_entities, task_entities, asset_docs, report ): all_tasks = object() asset_docs_by_ftrack_id = {} @@ -216,13 +398,13 @@ class FillWorkfileAttributeAction(BaseAction): if ftrack_id: asset_docs_by_ftrack_id[ftrack_id] = asset_doc - missing_docs = set() + missing_doc_ftrack_ids = {} all_tasks_ids = set() task_names_by_ftrack_id = collections.defaultdict(list) for other_entity in other_entities: ftrack_id = other_entity["id"] if ftrack_id not in asset_docs_by_ftrack_id: - missing_docs.add(ftrack_id) + missing_doc_ftrack_ids[ftrack_id] = None continue all_tasks_ids.add(ftrack_id) task_names_by_ftrack_id[ftrack_id] = all_tasks @@ -230,21 +412,18 @@ class FillWorkfileAttributeAction(BaseAction): for task_entity in task_entities: parent_id = task_entity["parent_id"] if parent_id not in asset_docs_by_ftrack_id: - missing_docs.add(parent_id) + missing_doc_ftrack_ids[parent_id] = None continue if all_tasks_ids not in all_tasks_ids: task_names_by_ftrack_id[ftrack_id].append(task_entity["name"]) ftrack_ids = set() - asset_doc_with_task_names_by_id = collections.defaultdict(list) + asset_doc_with_task_names_by_id = {} for ftrack_id, task_names in 
task_names_by_ftrack_id.items(): asset_doc = asset_docs_by_ftrack_id[ftrack_id] asset_data = asset_doc["data"] - asset_tasks = asset_data.get("tasks") - if not asset_tasks: - # TODO add to report - continue + asset_tasks = asset_data.get("tasks") or {} if task_names is all_tasks: task_names = list(asset_tasks.keys()) @@ -253,15 +432,19 @@ class FillWorkfileAttributeAction(BaseAction): for task_name in task_names: if task_name in asset_tasks: new_task_names.append(task_name) - else: - # TODO add report - pass + continue + + if ftrack_id not in missing_doc_ftrack_ids: + missing_doc_ftrack_ids[ftrack_id] = [] + if missing_doc_ftrack_ids[ftrack_id] is not None: + missing_doc_ftrack_ids[ftrack_id].append(task_name) + task_names = new_task_names if task_names: ftrack_ids.add(ftrack_id) - asset_doc_with_task_names_by_id[ftrack_id].append( - (asset_doc, task_names) + asset_doc_with_task_names_by_id[ftrack_id] = ( + asset_doc, task_names ) task_entities = session.query(( @@ -273,15 +456,37 @@ class FillWorkfileAttributeAction(BaseAction): task_entitiy_by_parent_id[parent_id].append(task_entity) output = [] - for ftrack_id, items in asset_doc_with_task_names_by_id.items(): - for item in items: - asset_doc, task_names = item - valid_task_entities = [] - for task_entity in task_entitiy_by_parent_id[ftrack_id]: - if task_entity["name"] in task_names: - valid_task_entities.append(task_entity) - if valid_task_entities: - output.append((asset_doc, valid_task_entities)) + for ftrack_id, item in asset_doc_with_task_names_by_id.items(): + asset_doc, task_names = item + valid_task_entities = [] + for task_entity in task_entitiy_by_parent_id[ftrack_id]: + if task_entity["name"] in task_names: + valid_task_entities.append(task_entity) + else: + if ftrack_id not in missing_doc_ftrack_ids: + missing_doc_ftrack_ids[ftrack_id] = [] + if missing_doc_ftrack_ids[ftrack_id] is not None: + missing_doc_ftrack_ids[ftrack_id].append(task_name) + if valid_task_entities: + output.append((asset_doc, valid_task_entities)) + + # Store report information about not synchronized entities + if missing_doc_ftrack_ids: + missing_entities = session.query( + "select id, link from TypedContext where id in ({})".format( + self.join_query_keys(missing_doc_ftrack_ids.keys()) + ) + ).all() + for missing_entity in missing_entities: + path = self._get_entity_path(missing_entity) + task_names = missing_doc_ftrack_ids[missing_entity["id"]] + if task_names is None: + report[NOT_SYNCHRONIZED_TITLE].append(path) + else: + for task_name in task_names: + task_path = "/".join([path, task_name]) + report[NOT_SYNCHRONIZED_TITLE].append(task_path) + return output From fe8caa3b3aef5b78bf76fe7ff8fce5c37b92227a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 19:24:11 +0100 Subject: [PATCH 069/196] fix app key --- .../ftrack/event_handlers_user/action_fill_workfile_attr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py index 77f18c49c1..3888379e04 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -274,7 +274,7 @@ class FillWorkfileAttributeAction(BaseAction): session.commit() # Keep placeholders in the template unfilled - host_name = "{host}" + host_name = "{app}" extension = "{ext}" project_doc = dbcon.find_one({"type": "project"}) project_settings = 
get_project_settings(project_name) From d3dc406b905f0554e867e5447e2f71ec8de85862 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 19:27:05 +0100 Subject: [PATCH 070/196] use get_workdir_data in wokrfiles tool --- openpype/tools/workfiles/app.py | 36 ++++++--------------------------- 1 file changed, 6 insertions(+), 30 deletions(-) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 63958ac57b..da5524331a 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -27,7 +27,7 @@ from openpype.lib import ( save_workfile_data_to_doc, get_workfile_template_key, create_workdir_extra_folders, - get_system_general_anatomy_data + get_workdir_data ) from openpype.lib.avalon_context import ( update_current_task, @@ -48,6 +48,7 @@ def build_workfile_data(session): # Set work file data for template formatting asset_name = session["AVALON_ASSET"] task_name = session["AVALON_TASK"] + host_name = session["AVALON_APP"] project_doc = io.find_one( {"type": "project"}, { @@ -63,42 +64,17 @@ def build_workfile_data(session): "name": asset_name }, { + "name": True, "data.tasks": True, "data.parents": True } ) - - task_type = asset_doc["data"]["tasks"].get(task_name, {}).get("type") - - project_task_types = project_doc["config"]["tasks"] - task_short = project_task_types.get(task_type, {}).get("short_name") - - asset_parents = asset_doc["data"]["parents"] - parent_name = project_doc["name"] - if asset_parents: - parent_name = asset_parents[-1] - - data = { - "project": { - "name": project_doc["name"], - "code": project_doc["data"].get("code") - }, - "asset": asset_name, - "task": { - "name": task_name, - "type": task_type, - "short": task_short, - }, - "parent": parent_name, + data = get_workdir_data(project_doc, asset_doc, task_name, host_name) + data.update({ "version": 1, - "user": getpass.getuser(), "comment": "", "ext": None - } - - # add system general settings anatomy data - system_general_data = get_system_general_anatomy_data() - data.update(system_general_data) + }) return data From cbb7db98f7a917bf30a0159a6f3ae548a6a8a906 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 17 Mar 2022 19:36:50 +0100 Subject: [PATCH 071/196] OPENPYPE_DEBUG can be set to 1 to log debug messages --- openpype/cli.py | 14 +++++++------- openpype/lib/log.py | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index 155e07dea3..cbeb7fef9b 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -101,7 +101,7 @@ def eventserver(debug, on linux and window service). """ if debug: - os.environ['OPENPYPE_DEBUG'] = "3" + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands().launch_eventservercli( ftrack_url, @@ -128,7 +128,7 @@ def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None): Expect "pype.club" user created on Ftrack. """ if debug: - os.environ['OPENPYPE_DEBUG'] = "3" + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands().launch_webpublisher_webservercli( upload_dir=upload_dir, @@ -176,7 +176,7 @@ def publish(debug, paths, targets, gui): More than one path is allowed. """ if debug: - os.environ['OPENPYPE_DEBUG'] = '3' + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands.publish(list(paths), targets, gui) @@ -195,7 +195,7 @@ def remotepublishfromapp(debug, project, path, host, user=None, targets=None): More than one path is allowed. 
""" if debug: - os.environ['OPENPYPE_DEBUG'] = '3' + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands.remotepublishfromapp( project, path, host, user, targets=targets ) @@ -215,7 +215,7 @@ def remotepublish(debug, project, path, user=None, targets=None): More than one path is allowed. """ if debug: - os.environ['OPENPYPE_DEBUG'] = '3' + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands.remotepublish(project, path, user, targets=targets) @@ -240,7 +240,7 @@ def texturecopy(debug, project, asset, path): Nothing is written to database. """ if debug: - os.environ['OPENPYPE_DEBUG'] = '3' + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands().texture_copy(project, asset, path) @@ -409,7 +409,7 @@ def syncserver(debug, active_site): var OPENPYPE_LOCAL_ID set to 'active_site'. """ if debug: - os.environ['OPENPYPE_DEBUG'] = '3' + os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands().syncserver(active_site) diff --git a/openpype/lib/log.py b/openpype/lib/log.py index a42faef008..98a3bae8e6 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -227,7 +227,7 @@ class PypeLogger: logger = logging.getLogger(name or "__main__") - if cls.pype_debug > 1: + if cls.pype_debug > 0: logger.setLevel(logging.DEBUG) else: logger.setLevel(logging.INFO) From c1200c16d5900d3b23af6406b43ced45385e58cd Mon Sep 17 00:00:00 2001 From: OpenPype Date: Thu, 17 Mar 2022 23:00:09 +0000 Subject: [PATCH 072/196] [Automated] Bump version --- CHANGELOG.md | 11 ++++++++++- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a1da69f13..78ebf8f164 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,16 +1,21 @@ # Changelog -## [3.9.1-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.9.1-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...HEAD) **🚀 Enhancements** +- General: Change how OPENPYPE\_DEBUG value is handled [\#2907](https://github.com/pypeclub/OpenPype/pull/2907) +- nuke: imageio adding ocio config version 1.2 [\#2897](https://github.com/pypeclub/OpenPype/pull/2897) +- Flame: support for comment with xml attribute overrides [\#2892](https://github.com/pypeclub/OpenPype/pull/2892) - Nuke: ExtractReviewSlate can handle more codes and profiles [\#2879](https://github.com/pypeclub/OpenPype/pull/2879) - Flame: sequence used for reference video [\#2869](https://github.com/pypeclub/OpenPype/pull/2869) **🐛 Bug fixes** +- General: Fix use of Anatomy roots [\#2904](https://github.com/pypeclub/OpenPype/pull/2904) +- Fixing gap detection in extract review [\#2902](https://github.com/pypeclub/OpenPype/pull/2902) - Pyblish Pype - ensure current state is correct when entering new group order [\#2899](https://github.com/pypeclub/OpenPype/pull/2899) - SceneInventory: Fix import of load function [\#2894](https://github.com/pypeclub/OpenPype/pull/2894) - Harmony - fixed creator issue [\#2891](https://github.com/pypeclub/OpenPype/pull/2891) @@ -32,6 +37,10 @@ - AssetCreator: Remove the tool [\#2845](https://github.com/pypeclub/OpenPype/pull/2845) +### 📖 Documentation + +- Documentation: Change Photoshop & AfterEffects plugin path [\#2878](https://github.com/pypeclub/OpenPype/pull/2878) + **🚀 Enhancements** - General: Subset name filtering in ExtractReview outpus [\#2872](https://github.com/pypeclub/OpenPype/pull/2872) diff --git a/openpype/version.py b/openpype/version.py index 5eca7c1d90..a62afd1953 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ 
-1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.1-nightly.2" +__version__ = "3.9.1-nightly.3" diff --git a/pyproject.toml b/pyproject.toml index af448ed24c..71c0af0b4f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.9.1-nightly.2" # OpenPype +version = "3.9.1-nightly.3" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 10d9b42c74ff84e747e3f61c97499f29f33fb45c Mon Sep 17 00:00:00 2001 From: OpenPype Date: Thu, 17 Mar 2022 23:40:09 +0000 Subject: [PATCH 073/196] [Automated] Release --- CHANGELOG.md | 4 ++-- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 78ebf8f164..f3c7820d8f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ # Changelog -## [3.9.1-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-17) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...3.9.1) **🚀 Enhancements** diff --git a/openpype/version.py b/openpype/version.py index a62afd1953..1ef25e3f48 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.1-nightly.3" +__version__ = "3.9.1" diff --git a/pyproject.toml b/pyproject.toml index 71c0af0b4f..7c09495a99 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.9.1-nightly.3" # OpenPype +version = "3.9.1" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From f8c06e0e1fb0d91b09a76c4611cfdf4d1ac546d8 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Fri, 18 Mar 2022 11:15:19 +0900 Subject: [PATCH 074/196] fix iteration for options among multiverse extractor --- openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py | 3 ++- .../hosts/maya/plugins/publish/extract_multiverse_usd_comp.py | 3 ++- .../hosts/maya/plugins/publish/extract_multiverse_usd_over.py | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 565fbd1ee3..96e0e79c29 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -163,7 +163,8 @@ class ExtractMultiverseUsd(openpype.api.Extractor): time_opts.framePerSecond = options["timeRangeFramesPerSecond"] asset_write_opts = multiverse.AssetWriteOptions(time_opts) - for (k, v) in options.iteritems(): + options_items = getattr(options, "iteritems", options.items) + for (k, v) in options_items: if k == "writeTimeRange" or k.startswith("timeRange"): continue setattr(asset_write_opts, k, v) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py index f35096e516..ef54e9ceff 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py @@ -107,7 +107,8 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): time_opts.framePerSecond = options["timeRangeFramesPerSecond"] comp_write_opts = multiverse.CompositionWriteOptions() - for (k, v) in options.iteritems(): + options_items = getattr(options, "iteritems", options.items) + for (k, v) in options_items: if k == "writeTimeRange" or k.startswith("timeRange"): continue setattr(comp_write_opts, k, v) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py index e0e65d83d1..df76614f5a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py @@ -117,7 +117,8 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): time_opts.framePerSecond = options["timeRangeFramesPerSecond"] over_write_opts = multiverse.OverridesWriteOptions() - for (k, v) in options.iteritems(): + options_items = getattr(options, "iteritems", options.items) + for (k, v) in options_items: if k == "writeTimeRange" or k.startswith("timeRange"): continue setattr(over_write_opts, k, v) From ca5017f730f71de4a4034b06cd3ce5835724e4fa Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Fri, 18 Mar 2022 12:23:42 +0900 Subject: [PATCH 075/196] fix iteration loop --- openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py | 2 +- .../hosts/maya/plugins/publish/extract_multiverse_usd_comp.py | 2 +- .../hosts/maya/plugins/publish/extract_multiverse_usd_over.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 96e0e79c29..7c13252957 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -164,7 +164,7 @@ class 
ExtractMultiverseUsd(openpype.api.Extractor): asset_write_opts = multiverse.AssetWriteOptions(time_opts) options_items = getattr(options, "iteritems", options.items) - for (k, v) in options_items: + for (k, v) in options_items(): if k == "writeTimeRange" or k.startswith("timeRange"): continue setattr(asset_write_opts, k, v) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py index ef54e9ceff..449a99e1be 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py @@ -108,7 +108,7 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): comp_write_opts = multiverse.CompositionWriteOptions() options_items = getattr(options, "iteritems", options.items) - for (k, v) in options_items: + for (k, v) in options_items(): if k == "writeTimeRange" or k.startswith("timeRange"): continue setattr(comp_write_opts, k, v) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py index df76614f5a..406ff8ba11 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py @@ -118,7 +118,7 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): over_write_opts = multiverse.OverridesWriteOptions() options_items = getattr(options, "iteritems", options.items) - for (k, v) in options_items: + for (k, v) in options_items(): if k == "writeTimeRange" or k.startswith("timeRange"): continue setattr(over_write_opts, k, v) From a912c4db80729ab2b87c4b6c5c07403254e82cba Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 18 Mar 2022 09:45:04 +0100 Subject: [PATCH 076/196] update avalon-core --- repos/avalon-core | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/avalon-core b/repos/avalon-core index 7753d15507..64491fbbcf 160000 --- a/repos/avalon-core +++ b/repos/avalon-core @@ -1 +1 @@ -Subproject commit 7753d15507afadc143b7d49db8fcfaa6a29fed91 +Subproject commit 64491fbbcf89ba2a0b3a20d67d7486c6142232b3 From 5d25de8997c46c46ac2c6bdfeb36cf9e032266ec Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Mar 2022 11:00:26 +0100 Subject: [PATCH 077/196] OP-2813 - added documentation how to run test file in IDE --- tests/README.md | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/tests/README.md b/tests/README.md index bb1cdbdef8..d0b537d425 100644 --- a/tests/README.md +++ b/tests/README.md @@ -21,3 +21,27 @@ Specific location could be provided to this command as an argument, either as ab (eg. `python ${OPENPYPE_ROOT}/start.py start.py runtests ../tests/integration`) will trigger only tests in `integration` folder. See `${OPENPYPE_ROOT}/cli.py:runtests` for other arguments. + +Run in IDE: +----------- +If you would prefer to run/debug single file dirrectly in IDE of your choice, you might encounter issues with imports. +It would manifest like `KeyError: 'OPENPYPE_DATABASE_NAME'`. That means you are importing module that depends on OP to be running, eg. all expected variables are set. + +In some cases your tests might be so localized, that you don't care about all env vars to be set properly. 
+In that case you might add this dummy configuration BEFORE any imports in your test file +``` +import os +os.environ["AVALON_MONGO"] = "mongodb://localhost:27017" +os.environ["OPENPYPE_MONGO"] = "mongodb://localhost:27017" +os.environ["AVALON_DB"] = "avalon" +os.environ["OPENPYPE_DATABASE_NAME"] = "openpype" +os.environ["AVALON_TIMEOUT"] = '3000' +os.environ["OPENPYPE_DEBUG"] = "3" +os.environ["AVALON_CONFIG"] = "pype" +os.environ["AVALON_ASSET"] = "Asset" +os.environ["AVALON_PROJECT"] = "test_project" +``` +(AVALON_ASSET and AVALON_PROJECT values should exist in your environment) + +This might be enough to run your test file separately. Do not commit this skeleton though. +Use only when you know what you are doing! \ No newline at end of file From c83202a023484aaa6a9a64aaab63ad879d71ded7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Mar 2022 11:13:10 +0100 Subject: [PATCH 078/196] OP-2813 - changed logic of parsing frames from names Adhering to clique standard FRAMES patter, eg pattern is separated by . It seems that this is most widely used (according to Discord). --- openpype/lib/delivery.py | 18 ++++---------- tests/unit/openpype/lib/test_delivery.py | 30 +++++++++++++++++++++--- 2 files changed, 32 insertions(+), 16 deletions(-) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index 78d743003b..03abe5802c 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -4,7 +4,6 @@ import shutil import glob import clique import collections -import re def collect_frames(files): @@ -14,31 +13,24 @@ def collect_frames(files): Uses clique as most precise solution, used when anatomy template that created files is not known. - Depends that version substring starts with 'v' with any number of - numeric characters after. + Assumption is that frames are separated by '.', negative frames are not + allowed. 
Args: files(list) or (set with single value): list of source paths Returns: (dict): {'/asset/subset_v001.0001.png': '0001', ....} """ - collections, remainder = clique.assemble(files, minimum_items=1) + patterns = [clique.PATTERNS["frames"]] + collections, remainder = clique.assemble(files, minimum_items=1, + patterns=patterns) - real_file_name = None sources_and_frames = {} - if len(files) == 1: - real_file_name = list(files)[0] - sources_and_frames[real_file_name] = None - if collections: for collection in collections: src_head = collection.head src_tail = collection.tail - # version recognized as a collection - if re.match(".*([a-zA-Z0-9]%[0-9]+d).*", collection.format()): - continue - for index in collection.indexes: src_frame = collection.format("{padding}") % index src_file_name = "{}{}{}".format(src_head, src_frame, diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py index 871ea95df7..04a71655e3 100644 --- a/tests/unit/openpype/lib/test_delivery.py +++ b/tests/unit/openpype/lib/test_delivery.py @@ -47,6 +47,18 @@ def test_collect_frames_single_sequence(): assert ret == expected, "Not matching" +def test_collect_frames_single_sequence_negative(): + files = ["Asset_renderCompositingMain_v001.-0000.png"] + ret = collect_frames(files) + + expected = { + "Asset_renderCompositingMain_v001.-0000.png": None + } + + print(ret) + assert ret == expected, "Not matching" + + def test_collect_frames_single_sequence_shot(): files = ["testing_sh010_workfileCompositing_v001.aep"] ret = collect_frames(files) @@ -59,12 +71,24 @@ def test_collect_frames_single_sequence_shot(): assert ret == expected, "Not matching" +def test_collect_frames_single_sequence_numbers(): + files = ["PRJ_204_430_0005_renderLayoutMain_v001.0001.exr"] + ret = collect_frames(files) + + expected = { + "PRJ_204_430_0005_renderLayoutMain_v001.0001.exr": "0001" + } + + print(ret) + assert ret == expected, "Not matching" + + def test_collect_frames_single_sequence_shot_with_frame(): files = ["testing_sh010_workfileCompositing_000_v001.aep"] ret = collect_frames(files) expected = { - "testing_sh010_workfileCompositing_000_v001.aep": "000" + "testing_sh010_workfileCompositing_000_v001.aep": None } print(ret) @@ -88,7 +112,7 @@ def test_collect_frames_single_sequence_different_format(): ret = collect_frames(files) expected = { - "Asset.v001.renderCompositingMain_0000.png": "0000" + "Asset.v001.renderCompositingMain_0000.png": None } print(ret) @@ -100,7 +124,7 @@ def test_collect_frames_single_sequence_withhout_version(): ret = collect_frames(files) expected = { - "pngv001.renderCompositingMain_0000.png": "0000" + "pngv001.renderCompositingMain_0000.png": None } print(ret) From 4c1fa1d632e7880131f62bb6dc8f064795cb431a Mon Sep 17 00:00:00 2001 From: jrsndlr Date: Fri, 18 Mar 2022 11:50:41 +0100 Subject: [PATCH 079/196] no-audio tag Allow skipping audio for reviews. 
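For reference, the frame-collection rewrite in PATCH 078 above leans on clique's built-in "frames" pattern, which only treats a dot-separated trailing number as a frame index. A minimal sketch of the grouping it produces — the clique calls are the same ones the patch uses, the file names are only illustrative:

```
import clique

files = [
    "Asset_renderCompositingMain_v001.0001.png",
    "Asset_renderCompositingMain_v001.0002.png",
    "Asset_renderCompositingMain_v001.aep",
]
collections, remainder = clique.assemble(
    files, minimum_items=1, patterns=[clique.PATTERNS["frames"]]
)

# One collection is built: head "Asset_renderCompositingMain_v001.",
# tail ".png", indexes {1, 2}. The .aep file has no ".<frame>." part,
# so it lands in `remainder`, which the tests above expect
# collect_frames() to map to None.
collection = collections[0]
for index in collection.indexes:
    frame = collection.format("{padding}") % index  # "0001", "0002"
```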
--- .../projects_schema/schemas/schema_representation_tags.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json index 7607e1a8c1..484fbf9d07 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json @@ -24,6 +24,9 @@ }, { "sequence": "Output as image sequence" + }, + { + "no-audio": "Do not add audio" } ] } From 55087ec5b849eb27496ea72c06fbdf5f55cb057d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Mar 2022 12:27:15 +0100 Subject: [PATCH 080/196] OP-2813 - fix typo --- tests/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/README.md b/tests/README.md index d0b537d425..69828cdbc2 100644 --- a/tests/README.md +++ b/tests/README.md @@ -24,7 +24,7 @@ See `${OPENPYPE_ROOT}/cli.py:runtests` for other arguments. Run in IDE: ----------- -If you would prefer to run/debug single file dirrectly in IDE of your choice, you might encounter issues with imports. +If you prefer to run/debug single file directly in IDE of your choice, you might encounter issues with imports. It would manifest like `KeyError: 'OPENPYPE_DATABASE_NAME'`. That means you are importing module that depends on OP to be running, eg. all expected variables are set. In some cases your tests might be so localized, that you don't care about all env vars to be set properly. From 7fa905a5e3d028586dcd93aa480b78060ff6b800 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 12:30:04 +0100 Subject: [PATCH 081/196] removed registering of inventory actions from hosts that don't have any --- openpype/hosts/aftereffects/api/pipeline.py | 1 - openpype/hosts/blender/api/pipeline.py | 1 - openpype/hosts/flame/api/pipeline.py | 5 +---- openpype/hosts/hiero/api/pipeline.py | 2 -- openpype/hosts/resolve/api/pipeline.py | 3 --- 5 files changed, 1 insertion(+), 11 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 681f1c51a7..71270e1a12 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -29,7 +29,6 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") -INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") def check_inventory(): diff --git a/openpype/hosts/blender/api/pipeline.py b/openpype/hosts/blender/api/pipeline.py index 07a7509dd7..6c1dfb6cc7 100644 --- a/openpype/hosts/blender/api/pipeline.py +++ b/openpype/hosts/blender/api/pipeline.py @@ -31,7 +31,6 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") -INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") ORIGINAL_EXCEPTHOOK = sys.excepthook diff --git a/openpype/hosts/flame/api/pipeline.py b/openpype/hosts/flame/api/pipeline.py index 930c6abe29..650416d58b 100644 --- a/openpype/hosts/flame/api/pipeline.py +++ b/openpype/hosts/flame/api/pipeline.py @@ -26,7 +26,6 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = 
os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") -INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") AVALON_CONTAINERS = "AVALON_CONTAINERS" @@ -34,12 +33,10 @@ log = Logger.get_logger(__name__) def install(): - pyblish.register_host("flame") pyblish.register_plugin_path(PUBLISH_PATH) register_loader_plugin_path(LOAD_PATH) avalon.register_plugin_path(LegacyCreator, CREATE_PATH) - avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH) log.info("OpenPype Flame plug-ins registred ...") # register callback for switching publishable @@ -47,6 +44,7 @@ def install(): log.info("OpenPype Flame host installed ...") + def uninstall(): pyblish.deregister_host("flame") @@ -54,7 +52,6 @@ def uninstall(): pyblish.deregister_plugin_path(PUBLISH_PATH) deregister_loader_plugin_path(LOAD_PATH) avalon.deregister_plugin_path(LegacyCreator, CREATE_PATH) - avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH) # register callback for switching publishable pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index eff126c0b6..131d8c98d4 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -28,7 +28,6 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish").replace("\\", "/") LOAD_PATH = os.path.join(PLUGINS_DIR, "load").replace("\\", "/") CREATE_PATH = os.path.join(PLUGINS_DIR, "create").replace("\\", "/") -INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory").replace("\\", "/") AVALON_CONTAINERS = ":AVALON_CONTAINERS" @@ -51,7 +50,6 @@ def install(): pyblish.register_plugin_path(PUBLISH_PATH) register_loader_plugin_path(LOAD_PATH) avalon.register_plugin_path(LegacyCreator, CREATE_PATH) - avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH) # register callback for switching publishable pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled) diff --git a/openpype/hosts/resolve/api/pipeline.py b/openpype/hosts/resolve/api/pipeline.py index fa309e3503..c538507c63 100644 --- a/openpype/hosts/resolve/api/pipeline.py +++ b/openpype/hosts/resolve/api/pipeline.py @@ -22,7 +22,6 @@ log = Logger().get_logger(__name__) PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") -INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") AVALON_CONTAINERS = ":AVALON_CONTAINERS" @@ -48,7 +47,6 @@ def install(): register_loader_plugin_path(LOAD_PATH) avalon.register_plugin_path(LegacyCreator, CREATE_PATH) - avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH) # register callback for switching publishable pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled) @@ -73,7 +71,6 @@ def uninstall(): deregister_loader_plugin_path(LOAD_PATH) avalon.deregister_plugin_path(LegacyCreator, CREATE_PATH) - avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH) # register callback for switching publishable pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled) From d16f8f1384b56030ccb8f68784d90565fb21b7a5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 13:15:48 +0100 Subject: [PATCH 082/196] moved remaining plugins/actions from avalon into openpype --- openpype/pipeline/__init__.py | 32 ++++++- openpype/pipeline/actions.py | 148 ++++++++++++++++++++++++++++++ 
openpype/pipeline/load/plugins.py | 1 + openpype/pipeline/thumbnails.py | 147 +++++++++++++++++++++++++++++ 4 files changed, 327 insertions(+), 1 deletion(-) create mode 100644 openpype/pipeline/actions.py create mode 100644 openpype/pipeline/thumbnails.py diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index 26970e4edc..80c9cafcab 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -41,6 +41,22 @@ from .publish import ( OpenPypePyblishPluginMixin ) +from .actions import ( + LauncherAction, + + InventoryAction, + + discover_launcher_actions, + register_launcher_action, + register_launcher_action_path, + + discover_inventory_actions, + register_inventory_action, + register_inventory_action_path, + deregister_inventory_action, + deregister_inventory_action_path, +) + __all__ = ( "attribute_definitions", @@ -82,5 +98,19 @@ __all__ = ( "PublishValidationError", "PublishXmlValidationError", "KnownPublishError", - "OpenPypePyblishPluginMixin" + "OpenPypePyblishPluginMixin", + + # --- Plugins --- + "LauncherAction", + "InventoryAction", + + "discover_launcher_actions", + "register_launcher_action", + "register_launcher_action_path", + + "discover_inventory_actions", + "register_inventory_action", + "register_inventory_action_path", + "deregister_inventory_action", + "deregister_inventory_action_path", ) diff --git a/openpype/pipeline/actions.py b/openpype/pipeline/actions.py new file mode 100644 index 0000000000..544acbc8d3 --- /dev/null +++ b/openpype/pipeline/actions.py @@ -0,0 +1,148 @@ +import os +import copy +import logging + + +class LauncherAction(object): + """A custom action available""" + name = None + label = None + icon = None + color = None + order = 0 + + log = logging.getLogger("LauncherAction") + log.propagate = True + + def is_compatible(self, session): + """Return whether the class is compatible with the Session.""" + return True + + def process(self, session, **kwargs): + pass + + +class InventoryAction(object): + """A custom action for the scene inventory tool + + If registered the action will be visible in the Right Mouse Button menu + under the submenu "Actions". + + """ + + label = None + icon = None + color = None + order = 0 + + log = logging.getLogger("InventoryAction") + log.propagate = True + + @staticmethod + def is_compatible(container): + """Override function in a custom class + + This method is specifically used to ensure the action can operate on + the container. + + Args: + container(dict): the data of a loaded asset, see host.ls() + + Returns: + bool + """ + return bool(container.get("objectName")) + + def process(self, containers): + """Override function in a custom class + + This method will receive all containers even those which are + incompatible. It is advised to create a small filter along the lines + of this example: + + valid_containers = filter(self.is_compatible(c) for c in containers) + + The return value will need to be a True-ish value to trigger + the data_changed signal in order to refresh the view. + + You can return a list of container names to trigger GUI to select + treeview items. + + You can return a dict to carry extra GUI options. For example: + { + "objectNames": [container names...], + "options": {"mode": "toggle", + "clear": False} + } + Currently workable GUI options are: + - clear (bool): Clear current selection before selecting by action. + Default `True`. + - mode (str): selection mode, use one of these: + "select", "deselect", "toggle". Default is "select". 
+ + Args: + containers (list): list of dictionaries + + Return: + bool, list or dict + + """ + return True + + +# Launcher action +def discover_launcher_actions(): + import avalon.api + + return avalon.api.discover(LauncherAction) + + +def register_launcher_action(plugin): + import avalon.api + + return avalon.api.register_plugin(LauncherAction, plugin) + + +def register_launcher_action_path(path): + import avalon.api + + return avalon.api.register_plugin_path(LauncherAction, path) + + +# Inventory action +def discover_inventory_actions(): + import avalon.api + + actions = avalon.api.discover(InventoryAction) + filtered_actions = [] + for action in actions: + if action is not InventoryAction: + print("DISCOVERED", action) + filtered_actions.append(action) + else: + print("GOT SOURCE") + return filtered_actions + + +def register_inventory_action(plugin): + import avalon.api + + return avalon.api.register_plugin(InventoryAction, plugin) + + +def deregister_inventory_action(plugin): + import avalon.api + + avalon.api.deregister_plugin(InventoryAction, plugin) + + +def register_inventory_action_path(path): + import avalon.api + + return avalon.api.register_plugin_path(InventoryAction, path) + + +def deregister_inventory_action_path(path): + import avalon.api + + return avalon.api.deregister_plugin_path(InventoryAction, path) diff --git a/openpype/pipeline/load/plugins.py b/openpype/pipeline/load/plugins.py index 601ad3b258..9b2b6bb084 100644 --- a/openpype/pipeline/load/plugins.py +++ b/openpype/pipeline/load/plugins.py @@ -127,4 +127,5 @@ def register_loader_plugin_path(path): def deregister_loader_plugin(plugin): import avalon.api + avalon.api.deregister_plugin(LoaderPlugin, plugin) diff --git a/openpype/pipeline/thumbnails.py b/openpype/pipeline/thumbnails.py new file mode 100644 index 0000000000..12bab83be6 --- /dev/null +++ b/openpype/pipeline/thumbnails.py @@ -0,0 +1,147 @@ +import os +import copy +import logging + +log = logging.getLogger(__name__) + + +def get_thumbnail_binary(thumbnail_entity, thumbnail_type, dbcon=None): + if not thumbnail_entity: + return + + resolvers = discover_thumbnail_resolvers() + resolvers = sorted(resolvers, key=lambda cls: cls.priority) + if dbcon is None: + from avalon import io + dbcon = io + + for Resolver in resolvers: + available_types = Resolver.thumbnail_types + if ( + thumbnail_type not in available_types + and "*" not in available_types + and ( + isinstance(available_types, (list, tuple)) + and len(available_types) == 0 + ) + ): + continue + try: + instance = Resolver(dbcon) + result = instance.process(thumbnail_entity, thumbnail_type) + if result: + return result + + except Exception: + log.warning("Resolver {0} failed durring process.".format( + Resolver.__class__.__name__, exc_info=True + )) + + +class ThumbnailResolver(object): + """Determine how to get data from thumbnail entity. + + "priority" - determines the order of processing in `get_thumbnail_binary`, + lower number is processed earlier. + "thumbnail_types" - it is expected that thumbnails will be used in more + more than one level, there is only ["thumbnail"] type at the moment + of creating this docstring but it is expected to add "ico" and "full" + in future. 
+ """ + + priority = 100 + thumbnail_types = ["*"] + + def __init__(self, dbcon): + self._log = None + self.dbcon = dbcon + + @property + def log(self): + if self._log is None: + self._log = logging.getLogger(self.__class__.__name__) + return self._log + + def process(self, thumbnail_entity, thumbnail_type): + pass + + +class TemplateResolver(ThumbnailResolver): + + priority = 90 + + def process(self, thumbnail_entity, thumbnail_type): + + if not os.environ.get("AVALON_THUMBNAIL_ROOT"): + return + + template = thumbnail_entity["data"].get("template") + if not template: + self.log.debug("Thumbnail entity does not have set template") + return + + project = self.dbcon.find_one( + {"type": "project"}, + { + "name": True, + "data.code": True + } + ) + + template_data = copy.deepcopy( + thumbnail_entity["data"].get("template_data") or {} + ) + template_data.update({ + "_id": str(thumbnail_entity["_id"]), + "thumbnail_type": thumbnail_type, + "thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"), + "project": { + "name": project["name"], + "code": project["data"].get("code") + } + }) + + try: + filepath = os.path.normpath(template.format(**template_data)) + except KeyError: + self.log.warning(( + "Missing template data keys for template <{0}> || Data: {1}" + ).format(template, str(template_data))) + return + + if not os.path.exists(filepath): + self.log.warning("File does not exist \"{0}\"".format(filepath)) + return + + with open(filepath, "rb") as _file: + content = _file.read() + + return content + + +class BinaryThumbnail(ThumbnailResolver): + def process(self, thumbnail_entity, thumbnail_type): + return thumbnail_entity["data"].get("binary_data") + + +# Thumbnail resolvers +def discover_thumbnail_resolvers(): + import avalon.api + + return avalon.api.discover(ThumbnailResolver) + + +def register_thumbnail_resolver(plugin): + import avalon.api + + return avalon.api.register_plugin(ThumbnailResolver, plugin) + + +def register_thumbnail_resolver_path(path): + import avalon.api + + return avalon.api.register_plugin_path(ThumbnailResolver, path) + + +register_thumbnail_resolver(TemplateResolver) +register_thumbnail_resolver(BinaryThumbnail) From 0710540aa482053612e023ccf0388623ee1afe85 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 13:17:19 +0100 Subject: [PATCH 083/196] changed imports of moved plugins/actions --- openpype/__init__.py | 3 ++- openpype/hosts/fusion/api/pipeline.py | 8 ++++---- .../plugins/inventory/select_containers.py | 4 ++-- .../plugins/inventory/set_tool_color.py | 4 ++-- openpype/hosts/maya/api/pipeline.py | 8 ++++---- .../plugins/inventory/import_modelrender.py | 6 ++++-- .../plugins/inventory/import_reference.py | 5 ++--- openpype/hosts/nuke/api/pipeline.py | 5 ++++- .../plugins/inventory/repair_old_loaders.py | 4 ++-- .../plugins/inventory/select_containers.py | 4 ++-- openpype/tools/launcher/actions.py | 19 +++++++++++++------ openpype/tools/launcher/models.py | 5 +++-- openpype/tools/loader/widgets.py | 5 +++-- openpype/tools/sceneinventory/view.py | 5 +++-- 14 files changed, 50 insertions(+), 35 deletions(-) diff --git a/openpype/__init__.py b/openpype/__init__.py index 99629a4257..8b94b2dc3f 100644 --- a/openpype/__init__.py +++ b/openpype/__init__.py @@ -78,6 +78,7 @@ def install(): from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, + register_inventory_action, ) from avalon import pipeline @@ -124,7 +125,7 @@ def install(): pyblish.register_plugin_path(path) register_loader_plugin_path(path) 
avalon.register_plugin_path(LegacyCreator, path) - avalon.register_plugin_path(avalon.InventoryAction, path) + register_inventory_action(path) # apply monkey patched discover to original one log.info("Patching discovery") diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 92e54ad6f5..51442d23ff 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -15,6 +15,8 @@ from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + register_inventory_action_path, + deregister_inventory_action_path, ) import openpype.hosts.fusion @@ -69,7 +71,7 @@ def install(): register_loader_plugin_path(LOAD_PATH) avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH) - avalon.api.register_plugin_path(avalon.api.InventoryAction, INVENTORY_PATH) + register_inventory_action_path(INVENTORY_PATH) pyblish.api.register_callback( "instanceToggled", on_pyblish_instance_toggled @@ -93,9 +95,7 @@ def uninstall(): deregister_loader_plugin_path(LOAD_PATH) avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH) - avalon.api.deregister_plugin_path( - avalon.api.InventoryAction, INVENTORY_PATH - ) + deregister_inventory_action_path(INVENTORY_PATH) pyblish.api.deregister_callback( "instanceToggled", on_pyblish_instance_toggled diff --git a/openpype/hosts/fusion/plugins/inventory/select_containers.py b/openpype/hosts/fusion/plugins/inventory/select_containers.py index 294c134505..d554b73a5b 100644 --- a/openpype/hosts/fusion/plugins/inventory/select_containers.py +++ b/openpype/hosts/fusion/plugins/inventory/select_containers.py @@ -1,7 +1,7 @@ -from avalon import api +from openpype.pipeline import InventoryAction -class FusionSelectContainers(api.InventoryAction): +class FusionSelectContainers(InventoryAction): label = "Select Containers" icon = "mouse-pointer" diff --git a/openpype/hosts/fusion/plugins/inventory/set_tool_color.py b/openpype/hosts/fusion/plugins/inventory/set_tool_color.py index 2f5ae4d241..c7530ce674 100644 --- a/openpype/hosts/fusion/plugins/inventory/set_tool_color.py +++ b/openpype/hosts/fusion/plugins/inventory/set_tool_color.py @@ -1,6 +1,6 @@ -from avalon import api from Qt import QtGui, QtWidgets +from openpype.pipeline import InventoryAction from openpype import style from openpype.hosts.fusion.api import ( get_current_comp, @@ -8,7 +8,7 @@ from openpype.hosts.fusion.api import ( ) -class FusionSetToolColor(api.InventoryAction): +class FusionSetToolColor(InventoryAction): """Update the color of the selected tools""" label = "Set Tool Color" diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 5cdc3ff4fd..3c09417b21 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -23,7 +23,9 @@ from openpype.lib.path_tools import HostDirmap from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, + register_inventory_action_path, deregister_loader_plugin_path, + deregister_inventory_action_path, ) from openpype.hosts.maya.lib import copy_workspace_mel from . import menu, lib @@ -59,7 +61,7 @@ def install(): register_loader_plugin_path(LOAD_PATH) avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH) - avalon.api.register_plugin_path(avalon.api.InventoryAction, INVENTORY_PATH) + register_inventory_action_path(INVENTORY_PATH) log.info(PUBLISH_PATH) log.info("Installing callbacks ... 
") @@ -188,9 +190,7 @@ def uninstall(): deregister_loader_plugin_path(LOAD_PATH) avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH) - avalon.api.deregister_plugin_path( - avalon.api.InventoryAction, INVENTORY_PATH - ) + deregister_inventory_action_path(INVENTORY_PATH) menu.uninstall() diff --git a/openpype/hosts/maya/plugins/inventory/import_modelrender.py b/openpype/hosts/maya/plugins/inventory/import_modelrender.py index c5d3d0c8f4..8fc26930cb 100644 --- a/openpype/hosts/maya/plugins/inventory/import_modelrender.py +++ b/openpype/hosts/maya/plugins/inventory/import_modelrender.py @@ -1,6 +1,8 @@ import json -from avalon import api, io +from avalon import io + from openpype.pipeline import ( + InventoryAction, get_representation_context, get_representation_path_from_context, ) @@ -10,7 +12,7 @@ from openpype.hosts.maya.api.lib import ( ) -class ImportModelRender(api.InventoryAction): +class ImportModelRender(InventoryAction): label = "Import Model Render Sets" icon = "industry" diff --git a/openpype/hosts/maya/plugins/inventory/import_reference.py b/openpype/hosts/maya/plugins/inventory/import_reference.py index 2fa132a867..afb1e0e17f 100644 --- a/openpype/hosts/maya/plugins/inventory/import_reference.py +++ b/openpype/hosts/maya/plugins/inventory/import_reference.py @@ -1,11 +1,10 @@ from maya import cmds -from avalon import api - +from openpype.pipeline import InventoryAction from openpype.hosts.maya.api.plugin import get_reference_node -class ImportReference(api.InventoryAction): +class ImportReference(InventoryAction): """Imports selected reference to inside of the file.""" label = "Import Reference" diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index fd2e16b8d3..fef4a1d401 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -18,7 +18,9 @@ from openpype.lib import register_event_callback from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, + register_inventory_action_path, deregister_loader_plugin_path, + deregister_inventory_action_path, ) from openpype.tools.utils import host_tools @@ -105,7 +107,7 @@ def install(): pyblish.api.register_plugin_path(PUBLISH_PATH) register_loader_plugin_path(LOAD_PATH) avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH) - avalon.api.register_plugin_path(avalon.api.InventoryAction, INVENTORY_PATH) + register_inventory_action_path(INVENTORY_PATH) # Register Avalon event for workfiles loading. 
register_event_callback("workio.open_file", check_inventory_versions) @@ -131,6 +133,7 @@ def uninstall(): pyblish.api.deregister_plugin_path(PUBLISH_PATH) deregister_loader_plugin_path(LOAD_PATH) avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH) + deregister_inventory_action_path(INVENTORY_PATH) pyblish.api.deregister_callback( "instanceToggled", on_pyblish_instance_toggled) diff --git a/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py b/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py index 5f834be557..c04c939a8d 100644 --- a/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py +++ b/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py @@ -1,9 +1,9 @@ -from avalon import api from openpype.api import Logger +from openpype.pipeline import InventoryAction from openpype.hosts.nuke.api.lib import set_avalon_knob_data -class RepairOldLoaders(api.InventoryAction): +class RepairOldLoaders(InventoryAction): label = "Repair Old Loaders" icon = "gears" diff --git a/openpype/hosts/nuke/plugins/inventory/select_containers.py b/openpype/hosts/nuke/plugins/inventory/select_containers.py index 3f174b3562..d7d5f00b87 100644 --- a/openpype/hosts/nuke/plugins/inventory/select_containers.py +++ b/openpype/hosts/nuke/plugins/inventory/select_containers.py @@ -1,8 +1,8 @@ -from avalon import api +from openpype.pipeline import InventoryAction from openpype.hosts.nuke.api.commands import viewer_update_and_undo_stop -class SelectContainers(api.InventoryAction): +class SelectContainers(InventoryAction): label = "Select Containers" icon = "mouse-pointer" diff --git a/openpype/tools/launcher/actions.py b/openpype/tools/launcher/actions.py index fbaef05261..546bda1c34 100644 --- a/openpype/tools/launcher/actions.py +++ b/openpype/tools/launcher/actions.py @@ -1,6 +1,7 @@ import os -from avalon import api +from Qt import QtWidgets, QtGui + from openpype import PLUGINS_DIR from openpype import style from openpype.api import Logger, resources @@ -8,7 +9,10 @@ from openpype.lib import ( ApplictionExecutableNotFound, ApplicationLaunchFailed ) -from Qt import QtWidgets, QtGui +from openpype.pipeline import ( + LauncherAction, + register_launcher_action_path, +) def register_actions_from_paths(paths): @@ -29,14 +33,15 @@ def register_actions_from_paths(paths): print("Path was not found: {}".format(path)) continue - api.register_plugin_path(api.Action, path) + register_launcher_action_path(path) def register_config_actions(): """Register actions from the configuration for Launcher""" actions_dir = os.path.join(PLUGINS_DIR, "actions") - register_actions_from_paths([actions_dir]) + if os.path.exists(actions_dir): + register_actions_from_paths([actions_dir]) def register_environment_actions(): @@ -46,7 +51,9 @@ def register_environment_actions(): register_actions_from_paths(paths_str.split(os.pathsep)) -class ApplicationAction(api.Action): +# TODO move to 'openpype.pipeline.actions' +# - remove Qt related stuff and implement exceptions to show error in launcher +class ApplicationAction(LauncherAction): """Pype's application launcher Application action based on pype's ApplicationManager system. 
@@ -74,7 +81,7 @@ class ApplicationAction(api.Action): @property def log(self): if self._log is None: - self._log = Logger().get_logger(self.__class__.__name__) + self._log = Logger.get_logger(self.__class__.__name__) return self._log def is_compatible(self, session): diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 85d553fca4..13567e7916 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -8,12 +8,13 @@ import time import appdirs from Qt import QtCore, QtGui import qtawesome -from avalon import api + from openpype.lib import JSONSettingRegistry from openpype.lib.applications import ( CUSTOM_LAUNCH_APP_GROUPS, ApplicationManager ) +from openpype.pipeline import discover_launcher_actions from openpype.tools.utils.lib import ( DynamicQThread, get_project_icon, @@ -68,7 +69,7 @@ class ActionModel(QtGui.QStandardItemModel): def discover(self): """Set up Actions cache. Run this for each new project.""" # Discover all registered actions - actions = api.discover(api.Action) + actions = discover_launcher_actions() # Get available project actions and the application actions app_actions = self.get_application_actions() diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index b14bdd0e93..a4c7d4bd24 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -7,9 +7,10 @@ import collections from Qt import QtWidgets, QtCore, QtGui -from avalon import api, pipeline +from avalon import api from openpype.pipeline import HeroVersionType +from openpype.pipeline.thumbnails import get_thumbnail_binary from openpype.pipeline.load import ( discover_loader_plugins, SubsetLoaderPlugin, @@ -863,7 +864,7 @@ class ThumbnailWidget(QtWidgets.QLabel): if not thumbnail_ent: return - thumbnail_bin = pipeline.get_thumbnail_binary( + thumbnail_bin = get_thumbnail_binary( thumbnail_ent, "thumbnail", self.dbcon ) if not thumbnail_bin: diff --git a/openpype/tools/sceneinventory/view.py b/openpype/tools/sceneinventory/view.py index c38390c614..2f9996a4ae 100644 --- a/openpype/tools/sceneinventory/view.py +++ b/openpype/tools/sceneinventory/view.py @@ -5,13 +5,14 @@ from functools import partial from Qt import QtWidgets, QtCore import qtawesome -from avalon import io, api +from avalon import io from openpype import style from openpype.pipeline import ( HeroVersionType, update_container, remove_container, + discover_inventory_actions, ) from openpype.modules import ModulesManager from openpype.tools.utils.lib import ( @@ -487,7 +488,7 @@ class SceneInventoryView(QtWidgets.QTreeView): containers = containers or [dict()] # Check which action will be available in the menu - Plugins = api.discover(api.InventoryAction) + Plugins = discover_inventory_actions() compatible = [p() for p in Plugins if any(p.is_compatible(c) for c in containers)] From 5fcc6a035891a3c4ecddd3ce24f5c3592778e96f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 13:37:24 +0100 Subject: [PATCH 084/196] remove unused imports --- openpype/pipeline/actions.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/pipeline/actions.py b/openpype/pipeline/actions.py index 544acbc8d3..a045c92aa7 100644 --- a/openpype/pipeline/actions.py +++ b/openpype/pipeline/actions.py @@ -1,5 +1,3 @@ -import os -import copy import logging From b2b129d9e89e6daf4e189055e6580310e718df4c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 18 Mar 2022 13:49:03 +0100 Subject: [PATCH 085/196] nuke: simplification of simplified code 
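For context, the presets filtered here come from project settings. A rough sketch of the shape one output preset is expected to have, based only on the keys this plugin reads (values are hypothetical; the existing "sebsets" key spelling is kept as-is):

    # Illustrative only: one output preset as read by the extractor below.
    # Key names follow the plugin code; values are made up for the example.
    output_preset = {
        "filter": {
            "families": ["render"],
            "task_types": ["Compositing"],
            "sebsets": ["renderMain.*"],  # regex patterns tested against the subset name
        },
        # ...other preset options omitted
    }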
--- .../publish/extract_review_data_mov.py | 25 ++++++------------- 1 file changed, 8 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index f5bb03fc69..93615fb23d 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -53,27 +53,18 @@ class ExtractReviewDataMov(openpype.api.Extractor): # test if family found in context # using intersection to make sure all defined # families are present in combinantion - test_families = not f_families or any( - set(families).intersection(f_families)) + if f_families and not any( + set(families).intersection(f_families)): + continue # test task types from filter - test_task_types = not f_task_types or any( - task_type in f_task_types) + if f_task_types and not any( + task_type in f_task_types): + continue # test subsets from filter - test_subsets = not f_subsets or any( - re.search(s, subset) for s in f_subsets) - - # we need all filters to be positive for this - # preset to be activated - test_all = all([ - test_families, - test_task_types, - test_subsets - ]) - - # if it is not positive then skip this preset - if not test_all: + if f_subsets and not any( + re.search(s, subset) for s in f_subsets): continue self.log.info( From 945bdc5a9b8689110c7c6dfd3141b894ee441b50 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 18 Mar 2022 14:04:01 +0100 Subject: [PATCH 086/196] nuke: fixing any to bool - adding family to testing families - adding debug logging for filtering --- .../plugins/publish/extract_review_data_mov.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index 93615fb23d..22ebdda05d 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -25,6 +25,8 @@ class ExtractReviewDataMov(openpype.api.Extractor): def process(self, instance): families = instance.data["families"] + families.append(instance.data["family"]) + task_type = instance.context.data["taskType"] subset = instance.data["subset"] self.log.info("Creating staging dir...") @@ -50,6 +52,18 @@ class ExtractReviewDataMov(openpype.api.Extractor): f_task_types = o_data["filter"]["task_types"] f_subsets = o_data["filter"]["sebsets"] + self.log.debug( + "f_families `{}` > families: {}".format( + f_families, families)) + + self.log.debug( + "f_task_types `{}` > task_type: {}".format( + f_task_types, task_type)) + + self.log.debug( + "f_subsets `{}` > subset: {}".format( + f_subsets, subset)) + # test if family found in context # using intersection to make sure all defined # families are present in combinantion @@ -58,7 +72,7 @@ class ExtractReviewDataMov(openpype.api.Extractor): continue # test task types from filter - if f_task_types and not any( + if f_task_types and not bool( task_type in f_task_types): continue From f03e5974931df57e3ac844e690d5f134754c52aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Fri, 18 Mar 2022 14:05:32 +0100 Subject: [PATCH 087/196] Update openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py Co-authored-by: Roy Nieterau --- openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index 22ebdda05d..6f6e07fc28 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -66,7 +66,7 @@ class ExtractReviewDataMov(openpype.api.Extractor): # test if family found in context # using intersection to make sure all defined - # families are present in combinantion + # families are present in combination if f_families and not any( set(families).intersection(f_families)): continue From e8d0839cafb5dac3c8d6b5d5b4f524db864d80f0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 18 Mar 2022 14:06:12 +0100 Subject: [PATCH 088/196] Change label to 'Actions' Co-authored-by: Roy Nieterau --- openpype/pipeline/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index 80c9cafcab..2ee8d4f118 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -100,7 +100,7 @@ __all__ = ( "KnownPublishError", "OpenPypePyblishPluginMixin", - # --- Plugins --- + # --- Actions --- "LauncherAction", "InventoryAction", From 6eaf7017eb66d85ca0089a84dbd63ebd874cf9f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 14:20:20 +0100 Subject: [PATCH 089/196] replaced 'format_template_with_optional_keys' with 'StringTemplate' --- .../plugins/publish/collect_texture.py | 17 ++++++++++------- .../tvpaint/plugins/load/load_workfile.py | 18 ++++++++++-------- openpype/lib/delivery.py | 15 ++++++++------- .../action_delete_old_versions.py | 14 ++++++-------- openpype/pipeline/load/utils.py | 9 +++++---- openpype/plugins/publish/integrate_new.py | 12 +++++++----- 6 files changed, 46 insertions(+), 39 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_texture.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_texture.py index ea0b6cdf41..c1c48ec72d 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_texture.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_texture.py @@ -3,9 +3,10 @@ import re import pyblish.api import json -from avalon.api import format_template_with_optional_keys - -from openpype.lib import prepare_template_data +from openpype.lib import ( + prepare_template_data, + StringTemplate, +) class CollectTextures(pyblish.api.ContextPlugin): @@ -110,8 +111,9 @@ class CollectTextures(pyblish.api.ContextPlugin): formatting_data.update(explicit_data) fill_pairs = prepare_template_data(formatting_data) - workfile_subset = format_template_with_optional_keys( - fill_pairs, self.workfile_subset_template) + workfile_subset = StringTemplate.format_strict_template( + self.workfile_subset_template, fill_pairs + ) asset_build = self._get_asset_build( repre_file, @@ -201,8 +203,9 @@ class CollectTextures(pyblish.api.ContextPlugin): formatting_data.update(explicit_data) fill_pairs = prepare_template_data(formatting_data) - subset = format_template_with_optional_keys( - fill_pairs, self.texture_subset_template) + subset = StringTemplate.format_strict_template( + self.texture_subset_template, fill_pairs + ) asset_build = self._get_asset_build( repre_file, diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index 33e2a76cc9..11219320ca 100644 --- 
a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -4,7 +4,8 @@ import os from avalon import api, io from openpype.lib import ( get_workfile_template_key_from_context, - get_workdir_data + get_workdir_data, + StringTemplate, ) from openpype.api import Anatomy from openpype.hosts.tvpaint.api import lib, pipeline, plugin @@ -69,7 +70,7 @@ class LoadWorkfile(plugin.Loader): data["root"] = anatomy.roots data["user"] = getpass.getuser() - template = anatomy.templates[template_key]["file"] + file_template = anatomy.templates[template_key]["file"] # Define saving file extension if current_file: @@ -81,11 +82,12 @@ class LoadWorkfile(plugin.Loader): data["ext"] = extension - work_root = api.format_template_with_optional_keys( - data, anatomy.templates[template_key]["folder"] + folder_template = anatomy.templates[template_key]["folder"] + work_root = StringTemplate.format_strict_template( + folder_template, data ) version = api.last_workfile_with_version( - work_root, template, data, host.file_extensions() + work_root, file_template, data, host.file_extensions() )[1] if version is None: @@ -95,8 +97,8 @@ class LoadWorkfile(plugin.Loader): data["version"] = version - path = os.path.join( - work_root, - api.format_template_with_optional_keys(data, template) + filename = StringTemplate.format_strict_template( + file_template, data ) + path = os.path.join(work_root, filename) host.save_file(path) diff --git a/openpype/lib/delivery.py b/openpype/lib/delivery.py index 03abe5802c..ffcfe9fa4d 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -5,6 +5,11 @@ import glob import clique import collections +from .path_templates import ( + StringTemplate, + TemplateUnsolved, +) + def collect_frames(files): """ @@ -52,8 +57,6 @@ def sizeof_fmt(num, suffix='B'): def path_from_representation(representation, anatomy): - from avalon import pipeline # safer importing - try: template = representation["data"]["template"] @@ -63,12 +66,10 @@ def path_from_representation(representation, anatomy): try: context = representation["context"] context["root"] = anatomy.roots - path = pipeline.format_template_with_optional_keys( - context, template - ) - path = os.path.normpath(path.replace("/", "\\")) + path = StringTemplate.format_strict_template(template, context) + return os.path.normpath(path) - except KeyError: + except TemplateUnsolved: # Template references unavailable data return None diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py index c66d1819ac..1b694e25f1 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py @@ -5,11 +5,11 @@ import uuid import clique from pymongo import UpdateOne -from openpype_modules.ftrack.lib import BaseAction, statics_icon from avalon.api import AvalonMongoDB -from openpype.api import Anatomy -import avalon.pipeline +from openpype.api import Anatomy +from openpype.lib import StringTemplate, TemplateUnsolved +from openpype_modules.ftrack.lib import BaseAction, statics_icon class DeleteOldVersions(BaseAction): @@ -563,18 +563,16 @@ class DeleteOldVersions(BaseAction): try: context = representation["context"] context["root"] = anatomy.roots - path = avalon.pipeline.format_template_with_optional_keys( - context, template - ) + path = StringTemplate.format_strict_template(template, context) 
if "frame" in context: context["frame"] = self.sequence_splitter sequence_path = os.path.normpath( - avalon.pipeline.format_template_with_optional_keys( + StringTemplate.format_strict_template( context, template ) ) - except KeyError: + except (KeyError, TemplateUnsolved): # Template references unavailable data return (None, None) diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 118f86a570..6d32c11cd7 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -525,7 +525,7 @@ def get_representation_path(representation, root=None, dbcon=None): """ - from openpype.lib import StringTemplate + from openpype.lib import StringTemplate, TemplateUnsolved if dbcon is None: dbcon = io @@ -542,13 +542,14 @@ def get_representation_path(representation, root=None, dbcon=None): try: context = representation["context"] context["root"] = root - template_obj = StringTemplate(template) - path = str(template_obj.format(context)) + path = StringTemplate.format_strict_template( + template, context + ) # Force replacing backslashes with forward slashed if not on # windows if platform.system().lower() != "windows": path = path.replace("\\", "/") - except KeyError: + except (TemplateUnsolved, KeyError): # Template references unavailable data return None diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index e8dab089af..6ca6125cb2 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -12,14 +12,15 @@ import shutil from pymongo import DeleteOne, InsertOne import pyblish.api from avalon import io -from avalon.api import format_template_with_optional_keys import openpype.api from datetime import datetime # from pype.modules import ModulesManager from openpype.lib.profiles_filtering import filter_profiles from openpype.lib import ( prepare_template_data, - create_hard_link + create_hard_link, + StringTemplate, + TemplateUnsolved ) # this is needed until speedcopy for linux is fixed @@ -854,9 +855,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): fill_pairs = prepare_template_data(fill_pairs) try: - filled_template = \ - format_template_with_optional_keys(fill_pairs, template) - except KeyError: + filled_template = StringTemplate.format_strict_template( + template, fill_pairs + ) + except (KeyError, TemplateUnsolved): keys = [] if fill_pairs: keys = fill_pairs.keys() From e961144969dccb207d6d7e7e2d270a7b5b45fbec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 14:55:53 +0100 Subject: [PATCH 090/196] moved functions to get last workfile into avalon context lib functions --- openpype/lib/avalon_context.py | 126 ++++++++++++++++++++++++++++++++- 1 file changed, 123 insertions(+), 3 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 26beba41ee..0b1d09908c 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -16,6 +16,7 @@ from openpype.settings import ( from .anatomy import Anatomy from .profiles_filtering import filter_profiles from .events import emit_event +from .path_templates import StringTemplate # avalon module is not imported at the top # - may not be in path at the time of pype.lib initialization @@ -1735,8 +1736,6 @@ def get_custom_workfile_template_by_context( context. (Existence of formatted path is not validated.) 
""" - from openpype.lib import filter_profiles - if anatomy is None: anatomy = Anatomy(project_doc["name"]) @@ -1759,7 +1758,9 @@ def get_custom_workfile_template_by_context( # there are some anatomy template strings if matching_item: template = matching_item["path"][platform.system().lower()] - return template.format(**anatomy_context_data) + return StringTemplate.format_strict_template( + template, anatomy_context_data + ) return None @@ -1847,3 +1848,122 @@ def get_custom_workfile_template(template_profiles): io.Session["AVALON_TASK"], io ) + + +def get_last_workfile_with_version( + workdir, file_template, fill_data, extensions +): + """Return last workfile version. + + Args: + workdir(str): Path to dir where workfiles are stored. + file_template(str): Template of file name. + fill_data(dict): Data for filling template. + extensions(list, tuple): All allowed file extensions of workfile. + + Returns: + tuple: Last workfile with version if there is any otherwise + returns (None, None). + """ + if not os.path.exists(workdir): + return None, None + + # Fast match on extension + filenames = [ + filename + for filename in os.listdir(workdir) + if os.path.splitext(filename)[1] in extensions + ] + + # Build template without optionals, version to digits only regex + # and comment to any definable value. + _ext = [] + for ext in extensions: + if not ext.startswith("."): + ext = "." + ext + # Escape dot for regex + ext = "\\" + ext + _ext.append(ext) + ext_expression = "(?:" + "|".join(_ext) + ")" + + # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end + file_template = re.sub(r"\.?{ext}", ext_expression, file_template) + # Replace optional keys with optional content regex + file_template = re.sub(r"<.*?>", r".*?", file_template) + # Replace `{version}` with group regex + file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) + file_template = re.sub(r"{comment.*?}", r".+?", file_template) + filename = StringTemplate.format_strict_template(file_template, fill_data) + + # Match with ignore case on Windows due to the Windows + # OS not being case-sensitive. This avoids later running + # into the error that the file did exist if it existed + # with a different upper/lower-case. + kwargs = {} + if platform.system().lower() == "windows": + kwargs["flags"] = re.IGNORECASE + + # Get highest version among existing matching files + version = None + output_filenames = [] + for filename in sorted(filenames): + match = re.match(file_template, filename, **kwargs) + if not match: + continue + + file_version = int(match.group(1)) + if version is None or file_version > version: + output_filenames[:] = [] + version = file_version + + if file_version == version: + output_filenames.append(filename) + + output_filename = None + if output_filenames: + if len(output_filenames) == 1: + output_filename = output_filenames[0] + else: + last_time = None + for _output_filename in output_filenames: + full_path = os.path.join(workdir, _output_filename) + mod_time = os.path.getmtime(full_path) + if last_time is None or last_time < mod_time: + output_filename = _output_filename + last_time = mod_time + + return output_filename, version + + +def get_last_workfile( + workdir, file_template, fill_data, extensions, full_path=False +): + """Return last workfile filename. + + Returns file with version 1 if there is not workfile yet. + + Args: + workdir(str): Path to dir where workfiles are stored. + file_template(str): Template of file name. + fill_data(dict): Data for filling template. 
+ extensions(list, tuple): All allowed file extensions of workfile. + full_path(bool): Full path to file is returned if set to True. + + Returns: + str: Last or first workfile as filename of full path to filename. + """ + filename, version = get_last_workfile_with_version( + workdir, file_template, fill_data, extensions + ) + if filename is None: + data = copy.deepcopy(fill_data) + data["version"] = 1 + data.pop("comment", None) + if not data.get("ext"): + data["ext"] = extensions[0] + filename = StringTemplate.format_strict_template(file_template, data) + + if full_path: + return os.path.normpath(os.path.join(workdir, filename)) + + return filename From 65bc619bcb1238ef917060c46e31f771dec6d9c7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 14:57:02 +0100 Subject: [PATCH 091/196] use moved workfile functions --- openpype/hosts/tvpaint/plugins/load/load_workfile.py | 7 +++---- openpype/lib/__init__.py | 4 ++++ openpype/lib/applications.py | 5 +++-- openpype/tools/workfiles/app.py | 8 ++++---- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index 11219320ca..d224cfc390 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -1,11 +1,11 @@ -import getpass import os from avalon import api, io from openpype.lib import ( + StringTemplate, get_workfile_template_key_from_context, get_workdir_data, - StringTemplate, + get_last_workfile_with_version, ) from openpype.api import Anatomy from openpype.hosts.tvpaint.api import lib, pipeline, plugin @@ -68,7 +68,6 @@ class LoadWorkfile(plugin.Loader): data = get_workdir_data(project_doc, asset_doc, task_name, host_name) data["root"] = anatomy.roots - data["user"] = getpass.getuser() file_template = anatomy.templates[template_key]["file"] @@ -86,7 +85,7 @@ class LoadWorkfile(plugin.Loader): work_root = StringTemplate.format_strict_template( folder_template, data ) - version = api.last_workfile_with_version( + version = get_last_workfile_with_version( work_root, file_template, data, host.file_extensions() )[1] diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index b8502ae718..1ebafbb2d2 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -114,6 +114,8 @@ from .avalon_context import ( get_workdir_data, get_workdir, get_workdir_with_workdir_data, + get_last_workfile_with_version, + get_last_workfile, create_workfile_doc, save_workfile_data_to_doc, @@ -263,6 +265,8 @@ __all__ = [ "get_workdir_data", "get_workdir", "get_workdir_with_workdir_data", + "get_last_workfile_with_version", + "get_last_workfile", "create_workfile_doc", "save_workfile_data_to_doc", diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index ef175ac89a..557c016d74 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -28,7 +28,8 @@ from .local_settings import get_openpype_username from .avalon_context import ( get_workdir_data, get_workdir_with_workdir_data, - get_workfile_template_key + get_workfile_template_key, + get_last_workfile ) from .python_module_tools import ( @@ -1609,7 +1610,7 @@ def _prepare_last_workfile(data, workdir): "ext": extensions[0] }) - last_workfile_path = avalon.api.last_workfile( + last_workfile_path = get_last_workfile( workdir, file_template, workdir_data, extensions, True ) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 
da5524331a..713992bc4b 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -2,7 +2,6 @@ import sys import os import re import copy -import getpass import shutil import logging import datetime @@ -27,7 +26,8 @@ from openpype.lib import ( save_workfile_data_to_doc, get_workfile_template_key, create_workdir_extra_folders, - get_workdir_data + get_workdir_data, + get_last_workfile_with_version ) from openpype.lib.avalon_context import ( update_current_task, @@ -441,7 +441,7 @@ class NameWindow(QtWidgets.QDialog): data["ext"] = data["ext"][1:] - version = api.last_workfile_with_version( + version = get_last_workfile_with_version( self.root, template, data, extensions )[1] @@ -469,7 +469,7 @@ class NameWindow(QtWidgets.QDialog): # Log warning if idx == 0: log.warning(( - "BUG: Function `last_workfile_with_version` " + "BUG: Function `get_last_workfile_with_version` " "didn't return last version." )) # Raise exception if even 100 version fallback didn't help From 050851731ad35edaeb5ee59fd747b9feab0e67b3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 15:19:34 +0100 Subject: [PATCH 092/196] renamed 'thumbnails.py' to 'thumbnail.py' --- openpype/pipeline/{thumbnails.py => thumbnail.py} | 0 openpype/tools/loader/widgets.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename openpype/pipeline/{thumbnails.py => thumbnail.py} (100%) diff --git a/openpype/pipeline/thumbnails.py b/openpype/pipeline/thumbnail.py similarity index 100% rename from openpype/pipeline/thumbnails.py rename to openpype/pipeline/thumbnail.py diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index a4c7d4bd24..2de43cf42a 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -10,7 +10,7 @@ from Qt import QtWidgets, QtCore, QtGui from avalon import api from openpype.pipeline import HeroVersionType -from openpype.pipeline.thumbnails import get_thumbnail_binary +from openpype.pipeline.thumbnail import get_thumbnail_binary from openpype.pipeline.load import ( discover_loader_plugins, SubsetLoaderPlugin, From 4a8a7b86889d4af5dd9662b1331320a89c674c94 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 15:34:30 +0100 Subject: [PATCH 093/196] add headless argument --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 82c2494e7a..eeb1f7744c 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -46,6 +46,7 @@ def inject_openpype_environment(deadlinePlugin): args = [ openpype_app, + "--headless", 'extractenvironments', export_url ] From 10c7fb21e48cdb281102068a6c3e2acf49feb1af Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 15:34:42 +0100 Subject: [PATCH 094/196] use headless in submit publish job --- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 06505b4b47..fad4d14ea0 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -236,6 +236,7 @@ class 
ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): environment["OPENPYPE_MONGO"] = mongo_url args = [ + "--headless", 'publish', roothless_metadata_path, "--targets", "deadline", From 89bdf2965cd8c92608f65cfd302d24ec5e4bcc5c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 15:54:19 +0100 Subject: [PATCH 095/196] moved AVALON_CONTAINER_ID from avalon into openpype --- openpype/hosts/aftereffects/api/pipeline.py | 5 +++-- openpype/hosts/blender/api/pipeline.py | 3 +-- openpype/hosts/blender/plugins/load/load_abc.py | 7 +++++-- openpype/hosts/blender/plugins/load/load_audio.py | 6 ++++-- openpype/hosts/blender/plugins/load/load_camera_blend.py | 6 ++++-- openpype/hosts/blender/plugins/load/load_camera_fbx.py | 6 ++++-- openpype/hosts/blender/plugins/load/load_fbx.py | 6 ++++-- openpype/hosts/blender/plugins/load/load_layout_blend.py | 2 +- openpype/hosts/blender/plugins/load/load_layout_json.py | 2 +- openpype/hosts/blender/plugins/load/load_model.py | 6 ++++-- openpype/hosts/blender/plugins/load/load_rig.py | 2 +- openpype/hosts/flame/api/pipeline.py | 3 ++- openpype/hosts/fusion/api/pipeline.py | 2 +- openpype/hosts/harmony/api/pipeline.py | 2 +- openpype/hosts/hiero/api/pipeline.py | 3 ++- openpype/hosts/houdini/api/pipeline.py | 2 +- openpype/hosts/houdini/plugins/load/load_image.py | 3 ++- openpype/hosts/houdini/plugins/load/load_usd_layer.py | 5 +++-- openpype/hosts/houdini/plugins/load/load_usd_reference.py | 5 +++-- openpype/hosts/maya/api/pipeline.py | 2 +- openpype/hosts/maya/api/plugin.py | 2 +- .../hosts/maya/plugins/publish/extract_maya_scene_raw.py | 2 +- openpype/hosts/nuke/api/pipeline.py | 4 ++-- openpype/hosts/photoshop/api/pipeline.py | 5 +++-- openpype/hosts/resolve/api/pipeline.py | 2 +- openpype/hosts/tvpaint/api/pipeline.py | 2 +- openpype/hosts/unreal/api/pipeline.py | 2 +- .../unreal/plugins/load/load_alembic_geometrycache.py | 8 +++++--- .../unreal/plugins/load/load_alembic_skeletalmesh.py | 8 +++++--- .../hosts/unreal/plugins/load/load_alembic_staticmesh.py | 8 +++++--- openpype/hosts/unreal/plugins/load/load_animation.py | 8 +++++--- openpype/hosts/unreal/plugins/load/load_camera.py | 5 +++-- openpype/hosts/unreal/plugins/load/load_layout.py | 2 +- openpype/hosts/unreal/plugins/load/load_rig.py | 8 +++++--- openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py | 8 +++++--- openpype/pipeline/__init__.py | 6 ++++++ openpype/pipeline/constants.py | 2 ++ 37 files changed, 100 insertions(+), 60 deletions(-) create mode 100644 openpype/pipeline/constants.py diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 681f1c51a7..47d0bdacc5 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -5,7 +5,7 @@ from Qt import QtWidgets import pyblish.api import avalon.api -from avalon import io, pipeline +from avalon import io from openpype import lib from openpype.api import Logger @@ -13,6 +13,7 @@ from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + AVALON_CONTAINER_ID, ) import openpype.hosts.aftereffects from openpype.lib import register_event_callback @@ -149,7 +150,7 @@ def containerise(name, """ data = { "schema": "openpype:container-2.0", - "id": pipeline.AVALON_CONTAINER_ID, + "id": AVALON_CONTAINER_ID, "name": name, "namespace": namespace, "loader": str(loader), diff --git a/openpype/hosts/blender/api/pipeline.py b/openpype/hosts/blender/api/pipeline.py index 07a7509dd7..8c580cf214 
100644 --- a/openpype/hosts/blender/api/pipeline.py +++ b/openpype/hosts/blender/api/pipeline.py @@ -12,12 +12,12 @@ from . import ops import pyblish.api import avalon.api from avalon import io, schema -from avalon.pipeline import AVALON_CONTAINER_ID from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + AVALON_CONTAINER_ID, ) from openpype.api import Logger from openpype.lib import ( @@ -31,7 +31,6 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") -INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") ORIGINAL_EXCEPTHOOK = sys.excepthook diff --git a/openpype/hosts/blender/plugins/load/load_abc.py b/openpype/hosts/blender/plugins/load/load_abc.py index 3daaeceffe..1b2e800769 100644 --- a/openpype/hosts/blender/plugins/load/load_abc.py +++ b/openpype/hosts/blender/plugins/load/load_abc.py @@ -6,11 +6,14 @@ from typing import Dict, List, Optional import bpy -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID, +) + from openpype.hosts.blender.api.pipeline import ( AVALON_CONTAINERS, AVALON_PROPERTY, - AVALON_CONTAINER_ID ) from openpype.hosts.blender.api import plugin, lib diff --git a/openpype/hosts/blender/plugins/load/load_audio.py b/openpype/hosts/blender/plugins/load/load_audio.py index b95c5db270..3f4fcc17de 100644 --- a/openpype/hosts/blender/plugins/load/load_audio.py +++ b/openpype/hosts/blender/plugins/load/load_audio.py @@ -6,12 +6,14 @@ from typing import Dict, List, Optional import bpy -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID, +) from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import ( AVALON_CONTAINERS, AVALON_PROPERTY, - AVALON_CONTAINER_ID ) diff --git a/openpype/hosts/blender/plugins/load/load_camera_blend.py b/openpype/hosts/blender/plugins/load/load_camera_blend.py index 6ed2e8a575..f00027f0b4 100644 --- a/openpype/hosts/blender/plugins/load/load_camera_blend.py +++ b/openpype/hosts/blender/plugins/load/load_camera_blend.py @@ -7,12 +7,14 @@ from typing import Dict, List, Optional import bpy -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID, +) from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import ( AVALON_CONTAINERS, AVALON_PROPERTY, - AVALON_CONTAINER_ID ) logger = logging.getLogger("openpype").getChild( diff --git a/openpype/hosts/blender/plugins/load/load_camera_fbx.py b/openpype/hosts/blender/plugins/load/load_camera_fbx.py index 626ed44f08..97f844e610 100644 --- a/openpype/hosts/blender/plugins/load/load_camera_fbx.py +++ b/openpype/hosts/blender/plugins/load/load_camera_fbx.py @@ -6,12 +6,14 @@ from typing import Dict, List, Optional import bpy -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID, +) from openpype.hosts.blender.api import plugin, lib from openpype.hosts.blender.api.pipeline import ( AVALON_CONTAINERS, AVALON_PROPERTY, - AVALON_CONTAINER_ID ) diff --git a/openpype/hosts/blender/plugins/load/load_fbx.py b/openpype/hosts/blender/plugins/load/load_fbx.py index 2d249ef647..ee2e7d175c 100644 --- 
a/openpype/hosts/blender/plugins/load/load_fbx.py +++ b/openpype/hosts/blender/plugins/load/load_fbx.py @@ -6,12 +6,14 @@ from typing import Dict, List, Optional import bpy -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID, +) from openpype.hosts.blender.api import plugin, lib from openpype.hosts.blender.api.pipeline import ( AVALON_CONTAINERS, AVALON_PROPERTY, - AVALON_CONTAINER_ID ) diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index d87df3c010..cf8e89ed1f 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -10,12 +10,12 @@ from openpype import lib from openpype.pipeline import ( legacy_create, get_representation_path, + AVALON_CONTAINER_ID, ) from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import ( AVALON_CONTAINERS, AVALON_PROPERTY, - AVALON_CONTAINER_ID ) diff --git a/openpype/hosts/blender/plugins/load/load_layout_json.py b/openpype/hosts/blender/plugins/load/load_layout_json.py index 0693937fec..a0580af4a0 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_json.py +++ b/openpype/hosts/blender/plugins/load/load_layout_json.py @@ -13,12 +13,12 @@ from openpype.pipeline import ( load_container, get_representation_path, loaders_from_representation, + AVALON_CONTAINER_ID, ) from openpype.hosts.blender.api.pipeline import ( AVALON_INSTANCES, AVALON_CONTAINERS, AVALON_PROPERTY, - AVALON_CONTAINER_ID ) from openpype.hosts.blender.api import plugin diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index 18d01dcb29..0a5d98ffa0 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -6,12 +6,14 @@ from typing import Dict, List, Optional import bpy -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID, +) from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import ( AVALON_CONTAINERS, AVALON_PROPERTY, - AVALON_CONTAINER_ID ) diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index cec088076c..4dfa96167f 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -10,6 +10,7 @@ from openpype import lib from openpype.pipeline import ( legacy_create, get_representation_path, + AVALON_CONTAINER_ID, ) from openpype.hosts.blender.api import ( plugin, @@ -18,7 +19,6 @@ from openpype.hosts.blender.api import ( from openpype.hosts.blender.api.pipeline import ( AVALON_CONTAINERS, AVALON_PROPERTY, - AVALON_CONTAINER_ID ) diff --git a/openpype/hosts/flame/api/pipeline.py b/openpype/hosts/flame/api/pipeline.py index 930c6abe29..aae03cce17 100644 --- a/openpype/hosts/flame/api/pipeline.py +++ b/openpype/hosts/flame/api/pipeline.py @@ -4,13 +4,14 @@ Basic avalon integration import os import contextlib from avalon import api as avalon -from avalon.pipeline import AVALON_CONTAINER_ID from pyblish import api as pyblish + from openpype.api import Logger from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + AVALON_CONTAINER_ID, ) from .lib import ( set_segment_data_marker, diff --git 
a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 92e54ad6f5..d498f0fd75 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -8,13 +8,13 @@ import contextlib import pyblish.api import avalon.api -from avalon.pipeline import AVALON_CONTAINER_ID from openpype.api import Logger from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + AVALON_CONTAINER_ID, ) import openpype.hosts.fusion diff --git a/openpype/hosts/harmony/api/pipeline.py b/openpype/hosts/harmony/api/pipeline.py index f967da15ca..cdc58a6f19 100644 --- a/openpype/hosts/harmony/api/pipeline.py +++ b/openpype/hosts/harmony/api/pipeline.py @@ -6,7 +6,6 @@ import pyblish.api from avalon import io import avalon.api -from avalon.pipeline import AVALON_CONTAINER_ID from openpype import lib from openpype.lib import register_event_callback @@ -14,6 +13,7 @@ from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + AVALON_CONTAINER_ID, ) import openpype.hosts.harmony import openpype.hosts.harmony.api as harmony diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index eff126c0b6..8e70c6a638 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -4,7 +4,7 @@ Basic avalon integration import os import contextlib from collections import OrderedDict -from avalon.pipeline import AVALON_CONTAINER_ID + from avalon import api as avalon from avalon import schema from pyblish import api as pyblish @@ -13,6 +13,7 @@ from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + AVALON_CONTAINER_ID, ) from openpype.tools.utils import host_tools from . 
import lib, menu, events diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 7d4e58efb7..d079c9ea81 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -8,12 +8,12 @@ import hdefereval import pyblish.api import avalon.api -from avalon.pipeline import AVALON_CONTAINER_ID from avalon.lib import find_submodule from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, + AVALON_CONTAINER_ID, ) import openpype.hosts.houdini from openpype.hosts.houdini.api import lib diff --git a/openpype/hosts/houdini/plugins/load/load_image.py b/openpype/hosts/houdini/plugins/load/load_image.py index bd9ea3eee3..671f08f18f 100644 --- a/openpype/hosts/houdini/plugins/load/load_image.py +++ b/openpype/hosts/houdini/plugins/load/load_image.py @@ -3,6 +3,7 @@ import os from openpype.pipeline import ( load, get_representation_path, + AVALON_CONTAINER_ID, ) from openpype.hosts.houdini.api import lib, pipeline @@ -73,7 +74,7 @@ class ImageLoader(load.LoaderPlugin): # Imprint it manually data = { "schema": "avalon-core:container-2.0", - "id": pipeline.AVALON_CONTAINER_ID, + "id": AVALON_CONTAINER_ID, "name": node_name, "namespace": namespace, "loader": str(self.__class__.__name__), diff --git a/openpype/hosts/houdini/plugins/load/load_usd_layer.py b/openpype/hosts/houdini/plugins/load/load_usd_layer.py index d803e6abfe..48580fc3aa 100644 --- a/openpype/hosts/houdini/plugins/load/load_usd_layer.py +++ b/openpype/hosts/houdini/plugins/load/load_usd_layer.py @@ -1,8 +1,9 @@ from openpype.pipeline import ( load, get_representation_path, + AVALON_CONTAINER_ID, ) -from openpype.hosts.houdini.api import lib, pipeline +from openpype.hosts.houdini.api import lib class USDSublayerLoader(load.LoaderPlugin): @@ -43,7 +44,7 @@ class USDSublayerLoader(load.LoaderPlugin): # Imprint it manually data = { "schema": "avalon-core:container-2.0", - "id": pipeline.AVALON_CONTAINER_ID, + "id": AVALON_CONTAINER_ID, "name": node_name, "namespace": namespace, "loader": str(self.__class__.__name__), diff --git a/openpype/hosts/houdini/plugins/load/load_usd_reference.py b/openpype/hosts/houdini/plugins/load/load_usd_reference.py index fdb443f4cf..6851c77e6d 100644 --- a/openpype/hosts/houdini/plugins/load/load_usd_reference.py +++ b/openpype/hosts/houdini/plugins/load/load_usd_reference.py @@ -1,8 +1,9 @@ from openpype.pipeline import ( load, get_representation_path, + AVALON_CONTAINER_ID, ) -from openpype.hosts.houdini.api import lib, pipeline +from openpype.hosts.houdini.api import lib class USDReferenceLoader(load.LoaderPlugin): @@ -43,7 +44,7 @@ class USDReferenceLoader(load.LoaderPlugin): # Imprint it manually data = { "schema": "avalon-core:container-2.0", - "id": pipeline.AVALON_CONTAINER_ID, + "id": AVALON_CONTAINER_ID, "name": node_name, "namespace": namespace, "loader": str(self.__class__.__name__), diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 5cdc3ff4fd..12446b6d1c 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -10,7 +10,6 @@ import pyblish.api import avalon.api from avalon.lib import find_submodule -from avalon.pipeline import AVALON_CONTAINER_ID import openpype.hosts.maya from openpype.tools.utils import host_tools @@ -24,6 +23,7 @@ from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + AVALON_CONTAINER_ID, ) from openpype.hosts.maya.lib import copy_workspace_mel from . 
import menu, lib diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 84379bc145..3721868823 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -4,11 +4,11 @@ from maya import cmds import qargparse -from avalon.pipeline import AVALON_CONTAINER_ID from openpype.pipeline import ( LegacyCreator, LoaderPlugin, get_representation_path, + AVALON_CONTAINER_ID, ) from .pipeline import containerise diff --git a/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py b/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py index 389995d30c..3a47cdadb5 100644 --- a/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py +++ b/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py @@ -6,7 +6,7 @@ from maya import cmds import openpype.api from openpype.hosts.maya.api.lib import maintained_selection -from avalon.pipeline import AVALON_CONTAINER_ID +from openpype.pipeline import AVALON_CONTAINER_ID class ExtractMayaSceneRaw(openpype.api.Extractor): diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index fd2e16b8d3..657b24eb2a 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -6,7 +6,6 @@ import nuke import pyblish.api import avalon.api -from avalon import pipeline import openpype from openpype.api import ( @@ -19,6 +18,7 @@ from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + AVALON_CONTAINER_ID, ) from openpype.tools.utils import host_tools @@ -330,7 +330,7 @@ def containerise(node, data = OrderedDict( [ ("schema", "openpype:container-2.0"), - ("id", pipeline.AVALON_CONTAINER_ID), + ("id", AVALON_CONTAINER_ID), ("name", name), ("namespace", namespace), ("loader", str(loader)), diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index e814e1ca4d..ac7f20ab59 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -3,7 +3,7 @@ from Qt import QtWidgets import pyblish.api import avalon.api -from avalon import pipeline, io +from avalon import io from openpype.api import Logger from openpype.lib import register_event_callback @@ -11,6 +11,7 @@ from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + AVALON_CONTAINER_ID, ) import openpype.hosts.photoshop @@ -221,7 +222,7 @@ def containerise( data = { "schema": "openpype:container-2.0", - "id": pipeline.AVALON_CONTAINER_ID, + "id": AVALON_CONTAINER_ID, "name": name, "namespace": namespace, "loader": str(loader), diff --git a/openpype/hosts/resolve/api/pipeline.py b/openpype/hosts/resolve/api/pipeline.py index fa309e3503..0083a4547d 100644 --- a/openpype/hosts/resolve/api/pipeline.py +++ b/openpype/hosts/resolve/api/pipeline.py @@ -6,13 +6,13 @@ import contextlib from collections import OrderedDict from avalon import api as avalon from avalon import schema -from avalon.pipeline import AVALON_CONTAINER_ID from pyblish import api as pyblish from openpype.api import Logger from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + AVALON_CONTAINER_ID, ) from . import lib from . 
import PLUGINS_DIR diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py index 46c9d3a1dd..ec880a1abc 100644 --- a/openpype/hosts/tvpaint/api/pipeline.py +++ b/openpype/hosts/tvpaint/api/pipeline.py @@ -10,7 +10,6 @@ import pyblish.api import avalon.api from avalon import io -from avalon.pipeline import AVALON_CONTAINER_ID from openpype.hosts import tvpaint from openpype.api import get_current_project_settings @@ -19,6 +18,7 @@ from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + AVALON_CONTAINER_ID, ) from .lib import ( diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index 9ec11b942d..713c588976 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -4,13 +4,13 @@ import logging from typing import List import pyblish.api -from avalon.pipeline import AVALON_CONTAINER_ID from avalon import api from openpype.pipeline import ( LegacyCreator, register_loader_plugin_path, deregister_loader_plugin_path, + AVALON_CONTAINER_ID, ) from openpype.tools.utils import host_tools import openpype.hosts.unreal diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py b/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py index 3508fe5ed7..6ac3531b40 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py @@ -2,8 +2,10 @@ """Loader for published alembics.""" import os -from avalon import pipeline -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID +) from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline @@ -117,7 +119,7 @@ class PointCacheAlembicLoader(plugin.Loader): data = { "schema": "openpype:container-2.0", - "id": pipeline.AVALON_CONTAINER_ID, + "id": AVALON_CONTAINER_ID, "asset": asset, "namespace": asset_dir, "container_name": container_name, diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py index 180942de51..b2c3889f68 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py @@ -2,8 +2,10 @@ """Load Skeletal Mesh alembics.""" import os -from avalon import pipeline -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID +) from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline import unreal # noqa @@ -81,7 +83,7 @@ class SkeletalMeshAlembicLoader(plugin.Loader): data = { "schema": "openpype:container-2.0", - "id": pipeline.AVALON_CONTAINER_ID, + "id": AVALON_CONTAINER_ID, "asset": asset, "namespace": asset_dir, "container_name": container_name, diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py index 4e00af1d97..5a73c72c64 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py @@ -2,8 +2,10 @@ """Loader for Static Mesh alembics.""" import os -from avalon import pipeline -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + 
get_representation_path, + AVALON_CONTAINER_ID +) from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline import unreal # noqa @@ -100,7 +102,7 @@ class StaticMeshAlembicLoader(plugin.Loader): data = { "schema": "openpype:container-2.0", - "id": pipeline.AVALON_CONTAINER_ID, + "id": AVALON_CONTAINER_ID, "asset": asset, "namespace": asset_dir, "container_name": container_name, diff --git a/openpype/hosts/unreal/plugins/load/load_animation.py b/openpype/hosts/unreal/plugins/load/load_animation.py index 8ef81f7851..c9a1633031 100644 --- a/openpype/hosts/unreal/plugins/load/load_animation.py +++ b/openpype/hosts/unreal/plugins/load/load_animation.py @@ -3,8 +3,10 @@ import os import json -from avalon import pipeline -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID +) from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline import unreal # noqa @@ -135,7 +137,7 @@ class AnimationFBXLoader(plugin.Loader): data = { "schema": "openpype:container-2.0", - "id": pipeline.AVALON_CONTAINER_ID, + "id": AVALON_CONTAINER_ID, "asset": asset, "namespace": asset_dir, "container_name": container_name, diff --git a/openpype/hosts/unreal/plugins/load/load_camera.py b/openpype/hosts/unreal/plugins/load/load_camera.py index 0de9470ef9..40bca0b0c7 100644 --- a/openpype/hosts/unreal/plugins/load/load_camera.py +++ b/openpype/hosts/unreal/plugins/load/load_camera.py @@ -2,7 +2,8 @@ """Load camera from FBX.""" import os -from avalon import io, pipeline +from avalon import io +from openpype.pipeline import AVALON_CONTAINER_ID from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline import unreal # noqa @@ -116,7 +117,7 @@ class CameraLoader(plugin.Loader): data = { "schema": "openpype:container-2.0", - "id": pipeline.AVALON_CONTAINER_ID, + "id": AVALON_CONTAINER_ID, "asset": asset, "namespace": asset_dir, "container_name": container_name, diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 19ee179d20..7f6ce7d822 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -11,12 +11,12 @@ from unreal import AssetToolsHelpers from unreal import FBXImportType from unreal import MathLibrary as umath -from avalon.pipeline import AVALON_CONTAINER_ID from openpype.pipeline import ( discover_loader_plugins, loaders_from_representation, load_container, get_representation_path, + AVALON_CONTAINER_ID, ) from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline diff --git a/openpype/hosts/unreal/plugins/load/load_rig.py b/openpype/hosts/unreal/plugins/load/load_rig.py index 3d5616364c..ff844a5e94 100644 --- a/openpype/hosts/unreal/plugins/load/load_rig.py +++ b/openpype/hosts/unreal/plugins/load/load_rig.py @@ -2,8 +2,10 @@ """Load Skeletal Meshes form FBX.""" import os -from avalon import pipeline -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID +) from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline import unreal # noqa @@ -101,7 +103,7 @@ class SkeletalMeshFBXLoader(plugin.Loader): data = { "schema": "openpype:container-2.0", - "id": 
pipeline.AVALON_CONTAINER_ID, + "id": AVALON_CONTAINER_ID, "asset": asset, "namespace": asset_dir, "container_name": container_name, diff --git a/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py b/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py index 587fc83a77..282d249947 100644 --- a/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py +++ b/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py @@ -2,8 +2,10 @@ """Load Static meshes form FBX.""" import os -from avalon import pipeline -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID +) from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline import unreal # noqa @@ -95,7 +97,7 @@ class StaticMeshFBXLoader(plugin.Loader): data = { "schema": "openpype:container-2.0", - "id": pipeline.AVALON_CONTAINER_ID, + "id": AVALON_CONTAINER_ID, "asset": asset, "namespace": asset_dir, "container_name": container_name, diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index 26970e4edc..0fc6af744a 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -1,3 +1,7 @@ +from .constants import ( + AVALON_CONTAINER_ID, +) + from .lib import attribute_definitions from .create import ( @@ -43,6 +47,8 @@ from .publish import ( __all__ = ( + "AVALON_CONTAINER_ID", + "attribute_definitions", # --- Create --- diff --git a/openpype/pipeline/constants.py b/openpype/pipeline/constants.py new file mode 100644 index 0000000000..90890cc0a8 --- /dev/null +++ b/openpype/pipeline/constants.py @@ -0,0 +1,2 @@ +# Metadata ID of loaded container into scene +AVALON_CONTAINER_ID = "pyblish.avalon.container" From e323429ab4939996ae058cdba95e09d07747c7f5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 16:08:15 +0100 Subject: [PATCH 096/196] moved host workfile extensions --- openpype/hosts/aftereffects/api/workio.py | 4 ++-- openpype/hosts/blender/api/workio.py | 5 +++-- openpype/hosts/fusion/api/workio.py | 6 ++++-- openpype/hosts/harmony/api/workio.py | 5 +++-- openpype/hosts/hiero/api/workio.py | 8 ++++---- openpype/hosts/houdini/api/workio.py | 4 ++-- openpype/hosts/maya/api/workio.py | 5 +++-- openpype/hosts/nuke/api/workio.py | 5 +++-- openpype/hosts/photoshop/api/workio.py | 5 ++--- openpype/hosts/tvpaint/api/workio.py | 3 ++- openpype/lib/applications.py | 3 ++- openpype/pipeline/__init__.py | 2 ++ openpype/pipeline/constants.py | 17 +++++++++++++++++ 13 files changed, 49 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/aftereffects/api/workio.py b/openpype/hosts/aftereffects/api/workio.py index 04c7834d8f..5a8f86ead5 100644 --- a/openpype/hosts/aftereffects/api/workio.py +++ b/openpype/hosts/aftereffects/api/workio.py @@ -1,8 +1,8 @@ """Host API required Work Files tool""" import os +from openpype.pipeline import HOST_WORKFILE_EXTENSIONS from .launch_logic import get_stub -from avalon import api def _active_document(): @@ -14,7 +14,7 @@ def _active_document(): def file_extensions(): - return api.HOST_WORKFILE_EXTENSIONS["aftereffects"] + return HOST_WORKFILE_EXTENSIONS["aftereffects"] def has_unsaved_changes(): diff --git a/openpype/hosts/blender/api/workio.py b/openpype/hosts/blender/api/workio.py index fd68761982..5eb9f82999 100644 --- a/openpype/hosts/blender/api/workio.py +++ b/openpype/hosts/blender/api/workio.py @@ -4,7 +4,8 @@ from pathlib import Path from typing import List, Optional import bpy -from avalon import api + +from 
openpype.pipeline import HOST_WORKFILE_EXTENSIONS class OpenFileCacher: @@ -77,7 +78,7 @@ def has_unsaved_changes() -> bool: def file_extensions() -> List[str]: """Return the supported file extensions for Blender scene files.""" - return api.HOST_WORKFILE_EXTENSIONS["blender"] + return HOST_WORKFILE_EXTENSIONS["blender"] def work_root(session: dict) -> str: diff --git a/openpype/hosts/fusion/api/workio.py b/openpype/hosts/fusion/api/workio.py index ec9ac7481a..a1710c6e3a 100644 --- a/openpype/hosts/fusion/api/workio.py +++ b/openpype/hosts/fusion/api/workio.py @@ -1,12 +1,14 @@ """Host API required Work Files tool""" import sys import os -from avalon import api + +from openpype.pipeline import HOST_WORKFILE_EXTENSIONS + from .pipeline import get_current_comp def file_extensions(): - return api.HOST_WORKFILE_EXTENSIONS["fusion"] + return HOST_WORKFILE_EXTENSIONS["fusion"] def has_unsaved_changes(): diff --git a/openpype/hosts/harmony/api/workio.py b/openpype/hosts/harmony/api/workio.py index 38a00ae414..ab1cb9b1a9 100644 --- a/openpype/hosts/harmony/api/workio.py +++ b/openpype/hosts/harmony/api/workio.py @@ -2,20 +2,21 @@ import os import shutil +from openpype.pipeline import HOST_WORKFILE_EXTENSIONS + from .lib import ( ProcessContext, get_local_harmony_path, zip_and_move, launch_zip_file ) -from avalon import api # used to lock saving until previous save is done. save_disabled = False def file_extensions(): - return api.HOST_WORKFILE_EXTENSIONS["harmony"] + return HOST_WORKFILE_EXTENSIONS["harmony"] def has_unsaved_changes(): diff --git a/openpype/hosts/hiero/api/workio.py b/openpype/hosts/hiero/api/workio.py index dacb11624f..394cb5e2ab 100644 --- a/openpype/hosts/hiero/api/workio.py +++ b/openpype/hosts/hiero/api/workio.py @@ -1,14 +1,14 @@ import os import hiero -from avalon import api + from openpype.api import Logger +from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) def file_extensions(): - return api.HOST_WORKFILE_EXTENSIONS["hiero"] + return HOST_WORKFILE_EXTENSIONS["hiero"] def has_unsaved_changes(): diff --git a/openpype/hosts/houdini/api/workio.py b/openpype/hosts/houdini/api/workio.py index e7310163ea..e0213023fd 100644 --- a/openpype/hosts/houdini/api/workio.py +++ b/openpype/hosts/houdini/api/workio.py @@ -2,11 +2,11 @@ import os import hou -from avalon import api +from openpype.pipeline import HOST_WORKFILE_EXTENSIONS def file_extensions(): - return api.HOST_WORKFILE_EXTENSIONS["houdini"] + return HOST_WORKFILE_EXTENSIONS["houdini"] def has_unsaved_changes(): diff --git a/openpype/hosts/maya/api/workio.py b/openpype/hosts/maya/api/workio.py index 698c48e81e..fd4961c4bf 100644 --- a/openpype/hosts/maya/api/workio.py +++ b/openpype/hosts/maya/api/workio.py @@ -1,11 +1,12 @@ """Host API required Work Files tool""" import os from maya import cmds -from avalon import api + +from openpype.pipeline import HOST_WORKFILE_EXTENSIONS def file_extensions(): - return api.HOST_WORKFILE_EXTENSIONS["maya"] + return HOST_WORKFILE_EXTENSIONS["maya"] def has_unsaved_changes(): diff --git a/openpype/hosts/nuke/api/workio.py b/openpype/hosts/nuke/api/workio.py index dbc24fdc9b..68fcb0927f 100644 --- a/openpype/hosts/nuke/api/workio.py +++ b/openpype/hosts/nuke/api/workio.py @@ -1,11 +1,12 @@ """Host API required Work Files tool""" import os import nuke -import avalon.api + +from openpype.pipeline import HOST_WORKFILE_EXTENSIONS def file_extensions(): - return avalon.api.HOST_WORKFILE_EXTENSIONS["nuke"] + return 
HOST_WORKFILE_EXTENSIONS["nuke"] def has_unsaved_changes(): diff --git a/openpype/hosts/photoshop/api/workio.py b/openpype/hosts/photoshop/api/workio.py index 0bf3ed2bd9..951c5dbfff 100644 --- a/openpype/hosts/photoshop/api/workio.py +++ b/openpype/hosts/photoshop/api/workio.py @@ -1,8 +1,7 @@ """Host API required Work Files tool""" import os -import avalon.api - +from openpype.pipeline import HOST_WORKFILE_EXTENSIONS from . import lib @@ -15,7 +14,7 @@ def _active_document(): def file_extensions(): - return avalon.api.HOST_WORKFILE_EXTENSIONS["photoshop"] + return HOST_WORKFILE_EXTENSIONS["photoshop"] def has_unsaved_changes(): diff --git a/openpype/hosts/tvpaint/api/workio.py b/openpype/hosts/tvpaint/api/workio.py index c513bec6cf..88bdd7117e 100644 --- a/openpype/hosts/tvpaint/api/workio.py +++ b/openpype/hosts/tvpaint/api/workio.py @@ -4,6 +4,7 @@ """ from avalon import api +from openpype.pipeline import HOST_WORKFILE_EXTENSIONS from .lib import ( execute_george, execute_george_through_file @@ -47,7 +48,7 @@ def has_unsaved_changes(): def file_extensions(): """Return the supported file extensions for Blender scene files.""" - return api.HOST_WORKFILE_EXTENSIONS["tvpaint"] + return HOST_WORKFILE_EXTENSIONS["tvpaint"] def work_root(session): diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index ef175ac89a..a7621be2b5 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1544,6 +1544,7 @@ def _prepare_last_workfile(data, workdir): workdir (str): Path to folder where workfiles should be stored. """ import avalon.api + from openpype.pipeline import HOST_WORKFILE_EXTENSIONS log = data["log"] @@ -1592,7 +1593,7 @@ def _prepare_last_workfile(data, workdir): # Last workfile path last_workfile_path = data.get("last_workfile_path") or "" if not last_workfile_path: - extensions = avalon.api.HOST_WORKFILE_EXTENSIONS.get(app.host_name) + extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) if extensions: anatomy = data["anatomy"] project_settings = data["project_settings"] diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index 0fc6af744a..000441c720 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -1,5 +1,6 @@ from .constants import ( AVALON_CONTAINER_ID, + HOST_WORKFILE_EXTENSIONS, ) from .lib import attribute_definitions @@ -48,6 +49,7 @@ from .publish import ( __all__ = ( "AVALON_CONTAINER_ID", + "HOST_WORKFILE_EXTENSIONS", "attribute_definitions", diff --git a/openpype/pipeline/constants.py b/openpype/pipeline/constants.py index 90890cc0a8..e6496cbf95 100644 --- a/openpype/pipeline/constants.py +++ b/openpype/pipeline/constants.py @@ -1,2 +1,19 @@ # Metadata ID of loaded container into scene AVALON_CONTAINER_ID = "pyblish.avalon.container" + +# TODO get extensions from host implementations +HOST_WORKFILE_EXTENSIONS = { + "blender": [".blend"], + "celaction": [".scn"], + "tvpaint": [".tvpp"], + "fusion": [".comp"], + "harmony": [".zip"], + "houdini": [".hip", ".hiplc", ".hipnc"], + "maya": [".ma", ".mb"], + "nuke": [".nk"], + "hiero": [".hrox"], + "photoshop": [".psd", ".psb"], + "premiere": [".prproj"], + "resolve": [".drp"], + "aftereffects": [".aep"] +} From 93eca512b8a2860d875934924765009d4f51b777 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 16:23:00 +0100 Subject: [PATCH 097/196] use ObjectId imported from bson instead of avalon.io --- openpype/hosts/aftereffects/api/pipeline.py | 3 ++- .../blender/plugins/publish/extract_layout.py | 8 +++++--- 
openpype/hosts/fusion/api/lib.py | 3 ++- openpype/hosts/harmony/api/pipeline.py | 3 ++- openpype/hosts/hiero/api/lib.py | 5 ++++- openpype/hosts/maya/api/setdress.py | 10 ++++++---- .../maya/plugins/inventory/import_modelrender.py | 3 ++- .../hosts/maya/plugins/load/load_vrayproxy.py | 4 +++- openpype/hosts/nuke/api/command.py | 5 +++-- openpype/hosts/nuke/api/lib.py | 6 +++--- openpype/hosts/photoshop/api/pipeline.py | 3 ++- .../unreal/plugins/publish/extract_layout.py | 4 +++- openpype/lib/avalon_context.py | 4 +++- openpype/pipeline/load/utils.py | 15 ++++++++------- .../publish/collect_scene_loaded_versions.py | 5 +++-- .../plugins/publish/integrate_hero_version.py | 7 ++++--- openpype/plugins/publish/integrate_inputlinks.py | 8 +++++--- openpype/plugins/publish/integrate_new.py | 11 ++++++----- openpype/tools/mayalookassigner/commands.py | 3 ++- openpype/tools/mayalookassigner/vray_proxies.py | 3 ++- openpype/tools/sceneinventory/model.py | 3 ++- openpype/tools/sceneinventory/switch_dialog.py | 5 +++-- openpype/tools/sceneinventory/view.py | 9 +++++---- 23 files changed, 80 insertions(+), 50 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 47d0bdacc5..b578b03d70 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -2,6 +2,7 @@ import os import sys from Qt import QtWidgets +from bson.objectid import ObjectId import pyblish.api import avalon.api @@ -43,7 +44,7 @@ def check_inventory(): representation = container['representation'] representation_doc = io.find_one( { - "_id": io.ObjectId(representation), + "_id": ObjectId(representation), "type": "representation" }, projection={"parent": True} diff --git a/openpype/hosts/blender/plugins/publish/extract_layout.py b/openpype/hosts/blender/plugins/publish/extract_layout.py index cc7c90f4c8..b78a193d81 100644 --- a/openpype/hosts/blender/plugins/publish/extract_layout.py +++ b/openpype/hosts/blender/plugins/publish/extract_layout.py @@ -1,6 +1,8 @@ import os import json +from bson.objectid import ObjectId + import bpy import bpy_extras import bpy_extras.anim_utils @@ -140,7 +142,7 @@ class ExtractLayout(openpype.api.Extractor): blend = io.find_one( { "type": "representation", - "parent": io.ObjectId(parent), + "parent": ObjectId(parent), "name": "blend" }, projection={"_id": True}) @@ -151,7 +153,7 @@ class ExtractLayout(openpype.api.Extractor): fbx = io.find_one( { "type": "representation", - "parent": io.ObjectId(parent), + "parent": ObjectId(parent), "name": "fbx" }, projection={"_id": True}) @@ -162,7 +164,7 @@ class ExtractLayout(openpype.api.Extractor): abc = io.find_one( { "type": "representation", - "parent": io.ObjectId(parent), + "parent": ObjectId(parent), "name": "abc" }, projection={"_id": True}) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index 2bb5ea8aae..f7a2360bfa 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -3,6 +3,7 @@ import sys import re import contextlib +from bson.objectid import ObjectId from Qt import QtGui from avalon import io @@ -92,7 +93,7 @@ def switch_item(container, # Collect any of current asset, subset and representation if not provided # so we can use the original name from those. 
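    # Illustrative usage sketch (an assumption, not part of this patch): with
    # the parameters above, a loaded container can be switched to another
    # representation of the same asset and subset by passing only the new
    # name, e.g.
    #
    #     switch_item(loaded_container, representation_name="exr")
    #
    # The block below then resolves the omitted asset and subset names from
    # the container's current representation in the database.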
if any(not x for x in [asset_name, subset_name, representation_name]): - _id = io.ObjectId(container["representation"]) + _id = ObjectId(container["representation"]) representation = io.find_one({"type": "representation", "_id": _id}) version, subset, asset, project = io.parenthood(representation) diff --git a/openpype/hosts/harmony/api/pipeline.py b/openpype/hosts/harmony/api/pipeline.py index cdc58a6f19..420e9720db 100644 --- a/openpype/hosts/harmony/api/pipeline.py +++ b/openpype/hosts/harmony/api/pipeline.py @@ -2,6 +2,7 @@ import os from pathlib import Path import logging +from bson.objectid import ObjectId import pyblish.api from avalon import io @@ -113,7 +114,7 @@ def check_inventory(): representation = container['representation'] representation_doc = io.find_one( { - "_id": io.ObjectId(representation), + "_id": ObjectId(representation), "type": "representation" }, projection={"parent": True} diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index a9467ae5a4..df3b24ff2c 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -8,7 +8,10 @@ import platform import ast import shutil import hiero + from Qt import QtWidgets +from bson.objectid import ObjectId + import avalon.api as avalon import avalon.io from openpype.api import (Logger, Anatomy, get_anatomy_settings) @@ -1006,7 +1009,7 @@ def check_inventory_versions(): # get representation from io representation = io.find_one({ "type": "representation", - "_id": io.ObjectId(container["representation"]) + "_id": ObjectId(container["representation"]) }) # Get start frame from version data diff --git a/openpype/hosts/maya/api/setdress.py b/openpype/hosts/maya/api/setdress.py index 96a9700b88..0b60564e5e 100644 --- a/openpype/hosts/maya/api/setdress.py +++ b/openpype/hosts/maya/api/setdress.py @@ -6,6 +6,8 @@ import contextlib import copy import six +from bson.objectid import ObjectId + from maya import cmds from avalon import io @@ -282,7 +284,7 @@ def update_package_version(container, version): # Versioning (from `core.maya.pipeline`) current_representation = io.find_one({ - "_id": io.ObjectId(container["representation"]) + "_id": ObjectId(container["representation"]) }) assert current_representation is not None, "This is a bug" @@ -327,7 +329,7 @@ def update_package(set_container, representation): # Load the original package data current_representation = io.find_one({ - "_id": io.ObjectId(set_container['representation']), + "_id": ObjectId(set_container['representation']), "type": "representation" }) @@ -478,10 +480,10 @@ def update_scene(set_container, containers, current_data, new_data, new_file): # They *must* use the same asset, subset and Loader for # `update_container` to make sense. 
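        # Illustrative sketch only (an assumption, not the actual
        # `compare_representations` implementation): one way to verify the
        # old and new representations still point at the same asset and
        # subset is to walk their parenthood chains and compare the names.
        # Assumes `io` is the imported avalon database module used above.
        def _same_asset_and_subset(old_repre, new_repre):
            _, old_subset, old_asset, _ = io.parenthood(old_repre)
            _, new_subset, new_asset, _ = io.parenthood(new_repre)
            return (old_asset["name"] == new_asset["name"]
                    and old_subset["name"] == new_subset["name"])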
old = io.find_one({ - "_id": io.ObjectId(representation_current) + "_id": ObjectId(representation_current) }) new = io.find_one({ - "_id": io.ObjectId(representation_new) + "_id": ObjectId(representation_new) }) is_valid = compare_representations(old=old, new=new) if not is_valid: diff --git a/openpype/hosts/maya/plugins/inventory/import_modelrender.py b/openpype/hosts/maya/plugins/inventory/import_modelrender.py index c5d3d0c8f4..50ee7a15fc 100644 --- a/openpype/hosts/maya/plugins/inventory/import_modelrender.py +++ b/openpype/hosts/maya/plugins/inventory/import_modelrender.py @@ -1,5 +1,6 @@ import json from avalon import api, io +from bson.objectid import ObjectId from openpype.pipeline import ( get_representation_context, get_representation_path_from_context, @@ -39,7 +40,7 @@ class ImportModelRender(api.InventoryAction): nodes.append(n) repr_doc = io.find_one({ - "_id": io.ObjectId(container["representation"]), + "_id": ObjectId(container["representation"]), }) version_id = repr_doc["parent"] diff --git a/openpype/hosts/maya/plugins/load/load_vrayproxy.py b/openpype/hosts/maya/plugins/load/load_vrayproxy.py index 5b79b1efb3..69d54df62b 100644 --- a/openpype/hosts/maya/plugins/load/load_vrayproxy.py +++ b/openpype/hosts/maya/plugins/load/load_vrayproxy.py @@ -7,6 +7,8 @@ loader will use them instead of native vray vrmesh format. """ import os +from bson.objectid import ObjectId + import maya.cmds as cmds from avalon import io @@ -186,7 +188,7 @@ class VRayProxyLoader(load.LoaderPlugin): abc_rep = io.find_one( { "type": "representation", - "parent": io.ObjectId(version_id), + "parent": ObjectId(version_id), "name": "abc" }) diff --git a/openpype/hosts/nuke/api/command.py b/openpype/hosts/nuke/api/command.py index 212d4757c6..6f74c08e97 100644 --- a/openpype/hosts/nuke/api/command.py +++ b/openpype/hosts/nuke/api/command.py @@ -1,6 +1,7 @@ import logging import contextlib import nuke +from bson.objectid import ObjectId from avalon import api, io @@ -70,10 +71,10 @@ def get_handles(asset): if "visualParent" in data: vp = data["visualParent"] if vp is not None: - parent_asset = io.find_one({"_id": io.ObjectId(vp)}) + parent_asset = io.find_one({"_id": ObjectId(vp)}) if parent_asset is None: - parent_asset = io.find_one({"_id": io.ObjectId(asset["parent"])}) + parent_asset = io.find_one({"_id": ObjectId(asset["parent"])}) if parent_asset is not None: return get_handles(parent_asset) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index dba7ec1b85..3c8ba3e77c 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -6,10 +6,11 @@ import contextlib from collections import OrderedDict import clique +from bson.objectid import ObjectId import nuke -from avalon import api, io, lib +from avalon import api, io from openpype.api import ( Logger, @@ -20,7 +21,6 @@ from openpype.api import ( get_workdir_data, get_asset, get_current_project_settings, - ApplicationManager ) from openpype.tools.utils import host_tools from openpype.lib.path_tools import HostDirmap @@ -570,7 +570,7 @@ def check_inventory_versions(): # get representation from io representation = io.find_one({ "type": "representation", - "_id": io.ObjectId(avalon_knob_data["representation"]) + "_id": ObjectId(avalon_knob_data["representation"]) }) # Failsafe for not finding the representation. 
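        # A minimal sketch of such a failsafe (an assumption, the real
        # handling may differ): inside the node loop, warn and skip the
        # version check when the database no longer holds the representation.
        # Assumes a module-level `log` logger is available in this module.
        if representation is None:
            log.warning(
                "Representation '%s' not found in the database, "
                "skipping inventory version check for this node.",
                avalon_knob_data["representation"]
            )
            continue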
diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index ac7f20ab59..c2ad0ac7b0 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -1,5 +1,6 @@ import os from Qt import QtWidgets +from bson.objectid import ObjectId import pyblish.api import avalon.api @@ -37,7 +38,7 @@ def check_inventory(): representation = container['representation'] representation_doc = io.find_one( { - "_id": io.ObjectId(representation), + "_id": ObjectId(representation), "type": "representation" }, projection={"parent": True} diff --git a/openpype/hosts/unreal/plugins/publish/extract_layout.py b/openpype/hosts/unreal/plugins/publish/extract_layout.py index 2d09b0e7bd..f34a47b89f 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_layout.py +++ b/openpype/hosts/unreal/plugins/publish/extract_layout.py @@ -3,6 +3,8 @@ import os import json import math +from bson.objectid import ObjectId + import unreal from unreal import EditorLevelLibrary as ell from unreal import EditorAssetLibrary as eal @@ -62,7 +64,7 @@ class ExtractLayout(openpype.api.Extractor): blend = io.find_one( { "type": "representation", - "parent": io.ObjectId(parent), + "parent": ObjectId(parent), "name": "blend" }, projection={"_id": True}) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 26beba41ee..e16d14dd16 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -9,6 +9,8 @@ import collections import functools import getpass +from bson.objectid import ObjectId + from openpype.settings import ( get_project_settings, get_system_settings @@ -168,7 +170,7 @@ def any_outdated(): representation_doc = avalon.io.find_one( { - "_id": avalon.io.ObjectId(representation), + "_id": ObjectId(representation), "type": "representation" }, projection={"parent": True} diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 118f86a570..e48074ebb1 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -7,6 +7,7 @@ import inspect import numbers import six +from bson.objectid import ObjectId from avalon import io, schema from avalon.api import Session, registered_root @@ -67,7 +68,7 @@ def get_repres_contexts(representation_ids, dbcon=None): _representation_ids = [] for repre_id in representation_ids: if isinstance(repre_id, six.string_types): - repre_id = io.ObjectId(repre_id) + repre_id = ObjectId(repre_id) _representation_ids.append(repre_id) repre_docs = dbcon.find({ @@ -174,7 +175,7 @@ def get_subset_contexts(subset_ids, dbcon=None): _subset_ids = set() for subset_id in subset_ids: if isinstance(subset_id, six.string_types): - subset_id = io.ObjectId(subset_id) + subset_id = ObjectId(subset_id) _subset_ids.add(subset_id) subset_docs = dbcon.find({ @@ -217,7 +218,7 @@ def get_representation_context(representation): """Return parenthood context for representation. Args: - representation (str or io.ObjectId or dict): The representation id + representation (str or ObjectId or dict): The representation id or full representation as returned by the database. 
Returns: @@ -227,9 +228,9 @@ def get_representation_context(representation): assert representation is not None, "This is a bug" - if isinstance(representation, (six.string_types, io.ObjectId)): + if isinstance(representation, (six.string_types, ObjectId)): representation = io.find_one( - {"_id": io.ObjectId(str(representation))}) + {"_id": ObjectId(str(representation))}) version, subset, asset, project = io.parenthood(representation) @@ -340,7 +341,7 @@ def load_container( Args: Loader (Loader): The loader class to trigger. - representation (str or io.ObjectId or dict): The representation id + representation (str or ObjectId or dict): The representation id or full representation as returned by the database. namespace (str, Optional): The namespace to assign. Defaults to None. name (str, Optional): The name to assign. Defaults to subset name. @@ -404,7 +405,7 @@ def update_container(container, version=-1): # Compute the different version from 'representation' current_representation = io.find_one({ - "_id": io.ObjectId(container["representation"]) + "_id": ObjectId(container["representation"]) }) assert current_representation is not None, "This is a bug" diff --git a/openpype/plugins/publish/collect_scene_loaded_versions.py b/openpype/plugins/publish/collect_scene_loaded_versions.py index d8119846c6..6746757e5f 100644 --- a/openpype/plugins/publish/collect_scene_loaded_versions.py +++ b/openpype/plugins/publish/collect_scene_loaded_versions.py @@ -1,3 +1,4 @@ +from bson.objectid import ObjectId import pyblish.api from avalon import api, io @@ -35,7 +36,7 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): loaded_versions = [] _containers = list(host.ls()) - _repr_ids = [io.ObjectId(c["representation"]) for c in _containers] + _repr_ids = [ObjectId(c["representation"]) for c in _containers] version_by_repr = { str(doc["_id"]): doc["parent"] for doc in io.find({"_id": {"$in": _repr_ids}}, projection={"parent": 1}) @@ -46,7 +47,7 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): # may have more then one representation that are same version version = { "subsetName": con["name"], - "representation": io.ObjectId(con["representation"]), + "representation": ObjectId(con["representation"]), "version": version_by_repr[con["representation"]], # _id } loaded_versions.append(version) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 60245314f4..466606d08b 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -4,6 +4,7 @@ import clique import errno import shutil +from bson.objectid import ObjectId from pymongo import InsertOne, ReplaceOne import pyblish.api from avalon import api, io, schema @@ -161,7 +162,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): if old_version: new_version_id = old_version["_id"] else: - new_version_id = io.ObjectId() + new_version_id = ObjectId() new_hero_version = { "_id": new_version_id, @@ -384,7 +385,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): # Create representation else: - repre["_id"] = io.ObjectId() + repre["_id"] = ObjectId() bulk_writes.append( InsertOne(repre) ) @@ -420,7 +421,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): else: repre["old_id"] = repre["_id"] - repre["_id"] = io.ObjectId() + repre["_id"] = ObjectId() repre["type"] = "archived_representation" bulk_writes.append( InsertOne(repre) diff --git a/openpype/plugins/publish/integrate_inputlinks.py 
b/openpype/plugins/publish/integrate_inputlinks.py index f973dfc963..11cffc4638 100644 --- a/openpype/plugins/publish/integrate_inputlinks.py +++ b/openpype/plugins/publish/integrate_inputlinks.py @@ -1,8 +1,10 @@ - from collections import OrderedDict -from avalon import io + +from bson.objectid import ObjectId import pyblish.api +from avalon import io + class IntegrateInputLinks(pyblish.api.ContextPlugin): """Connecting version level dependency links""" @@ -104,7 +106,7 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): # future. link = OrderedDict() link["type"] = link_type - link["id"] = io.ObjectId(input_id) + link["id"] = ObjectId(input_id) link["linkedBy"] = "publish" if "inputLinks" not in version_doc["data"]: diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index e8dab089af..c26d3559ec 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -9,6 +9,7 @@ import six import re import shutil +from bson.objectid import ObjectId from pymongo import DeleteOne, InsertOne import pyblish.api from avalon import io @@ -293,7 +294,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): bulk_writes.append(DeleteOne({"_id": repre_id})) repre["orig_id"] = repre_id - repre["_id"] = io.ObjectId() + repre["_id"] = ObjectId() repre["type"] = "archived_representation" bulk_writes.append(InsertOne(repre)) @@ -572,7 +573,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Create new id if existing representations does not match if repre_id is None: - repre_id = io.ObjectId() + repre_id = ObjectId() data = repre.get("data") or {} data.update({'path': dst, 'template': template}) @@ -781,7 +782,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): families = [instance.data["family"]] families.extend(instance.data.get("families", [])) io.update_many( - {"type": "subset", "_id": io.ObjectId(subset["_id"])}, + {"type": "subset", "_id": ObjectId(subset["_id"])}, {"$set": {"data.families": families}} ) @@ -806,7 +807,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if subset_group: io.update_many({ 'type': 'subset', - '_id': io.ObjectId(subset_id) + '_id': ObjectId(subset_id) }, {'$set': {'data.subsetGroup': subset_group}}) def _get_subset_group(self, instance): @@ -1052,7 +1053,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): sync_project_presets = None rec = { - "_id": io.ObjectId(), + "_id": ObjectId(), "path": path } if size: diff --git a/openpype/tools/mayalookassigner/commands.py b/openpype/tools/mayalookassigner/commands.py index df72e41354..78fd51c7a3 100644 --- a/openpype/tools/mayalookassigner/commands.py +++ b/openpype/tools/mayalookassigner/commands.py @@ -2,6 +2,7 @@ from collections import defaultdict import logging import os +from bson.objectid import ObjectId import maya.cmds as cmds from avalon import io, api @@ -157,7 +158,7 @@ def create_items_from_nodes(nodes): return asset_view_items for _id, id_nodes in id_hashes.items(): - asset = io.find_one({"_id": io.ObjectId(_id)}, + asset = io.find_one({"_id": ObjectId(_id)}, projection={"name": True}) # Skip if asset id is not found diff --git a/openpype/tools/mayalookassigner/vray_proxies.py b/openpype/tools/mayalookassigner/vray_proxies.py index 6a9347449a..25621fc652 100644 --- a/openpype/tools/mayalookassigner/vray_proxies.py +++ b/openpype/tools/mayalookassigner/vray_proxies.py @@ -6,6 +6,7 @@ import logging import json import six +from bson.objectid import ObjectId import alembic.Abc from maya import cmds @@ 
-231,7 +232,7 @@ def get_latest_version(asset_id, subset): """ subset = io.find_one({"name": subset, - "parent": io.ObjectId(asset_id), + "parent": ObjectId(asset_id), "type": "subset"}) if not subset: raise RuntimeError("Subset does not exist: %s" % subset) diff --git a/openpype/tools/sceneinventory/model.py b/openpype/tools/sceneinventory/model.py index 7173ae751e..091d6ca925 100644 --- a/openpype/tools/sceneinventory/model.py +++ b/openpype/tools/sceneinventory/model.py @@ -5,6 +5,7 @@ from collections import defaultdict from Qt import QtCore, QtGui import qtawesome +from bson.objectid import ObjectId from avalon import api, io, schema from openpype.pipeline import HeroVersionType @@ -299,7 +300,7 @@ class InventoryModel(TreeModel): for repre_id, group_dict in sorted(grouped.items()): group_items = group_dict["items"] # Get parenthood per group - representation = io.find_one({"_id": io.ObjectId(repre_id)}) + representation = io.find_one({"_id": ObjectId(repre_id)}) if not representation: not_found["representation"].append(group_items) not_found_ids.append(repre_id) diff --git a/openpype/tools/sceneinventory/switch_dialog.py b/openpype/tools/sceneinventory/switch_dialog.py index 0e7b1b759a..252f5cde4c 100644 --- a/openpype/tools/sceneinventory/switch_dialog.py +++ b/openpype/tools/sceneinventory/switch_dialog.py @@ -2,6 +2,7 @@ import collections import logging from Qt import QtWidgets, QtCore import qtawesome +from bson.objectid import ObjectId from avalon import io, pipeline from openpype.pipeline import ( @@ -146,7 +147,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): repre_ids = set() content_loaders = set() for item in self._items: - repre_ids.add(io.ObjectId(item["representation"])) + repre_ids.add(ObjectId(item["representation"])) content_loaders.add(item["loader"]) repres = list(io.find({ @@ -1306,7 +1307,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): repre_docs_by_parent_id_by_name[parent_id][name] = repre_doc for container in self._items: - container_repre_id = io.ObjectId(container["representation"]) + container_repre_id = ObjectId(container["representation"]) container_repre = self.content_repres[container_repre_id] container_repre_name = container_repre["name"] diff --git a/openpype/tools/sceneinventory/view.py b/openpype/tools/sceneinventory/view.py index c38390c614..76103b83a9 100644 --- a/openpype/tools/sceneinventory/view.py +++ b/openpype/tools/sceneinventory/view.py @@ -4,6 +4,7 @@ from functools import partial from Qt import QtWidgets, QtCore import qtawesome +from bson.objectid import ObjectId from avalon import io, api @@ -78,7 +79,7 @@ class SceneInventoryView(QtWidgets.QTreeView): repre_ids = [] for item in items: - item_id = io.ObjectId(item["representation"]) + item_id = ObjectId(item["representation"]) if item_id not in repre_ids: repre_ids.append(item_id) @@ -145,7 +146,7 @@ class SceneInventoryView(QtWidgets.QTreeView): def _on_switch_to_versioned(items): repre_ids = [] for item in items: - item_id = io.ObjectId(item["representation"]) + item_id = ObjectId(item["representation"]) if item_id not in repre_ids: repre_ids.append(item_id) @@ -195,7 +196,7 @@ class SceneInventoryView(QtWidgets.QTreeView): version_doc["name"] for item in items: - repre_id = io.ObjectId(item["representation"]) + repre_id = ObjectId(item["representation"]) version_id = version_id_by_repre_id.get(repre_id) version_name = version_name_by_id.get(version_id) if version_name is not None: @@ -658,7 +659,7 @@ class SceneInventoryView(QtWidgets.QTreeView): active = items[-1] # Get available 
versions for active representation - representation_id = io.ObjectId(active["representation"]) + representation_id = ObjectId(active["representation"]) representation = io.find_one({"_id": representation_id}) version = io.find_one({ "_id": representation["parent"] From 4f643a2928bc2c49f96c5724d10e63cff254ce7b Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 18 Mar 2022 16:46:40 +0100 Subject: [PATCH 098/196] Only raise minor version if `Bump Minor` label is found --- tools/ci_tools.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tools/ci_tools.py b/tools/ci_tools.py index aeb367af38..3e1e3d8d02 100644 --- a/tools/ci_tools.py +++ b/tools/ci_tools.py @@ -8,8 +8,12 @@ import os def get_release_type_github(Log, github_token): # print(Log) - minor_labels = ["type: feature", "type: deprecated"] - patch_labels = ["type: enhancement", "type: bug"] + minor_labels = ["Bump Minor"] + # patch_labels = [ + # "type: enhancement", + # "type: bug", + # "type: deprecated", + # "type: Feature"] g = Github(github_token) repo = g.get_repo("pypeclub/OpenPype") @@ -28,9 +32,12 @@ def get_release_type_github(Log, github_token): if any(label in labels for label in minor_labels): return "minor" - - if any(label in labels for label in patch_labels): + else return "patch" + + #TODO: if all is working fine, this part can be cleaned up eventually + # if any(label in labels for label in patch_labels): + # return "patch" return None From f804b5f7e193e174c2cee885d14d3459ae52fbd9 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 18 Mar 2022 16:56:36 +0100 Subject: [PATCH 099/196] fix typo --- tools/ci_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/ci_tools.py b/tools/ci_tools.py index 3e1e3d8d02..5a28d3fd66 100644 --- a/tools/ci_tools.py +++ b/tools/ci_tools.py @@ -32,7 +32,7 @@ def get_release_type_github(Log, github_token): if any(label in labels for label in minor_labels): return "minor" - else + else: return "patch" #TODO: if all is working fine, this part can be cleaned up eventually From 952fc093682685a1893a0cb7615eb2e6ab197071 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 18 Mar 2022 16:57:07 +0100 Subject: [PATCH 100/196] fix hound nitpicking --- tools/ci_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/ci_tools.py b/tools/ci_tools.py index 5a28d3fd66..4c59cd6af6 100644 --- a/tools/ci_tools.py +++ b/tools/ci_tools.py @@ -35,7 +35,7 @@ def get_release_type_github(Log, github_token): else: return "patch" - #TODO: if all is working fine, this part can be cleaned up eventually + # TODO: if all is working fine, this part can be cleaned up eventually # if any(label in labels for label in patch_labels): # return "patch" From 32bf6cb3e0d2f44a9378fbba1954fe99d6338fe1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Mar 2022 18:02:34 +0100 Subject: [PATCH 101/196] fix last workfile --- openpype/lib/avalon_context.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 0b1d09908c..8e9fff5f67 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1893,7 +1893,9 @@ def get_last_workfile_with_version( # Replace `{version}` with group regex file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) file_template = re.sub(r"{comment.*?}", r".+?", file_template) - filename = StringTemplate.format_strict_template(file_template, fill_data) + file_template = 
StringTemplate.format_strict_template( + file_template, fill_data + ) # Match with ignore case on Windows due to the Windows # OS not being case-sensitive. This avoids later running From 1b5ca6a86ef977f45f501d8a3571c4398915b740 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 19 Mar 2022 03:35:01 +0000 Subject: [PATCH 102/196] [Automated] Bump version --- CHANGELOG.md | 27 +++++++++++++++++++++++---- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 25 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f3c7820d8f..f20276cbd7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,28 @@ # Changelog -## [3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-17) +## [3.9.2-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.0...3.9.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.1...HEAD) + +**🚀 Enhancements** + +- CI: change the version bump logic [\#2919](https://github.com/pypeclub/OpenPype/pull/2919) +- Deadline: Add headless argument [\#2916](https://github.com/pypeclub/OpenPype/pull/2916) +- Ftrack: Fill workfile in custom attribute [\#2906](https://github.com/pypeclub/OpenPype/pull/2906) +- Settings UI: Add simple tooltips for settings entities [\#2901](https://github.com/pypeclub/OpenPype/pull/2901) + +**🐛 Bug fixes** + +- Ftrack: Missing Ftrack id after editorial publish [\#2905](https://github.com/pypeclub/OpenPype/pull/2905) +- AfterEffects: Fix rendering for single frame in DL [\#2875](https://github.com/pypeclub/OpenPype/pull/2875) + +**🔀 Refactored code** + +- General: Move formatting and workfile functions [\#2914](https://github.com/pypeclub/OpenPype/pull/2914) + +## [3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-18) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.1-nightly.3...3.9.1) **🚀 Enhancements** @@ -22,7 +42,6 @@ - General: Remove forgotten use of avalon Creator [\#2885](https://github.com/pypeclub/OpenPype/pull/2885) - General: Avoid circular import [\#2884](https://github.com/pypeclub/OpenPype/pull/2884) - Fixes for attaching loaded containers \(\#2837\) [\#2874](https://github.com/pypeclub/OpenPype/pull/2874) -- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826) **🔀 Refactored code** @@ -75,6 +94,7 @@ - Maya: Stop creation of reviews for Cryptomattes [\#2832](https://github.com/pypeclub/OpenPype/pull/2832) - Deadline: Remove recreated event [\#2828](https://github.com/pypeclub/OpenPype/pull/2828) - Deadline: Added missing events folder [\#2827](https://github.com/pypeclub/OpenPype/pull/2827) +- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826) - Settings: Missing document with OP versions may break start of OpenPype [\#2825](https://github.com/pypeclub/OpenPype/pull/2825) - Deadline: more detailed temp file name for environment json [\#2824](https://github.com/pypeclub/OpenPype/pull/2824) - General: Host name was formed from obsolete code [\#2821](https://github.com/pypeclub/OpenPype/pull/2821) @@ -92,7 +112,6 @@ - General: Move change context functions [\#2839](https://github.com/pypeclub/OpenPype/pull/2839) - Tools: Don't use avalon tools code [\#2829](https://github.com/pypeclub/OpenPype/pull/2829) - Move Unreal Implementation to OpenPype [\#2823](https://github.com/pypeclub/OpenPype/pull/2823) -- General: Extract template formatting from anatomy 
[\#2766](https://github.com/pypeclub/OpenPype/pull/2766) ## [3.8.2](https://github.com/pypeclub/OpenPype/tree/3.8.2) (2022-02-07) diff --git a/openpype/version.py b/openpype/version.py index 1ef25e3f48..2390309e76 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.1" +__version__ = "3.9.2-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 7c09495a99..90e264d456 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.9.1" # OpenPype +version = "3.9.2-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 225698d006ab8492ece9dcdea45a50be363fcab0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sun, 20 Mar 2022 11:18:06 +0100 Subject: [PATCH 103/196] don't add default_modules dir if does not exists --- openpype/modules/base.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 175957ae39..5a8d33aa6e 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -146,9 +146,16 @@ class _LoadCache: def get_default_modules_dir(): """Path to default OpenPype modules.""" + current_dir = os.path.abspath(os.path.dirname(__file__)) - return os.path.join(current_dir, "default_modules") + output = [] + for folder_name in ("default_modules", ): + path = os.path.join(current_dir, folder_name) + if os.path.exists(path) and os.path.isdir(path): + output.append(path) + + return output def get_dynamic_modules_dirs(): @@ -186,7 +193,7 @@ def get_dynamic_modules_dirs(): def get_module_dirs(): """List of paths where OpenPype modules can be found.""" _dirpaths = [] - _dirpaths.append(get_default_modules_dir()) + _dirpaths.extend(get_default_modules_dir()) _dirpaths.extend(get_dynamic_modules_dirs()) dirpaths = [] From 0bf019a00afe11087558163d856ee400ba365d0e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sun, 20 Mar 2022 11:18:31 +0100 Subject: [PATCH 104/196] default modules are loaded dynamically by skipping known files --- openpype/modules/base.py | 68 +++++++++++++++++++++++----------------- 1 file changed, 40 insertions(+), 28 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 5a8d33aa6e..5cdeb86087 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -28,26 +28,15 @@ from openpype.settings.lib import ( ) from openpype.lib import PypeLogger - -DEFAULT_OPENPYPE_MODULES = ( - "avalon_apps", - "clockify", - "log_viewer", - "deadline", - "muster", - "royalrender", - "python_console_interpreter", - "ftrack", - "slack", - "webserver", - "launcher_action", - "project_manager_action", - "settings_action", - "standalonepublish_action", - "traypublish_action", - "job_queue", - "timers_manager", - "sync_server", +# Files that will be always ignored on modules import +IGNORED_FILENAMES = ( + "__pycache__", +) +# Files ignored on modules import from "./openpype/modules" +IGNORED_DEFAULT_FILENAMES = ( + "__init__.py", + "base.py", + "interfaces.py", ) @@ -299,25 +288,45 @@ def _load_modules(): log = PypeLogger.get_logger("ModulesLoader") + current_dir = os.path.abspath(os.path.dirname(__file__)) + processed_paths = set() + processed_paths.add(current_dir) # Import default modules imported from 'openpype.modules' - for default_module_name in DEFAULT_OPENPYPE_MODULES: + for filename in os.listdir(current_dir): + # Ignore 
filenames + if ( + filename in IGNORED_FILENAMES + or filename in IGNORED_DEFAULT_FILENAMES + ): + continue + + fullpath = os.path.join(current_dir, filename) + basename, ext = os.path.splitext(filename) + + if not os.path.isdir(fullpath) and ext not in (".py", ): + continue + try: - import_str = "openpype.modules.{}".format(default_module_name) - new_import_str = "{}.{}".format(modules_key, default_module_name) + import_str = "openpype.modules.{}".format(basename) + new_import_str = "{}.{}".format(modules_key, basename) default_module = __import__(import_str, fromlist=("", )) sys.modules[new_import_str] = default_module - setattr(openpype_modules, default_module_name, default_module) + setattr(openpype_modules, basename, default_module) except Exception: msg = ( "Failed to import default module '{}'." - ).format(default_module_name) + ).format(basename) log.error(msg, exc_info=True) # Look for OpenPype modules in paths defined with `get_module_dirs` # - dynamically imported OpenPype modules and addons - dirpaths = get_module_dirs() - for dirpath in dirpaths: + for dirpath in get_module_dirs(): + # Skip already processed paths + if dirpath in processed_paths: + continue + processed_paths.add(dirpath) + if not os.path.exists(dirpath): log.warning(( "Could not find path when loading OpenPype modules \"{}\"" @@ -326,12 +335,15 @@ def _load_modules(): for filename in os.listdir(dirpath): # Ignore filenames - if filename in ("__pycache__", ): + if filename in IGNORED_FILENAMES: continue fullpath = os.path.join(dirpath, filename) basename, ext = os.path.splitext(filename) + if not os.path.isdir(fullpath) and ext not in (".py", ): + continue + # TODO add more logic how to define if folder is module or not # - check manifest and content of manifest try: From 9d98d5ea2e579c704a92b5c68c0f07edd49005d7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 09:50:34 +0100 Subject: [PATCH 105/196] fix import of 'register_event_callback' --- openpype/hosts/hiero/api/events.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/hiero/api/events.py b/openpype/hosts/hiero/api/events.py index 9439199933..7fab3edfc8 100644 --- a/openpype/hosts/hiero/api/events.py +++ b/openpype/hosts/hiero/api/events.py @@ -1,12 +1,12 @@ import os import hiero.core.events from openpype.api import Logger +from openpype.lib import register_event_callback from .lib import ( sync_avalon_data_to_workfile, launch_workfiles_app, selection_changed_timeline, before_project_save, - register_event_callback ) from .tags import add_tags_to_workfile from .menu import update_menu_task_label From d292b122227dc81a1acf914e7938ac21c9f2dfb8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 10:20:10 +0100 Subject: [PATCH 106/196] moved save as dialog logic into separated file --- openpype/tools/workfiles/app.py | 460 +------------------- openpype/tools/workfiles/save_as_dialog.py | 468 +++++++++++++++++++++ 2 files changed, 470 insertions(+), 458 deletions(-) create mode 100644 openpype/tools/workfiles/save_as_dialog.py diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 713992bc4b..1452c8ff54 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -1,7 +1,5 @@ import sys import os -import re -import copy import shutil import logging import datetime @@ -26,14 +24,13 @@ from openpype.lib import ( save_workfile_data_to_doc, get_workfile_template_key, create_workdir_extra_folders, - get_workdir_data, - get_last_workfile_with_version 
) from openpype.lib.avalon_context import ( update_current_task, compute_session_changes ) from .model import FilesModel +from .save_as_dialog import SaveAsDialog from .view import FilesView log = logging.getLogger(__name__) @@ -42,459 +39,6 @@ module = sys.modules[__name__] module.window = None -def build_workfile_data(session): - """Get the data required for workfile formatting from avalon `session`""" - - # Set work file data for template formatting - asset_name = session["AVALON_ASSET"] - task_name = session["AVALON_TASK"] - host_name = session["AVALON_APP"] - project_doc = io.find_one( - {"type": "project"}, - { - "name": True, - "data.code": True, - "config.tasks": True, - } - ) - - asset_doc = io.find_one( - { - "type": "asset", - "name": asset_name - }, - { - "name": True, - "data.tasks": True, - "data.parents": True - } - ) - data = get_workdir_data(project_doc, asset_doc, task_name, host_name) - data.update({ - "version": 1, - "comment": "", - "ext": None - }) - - return data - - -class CommentMatcher(object): - """Use anatomy and work file data to parse comments from filenames""" - def __init__(self, anatomy, template_key, data): - - self.fname_regex = None - - template = anatomy.templates[template_key]["file"] - if "{comment}" not in template: - # Don't look for comment if template doesn't allow it - return - - # Create a regex group for extensions - extensions = api.registered_host().file_extensions() - any_extension = "(?:{})".format( - "|".join(re.escape(ext[1:]) for ext in extensions) - ) - - # Use placeholders that will never be in the filename - temp_data = copy.deepcopy(data) - temp_data["comment"] = "<>" - temp_data["version"] = "<>" - temp_data["ext"] = "<>" - - formatted = anatomy.format(temp_data) - fname_pattern = formatted[template_key]["file"] - fname_pattern = re.escape(fname_pattern) - - # Replace comment and version with something we can match with regex - replacements = { - "<>": "(.+)", - "<>": "[0-9]+", - "<>": any_extension, - } - for src, dest in replacements.items(): - fname_pattern = fname_pattern.replace(re.escape(src), dest) - - # Match from beginning to end of string to be safe - fname_pattern = "^{}$".format(fname_pattern) - - self.fname_regex = re.compile(fname_pattern) - - def parse_comment(self, filepath): - """Parse the {comment} part from a filename""" - if not self.fname_regex: - return - - fname = os.path.basename(filepath) - match = self.fname_regex.match(fname) - if match: - return match.group(1) - - -class SubversionLineEdit(QtWidgets.QWidget): - """QLineEdit with QPushButton for drop down selection of list of strings""" - def __init__(self, parent=None): - super(SubversionLineEdit, self).__init__(parent=parent) - - layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.setSpacing(3) - - self._input = PlaceholderLineEdit() - self._button = QtWidgets.QPushButton("") - self._button.setFixedWidth(18) - self._menu = QtWidgets.QMenu(self) - self._button.setMenu(self._menu) - - layout.addWidget(self._input) - layout.addWidget(self._button) - - @property - def input(self): - return self._input - - def set_values(self, values): - self._update(values) - - def _on_button_clicked(self): - self._menu.exec_() - - def _on_action_clicked(self, action): - self._input.setText(action.text()) - - def _update(self, values): - """Create optional predefined subset names - - Args: - default_names(list): all predefined names - - Returns: - None - """ - - menu = self._menu - button = self._button - - state = any(values) - 
button.setEnabled(state) - if state is False: - return - - # Include an empty string - values = [""] + sorted(values) - - # Get and destroy the action group - group = button.findChild(QtWidgets.QActionGroup) - if group: - group.deleteLater() - - # Build new action group - group = QtWidgets.QActionGroup(button) - for name in values: - action = group.addAction(name) - menu.addAction(action) - - group.triggered.connect(self._on_action_clicked) - - -class NameWindow(QtWidgets.QDialog): - """Name Window to define a unique filename inside a root folder - - The filename will be based on the "workfile" template defined in the - project["config"]["template"]. - - """ - - def __init__(self, parent, root, anatomy, template_key, session=None): - super(NameWindow, self).__init__(parent=parent) - self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint) - - self.result = None - self.host = api.registered_host() - self.root = root - self.work_file = None - - if not session: - # Fallback to active session - session = api.Session - - self.data = build_workfile_data(session) - - # Store project anatomy - self.anatomy = anatomy - self.template = anatomy.templates[template_key]["file"] - self.template_key = template_key - - # Btns widget - btns_widget = QtWidgets.QWidget(self) - - btn_ok = QtWidgets.QPushButton("Ok", btns_widget) - btn_cancel = QtWidgets.QPushButton("Cancel", btns_widget) - - btns_layout = QtWidgets.QHBoxLayout(btns_widget) - btns_layout.addWidget(btn_ok) - btns_layout.addWidget(btn_cancel) - - # Inputs widget - inputs_widget = QtWidgets.QWidget(self) - - # Version widget - version_widget = QtWidgets.QWidget(inputs_widget) - - # Version number input - version_input = QtWidgets.QSpinBox(version_widget) - version_input.setMinimum(1) - version_input.setMaximum(9999) - - # Last version checkbox - last_version_check = QtWidgets.QCheckBox( - "Next Available Version", version_widget - ) - last_version_check.setChecked(True) - - version_layout = QtWidgets.QHBoxLayout(version_widget) - version_layout.setContentsMargins(0, 0, 0, 0) - version_layout.addWidget(version_input) - version_layout.addWidget(last_version_check) - - # Preview widget - preview_label = QtWidgets.QLabel("Preview filename", inputs_widget) - - # Subversion input - subversion = SubversionLineEdit(inputs_widget) - subversion.input.setPlaceholderText("Will be part of filename.") - - # Extensions combobox - ext_combo = QtWidgets.QComboBox(inputs_widget) - # Add styled delegate to use stylesheets - ext_delegate = QtWidgets.QStyledItemDelegate() - ext_combo.setItemDelegate(ext_delegate) - ext_combo.addItems(self.host.file_extensions()) - - # Build inputs - inputs_layout = QtWidgets.QFormLayout(inputs_widget) - # Add version only if template contains version key - # - since the version can be padded with "{version:0>4}" we only search - # for "{version". - if "{version" in self.template: - inputs_layout.addRow("Version:", version_widget) - else: - version_widget.setVisible(False) - - # Add subversion only if template contains `{comment}` - if "{comment}" in self.template: - inputs_layout.addRow("Subversion:", subversion) - - # Detect whether a {comment} is in the current filename - if so, - # preserve it by default and set it in the comment/subversion field - current_filepath = self.host.current_file() - if current_filepath: - # We match the current filename against the current session - # instead of the session where the user is saving to. 
- current_data = build_workfile_data(api.Session) - matcher = CommentMatcher(anatomy, template_key, current_data) - comment = matcher.parse_comment(current_filepath) - if comment: - log.info("Detected subversion comment: {}".format(comment)) - self.data["comment"] = comment - subversion.input.setText(comment) - - existing_comments = self.get_existing_comments() - subversion.set_values(existing_comments) - - else: - subversion.setVisible(False) - inputs_layout.addRow("Extension:", ext_combo) - inputs_layout.addRow("Preview:", preview_label) - - # Build layout - main_layout = QtWidgets.QVBoxLayout(self) - main_layout.addWidget(inputs_widget) - main_layout.addWidget(btns_widget) - - # Signal callback registration - version_input.valueChanged.connect(self.on_version_spinbox_changed) - last_version_check.stateChanged.connect( - self.on_version_checkbox_changed - ) - - subversion.input.textChanged.connect(self.on_comment_changed) - ext_combo.currentIndexChanged.connect(self.on_extension_changed) - - btn_ok.pressed.connect(self.on_ok_pressed) - btn_cancel.pressed.connect(self.on_cancel_pressed) - - # Allow "Enter" key to accept the save. - btn_ok.setDefault(True) - - # Force default focus to comment, some hosts didn't automatically - # apply focus to this line edit (e.g. Houdini) - subversion.input.setFocus() - - # Store widgets - self.btn_ok = btn_ok - - self.version_widget = version_widget - - self.version_input = version_input - self.last_version_check = last_version_check - - self.preview_label = preview_label - self.subversion = subversion - self.ext_combo = ext_combo - self._ext_delegate = ext_delegate - - self.refresh() - - def get_existing_comments(self): - - matcher = CommentMatcher(self.anatomy, self.template_key, self.data) - host_extensions = set(self.host.file_extensions()) - comments = set() - if os.path.isdir(self.root): - for fname in os.listdir(self.root): - if not os.path.isfile(os.path.join(self.root, fname)): - continue - - ext = os.path.splitext(fname)[-1] - if ext not in host_extensions: - continue - - comment = matcher.parse_comment(fname) - if comment: - comments.add(comment) - - return list(comments) - - def on_version_spinbox_changed(self, value): - self.data["version"] = value - self.refresh() - - def on_version_checkbox_changed(self, _value): - self.refresh() - - def on_comment_changed(self, text): - self.data["comment"] = text - self.refresh() - - def on_extension_changed(self): - ext = self.ext_combo.currentText() - if ext == self.data["ext"]: - return - self.data["ext"] = ext - self.refresh() - - def on_ok_pressed(self): - self.result = self.work_file - self.close() - - def on_cancel_pressed(self): - self.close() - - def get_result(self): - return self.result - - def get_work_file(self): - data = copy.deepcopy(self.data) - if not data["comment"]: - data.pop("comment", None) - - data["ext"] = data["ext"][1:] - - anatomy_filled = self.anatomy.format(data) - return anatomy_filled[self.template_key]["file"] - - def refresh(self): - extensions = self.host.file_extensions() - extension = self.data["ext"] - if extension is None: - # Define saving file extension - current_file = self.host.current_file() - if current_file: - # Match the extension of current file - _, extension = os.path.splitext(current_file) - else: - extension = extensions[0] - - if extension != self.data["ext"]: - self.data["ext"] = extension - index = self.ext_combo.findText( - extension, QtCore.Qt.MatchFixedString - ) - if index >= 0: - self.ext_combo.setCurrentIndex(index) - - if not 
self.last_version_check.isChecked(): - self.version_input.setEnabled(True) - self.data["version"] = self.version_input.value() - - work_file = self.get_work_file() - - else: - self.version_input.setEnabled(False) - - data = copy.deepcopy(self.data) - template = str(self.template) - - if not data["comment"]: - data.pop("comment", None) - - data["ext"] = data["ext"][1:] - - version = get_last_workfile_with_version( - self.root, template, data, extensions - )[1] - - if version is None: - version = 1 - else: - version += 1 - - found_valid_version = False - # Check if next version is valid version and give a chance to try - # next 100 versions - for idx in range(100): - # Store version to data - self.data["version"] = version - - work_file = self.get_work_file() - # Safety check - path = os.path.join(self.root, work_file) - if not os.path.exists(path): - found_valid_version = True - break - - # Try next version - version += 1 - # Log warning - if idx == 0: - log.warning(( - "BUG: Function `get_last_workfile_with_version` " - "didn't return last version." - )) - # Raise exception if even 100 version fallback didn't help - if not found_valid_version: - raise AssertionError( - "This is a bug. Couldn't find valid version!" - ) - - self.work_file = work_file - - path_exists = os.path.exists(os.path.join(self.root, work_file)) - - self.btn_ok.setEnabled(not path_exists) - - if path_exists: - self.preview_label.setText( - "Cannot create \"{0}\" because file exists!" - "".format(work_file) - ) - else: - self.preview_label.setText( - "{0}".format(work_file) - ) - - class FilesWidget(QtWidgets.QWidget): """A widget displaying files that allows to save and open files.""" file_selected = QtCore.Signal(str) @@ -735,7 +279,7 @@ class FilesWidget(QtWidgets.QWidget): """ session = self._get_session() - window = NameWindow( + window = SaveAsDialog( parent=self, root=self._workfiles_root, anatomy=self.anatomy, diff --git a/openpype/tools/workfiles/save_as_dialog.py b/openpype/tools/workfiles/save_as_dialog.py new file mode 100644 index 0000000000..399d54bd54 --- /dev/null +++ b/openpype/tools/workfiles/save_as_dialog.py @@ -0,0 +1,468 @@ +import os +import re +import copy +import logging + +from Qt import QtWidgets, QtCore + +from avalon import api, io + +from openpype.lib import ( + get_last_workfile_with_version, + get_workdir_data, +) +from openpype.tools.utils import PlaceholderLineEdit + +log = logging.getLogger(__name__) + + +def build_workfile_data(session): + """Get the data required for workfile formatting from avalon `session`""" + + # Set work file data for template formatting + asset_name = session["AVALON_ASSET"] + task_name = session["AVALON_TASK"] + host_name = session["AVALON_APP"] + project_doc = io.find_one( + {"type": "project"}, + { + "name": True, + "data.code": True, + "config.tasks": True, + } + ) + + asset_doc = io.find_one( + { + "type": "asset", + "name": asset_name + }, + { + "name": True, + "data.tasks": True, + "data.parents": True + } + ) + data = get_workdir_data(project_doc, asset_doc, task_name, host_name) + data.update({ + "version": 1, + "comment": "", + "ext": None + }) + + return data + + +class CommentMatcher(object): + """Use anatomy and work file data to parse comments from filenames""" + def __init__(self, anatomy, template_key, data): + + self.fname_regex = None + + template = anatomy.templates[template_key]["file"] + if "{comment}" not in template: + # Don't look for comment if template doesn't allow it + return + + # Create a regex group for extensions + extensions = 
api.registered_host().file_extensions() + any_extension = "(?:{})".format( + "|".join(re.escape(ext[1:]) for ext in extensions) + ) + + # Use placeholders that will never be in the filename + temp_data = copy.deepcopy(data) + temp_data["comment"] = "<>" + temp_data["version"] = "<>" + temp_data["ext"] = "<>" + + formatted = anatomy.format(temp_data) + fname_pattern = formatted[template_key]["file"] + fname_pattern = re.escape(fname_pattern) + + # Replace comment and version with something we can match with regex + replacements = { + "<>": "(.+)", + "<>": "[0-9]+", + "<>": any_extension, + } + for src, dest in replacements.items(): + fname_pattern = fname_pattern.replace(re.escape(src), dest) + + # Match from beginning to end of string to be safe + fname_pattern = "^{}$".format(fname_pattern) + + self.fname_regex = re.compile(fname_pattern) + + def parse_comment(self, filepath): + """Parse the {comment} part from a filename""" + if not self.fname_regex: + return + + fname = os.path.basename(filepath) + match = self.fname_regex.match(fname) + if match: + return match.group(1) + + +class SubversionLineEdit(QtWidgets.QWidget): + """QLineEdit with QPushButton for drop down selection of list of strings""" + def __init__(self, parent=None): + super(SubversionLineEdit, self).__init__(parent=parent) + + layout = QtWidgets.QHBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.setSpacing(3) + + self._input = PlaceholderLineEdit() + self._button = QtWidgets.QPushButton("") + self._button.setFixedWidth(18) + self._menu = QtWidgets.QMenu(self) + self._button.setMenu(self._menu) + + layout.addWidget(self._input) + layout.addWidget(self._button) + + @property + def input(self): + return self._input + + def set_values(self, values): + self._update(values) + + def _on_button_clicked(self): + self._menu.exec_() + + def _on_action_clicked(self, action): + self._input.setText(action.text()) + + def _update(self, values): + """Create optional predefined subset names + + Args: + default_names(list): all predefined names + + Returns: + None + """ + + menu = self._menu + button = self._button + + state = any(values) + button.setEnabled(state) + if state is False: + return + + # Include an empty string + values = [""] + sorted(values) + + # Get and destroy the action group + group = button.findChild(QtWidgets.QActionGroup) + if group: + group.deleteLater() + + # Build new action group + group = QtWidgets.QActionGroup(button) + for name in values: + action = group.addAction(name) + menu.addAction(action) + + group.triggered.connect(self._on_action_clicked) + + +class SaveAsDialog(QtWidgets.QDialog): + """Name Window to define a unique filename inside a root folder + + The filename will be based on the "workfile" template defined in the + project["config"]["template"]. 
+ + """ + + def __init__(self, parent, root, anatomy, template_key, session=None): + super(SaveAsDialog, self).__init__(parent=parent) + self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint) + + self.result = None + self.host = api.registered_host() + self.root = root + self.work_file = None + + if not session: + # Fallback to active session + session = api.Session + + self.data = build_workfile_data(session) + + # Store project anatomy + self.anatomy = anatomy + self.template = anatomy.templates[template_key]["file"] + self.template_key = template_key + + # Btns widget + btns_widget = QtWidgets.QWidget(self) + + btn_ok = QtWidgets.QPushButton("Ok", btns_widget) + btn_cancel = QtWidgets.QPushButton("Cancel", btns_widget) + + btns_layout = QtWidgets.QHBoxLayout(btns_widget) + btns_layout.addWidget(btn_ok) + btns_layout.addWidget(btn_cancel) + + # Inputs widget + inputs_widget = QtWidgets.QWidget(self) + + # Version widget + version_widget = QtWidgets.QWidget(inputs_widget) + + # Version number input + version_input = QtWidgets.QSpinBox(version_widget) + version_input.setMinimum(1) + version_input.setMaximum(9999) + + # Last version checkbox + last_version_check = QtWidgets.QCheckBox( + "Next Available Version", version_widget + ) + last_version_check.setChecked(True) + + version_layout = QtWidgets.QHBoxLayout(version_widget) + version_layout.setContentsMargins(0, 0, 0, 0) + version_layout.addWidget(version_input) + version_layout.addWidget(last_version_check) + + # Preview widget + preview_label = QtWidgets.QLabel("Preview filename", inputs_widget) + + # Subversion input + subversion = SubversionLineEdit(inputs_widget) + subversion.input.setPlaceholderText("Will be part of filename.") + + # Extensions combobox + ext_combo = QtWidgets.QComboBox(inputs_widget) + # Add styled delegate to use stylesheets + ext_delegate = QtWidgets.QStyledItemDelegate() + ext_combo.setItemDelegate(ext_delegate) + ext_combo.addItems(self.host.file_extensions()) + + # Build inputs + inputs_layout = QtWidgets.QFormLayout(inputs_widget) + # Add version only if template contains version key + # - since the version can be padded with "{version:0>4}" we only search + # for "{version". + if "{version" in self.template: + inputs_layout.addRow("Version:", version_widget) + else: + version_widget.setVisible(False) + + # Add subversion only if template contains `{comment}` + if "{comment}" in self.template: + inputs_layout.addRow("Subversion:", subversion) + + # Detect whether a {comment} is in the current filename - if so, + # preserve it by default and set it in the comment/subversion field + current_filepath = self.host.current_file() + if current_filepath: + # We match the current filename against the current session + # instead of the session where the user is saving to. 
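+            # The comment parsed from the currently open workfile is then
+            # pre-filled as the default subversion for the new save, even
+            # when saving into a different asset or task context.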
+ current_data = build_workfile_data(api.Session) + matcher = CommentMatcher(anatomy, template_key, current_data) + comment = matcher.parse_comment(current_filepath) + if comment: + log.info("Detected subversion comment: {}".format(comment)) + self.data["comment"] = comment + subversion.input.setText(comment) + + existing_comments = self.get_existing_comments() + subversion.set_values(existing_comments) + + else: + subversion.setVisible(False) + inputs_layout.addRow("Extension:", ext_combo) + inputs_layout.addRow("Preview:", preview_label) + + # Build layout + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.addWidget(inputs_widget) + main_layout.addWidget(btns_widget) + + # Signal callback registration + version_input.valueChanged.connect(self.on_version_spinbox_changed) + last_version_check.stateChanged.connect( + self.on_version_checkbox_changed + ) + + subversion.input.textChanged.connect(self.on_comment_changed) + ext_combo.currentIndexChanged.connect(self.on_extension_changed) + + btn_ok.pressed.connect(self.on_ok_pressed) + btn_cancel.pressed.connect(self.on_cancel_pressed) + + # Allow "Enter" key to accept the save. + btn_ok.setDefault(True) + + # Force default focus to comment, some hosts didn't automatically + # apply focus to this line edit (e.g. Houdini) + subversion.input.setFocus() + + # Store widgets + self.btn_ok = btn_ok + + self.version_widget = version_widget + + self.version_input = version_input + self.last_version_check = last_version_check + + self.preview_label = preview_label + self.subversion = subversion + self.ext_combo = ext_combo + self._ext_delegate = ext_delegate + + self.refresh() + + def get_existing_comments(self): + matcher = CommentMatcher(self.anatomy, self.template_key, self.data) + host_extensions = set(self.host.file_extensions()) + comments = set() + if os.path.isdir(self.root): + for fname in os.listdir(self.root): + if not os.path.isfile(os.path.join(self.root, fname)): + continue + + ext = os.path.splitext(fname)[-1] + if ext not in host_extensions: + continue + + comment = matcher.parse_comment(fname) + if comment: + comments.add(comment) + + return list(comments) + + def on_version_spinbox_changed(self, value): + self.data["version"] = value + self.refresh() + + def on_version_checkbox_changed(self, _value): + self.refresh() + + def on_comment_changed(self, text): + self.data["comment"] = text + self.refresh() + + def on_extension_changed(self): + ext = self.ext_combo.currentText() + if ext == self.data["ext"]: + return + self.data["ext"] = ext + self.refresh() + + def on_ok_pressed(self): + self.result = self.work_file + self.close() + + def on_cancel_pressed(self): + self.close() + + def get_result(self): + return self.result + + def get_work_file(self): + data = copy.deepcopy(self.data) + if not data["comment"]: + data.pop("comment", None) + + data["ext"] = data["ext"][1:] + + anatomy_filled = self.anatomy.format(data) + return anatomy_filled[self.template_key]["file"] + + def refresh(self): + extensions = self.host.file_extensions() + extension = self.data["ext"] + if extension is None: + # Define saving file extension + current_file = self.host.current_file() + if current_file: + # Match the extension of current file + _, extension = os.path.splitext(current_file) + else: + extension = extensions[0] + + if extension != self.data["ext"]: + self.data["ext"] = extension + index = self.ext_combo.findText( + extension, QtCore.Qt.MatchFixedString + ) + if index >= 0: + self.ext_combo.setCurrentIndex(index) + + if not 
self.last_version_check.isChecked(): + self.version_input.setEnabled(True) + self.data["version"] = self.version_input.value() + + work_file = self.get_work_file() + + else: + self.version_input.setEnabled(False) + + data = copy.deepcopy(self.data) + template = str(self.template) + + if not data["comment"]: + data.pop("comment", None) + + data["ext"] = data["ext"][1:] + + version = get_last_workfile_with_version( + self.root, template, data, extensions + )[1] + + if version is None: + version = 1 + else: + version += 1 + + found_valid_version = False + # Check if next version is valid version and give a chance to try + # next 100 versions + for idx in range(100): + # Store version to data + self.data["version"] = version + + work_file = self.get_work_file() + # Safety check + path = os.path.join(self.root, work_file) + if not os.path.exists(path): + found_valid_version = True + break + + # Try next version + version += 1 + # Log warning + if idx == 0: + log.warning(( + "BUG: Function `get_last_workfile_with_version` " + "didn't return last version." + )) + # Raise exception if even 100 version fallback didn't help + if not found_valid_version: + raise AssertionError( + "This is a bug. Couldn't find valid version!" + ) + + self.work_file = work_file + + path_exists = os.path.exists(os.path.join(self.root, work_file)) + + self.btn_ok.setEnabled(not path_exists) + + if path_exists: + self.preview_label.setText( + "Cannot create \"{0}\" because file exists!" + "".format(work_file) + ) + else: + self.preview_label.setText( + "{0}".format(work_file) + ) From 30fe1b30a22a00decf2493ddfcebe6a8ae012754 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 10:29:22 +0100 Subject: [PATCH 107/196] use standard item model --- openpype/tools/workfiles/app.py | 26 ++-- openpype/tools/workfiles/model.py | 219 ++++++++++++++++-------------- 2 files changed, 128 insertions(+), 117 deletions(-) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 1452c8ff54..d6e5aa9ec1 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -29,7 +29,11 @@ from openpype.lib.avalon_context import ( update_current_task, compute_session_changes ) -from .model import FilesModel +from .model import ( + WorkAreaFilesModel, + FILEPATH_ROLE, + DATE_MODIFIED_ROLE, +) from .save_as_dialog import SaveAsDialog from .view import FilesView @@ -76,7 +80,7 @@ class FilesWidget(QtWidgets.QWidget): # Create the Files model extensions = set(self.host.file_extensions()) - files_model = FilesModel(file_extensions=extensions) + files_model = WorkAreaFilesModel(extensions) # Create proxy model for files to be able sort and filter proxy_model = QtCore.QSortFilterProxyModel() @@ -167,10 +171,10 @@ class FilesWidget(QtWidgets.QWidget): self.files_model.set_root(None) # Disable/Enable buttons based on available files in model - has_filenames = self.files_model.has_filenames() - self.btn_browse.setEnabled(has_filenames) - self.btn_open.setEnabled(has_filenames) - if not has_filenames: + has_valid_items = self.files_model.has_valid_items() + self.btn_browse.setEnabled(has_valid_items) + self.btn_open.setEnabled(has_valid_items) + if not has_valid_items: # Manually trigger file selection self.on_file_select() @@ -310,7 +314,7 @@ class FilesWidget(QtWidgets.QWidget): if not index.isValid(): return - return index.data(self.files_model.FilePathRole) + return index.data(FILEPATH_ROLE) def on_open_pressed(self): path = self._get_selected_filepath() @@ -398,12 +402,11 @@ class 
FilesWidget(QtWidgets.QWidget): self._select_last_modified_file() def on_context_menu(self, point): - index = self.files_view.indexAt(point) + index = self._workarea_files_view.indexAt(point) if not index.isValid(): return - is_enabled = index.data(FilesModel.IsEnabled) - if not is_enabled: + if not index.flags() & QtCore.Qt.ItemIsEnabled: return menu = QtWidgets.QMenu(self) @@ -424,7 +427,6 @@ class FilesWidget(QtWidgets.QWidget): def _select_last_modified_file(self): """Utility function to select the file with latest date modified""" - role = self.files_model.DateModifiedRole model = self.files_view.model() highest_index = None @@ -434,7 +436,7 @@ class FilesWidget(QtWidgets.QWidget): if not index.isValid(): continue - modified = index.data(role) + modified = index.data(DATE_MODIFIED_ROLE) if modified is not None and modified > highest: highest_index = index highest = modified diff --git a/openpype/tools/workfiles/model.py b/openpype/tools/workfiles/model.py index e9184842fc..fa450f0a8a 100644 --- a/openpype/tools/workfiles/model.py +++ b/openpype/tools/workfiles/model.py @@ -1,7 +1,7 @@ import os import logging -from Qt import QtCore +from Qt import QtCore, QtGui import qtawesome from openpype.style import ( @@ -9,145 +9,152 @@ from openpype.style import ( get_disabled_entity_icon_color, ) -from openpype.tools.utils.models import TreeModel, Item log = logging.getLogger(__name__) +FILEPATH_ROLE = QtCore.Qt.UserRole + 2 +DATE_MODIFIED_ROLE = QtCore.Qt.UserRole + 3 +ITEM_ID_ROLE = QtCore.Qt.UserRole + 4 -class FilesModel(TreeModel): - """Model listing files with specified extensions in a root folder""" - Columns = ["filename", "date"] - FileNameRole = QtCore.Qt.UserRole + 2 - DateModifiedRole = QtCore.Qt.UserRole + 3 - FilePathRole = QtCore.Qt.UserRole + 4 - IsEnabled = QtCore.Qt.UserRole + 5 +class WorkAreaFilesModel(QtGui.QStandardItemModel): + def __init__(self, extensions, *args, **kwargs): + super(WorkAreaFilesModel, self).__init__(*args, **kwargs) - def __init__(self, file_extensions, parent=None): - super(FilesModel, self).__init__(parent=parent) + self.setColumnCount(2) self._root = None - self._file_extensions = file_extensions - self._icons = { - "file": qtawesome.icon( - "fa.file-o", - color=get_default_entity_icon_color() + self._file_extensions = extensions + self._invalid_path_item = None + self._empty_root_item = None + self._file_icon = qtawesome.icon( + "fa.file-o", + color=get_default_entity_icon_color() + ) + self._invalid_item_visible = False + self._items_by_filename = {} + + def _get_invalid_path_item(self): + if self._invalid_path_item is None: + message = "Work Area does not exist. Use Save As to create it." + item = QtGui.QStandardItem(message) + icon = qtawesome.icon( + "fa.times", + color=get_disabled_entity_icon_color() ) - } + item.setData(icon, QtCore.Qt.DecorationRole) + item.setFlags(QtCore.Qt.NoItemFlags) + item.setColumnCount(self.columnCount()) + self._invalid_path_item = item + return self._invalid_path_item + + def _get_empty_root_item(self): + if self._empty_root_item is None: + message = "Work Area does not exist. Use Save As to create it." 
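+            # Placeholder row shown when the work area folder exists but
+            # does not contain any workfiles yet (see refresh()).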
+ item = QtGui.QStandardItem(message) + icon = qtawesome.icon( + "fa.times", + color=get_disabled_entity_icon_color() + ) + item.setData(icon, QtCore.Qt.DecorationRole) + item.setFlags(QtCore.Qt.NoItemFlags) + item.setColumnCount(self.columnCount()) + self._empty_root_item = item + return self._empty_root_item def set_root(self, root): self._root = root + if root and not os.path.exists(root): + log.debug("Work Area does not exist: {}".format(root)) self.refresh() - def _add_empty(self): - item = Item() - item.update({ - # Put a display message in 'filename' - "filename": "No files found.", - # Not-selectable - "enabled": False, - "date": None, - "filepath": None - }) - - self.add_child(item) + def _clear(self): + root_item = self.invisibleRootItem() + rows = root_item.rowCount() + if rows > 0: + if self._invalid_item_visible: + for row in range(rows): + root_item.takeRow(row) + else: + root_item.removeRows(0, rows) + self._items_by_filename = {} def refresh(self): - self.clear() - self.beginResetModel() - - root = self._root - - if not root: - self.endResetModel() - return - - if not os.path.exists(root): + root_item = self.invisibleRootItem() + if not self._root or not os.path.exists(self._root): + self._clear() # Add Work Area does not exist placeholder - log.debug("Work Area does not exist: %s", root) - message = "Work Area does not exist. Use Save As to create it." - item = Item({ - "filename": message, - "date": None, - "filepath": None, - "enabled": False, - "icon": qtawesome.icon( - "fa.times", - color=get_disabled_entity_icon_color() - ) - }) - self.add_child(item) - self.endResetModel() + item = self._get_invalid_path_item() + root_item.appendRow(item) + self._invalid_item_visible = True return - extensions = self._file_extensions + if self._invalid_item_visible: + self._clear() - for filename in os.listdir(root): - path = os.path.join(root, filename) - if os.path.isdir(path): + new_items = [] + items_to_remove = set(self._items_by_filename.keys()) + for filename in os.listdir(self._root): + filepath = os.path.join(self._root, filename) + if os.path.isdir(filepath): continue ext = os.path.splitext(filename)[1] - if extensions and ext not in extensions: + if ext not in self._file_extensions: continue - modified = os.path.getmtime(path) + modified = os.path.getmtime(filepath) - item = Item({ - "filename": filename, - "date": modified, - "filepath": path - }) + if filename in items_to_remove: + items_to_remove.remove(filename) + item = self._items_by_filename[filename] + else: + item = QtGui.QStandardItem(filename) + item.setColumnCount(self.columnCount()) + item.setFlags( + QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable + ) + item.setData(self._file_icon, QtCore.Qt.DecorationRole) + new_items.append(item) + self._items_by_filename[filename] = item + item.setData(filepath, FILEPATH_ROLE) + item.setData(modified, DATE_MODIFIED_ROLE) - self.add_child(item) + if new_items: + root_item.appendRows(new_items) - if self.rowCount() == 0: - self._add_empty() + for filename in items_to_remove: + item = self._items_by_filename.pop(filename) + root_item.removeRow(item.row()) - self.endResetModel() - - def has_filenames(self): - for item in self._root_item.children(): - if item.get("enabled", True): - return True - return False - - def rowCount(self, parent=None): - if parent is None or not parent.isValid(): - parent_item = self._root_item + if root_item.rowCount() > 0: + self._invalid_item_visible = False else: - parent_item = parent.internalPointer() - return parent_item.childCount() + 
self._invalid_item_visible = True + item = self._get_empty_root_item() + root_item.appendRow(item) - def data(self, index, role): - if not index.isValid(): - return + def has_valid_items(self): + return not self._invalid_item_visible - if role == QtCore.Qt.DecorationRole: - # Add icon to filename column - item = index.internalPointer() - if index.column() == 0: - if item["filepath"]: - return self._icons["file"] - return item.get("icon", None) + def flags(self, index): + if index.column() != 0: + index = self.index(index.row(), 0, index.parent()) + return super(WorkAreaFilesModel, self).flags(index) - if role == self.FileNameRole: - item = index.internalPointer() - return item["filename"] + def data(self, index, role=None): + if role is None: + role = QtCore.Qt.DisplayRole - if role == self.DateModifiedRole: - item = index.internalPointer() - return item["date"] + if index.column() == 1: + if role == QtCore.Qt.DecorationRole: + return None - if role == self.FilePathRole: - item = index.internalPointer() - return item["filepath"] + if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole): + role = DATE_MODIFIED_ROLE + index = self.index(index.row(), 0, index.parent()) - if role == self.IsEnabled: - item = index.internalPointer() - return item.get("enabled", True) - - return super(FilesModel, self).data(index, role) + return super(WorkAreaFilesModel, self).data(index, role) def headerData(self, section, orientation, role): # Show nice labels in the header @@ -160,4 +167,6 @@ class FilesModel(TreeModel): elif section == 1: return "Date modified" - return super(FilesModel, self).headerData(section, orientation, role) + return super(WorkAreaFilesModel, self).headerData( + section, orientation, role + ) From 6dbb48d4e6dd07ff11266584935dfdc2f4f941a5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 11:03:33 +0100 Subject: [PATCH 108/196] separated files widget and Window into separated files --- openpype/tools/workfiles/__init__.py | 7 +- openpype/tools/workfiles/app.py | 757 +---------------------- openpype/tools/workfiles/files_widget.py | 445 +++++++++++++ openpype/tools/workfiles/view.py | 15 - openpype/tools/workfiles/window.py | 334 ++++++++++ 5 files changed, 787 insertions(+), 771 deletions(-) create mode 100644 openpype/tools/workfiles/files_widget.py delete mode 100644 openpype/tools/workfiles/view.py create mode 100644 openpype/tools/workfiles/window.py diff --git a/openpype/tools/workfiles/__init__.py b/openpype/tools/workfiles/__init__.py index cde7293931..5fbc71797d 100644 --- a/openpype/tools/workfiles/__init__.py +++ b/openpype/tools/workfiles/__init__.py @@ -1,9 +1,12 @@ +from .window import Window from .app import ( show, - Window + validate_host_requirements, ) __all__ = [ + "Window", + "show", - "Window" + "validate_host_requirements", ] diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index d6e5aa9ec1..ccf80ee98b 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -1,41 +1,10 @@ import sys -import os -import shutil import logging -import datetime -import Qt -from Qt import QtWidgets, QtCore -from avalon import io, api +from avalon import api -from openpype import style -from openpype.tools.utils.lib import ( - qt_app_context -) -from openpype.tools.utils import PlaceholderLineEdit -from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget -from openpype.tools.utils.tasks_widget import TasksWidget -from openpype.tools.utils.delegates import PrettyTimeDelegate -from openpype.lib import ( 
- emit_event, - Anatomy, - get_workfile_doc, - create_workfile_doc, - save_workfile_data_to_doc, - get_workfile_template_key, - create_workdir_extra_folders, -) -from openpype.lib.avalon_context import ( - update_current_task, - compute_session_changes -) -from .model import ( - WorkAreaFilesModel, - FILEPATH_ROLE, - DATE_MODIFIED_ROLE, -) -from .save_as_dialog import SaveAsDialog -from .view import FilesView +from openpype.tools.utils import qt_app_context +from .window import Window log = logging.getLogger(__name__) @@ -43,726 +12,6 @@ module = sys.modules[__name__] module.window = None -class FilesWidget(QtWidgets.QWidget): - """A widget displaying files that allows to save and open files.""" - file_selected = QtCore.Signal(str) - workfile_created = QtCore.Signal(str) - file_opened = QtCore.Signal() - - def __init__(self, parent=None): - super(FilesWidget, self).__init__(parent=parent) - - # Setup - self._asset_id = None - self._asset_doc = None - self._task_name = None - self._task_type = None - - # Pype's anatomy object for current project - self.anatomy = Anatomy(io.Session["AVALON_PROJECT"]) - # Template key used to get work template from anatomy templates - self.template_key = "work" - - # This is not root but workfile directory - self._workfiles_root = None - self._workdir_path = None - self.host = api.registered_host() - - # Whether to automatically select the latest modified - # file on a refresh of the files model. - self.auto_select_latest_modified = True - - # Avoid crash in Blender and store the message box - # (setting parent doesn't work as it hides the message box) - self._messagebox = None - - files_view = FilesView(self) - - # Create the Files model - extensions = set(self.host.file_extensions()) - files_model = WorkAreaFilesModel(extensions) - - # Create proxy model for files to be able sort and filter - proxy_model = QtCore.QSortFilterProxyModel() - proxy_model.setSourceModel(files_model) - proxy_model.setDynamicSortFilter(True) - proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive) - - # Set up the file list tree view - files_view.setModel(proxy_model) - files_view.setSortingEnabled(True) - files_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) - - # Date modified delegate - time_delegate = PrettyTimeDelegate() - files_view.setItemDelegateForColumn(1, time_delegate) - files_view.setIndentation(3) # smaller indentation - - # Default to a wider first filename column it is what we mostly care - # about and the date modified is relatively small anyway. 
- files_view.setColumnWidth(0, 330) - - # Filtering input - filter_input = PlaceholderLineEdit(self) - filter_input.setPlaceholderText("Filter files..") - filter_input.textChanged.connect(proxy_model.setFilterFixedString) - - # Home Page - # Build buttons widget for files widget - btns_widget = QtWidgets.QWidget(self) - btn_save = QtWidgets.QPushButton("Save As", btns_widget) - btn_browse = QtWidgets.QPushButton("Browse", btns_widget) - btn_open = QtWidgets.QPushButton("Open", btns_widget) - - btns_layout = QtWidgets.QHBoxLayout(btns_widget) - btns_layout.setContentsMargins(0, 0, 0, 0) - btns_layout.addWidget(btn_open) - btns_layout.addWidget(btn_browse) - btns_layout.addWidget(btn_save) - - # Build files widgets for home page - main_layout = QtWidgets.QVBoxLayout(self) - main_layout.setContentsMargins(0, 0, 0, 0) - main_layout.addWidget(filter_input) - main_layout.addWidget(files_view) - main_layout.addWidget(btns_widget) - - # Register signal callbacks - files_view.doubleClickedLeft.connect(self.on_open_pressed) - files_view.customContextMenuRequested.connect(self.on_context_menu) - files_view.selectionModel().selectionChanged.connect( - self.on_file_select - ) - - btn_open.pressed.connect(self.on_open_pressed) - btn_browse.pressed.connect(self.on_browse_pressed) - btn_save.pressed.connect(self.on_save_as_pressed) - - # Store attributes - self.time_delegate = time_delegate - - self.filter_input = filter_input - - self.files_view = files_view - self.files_model = files_model - - self.btns_widget = btns_widget - self.btn_open = btn_open - self.btn_browse = btn_browse - self.btn_save = btn_save - - def set_asset_task(self, asset_id, task_name, task_type): - if asset_id != self._asset_id: - self._asset_doc = None - self._asset_id = asset_id - self._task_name = task_name - self._task_type = task_type - - # Define a custom session so we can query the work root - # for a "Work area" that is not our current Session. - # This way we can browse it even before we enter it. 
- if self._asset_id and self._task_name and self._task_type: - session = self._get_session() - self._workdir_path = session["AVALON_WORKDIR"] - self._workfiles_root = self.host.work_root(session) - self.files_model.set_root(self._workfiles_root) - - else: - self.files_model.set_root(None) - - # Disable/Enable buttons based on available files in model - has_valid_items = self.files_model.has_valid_items() - self.btn_browse.setEnabled(has_valid_items) - self.btn_open.setEnabled(has_valid_items) - if not has_valid_items: - # Manually trigger file selection - self.on_file_select() - - def _get_asset_doc(self): - if self._asset_id is None: - return None - - if self._asset_doc is None: - self._asset_doc = io.find_one({"_id": self._asset_id}) - return self._asset_doc - - def _get_session(self): - """Return a modified session for the current asset and task""" - - session = api.Session.copy() - self.template_key = get_workfile_template_key( - self._task_type, - session["AVALON_APP"], - project_name=session["AVALON_PROJECT"] - ) - changes = compute_session_changes( - session, - asset=self._get_asset_doc(), - task=self._task_name, - template_key=self.template_key - ) - session.update(changes) - - return session - - def _enter_session(self): - """Enter the asset and task session currently selected""" - - session = api.Session.copy() - changes = compute_session_changes( - session, - asset=self._get_asset_doc(), - task=self._task_name, - template_key=self.template_key - ) - if not changes: - # Return early if we're already in the right Session context - # to avoid any unwanted Task Changed callbacks to be triggered. - return - - update_current_task( - asset=self._get_asset_doc(), - task=self._task_name, - template_key=self.template_key - ) - - def open_file(self, filepath): - host = self.host - if host.has_unsaved_changes(): - result = self.save_changes_prompt() - if result is None: - # Cancel operation - return False - - # Save first if has changes - if result: - current_file = host.current_file() - if not current_file: - # If the user requested to save the current scene - # we can't actually automatically do so if the current - # file has not been saved with a name yet. So we'll have - # to opt out. - log.error("Can't save scene with no filename. Please " - "first save your work file using 'Save As'.") - return - - # Save current scene, continue to open file - host.save_file(current_file) - - self._enter_session() - host.open_file(filepath) - self.file_opened.emit() - - def save_changes_prompt(self): - self._messagebox = messagebox = QtWidgets.QMessageBox(parent=self) - messagebox.setWindowFlags(messagebox.windowFlags() | - QtCore.Qt.FramelessWindowHint) - messagebox.setIcon(messagebox.Warning) - messagebox.setWindowTitle("Unsaved Changes!") - messagebox.setText( - "There are unsaved changes to the current file." - "\nDo you want to save the changes?" - ) - messagebox.setStandardButtons( - messagebox.Yes | messagebox.No | messagebox.Cancel - ) - - result = messagebox.exec_() - if result == messagebox.Yes: - return True - if result == messagebox.No: - return False - return None - - def get_filename(self): - """Show save dialog to define filename for save or duplicate - - Returns: - str: The filename to create. 
- - """ - session = self._get_session() - - window = SaveAsDialog( - parent=self, - root=self._workfiles_root, - anatomy=self.anatomy, - template_key=self.template_key, - session=session - ) - window.exec_() - - return window.get_result() - - def on_duplicate_pressed(self): - work_file = self.get_filename() - if not work_file: - return - - src = self._get_selected_filepath() - dst = os.path.join(self._workfiles_root, work_file) - shutil.copy(src, dst) - - self.workfile_created.emit(dst) - - self.refresh() - - def _get_selected_filepath(self): - """Return current filepath selected in view""" - selection = self.files_view.selectionModel() - index = selection.currentIndex() - if not index.isValid(): - return - - return index.data(FILEPATH_ROLE) - - def on_open_pressed(self): - path = self._get_selected_filepath() - if not path: - print("No file selected to open..") - return - - self.open_file(path) - - def on_browse_pressed(self): - ext_filter = "Work File (*{0})".format( - " *".join(self.host.file_extensions()) - ) - kwargs = { - "caption": "Work Files", - "filter": ext_filter - } - if Qt.__binding__ in ("PySide", "PySide2"): - kwargs["dir"] = self._workfiles_root - else: - kwargs["directory"] = self._workfiles_root - - work_file = QtWidgets.QFileDialog.getOpenFileName(**kwargs)[0] - if work_file: - self.open_file(work_file) - - def on_save_as_pressed(self): - work_filename = self.get_filename() - if not work_filename: - return - - # Trigger before save event - emit_event( - "workfile.save.before", - {"filename": work_filename, "workdir_path": self._workdir_path}, - source="workfiles.tool" - ) - - # Make sure workfiles root is updated - # - this triggers 'workio.work_root(...)' which may change value of - # '_workfiles_root' - self.set_asset_task( - self._asset_id, self._task_name, self._task_type - ) - - # Create workfiles root folder - if not os.path.exists(self._workfiles_root): - log.debug("Initializing Work Directory: %s", self._workfiles_root) - os.makedirs(self._workfiles_root) - - # Update session if context has changed - self._enter_session() - # Prepare full path to workfile and save it - filepath = os.path.join( - os.path.normpath(self._workfiles_root), work_filename - ) - self.host.save_file(filepath) - # Create extra folders - create_workdir_extra_folders( - self._workdir_path, - api.Session["AVALON_APP"], - self._task_type, - self._task_name, - api.Session["AVALON_PROJECT"] - ) - # Trigger after save events - emit_event( - "workfile.save.after", - {"filename": work_filename, "workdir_path": self._workdir_path}, - source="workfiles.tool" - ) - - self.workfile_created.emit(filepath) - # Refresh files model - self.refresh() - - def on_file_select(self): - self.file_selected.emit(self._get_selected_filepath()) - - def refresh(self): - """Refresh listed files for current selection in the interface""" - self.files_model.refresh() - - if self.auto_select_latest_modified: - self._select_last_modified_file() - - def on_context_menu(self, point): - index = self._workarea_files_view.indexAt(point) - if not index.isValid(): - return - - if not index.flags() & QtCore.Qt.ItemIsEnabled: - return - - menu = QtWidgets.QMenu(self) - - # Duplicate - action = QtWidgets.QAction("Duplicate", menu) - tip = "Duplicate selected file." 
- action.setToolTip(tip) - action.setStatusTip(tip) - action.triggered.connect(self.on_duplicate_pressed) - menu.addAction(action) - - # Show the context action menu - global_point = self.files_view.mapToGlobal(point) - action = menu.exec_(global_point) - if not action: - return - - def _select_last_modified_file(self): - """Utility function to select the file with latest date modified""" - model = self.files_view.model() - - highest_index = None - highest = 0 - for row in range(model.rowCount()): - index = model.index(row, 0, parent=QtCore.QModelIndex()) - if not index.isValid(): - continue - - modified = index.data(DATE_MODIFIED_ROLE) - if modified is not None and modified > highest: - highest_index = index - highest = modified - - if highest_index: - self.files_view.setCurrentIndex(highest_index) - - -class SidePanelWidget(QtWidgets.QWidget): - save_clicked = QtCore.Signal() - - def __init__(self, parent=None): - super(SidePanelWidget, self).__init__(parent) - - details_label = QtWidgets.QLabel("Details", self) - details_input = QtWidgets.QPlainTextEdit(self) - details_input.setReadOnly(True) - - note_label = QtWidgets.QLabel("Artist note", self) - note_input = QtWidgets.QPlainTextEdit(self) - btn_note_save = QtWidgets.QPushButton("Save note", self) - - main_layout = QtWidgets.QVBoxLayout(self) - main_layout.setContentsMargins(0, 0, 0, 0) - main_layout.addWidget(details_label, 0) - main_layout.addWidget(details_input, 0) - main_layout.addWidget(note_label, 0) - main_layout.addWidget(note_input, 1) - main_layout.addWidget(btn_note_save, alignment=QtCore.Qt.AlignRight) - - note_input.textChanged.connect(self.on_note_change) - btn_note_save.clicked.connect(self.on_save_click) - - self.details_input = details_input - self.note_input = note_input - self.btn_note_save = btn_note_save - - self._orig_note = "" - self._workfile_doc = None - - def on_note_change(self): - text = self.note_input.toPlainText() - self.btn_note_save.setEnabled(self._orig_note != text) - - def on_save_click(self): - self._orig_note = self.note_input.toPlainText() - self.on_note_change() - self.save_clicked.emit() - - def set_context(self, asset_id, task_name, filepath, workfile_doc): - # Check if asset, task and file are selected - # NOTE workfile document is not requirement - enabled = bool(asset_id) and bool(task_name) and bool(filepath) - - self.details_input.setEnabled(enabled) - self.note_input.setEnabled(enabled) - self.btn_note_save.setEnabled(enabled) - - # Make sure workfile doc is overridden - self._workfile_doc = workfile_doc - # Disable inputs and remove texts if any required arguments are missing - if not enabled: - self._orig_note = "" - self.details_input.setPlainText("") - self.note_input.setPlainText("") - return - - orig_note = "" - if workfile_doc: - orig_note = workfile_doc["data"].get("note") or orig_note - - self._orig_note = orig_note - self.note_input.setPlainText(orig_note) - # Set as empty string - self.details_input.setPlainText("") - - filestat = os.stat(filepath) - size_ending_mapping = { - "KB": 1024 ** 1, - "MB": 1024 ** 2, - "GB": 1024 ** 3 - } - size = filestat.st_size - ending = "B" - for _ending, _size in size_ending_mapping.items(): - if filestat.st_size < _size: - break - size = filestat.st_size / _size - ending = _ending - - # Append html string - datetime_format = "%b %d %Y %H:%M:%S" - creation_time = datetime.datetime.fromtimestamp(filestat.st_ctime) - modification_time = datetime.datetime.fromtimestamp(filestat.st_mtime) - lines = ( - "Size:", - "{:.2f} {}".format(size, ending), 
- "Created:", - creation_time.strftime(datetime_format), - "Modified:", - modification_time.strftime(datetime_format) - ) - self.details_input.appendHtml("
".join(lines)) - - def get_workfile_data(self): - data = { - "note": self.note_input.toPlainText() - } - return self._workfile_doc, data - - -class Window(QtWidgets.QMainWindow): - """Work Files Window""" - title = "Work Files" - - def __init__(self, parent=None): - super(Window, self).__init__(parent=parent) - self.setWindowTitle(self.title) - window_flags = QtCore.Qt.Window | QtCore.Qt.WindowCloseButtonHint - if not parent: - window_flags |= QtCore.Qt.WindowStaysOnTopHint - self.setWindowFlags(window_flags) - - # Create pages widget and set it as central widget - pages_widget = QtWidgets.QStackedWidget(self) - self.setCentralWidget(pages_widget) - - home_page_widget = QtWidgets.QWidget(pages_widget) - home_body_widget = QtWidgets.QWidget(home_page_widget) - - assets_widget = SingleSelectAssetsWidget(io, parent=home_body_widget) - assets_widget.set_current_asset_btn_visibility(True) - - tasks_widget = TasksWidget(io, home_body_widget) - files_widget = FilesWidget(home_body_widget) - side_panel = SidePanelWidget(home_body_widget) - - pages_widget.addWidget(home_page_widget) - - # Build home - home_page_layout = QtWidgets.QVBoxLayout(home_page_widget) - home_page_layout.addWidget(home_body_widget) - - # Build home - body - body_layout = QtWidgets.QVBoxLayout(home_body_widget) - split_widget = QtWidgets.QSplitter(home_body_widget) - split_widget.addWidget(assets_widget) - split_widget.addWidget(tasks_widget) - split_widget.addWidget(files_widget) - split_widget.addWidget(side_panel) - split_widget.setSizes([255, 160, 455, 175]) - - body_layout.addWidget(split_widget) - - # Add top margin for tasks to align it visually with files as - # the files widget has a filter field which tasks does not. - tasks_widget.setContentsMargins(0, 32, 0, 0) - - # Set context after asset widget is refreshed - # - to do so it is necessary to wait until refresh is done - set_context_timer = QtCore.QTimer() - set_context_timer.setInterval(100) - - # Connect signals - set_context_timer.timeout.connect(self._on_context_set_timeout) - assets_widget.selection_changed.connect(self._on_asset_changed) - tasks_widget.task_changed.connect(self._on_task_changed) - files_widget.file_selected.connect(self.on_file_select) - files_widget.workfile_created.connect(self.on_workfile_create) - files_widget.file_opened.connect(self._on_file_opened) - side_panel.save_clicked.connect(self.on_side_panel_save) - - self._set_context_timer = set_context_timer - self.home_page_widget = home_page_widget - self.pages_widget = pages_widget - self.home_body_widget = home_body_widget - self.split_widget = split_widget - - self.assets_widget = assets_widget - self.tasks_widget = tasks_widget - self.files_widget = files_widget - self.side_panel = side_panel - - # Force focus on the open button by default, required for Houdini. - files_widget.btn_open.setFocus() - - self.resize(1200, 600) - - self._first_show = True - self._context_to_set = None - - def showEvent(self, event): - super(Window, self).showEvent(event) - if self._first_show: - self._first_show = False - self.refresh() - self.setStyleSheet(style.load_stylesheet()) - - def keyPressEvent(self, event): - """Custom keyPressEvent. - - Override keyPressEvent to do nothing so that Maya's panels won't - take focus when pressing "SHIFT" whilst mouse is over viewport or - outliner. This way users don't accidentally perform Maya commands - whilst trying to name an instance. 
- - """ - - def set_save_enabled(self, enabled): - self.files_widget.btn_save.setEnabled(enabled) - - def on_file_select(self, filepath): - asset_id = self.assets_widget.get_selected_asset_id() - task_name = self.tasks_widget.get_selected_task_name() - - workfile_doc = None - if asset_id and task_name and filepath: - filename = os.path.split(filepath)[1] - workfile_doc = get_workfile_doc( - asset_id, task_name, filename, io - ) - self.side_panel.set_context( - asset_id, task_name, filepath, workfile_doc - ) - - def on_workfile_create(self, filepath): - self._create_workfile_doc(filepath) - - def _on_file_opened(self): - self.close() - - def on_side_panel_save(self): - workfile_doc, data = self.side_panel.get_workfile_data() - if not workfile_doc: - filepath = self.files_widget._get_selected_filepath() - self._create_workfile_doc(filepath, force=True) - workfile_doc = self._get_current_workfile_doc() - - save_workfile_data_to_doc(workfile_doc, data, io) - - def _get_current_workfile_doc(self, filepath=None): - if filepath is None: - filepath = self.files_widget._get_selected_filepath() - task_name = self.tasks_widget.get_selected_task_name() - asset_id = self.assets_widget.get_selected_asset_id() - if not task_name or not asset_id or not filepath: - return - - filename = os.path.split(filepath)[1] - return get_workfile_doc( - asset_id, task_name, filename, io - ) - - def _create_workfile_doc(self, filepath, force=False): - workfile_doc = None - if not force: - workfile_doc = self._get_current_workfile_doc(filepath) - - if not workfile_doc: - workdir, filename = os.path.split(filepath) - asset_id = self.assets_widget.get_selected_asset_id() - asset_doc = io.find_one({"_id": asset_id}) - task_name = self.tasks_widget.get_selected_task_name() - create_workfile_doc(asset_doc, task_name, filename, workdir, io) - - def refresh(self): - # Refresh asset widget - self.assets_widget.refresh() - - self._on_task_changed() - - def set_context(self, context): - self._context_to_set = context - self._set_context_timer.start() - - def _on_context_set_timeout(self): - if self._context_to_set is None: - self._set_context_timer.stop() - return - - if self.assets_widget.refreshing: - return - - self._context_to_set, context = None, self._context_to_set - if "asset" in context: - asset_doc = io.find_one( - { - "name": context["asset"], - "type": "asset" - }, - {"_id": 1} - ) or {} - asset_id = asset_doc.get("_id") - # Select the asset - self.assets_widget.select_asset(asset_id) - self.tasks_widget.set_asset_id(asset_id) - - if "task" in context: - self.tasks_widget.select_task_name(context["task"]) - self._on_task_changed() - - def _on_asset_changed(self): - asset_id = self.assets_widget.get_selected_asset_id() - if asset_id: - self.tasks_widget.setEnabled(True) - else: - # Force disable the other widgets if no - # active selection - self.tasks_widget.setEnabled(False) - self.files_widget.setEnabled(False) - - self.tasks_widget.set_asset_id(asset_id) - - def _on_task_changed(self): - asset_id = self.assets_widget.get_selected_asset_id() - task_name = self.tasks_widget.get_selected_task_name() - task_type = self.tasks_widget.get_selected_task_type() - - asset_is_valid = asset_id is not None - self.tasks_widget.setEnabled(asset_is_valid) - - self.files_widget.setEnabled(bool(task_name) and asset_is_valid) - self.files_widget.set_asset_task(asset_id, task_name, task_type) - self.files_widget.refresh() - - def validate_host_requirements(host): if host is None: raise RuntimeError("No registered host.") diff --git 
a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py new file mode 100644 index 0000000000..d602ad3c1b --- /dev/null +++ b/openpype/tools/workfiles/files_widget.py @@ -0,0 +1,445 @@ +import os +import logging +import shutil + +import Qt +from Qt import QtWidgets, QtCore +from avalon import io, api + +from openpype.tools.utils import PlaceholderLineEdit +from openpype.tools.utils.delegates import PrettyTimeDelegate +from openpype.lib import ( + emit_event, + Anatomy, + get_workfile_template_key, + create_workdir_extra_folders, +) +from openpype.lib.avalon_context import ( + update_current_task, + compute_session_changes +) +from .model import ( + WorkAreaFilesModel, + + FILEPATH_ROLE, + DATE_MODIFIED_ROLE, +) +from .save_as_dialog import SaveAsDialog + +log = logging.getLogger(__name__) + + +class FilesView(QtWidgets.QTreeView): + doubleClickedLeft = QtCore.Signal() + doubleClickedRight = QtCore.Signal() + + def mouseDoubleClickEvent(self, event): + if event.button() == QtCore.Qt.LeftButton: + self.doubleClickedLeft.emit() + + elif event.button() == QtCore.Qt.RightButton: + self.doubleClickedRight.emit() + + return super(FilesView, self).mouseDoubleClickEvent(event) + + +class FilesWidget(QtWidgets.QWidget): + """A widget displaying files that allows to save and open files.""" + file_selected = QtCore.Signal(str) + workfile_created = QtCore.Signal(str) + file_opened = QtCore.Signal() + + def __init__(self, parent=None): + super(FilesWidget, self).__init__(parent=parent) + + # Setup + self._asset_id = None + self._asset_doc = None + self._task_name = None + self._task_type = None + + # Pype's anatomy object for current project + self.anatomy = Anatomy(io.Session["AVALON_PROJECT"]) + # Template key used to get work template from anatomy templates + self.template_key = "work" + + # This is not root but workfile directory + self._workfiles_root = None + self._workdir_path = None + self.host = api.registered_host() + + # Whether to automatically select the latest modified + # file on a refresh of the files model. + self.auto_select_latest_modified = True + + # Avoid crash in Blender and store the message box + # (setting parent doesn't work as it hides the message box) + self._messagebox = None + + files_view = FilesView(self) + + # Create the Files model + extensions = set(self.host.file_extensions()) + files_model = WorkAreaFilesModel(extensions) + + # Create proxy model for files to be able sort and filter + proxy_model = QtCore.QSortFilterProxyModel() + proxy_model.setSourceModel(files_model) + proxy_model.setDynamicSortFilter(True) + proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive) + + # Set up the file list tree view + files_view.setModel(proxy_model) + files_view.setSortingEnabled(True) + files_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) + + # Date modified delegate + time_delegate = PrettyTimeDelegate() + files_view.setItemDelegateForColumn(1, time_delegate) + files_view.setIndentation(3) # smaller indentation + + # Default to a wider first filename column it is what we mostly care + # about and the date modified is relatively small anyway. 
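+    # Column 1 (date modified) keeps its default width; it is rendered by the PrettyTimeDelegate assigned above.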
+ files_view.setColumnWidth(0, 330) + + # Filtering input + filter_input = PlaceholderLineEdit(self) + filter_input.setPlaceholderText("Filter files..") + filter_input.textChanged.connect(proxy_model.setFilterFixedString) + + # Home Page + # Build buttons widget for files widget + btns_widget = QtWidgets.QWidget(self) + btn_save = QtWidgets.QPushButton("Save As", btns_widget) + btn_browse = QtWidgets.QPushButton("Browse", btns_widget) + btn_open = QtWidgets.QPushButton("Open", btns_widget) + + btns_layout = QtWidgets.QHBoxLayout(btns_widget) + btns_layout.setContentsMargins(0, 0, 0, 0) + btns_layout.addWidget(btn_open) + btns_layout.addWidget(btn_browse) + btns_layout.addWidget(btn_save) + + # Build files widgets for home page + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.setContentsMargins(0, 0, 0, 0) + main_layout.addWidget(filter_input) + main_layout.addWidget(files_view) + main_layout.addWidget(btns_widget) + + # Register signal callbacks + files_view.doubleClickedLeft.connect(self.on_open_pressed) + files_view.customContextMenuRequested.connect(self.on_context_menu) + files_view.selectionModel().selectionChanged.connect( + self.on_file_select + ) + + btn_open.pressed.connect(self.on_open_pressed) + btn_browse.pressed.connect(self.on_browse_pressed) + btn_save.pressed.connect(self.on_save_as_pressed) + + # Store attributes + self.time_delegate = time_delegate + + self.filter_input = filter_input + + self.files_view = files_view + self.files_model = files_model + + self.btns_widget = btns_widget + self.btn_open = btn_open + self.btn_browse = btn_browse + self.btn_save = btn_save + + def set_asset_task(self, asset_id, task_name, task_type): + if asset_id != self._asset_id: + self._asset_doc = None + self._asset_id = asset_id + self._task_name = task_name + self._task_type = task_type + + # Define a custom session so we can query the work root + # for a "Work area" that is not our current Session. + # This way we can browse it even before we enter it. 
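+        # _get_session() returns a copy of the session with the selected asset/task applied (AVALON_WORKDIR included), so work_root() below can resolve the target folder.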
+ if self._asset_id and self._task_name and self._task_type: + session = self._get_session() + self._workdir_path = session["AVALON_WORKDIR"] + self._workfiles_root = self.host.work_root(session) + self.files_model.set_root(self._workfiles_root) + + else: + self.files_model.set_root(None) + + # Disable/Enable buttons based on available files in model + has_valid_items = self.files_model.has_valid_items() + self.btn_browse.setEnabled(has_valid_items) + self.btn_open.setEnabled(has_valid_items) + if not has_valid_items: + # Manually trigger file selection + self.on_file_select() + + def _get_asset_doc(self): + if self._asset_id is None: + return None + + if self._asset_doc is None: + self._asset_doc = io.find_one({"_id": self._asset_id}) + return self._asset_doc + + def _get_session(self): + """Return a modified session for the current asset and task""" + + session = api.Session.copy() + self.template_key = get_workfile_template_key( + self._task_type, + session["AVALON_APP"], + project_name=session["AVALON_PROJECT"] + ) + changes = compute_session_changes( + session, + asset=self._get_asset_doc(), + task=self._task_name, + template_key=self.template_key + ) + session.update(changes) + + return session + + def _enter_session(self): + """Enter the asset and task session currently selected""" + + session = api.Session.copy() + changes = compute_session_changes( + session, + asset=self._get_asset_doc(), + task=self._task_name, + template_key=self.template_key + ) + if not changes: + # Return early if we're already in the right Session context + # to avoid any unwanted Task Changed callbacks to be triggered. + return + + update_current_task( + asset=self._get_asset_doc(), + task=self._task_name, + template_key=self.template_key + ) + + def open_file(self, filepath): + host = self.host + if host.has_unsaved_changes(): + result = self.save_changes_prompt() + if result is None: + # Cancel operation + return False + + # Save first if has changes + if result: + current_file = host.current_file() + if not current_file: + # If the user requested to save the current scene + # we can't actually automatically do so if the current + # file has not been saved with a name yet. So we'll have + # to opt out. + log.error("Can't save scene with no filename. Please " + "first save your work file using 'Save As'.") + return + + # Save current scene, continue to open file + host.save_file(current_file) + + self._enter_session() + host.open_file(filepath) + self.file_opened.emit() + + def save_changes_prompt(self): + self._messagebox = messagebox = QtWidgets.QMessageBox(parent=self) + messagebox.setWindowFlags(messagebox.windowFlags() | + QtCore.Qt.FramelessWindowHint) + messagebox.setIcon(messagebox.Warning) + messagebox.setWindowTitle("Unsaved Changes!") + messagebox.setText( + "There are unsaved changes to the current file." + "\nDo you want to save the changes?" + ) + messagebox.setStandardButtons( + messagebox.Yes | messagebox.No | messagebox.Cancel + ) + + result = messagebox.exec_() + if result == messagebox.Yes: + return True + if result == messagebox.No: + return False + return None + + def get_filename(self): + """Show save dialog to define filename for save or duplicate + + Returns: + str: The filename to create. 
+ + """ + session = self._get_session() + + window = SaveAsDialog( + parent=self, + root=self._workfiles_root, + anatomy=self.anatomy, + template_key=self.template_key, + session=session + ) + window.exec_() + + return window.get_result() + + def on_duplicate_pressed(self): + work_file = self.get_filename() + if not work_file: + return + + src = self._get_selected_filepath() + dst = os.path.join(self._workfiles_root, work_file) + shutil.copy(src, dst) + + self.workfile_created.emit(dst) + + self.refresh() + + def _get_selected_filepath(self): + """Return current filepath selected in view""" + selection = self.files_view.selectionModel() + index = selection.currentIndex() + if not index.isValid(): + return + + return index.data(FILEPATH_ROLE) + + def on_open_pressed(self): + path = self._get_selected_filepath() + if not path: + print("No file selected to open..") + return + + self.open_file(path) + + def on_browse_pressed(self): + ext_filter = "Work File (*{0})".format( + " *".join(self.host.file_extensions()) + ) + kwargs = { + "caption": "Work Files", + "filter": ext_filter + } + if Qt.__binding__ in ("PySide", "PySide2"): + kwargs["dir"] = self._workfiles_root + else: + kwargs["directory"] = self._workfiles_root + + work_file = QtWidgets.QFileDialog.getOpenFileName(**kwargs)[0] + if work_file: + self.open_file(work_file) + + def on_save_as_pressed(self): + work_filename = self.get_filename() + if not work_filename: + return + + # Trigger before save event + emit_event( + "workfile.save.before", + {"filename": work_filename, "workdir_path": self._workdir_path}, + source="workfiles.tool" + ) + + # Make sure workfiles root is updated + # - this triggers 'workio.work_root(...)' which may change value of + # '_workfiles_root' + self.set_asset_task( + self._asset_id, self._task_name, self._task_type + ) + + # Create workfiles root folder + if not os.path.exists(self._workfiles_root): + log.debug("Initializing Work Directory: %s", self._workfiles_root) + os.makedirs(self._workfiles_root) + + # Update session if context has changed + self._enter_session() + # Prepare full path to workfile and save it + filepath = os.path.join( + os.path.normpath(self._workfiles_root), work_filename + ) + self.host.save_file(filepath) + # Create extra folders + create_workdir_extra_folders( + self._workdir_path, + api.Session["AVALON_APP"], + self._task_type, + self._task_name, + api.Session["AVALON_PROJECT"] + ) + # Trigger after save events + emit_event( + "workfile.save.after", + {"filename": work_filename, "workdir_path": self._workdir_path}, + source="workfiles.tool" + ) + + self.workfile_created.emit(filepath) + # Refresh files model + self.refresh() + + def on_file_select(self): + self.file_selected.emit(self._get_selected_filepath()) + + def refresh(self): + """Refresh listed files for current selection in the interface""" + self.files_model.refresh() + + if self.auto_select_latest_modified: + self._select_last_modified_file() + + def on_context_menu(self, point): + index = self._workarea_files_view.indexAt(point) + if not index.isValid(): + return + + if not index.flags() & QtCore.Qt.ItemIsEnabled: + return + + menu = QtWidgets.QMenu(self) + + # Duplicate + action = QtWidgets.QAction("Duplicate", menu) + tip = "Duplicate selected file." 
+ action.setToolTip(tip) + action.setStatusTip(tip) + action.triggered.connect(self.on_duplicate_pressed) + menu.addAction(action) + + # Show the context action menu + global_point = self.files_view.mapToGlobal(point) + action = menu.exec_(global_point) + if not action: + return + + def _select_last_modified_file(self): + """Utility function to select the file with latest date modified""" + model = self.files_view.model() + + highest_index = None + highest = 0 + for row in range(model.rowCount()): + index = model.index(row, 0, parent=QtCore.QModelIndex()) + if not index.isValid(): + continue + + modified = index.data(DATE_MODIFIED_ROLE) + if modified is not None and modified > highest: + highest_index = index + highest = modified + + if highest_index: + self.files_view.setCurrentIndex(highest_index) diff --git a/openpype/tools/workfiles/view.py b/openpype/tools/workfiles/view.py deleted file mode 100644 index 8e3993e4c7..0000000000 --- a/openpype/tools/workfiles/view.py +++ /dev/null @@ -1,15 +0,0 @@ -from Qt import QtWidgets, QtCore - - -class FilesView(QtWidgets.QTreeView): - doubleClickedLeft = QtCore.Signal() - doubleClickedRight = QtCore.Signal() - - def mouseDoubleClickEvent(self, event): - if event.button() == QtCore.Qt.LeftButton: - self.doubleClickedLeft.emit() - - elif event.button() == QtCore.Qt.RightButton: - self.doubleClickedRight.emit() - - return super(FilesView, self).mouseDoubleClickEvent(event) diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py new file mode 100644 index 0000000000..f68b721872 --- /dev/null +++ b/openpype/tools/workfiles/window.py @@ -0,0 +1,334 @@ +import os +import datetime +from Qt import QtCore, QtWidgets + +from avalon import io + +from openpype import style +from openpype.lib import ( + get_workfile_doc, + create_workfile_doc, + save_workfile_data_to_doc, +) +from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget +from openpype.tools.utils.tasks_widget import TasksWidget + +from .files_widget import FilesWidget + + +class SidePanelWidget(QtWidgets.QWidget): + save_clicked = QtCore.Signal() + + def __init__(self, parent=None): + super(SidePanelWidget, self).__init__(parent) + + details_label = QtWidgets.QLabel("Details", self) + details_input = QtWidgets.QPlainTextEdit(self) + details_input.setReadOnly(True) + + note_label = QtWidgets.QLabel("Artist note", self) + note_input = QtWidgets.QPlainTextEdit(self) + btn_note_save = QtWidgets.QPushButton("Save note", self) + + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.setContentsMargins(0, 0, 0, 0) + main_layout.addWidget(details_label, 0) + main_layout.addWidget(details_input, 0) + main_layout.addWidget(note_label, 0) + main_layout.addWidget(note_input, 1) + main_layout.addWidget(btn_note_save, alignment=QtCore.Qt.AlignRight) + + note_input.textChanged.connect(self.on_note_change) + btn_note_save.clicked.connect(self.on_save_click) + + self.details_input = details_input + self.note_input = note_input + self.btn_note_save = btn_note_save + + self._orig_note = "" + self._workfile_doc = None + + def on_note_change(self): + text = self.note_input.toPlainText() + self.btn_note_save.setEnabled(self._orig_note != text) + + def on_save_click(self): + self._orig_note = self.note_input.toPlainText() + self.on_note_change() + self.save_clicked.emit() + + def set_context(self, asset_id, task_name, filepath, workfile_doc): + # Check if asset, task and file are selected + # NOTE workfile document is not requirement + enabled = bool(asset_id) and 
bool(task_name) and bool(filepath) + + self.details_input.setEnabled(enabled) + self.note_input.setEnabled(enabled) + self.btn_note_save.setEnabled(enabled) + + # Make sure workfile doc is overridden + self._workfile_doc = workfile_doc + # Disable inputs and remove texts if any required arguments are missing + if not enabled: + self._orig_note = "" + self.details_input.setPlainText("") + self.note_input.setPlainText("") + return + + orig_note = "" + if workfile_doc: + orig_note = workfile_doc["data"].get("note") or orig_note + + self._orig_note = orig_note + self.note_input.setPlainText(orig_note) + # Set as empty string + self.details_input.setPlainText("") + + filestat = os.stat(filepath) + size_ending_mapping = { + "KB": 1024 ** 1, + "MB": 1024 ** 2, + "GB": 1024 ** 3 + } + size = filestat.st_size + ending = "B" + for _ending, _size in size_ending_mapping.items(): + if filestat.st_size < _size: + break + size = filestat.st_size / _size + ending = _ending + + # Append html string + datetime_format = "%b %d %Y %H:%M:%S" + creation_time = datetime.datetime.fromtimestamp(filestat.st_ctime) + modification_time = datetime.datetime.fromtimestamp(filestat.st_mtime) + lines = ( + "Size:", + "{:.2f} {}".format(size, ending), + "Created:", + creation_time.strftime(datetime_format), + "Modified:", + modification_time.strftime(datetime_format) + ) + self.details_input.appendHtml("
".join(lines)) + + def get_workfile_data(self): + data = { + "note": self.note_input.toPlainText() + } + return self._workfile_doc, data + + +class Window(QtWidgets.QMainWindow): + """Work Files Window""" + title = "Work Files" + + def __init__(self, parent=None): + super(Window, self).__init__(parent=parent) + self.setWindowTitle(self.title) + window_flags = QtCore.Qt.Window | QtCore.Qt.WindowCloseButtonHint + if not parent: + window_flags |= QtCore.Qt.WindowStaysOnTopHint + self.setWindowFlags(window_flags) + + # Create pages widget and set it as central widget + pages_widget = QtWidgets.QStackedWidget(self) + self.setCentralWidget(pages_widget) + + home_page_widget = QtWidgets.QWidget(pages_widget) + home_body_widget = QtWidgets.QWidget(home_page_widget) + + assets_widget = SingleSelectAssetsWidget(io, parent=home_body_widget) + assets_widget.set_current_asset_btn_visibility(True) + + tasks_widget = TasksWidget(io, home_body_widget) + files_widget = FilesWidget(home_body_widget) + side_panel = SidePanelWidget(home_body_widget) + + pages_widget.addWidget(home_page_widget) + + # Build home + home_page_layout = QtWidgets.QVBoxLayout(home_page_widget) + home_page_layout.addWidget(home_body_widget) + + # Build home - body + body_layout = QtWidgets.QVBoxLayout(home_body_widget) + split_widget = QtWidgets.QSplitter(home_body_widget) + split_widget.addWidget(assets_widget) + split_widget.addWidget(tasks_widget) + split_widget.addWidget(files_widget) + split_widget.addWidget(side_panel) + split_widget.setSizes([255, 160, 455, 175]) + + body_layout.addWidget(split_widget) + + # Add top margin for tasks to align it visually with files as + # the files widget has a filter field which tasks does not. + tasks_widget.setContentsMargins(0, 32, 0, 0) + + # Set context after asset widget is refreshed + # - to do so it is necessary to wait until refresh is done + set_context_timer = QtCore.QTimer() + set_context_timer.setInterval(100) + + # Connect signals + set_context_timer.timeout.connect(self._on_context_set_timeout) + assets_widget.selection_changed.connect(self._on_asset_changed) + tasks_widget.task_changed.connect(self._on_task_changed) + files_widget.file_selected.connect(self.on_file_select) + files_widget.workfile_created.connect(self.on_workfile_create) + files_widget.file_opened.connect(self._on_file_opened) + side_panel.save_clicked.connect(self.on_side_panel_save) + + self._set_context_timer = set_context_timer + self.home_page_widget = home_page_widget + self.pages_widget = pages_widget + self.home_body_widget = home_body_widget + self.split_widget = split_widget + + self.assets_widget = assets_widget + self.tasks_widget = tasks_widget + self.files_widget = files_widget + self.side_panel = side_panel + + # Force focus on the open button by default, required for Houdini. + files_widget.btn_open.setFocus() + + self.resize(1200, 600) + + self._first_show = True + self._context_to_set = None + + def showEvent(self, event): + super(Window, self).showEvent(event) + if self._first_show: + self._first_show = False + self.refresh() + self.setStyleSheet(style.load_stylesheet()) + + def keyPressEvent(self, event): + """Custom keyPressEvent. + + Override keyPressEvent to do nothing so that Maya's panels won't + take focus when pressing "SHIFT" whilst mouse is over viewport or + outliner. This way users don't accidentally perform Maya commands + whilst trying to name an instance. 
+ + """ + + def set_save_enabled(self, enabled): + self.files_widget.btn_save.setEnabled(enabled) + + def on_file_select(self, filepath): + asset_id = self.assets_widget.get_selected_asset_id() + task_name = self.tasks_widget.get_selected_task_name() + + workfile_doc = None + if asset_id and task_name and filepath: + filename = os.path.split(filepath)[1] + workfile_doc = get_workfile_doc( + asset_id, task_name, filename, io + ) + self.side_panel.set_context( + asset_id, task_name, filepath, workfile_doc + ) + + def on_workfile_create(self, filepath): + self._create_workfile_doc(filepath) + + def _on_file_opened(self): + self.close() + + def on_side_panel_save(self): + workfile_doc, data = self.side_panel.get_workfile_data() + if not workfile_doc: + filepath = self.files_widget._get_selected_filepath() + self._create_workfile_doc(filepath, force=True) + workfile_doc = self._get_current_workfile_doc() + + save_workfile_data_to_doc(workfile_doc, data, io) + + def _get_current_workfile_doc(self, filepath=None): + if filepath is None: + filepath = self.files_widget._get_selected_filepath() + task_name = self.tasks_widget.get_selected_task_name() + asset_id = self.assets_widget.get_selected_asset_id() + if not task_name or not asset_id or not filepath: + return + + filename = os.path.split(filepath)[1] + return get_workfile_doc( + asset_id, task_name, filename, io + ) + + def _create_workfile_doc(self, filepath, force=False): + workfile_doc = None + if not force: + workfile_doc = self._get_current_workfile_doc(filepath) + + if not workfile_doc: + workdir, filename = os.path.split(filepath) + asset_id = self.assets_widget.get_selected_asset_id() + asset_doc = io.find_one({"_id": asset_id}) + task_name = self.tasks_widget.get_selected_task_name() + create_workfile_doc(asset_doc, task_name, filename, workdir, io) + + def refresh(self): + # Refresh asset widget + self.assets_widget.refresh() + + self._on_task_changed() + + def set_context(self, context): + self._context_to_set = context + self._set_context_timer.start() + + def _on_context_set_timeout(self): + if self._context_to_set is None: + self._set_context_timer.stop() + return + + if self.assets_widget.refreshing: + return + + self._context_to_set, context = None, self._context_to_set + if "asset" in context: + asset_doc = io.find_one( + { + "name": context["asset"], + "type": "asset" + }, + {"_id": 1} + ) or {} + asset_id = asset_doc.get("_id") + # Select the asset + self.assets_widget.select_asset(asset_id) + self.tasks_widget.set_asset_id(asset_id) + + if "task" in context: + self.tasks_widget.select_task_name(context["task"]) + self._on_task_changed() + + def _on_asset_changed(self): + asset_id = self.assets_widget.get_selected_asset_id() + if asset_id: + self.tasks_widget.setEnabled(True) + else: + # Force disable the other widgets if no + # active selection + self.tasks_widget.setEnabled(False) + self.files_widget.setEnabled(False) + + self.tasks_widget.set_asset_id(asset_id) + + def _on_task_changed(self): + asset_id = self.assets_widget.get_selected_asset_id() + task_name = self.tasks_widget.get_selected_task_name() + task_type = self.tasks_widget.get_selected_task_type() + + asset_is_valid = asset_id is not None + self.tasks_widget.setEnabled(asset_is_valid) + + self.files_widget.setEnabled(bool(task_name) and asset_is_valid) + self.files_widget.set_asset_task(asset_id, task_name, task_type) + self.files_widget.refresh() From fa764a12823147ae7bd22d01e2dd09ba2083c3bc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 
11:05:38 +0100 Subject: [PATCH 109/196] base implementation of published files model --- openpype/tools/workfiles/model.py | 209 +++++++++++++++++++++++++++++- 1 file changed, 208 insertions(+), 1 deletion(-) diff --git a/openpype/tools/workfiles/model.py b/openpype/tools/workfiles/model.py index fa450f0a8a..f38c80b190 100644 --- a/openpype/tools/workfiles/model.py +++ b/openpype/tools/workfiles/model.py @@ -8,10 +8,11 @@ from openpype.style import ( get_default_entity_icon_color, get_disabled_entity_icon_color, ) - +from openpype.pipeline import get_representation_path log = logging.getLogger(__name__) + FILEPATH_ROLE = QtCore.Qt.UserRole + 2 DATE_MODIFIED_ROLE = QtCore.Qt.UserRole + 3 ITEM_ID_ROLE = QtCore.Qt.UserRole + 4 @@ -170,3 +171,209 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): return super(WorkAreaFilesModel, self).headerData( section, orientation, role ) + + +class PublishFilesModel(QtGui.QStandardItemModel): + def __init__(self, extensions, dbcon, anatomy, *args, **kwargs): + super(PublishFilesModel, self).__init__(*args, **kwargs) + + self.setColumnCount(2) + + self._dbcon = dbcon + self._anatomy = anatomy + self._file_extensions = extensions + + self._invalid_context_item = None + self._empty_root_item = None + self._file_icon = qtawesome.icon( + "fa.file-o", + color=get_default_entity_icon_color() + ) + self._invalid_item_visible = False + + self._items_by_id = {} + + self._asset_id = None + self._task_name = None + + def _get_invalid_context_item(self): + if self._invalid_context_item is None: + message = "Selected context is not vald." + item = QtGui.QStandardItem(message) + icon = qtawesome.icon( + "fa.times", + color=get_disabled_entity_icon_color() + ) + item.setData(icon, QtCore.Qt.DecorationRole) + item.setFlags(QtCore.Qt.NoItemFlags) + item.setColumnCount(self.columnCount()) + self._invalid_context_item = item + return self._invalid_context_item + + def _get_empty_root_item(self): + if self._empty_root_item is None: + message = "Didn't find any published workfiles." 
+ item = QtGui.QStandardItem(message) + icon = qtawesome.icon( + "fa.times", + color=get_disabled_entity_icon_color() + ) + item.setData(icon, QtCore.Qt.DecorationRole) + item.setFlags(QtCore.Qt.NoItemFlags) + item.setColumnCount(self.columnCount()) + self._empty_root_item = item + return self._empty_root_item + + def set_context(self, asset_id, task_name): + self._asset_id = asset_id + self._task_name = task_name + self.refresh() + + def _clear(self): + root_item = self.invisibleRootItem() + rows = root_item.rowCount() + if rows > 0: + if self._invalid_item_visible: + for row in range(rows): + root_item.takeRow(row) + else: + root_item.removeRows(0, rows) + self._items_by_id = {} + + def _get_workfie_representations(self): + output = [] + subset_docs = self._dbcon.find({ + "type": "subset", + "parent": self._asset_id + }) + filtered_subsets = [] + for subset_doc in subset_docs: + data = subset_doc.get("data") or {} + families = data.get("families") or [] + if "workfile" in families: + filtered_subsets.append(subset_doc) + + subset_ids = [subset_doc["_id"] for subset_doc in filtered_subsets] + if not subset_ids: + return output + + version_docs = self._dbcon.find({ + "type": "version", + "parent": {"$in": subset_ids} + }) + version_ids = [version_doc["_id"] for version_doc in version_docs] + if not version_ids: + return output + + extensions = [ext.replace(".", "") for ext in self._file_extensions] + repre_docs = self._dbcon.find( + { + "type": "representation", + "parent": {"$in": version_ids}, + "context.ext": {"$in": extensions} + } + ) + for repre_doc in repre_docs: + task_info = repre_doc["context"].get("task") + if not task_info: + print("Not task info") + continue + + if isinstance(task_info, dict): + task_name = task_info.get("name") + else: + task_name = task_info + + if task_name == self._task_name: + path = get_representation_path( + repre_doc, root=self._anatomy.roots + ) + output.append((path, repre_doc["_id"])) + return output + + def refresh(self): + root_item = self.invisibleRootItem() + if not self._asset_id or not self._task_name: + self._clear() + # Add Work Area does not exist placeholder + item = self._get_invalid_path_item() + root_item.appendRow(item) + self._invalid_item_visible = True + return + + if self._invalid_item_visible: + self._clear() + + new_items = [] + items_to_remove = set(self._items_by_id.keys()) + for item in self._get_workfie_representations(): + filepath, repre_id = item + modified = os.path.getmtime(filepath) + filename = os.path.basename(filepath) + + if repre_id in items_to_remove: + items_to_remove.remove(repre_id) + item = self._items_by_id[repre_id] + else: + item = QtGui.QStandardItem(filename) + item.setColumnCount(self.columnCount()) + item.setFlags( + QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable + ) + item.setData(self._file_icon, QtCore.Qt.DecorationRole) + new_items.append(item) + self._items_by_id[repre_id] = item + item.setData(filepath, FILEPATH_ROLE) + item.setData(modified, DATE_MODIFIED_ROLE) + item.setData(repre_id, ITEM_ID_ROLE) + + if new_items: + root_item.appendRows(new_items) + + for filename in items_to_remove: + item = self._items_by_id.pop(filename) + root_item.removeRow(item.row()) + + if root_item.rowCount() > 0: + self._invalid_item_visible = False + else: + self._invalid_item_visible = True + item = self._get_empty_root_item() + root_item.appendRow(item) + + def has_valid_items(self): + return not self._invalid_item_visible + + def flags(self, index): + if index.column() != 0: + index = 
self.index(index.row(), 0, index.parent()) + return super(PublishFilesModel, self).flags(index) + + def data(self, index, role=None): + if role is None: + role = QtCore.Qt.DisplayRole + + if index.column() == 1: + if role == QtCore.Qt.DecorationRole: + return None + + if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole): + role = DATE_MODIFIED_ROLE + index = self.index(index.row(), 0, index.parent()) + + return super(PublishFilesModel, self).data(index, role) + + def headerData(self, section, orientation, role): + # Show nice labels in the header + if ( + role == QtCore.Qt.DisplayRole + and orientation == QtCore.Qt.Horizontal + ): + if section == 0: + return "Name" + elif section == 1: + return "Date modified" + + return super(PublishFilesModel, self).headerData( + section, orientation, role + ) From 02e4f239a97a559fab0cff88f20427c434737334 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 11:06:23 +0100 Subject: [PATCH 110/196] modified files widget to have view for workarea and published files --- openpype/tools/workfiles/files_widget.py | 195 +++++++++++++++++------ 1 file changed, 142 insertions(+), 53 deletions(-) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index d602ad3c1b..2c569064d4 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -20,6 +20,7 @@ from openpype.lib.avalon_context import ( ) from .model import ( WorkAreaFilesModel, + PublishFilesModel, FILEPATH_ROLE, DATE_MODIFIED_ROLE, @@ -76,36 +77,77 @@ class FilesWidget(QtWidgets.QWidget): # (setting parent doesn't work as it hides the message box) self._messagebox = None - files_view = FilesView(self) + # Filtering input + filter_widget = QtWidgets.QWidget(self) - # Create the Files model + published_checkbox = QtWidgets.QCheckBox("Published", filter_widget) + + filter_input = PlaceholderLineEdit(filter_widget) + filter_input.setPlaceholderText("Filter files..") + + filter_layout = QtWidgets.QHBoxLayout(filter_widget) + filter_layout.setContentsMargins(0, 0, 0, 0) + filter_layout.addWidget(published_checkbox, 0) + filter_layout.addWidget(filter_input, 1) + + # Create the Files models extensions = set(self.host.file_extensions()) - files_model = WorkAreaFilesModel(extensions) + + views_widget = QtWidgets.QWidget(self) + # Workarea view + workarea_files_model = WorkAreaFilesModel(extensions) # Create proxy model for files to be able sort and filter - proxy_model = QtCore.QSortFilterProxyModel() - proxy_model.setSourceModel(files_model) - proxy_model.setDynamicSortFilter(True) - proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive) + workarea_proxy_model = QtCore.QSortFilterProxyModel() + workarea_proxy_model.setSourceModel(workarea_files_model) + workarea_proxy_model.setDynamicSortFilter(True) + workarea_proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive) # Set up the file list tree view - files_view.setModel(proxy_model) - files_view.setSortingEnabled(True) - files_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) + workarea_files_view = FilesView(views_widget) + workarea_files_view.setModel(workarea_proxy_model) + workarea_files_view.setSortingEnabled(True) + workarea_files_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) # Date modified delegate - time_delegate = PrettyTimeDelegate() - files_view.setItemDelegateForColumn(1, time_delegate) - files_view.setIndentation(3) # smaller indentation + workarea_time_delegate = PrettyTimeDelegate() + 
workarea_files_view.setItemDelegateForColumn(1, workarea_time_delegate) + workarea_files_view.setIndentation(3) # smaller indentation # Default to a wider first filename column it is what we mostly care # about and the date modified is relatively small anyway. - files_view.setColumnWidth(0, 330) + workarea_files_view.setColumnWidth(0, 330) - # Filtering input - filter_input = PlaceholderLineEdit(self) - filter_input.setPlaceholderText("Filter files..") - filter_input.textChanged.connect(proxy_model.setFilterFixedString) + # Publish files view + publish_files_model = PublishFilesModel(extensions, io, self.anatomy) + + publish_proxy_model = QtCore.QSortFilterProxyModel() + publish_proxy_model.setSourceModel(publish_files_model) + publish_proxy_model.setDynamicSortFilter(True) + publish_proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive) + + publish_files_view = FilesView(views_widget) + publish_files_view.setModel(publish_proxy_model) + + publish_files_view.setSortingEnabled(True) + publish_files_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) + + # Date modified delegate + publish_time_delegate = PrettyTimeDelegate() + publish_files_view.setItemDelegateForColumn(1, publish_time_delegate) + publish_files_view.setIndentation(3) # smaller indentation + + # Default to a wider first filename column it is what we mostly care + # about and the date modified is relatively small anyway. + publish_files_view.setColumnWidth(0, 330) + + # Hide publish view first + publish_files_view.setVisible(False) + + views_layout = QtWidgets.QHBoxLayout(views_widget) + views_layout.setContentsMargins(0, 0, 0, 0) + views_layout.addWidget(workarea_files_view, 1) + views_layout.addWidget(publish_files_view, 1) # Home Page # Build buttons widget for files widget @@ -123,60 +165,103 @@ class FilesWidget(QtWidgets.QWidget): # Build files widgets for home page main_layout = QtWidgets.QVBoxLayout(self) main_layout.setContentsMargins(0, 0, 0, 0) - main_layout.addWidget(filter_input) - main_layout.addWidget(files_view) - main_layout.addWidget(btns_widget) + main_layout.addWidget(filter_widget, 0) + main_layout.addWidget(views_widget, 1) + main_layout.addWidget(btns_widget, 0) # Register signal callbacks - files_view.doubleClickedLeft.connect(self.on_open_pressed) - files_view.customContextMenuRequested.connect(self.on_context_menu) - files_view.selectionModel().selectionChanged.connect( + published_checkbox.stateChanged.connect(self._on_published_change) + filter_input.textChanged.connect(self._on_filter_text_change) + + workarea_files_view.doubleClickedLeft.connect( + self._on_workarea_open_pressed + ) + workarea_files_view.customContextMenuRequested.connect( + self._on_workarea_context_menu + ) + workarea_files_view.selectionModel().selectionChanged.connect( self.on_file_select ) - btn_open.pressed.connect(self.on_open_pressed) + btn_open.pressed.connect(self._on_workarea_open_pressed) btn_browse.pressed.connect(self.on_browse_pressed) btn_save.pressed.connect(self.on_save_as_pressed) # Store attributes - self.time_delegate = time_delegate + self._published_checkbox = published_checkbox + self._filter_input = filter_input - self.filter_input = filter_input + self._workarea_time_delegate = workarea_time_delegate + self._workarea_files_view = workarea_files_view + self._workarea_files_model = workarea_files_model + self._workarea_proxy_model = workarea_proxy_model - self.files_view = files_view - self.files_model = files_model + self._publish_time_delegate = publish_time_delegate + self._publish_files_view = 
publish_files_view + self._publish_files_model = publish_files_model + self._publish_proxy_model = publish_proxy_model self.btns_widget = btns_widget self.btn_open = btn_open self.btn_browse = btn_browse self.btn_save = btn_save + self._workarea_visible = True + + def _on_published_change(self): + workarea_visible = not self._published_checkbox.isChecked() + + self._workarea_files_view.setVisible(workarea_visible) + self._publish_files_view.setVisible(not workarea_visible) + + self._workarea_visible = workarea_visible + self._update_filtering() + self._update_asset_task() + + def _on_filter_text_change(self): + self._update_filtering() + + def _update_filtering(self): + text = self._filter_input.text() + if self._workarea_visible: + self._workarea_proxy_model.setFilterFixedString(text) + else: + self._publish_proxy_model.setFilterFixedString(text) + def set_asset_task(self, asset_id, task_name, task_type): if asset_id != self._asset_id: self._asset_doc = None self._asset_id = asset_id self._task_name = task_name self._task_type = task_type + self._update_asset_task() - # Define a custom session so we can query the work root - # for a "Work area" that is not our current Session. - # This way we can browse it even before we enter it. - if self._asset_id and self._task_name and self._task_type: - session = self._get_session() - self._workdir_path = session["AVALON_WORKDIR"] - self._workfiles_root = self.host.work_root(session) - self.files_model.set_root(self._workfiles_root) + def _update_asset_task(self): + if self._workarea_visible: + # Define a custom session so we can query the work root + # for a "Work area" that is not our current Session. + # This way we can browse it even before we enter it. + if self._asset_id and self._task_name and self._task_type: + session = self._get_session() + self._workdir_path = session["AVALON_WORKDIR"] + self._workfiles_root = self.host.work_root(session) + self._workarea_files_model.set_root(self._workfiles_root) + else: + self._workarea_files_model.set_root(None) + + # Disable/Enable buttons based on available files in model + has_valid_items = self._workarea_files_model.has_valid_items() + self.btn_browse.setEnabled(has_valid_items) + self.btn_open.setEnabled(has_valid_items) + if not has_valid_items: + # Manually trigger file selection + self.on_file_select() else: - self.files_model.set_root(None) - - # Disable/Enable buttons based on available files in model - has_valid_items = self.files_model.has_valid_items() - self.btn_browse.setEnabled(has_valid_items) - self.btn_open.setEnabled(has_valid_items) - if not has_valid_items: - # Manually trigger file selection - self.on_file_select() + self._publish_files_model.set_context( + self._asset_id, self._task_name + ) + has_valid_items = self._publish_files_model.has_valid_items() def _get_asset_doc(self): if self._asset_id is None: @@ -309,14 +394,18 @@ class FilesWidget(QtWidgets.QWidget): def _get_selected_filepath(self): """Return current filepath selected in view""" - selection = self.files_view.selectionModel() + if self._workarea_visible: + source_view = self._workarea_files_view + else: + source_view = self._publish_files_view + selection = source_view.selectionModel() index = selection.currentIndex() if not index.isValid(): return return index.data(FILEPATH_ROLE) - def on_open_pressed(self): + def _on_workarea_open_pressed(self): path = self._get_selected_filepath() if not path: print("No file selected to open..") @@ -396,12 +485,12 @@ class FilesWidget(QtWidgets.QWidget): def refresh(self): 
"""Refresh listed files for current selection in the interface""" - self.files_model.refresh() + self._workarea_files_model.refresh() if self.auto_select_latest_modified: self._select_last_modified_file() - def on_context_menu(self, point): + def _on_workarea_context_menu(self, point): index = self._workarea_files_view.indexAt(point) if not index.isValid(): return @@ -420,14 +509,14 @@ class FilesWidget(QtWidgets.QWidget): menu.addAction(action) # Show the context action menu - global_point = self.files_view.mapToGlobal(point) + global_point = self._workarea_files_view.mapToGlobal(point) action = menu.exec_(global_point) if not action: return def _select_last_modified_file(self): """Utility function to select the file with latest date modified""" - model = self.files_view.model() + model = self._workarea_files_view.model() highest_index = None highest = 0 @@ -442,4 +531,4 @@ class FilesWidget(QtWidgets.QWidget): highest = modified if highest_index: - self.files_view.setCurrentIndex(highest_index) + self._workarea_files_view.setCurrentIndex(highest_index) From 2a9a49010506c4c083e82ee5ac7a1ddc864d76fa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 11:21:45 +0100 Subject: [PATCH 111/196] simplified pretty time delegate --- openpype/tools/utils/delegates.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/openpype/tools/utils/delegates.py b/openpype/tools/utils/delegates.py index d3718b1734..41de7cce60 100644 --- a/openpype/tools/utils/delegates.py +++ b/openpype/tools/utils/delegates.py @@ -287,9 +287,6 @@ class PrettyTimeDelegate(QtWidgets.QStyledItemDelegate): """ def displayText(self, value, locale): - - if value is None: - # Ignore None value - return - - return pretty_timestamp(value) + if value is not None: + return pretty_timestamp(value) + return None From 76bef560a6650fcb4efa8a46c79ec8dda42ea4ff Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 21 Mar 2022 12:14:16 +0100 Subject: [PATCH 112/196] nuke: PR comments --- .../nuke/plugins/publish/extract_review_data_mov.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index 6f6e07fc28..31a8ff18ee 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -24,8 +24,10 @@ class ExtractReviewDataMov(openpype.api.Extractor): outputs = {} def process(self, instance): - families = instance.data["families"] - families.append(instance.data["family"]) + families = set(instance.data["families"]) + + # add main family to make sure all families are compared + families.add(instance.data["family"]) task_type = instance.context.data["taskType"] subset = instance.data["subset"] @@ -67,13 +69,11 @@ class ExtractReviewDataMov(openpype.api.Extractor): # test if family found in context # using intersection to make sure all defined # families are present in combination - if f_families and not any( - set(families).intersection(f_families)): + if f_families and not families.intersection(f_families): continue # test task types from filter - if f_task_types and not bool( - task_type in f_task_types): + if f_task_types and task_type not in f_task_types: continue # test subsets from filter From ea9fb6c841bafe695d9618e8dc6f05d08fd7980e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 12:18:52 +0100 Subject: [PATCH 113/196] small tweaks and changes --- 
openpype/tools/workfiles/files_widget.py | 109 +++++++++++++++-------- openpype/tools/workfiles/model.py | 35 +++++--- openpype/tools/workfiles/window.py | 16 +++- 3 files changed, 109 insertions(+), 51 deletions(-) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index 2c569064d4..fb36efea33 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -47,11 +47,12 @@ class FilesView(QtWidgets.QTreeView): class FilesWidget(QtWidgets.QWidget): """A widget displaying files that allows to save and open files.""" file_selected = QtCore.Signal(str) - workfile_created = QtCore.Signal(str) file_opened = QtCore.Signal() + workfile_created = QtCore.Signal(str) + published_visible_changed = QtCore.Signal(bool) - def __init__(self, parent=None): - super(FilesWidget, self).__init__(parent=parent) + def __init__(self, parent): + super(FilesWidget, self).__init__(parent) # Setup self._asset_id = None @@ -141,9 +142,6 @@ class FilesWidget(QtWidgets.QWidget): # about and the date modified is relatively small anyway. publish_files_view.setColumnWidth(0, 330) - # Hide publish view first - publish_files_view.setVisible(False) - views_layout = QtWidgets.QHBoxLayout(views_widget) views_layout.setContentsMargins(0, 0, 0, 0) views_layout.addWidget(workarea_files_view, 1) @@ -156,11 +154,14 @@ class FilesWidget(QtWidgets.QWidget): btn_browse = QtWidgets.QPushButton("Browse", btns_widget) btn_open = QtWidgets.QPushButton("Open", btns_widget) + btn_view_published = QtWidgets.QPushButton("View", btns_widget) + btns_layout = QtWidgets.QHBoxLayout(btns_widget) btns_layout.setContentsMargins(0, 0, 0, 0) - btns_layout.addWidget(btn_open) - btns_layout.addWidget(btn_browse) - btns_layout.addWidget(btn_save) + btns_layout.addWidget(btn_open, 1) + btns_layout.addWidget(btn_browse, 1) + btns_layout.addWidget(btn_save, 1) + btns_layout.addWidget(btn_view_published, 1) # Build files widgets for home page main_layout = QtWidgets.QVBoxLayout(self) @@ -186,6 +187,7 @@ class FilesWidget(QtWidgets.QWidget): btn_open.pressed.connect(self._on_workarea_open_pressed) btn_browse.pressed.connect(self.on_browse_pressed) btn_save.pressed.connect(self.on_save_as_pressed) + btn_view_published.pressed.connect(self._on_view_published_pressed) # Store attributes self._published_checkbox = published_checkbox @@ -201,32 +203,51 @@ class FilesWidget(QtWidgets.QWidget): self._publish_files_model = publish_files_model self._publish_proxy_model = publish_proxy_model - self.btns_widget = btns_widget - self.btn_open = btn_open - self.btn_browse = btn_browse - self.btn_save = btn_save + self._btns_widget = btns_widget + self._btn_open = btn_open + self._btn_browse = btn_browse + self._btn_save = btn_save + self._btn_view_published = btn_view_published - self._workarea_visible = True + # Create a proxy widget for files widget + self.setFocusProxy(btn_open) + + # Hide publish files widgets + publish_files_view.setVisible(False) + btn_view_published.setVisible(False) + + @property + def published_enabled(self): + return self._published_checkbox.isChecked() def _on_published_change(self): - workarea_visible = not self._published_checkbox.isChecked() + published_enabled = self.published_enabled - self._workarea_files_view.setVisible(workarea_visible) - self._publish_files_view.setVisible(not workarea_visible) + self._workarea_files_view.setVisible(not published_enabled) + self._btn_open.setVisible(not published_enabled) + self._btn_browse.setVisible(not 
published_enabled) + self._btn_save.setVisible(not published_enabled) + + self._publish_files_view.setVisible(published_enabled) + self._btn_view_published.setVisible(published_enabled) - self._workarea_visible = workarea_visible self._update_filtering() self._update_asset_task() + self.published_visible_changed.emit(published_enabled) + def _on_filter_text_change(self): self._update_filtering() def _update_filtering(self): text = self._filter_input.text() - if self._workarea_visible: - self._workarea_proxy_model.setFilterFixedString(text) - else: + if self.published_enabled: self._publish_proxy_model.setFilterFixedString(text) + else: + self._workarea_proxy_model.setFilterFixedString(text) + + def set_save_enabled(self, enabled): + self._btn_save.setEnabled(enabled) def set_asset_task(self, asset_id, task_name, task_type): if asset_id != self._asset_id: @@ -237,7 +258,13 @@ class FilesWidget(QtWidgets.QWidget): self._update_asset_task() def _update_asset_task(self): - if self._workarea_visible: + if self.published_enabled: + self._publish_files_model.set_context( + self._asset_id, self._task_name + ) + has_valid_items = self._publish_files_model.has_valid_items() + self._btn_view_published.setEnabled(has_valid_items) + else: # Define a custom session so we can query the work root # for a "Work area" that is not our current Session. # This way we can browse it even before we enter it. @@ -252,16 +279,11 @@ class FilesWidget(QtWidgets.QWidget): # Disable/Enable buttons based on available files in model has_valid_items = self._workarea_files_model.has_valid_items() - self.btn_browse.setEnabled(has_valid_items) - self.btn_open.setEnabled(has_valid_items) - if not has_valid_items: - # Manually trigger file selection - self.on_file_select() - else: - self._publish_files_model.set_context( - self._asset_id, self._task_name - ) - has_valid_items = self._publish_files_model.has_valid_items() + self._btn_browse.setEnabled(has_valid_items) + self._btn_open.setEnabled(has_valid_items) + # Manually trigger file selection + if not has_valid_items: + self.on_file_select() def _get_asset_doc(self): if self._asset_id is None: @@ -394,10 +416,10 @@ class FilesWidget(QtWidgets.QWidget): def _get_selected_filepath(self): """Return current filepath selected in view""" - if self._workarea_visible: - source_view = self._workarea_files_view - else: + if self.published_enabled: source_view = self._publish_files_view + else: + source_view = self._workarea_files_view selection = source_view.selectionModel() index = selection.currentIndex() if not index.isValid(): @@ -480,12 +502,19 @@ class FilesWidget(QtWidgets.QWidget): # Refresh files model self.refresh() + def _on_view_published_pressed(self): + print("View of published workfile triggered") + def on_file_select(self): self.file_selected.emit(self._get_selected_filepath()) def refresh(self): """Refresh listed files for current selection in the interface""" - self._workarea_files_model.refresh() + if self.published_enabled: + self._publish_files_model.refresh() + else: + self._workarea_files_model.refresh() + if self.auto_select_latest_modified: self._select_last_modified_file() @@ -516,7 +545,11 @@ class FilesWidget(QtWidgets.QWidget): def _select_last_modified_file(self): """Utility function to select the file with latest date modified""" - model = self._workarea_files_view.model() + if self.published_enabled: + source_view = self._publish_files_view + else: + source_view = self._workarea_files_view + model = source_view.model() highest_index = None highest = 0 
@@ -531,4 +564,4 @@ class FilesWidget(QtWidgets.QWidget): highest = modified if highest_index: - self._workarea_files_view.setCurrentIndex(highest_index) + source_view.setCurrentIndex(highest_index) diff --git a/openpype/tools/workfiles/model.py b/openpype/tools/workfiles/model.py index f38c80b190..fa0dddc2bc 100644 --- a/openpype/tools/workfiles/model.py +++ b/openpype/tools/workfiles/model.py @@ -51,7 +51,7 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): def _get_empty_root_item(self): if self._empty_root_item is None: - message = "Work Area does not exist. Use Save As to create it." + message = "Work Area is empty." item = QtGui.QStandardItem(message) icon = qtawesome.icon( "fa.times", @@ -198,7 +198,7 @@ class PublishFilesModel(QtGui.QStandardItemModel): def _get_invalid_context_item(self): if self._invalid_context_item is None: - message = "Selected context is not vald." + message = "Selected context is not valid." item = QtGui.QStandardItem(message) icon = qtawesome.icon( "fa.times", @@ -242,10 +242,17 @@ class PublishFilesModel(QtGui.QStandardItemModel): def _get_workfie_representations(self): output = [] - subset_docs = self._dbcon.find({ - "type": "subset", - "parent": self._asset_id - }) + subset_docs = self._dbcon.find( + { + "type": "subset", + "parent": self._asset_id + }, + { + "_id": True, + "data.families": True, + "name": True + } + ) filtered_subsets = [] for subset_doc in subset_docs: data = subset_doc.get("data") or {} @@ -257,10 +264,16 @@ class PublishFilesModel(QtGui.QStandardItemModel): if not subset_ids: return output - version_docs = self._dbcon.find({ - "type": "version", - "parent": {"$in": subset_ids} - }) + version_docs = self._dbcon.find( + { + "type": "version", + "parent": {"$in": subset_ids} + }, + { + "_id": True, + "parent": True + } + ) version_ids = [version_doc["_id"] for version_doc in version_docs] if not version_ids: return output @@ -296,7 +309,7 @@ class PublishFilesModel(QtGui.QStandardItemModel): if not self._asset_id or not self._task_name: self._clear() # Add Work Area does not exist placeholder - item = self._get_invalid_path_item() + item = self._get_invalid_context_item() root_item.appendRow(item) self._invalid_item_visible = True return diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index f68b721872..c90edc079c 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -42,12 +42,18 @@ class SidePanelWidget(QtWidgets.QWidget): btn_note_save.clicked.connect(self.on_save_click) self.details_input = details_input + self.note_label = note_label self.note_input = note_input self.btn_note_save = btn_note_save self._orig_note = "" self._workfile_doc = None + def set_published_visible(self, published_visible): + self.note_label.setVisible(not published_visible) + self.note_input.setVisible(not published_visible) + self.btn_note_save.setVisible(not published_visible) + def on_note_change(self): text = self.note_input.toPlainText() self.btn_note_save.setEnabled(self._orig_note != text) @@ -178,6 +184,9 @@ class Window(QtWidgets.QMainWindow): files_widget.file_selected.connect(self.on_file_select) files_widget.workfile_created.connect(self.on_workfile_create) files_widget.file_opened.connect(self._on_file_opened) + files_widget.published_visible_changed.connect( + self._on_published_change + ) side_panel.save_clicked.connect(self.on_side_panel_save) self._set_context_timer = set_context_timer @@ -192,7 +201,7 @@ class Window(QtWidgets.QMainWindow): self.side_panel = 
side_panel # Force focus on the open button by default, required for Houdini. - files_widget.btn_open.setFocus() + files_widget.setFocus() self.resize(1200, 600) @@ -217,7 +226,7 @@ class Window(QtWidgets.QMainWindow): """ def set_save_enabled(self, enabled): - self.files_widget.btn_save.setEnabled(enabled) + self.files_widget.set_save_enabled(enabled) def on_file_select(self, filepath): asset_id = self.assets_widget.get_selected_asset_id() @@ -239,6 +248,9 @@ class Window(QtWidgets.QMainWindow): def _on_file_opened(self): self.close() + def _on_published_change(self, visible): + self.side_panel.set_published_visible(visible) + def on_side_panel_save(self): workfile_doc, data = self.side_panel.get_workfile_data() if not workfile_doc: From 5a81596bd86b0440cc34937d3342904ef6fe905b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 14:17:56 +0100 Subject: [PATCH 114/196] added basic system of temping workfiles on user's side --- openpype/tools/workfiles/files_widget.py | 141 ++++++++++++++++++++++- 1 file changed, 139 insertions(+), 2 deletions(-) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index fb36efea33..9f133fd47d 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -1,11 +1,16 @@ import os import logging import shutil +import json +import time +import uuid import Qt from Qt import QtWidgets, QtCore from avalon import io, api +import appdirs + from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.delegates import PrettyTimeDelegate from openpype.lib import ( @@ -30,6 +35,134 @@ from .save_as_dialog import SaveAsDialog log = logging.getLogger(__name__) +class TempPublishFilesItem(object): + """Object representing on subfolder in app temp files. + + Args: + item_id (str): Id of item used as subfolder. + data (dict): Metadata about temp files. + directory (str): Path to directory where files are copied to. 
+ """ + + def __init__(self, item_id, data, directory): + self._id = item_id + self._directory = directory + self._filepath = os.path.join(directory, data["filename"]) + + @property + def directory(self): + return self._directory + + @property + def filepath(self): + return self._filepath + + @property + def id(self): + return self._id + + +class TempPublishFiles(object): + """Directory where """ + minute_in_seconds = 60 + hour_in_seconds = 60 * minute_in_seconds + day_in_seconds = 24 * hour_in_seconds + + def __init__(self): + root_dir = appdirs.user_data_dir( + "published_workfiles_temp", "openpype" + ) + if not os.path.exists(root_dir): + os.makedirs(root_dir) + + metadata_path = os.path.join(root_dir, "metadata.json") + + self._root_dir = root_dir + self._metadata_path = metadata_path + + if not os.path.exists(metadata_path): + self._store_data({}) + + @property + def life_time(self): + return int(self.hour_in_seconds) + + def add_file(self, src_path): + filename = os.path.basename(src_path) + + item_id = str(uuid.uuid4()) + dst_dirpath = os.path.join(self._root_dir, item_id) + if not os.path.exists(dst_dirpath): + os.makedirs(dst_dirpath) + + dst_path = os.path.join(dst_dirpath, filename) + shutil.copy(src_path, dst_path) + + now = time.time() + item_data = { + "filename": filename, + "expiration": now + self.life_time, + "created": now + } + data = self._get_data() + data[item_id] = item_data + self._store_data(data) + return TempPublishFilesItem(item_id, item_data, dst_dirpath) + + def _store_data(self, data): + with open(self._metadata_path, "w") as stream: + json.dump(data, stream) + + def _get_data(self): + if not os.path.exists(self._metadata_path): + return {} + + with open(self._metadata_path, "r") as stream: + output = json.load(stream) + return output + + def cleanup(self, check_expiration=True): + data = self._get_data() + now = time.time() + remove_ids = set() + for item_id, item_data in data.items(): + if check_expiration and now < item_data["expiration"]: + continue + + remove_ids.add(item_id) + + for item_id in remove_ids: + try: + self.remove_id(item_id) + except Exception: + log.warning( + "Failed to remove temp publish item \"{}\"".format( + item_id + ), + exc_info=True + ) + + def clear(self): + self.cleanup(False) + + def get_items(self): + output = [] + for item_id, item_data in self._get_data(): + item_path = os.path.join(self._root_dir, item_id) + output.append(TempPublishFiles(item_id, item_data, item_path)) + return output + + def remove_id(self, item_id): + filepath = os.path.join(self._root_dir, item_id) + if os.path.exists(filepath): + shutil.rmtree(filepath) + + data = self._get_data() + if item_id in data: + data.pop(item_id) + self._store_data(data) + + class FilesView(QtWidgets.QTreeView): doubleClickedLeft = QtCore.Signal() doubleClickedRight = QtCore.Signal() @@ -69,6 +202,9 @@ class FilesWidget(QtWidgets.QWidget): self._workfiles_root = None self._workdir_path = None self.host = api.registered_host() + temp_publish_files = TempPublishFiles() + temp_publish_files.cleanup() + self._temp_publish_files = temp_publish_files # Whether to automatically select the latest modified # file on a refresh of the files model. 
@@ -503,7 +639,9 @@ class FilesWidget(QtWidgets.QWidget): self.refresh() def _on_view_published_pressed(self): - print("View of published workfile triggered") + filepath = self._get_selected_filepath() + item = self._temp_publish_files.add_file(filepath) + self.host.open_file(item.filepath) def on_file_select(self): self.file_selected.emit(self._get_selected_filepath()) @@ -515,7 +653,6 @@ class FilesWidget(QtWidgets.QWidget): else: self._workarea_files_model.refresh() - if self.auto_select_latest_modified: self._select_last_modified_file() From 46b4f6f544b3e26295903893351d95ba82cc6b1d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 17:03:29 +0100 Subject: [PATCH 115/196] added ability to clear cached files --- openpype/tools/workfiles/files_widget.py | 138 +-------------- openpype/tools/workfiles/lib.py | 195 +++++++++++++++++++++ openpype/tools/workfiles/save_as_dialog.py | 50 ++++-- openpype/tools/workfiles/window.py | 135 +++++++++----- 4 files changed, 323 insertions(+), 195 deletions(-) create mode 100644 openpype/tools/workfiles/lib.py diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index 9f133fd47d..071be4ec1c 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -1,16 +1,11 @@ import os import logging import shutil -import json -import time -import uuid import Qt from Qt import QtWidgets, QtCore from avalon import io, api -import appdirs - from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.delegates import PrettyTimeDelegate from openpype.lib import ( @@ -31,138 +26,11 @@ from .model import ( DATE_MODIFIED_ROLE, ) from .save_as_dialog import SaveAsDialog +from .lib import TempPublishFiles log = logging.getLogger(__name__) -class TempPublishFilesItem(object): - """Object representing on subfolder in app temp files. - - Args: - item_id (str): Id of item used as subfolder. - data (dict): Metadata about temp files. - directory (str): Path to directory where files are copied to. 
- """ - - def __init__(self, item_id, data, directory): - self._id = item_id - self._directory = directory - self._filepath = os.path.join(directory, data["filename"]) - - @property - def directory(self): - return self._directory - - @property - def filepath(self): - return self._filepath - - @property - def id(self): - return self._id - - -class TempPublishFiles(object): - """Directory where """ - minute_in_seconds = 60 - hour_in_seconds = 60 * minute_in_seconds - day_in_seconds = 24 * hour_in_seconds - - def __init__(self): - root_dir = appdirs.user_data_dir( - "published_workfiles_temp", "openpype" - ) - if not os.path.exists(root_dir): - os.makedirs(root_dir) - - metadata_path = os.path.join(root_dir, "metadata.json") - - self._root_dir = root_dir - self._metadata_path = metadata_path - - if not os.path.exists(metadata_path): - self._store_data({}) - - @property - def life_time(self): - return int(self.hour_in_seconds) - - def add_file(self, src_path): - filename = os.path.basename(src_path) - - item_id = str(uuid.uuid4()) - dst_dirpath = os.path.join(self._root_dir, item_id) - if not os.path.exists(dst_dirpath): - os.makedirs(dst_dirpath) - - dst_path = os.path.join(dst_dirpath, filename) - shutil.copy(src_path, dst_path) - - now = time.time() - item_data = { - "filename": filename, - "expiration": now + self.life_time, - "created": now - } - data = self._get_data() - data[item_id] = item_data - self._store_data(data) - return TempPublishFilesItem(item_id, item_data, dst_dirpath) - - def _store_data(self, data): - with open(self._metadata_path, "w") as stream: - json.dump(data, stream) - - def _get_data(self): - if not os.path.exists(self._metadata_path): - return {} - - with open(self._metadata_path, "r") as stream: - output = json.load(stream) - return output - - def cleanup(self, check_expiration=True): - data = self._get_data() - now = time.time() - remove_ids = set() - for item_id, item_data in data.items(): - if check_expiration and now < item_data["expiration"]: - continue - - remove_ids.add(item_id) - - for item_id in remove_ids: - try: - self.remove_id(item_id) - except Exception: - log.warning( - "Failed to remove temp publish item \"{}\"".format( - item_id - ), - exc_info=True - ) - - def clear(self): - self.cleanup(False) - - def get_items(self): - output = [] - for item_id, item_data in self._get_data(): - item_path = os.path.join(self._root_dir, item_id) - output.append(TempPublishFiles(item_id, item_data, item_path)) - return output - - def remove_id(self, item_id): - filepath = os.path.join(self._root_dir, item_id) - if os.path.exists(filepath): - shutil.rmtree(filepath) - - data = self._get_data() - if item_id in data: - data.pop(item_id) - self._store_data(data) - - class FilesView(QtWidgets.QTreeView): doubleClickedLeft = QtCore.Signal() doubleClickedRight = QtCore.Signal() @@ -181,6 +49,7 @@ class FilesWidget(QtWidgets.QWidget): """A widget displaying files that allows to save and open files.""" file_selected = QtCore.Signal(str) file_opened = QtCore.Signal() + publish_file_viewed = QtCore.Signal() workfile_created = QtCore.Signal(str) published_visible_changed = QtCore.Signal(bool) @@ -372,6 +241,8 @@ class FilesWidget(QtWidgets.QWidget): self.published_visible_changed.emit(published_enabled) + self._select_last_modified_file() + def _on_filter_text_change(self): self._update_filtering() @@ -642,6 +513,7 @@ class FilesWidget(QtWidgets.QWidget): filepath = self._get_selected_filepath() item = self._temp_publish_files.add_file(filepath) 
self.host.open_file(item.filepath) + self.publish_file_viewed.emit() def on_file_select(self): self.file_selected.emit(self._get_selected_filepath()) diff --git a/openpype/tools/workfiles/lib.py b/openpype/tools/workfiles/lib.py new file mode 100644 index 0000000000..c181e634d6 --- /dev/null +++ b/openpype/tools/workfiles/lib.py @@ -0,0 +1,195 @@ +import os +import shutil +import uuid +import time +import json +import logging +import contextlib + +import appdirs + + +class TempPublishFilesItem(object): + """Object representing on subfolder in app temp files. + + Args: + item_id (str): Id of item used as subfolder. + data (dict): Metadata about temp files. + directory (str): Path to directory where files are copied to. + """ + + def __init__(self, item_id, data, directory): + self._id = item_id + self._directory = directory + self._filepath = os.path.join(directory, data["filename"]) + + @property + def directory(self): + return self._directory + + @property + def filepath(self): + return self._filepath + + @property + def id(self): + return self._id + + @property + def size(self): + if os.path.exists(self.filepath): + s = os.stat(self.filepath) + return s.st_size + return 0 + + +class TempPublishFiles(object): + """Directory where """ + minute_in_seconds = 60 + hour_in_seconds = 60 * minute_in_seconds + day_in_seconds = 24 * hour_in_seconds + + def __init__(self): + root_dir = appdirs.user_data_dir( + "published_workfiles_temp", "openpype" + ) + if not os.path.exists(root_dir): + os.makedirs(root_dir) + + metadata_path = os.path.join(root_dir, "metadata.json") + lock_path = os.path.join(root_dir, "lock.json") + + self._root_dir = root_dir + self._metadata_path = metadata_path + self._lock_path = lock_path + self._log = None + + @property + def log(self): + if self._log is None: + self._log = logging.getLogger(self.__class__.__name__) + return self._log + + @property + def life_time(self): + return int(self.hour_in_seconds) + + @property + def size(self): + size = 0 + for item in self.get_items(): + size += item.size + return size + + def add_file(self, src_path): + filename = os.path.basename(src_path) + + item_id = str(uuid.uuid4()) + dst_dirpath = os.path.join(self._root_dir, item_id) + if not os.path.exists(dst_dirpath): + os.makedirs(dst_dirpath) + + dst_path = os.path.join(dst_dirpath, filename) + shutil.copy(src_path, dst_path) + + now = time.time() + item_data = { + "filename": filename, + "expiration": now + self.life_time, + "created": now + } + with self._modify_data() as data: + data[item_id] = item_data + + return TempPublishFilesItem(item_id, item_data, dst_dirpath) + + @contextlib.contextmanager + def _modify_data(self): + start_time = time.time() + timeout = 3 + while os.path.exists(self._lock_path): + time.sleep(0.01) + if start_time > timeout: + self.log.warning(( + "Waited for {} seconds to free lock file. Overriding lock." 
+ ).format(timeout)) + + with open(self._lock_path, "w") as stream: + json.dump({"pid": os.getpid()}, stream) + + try: + data = self._get_data() + yield data + with open(self._metadata_path, "w") as stream: + json.dump(data, stream) + + finally: + os.remove(self._lock_path) + + def _get_data(self): + output = {} + if not os.path.exists(self._metadata_path): + return output + + try: + with open(self._metadata_path, "r") as stream: + output = json.load(stream) + except Exception: + self.log.warning("Failed to read metadata file.", exc_info=True) + return output + + def cleanup(self, check_expiration=True): + data = self._get_data() + now = time.time() + remove_ids = set() + for item_id, item_data in data.items(): + if check_expiration and now < item_data["expiration"]: + continue + + remove_ids.add(item_id) + + for item_id in remove_ids: + try: + self.remove_id(item_id) + except Exception: + self.log.warning( + "Failed to remove temp publish item \"{}\"".format( + item_id + ), + exc_info=True + ) + + def clear(self): + self.cleanup(False) + + def get_items(self): + output = [] + data = self._get_data() + for item_id, item_data in data.items(): + item_path = os.path.join(self._root_dir, item_id) + output.append(TempPublishFilesItem(item_id, item_data, item_path)) + return output + + def remove_id(self, item_id): + filepath = os.path.join(self._root_dir, item_id) + if os.path.exists(filepath): + shutil.rmtree(filepath) + + with self._modify_data() as data: + data.pop(item_id, None) + + +def file_size_to_string(file_size): + size = 0 + size_ending_mapping = { + "KB": 1024 ** 1, + "MB": 1024 ** 2, + "GB": 1024 ** 3 + } + ending = "B" + for _ending, _size in size_ending_mapping.items(): + if file_size < _size: + break + size = file_size / _size + ending = _ending + return "{:.2f} {}".format(size, ending) diff --git a/openpype/tools/workfiles/save_as_dialog.py b/openpype/tools/workfiles/save_as_dialog.py index 399d54bd54..e616a325cc 100644 --- a/openpype/tools/workfiles/save_as_dialog.py +++ b/openpype/tools/workfiles/save_as_dialog.py @@ -107,25 +107,39 @@ class CommentMatcher(object): class SubversionLineEdit(QtWidgets.QWidget): """QLineEdit with QPushButton for drop down selection of list of strings""" - def __init__(self, parent=None): - super(SubversionLineEdit, self).__init__(parent=parent) + + text_changed = QtCore.Signal(str) + + def __init__(self, *args, **kwargs): + super(SubversionLineEdit, self).__init__(*args, **kwargs) + + input_field = PlaceholderLineEdit(self) + menu_btn = QtWidgets.QPushButton(self) + menu_btn.setFixedWidth(18) + + menu = QtWidgets.QMenu(self) + menu_btn.setMenu(menu) layout = QtWidgets.QHBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) layout.setSpacing(3) - self._input = PlaceholderLineEdit() - self._button = QtWidgets.QPushButton("") - self._button.setFixedWidth(18) - self._menu = QtWidgets.QMenu(self) - self._button.setMenu(self._menu) + layout.addWidget(input_field, 1) + layout.addWidget(menu_btn, 0) - layout.addWidget(self._input) - layout.addWidget(self._button) + input_field.textChanged.connect(self.text_changed) - @property - def input(self): - return self._input + self.setFocusProxy(input_field) + + self._input_field = input_field + self._menu_btn = menu_btn + self._menu = menu + + def set_placeholder(self, placeholder): + self._input_field.setPlaceholderText(placeholder) + + def set_text(self, text): + self._input_field.setText(text) def set_values(self, values): self._update(values) @@ -134,7 +148,7 @@ class SubversionLineEdit(QtWidgets.QWidget): 
self._menu.exec_() def _on_action_clicked(self, action): - self._input.setText(action.text()) + self._input_field.setText(action.text()) def _update(self, values): """Create optional predefined subset names @@ -147,7 +161,7 @@ class SubversionLineEdit(QtWidgets.QWidget): """ menu = self._menu - button = self._button + button = self._menu_btn state = any(values) button.setEnabled(state) @@ -236,7 +250,7 @@ class SaveAsDialog(QtWidgets.QDialog): # Subversion input subversion = SubversionLineEdit(inputs_widget) - subversion.input.setPlaceholderText("Will be part of filename.") + subversion.set_placeholder("Will be part of filename.") # Extensions combobox ext_combo = QtWidgets.QComboBox(inputs_widget) @@ -271,7 +285,7 @@ class SaveAsDialog(QtWidgets.QDialog): if comment: log.info("Detected subversion comment: {}".format(comment)) self.data["comment"] = comment - subversion.input.setText(comment) + subversion.set_text(comment) existing_comments = self.get_existing_comments() subversion.set_values(existing_comments) @@ -292,7 +306,7 @@ class SaveAsDialog(QtWidgets.QDialog): self.on_version_checkbox_changed ) - subversion.input.textChanged.connect(self.on_comment_changed) + subversion.text_changed.connect(self.on_comment_changed) ext_combo.currentIndexChanged.connect(self.on_extension_changed) btn_ok.pressed.connect(self.on_ok_pressed) @@ -303,7 +317,7 @@ class SaveAsDialog(QtWidgets.QDialog): # Force default focus to comment, some hosts didn't automatically # apply focus to this line edit (e.g. Houdini) - subversion.input.setFocus() + subversion.setFocus() # Store widgets self.btn_ok = btn_ok diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index c90edc079c..7f5bbd1ee7 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -14,10 +14,15 @@ from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget from openpype.tools.utils.tasks_widget import TasksWidget from .files_widget import FilesWidget +from .lib import TempPublishFiles, file_size_to_string class SidePanelWidget(QtWidgets.QWidget): save_clicked = QtCore.Signal() + published_workfile_message = ( + "INFO: Published workfiles you'll opened will be stored in" + " temp directory on your machine. Current temp size: {}." 
+ ) def __init__(self, parent=None): super(SidePanelWidget, self).__init__(parent) @@ -26,41 +31,88 @@ class SidePanelWidget(QtWidgets.QWidget): details_input = QtWidgets.QPlainTextEdit(self) details_input.setReadOnly(True) - note_label = QtWidgets.QLabel("Artist note", self) - note_input = QtWidgets.QPlainTextEdit(self) - btn_note_save = QtWidgets.QPushButton("Save note", self) + artist_note_widget = QtWidgets.QWidget(self) + note_label = QtWidgets.QLabel("Artist note", artist_note_widget) + note_input = QtWidgets.QPlainTextEdit(artist_note_widget) + btn_note_save = QtWidgets.QPushButton("Save note", artist_note_widget) + + artist_note_layout = QtWidgets.QVBoxLayout(artist_note_widget) + artist_note_layout.setContentsMargins(0, 0, 0, 0) + artist_note_layout.addWidget(note_label, 0) + artist_note_layout.addWidget(note_input, 1) + artist_note_layout.addWidget( + btn_note_save, 0, alignment=QtCore.Qt.AlignRight + ) + + publish_temp_widget = QtWidgets.QWidget(self) + publish_temp_info_label = QtWidgets.QLabel( + self.published_workfile_message.format( + file_size_to_string(0) + ), + publish_temp_widget + ) + publish_temp_info_label.setWordWrap(True) + + btn_clear_temp = QtWidgets.QPushButton( + "Clear temp", publish_temp_widget + ) + + publish_temp_layout = QtWidgets.QVBoxLayout(publish_temp_widget) + publish_temp_layout.setContentsMargins(0, 0, 0, 0) + publish_temp_layout.addWidget(publish_temp_info_label, 0) + publish_temp_layout.addWidget( + btn_clear_temp, 0, alignment=QtCore.Qt.AlignRight + ) main_layout = QtWidgets.QVBoxLayout(self) main_layout.setContentsMargins(0, 0, 0, 0) main_layout.addWidget(details_label, 0) - main_layout.addWidget(details_input, 0) - main_layout.addWidget(note_label, 0) - main_layout.addWidget(note_input, 1) - main_layout.addWidget(btn_note_save, alignment=QtCore.Qt.AlignRight) + main_layout.addWidget(details_input, 1) + main_layout.addWidget(artist_note_widget, 1) + main_layout.addWidget(publish_temp_widget, 0) - note_input.textChanged.connect(self.on_note_change) - btn_note_save.clicked.connect(self.on_save_click) + note_input.textChanged.connect(self._on_note_change) + btn_note_save.clicked.connect(self._on_save_click) + btn_clear_temp.clicked.connect(self._on_clear_temp_click) - self.details_input = details_input - self.note_label = note_label - self.note_input = note_input - self.btn_note_save = btn_note_save + self._details_input = details_input + self._artist_note_widget = artist_note_widget + self._note_input = note_input + self._btn_note_save = btn_note_save + + self._publish_temp_info_label = publish_temp_info_label + self._publish_temp_widget = publish_temp_widget self._orig_note = "" self._workfile_doc = None + publish_temp_widget.setVisible(False) + def set_published_visible(self, published_visible): - self.note_label.setVisible(not published_visible) - self.note_input.setVisible(not published_visible) - self.btn_note_save.setVisible(not published_visible) + self._artist_note_widget.setVisible(not published_visible) + self._publish_temp_widget.setVisible(published_visible) + if published_visible: + self.refresh_publish_temp_sizes() - def on_note_change(self): - text = self.note_input.toPlainText() - self.btn_note_save.setEnabled(self._orig_note != text) + def refresh_publish_temp_sizes(self): + temp_publish_files = TempPublishFiles() + text = self.published_workfile_message.format( + file_size_to_string(temp_publish_files.size) + ) + self.publish_temp_info_label.setText(text) - def on_save_click(self): - self._orig_note = 
self.note_input.toPlainText() - self.on_note_change() + def _on_clear_temp_click(self): + temp_publish_files = TempPublishFiles() + temp_publish_files.clear() + self.refresh_publish_temp_sizes() + + def _on_note_change(self): + text = self._note_input.toPlainText() + self._btn_note_save.setEnabled(self._orig_note != text) + + def _on_save_click(self): + self._orig_note = self._note_input.toPlainText() + self._on_note_change() self.save_clicked.emit() def set_context(self, asset_id, task_name, filepath, workfile_doc): @@ -68,17 +120,17 @@ class SidePanelWidget(QtWidgets.QWidget): # NOTE workfile document is not requirement enabled = bool(asset_id) and bool(task_name) and bool(filepath) - self.details_input.setEnabled(enabled) - self.note_input.setEnabled(enabled) - self.btn_note_save.setEnabled(enabled) + self._details_input.setEnabled(enabled) + self._note_input.setEnabled(enabled) + self._btn_note_save.setEnabled(enabled) # Make sure workfile doc is overridden self._workfile_doc = workfile_doc # Disable inputs and remove texts if any required arguments are missing if not enabled: self._orig_note = "" - self.details_input.setPlainText("") - self.note_input.setPlainText("") + self._details_input.setPlainText("") + self._note_input.setPlainText("") return orig_note = "" @@ -86,23 +138,12 @@ class SidePanelWidget(QtWidgets.QWidget): orig_note = workfile_doc["data"].get("note") or orig_note self._orig_note = orig_note - self.note_input.setPlainText(orig_note) + self._note_input.setPlainText(orig_note) # Set as empty string - self.details_input.setPlainText("") + self._details_input.setPlainText("") filestat = os.stat(filepath) - size_ending_mapping = { - "KB": 1024 ** 1, - "MB": 1024 ** 2, - "GB": 1024 ** 3 - } - size = filestat.st_size - ending = "B" - for _ending, _size in size_ending_mapping.items(): - if filestat.st_size < _size: - break - size = filestat.st_size / _size - ending = _ending + size_value = file_size_to_string(filestat.st_size) # Append html string datetime_format = "%b %d %Y %H:%M:%S" @@ -110,17 +151,17 @@ class SidePanelWidget(QtWidgets.QWidget): modification_time = datetime.datetime.fromtimestamp(filestat.st_mtime) lines = ( "Size:", - "{:.2f} {}".format(size, ending), + size_value, "Created:", creation_time.strftime(datetime_format), "Modified:", modification_time.strftime(datetime_format) ) - self.details_input.appendHtml("
".join(lines)) + self._details_input.appendHtml("
".join(lines)) def get_workfile_data(self): data = { - "note": self.note_input.toPlainText() + "note": self._note_input.toPlainText() } return self._workfile_doc, data @@ -184,6 +225,9 @@ class Window(QtWidgets.QMainWindow): files_widget.file_selected.connect(self.on_file_select) files_widget.workfile_created.connect(self.on_workfile_create) files_widget.file_opened.connect(self._on_file_opened) + files_widget.publish_file_viewed.connect( + self._on_publish_file_viewed + ) files_widget.published_visible_changed.connect( self._on_published_change ) @@ -248,6 +292,9 @@ class Window(QtWidgets.QMainWindow): def _on_file_opened(self): self.close() + def _on_publish_file_viewed(self): + self.side_panel.refresh_publish_temp_sizes() + def _on_published_change(self, visible): self.side_panel.set_published_visible(visible) From 8f28b96c7f3409dc426bca65491c34c10ad84f32 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 17:15:55 +0100 Subject: [PATCH 116/196] smaller fixes --- openpype/tools/workfiles/files_widget.py | 5 +++++ openpype/tools/workfiles/window.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index 071be4ec1c..7f201d2cf3 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -188,6 +188,9 @@ class FilesWidget(QtWidgets.QWidget): workarea_files_view.selectionModel().selectionChanged.connect( self.on_file_select ) + publish_files_view.doubleClickedLeft.connect( + self._on_view_published_pressed + ) btn_open.pressed.connect(self._on_workarea_open_pressed) btn_browse.pressed.connect(self.on_browse_pressed) @@ -511,6 +514,8 @@ class FilesWidget(QtWidgets.QWidget): def _on_view_published_pressed(self): filepath = self._get_selected_filepath() + if not filepath or not os.path.exists(filepath): + return item = self._temp_publish_files.add_file(filepath) self.host.open_file(item.filepath) self.publish_file_viewed.emit() diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index 7f5bbd1ee7..c2a3f74a22 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -99,7 +99,7 @@ class SidePanelWidget(QtWidgets.QWidget): text = self.published_workfile_message.format( file_size_to_string(temp_publish_files.size) ) - self.publish_temp_info_label.setText(text) + self._publish_temp_info_label.setText(text) def _on_clear_temp_click(self): temp_publish_files = TempPublishFiles() From 2d86f0ee7c752180a0ded81f412194cb6083347b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 17:30:47 +0100 Subject: [PATCH 117/196] added check of unknown files in temp --- openpype/tools/workfiles/lib.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/openpype/tools/workfiles/lib.py b/openpype/tools/workfiles/lib.py index c181e634d6..84f2e76450 100644 --- a/openpype/tools/workfiles/lib.py +++ b/openpype/tools/workfiles/lib.py @@ -142,7 +142,9 @@ class TempPublishFiles(object): data = self._get_data() now = time.time() remove_ids = set() + all_ids = set() for item_id, item_data in data.items(): + all_ids.add(item_id) if check_expiration and now < item_data["expiration"]: continue @@ -159,6 +161,23 @@ class TempPublishFiles(object): exc_info=True ) + # Remove unknown folders/files + for filename in os.listdir(self._root_dir): + if filename in all_ids: + continue + + full_path = os.path.join(self._root_dir, filename) + if full_path in (self._metadata_path, 
self._lock_path): + continue + + try: + shutil.rmtree(full_path) + except Exception: + self.log.warning( + "Couldn't remove arbitrary path \"{}\"".format(full_path), + exc_info=True + ) + def clear(self): self.cleanup(False) From b75eafbeae0b1c63e4364789e9e8c45724103e5e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 17:53:51 +0100 Subject: [PATCH 118/196] uncheck published checkbox on open of published file --- openpype/tools/workfiles/files_widget.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index 7f201d2cf3..d2b8a76952 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -519,6 +519,8 @@ class FilesWidget(QtWidgets.QWidget): item = self._temp_publish_files.add_file(filepath) self.host.open_file(item.filepath) self.publish_file_viewed.emit() + # Change state back to workarea + self._published_checkbox.setChecked(False) def on_file_select(self): self.file_selected.emit(self._get_selected_filepath()) From 528b27b7ab9cbeb29aa298fe79ff29aa935a06ae Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 17:54:39 +0100 Subject: [PATCH 119/196] show all workfile representations if task is not selected in UI --- openpype/tools/workfiles/model.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/openpype/tools/workfiles/model.py b/openpype/tools/workfiles/model.py index fa0dddc2bc..563a2fc558 100644 --- a/openpype/tools/workfiles/model.py +++ b/openpype/tools/workfiles/model.py @@ -286,7 +286,12 @@ class PublishFilesModel(QtGui.QStandardItemModel): "context.ext": {"$in": extensions} } ) + filtered_repre_docs = [] for repre_doc in repre_docs: + if self._task_name is None: + filtered_repre_docs.append(repre_doc) + continue + task_info = repre_doc["context"].get("task") if not task_info: print("Not task info") @@ -298,15 +303,18 @@ class PublishFilesModel(QtGui.QStandardItemModel): task_name = task_info if task_name == self._task_name: - path = get_representation_path( - repre_doc, root=self._anatomy.roots - ) - output.append((path, repre_doc["_id"])) + filtered_repre_docs.append(repre_doc) + + for repre_doc in filtered_repre_docs: + path = get_representation_path( + repre_doc, root=self._anatomy.roots + ) + output.append((path, repre_doc["_id"])) return output def refresh(self): root_item = self.invisibleRootItem() - if not self._asset_id or not self._task_name: + if not self._asset_id: self._clear() # Add Work Area does not exist placeholder item = self._get_invalid_context_item() From 8b572a0d3ca27b8fb52856b5c5a00f71e499115f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Mar 2022 18:59:34 +0100 Subject: [PATCH 120/196] just handle error when is caused by OSError --- openpype/lib/log.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/lib/log.py b/openpype/lib/log.py index 98a3bae8e6..f33385e0ba 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -98,6 +98,10 @@ class PypeStreamHandler(logging.StreamHandler): self.flush() except (KeyboardInterrupt, SystemExit): raise + + except OSError: + self.handleError(record) + except Exception: print(repr(record)) self.handleError(record) From ae9e34bc62d5d9918842df27492d4a418a83cc82 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Tue, 22 Mar 2022 11:21:26 +0900 Subject: [PATCH 121/196] use collected animation data at multiverse use creator --- .../maya/plugins/create/create_multiverse_usd.py | 14 ++++---------- 1 file 
changed, 4 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py index 6851e0f6bc..c06c764f95 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py @@ -44,15 +44,9 @@ class CreateMultiverseUsd(plugin.Creator): self.data["writeUsdAttributes"] = False self.data["timeVaryingTopology"] = False self.data["customMaterialNamespace"] = '' + self.data["numTimeSamples"] = 1 + self.data["timeSamplesSpan"] = 0.0 - # The attributes below are about animated cache. - self.data["writeTimeRange"] = True - self.data["timeRangeNumTimeSamples"] = 0 - self.data["timeRangeSamplesSpan"] = 0.0 - + # Add animation data animation_data = lib.collect_animation_data(True) - - self.data["timeRangeStart"] = animation_data["frameStart"] - self.data["timeRangeEnd"] = animation_data["frameEnd"] - self.data["timeRangeIncrement"] = animation_data["step"] - self.data["timeRangeFramesPerSecond"] = animation_data["fps"] + self.data.update(animation_data) From 370416a949280a4223a07f0e4a70060b43433fa4 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Tue, 22 Mar 2022 11:41:13 +0900 Subject: [PATCH 122/196] use openpye built-in variables for multiverse usd extractor --- .../plugins/publish/extract_multiverse_usd.py | 60 +++++++++---------- 1 file changed, 28 insertions(+), 32 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 7c13252957..db0f57768b 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -58,22 +58,13 @@ class ExtractMultiverseUsd(openpype.api.Extractor): "writeUsdAttributes": bool, "timeVaryingTopology": bool, "customMaterialNamespace": str, - "writeTimeRange": bool, - "timeRangeStart": int, - "timeRangeEnd": int, - "timeRangeIncrement": int, - "timeRangeNumTimeSamples": int, - "timeRangeSamplesSpan": float, - "timeRangeFramesPerSecond": float + "numTimeSamples": int, + "timeSamplesSpan": float } @property def default_options(self): """The default options for Multiverse USD extraction.""" - start_frame = int(cmds.playbackOptions(query=True, - animationStartTime=True)) - end_frame = int(cmds.playbackOptions(query=True, - animationEndTime=True)) return { "stripNamespaces": False, @@ -108,13 +99,8 @@ class ExtractMultiverseUsd(openpype.api.Extractor): "writeUsdAttributes": False, "timeVaryingTopology": False, "customMaterialNamespace": '', - "writeTimeRange": True, - "timeRangeStart": start_frame, - "timeRangeEnd": end_frame, - "timeRangeIncrement": 1, - "timeRangeNumTimeSamples": 0, - "timeRangeSamplesSpan": 0.0, - "timeRangeFramesPerSecond": 24.0 + "numTimeSamples": 1, + "timeSamplesSpan": 0.0 } def process(self, instance): @@ -130,6 +116,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor): # Parse export options options = self.default_options self.log.info("Export options: {0}".format(options)) + self.log.info("Export instance data: {0}".format(instance.data)) # Perform extraction self.log.info("Performing extraction ...") @@ -144,30 +131,39 @@ class ExtractMultiverseUsd(openpype.api.Extractor): long=True) self.log.info('Collected object {}'.format(members)) - # TODO: Deal with asset, composition, overide with options. 
import multiverse time_opts = None - if options["writeTimeRange"]: + frame_start = instance.data['frameStart'] + frame_end = instance.data['frameEnd'] + step = instance.data['step'] + fps = instance.data['fps'] + if frame_end != frame_start: time_opts = multiverse.TimeOptions() time_opts.writeTimeRange = True - - time_range_start = options["timeRangeStart"] - time_range_end = options["timeRangeEnd"] - time_opts.frameRange = (time_range_start, time_range_end) - - time_opts.frameIncrement = options["timeRangeIncrement"] - time_opts.numTimeSamples = options["timeRangeNumTimeSamples"] - time_opts.timeSamplesSpan = options["timeRangeSamplesSpan"] - time_opts.framePerSecond = options["timeRangeFramesPerSecond"] + time_opts.frameRange = (frame_start, frame_end) + time_opts.frameIncrement = step + time_opts.numTimeSamples = instance.data["numTimeSamples"] + time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"] + time_opts.framePerSecond = fps asset_write_opts = multiverse.AssetWriteOptions(time_opts) options_items = getattr(options, "iteritems", options.items) - for (k, v) in options_items(): - if k == "writeTimeRange" or k.startswith("timeRange"): + options_discard_keys = [ + 'numTimeSamples', + 'timeSamplesSpan', + 'frameStart', + 'frameEnd', + 'handleStart', + 'handleEnd', + 'step', + 'fps' + ] + for key, value in options_items(): + if key in options_discard_keys: continue - setattr(asset_write_opts, k, v) + setattr(asset_write_opts, key, instance.data[key]) multiverse.WriteAsset(file_path, members, asset_write_opts) From 889851a2d820cc43c23819b2afca4781c3c471c5 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Tue, 22 Mar 2022 11:43:58 +0900 Subject: [PATCH 123/196] tidy variable of unused variable at multiverse usd creator --- openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index db0f57768b..d084ac844e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -160,7 +160,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor): 'step', 'fps' ] - for key, value in options_items(): + for key, _value in options_items(): if key in options_discard_keys: continue setattr(asset_write_opts, key, instance.data[key]) From b553fe66a044a4b3f2570503375b1a8a0f13a2c8 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Tue, 22 Mar 2022 12:10:35 +0900 Subject: [PATCH 124/196] add method parse_overrides to multiverse usd extractor to update option values --- .../plugins/publish/extract_multiverse_usd.py | 33 ++++++++++++++++--- 1 file changed, 28 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index d084ac844e..9e6d46af7e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -32,9 +32,9 @@ class ExtractMultiverseUsd(openpype.api.Extractor): "flattenParentXforms": bool, "writeSparseOverrides": bool, "useMetaPrimPath": bool, - "customRootPath": str, - "customAttributes": str, - "nodeTypesToIgnore": str, + "customRootPath": unicode, + "customAttributes": unicode, + "nodeTypesToIgnore": unicode, "writeMeshes": bool, "writeCurves": bool, "writeParticles": bool, @@ -57,7 +57,7 @@ class 
ExtractMultiverseUsd(openpype.api.Extractor): "writeTransformMatrix": bool, "writeUsdAttributes": bool, "timeVaryingTopology": bool, - "customMaterialNamespace": str, + "customMaterialNamespace": unicode, "numTimeSamples": int, "timeSamplesSpan": float } @@ -103,6 +103,29 @@ class ExtractMultiverseUsd(openpype.api.Extractor): "timeSamplesSpan": 0.0 } + def parse_overrides(self, instance, options): + """Inspect data of instance to determine overridden options""" + + for key in instance.data: + if key not in self.options: + continue + + # Ensure the data is of correct type + value = instance.data[key] + if not isinstance(value, self.options[key]): + self.log.warning( + "Overridden attribute {key} was of " + "the wrong type: {invalid_type} " + "- should have been {valid_type}".format( + key=key, + invalid_type=type(value).__name__, + valid_type=self.options[key].__name__)) + continue + + options[key] = value + + return options + def process(self, instance): # Load plugin firstly cmds.loadPlugin("MultiverseForMaya", quiet=True) @@ -115,8 +138,8 @@ class ExtractMultiverseUsd(openpype.api.Extractor): # Parse export options options = self.default_options + options = self.parse_overrides(instance, options) self.log.info("Export options: {0}".format(options)) - self.log.info("Export instance data: {0}".format(instance.data)) # Perform extraction self.log.info("Performing extraction ...") From cc1d6313d534d1a6dc76cc4101fd8021806bf44d Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Tue, 22 Mar 2022 12:24:09 +0900 Subject: [PATCH 125/196] convert string to unicode at multiverse usd extractor --- openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 9e6d46af7e..5fece9cfd3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -112,6 +112,8 @@ class ExtractMultiverseUsd(openpype.api.Extractor): # Ensure the data is of correct type value = instance.data[key] + if isinstance(value, str): + value = unicode(value, "utf-8") if not isinstance(value, self.options[key]): self.log.warning( "Overridden attribute {key} was of " From b5d775700d8cfe20ea8d3db0db2354eaa815380c Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Tue, 22 Mar 2022 12:28:51 +0900 Subject: [PATCH 126/196] declare default string option as unicode --- .../maya/plugins/publish/extract_multiverse_usd.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 5fece9cfd3..7b01d3066d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -73,9 +73,9 @@ class ExtractMultiverseUsd(openpype.api.Extractor): "flattenParentXforms": False, "writeSparseOverrides": False, "useMetaPrimPath": False, - "customRootPath": '', - "customAttributes": '', - "nodeTypesToIgnore": '', + "customRootPath": u'', + "customAttributes": u'', + "nodeTypesToIgnore": u'', "writeMeshes": True, "writeCurves": True, "writeParticles": True, @@ -98,7 +98,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor): "writeTransformMatrix": True, "writeUsdAttributes": False, "timeVaryingTopology": False, - "customMaterialNamespace": '', + "customMaterialNamespace": u'', 
"numTimeSamples": 1, "timeSamplesSpan": 0.0 } @@ -112,8 +112,6 @@ class ExtractMultiverseUsd(openpype.api.Extractor): # Ensure the data is of correct type value = instance.data[key] - if isinstance(value, str): - value = unicode(value, "utf-8") if not isinstance(value, self.options[key]): self.log.warning( "Overridden attribute {key} was of " From fba9399bd3cef9ea5043f312903684bc88b0cab1 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Tue, 22 Mar 2022 12:31:09 +0900 Subject: [PATCH 127/196] use values from processed options from multiverse usd extractor --- openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 7b01d3066d..fd46f87684 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -183,10 +183,10 @@ class ExtractMultiverseUsd(openpype.api.Extractor): 'step', 'fps' ] - for key, _value in options_items(): + for key, value in options_items(): if key in options_discard_keys: continue - setattr(asset_write_opts, key, instance.data[key]) + setattr(asset_write_opts, key, value) multiverse.WriteAsset(file_path, members, asset_write_opts) From adde37f982b001bd1aae6722c4e2d4ff3b94d466 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Tue, 22 Mar 2022 13:48:47 +0900 Subject: [PATCH 128/196] removed root transform when load the usd file --- .../maya/plugins/load/load_multiverse_usd.py | 33 +++++++++++-------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index 3370033141..8a618d5b01 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -1,7 +1,15 @@ # -*- coding: utf-8 -*- +import maya.cmds as cmds +import maya.mel as mel + from avalon import api -import maya.cmds as cmds +from openpype.hosts.maya.api.lib import ( + maintained_selection, + namespaced, + unique_namespace +) +from openpype.hosts.maya.api.pipeline import containerise class MultiverseUsdLoader(api.Loader): @@ -17,9 +25,6 @@ class MultiverseUsdLoader(api.Loader): def load(self, context, name=None, namespace=None, options=None): - from openpype.hosts.maya.api.pipeline import containerise - from openpype.hosts.maya.api.lib import unique_namespace - asset = context['asset']['name'] namespace = namespace or unique_namespace( asset + "_", @@ -27,19 +32,19 @@ class MultiverseUsdLoader(api.Loader): suffix="_", ) + # Create the shape cmds.loadPlugin("MultiverseForMaya", quiet=True) - # Root group - rootName = "{}:{}".format(namespace, name) - root = cmds.group(name=rootName, empty=True) + shape = None + transform = None + with maintained_selection(): + cmds.namespace(addNamespace=namespace) + with namespaced(namespace, new=False): + import multiverse + shape = multiverse.CreateUsdCompound(self.fname) + transform = mel.eval('firstParentOf "{}"'.format(shape)) - # Create shape with transform and move it under root - import multiverse - transform = multiverse.CreateUsdCompound(self.fname) - cmds.parent(transform, root) - - # Rename transform - nodes = [root, transform] + nodes = [transform, shape] self[:] = nodes return containerise( From e1bdbf3cdc61e2dbf3b211cc80c725af6c1c2e06 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Tue, 22 Mar 2022 14:18:17 
+0900 Subject: [PATCH 129/196] updated update and remove method in usd loader --- .../maya/plugins/load/load_multiverse_usd.py | 33 ++++++++++++++----- 1 file changed, 25 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index 8a618d5b01..dac2244b5f 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -55,20 +55,22 @@ class MultiverseUsdLoader(api.Loader): loader=self.__class__.__name__) def update(self, container, representation): + # type: (dict, dict) -> None + """Update container with specified representation.""" + node = container['objectName'] + assert cmds.objExists(node), "Missing container" + + members = cmds.sets(node, query=True) or [] + shapes = cmds.ls(members, type="mvUsdCompoundShape") + assert shapes, "Cannot find mvUsdCompoundShape in container" path = api.get_representation_path(representation) - # Update the shape - members = cmds.sets(container['objectName'], query=True) - shapes = cmds.ls(members, type="mvUsdCompoundShape", long=True) - - assert len(shapes) == 1, "This is a bug" - import multiverse for shape in shapes: multiverse.SetUsdCompoundAssetPaths(shape, [path]) - cmds.setAttr(container["objectName"] + ".representation", + cmds.setAttr("{}.representation".format(node), str(representation["_id"]), type="string") @@ -76,4 +78,19 @@ class MultiverseUsdLoader(api.Loader): self.update(container, representation) def remove(self, container): - pass + # type: (dict) -> None + """Remove loaded container.""" + # Delete container and its contents + if cmds.objExists(container['objectName']): + members = cmds.sets(container['objectName'], query=True) or [] + cmds.delete([container['objectName']] + members) + + # Remove the namespace, if empty + namespace = container['namespace'] + if cmds.namespace(exists=namespace): + members = cmds.namespaceInfo(namespace, listNamespace=True) + if not members: + cmds.namespace(removeNamespace=namespace) + else: + self.log.warning("Namespace not deleted because it " + "still has members: %s", namespace) From 5e5fc4ec55e9bcd9e87573dead58abc92d942c59 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Mar 2022 12:31:14 +0100 Subject: [PATCH 130/196] removed silo references --- openpype/hosts/blender/api/ops.py | 1 - .../avalon_uri_processor.py | 2 - openpype/lib/usdlib.py | 3 +- openpype/pipeline/load/utils.py | 1 - .../publish/extract_hierarchy_avalon.py | 2 +- openpype/tools/context_dialog/window.py | 1 - openpype/tools/launcher/lib.py | 16 ------- openpype/tools/loader/app.py | 17 +------- .../standalonepublish/widgets/model_asset.py | 43 +++---------------- .../standalonepublish/widgets/widget_asset.py | 1 - openpype/tools/texture_copy/app.py | 2 - openpype/tools/workfiles/app.py | 1 - 12 files changed, 10 insertions(+), 80 deletions(-) diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py index 3069c3e1c9..29d6d356c8 100644 --- a/openpype/hosts/blender/api/ops.py +++ b/openpype/hosts/blender/api/ops.py @@ -328,7 +328,6 @@ class LaunchWorkFiles(LaunchQtApp): result = super().execute(context) self._window.set_context({ "asset": avalon.api.Session["AVALON_ASSET"], - "silo": avalon.api.Session["AVALON_SILO"], "task": avalon.api.Session["AVALON_TASK"] }) return result diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py 
b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py index 4071eb3e0c..499b733570 100644 --- a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py +++ b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py @@ -145,7 +145,6 @@ class AvalonURIOutputProcessor(base.OutputProcessorBase): path = self._template.format(**{ "root": root, "project": PROJECT, - "silo": asset_doc["silo"], "asset": asset_doc["name"], "subset": subset, "representation": ext, @@ -165,4 +164,3 @@ output_processor = AvalonURIOutputProcessor() def usdOutputProcessor(): return output_processor - diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index 3ae7430c7b..89021156b4 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -315,7 +315,7 @@ def get_usd_master_path(asset, subset, representation): ) template = project["config"]["template"]["publish"] - if isinstance(asset, dict) and "silo" in asset and "name" in asset: + if isinstance(asset, dict) and "name" in asset: # Allow explicitly passing asset document asset_doc = asset else: @@ -325,7 +325,6 @@ def get_usd_master_path(asset, subset, representation): **{ "root": api.registered_root(), "project": api.Session["AVALON_PROJECT"], - "silo": asset_doc["silo"], "asset": asset_doc["name"], "subset": subset, "representation": representation, diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 6d32c11cd7..699e82ebd2 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -593,7 +593,6 @@ def get_representation_path(representation, root=None, dbcon=None): "code": project.get("data", {}).get("code") }, "asset": asset["name"], - "silo": asset.get("silo"), "hierarchy": hierarchy, "subset": subset["name"], "version": version_["name"], diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index e263edd931..b062a9c4b5 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -64,7 +64,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): data["tasks"] = tasks parents = [] visualParent = None - # do not store project"s id as visualParent (silo asset) + # do not store project"s id as visualParent if self.project is not None: if self.project["_id"] != parent["_id"]: visualParent = parent["_id"] diff --git a/openpype/tools/context_dialog/window.py b/openpype/tools/context_dialog/window.py index c8464faa3e..9e030853bf 100644 --- a/openpype/tools/context_dialog/window.py +++ b/openpype/tools/context_dialog/window.py @@ -308,7 +308,6 @@ class ContextDialog(QtWidgets.QDialog): self._validate_strict() def _set_asset_to_tasks_widget(self): - # filter None docs they are silo asset_id = self._assets_widget.get_selected_asset_id() self._tasks_widget.set_asset_id(asset_id) diff --git a/openpype/tools/launcher/lib.py b/openpype/tools/launcher/lib.py index 68c759f295..c1392b7b8f 100644 --- a/openpype/tools/launcher/lib.py +++ b/openpype/tools/launcher/lib.py @@ -1,19 +1,3 @@ -"""Utility script for updating database with configuration files - -Until assets are created entirely in the database, this script -provides a bridge between the file-based project inventory and configuration. - -- Migrating an old project: - $ python -m avalon.inventory --extract --silo-parent=f02_prod - $ python -m avalon.inventory --upload - -- Managing an existing project: - 1. Run `python -m avalon.inventory --load` - 2. 
Update the .inventory.toml or .config.toml - 3. Run `python -m avalon.inventory --save` - -""" - import os from Qt import QtGui import qtawesome diff --git a/openpype/tools/loader/app.py b/openpype/tools/loader/app.py index d73a977ac6..923a1fabdb 100644 --- a/openpype/tools/loader/app.py +++ b/openpype/tools/loader/app.py @@ -290,7 +290,6 @@ class LoaderWindow(QtWidgets.QDialog): subsets_model.clear() self.clear_assets_underlines() - # filter None docs they are silo asset_ids = self._assets_widget.get_selected_asset_ids() # Start loading subsets_widget.set_loading_state( @@ -381,17 +380,9 @@ class LoaderWindow(QtWidgets.QDialog): The context must contain `asset` data by name. - Note: Prior to setting context ensure `refresh` is triggered so that - the "silos" are listed correctly, aside from that setting the - context will force a refresh further down because it changes - the active silo and asset. - Args: context (dict): The context to apply. - - Returns: - None - + refrest (bool): Trigger refresh on context set. """ asset = context.get("asset", None) @@ -399,12 +390,6 @@ class LoaderWindow(QtWidgets.QDialog): return if refresh: - # Workaround: - # Force a direct (non-scheduled) refresh prior to setting the - # asset widget's silo and asset selection to ensure it's correctly - # displaying the silo tabs. Calling `window.refresh()` and directly - # `window.set_context()` the `set_context()` seems to override the - # scheduled refresh and the silo tabs are not shown. self._refresh() self._assets_widget.select_asset_by_name(asset) diff --git a/openpype/tools/standalonepublish/widgets/model_asset.py b/openpype/tools/standalonepublish/widgets/model_asset.py index a7316a2aa7..e9d1517497 100644 --- a/openpype/tools/standalonepublish/widgets/model_asset.py +++ b/openpype/tools/standalonepublish/widgets/model_asset.py @@ -35,7 +35,7 @@ def _iter_model_rows(model, class AssetModel(TreeModel): - """A model listing assets in the silo in the active project. + """A model listing assets in the active project. The assets are displayed in a treeview, they are visually parented by a `visualParent` field in the database containing an `_id` to a parent @@ -64,7 +64,7 @@ class AssetModel(TreeModel): self.refresh() - def _add_hierarchy(self, assets, parent=None, silos=None): + def _add_hierarchy(self, assets, parent=None): """Add the assets that are related to the parent as children items. This method does *not* query the database. These instead are queried @@ -72,27 +72,8 @@ class AssetModel(TreeModel): queries. Resulting in up to 10x speed increase. Args: - assets (dict): All assets in the currently active silo stored - by key/value - - Returns: - None - + assets (dict): All assets from current project. 
""" - if silos: - # WARNING: Silo item "_id" is set to silo value - # mainly because GUI issue with preserve selection and expanded row - # and because of easier hierarchy parenting (in "assets") - for silo in silos: - node = Node({ - "_id": silo, - "name": silo, - "label": silo, - "type": "silo" - }) - self.add_child(node, parent=parent) - self._add_hierarchy(assets, parent=node) - parent_id = parent["_id"] if parent else None current_assets = assets.get(parent_id, list()) @@ -132,27 +113,19 @@ class AssetModel(TreeModel): self.beginResetModel() - # Get all assets in current silo sorted by name + # Get all assets in current project sorted by name db_assets = self.dbcon.find({"type": "asset"}).sort("name", 1) - silos = db_assets.distinct("silo") or None - # if any silo is set to None then it's expected it should not be used - if silos and None in silos: - silos = None # Group the assets by their visual parent's id assets_by_parent = collections.defaultdict(list) for asset in db_assets: - parent_id = ( - asset.get("data", {}).get("visualParent") or - asset.get("silo") - ) + parent_id = asset.get("data", {}).get("visualParent") assets_by_parent[parent_id].append(asset) # Build the hierarchical tree items recursively self._add_hierarchy( assets_by_parent, - parent=None, - silos=silos + parent=None ) self.endResetModel() @@ -173,9 +146,7 @@ class AssetModel(TreeModel): # Allow a custom icon and custom icon color to be defined data = node.get("_document", {}).get("data", {}) - icon = data.get("icon", None) - if icon is None and node.get("type") == "silo": - icon = "database" + icon = data.get("icon", None) color = data.get("color", self._default_asset_icon_color) if icon is None: diff --git a/openpype/tools/standalonepublish/widgets/widget_asset.py b/openpype/tools/standalonepublish/widgets/widget_asset.py index e6b74f8f82..8b43cd7cf8 100644 --- a/openpype/tools/standalonepublish/widgets/widget_asset.py +++ b/openpype/tools/standalonepublish/widgets/widget_asset.py @@ -229,7 +229,6 @@ class AssetWidget(QtWidgets.QWidget): data = { 'project': project['name'], 'asset': asset['name'], - 'silo': asset.get("silo"), 'parents': self.get_parents(asset), 'task': task } diff --git a/openpype/tools/texture_copy/app.py b/openpype/tools/texture_copy/app.py index ceca98a082..0c3c260e51 100644 --- a/openpype/tools/texture_copy/app.py +++ b/openpype/tools/texture_copy/app.py @@ -57,7 +57,6 @@ class TextureCopy: "name": project_name, "code": project['data']['code'] }, - "silo": asset.get('silo'), "asset": asset['name'], "family": 'texture', "subset": 'Main', @@ -155,7 +154,6 @@ def texture_copy(asset, project, path): t.echo(">>> Initializing avalon session ...") os.environ["AVALON_PROJECT"] = project os.environ["AVALON_ASSET"] = asset - os.environ["AVALON_SILO"] = "" TextureCopy().process(asset, project, path) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 713992bc4b..b3d6003b28 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -1266,7 +1266,6 @@ def show(root=None, debug=False, parent=None, use_context=True, save=True): if use_context: context = { "asset": api.Session["AVALON_ASSET"], - "silo": api.Session["AVALON_SILO"], "task": api.Session["AVALON_TASK"] } window.set_context(context) From d721ed94e4e1903a856774a65479894d2961c603 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Mar 2022 12:31:50 +0100 Subject: [PATCH 131/196] simplified loader to not use registered root --- openpype/tools/libraryloader/app.py | 17 
++++------------- openpype/tools/libraryloader/lib.py | 12 ------------ openpype/tools/libraryloader/widgets.py | 18 ------------------ openpype/tools/loader/widgets.py | 23 ++++++++++++++--------- 4 files changed, 18 insertions(+), 52 deletions(-) delete mode 100644 openpype/tools/libraryloader/widgets.py diff --git a/openpype/tools/libraryloader/app.py b/openpype/tools/libraryloader/app.py index 9f8845f30f..b73b415128 100644 --- a/openpype/tools/libraryloader/app.py +++ b/openpype/tools/libraryloader/app.py @@ -9,14 +9,14 @@ from openpype.tools.loader.widgets import ( ThumbnailWidget, VersionWidget, FamilyListView, - RepresentationWidget + RepresentationWidget, + SubsetWidget ) from openpype.tools.utils.assets_widget import MultiSelectAssetsWidget from openpype.modules import ModulesManager from . import lib -from .widgets import LibrarySubsetWidget module = sys.modules[__name__] module.window = None @@ -92,7 +92,7 @@ class LibraryLoaderWindow(QtWidgets.QDialog): # --- Middle part --- # Subsets widget - subsets_widget = LibrarySubsetWidget( + subsets_widget = SubsetWidget( dbcon, self.groups_config, self.family_config_cache, @@ -448,10 +448,7 @@ class LibraryLoaderWindow(QtWidgets.QDialog): def _set_context(self, context, refresh=True): """Set the selection in the interface using a context. The context must contain `asset` data by name. - Note: Prior to setting context ensure `refresh` is triggered so that - the "silos" are listed correctly, aside from that setting the - context will force a refresh further down because it changes - the active silo and asset. + Args: context (dict): The context to apply. Returns: @@ -463,12 +460,6 @@ class LibraryLoaderWindow(QtWidgets.QDialog): return if refresh: - # Workaround: - # Force a direct (non-scheduled) refresh prior to setting the - # asset widget's silo and asset selection to ensure it's correctly - # displaying the silo tabs. Calling `window.refresh()` and directly - # `window.set_context()` the `set_context()` seems to override the - # scheduled refresh and the silo tabs are not shown. self._refresh_assets() self._assets_widget.select_asset_by_name(asset_name) diff --git a/openpype/tools/libraryloader/lib.py b/openpype/tools/libraryloader/lib.py index 6a497a6a16..182b48893a 100644 --- a/openpype/tools/libraryloader/lib.py +++ b/openpype/tools/libraryloader/lib.py @@ -1,7 +1,6 @@ import os import importlib import logging -from openpype.api import Anatomy log = logging.getLogger(__name__) @@ -20,14 +19,3 @@ def find_config(): log.info("Found %s, loading.." 
% config) return importlib.import_module(config) - - -class RegisteredRoots: - roots_per_project = {} - - @classmethod - def registered_root(cls, project_name): - if project_name not in cls.roots_per_project: - cls.roots_per_project[project_name] = Anatomy(project_name).roots - - return cls.roots_per_project[project_name] diff --git a/openpype/tools/libraryloader/widgets.py b/openpype/tools/libraryloader/widgets.py deleted file mode 100644 index 45f9ea2048..0000000000 --- a/openpype/tools/libraryloader/widgets.py +++ /dev/null @@ -1,18 +0,0 @@ -from Qt import QtWidgets - -from .lib import RegisteredRoots -from openpype.tools.loader.widgets import SubsetWidget - - -class LibrarySubsetWidget(SubsetWidget): - def on_copy_source(self): - """Copy formatted source path to clipboard""" - source = self.data.get("source", None) - if not source: - return - - project_name = self.dbcon.Session["AVALON_PROJECT"] - root = RegisteredRoots.registered_root(project_name) - path = source.format(root=root) - clipboard = QtWidgets.QApplication.clipboard() - clipboard.setText(path) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index b14bdd0e93..0934642937 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -7,8 +7,9 @@ import collections from Qt import QtWidgets, QtCore, QtGui -from avalon import api, pipeline +from avalon import pipeline +from openpype.api import Anatomy from openpype.pipeline import HeroVersionType from openpype.pipeline.load import ( discover_loader_plugins, @@ -640,6 +641,7 @@ class VersionTextEdit(QtWidgets.QTextEdit): "source": None, "raw": None } + self._anatomy = None # Reset self.set_version(None) @@ -730,20 +732,20 @@ class VersionTextEdit(QtWidgets.QTextEdit): # Add additional actions when any text so we can assume # the version is set. 
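This loader change replaces `api.registered_root()` with a per-project `Anatomy` lookup that is cached on the widget, so roots are only rebuilt when the active project changes. A standalone sketch of that caching pattern, using only the `Anatomy(project_name)`, `.project_name` and `.roots` members that appear in the hunk below; the wrapper class name is illustrative and not part of the loader:

    from openpype.api import Anatomy

    class AnatomyCache(object):
        """Reuse one Anatomy instance per project when formatting source paths."""

        def __init__(self):
            self._anatomy = None

        def format_source(self, source, project_name):
            # Rebuild the Anatomy object only when the project changed,
            # the same check VersionTextEdit.on_copy_source performs below.
            if (
                self._anatomy is None
                or self._anatomy.project_name != project_name
            ):
                self._anatomy = Anatomy(project_name)
            return source.format(root=self._anatomy.roots)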
if self.toPlainText().strip(): - menu.addSeparator() - action = QtWidgets.QAction("Copy source path to clipboard", - menu) + action = QtWidgets.QAction( + "Copy source path to clipboard", menu + ) action.triggered.connect(self.on_copy_source) menu.addAction(action) - action = QtWidgets.QAction("Copy raw data to clipboard", - menu) + action = QtWidgets.QAction( + "Copy raw data to clipboard", menu + ) action.triggered.connect(self.on_copy_raw) menu.addAction(action) menu.exec_(event.globalPos()) - del menu def on_copy_source(self): """Copy formatted source path to clipboard""" @@ -751,7 +753,11 @@ class VersionTextEdit(QtWidgets.QTextEdit): if not source: return - path = source.format(root=api.registered_root()) + project_name = self.dbcon.Session["AVALON_PROJECT"] + if self._anatomy is None or self._anatomy.project_name != project_name: + self._anatomy = Anatomy(project_name) + + path = source.format(root=self._anatomy.roots) clipboard = QtWidgets.QApplication.clipboard() clipboard.setText(path) @@ -771,7 +777,6 @@ class VersionTextEdit(QtWidgets.QTextEdit): class ThumbnailWidget(QtWidgets.QLabel): - aspect_ratio = (16, 9) max_width = 300 From 6a6cbe6b99a275f3034c782d2abc725be864e9de Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Mar 2022 12:32:13 +0100 Subject: [PATCH 132/196] removed terminal splash --- openpype/lib/splash.txt | 413 -------------------------------- openpype/lib/terminal_splash.py | 43 ---- 2 files changed, 456 deletions(-) delete mode 100644 openpype/lib/splash.txt delete mode 100644 openpype/lib/terminal_splash.py diff --git a/openpype/lib/splash.txt b/openpype/lib/splash.txt deleted file mode 100644 index 833bcd4b9c..0000000000 --- a/openpype/lib/splash.txt +++ /dev/null @@ -1,413 +0,0 @@ - - - - * - - - - - - - .* - - - - - - * - .* - * - - - - . - * - .* - * - . - - . - * - .* - .* - .* - * - . - . - * - .* - .* - .* - * - . - _. - /** - \ * - \* - * - * - . - __. - ---* - \ \* - \ * - \* - * - . - \___. - /* * - \ \ * - \ \* - \ * - \* - . - |____. - /* * - \|\ * - \ \ * - \ \ * - \ \* - \/. - _/_____. - /* * - / \ * - \ \ * - \ \ * - \ \__* - \/__. - __________. - --*-- ___* - \ \ \/_* - \ \ __* - \ \ \_* - \ \____\* - \/____/. - \____________ . - /* ___ \* - \ \ \/_\ * - \ \ _____* - \ \ \___/* - \ \____\ * - \/____/ . - |___________ . - /* ___ \ * - \|\ \/_\ \ * - \ \ _____/ * - \ \ \___/ * - \ \____\ / * - \/____/ \. - _/__________ . - /* ___ \ * - / \ \/_\ \ * - \ \ _____/ * - \ \ \___/ ---* - \ \____\ / \__* - \/____/ \/__. - ____________ . - --*-- ___ \ * - \ \ \/_\ \ * - \ \ _____/ * - \ \ \___/ ---- * - \ \____\ / \____\* - \/____/ \/____/. - ____________ - /\ ___ \ . - \ \ \/_\ \ * - \ \ _____/ * - \ \ \___/ ---- * - \ \____\ / \____\ . - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ . - \ \ _____/ * - \ \ \___/ ---- * - \ \____\ / \____\ . - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ . - \ \ \___/ ---- * - \ \____\ / \____\ . - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ - \ \ \___/ ---- * - \ \____\ / \____\ - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ - \ \ \___/ ---- . 
- \ \____\ / \____\ - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ _ - \ \ \___/ ---- - \ \____\ / \____\ - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ - \ \ \___/ ---- - \ \____\ / \____\ - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ - \ \ \___/ ---- \ - \ \____\ / \____\ - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ - \ \ \___/ ---- \ - \ \____\ / \____\ \ - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ - \ \ \___/ ---- \ - \ \____\ / \____\ __\ - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ - \ \ \___/ ---- \ - \ \____\ / \____\ \__\ - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ - \ \ \___/ ---- \ \ - \ \____\ / \____\ \__\ - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ - \ \ \___/ ---- \ \ - \ \____\ / \____\ \__\ - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___. - \ \ \___/ ---- \ \\ - \ \____\ / \____\ \__\, - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ . - \ \ \___/ ---- \ \\ - \ \____\ / \____\ \__\\, - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ _. - \ \ \___/ ---- \ \\\ - \ \____\ / \____\ \__\\\ - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ __. - \ \ \___/ ---- \ \\ \ - \ \____\ / \____\ \__\\_/. - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___. - \ \ \___/ ---- \ \\ \\ - \ \____\ / \____\ \__\\__\. - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ . - \ \ \___/ ---- \ \\ \\ - \ \____\ / \____\ \__\\__\\. - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ _. - \ \ \___/ ---- \ \\ \\\ - \ \____\ / \____\ \__\\__\\. - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ __. - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\_. - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ __. - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__. - \/____/ \/____/ - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ * - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ O* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ .oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ ..oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . .oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . p.oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . 
Py.oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . PYp.oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . PYPe.oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . PYPE .oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . PYPE c.oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . PYPE C1.oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . PYPE ClU.oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . PYPE CluB.oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . PYPE Club .oO* - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . PYPE Club . .. - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . PYPE Club . .. - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . PYPE Club . . - ____________ - /\ ___ \ - \ \ \/_\ \ - \ \ _____/ ___ ___ ___ - \ \ \___/ ---- \ \\ \\ \ - \ \____\ / \____\ \__\\__\\__\ - \/____/ \/____/ . PYPE Club . diff --git a/openpype/lib/terminal_splash.py b/openpype/lib/terminal_splash.py deleted file mode 100644 index 0ba2706a27..0000000000 --- a/openpype/lib/terminal_splash.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -"""Pype terminal animation.""" -import blessed -from pathlib import Path -from time import sleep - -NO_TERMINAL = False - -try: - term = blessed.Terminal() -except AttributeError: - # this happens when blessed cannot find proper terminal. - # If so, skip printing ascii art animation. 
- NO_TERMINAL = True - - -def play_animation(): - """Play ASCII art Pype animation.""" - if NO_TERMINAL: - return - print(term.home + term.clear) - frame_size = 7 - splash_file = Path(__file__).parent / "splash.txt" - with splash_file.open("r") as sf: - animation = sf.readlines() - - animation_length = int(len(animation) / frame_size) - current_frame = 0 - for _ in range(animation_length): - frame = "".join( - scanline - for y, scanline in enumerate( - animation[current_frame: current_frame + frame_size] - ) - ) - - with term.location(0, 0): - # term.aquamarine3_bold(frame) - print(f"{term.bold}{term.aquamarine3}{frame}{term.normal}") - - sleep(0.02) - current_frame += frame_size - print(term.move_y(7)) From eba76ad9c0b4931059b8409cda8cfa656698a359 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 22 Mar 2022 12:52:39 +0100 Subject: [PATCH 133/196] Change note Co-authored-by: Roy Nieterau --- openpype/tools/workfiles/window.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index c2a3f74a22..8654a18036 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -20,7 +20,7 @@ from .lib import TempPublishFiles, file_size_to_string class SidePanelWidget(QtWidgets.QWidget): save_clicked = QtCore.Signal() published_workfile_message = ( - "INFO: Published workfiles you'll opened will be stored in" + "INFO: Opened published workfiles will be stored in" " temp directory on your machine. Current temp size: {}." ) From 42a79966a3a771be3ec39a53a862850212567739 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Mar 2022 13:38:38 +0100 Subject: [PATCH 134/196] fix trailing spaces --- openpype/tools/standalonepublish/widgets/model_asset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/standalonepublish/widgets/model_asset.py b/openpype/tools/standalonepublish/widgets/model_asset.py index e9d1517497..02e9073555 100644 --- a/openpype/tools/standalonepublish/widgets/model_asset.py +++ b/openpype/tools/standalonepublish/widgets/model_asset.py @@ -146,7 +146,7 @@ class AssetModel(TreeModel): # Allow a custom icon and custom icon color to be defined data = node.get("_document", {}).get("data", {}) - icon = data.get("icon", None) + icon = data.get("icon", None) color = data.get("color", self._default_asset_icon_color) if icon is None: From bfbf7c8d54168015ba106c8496b265f122bdc4e7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Mar 2022 14:48:16 +0100 Subject: [PATCH 135/196] removed redundant line --- openpype/tools/utils/delegates.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/tools/utils/delegates.py b/openpype/tools/utils/delegates.py index 41de7cce60..71f817a1d7 100644 --- a/openpype/tools/utils/delegates.py +++ b/openpype/tools/utils/delegates.py @@ -289,4 +289,3 @@ class PrettyTimeDelegate(QtWidgets.QStyledItemDelegate): def displayText(self, value, locale): if value is not None: return pretty_timestamp(value) - return None From 35e0b043e1dfeb259128e84a954f2e7df2879a71 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Mar 2022 14:49:15 +0100 Subject: [PATCH 136/196] added few docstrings --- openpype/tools/workfiles/lib.py | 62 ++++++++++++++++++++++++++++++- openpype/tools/workfiles/model.py | 43 +++++++++++++++++++++ 2 files changed, 103 insertions(+), 2 deletions(-) diff --git a/openpype/tools/workfiles/lib.py b/openpype/tools/workfiles/lib.py index 
84f2e76450..b9a1f5b19b 100644 --- a/openpype/tools/workfiles/lib.py +++ b/openpype/tools/workfiles/lib.py @@ -10,7 +10,7 @@ import appdirs class TempPublishFilesItem(object): - """Object representing on subfolder in app temp files. + """Object representing copied workfile in app temp folfer. Args: item_id (str): Id of item used as subfolder. @@ -44,7 +44,39 @@ class TempPublishFilesItem(object): class TempPublishFiles(object): - """Directory where """ + """Directory where published workfiles are copied when opened. + + Directory is located in appdirs on the machine. Folder contains file + with metadata about stored files. Each item in metadata has id, filename + and expiration time. When expiration time is higher then current time the + item is removed from metadata and it's files are deleted. Files of items + are stored in subfolder named by item's id. + + Metadata file can be in theory opened and modified by multiple processes, + threads at one time. For those cases is created simple lock file which + is created before modification begins and is removed when modification + ends. Existince of the file means that it should not be modified by + any other process at the same time. + + Metadata example: + ``` + { + "96050b4a-8974-4fca-8179-7c446c478d54": { + "created": 1647880725.555, + "expiration": 1647884325.555, + "filename": "cg_pigeon_workfileModeling_v025.ma" + }, + ... + } + ``` + + ## Why is this needed + Combination of more issues. Temp files are not automatically removed by + OS on windows so using tempfiles in TEMP would lead to kill disk space of + machine. There are also cases when someone wants to open multiple files + in short period of time and want to manually remove those files so keeping + track of temporary copied files in pre-defined structure is needed. + """ minute_in_seconds = 60 hour_in_seconds = 60 * minute_in_seconds day_in_seconds = 24 * hour_in_seconds @@ -72,16 +104,26 @@ class TempPublishFiles(object): @property def life_time(self): + """How long will be new item kept in temp in seconds. + + Returns: + int: Lifetime of temp item. + """ return int(self.hour_in_seconds) @property def size(self): + """File size of existing items.""" size = 0 for item in self.get_items(): size += item.size return size def add_file(self, src_path): + """Add workfile to temp directory. + + This will create new item and source path is copied to it's directory. + """ filename = os.path.basename(src_path) item_id = str(uuid.uuid4()) @@ -105,6 +147,7 @@ class TempPublishFiles(object): @contextlib.contextmanager def _modify_data(self): + """Create lock file when data in metadata file are modified.""" start_time = time.time() timeout = 3 while os.path.exists(self._lock_path): @@ -139,6 +182,15 @@ class TempPublishFiles(object): return output def cleanup(self, check_expiration=True): + """Cleanup files based on metadata. + + Items that passed expiration are removed when this is called. Or all + files are removed when `check_expiration` is set to False. + + Args: + check_expiration (bool): All items and files are removed when set + to True. + """ data = self._get_data() now = time.time() remove_ids = set() @@ -182,6 +234,11 @@ class TempPublishFiles(object): self.cleanup(False) def get_items(self): + """Receive all items from metadata file. + + Returns: + list: Info about each item in metadata. 
+ """ output = [] data = self._get_data() for item_id, item_data in data.items(): @@ -190,6 +247,7 @@ class TempPublishFiles(object): return output def remove_id(self, item_id): + """Remove files of item and then remove the item from metadata.""" filepath = os.path.join(self._root_dir, item_id) if os.path.exists(filepath): shutil.rmtree(filepath) diff --git a/openpype/tools/workfiles/model.py b/openpype/tools/workfiles/model.py index 563a2fc558..4d772c58e0 100644 --- a/openpype/tools/workfiles/model.py +++ b/openpype/tools/workfiles/model.py @@ -19,6 +19,8 @@ ITEM_ID_ROLE = QtCore.Qt.UserRole + 4 class WorkAreaFilesModel(QtGui.QStandardItemModel): + """Model is looking into one folder for files with extension.""" + def __init__(self, extensions, *args, **kwargs): super(WorkAreaFilesModel, self).__init__(*args, **kwargs) @@ -64,6 +66,7 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): return self._empty_root_item def set_root(self, root): + """Change directory where to look for file.""" self._root = root if root and not os.path.exists(root): log.debug("Work Area does not exist: {}".format(root)) @@ -81,7 +84,9 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): self._items_by_filename = {} def refresh(self): + """Refresh and update model items.""" root_item = self.invisibleRootItem() + # If path is not set or does not exist then add invalid path item if not self._root or not os.path.exists(self._root): self._clear() # Add Work Area does not exist placeholder @@ -90,9 +95,14 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): self._invalid_item_visible = True return + # Clear items if previous refresh set '_invalid_item_visible' to True + # - Invalid items are not stored to '_items_by_filename' so they would + # not be removed if self._invalid_item_visible: self._clear() + # Check for new items that should be added and items that should be + # removed new_items = [] items_to_remove = set(self._items_by_filename.keys()) for filename in os.listdir(self._root): @@ -106,6 +116,7 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): modified = os.path.getmtime(filepath) + # Use existing item or create new one if filename in items_to_remove: items_to_remove.remove(filename) item = self._items_by_filename[filename] @@ -118,16 +129,20 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): item.setData(self._file_icon, QtCore.Qt.DecorationRole) new_items.append(item) self._items_by_filename[filename] = item + # Update data that may be different item.setData(filepath, FILEPATH_ROLE) item.setData(modified, DATE_MODIFIED_ROLE) + # Add new items if there are any if new_items: root_item.appendRows(new_items) + # Remove items that are no longer available for filename in items_to_remove: item = self._items_by_filename.pop(filename) root_item.removeRow(item.row()) + # Add empty root item if there are not filenames that could be shown if root_item.rowCount() > 0: self._invalid_item_visible = False else: @@ -136,9 +151,11 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): root_item.appendRow(item) def has_valid_items(self): + """Directory has files that are listed in items.""" return not self._invalid_item_visible def flags(self, index): + # Use flags of first column for all columns if index.column() != 0: index = self.index(index.row(), 0, index.parent()) return super(WorkAreaFilesModel, self).flags(index) @@ -147,6 +164,7 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): if role is None: role = QtCore.Qt.DisplayRole + # Handle roles for first column if index.column() == 1: if role == 
QtCore.Qt.DecorationRole: return None @@ -174,6 +192,22 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): class PublishFilesModel(QtGui.QStandardItemModel): + """Model filling files with published files calculated from representation. + + This model looks for workfile family representations based on selected + asset and task. + + Asset must set to be able look for representations that could be used. + Task is used to filter representations by task. + Model has few filter criteria for filling. + - First criteria is that version document must have "workfile" in + "data.families". + - Second cirteria is that representation must have extension same as + defined extensions + - If task is set then representation must have 'task["name"]' with same + name. + """ + def __init__(self, extensions, dbcon, anatomy, *args, **kwargs): super(PublishFilesModel, self).__init__(*args, **kwargs) @@ -225,6 +259,12 @@ class PublishFilesModel(QtGui.QStandardItemModel): return self._empty_root_item def set_context(self, asset_id, task_name): + """Change context to asset and task. + + Args: + asset_id (ObjectId): Id of selected asset. + task_name (str): Name of selected task. + """ self._asset_id = asset_id self._task_name = task_name self.refresh() @@ -242,6 +282,7 @@ class PublishFilesModel(QtGui.QStandardItemModel): def _get_workfie_representations(self): output = [] + # Get subset docs of asset subset_docs = self._dbcon.find( { "type": "subset", @@ -286,6 +327,7 @@ class PublishFilesModel(QtGui.QStandardItemModel): "context.ext": {"$in": extensions} } ) + # Filter queried representations by task name if task is set filtered_repre_docs = [] for repre_doc in repre_docs: if self._task_name is None: @@ -305,6 +347,7 @@ class PublishFilesModel(QtGui.QStandardItemModel): if task_name == self._task_name: filtered_repre_docs.append(repre_doc) + # Collect paths of representations for repre_doc in filtered_repre_docs: path = get_representation_path( repre_doc, root=self._anatomy.roots From dbe643e4100a6d0630a30f0aefab686ad023e3e7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Mar 2022 15:10:30 +0100 Subject: [PATCH 137/196] Fix cases when published file is not available on the machine --- openpype/tools/workfiles/model.py | 73 ++++++++++++++++++------------- 1 file changed, 43 insertions(+), 30 deletions(-) diff --git a/openpype/tools/workfiles/model.py b/openpype/tools/workfiles/model.py index 4d772c58e0..2695e0d26e 100644 --- a/openpype/tools/workfiles/model.py +++ b/openpype/tools/workfiles/model.py @@ -223,6 +223,10 @@ class PublishFilesModel(QtGui.QStandardItemModel): "fa.file-o", color=get_default_entity_icon_color() ) + self._invalid_icon = qtawesome.icon( + "fa.times", + color=get_disabled_entity_icon_color() + ) self._invalid_item_visible = False self._items_by_id = {} @@ -230,31 +234,29 @@ class PublishFilesModel(QtGui.QStandardItemModel): self._asset_id = None self._task_name = None + def _set_item_invalid(self, item): + item.setFlags(QtCore.Qt.NoItemFlags) + item.setData(self._invalid_icon, QtCore.Qt.DecorationRole) + + def _set_item_valid(self, item): + item.setFlags( + QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable + ) + item.setData(self._file_icon, QtCore.Qt.DecorationRole) + def _get_invalid_context_item(self): if self._invalid_context_item is None: - message = "Selected context is not valid." 
- item = QtGui.QStandardItem(message) - icon = qtawesome.icon( - "fa.times", - color=get_disabled_entity_icon_color() - ) - item.setData(icon, QtCore.Qt.DecorationRole) - item.setFlags(QtCore.Qt.NoItemFlags) + item = QtGui.QStandardItem("Selected context is not valid.") item.setColumnCount(self.columnCount()) + self._set_item_invalid(item) self._invalid_context_item = item return self._invalid_context_item def _get_empty_root_item(self): if self._empty_root_item is None: - message = "Didn't find any published workfiles." - item = QtGui.QStandardItem(message) - icon = qtawesome.icon( - "fa.times", - color=get_disabled_entity_icon_color() - ) - item.setData(icon, QtCore.Qt.DecorationRole) - item.setFlags(QtCore.Qt.NoItemFlags) + item = QtGui.QStandardItem("Didn't find any published workfiles.") item.setColumnCount(self.columnCount()) + self._set_item_invalid(item) self._empty_root_item = item return self._empty_root_item @@ -290,21 +292,15 @@ class PublishFilesModel(QtGui.QStandardItemModel): }, { "_id": True, - "data.families": True, "name": True } ) - filtered_subsets = [] - for subset_doc in subset_docs: - data = subset_doc.get("data") or {} - families = data.get("families") or [] - if "workfile" in families: - filtered_subsets.append(subset_doc) - subset_ids = [subset_doc["_id"] for subset_doc in filtered_subsets] + subset_ids = [subset_doc["_id"] for subset_doc in subset_docs] if not subset_ids: return output + # Get version docs of subsets with their families version_docs = self._dbcon.find( { "type": "version", @@ -312,13 +308,24 @@ class PublishFilesModel(QtGui.QStandardItemModel): }, { "_id": True, + "data.families": True, "parent": True } ) - version_ids = [version_doc["_id"] for version_doc in version_docs] + # Filter versions if they contain 'workfile' family + filtered_versions = [] + for version_doc in version_docs: + data = version_doc.get("data") or {} + families = data.get("families") or [] + if "workfile" in families: + filtered_versions.append(version_doc) + + version_ids = [version_doc["_id"] for version_doc in filtered_versions] if not version_ids: return output + # Query representations of filtered versions and add filter for + # extension extensions = [ext.replace(".", "") for ext in self._file_extensions] repre_docs = self._dbcon.find( { @@ -372,7 +379,6 @@ class PublishFilesModel(QtGui.QStandardItemModel): items_to_remove = set(self._items_by_id.keys()) for item in self._get_workfie_representations(): filepath, repre_id = item - modified = os.path.getmtime(filepath) filename = os.path.basename(filepath) if repre_id in items_to_remove: @@ -381,12 +387,19 @@ class PublishFilesModel(QtGui.QStandardItemModel): else: item = QtGui.QStandardItem(filename) item.setColumnCount(self.columnCount()) - item.setFlags( - QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable - ) - item.setData(self._file_icon, QtCore.Qt.DecorationRole) new_items.append(item) self._items_by_id[repre_id] = item + + if os.path.exists(filepath): + modified = os.path.getmtime(filepath) + tooltip = None + self._set_item_valid(item) + else: + modified = None + tooltip = "File is not available from this machine" + self._set_item_invalid(item) + + item.setData(tooltip, QtCore.Qt.ToolTipRole) item.setData(filepath, FILEPATH_ROLE) item.setData(modified, DATE_MODIFIED_ROLE) item.setData(repre_id, ITEM_ID_ROLE) From 8b06aa590a44e231d519ed19ee605361ff04a1e4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Mar 2022 15:11:38 +0100 Subject: [PATCH 138/196] skip empty filepaths --- 
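Patch 137 above and this patch apply the same defensive pattern to the published-workfiles model: representations that resolve to an empty path are skipped, and files that do not exist on the current machine keep their item but are flagged as unavailable instead of being passed to `os.path.getmtime`. A minimal, model-agnostic sketch of that pattern, assuming nothing beyond the standard library (the function name and return shape are illustrative):

    import os

    def classify_workfile_paths(paths):
        """Split resolved representation paths into available and missing."""
        available = []
        missing = []
        for path in paths:
            if not path:
                # Same guard as the "skip empty filepaths" change below.
                continue
            if os.path.exists(path):
                available.append((path, os.path.getmtime(path)))
            else:
                # Kept, but the UI disables the item and shows a tooltip.
                missing.append(path)
        return available, missing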
openpype/tools/workfiles/model.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/tools/workfiles/model.py b/openpype/tools/workfiles/model.py index 2695e0d26e..8f9dd8c6ba 100644 --- a/openpype/tools/workfiles/model.py +++ b/openpype/tools/workfiles/model.py @@ -379,6 +379,9 @@ class PublishFilesModel(QtGui.QStandardItemModel): items_to_remove = set(self._items_by_id.keys()) for item in self._get_workfie_representations(): filepath, repre_id = item + # TODO handle empty filepaths + if not filepath: + continue filename = os.path.basename(filepath) if repre_id in items_to_remove: From 8be14fc8818fa4842276a5078cd7611f9e165e82 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 22 Mar 2022 16:17:52 +0100 Subject: [PATCH 139/196] Fix typo Co-authored-by: Roy Nieterau --- openpype/tools/workfiles/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/workfiles/lib.py b/openpype/tools/workfiles/lib.py index b9a1f5b19b..0aa78fa00e 100644 --- a/openpype/tools/workfiles/lib.py +++ b/openpype/tools/workfiles/lib.py @@ -10,7 +10,7 @@ import appdirs class TempPublishFilesItem(object): - """Object representing copied workfile in app temp folfer. + """Object representing copied workfile in app temp folder. Args: item_id (str): Id of item used as subfolder. From 231b63df60d67d315f5c2cdc1da0746cabcc53b1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 22 Mar 2022 16:18:01 +0100 Subject: [PATCH 140/196] Fix typo 2 Co-authored-by: Roy Nieterau --- openpype/tools/workfiles/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/workfiles/lib.py b/openpype/tools/workfiles/lib.py index 0aa78fa00e..21a7485b7b 100644 --- a/openpype/tools/workfiles/lib.py +++ b/openpype/tools/workfiles/lib.py @@ -55,7 +55,7 @@ class TempPublishFiles(object): Metadata file can be in theory opened and modified by multiple processes, threads at one time. For those cases is created simple lock file which is created before modification begins and is removed when modification - ends. Existince of the file means that it should not be modified by + ends. Existence of the file means that it should not be modified by any other process at the same time. 
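The `TempPublishFiles` docstrings added in patch 136, and polished by this typo fix and the previous one, describe a JSON metadata file guarded by a plain lock file: wait for any existing lock, create the lock, modify and write the metadata, then remove the lock. A self-contained sketch of that guard under the same timeout-and-take-over assumptions as `_modify_data`; the file paths, timeout value and helper name are illustrative:

    import os
    import json
    import time
    from contextlib import contextmanager

    @contextmanager
    def locked_metadata(metadata_path, lock_path, timeout=3.0):
        """Yield metadata for modification while holding a lock file."""
        start = time.time()
        while os.path.exists(lock_path):
            # Assume a stale lock once the timeout passes and take over.
            if time.time() - start > timeout:
                break
            time.sleep(0.01)

        with open(lock_path, "w"):
            pass
        try:
            data = {}
            if os.path.exists(metadata_path):
                with open(metadata_path, "r") as stream:
                    data = json.load(stream)
            yield data
            with open(metadata_path, "w") as stream:
                json.dump(data, stream)
        finally:
            os.remove(lock_path)

Typical use would be `with locked_metadata(meta_path, lock_path) as data: data[item_id] = item_data`, which matches how items are added and removed around the metadata file.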
Metadata example: From 37f152adbf7e621363726266c315bc02330c5095 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Mar 2022 16:27:57 +0100 Subject: [PATCH 141/196] update avalon core --- repos/avalon-core | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/avalon-core b/repos/avalon-core index 64491fbbcf..2fa14cea6f 160000 --- a/repos/avalon-core +++ b/repos/avalon-core @@ -1 +1 @@ -Subproject commit 64491fbbcf89ba2a0b3a20d67d7486c6142232b3 +Subproject commit 2fa14cea6f6a9d86eec70bbb96860cbe4c75c8eb From 970d0768116d3a658ce1cf5c77b6a5331a1e5c2a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Mar 2022 18:18:02 +0100 Subject: [PATCH 142/196] moved attribute definitions to openpype/lib --- .../testhost/plugins/create/auto_creator.py | 6 ++-- .../testhost/plugins/create/test_creator_1.py | 20 ++++++++----- .../testhost/plugins/create/test_creator_2.py | 6 ++-- .../plugins/publish/collect_instance_1.py | 6 ++-- openpype/lib/__init__.py | 28 +++++++++++++++++ .../lib/attribute_definitions.py | 0 openpype/pipeline/create/context.py | 3 +- openpype/pipeline/lib/__init__.py | 30 ------------------- openpype/widgets/attribute_defs/widgets.py | 2 +- 9 files changed, 52 insertions(+), 49 deletions(-) rename openpype/{pipeline => }/lib/attribute_definitions.py (100%) delete mode 100644 openpype/pipeline/lib/__init__.py diff --git a/openpype/hosts/testhost/plugins/create/auto_creator.py b/openpype/hosts/testhost/plugins/create/auto_creator.py index 45c573e487..d5935602a0 100644 --- a/openpype/hosts/testhost/plugins/create/auto_creator.py +++ b/openpype/hosts/testhost/plugins/create/auto_creator.py @@ -1,10 +1,10 @@ +from avalon import io +from openpype.lib import NumberDef from openpype.hosts.testhost.api import pipeline from openpype.pipeline import ( AutoCreator, CreatedInstance, - lib ) -from avalon import io class MyAutoCreator(AutoCreator): @@ -13,7 +13,7 @@ class MyAutoCreator(AutoCreator): def get_instance_attr_defs(self): output = [ - lib.NumberDef("number_key", label="Number") + NumberDef("number_key", label="Number") ] return output diff --git a/openpype/hosts/testhost/plugins/create/test_creator_1.py b/openpype/hosts/testhost/plugins/create/test_creator_1.py index 45c30e8a27..7664276fa2 100644 --- a/openpype/hosts/testhost/plugins/create/test_creator_1.py +++ b/openpype/hosts/testhost/plugins/create/test_creator_1.py @@ -1,10 +1,16 @@ import json from openpype import resources from openpype.hosts.testhost.api import pipeline +from openpype.lib import ( + UISeparatorDef, + UILabelDef, + BoolDef, + NumberDef, + FileDef, +) from openpype.pipeline import ( Creator, CreatedInstance, - lib ) @@ -54,17 +60,17 @@ class TestCreatorOne(Creator): def get_instance_attr_defs(self): output = [ - lib.NumberDef("number_key", label="Number"), + NumberDef("number_key", label="Number"), ] return output def get_pre_create_attr_defs(self): output = [ - lib.BoolDef("use_selection", label="Use selection"), - lib.UISeparatorDef(), - lib.UILabelDef("Testing label"), - lib.FileDef("filepath", folders=True, label="Filepath"), - lib.FileDef( + BoolDef("use_selection", label="Use selection"), + UISeparatorDef(), + UILabelDef("Testing label"), + FileDef("filepath", folders=True, label="Filepath"), + FileDef( "filepath_2", multipath=True, folders=True, label="Filepath 2" ) ] diff --git a/openpype/hosts/testhost/plugins/create/test_creator_2.py b/openpype/hosts/testhost/plugins/create/test_creator_2.py index e66304a038..f54adee8a2 100644 --- 
a/openpype/hosts/testhost/plugins/create/test_creator_2.py +++ b/openpype/hosts/testhost/plugins/create/test_creator_2.py @@ -1,8 +1,8 @@ +from openpype.lib import NumberDef, TextDef from openpype.hosts.testhost.api import pipeline from openpype.pipeline import ( Creator, CreatedInstance, - lib ) @@ -40,8 +40,8 @@ class TestCreatorTwo(Creator): def get_instance_attr_defs(self): output = [ - lib.NumberDef("number_key"), - lib.TextDef("text_key") + NumberDef("number_key"), + TextDef("text_key") ] return output diff --git a/openpype/hosts/testhost/plugins/publish/collect_instance_1.py b/openpype/hosts/testhost/plugins/publish/collect_instance_1.py index 3c035eccb6..c7241a15a8 100644 --- a/openpype/hosts/testhost/plugins/publish/collect_instance_1.py +++ b/openpype/hosts/testhost/plugins/publish/collect_instance_1.py @@ -1,10 +1,8 @@ import json import pyblish.api -from openpype.pipeline import ( - OpenPypePyblishPluginMixin, - attribute_definitions -) +from openpype.lib import attribute_definitions +from openpype.pipeline import OpenPypePyblishPluginMixin class CollectInstanceOneTestHost( diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 1ebafbb2d2..e8b6d18f4e 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -29,6 +29,21 @@ from .vendor_bin_utils import ( is_oiio_supported ) +from .attribute_definitions import ( + AbtractAttrDef, + + UIDef, + UISeparatorDef, + UILabelDef, + + UnknownDef, + NumberDef, + TextDef, + EnumDef, + BoolDef, + FileDef, +) + from .env_tools import ( env_value_to_bool, get_paths_from_environ, @@ -233,6 +248,19 @@ __all__ = [ "get_ffmpeg_tool_path", "is_oiio_supported", + "AbtractAttrDef", + + "UIDef", + "UISeparatorDef", + "UILabelDef", + + "UnknownDef", + "NumberDef", + "TextDef", + "EnumDef", + "BoolDef", + "FileDef", + "import_filepath", "modules_from_path", "recursive_bases_from_class", diff --git a/openpype/pipeline/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py similarity index 100% rename from openpype/pipeline/lib/attribute_definitions.py rename to openpype/lib/attribute_definitions.py diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index c2757a4502..eeb08a6294 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -6,7 +6,6 @@ import inspect from uuid import uuid4 from contextlib import contextmanager -from ..lib import UnknownDef from .creator_plugins import ( BaseCreator, Creator, @@ -87,6 +86,8 @@ class AttributeValues: origin_data(dict): Values loaded from host before conversion. 
""" def __init__(self, attr_defs, values, origin_data=None): + from openpype.lib.attribute_definitions import UnknownDef + if origin_data is None: origin_data = copy.deepcopy(values) self._origin_data = origin_data diff --git a/openpype/pipeline/lib/__init__.py b/openpype/pipeline/lib/__init__.py deleted file mode 100644 index f762c4205d..0000000000 --- a/openpype/pipeline/lib/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -from .attribute_definitions import ( - AbtractAttrDef, - - UIDef, - UISeparatorDef, - UILabelDef, - - UnknownDef, - NumberDef, - TextDef, - EnumDef, - BoolDef, - FileDef, -) - - -__all__ = ( - "AbtractAttrDef", - - "UIDef", - "UISeparatorDef", - "UILabelDef", - - "UnknownDef", - "NumberDef", - "TextDef", - "EnumDef", - "BoolDef", - "FileDef", -) diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index a6f1b8d6c9..23f025967d 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -2,7 +2,7 @@ import uuid from Qt import QtWidgets, QtCore -from openpype.pipeline.lib import ( +from openpype.lib.attribute_definitions import ( AbtractAttrDef, UnknownDef, NumberDef, From 534ef55e221f0fde90ad57e0c321dd277798c604 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Mar 2022 18:50:26 +0100 Subject: [PATCH 143/196] fix attribute definitions import --- openpype/pipeline/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index d44fbad33e..6ed307dbc7 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -3,8 +3,6 @@ from .constants import ( HOST_WORKFILE_EXTENSIONS, ) -from .lib import attribute_definitions - from .create import ( BaseCreator, Creator, From 0990163a0b487f3e32c5563c7f742e29ca26d0bd Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Wed, 23 Mar 2022 18:30:34 +0900 Subject: [PATCH 144/196] fix removed module from multiverse usd extractor --- .../hosts/maya/plugins/publish/extract_multiverse_usd.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index fd46f87684..d45ceb1932 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -1,10 +1,10 @@ import os -import avalon.maya -import openpype.api - from maya import cmds +import openpype.api +from openpype.hosts.maya.api.lib import maintained_selection + class ExtractMultiverseUsd(openpype.api.Extractor): """Extractor for USD by Multiverse.""" @@ -144,7 +144,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor): # Perform extraction self.log.info("Performing extraction ...") - with avalon.maya.maintained_selection(): + with maintained_selection(): members = instance.data("setMembers") members = cmds.ls(members, dag=True, From 4261d224228c3c3ecf4d8d36252ad594d0a66fc1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 23 Mar 2022 12:37:13 +0100 Subject: [PATCH 145/196] Fix support for Maya 2018 Older versions of Maya do not allow `set` type to be passed to Maya commands and will result in e.g. 
"RuntimeError: # Syntax error: unexpected end ( at position 4 while parsing" --- openpype/hosts/maya/api/lib.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 376c033d46..92fc5133a9 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1511,7 +1511,7 @@ def get_container_members(container): members = cmds.sets(container, query=True) or [] members = cmds.ls(members, long=True, objectsOnly=True) or [] - members = set(members) + all_members = set(members) # Include any referenced nodes from any reference in the container # This is required since we've removed adding ALL nodes of a reference @@ -1530,9 +1530,9 @@ def get_container_members(container): reference_members = cmds.ls(reference_members, long=True, objectsOnly=True) - members.update(reference_members) + all_members.update(reference_members) - return members + return list(all_members) # region LOOKDEV From 748f84b600ac4a362edf08d622d74fc3db2bc05d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Mar 2022 15:02:07 +0100 Subject: [PATCH 146/196] fix usage of collapsed value in CollapsibleWrapper --- openpype/tools/settings/settings/wrapper_widgets.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/tools/settings/settings/wrapper_widgets.py b/openpype/tools/settings/settings/wrapper_widgets.py index 7370fcf945..6b2258157c 100644 --- a/openpype/tools/settings/settings/wrapper_widgets.py +++ b/openpype/tools/settings/settings/wrapper_widgets.py @@ -92,7 +92,8 @@ class CollapsibleWrapper(WrapperWidget): self.content_layout = content_layout if self.collapsible: - body_widget.toggle_content(self.collapsed) + if not self.entity.collapsed: + body_widget.toggle_content() else: body_widget.hide_toolbox(hide_content=False) From 06502b10064020e01d0e2102334f6d0b499a45d8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Mar 2022 16:52:35 +0100 Subject: [PATCH 147/196] safer delete versions --- .../event_handlers_user/action_delete_old_versions.py | 3 ++- openpype/plugins/load/delete_old_versions.py | 7 ++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py index 1b694e25f1..5871646b20 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py @@ -492,7 +492,8 @@ class DeleteOldVersions(BaseAction): os.remove(file_path) self.log.debug("Removed file: {}".format(file_path)) - remainders.remove(file_path_base) + if file_path_base in remainders: + remainders.remove(file_path_base) continue seq_path_base = os.path.split(seq_path)[1] diff --git a/openpype/plugins/load/delete_old_versions.py b/openpype/plugins/load/delete_old_versions.py index 692acdec02..2789f4ea23 100644 --- a/openpype/plugins/load/delete_old_versions.py +++ b/openpype/plugins/load/delete_old_versions.py @@ -126,7 +126,8 @@ class DeleteOldVersions(load.SubsetLoaderPlugin): os.remove(file_path) self.log.debug("Removed file: {}".format(file_path)) - remainders.remove(file_path_base) + if file_path_base in remainders: + remainders.remove(file_path_base) continue seq_path_base = os.path.split(seq_path)[1] @@ -333,6 +334,8 @@ class DeleteOldVersions(load.SubsetLoaderPlugin): def main(self, data, remove_publish_folder): # Size of files. 
size = 0 + if not data: + return size if remove_publish_folder: size = self.delete_whole_dir_paths(data["dir_paths"].values()) @@ -418,6 +421,8 @@ class DeleteOldVersions(load.SubsetLoaderPlugin): ) data = self.get_data(context, versions_to_keep) + if not data: + continue size += self.main(data, remove_publish_folder) print("Progressing {}/{}".format(count + 1, len(contexts))) From 953ff9eedb1c419b954c5f1e12346ba9f5fd30e1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Mar 2022 18:00:57 +0100 Subject: [PATCH 148/196] fix attribute access --- openpype/tools/settings/settings/wrapper_widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/settings/settings/wrapper_widgets.py b/openpype/tools/settings/settings/wrapper_widgets.py index 6b2258157c..b14a226912 100644 --- a/openpype/tools/settings/settings/wrapper_widgets.py +++ b/openpype/tools/settings/settings/wrapper_widgets.py @@ -92,7 +92,7 @@ class CollapsibleWrapper(WrapperWidget): self.content_layout = content_layout if self.collapsible: - if not self.entity.collapsed: + if not self.collapsed: body_widget.toggle_content() else: body_widget.hide_toolbox(hide_content=False) From 7a96bfcfbf2199d2c4c0d1c3d5db9cc049018653 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 09:45:36 +0900 Subject: [PATCH 149/196] deal with handle start and end for multiverse usd extractor --- .../hosts/maya/plugins/publish/extract_multiverse_usd.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index d45ceb1932..c64e5b03e2 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -159,13 +159,16 @@ class ExtractMultiverseUsd(openpype.api.Extractor): time_opts = None frame_start = instance.data['frameStart'] frame_end = instance.data['frameEnd'] + handle_start = instance.data['handleStart'] + handle_end = instance.data['handleEnd'] step = instance.data['step'] fps = instance.data['fps'] if frame_end != frame_start: time_opts = multiverse.TimeOptions() time_opts.writeTimeRange = True - time_opts.frameRange = (frame_start, frame_end) + time_opts.frameRange = ( + frame_start - handle_start, frame_end + handle_end) time_opts.frameIncrement = step time_opts.numTimeSamples = instance.data["numTimeSamples"] time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"] From 006c43c6f08106dc7702ca49623dd18e3f1b18c4 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 10:37:17 +0900 Subject: [PATCH 150/196] replace unicode with str and normalize unicode instance data value --- .../plugins/publish/extract_multiverse_usd.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index c64e5b03e2..d7d1337930 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -1,4 +1,5 @@ import os +import six from maya import cmds @@ -32,9 +33,9 @@ class ExtractMultiverseUsd(openpype.api.Extractor): "flattenParentXforms": bool, "writeSparseOverrides": bool, "useMetaPrimPath": bool, - "customRootPath": unicode, - "customAttributes": unicode, - "nodeTypesToIgnore": unicode, + "customRootPath": str, + "customAttributes": str, + "nodeTypesToIgnore": 
str, "writeMeshes": bool, "writeCurves": bool, "writeParticles": bool, @@ -57,7 +58,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor): "writeTransformMatrix": bool, "writeUsdAttributes": bool, "timeVaryingTopology": bool, - "customMaterialNamespace": unicode, + "customMaterialNamespace": str, "numTimeSamples": int, "timeSamplesSpan": float } @@ -73,9 +74,9 @@ class ExtractMultiverseUsd(openpype.api.Extractor): "flattenParentXforms": False, "writeSparseOverrides": False, "useMetaPrimPath": False, - "customRootPath": u'', - "customAttributes": u'', - "nodeTypesToIgnore": u'', + "customRootPath": str(), + "customAttributes": str(), + "nodeTypesToIgnore": str(), "writeMeshes": True, "writeCurves": True, "writeParticles": True, @@ -98,7 +99,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor): "writeTransformMatrix": True, "writeUsdAttributes": False, "timeVaryingTopology": False, - "customMaterialNamespace": u'', + "customMaterialNamespace": str(), "numTimeSamples": 1, "timeSamplesSpan": 0.0 } @@ -112,6 +113,8 @@ class ExtractMultiverseUsd(openpype.api.Extractor): # Ensure the data is of correct type value = instance.data[key] + if isinstance(value, six.text_type): + value = str(value) if not isinstance(value, self.options[key]): self.log.warning( "Overridden attribute {key} was of " From de8eac521c091914a4a1e7063e792c269b0db162 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 11:00:44 +0900 Subject: [PATCH 151/196] improved multiverse usd composition creator --- .../plugins/create/create_multiverse_usd_comp.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py index 2f57ccec6c..5d216ddb9c 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py @@ -4,7 +4,7 @@ from openpype.hosts.maya.api import plugin, lib class CreateMultiverseUsdComp(plugin.Creator): """Create Multiverse USD Composition""" - name = "usdOverrideMain" + name = "usdCompositionMain" label = "Multiverse USD Composition" family = "usdComposition" icon = "cubes" @@ -17,14 +17,6 @@ class CreateMultiverseUsdComp(plugin.Creator): self.data["flattenContent"] = False self.data["writePendingOverrides"] = False - # The attributes below are about animated cache. 
- self.data["writeTimeRange"] = True - self.data["timeRangeNumTimeSamples"] = 0 - self.data["timeRangeSamplesSpan"] = 0.0 - + # Add animation data animation_data = lib.collect_animation_data(True) - - self.data["timeRangeStart"] = animation_data["frameStart"] - self.data["timeRangeEnd"] = animation_data["frameEnd"] - self.data["timeRangeIncrement"] = animation_data["step"] - self.data["timeRangeFramesPerSecond"] = animation_data["fps"] + self.data.update(animation_data) From 7320e570cac9e2c5852709e9c37dc9e2dd37f9dd Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 11:37:11 +0900 Subject: [PATCH 152/196] fixed multiverse usd composition extractor --- .../publish/extract_multiverse_usd_comp.py | 98 +++++++++++-------- 1 file changed, 58 insertions(+), 40 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py index 449a99e1be..c80a3cce6c 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py @@ -1,10 +1,10 @@ import os -import avalon.maya -import openpype.api - from maya import cmds +import openpype.api +from openpype.hosts.maya.api.lib import maintained_selection + class ExtractMultiverseUsdComposition(openpype.api.Extractor): """Extractor of Multiverse USD Composition.""" @@ -29,38 +29,43 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): "stripNamespaces": bool, "mergeTransformAndShape": bool, "flattenContent": bool, - "writePendingOverrides": bool, - "writeTimeRange": bool, - "timeRangeStart": int, - "timeRangeEnd": int, - "timeRangeIncrement": int, - "timeRangeNumTimeSamples": int, - "timeRangeSamplesSpan": float, - "timeRangeFramesPerSecond": float + "writePendingOverrides": bool } @property def default_options(self): """The default options for Multiverse USD extraction.""" - start_frame = int(cmds.playbackOptions(query=True, - animationStartTime=True)) - end_frame = int(cmds.playbackOptions(query=True, - animationEndTime=True)) return { "stripNamespaces": False, "mergeTransformAndShape": False, "flattenContent": False, - "writePendingOverrides": False, - "writeTimeRange": True, - "timeRangeStart": start_frame, - "timeRangeEnd": end_frame, - "timeRangeIncrement": 1, - "timeRangeNumTimeSamples": 0, - "timeRangeSamplesSpan": 0.0, - "timeRangeFramesPerSecond": 24.0 + "writePendingOverrides": False } + def parse_overrides(self, instance, options): + """Inspect data of instance to determine overridden options""" + + for key in instance.data: + if key not in self.options: + continue + + # Ensure the data is of correct type + value = instance.data[key] + if not isinstance(value, self.options[key]): + self.log.warning( + "Overridden attribute {key} was of " + "the wrong type: {invalid_type} " + "- should have been {valid_type}".format( + key=key, + invalid_type=type(value).__name__, + valid_type=self.options[key].__name__)) + continue + + options[key] = value + + return options + def process(self, instance): # Load plugin firstly cmds.loadPlugin("MultiverseForMaya", quiet=True) @@ -73,46 +78,59 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): # Parse export options options = self.default_options + options = self.parse_overrides(instance, options) self.log.info("Export options: {0}".format(options)) # Perform extraction self.log.info("Performing extraction ...") - with avalon.maya.maintained_selection(): + with maintained_selection(): members = 
instance.data("setMembers") members = cmds.ls(members, dag=True, shapes=True, - type=("mvUsdCompoundShape"), + type="mvUsdCompoundShape", noIntermediate=True, long=True) self.log.info('Collected object {}'.format(members)) - # TODO: Deal with asset, composition, overide with options. import multiverse time_opts = None - if options["writeTimeRange"]: + frame_start = instance.data['frameStart'] + frame_end = instance.data['frameEnd'] + handle_start = instance.data['handleStart'] + handle_end = instance.data['handleEnd'] + step = instance.data['step'] + fps = instance.data['fps'] + if frame_end != frame_start: time_opts = multiverse.TimeOptions() time_opts.writeTimeRange = True - - time_range_start = options["timeRangeStart"] - time_range_end = options["timeRangeEnd"] - time_opts.frameRange = (time_range_start, time_range_end) - - time_opts.frameIncrement = options["timeRangeIncrement"] - time_opts.numTimeSamples = options["timeRangeNumTimeSamples"] - time_opts.timeSamplesSpan = options["timeRangeSamplesSpan"] - time_opts.framePerSecond = options["timeRangeFramesPerSecond"] + time_opts.frameRange = ( + frame_start - handle_start, frame_end + handle_end) + time_opts.frameIncrement = step + time_opts.numTimeSamples = instance.data["numTimeSamples"] + time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"] + time_opts.framePerSecond = fps comp_write_opts = multiverse.CompositionWriteOptions() options_items = getattr(options, "iteritems", options.items) - for (k, v) in options_items(): - if k == "writeTimeRange" or k.startswith("timeRange"): + options_discard_keys = [ + 'numTimeSamples', + 'timeSamplesSpan', + 'frameStart', + 'frameEnd', + 'handleStart', + 'handleEnd', + 'step', + 'fps' + ] + for key, value in options_items(): + if key in options_discard_keys: continue - setattr(comp_write_opts, k, v) - comp_write_opts.timeOptions = time_opts + setattr(asset_write_opts, key, value) + multiverse.WriteComposition(file_path, members, comp_write_opts) if "representations" not in instance.data: From 783c315c7eaba812d6b9e16fde5b962c4ebae6d2 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 12:05:49 +0900 Subject: [PATCH 153/196] fixed creator of multiverse usd composition with more arguments --- .../hosts/maya/plugins/create/create_multiverse_usd_comp.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py index 5d216ddb9c..56b8721ce0 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py @@ -16,6 +16,8 @@ class CreateMultiverseUsdComp(plugin.Creator): self.data["mergeTransformAndShape"] = False self.data["flattenContent"] = False self.data["writePendingOverrides"] = False + self.data["numTimeSamples"] = 1 + self.data["timeSamplesSpan"] = 0.0 # Add animation data animation_data = lib.collect_animation_data(True) From d69d7cddf488dd109f9be3fbb38f13578f165bbb Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 12:07:06 +0900 Subject: [PATCH 154/196] fix multiverse composition extractor for arguments --- .../plugins/publish/extract_multiverse_usd_comp.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py index c80a3cce6c..3876afb89c 100644 --- 
a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py @@ -29,7 +29,9 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): "stripNamespaces": bool, "mergeTransformAndShape": bool, "flattenContent": bool, - "writePendingOverrides": bool + "writePendingOverrides": bool, + "numTimeSamples": int, + "timeSamplesSpan": float } @property @@ -37,10 +39,12 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): """The default options for Multiverse USD extraction.""" return { - "stripNamespaces": False, + "stripNamespaces": True, "mergeTransformAndShape": False, "flattenContent": False, - "writePendingOverrides": False + "writePendingOverrides": False, + "numTimeSamples": 1, + "timeSamplesSpan": 0.0 } def parse_overrides(self, instance, options): @@ -72,7 +76,7 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): # Define output file path staging_dir = self.staging_dir(instance) - file_name = "{}.usda".format(instance.name) + file_name = "{}.usd".format(instance.name) file_path = os.path.join(staging_dir, file_name) file_path = file_path.replace('\\', '/') @@ -129,7 +133,7 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): for key, value in options_items(): if key in options_discard_keys: continue - setattr(asset_write_opts, key, value) + setattr(comp_write_opts, key, value) multiverse.WriteComposition(file_path, members, comp_write_opts) From 3ab9c8c0bf09cf68e74e38684c9ab1dac2367820 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 12:07:37 +0900 Subject: [PATCH 155/196] fixed multiverse usd loader for new api --- .../hosts/maya/plugins/load/load_multiverse_usd.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index dac2244b5f..ce84c0baf8 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -2,8 +2,10 @@ import maya.cmds as cmds import maya.mel as mel -from avalon import api - +from openpype.pipeline import ( + load, + get_representation_path +) from openpype.hosts.maya.api.lib import ( maintained_selection, namespaced, @@ -12,7 +14,7 @@ from openpype.hosts.maya.api.lib import ( from openpype.hosts.maya.api.pipeline import containerise -class MultiverseUsdLoader(api.Loader): +class MultiverseUsdLoader(load.LoaderPlugin): """Load the USD by Multiverse""" families = ["model", "usd", "usdComposition", "usdOverride"] @@ -64,7 +66,7 @@ class MultiverseUsdLoader(api.Loader): shapes = cmds.ls(members, type="mvUsdCompoundShape") assert shapes, "Cannot find mvUsdCompoundShape in container" - path = api.get_representation_path(representation) + path = get_representation_path(representation) import multiverse for shape in shapes: From 498c968805bbaa1097582deee3999499245f03e4 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 13:43:42 +0900 Subject: [PATCH 156/196] use py3 style to iterate option dict --- openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index d7d1337930..29f806375e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ 
-178,7 +178,6 @@ class ExtractMultiverseUsd(openpype.api.Extractor): time_opts.framePerSecond = fps asset_write_opts = multiverse.AssetWriteOptions(time_opts) - options_items = getattr(options, "iteritems", options.items) options_discard_keys = [ 'numTimeSamples', 'timeSamplesSpan', @@ -189,7 +188,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor): 'step', 'fps' ] - for key, value in options_items(): + for key, value in options.items(): if key in options_discard_keys: continue setattr(asset_write_opts, key, value) From 0645dc8190db07e70a6f8c5caa12a47b3975fdb2 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 13:49:58 +0900 Subject: [PATCH 157/196] use py3 style to iterate option dict from multiverse usd composition extractor --- .../hosts/maya/plugins/publish/extract_multiverse_usd_comp.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py index 3876afb89c..1d764d1221 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py @@ -119,7 +119,6 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): time_opts.framePerSecond = fps comp_write_opts = multiverse.CompositionWriteOptions() - options_items = getattr(options, "iteritems", options.items) options_discard_keys = [ 'numTimeSamples', 'timeSamplesSpan', @@ -130,7 +129,7 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): 'step', 'fps' ] - for key, value in options_items(): + for key, value in options.items(): if key in options_discard_keys: continue setattr(comp_write_opts, key, value) From fe2a3e8c2f086bd6dab61d9ac358a8a9ae12f66c Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 13:52:06 +0900 Subject: [PATCH 158/196] use usd as name and ext for representation from multiverse usd composition extractor --- .../hosts/maya/plugins/publish/extract_multiverse_usd_comp.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py index 1d764d1221..8a26379313 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py @@ -140,8 +140,8 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): instance.data["representations"] = [] representation = { - 'name': 'usda', - 'ext': 'usda', + 'name': 'usd', + 'ext': 'usd', 'files': file_name, "stagingDir": staging_dir } From d41d114bd3afcb25c98a1ce15f73322673eae210 Mon Sep 17 00:00:00 2001 From: DMO Date: Thu, 24 Mar 2022 16:49:57 +0900 Subject: [PATCH 159/196] updating create mv overwrite to correct data fields getting used. 
--- .../plugins/create/create_multiverse_usd_over.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py index bdec96c2ff..9ccf2e45fc 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py @@ -21,15 +21,9 @@ class CreateMultiverseUsdOver(plugin.Creator): self.data["writeVariantsDefinition"] = True self.data["writeActiveState"] = True self.data["writeNamespaces"] = False + self.data["numTimeSamples"] = 1 + self.data["timeSamplesSpan"] = 0.0 - # The attributes below are about animated cache. - self.data["writeTimeRange"] = True - self.data["timeRangeNumTimeSamples"] = 0 - self.data["timeRangeSamplesSpan"] = 0.0 - + # Add animation data animation_data = lib.collect_animation_data(True) - - self.data["timeRangeStart"] = animation_data["frameStart"] - self.data["timeRangeEnd"] = animation_data["frameEnd"] - self.data["timeRangeIncrement"] = animation_data["step"] - self.data["timeRangeFramesPerSecond"] = animation_data["fps"] + self.data.update(animation_data) From 20124bdc836839bea94dc480ef813fa03d828fad Mon Sep 17 00:00:00 2001 From: DMO Date: Thu, 24 Mar 2022 16:50:53 +0900 Subject: [PATCH 160/196] fix import - use direct api vs avalon --- .../hosts/maya/plugins/publish/extract_multiverse_usd_over.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py index 406ff8ba11..aa355513f3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py @@ -1,7 +1,7 @@ import os -import avalon.maya import openpype.api +from openpype.hosts.maya.api.lib import maintained_selection from maya import cmds @@ -88,7 +88,7 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): # Perform extraction self.log.info("Performing extraction ...") - with avalon.maya.maintained_selection(): + with maintained_selection(): members = instance.data("setMembers") members = cmds.ls(members, dag=True, From 06dce74a3682bd2f30f4085d628d6e1cd28103dc Mon Sep 17 00:00:00 2001 From: DMO Date: Thu, 24 Mar 2022 16:51:24 +0900 Subject: [PATCH 161/196] use a set instead of a list --- .../hosts/maya/plugins/publish/extract_multiverse_usd.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 29f806375e..2357690160 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -178,7 +178,8 @@ class ExtractMultiverseUsd(openpype.api.Extractor): time_opts.framePerSecond = fps asset_write_opts = multiverse.AssetWriteOptions(time_opts) - options_discard_keys = [ + options_items = getattr(options, "iteritems", options.items) + options_discard_keys = { 'numTimeSamples', 'timeSamplesSpan', 'frameStart', @@ -187,8 +188,8 @@ class ExtractMultiverseUsd(openpype.api.Extractor): 'handleEnd', 'step', 'fps' - ] - for key, value in options.items(): + } + for key, value in options_items(): if key in options_discard_keys: continue setattr(asset_write_opts, key, value) From 1cea33d94ca967a6bc5aaeb7f8b3a10016a71052 Mon Sep 17 
00:00:00 2001 From: DMO Date: Thu, 24 Mar 2022 16:52:02 +0900 Subject: [PATCH 162/196] fixing the data getting used for the write options ; standardise on " vs '. --- .../publish/extract_multiverse_usd_over.py | 72 ++++++++++--------- 1 file changed, 37 insertions(+), 35 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py index aa355513f3..9ee6f99de4 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py @@ -35,13 +35,8 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): "writeVariantsDefinition": bool, "writeActiveState": bool, "writeNamespaces": bool, - "writeTimeRange": bool, - "timeRangeStart": int, - "timeRangeEnd": int, - "timeRangeIncrement": int, - "timeRangeNumTimeSamples": int, - "timeRangeSamplesSpan": float, - "timeRangeFramesPerSecond": float + "numTimeSamples": int, + "timeSamplesSpan": float } @property @@ -62,13 +57,8 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): "writeVariantsDefinition": True, "writeActiveState": True, "writeNamespaces": False, - "writeTimeRange": True, - "timeRangeStart": start_frame, - "timeRangeEnd": end_frame, - "timeRangeIncrement": 1, - "timeRangeNumTimeSamples": 0, - "timeRangeSamplesSpan": 0.0, - "timeRangeFramesPerSecond": 24.0 + "numTimeSamples": 1, + "timeSamplesSpan": 0.0 } def process(self, instance): @@ -79,7 +69,7 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): staging_dir = self.staging_dir(instance) file_name = "{}.usda".format(instance.name) file_path = os.path.join(staging_dir, file_name) - file_path = file_path.replace('\\', '/') + file_path = file_path.replace("\\", "/") # Parse export options options = self.default_options @@ -93,36 +83,48 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): members = cmds.ls(members, dag=True, shapes=True, - type=("mvUsdCompoundShape"), + type="mvUsdCompoundShape", noIntermediate=True, long=True) - self.log.info('Collected object {}'.format(members)) + self.log.info("Collected object {}".format(members)) # TODO: Deal with asset, composition, overide with options. 
import multiverse time_opts = None - if options["writeTimeRange"]: + frame_start = instance.data["frameStart"] + frame_end = instance.data["frameEnd"] + handle_start = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] + step = instance.data["step"] + fps = instance.data["fps"] + if frame_end != frame_start: time_opts = multiverse.TimeOptions() time_opts.writeTimeRange = True + time_opts.frameRange = ( + frame_start - handle_start, frame_end + handle_end) + time_opts.frameIncrement = step + time_opts.numTimeSamples = instance.data["numTimeSamples"] + time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"] + time_opts.framePerSecond = fps - time_range_start = options["timeRangeStart"] - time_range_end = options["timeRangeEnd"] - time_opts.frameRange = (time_range_start, time_range_end) - - time_opts.frameIncrement = options["timeRangeIncrement"] - time_opts.numTimeSamples = options["timeRangeNumTimeSamples"] - time_opts.timeSamplesSpan = options["timeRangeSamplesSpan"] - time_opts.framePerSecond = options["timeRangeFramesPerSecond"] - - over_write_opts = multiverse.OverridesWriteOptions() + over_write_opts = multiverse.OverridesWriteOptions(time_opts) options_items = getattr(options, "iteritems", options.items) - for (k, v) in options_items(): - if k == "writeTimeRange" or k.startswith("timeRange"): + options_discard_keys = { + "numTimeSamples", + "timeSamplesSpan", + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + "step", + "fps" + } + for key, value in options_items(): + if key in options_discard_keys: continue - setattr(over_write_opts, k, v) - over_write_opts.timeOptions = time_opts + setattr(over_write_opts, key, value) for member in members: multiverse.WriteOverrides(file_path, member, over_write_opts) @@ -131,9 +133,9 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): instance.data["representations"] = [] representation = { - 'name': 'usda', - 'ext': 'usda', - 'files': file_name, + "name": "usd", + "ext": "usd", + "files": file_name, "stagingDir": staging_dir } instance.data["representations"].append(representation) From 73e691f379765841bf2169b477a62e5eceaa2100 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 17:27:22 +0900 Subject: [PATCH 163/196] use cmds.listRelatives instead of mel at multiverse usd loader --- openpype/hosts/maya/plugins/load/load_multiverse_usd.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index ce84c0baf8..5361f7a85b 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- import maya.cmds as cmds -import maya.mel as mel from openpype.pipeline import ( load, @@ -44,7 +43,8 @@ class MultiverseUsdLoader(load.LoaderPlugin): with namespaced(namespace, new=False): import multiverse shape = multiverse.CreateUsdCompound(self.fname) - transform = mel.eval('firstParentOf "{}"'.format(shape)) + transform = cmds.listRelatives( + shape, parent=True, fullPath=True)[0] nodes = [transform, shape] self[:] = nodes From 7f18f94abce80194783d3d6032fa39ac834ef577 Mon Sep 17 00:00:00 2001 From: DMO Date: Thu, 24 Mar 2022 17:28:09 +0900 Subject: [PATCH 164/196] removing unused variables. 
--- .../hosts/maya/plugins/publish/extract_multiverse_usd_over.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py index 9ee6f99de4..b26d6421fd 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py @@ -42,10 +42,6 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): @property def default_options(self): """The default options for Multiverse USD extraction.""" - start_frame = int(cmds.playbackOptions(query=True, - animationStartTime=True)) - end_frame = int(cmds.playbackOptions(query=True, - animationEndTime=True)) return { "writeAll": False, From 70c317005cf37fc741e269eb1648fb3398871aab Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Mar 2022 09:42:18 +0100 Subject: [PATCH 165/196] add python version specific vendors outside of launch hook --- openpype/hooks/pre_python_2_prelaunch.py | 35 ------------------------ openpype/lib/applications.py | 35 ++++++++++++++++++++++++ 2 files changed, 35 insertions(+), 35 deletions(-) delete mode 100644 openpype/hooks/pre_python_2_prelaunch.py diff --git a/openpype/hooks/pre_python_2_prelaunch.py b/openpype/hooks/pre_python_2_prelaunch.py deleted file mode 100644 index 84272d2e5d..0000000000 --- a/openpype/hooks/pre_python_2_prelaunch.py +++ /dev/null @@ -1,35 +0,0 @@ -import os -from openpype.lib import PreLaunchHook - - -class PrePython2Vendor(PreLaunchHook): - """Prepend python 2 dependencies for py2 hosts.""" - order = 10 - - def execute(self): - if not self.application.use_python_2: - return - - # Prepare vendor dir path - self.log.info("adding global python 2 vendor") - pype_root = os.getenv("OPENPYPE_REPOS_ROOT") - python_2_vendor = os.path.join( - pype_root, - "openpype", - "vendor", - "python", - "python_2" - ) - - # Add Python 2 modules - python_paths = [ - python_2_vendor - ] - - # Load PYTHONPATH from current launch context - python_path = self.launch_context.env.get("PYTHONPATH") - if python_path: - python_paths.append(python_path) - - # Set new PYTHONPATH to launch context environments - self.launch_context.env["PYTHONPATH"] = os.pathsep.join(python_paths) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index e72585c75a..fcb5226606 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1118,6 +1118,39 @@ class ApplicationLaunchContext: # Return process which is already terminated return process + def _add_python_version_paths(self): + """Add vendor packages specific for a Python version.""" + + # Skip adding if host name is not set + if not self.application.host_name: + return + + # Add Python 2/3 modules + openpype_root = os.getenv("OPENPYPE_REPOS_ROOT") + python_vendor_dir = os.path.join( + openpype_root, + "openpype", + "vendor", + "python" + ) + python_paths = [] + if self.application.use_python_2: + python_paths.append( + os.path.join(python_vendor_dir, "python_2") + ) + else: + python_paths.append( + os.path.join(python_vendor_dir, "python_3") + ) + + # Load PYTHONPATH from current launch context + python_path = self.env.get("PYTHONPATH") + if python_path: + python_paths.append(python_path) + + # Set new PYTHONPATH to launch context environments + self.env["PYTHONPATH"] = os.pathsep.join(python_paths) + def launch(self): """Collect data for new process and then create it. 
@@ -1130,6 +1163,8 @@ class ApplicationLaunchContext: self.log.warning("Application was already launched.") return + self._add_python_version_paths() + # Discover launch hooks self.discover_launch_hooks() From e64797c4337c97b9119a7eaf704f806c09a46def Mon Sep 17 00:00:00 2001 From: DMO Date: Thu, 24 Mar 2022 17:43:43 +0900 Subject: [PATCH 166/196] switch do more readable options.items() --- .../hosts/maya/plugins/publish/extract_multiverse_usd_over.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py index b26d6421fd..83d840d045 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py @@ -106,7 +106,6 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): time_opts.framePerSecond = fps over_write_opts = multiverse.OverridesWriteOptions(time_opts) - options_items = getattr(options, "iteritems", options.items) options_discard_keys = { "numTimeSamples", "timeSamplesSpan", @@ -117,7 +116,7 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): "step", "fps" } - for key, value in options_items(): + for key, value in options.items(): if key in options_discard_keys: continue setattr(over_write_opts, key, value) From 600a07237c8412468ce72e7d5951535acedc291b Mon Sep 17 00:00:00 2001 From: DMO Date: Thu, 24 Mar 2022 17:45:10 +0900 Subject: [PATCH 167/196] removed extra space. --- .../hosts/maya/plugins/publish/extract_multiverse_usd_over.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py index 83d840d045..ce0e8a392a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py @@ -116,7 +116,7 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor): "step", "fps" } - for key, value in options.items(): + for key, value in options.items(): if key in options_discard_keys: continue setattr(over_write_opts, key, value) From ef832181e4e0a3e6f0e7e06f7d06e800ab17a79a Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 17:46:28 +0900 Subject: [PATCH 168/196] use py3 style to iterate options dict --- openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 2357690160..4e4efdc32c 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -178,7 +178,6 @@ class ExtractMultiverseUsd(openpype.api.Extractor): time_opts.framePerSecond = fps asset_write_opts = multiverse.AssetWriteOptions(time_opts) - options_items = getattr(options, "iteritems", options.items) options_discard_keys = { 'numTimeSamples', 'timeSamplesSpan', @@ -189,7 +188,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor): 'step', 'fps' } - for key, value in options_items(): + for key, value in options.items(): if key in options_discard_keys: continue setattr(asset_write_opts, key, value) From 014d0d8c53946a494843a4f735a35f1c4a04172f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Mar 2022 09:48:46 +0100 Subject: [PATCH 169/196] moved 
adding of vendor paths to prepare_app_environments --- openpype/lib/applications.py | 72 ++++++++++++++++++------------------ 1 file changed, 37 insertions(+), 35 deletions(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index fcb5226606..ad59ae0dbc 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1118,39 +1118,6 @@ class ApplicationLaunchContext: # Return process which is already terminated return process - def _add_python_version_paths(self): - """Add vendor packages specific for a Python version.""" - - # Skip adding if host name is not set - if not self.application.host_name: - return - - # Add Python 2/3 modules - openpype_root = os.getenv("OPENPYPE_REPOS_ROOT") - python_vendor_dir = os.path.join( - openpype_root, - "openpype", - "vendor", - "python" - ) - python_paths = [] - if self.application.use_python_2: - python_paths.append( - os.path.join(python_vendor_dir, "python_2") - ) - else: - python_paths.append( - os.path.join(python_vendor_dir, "python_3") - ) - - # Load PYTHONPATH from current launch context - python_path = self.env.get("PYTHONPATH") - if python_path: - python_paths.append(python_path) - - # Set new PYTHONPATH to launch context environments - self.env["PYTHONPATH"] = os.pathsep.join(python_paths) - def launch(self): """Collect data for new process and then create it. @@ -1163,8 +1130,6 @@ class ApplicationLaunchContext: self.log.warning("Application was already launched.") return - self._add_python_version_paths() - # Discover launch hooks self.discover_launch_hooks() @@ -1354,6 +1319,41 @@ def _merge_env(env, current_env): return result +def _add_python_version_paths(app, env, logger): + """Add vendor packages specific for a Python version.""" + + # Skip adding if host name is not set + if not app.host_name: + return + + # Add Python 2/3 modules + openpype_root = os.getenv("OPENPYPE_REPOS_ROOT") + python_vendor_dir = os.path.join( + openpype_root, + "openpype", + "vendor", + "python" + ) + if app.use_python_2: + pythonpath = os.path.join(python_vendor_dir, "python_2") + else: + pythonpath = os.path.join(python_vendor_dir, "python_3") + + if not os.path.exists(pythonpath): + return + + logger.debug("Adding Python version specific paths to PYTHONPATH") + python_paths = [pythonpath] + + # Load PYTHONPATH from current launch context + python_path = env.get("PYTHONPATH") + if python_path: + python_paths.append(python_path) + + # Set new PYTHONPATH to launch context environments + env["PYTHONPATH"] = os.pathsep.join(python_paths) + + def prepare_app_environments(data, env_group=None, implementation_envs=True): """Modify launch environments based on launched app and context. 
@@ -1366,6 +1366,8 @@ def prepare_app_environments(data, env_group=None, implementation_envs=True): app = data["app"] log = data["log"] + _add_python_version_paths(app, data["env"], log) + # `added_env_keys` has debug purpose added_env_keys = {app.group.name, app.name} # Environments for application From 6ea037175a03db3efc022a42859120754f8a4b28 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Thu, 24 Mar 2022 17:57:18 +0900 Subject: [PATCH 170/196] use set instead of list for discard option keys from multiverse composition extractor --- .../hosts/maya/plugins/publish/extract_multiverse_usd_comp.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py index 8a26379313..8fccc412e6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py @@ -119,7 +119,7 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): time_opts.framePerSecond = fps comp_write_opts = multiverse.CompositionWriteOptions() - options_discard_keys = [ + options_discard_keys = { 'numTimeSamples', 'timeSamplesSpan', 'frameStart', @@ -128,7 +128,7 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor): 'handleEnd', 'step', 'fps' - ] + } for key, value in options.items(): if key in options_discard_keys: continue From aaa4c1d54ce71771bdc9a8608c42a5e5213b6b47 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 24 Mar 2022 11:53:57 +0100 Subject: [PATCH 171/196] Add generic exception handling for Slack notification --- openpype/modules/slack/plugins/publish/integrate_slack_api.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 018a7594bb..c0392b0195 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -210,6 +210,9 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): # You will get a SlackApiError if "ok" is False error_str = self._enrich_error(str(e.response["error"]), channel) self.log.warning("Error happened {}".format(error_str)) + except Exception as e: + error_str = self._enrich_error(str(e), channel) + self.log.warning("Not SlackAPI error: {}".format(error_str)) return None, [] From 4442ef71289fb6b9fdd9694ed5516dedfb73732d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 24 Mar 2022 11:57:56 +0100 Subject: [PATCH 172/196] Update openpype/modules/slack/plugins/publish/integrate_slack_api.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/modules/slack/plugins/publish/integrate_slack_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index c0392b0195..f236662361 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -212,7 +212,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): self.log.warning("Error happened {}".format(error_str)) except Exception as e: error_str = self._enrich_error(str(e), channel) - self.log.warning("Not SlackAPI error: {}".format(error_str)) + self.log.warning("Not SlackAPI error", exc_info=True) return None, [] From 
543e80e84f94252a6ea99c3fd15a660f92c0e3a8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Mar 2022 13:22:24 +0100 Subject: [PATCH 173/196] added deafult implementation of optional pyblish plugin which will show attributes in new publisher UI --- openpype/pipeline/__init__.py | 4 +- openpype/pipeline/publish/__init__.py | 2 + openpype/pipeline/publish/publish_plugins.py | 58 ++++++++++++++++++++ 3 files changed, 63 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index 6ed307dbc7..511e4c7b94 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -41,7 +41,8 @@ from .publish import ( PublishValidationError, PublishXmlValidationError, KnownPublishError, - OpenPypePyblishPluginMixin + OpenPypePyblishPluginMixin, + OptionalPyblishPluginMixin, ) from .actions import ( @@ -105,6 +106,7 @@ __all__ = ( "PublishXmlValidationError", "KnownPublishError", "OpenPypePyblishPluginMixin", + "OptionalPyblishPluginMixin", # --- Actions --- "LauncherAction", diff --git a/openpype/pipeline/publish/__init__.py b/openpype/pipeline/publish/__init__.py index c2729a46ce..af5d7c4a91 100644 --- a/openpype/pipeline/publish/__init__.py +++ b/openpype/pipeline/publish/__init__.py @@ -3,6 +3,7 @@ from .publish_plugins import ( PublishXmlValidationError, KnownPublishError, OpenPypePyblishPluginMixin, + OptionalPyblishPluginMixin, ) from .lib import ( @@ -18,6 +19,7 @@ __all__ = ( "PublishXmlValidationError", "KnownPublishError", "OpenPypePyblishPluginMixin", + "OptionalPyblishPluginMixin", "DiscoverResult", "publish_plugins_discover", diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index bce64ec709..6b908c3ae3 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -1,3 +1,4 @@ +from openpype.lib import BoolDef from .lib import load_help_content_from_plugin @@ -108,3 +109,60 @@ class OpenPypePyblishPluginMixin: plugin_values[key] ) return attribute_values + + def get_attr_values_from_data(self, data): + """Get attribute values for attribute definitoins from data. + + Args: + data(dict): Data from instance or context. + """ + return ( + data + .get("publish_attributes", {}) + .get(self.__class__.__name__, {}) + ) + + +class OptionalPyblishPluginMixin(OpenPypePyblishPluginMixin): + """Prepare mixin for optional plugins. + + Defined active attribute definition prepared for published and + prepares method which will check if is active or not. + + ``` + def process(self, instance): + # Skip the instance if is not active by data on the instance + if not self.is_active(instance.data): + return + ``` + """ + + @classmethod + def get_attribute_defs(cls): + """Attribute definitions based on plugin's optional attribute.""" + + # Empty list if plugin is not optional + if not getattr(cls, "optional", None): + return [] + + # Get active value from class as default value + active = getattr(cls, "active", True) + # Return boolean stored under 'active' key with label of the class name + return [ + BoolDef("active", default=active, label=cls.__name__) + ] + + def is_active(self, data): + """Check if plugins is active for instance/context based on their data. + + Args: + data(dict): Data from instance or context. 
+ """ + # Skip if is not optional and return True + if not getattr(self, "optional", None): + return True + attr_values = self.get_attr_values_from_data(data) + active = attr_values.get("active") + if active is None: + active = getattr(self, "active", True) + return active From d0c4f188c75edaab4e9b998c105fe541af8ff003 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Mar 2022 13:25:24 +0100 Subject: [PATCH 174/196] added better example in docstring --- openpype/pipeline/publish/publish_plugins.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 6b908c3ae3..be5efa34c1 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -130,10 +130,13 @@ class OptionalPyblishPluginMixin(OpenPypePyblishPluginMixin): prepares method which will check if is active or not. ``` - def process(self, instance): - # Skip the instance if is not active by data on the instance - if not self.is_active(instance.data): - return + class ValidateScene( + pyblish.api.InstancePlugin, OptionalPyblishPluginMixin + ): + def process(self, instance): + # Skip the instance if is not active by data on the instance + if not self.is_active(instance.data): + return ``` """ From 3a2603d8f735395f5925ca4de67fee7a12b4c13d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Mar 2022 13:26:55 +0100 Subject: [PATCH 175/196] changed label --- openpype/pipeline/publish/publish_plugins.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index be5efa34c1..83d6c717d0 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -151,8 +151,9 @@ class OptionalPyblishPluginMixin(OpenPypePyblishPluginMixin): # Get active value from class as default value active = getattr(cls, "active", True) # Return boolean stored under 'active' key with label of the class name + label = cls.label or cls.__name__ return [ - BoolDef("active", default=active, label=cls.__name__) + BoolDef("active", default=active, label=label) ] def is_active(self, data): From 5402a99634f932e9a423d073fbc6cb53baf30283 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Mar 2022 13:31:38 +0100 Subject: [PATCH 176/196] fix imports in scene inventory --- openpype/tools/sceneinventory/switch_dialog.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/tools/sceneinventory/switch_dialog.py b/openpype/tools/sceneinventory/switch_dialog.py index 252f5cde4c..bb3e2615ac 100644 --- a/openpype/tools/sceneinventory/switch_dialog.py +++ b/openpype/tools/sceneinventory/switch_dialog.py @@ -4,11 +4,12 @@ from Qt import QtWidgets, QtCore import qtawesome from bson.objectid import ObjectId -from avalon import io, pipeline -from openpype.pipeline import ( +from avalon import io +from openpype.pipeline.load import ( discover_loader_plugins, switch_container, get_repres_contexts, + loaders_from_repre_context, ) from .widgets import ( @@ -370,7 +371,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): loaders = None for repre_context in repre_contexts.values(): - _loaders = set(pipeline.loaders_from_repre_context( + _loaders = set(loaders_from_repre_context( available_loaders, repre_context )) if loaders is None: From cf40d52d4d4c1e201e928d0d77730fe18d7f8e3f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Mar 2022 
13:31:45 +0100 Subject: [PATCH 177/196] removed debug prints --- openpype/pipeline/actions.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/pipeline/actions.py b/openpype/pipeline/actions.py index a045c92aa7..141e277db3 100644 --- a/openpype/pipeline/actions.py +++ b/openpype/pipeline/actions.py @@ -115,10 +115,8 @@ def discover_inventory_actions(): filtered_actions = [] for action in actions: if action is not InventoryAction: - print("DISCOVERED", action) filtered_actions.append(action) - else: - print("GOT SOURCE") + return filtered_actions From 3a54f370b8b86279f345f5058e468a87149c6a72 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 24 Mar 2022 13:46:04 +0100 Subject: [PATCH 178/196] Fix docstring Co-authored-by: Roy Nieterau --- openpype/pipeline/publish/publish_plugins.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 83d6c717d0..2402a005c2 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -111,7 +111,7 @@ class OpenPypePyblishPluginMixin: return attribute_values def get_attr_values_from_data(self, data): - """Get attribute values for attribute definitoins from data. + """Get attribute values for attribute definitions from data. Args: data(dict): Data from instance or context. From ce4caeabd3f02262374e5bff7121741602e526ff Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 24 Mar 2022 13:47:07 +0100 Subject: [PATCH 179/196] Added configurable maximum file size of review upload to Slack --- .../plugins/publish/collect_slack_family.py | 29 +++++++++++-------- .../plugins/publish/integrate_slack_api.py | 18 ++++++++++-- .../projects_schema/schema_project_slack.json | 9 ++++++ 3 files changed, 42 insertions(+), 14 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/slack/plugins/publish/collect_slack_family.py index 6c965b04cd..7475bdc89e 100644 --- a/openpype/modules/slack/plugins/publish/collect_slack_family.py +++ b/openpype/modules/slack/plugins/publish/collect_slack_family.py @@ -35,20 +35,25 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin): return # make slack publishable - if profile: - self.log.info("Found profile: {}".format(profile)) - if instance.data.get('families'): - instance.data['families'].append('slack') - else: - instance.data['families'] = ['slack'] + if not profile: + return - instance.data["slack_channel_message_profiles"] = \ - profile["channel_messages"] + self.log.info("Found profile: {}".format(profile)) + if instance.data.get('families'): + instance.data['families'].append('slack') + else: + instance.data['families'] = ['slack'] - slack_token = (instance.context.data["project_settings"] - ["slack"] - ["token"]) - instance.data["slack_token"] = slack_token + selected_profiles = profile["channel_messages"] + for prof in selected_profiles: + prof["review_upload_limit"] = profile.get("review_upload_limit", + 50) + instance.data["slack_channel_message_profiles"] = selected_profiles + + slack_token = (instance.context.data["project_settings"] + ["slack"] + ["token"]) + instance.data["slack_token"] = slack_token def main_family_from_instance(self, instance): # TODO yank from integrate """Returns main family of entered instance.""" diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py 
b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 018a7594bb..8fde25b42d 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -35,7 +35,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): message = self._get_filled_message(message_profile["message"], instance, review_path) - self.log.info("message:: {}".format(message)) + self.log.debug("message:: {}".format(message)) if not message: return @@ -43,7 +43,8 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): publish_files.add(thumbnail_path) if message_profile["upload_review"] and review_path: - publish_files.add(review_path) + message, publish_files = self._handle_review_upload( + message, message_profile, publish_files, review_path) project = instance.context.data["anatomyData"]["project"]["code"] for channel in message_profile["channels"]: @@ -75,6 +76,19 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): dbcon = mongo_client[database_name]["notification_messages"] dbcon.insert_one(msg) + def _handle_review_upload(self, message, message_profile, publish_files, + review_path): + """Check if uploaded file is not too large""" + review_file_size_MB = os.path.getsize(review_path) / 1024 / 1024 + file_limit = message_profile.get("review_upload_limit", 50) + if review_file_size_MB > file_limit: + if review_path not in message: + message += "\n Review upload omitted because of " + \ + "file size, file located at: {}".format(review_path) + else: + publish_files.add(review_path) + return message, publish_files + def _get_filled_message(self, message_templ, instance, review_path=None): """Use message_templ and data from instance to get message content. diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json index 14814d8b01..9ca7c35e10 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json @@ -75,6 +75,15 @@ "type": "list", "object_type": "text" }, + { + "type": "number", + "key": "review_upload_limit", + "label": "Upload review of maximally size (MB)", + "decimal": 2, + "default": 50, + "minimum": 0, + "maximum": 1000000 + }, { "type": "separator" }, From 18d883ff0f4c44b6f5d3f46e3b1d26b985766493 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 14:31:57 +0100 Subject: [PATCH 180/196] anatomy data with correct task short key --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 05d2ffd821..b4e6abb72d 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1705,7 +1705,7 @@ def _get_task_context_data_for_anatomy( "task": { "name": task_name, "type": task_type, - "short_name": project_task_type_data["short_name"] + "short": project_task_type_data["short_name"] } } From 1aa56e10d4f0dc2b23dc1512268d6c3ae3e64da7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Mar 2022 14:47:23 +0100 Subject: [PATCH 181/196] nuke: python3 compatibility issue with `iteritems` --- openpype/hosts/nuke/plugins/load/load_effects.py | 6 +++--- openpype/hosts/nuke/plugins/load/load_effects_ip.py | 6 +++--- openpype/hosts/nuke/plugins/load/load_gizmo_ip.py | 2 +- .../nuke/plugins/publish/validate_write_deadline_tab.py | 2 +- 4 files changed, 8 insertions(+), 8 
deletions(-) diff --git a/openpype/hosts/nuke/plugins/load/load_effects.py b/openpype/hosts/nuke/plugins/load/load_effects.py index 68c3952942..675ac9d46f 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects.py +++ b/openpype/hosts/nuke/plugins/load/load_effects.py @@ -72,7 +72,7 @@ class LoadEffects(load.LoaderPlugin): # getting data from json file with unicode conversion with open(file, "r") as f: json_f = {self.byteify(key): self.byteify(value) - for key, value in json.load(f).iteritems()} + for key, value in json.load(f).items()} # get correct order of nodes by positions on track and subtrack nodes_order = self.reorder_nodes(json_f) @@ -188,7 +188,7 @@ class LoadEffects(load.LoaderPlugin): # getting data from json file with unicode conversion with open(file, "r") as f: json_f = {self.byteify(key): self.byteify(value) - for key, value in json.load(f).iteritems()} + for key, value in json.load(f).items()} # get correct order of nodes by positions on track and subtrack nodes_order = self.reorder_nodes(json_f) @@ -330,7 +330,7 @@ class LoadEffects(load.LoaderPlugin): if isinstance(input, dict): return {self.byteify(key): self.byteify(value) - for key, value in input.iteritems()} + for key, value in input.items()} elif isinstance(input, list): return [self.byteify(element) for element in input] elif isinstance(input, unicode): diff --git a/openpype/hosts/nuke/plugins/load/load_effects_ip.py b/openpype/hosts/nuke/plugins/load/load_effects_ip.py index 9c4fd4c2c6..91f5685920 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_effects_ip.py @@ -74,7 +74,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin): # getting data from json file with unicode conversion with open(file, "r") as f: json_f = {self.byteify(key): self.byteify(value) - for key, value in json.load(f).iteritems()} + for key, value in json.load(f).items()} # get correct order of nodes by positions on track and subtrack nodes_order = self.reorder_nodes(json_f) @@ -194,7 +194,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin): # getting data from json file with unicode conversion with open(file, "r") as f: json_f = {self.byteify(key): self.byteify(value) - for key, value in json.load(f).iteritems()} + for key, value in json.load(f).items()} # get correct order of nodes by positions on track and subtrack nodes_order = self.reorder_nodes(json_f) @@ -350,7 +350,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin): if isinstance(input, dict): return {self.byteify(key): self.byteify(value) - for key, value in input.iteritems()} + for key, value in input.items()} elif isinstance(input, list): return [self.byteify(element) for element in input] elif isinstance(input, unicode): diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py index 87bebce15b..df52a22364 100644 --- a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py @@ -240,7 +240,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin): if isinstance(input, dict): return {self.byteify(key): self.byteify(value) - for key, value in input.iteritems()} + for key, value in input.items()} elif isinstance(input, list): return [self.byteify(element) for element in input] elif isinstance(input, unicode): diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py b/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py index 5ee93403d0..907577a97d 100644 --- 
a/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py @@ -25,7 +25,7 @@ class RepairNukeWriteDeadlineTab(pyblish.api.Action): # Remove existing knobs. knob_names = openpype.hosts.nuke.lib.get_deadline_knob_names() - for name, knob in group_node.knobs().iteritems(): + for name, knob in group_node.knobs().items(): if name in knob_names: group_node.removeKnob(knob) From c0d27f47d236dee4ed793682c8fef6e9c9f9de3c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Mar 2022 15:20:53 +0100 Subject: [PATCH 182/196] don't refresh log viewer on initialization but on first show --- openpype/modules/log_viewer/tray/app.py | 9 +++++++++ openpype/modules/log_viewer/tray/widgets.py | 11 +++++++++-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/openpype/modules/log_viewer/tray/app.py b/openpype/modules/log_viewer/tray/app.py index 1e8d6483cd..71827fcac9 100644 --- a/openpype/modules/log_viewer/tray/app.py +++ b/openpype/modules/log_viewer/tray/app.py @@ -26,3 +26,12 @@ class LogsWindow(QtWidgets.QWidget): self.log_detail = log_detail self.setStyleSheet(style.load_stylesheet()) + + self._frist_show = True + + def showEvent(self, event): + super(LogsWindow, self).showEvent(event) + + if self._frist_show: + self._frist_show = False + self.logs_widget.refresh() diff --git a/openpype/modules/log_viewer/tray/widgets.py b/openpype/modules/log_viewer/tray/widgets.py index ff77405de5..ed08e62109 100644 --- a/openpype/modules/log_viewer/tray/widgets.py +++ b/openpype/modules/log_viewer/tray/widgets.py @@ -155,6 +155,11 @@ class LogsWidget(QtWidgets.QWidget): QtCore.Qt.DescendingOrder ) + refresh_triggered_timer = QtCore.QTimer() + refresh_triggered_timer.setSingleShot(True) + refresh_triggered_timer.setInterval(200) + + refresh_triggered_timer.timeout.connect(self._on_refresh_timeout) view.selectionModel().selectionChanged.connect(self._on_index_change) refresh_btn.clicked.connect(self._on_refresh_clicked) @@ -169,10 +174,12 @@ class LogsWidget(QtWidgets.QWidget): self.detail_widget = detail_widget self.refresh_btn = refresh_btn - # prepare - self.refresh() + self._refresh_triggered_timer = refresh_triggered_timer def refresh(self): + self._refresh_triggered_timer.start() + + def _on_refresh_timeout(self): self.model.refresh() self.detail_widget.refresh() From bb8bd9042778ad4293682993832242cb8b57e200 Mon Sep 17 00:00:00 2001 From: DMO Date: Fri, 25 Mar 2022 11:08:53 +0900 Subject: [PATCH 183/196] add the animation data first to maintain order. --- .../hosts/maya/plugins/create/create_multiverse_usd.py | 7 +++---- .../maya/plugins/create/create_multiverse_usd_comp.py | 7 +++---- .../maya/plugins/create/create_multiverse_usd_over.py | 7 +++---- 3 files changed, 9 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py index c06c764f95..b2266e5a57 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py @@ -12,6 +12,9 @@ class CreateMultiverseUsd(plugin.Creator): def __init__(self, *args, **kwargs): super(CreateMultiverseUsd, self).__init__(*args, **kwargs) + # Add animation data first, since it maintains order. 
+ self.data.update(lib.collect_animation_data(True)) + self.data["stripNamespaces"] = False self.data["mergeTransformAndShape"] = False self.data["writeAncestors"] = True @@ -46,7 +49,3 @@ class CreateMultiverseUsd(plugin.Creator): self.data["customMaterialNamespace"] = '' self.data["numTimeSamples"] = 1 self.data["timeSamplesSpan"] = 0.0 - - # Add animation data - animation_data = lib.collect_animation_data(True) - self.data.update(animation_data) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py index 56b8721ce0..77b808c459 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py @@ -12,13 +12,12 @@ class CreateMultiverseUsdComp(plugin.Creator): def __init__(self, *args, **kwargs): super(CreateMultiverseUsdComp, self).__init__(*args, **kwargs) + # Add animation data first, since it maintains order. + self.data.update(lib.collect_animation_data(True)) + self.data["stripNamespaces"] = False self.data["mergeTransformAndShape"] = False self.data["flattenContent"] = False self.data["writePendingOverrides"] = False self.data["numTimeSamples"] = 1 self.data["timeSamplesSpan"] = 0.0 - - # Add animation data - animation_data = lib.collect_animation_data(True) - self.data.update(animation_data) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py index 9ccf2e45fc..bb82ab2039 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd_over.py @@ -12,6 +12,9 @@ class CreateMultiverseUsdOver(plugin.Creator): def __init__(self, *args, **kwargs): super(CreateMultiverseUsdOver, self).__init__(*args, **kwargs) + # Add animation data first, since it maintains order. 
+ self.data.update(lib.collect_animation_data(True)) + self.data["writeAll"] = False self.data["writeTransforms"] = True self.data["writeVisibility"] = True @@ -23,7 +26,3 @@ class CreateMultiverseUsdOver(plugin.Creator): self.data["writeNamespaces"] = False self.data["numTimeSamples"] = 1 self.data["timeSamplesSpan"] = 0.0 - - # Add animation data - animation_data = lib.collect_animation_data(True) - self.data.update(animation_data) From 9f75f217a9ef0f5df5994d7cec1b5ccf079b2e22 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 09:49:13 +0100 Subject: [PATCH 184/196] Update openpype/settings/entities/schemas/projects_schema/schema_project_slack.json Co-authored-by: Roy Nieterau --- .../entities/schemas/projects_schema/schema_project_slack.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json index 9ca7c35e10..1a9804cd4f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json @@ -78,7 +78,7 @@ { "type": "number", "key": "review_upload_limit", - "label": "Upload review of maximally size (MB)", + "label": "Upload review maximum file size (MB)", "decimal": 2, "default": 50, "minimum": 0, From 5adf3966106f528bdfc38ccc2f7d1a1171efcb5b Mon Sep 17 00:00:00 2001 From: DMO Date: Fri, 25 Mar 2022 18:07:07 +0900 Subject: [PATCH 185/196] Adding "pointcache" & "animation" to the list of families, since it's supported. --- openpype/hosts/maya/plugins/load/load_multiverse_usd.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index 5361f7a85b..eafad535eb 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -16,7 +16,8 @@ from openpype.hosts.maya.api.pipeline import containerise class MultiverseUsdLoader(load.LoaderPlugin): """Load the USD by Multiverse""" - families = ["model", "usd", "usdComposition", "usdOverride"] + families = ["model", "usd", "usdComposition", "usdOverride", + "pointcache", "animation"] representations = ["usd", "usda", "usdc", "usdz", "abc"] label = "Read USD by Multiverse" From deedc893bf82248149e652422270c27c16518ac8 Mon Sep 17 00:00:00 2001 From: DMO Date: Fri, 25 Mar 2022 18:07:28 +0900 Subject: [PATCH 186/196] Lock the shape after creating to avoid deletion. --- openpype/hosts/maya/plugins/load/load_multiverse_usd.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index eafad535eb..c03f2c5d92 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -47,6 +47,9 @@ class MultiverseUsdLoader(load.LoaderPlugin): transform = cmds.listRelatives( shape, parent=True, fullPath=True)[0] + # Lock the shape node so the user cannot delete it. 
+ cmds.lockNode(shape, lock=True) + nodes = [transform, shape] self[:] = nodes From ac7b35698d8288febe55759e8f231bf238ae63cf Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 10:08:40 +0100 Subject: [PATCH 187/196] Updated Slack notification message --- openpype/modules/slack/plugins/publish/integrate_slack_api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 22cf4cdf93..10bde7d4c0 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -82,9 +82,9 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): review_file_size_MB = os.path.getsize(review_path) / 1024 / 1024 file_limit = message_profile.get("review_upload_limit", 50) if review_file_size_MB > file_limit: + message += "\nReview upload omitted because of file size." if review_path not in message: - message += "\n Review upload omitted because of " + \ - "file size, file located at: {}".format(review_path) + message += "\nFile located at: {}".format(review_path) else: publish_files.add(review_path) return message, publish_files From ed5dadc6308c5d406e6d717ddf70272fc5efff77 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Mar 2022 10:59:15 +0100 Subject: [PATCH 188/196] nuke: remove unicode type --- openpype/hosts/nuke/plugins/load/load_effects.py | 4 ++-- openpype/hosts/nuke/plugins/load/load_effects_ip.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/nuke/plugins/load/load_effects.py b/openpype/hosts/nuke/plugins/load/load_effects.py index 675ac9d46f..1ed32996e1 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects.py +++ b/openpype/hosts/nuke/plugins/load/load_effects.py @@ -333,8 +333,8 @@ class LoadEffects(load.LoaderPlugin): for key, value in input.items()} elif isinstance(input, list): return [self.byteify(element) for element in input] - elif isinstance(input, unicode): - return input.encode('utf-8') + elif isinstance(input, str): + return str(input) else: return input diff --git a/openpype/hosts/nuke/plugins/load/load_effects_ip.py b/openpype/hosts/nuke/plugins/load/load_effects_ip.py index 91f5685920..383776111f 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_effects_ip.py @@ -353,8 +353,8 @@ class LoadEffectsInputProcess(load.LoaderPlugin): for key, value in input.items()} elif isinstance(input, list): return [self.byteify(element) for element in input] - elif isinstance(input, unicode): - return input.encode('utf-8') + elif isinstance(input, str): + return str(input) else: return input From 102b65b5d478b7e66f20a62ecc8232da22e73cd6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 12:28:22 +0100 Subject: [PATCH 189/196] Added MongoDB requirements --- website/docs/dev_requirements.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/website/docs/dev_requirements.md b/website/docs/dev_requirements.md index bbf3b1fb5b..6c87054ba0 100644 --- a/website/docs/dev_requirements.md +++ b/website/docs/dev_requirements.md @@ -33,6 +33,8 @@ It can be built and ran on all common platforms. We develop and test on the foll ## Database +Database version should be at least **MongoDB 4.4**. + Pype needs site-wide installation of **MongoDB**. It should be installed on reliable server, that all workstations (and possibly render nodes) can connect. 
This server holds **Avalon** database that is at the core of everything From a029e55d3ce434fafb8c1d5984723691a413aecb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 13:47:27 +0100 Subject: [PATCH 190/196] OP-2832 - added transparency into review creator for Maya --- openpype/hosts/maya/plugins/create/create_review.py | 9 +++++++++ openpype/hosts/maya/plugins/publish/extract_playblast.py | 5 +++++ 2 files changed, 14 insertions(+) diff --git a/openpype/hosts/maya/plugins/create/create_review.py b/openpype/hosts/maya/plugins/create/create_review.py index 14a21d28ca..fbf3399f61 100644 --- a/openpype/hosts/maya/plugins/create/create_review.py +++ b/openpype/hosts/maya/plugins/create/create_review.py @@ -15,6 +15,14 @@ class CreateReview(plugin.Creator): keepImages = False isolate = False imagePlane = True + transparency = [ + "preset", + "simple", + "object sorting", + "weighted average", + "depth peeling", + "alpha cut" + ] def __init__(self, *args, **kwargs): super(CreateReview, self).__init__(*args, **kwargs) @@ -28,5 +36,6 @@ class CreateReview(plugin.Creator): data["isolate"] = self.isolate data["keepImages"] = self.keepImages data["imagePlane"] = self.imagePlane + data["transparency"] = self.transparency self.data = data diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index b233a57453..bb1ecf279d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -73,6 +73,11 @@ class ExtractPlayblast(openpype.api.Extractor): pm.currentTime(refreshFrameInt - 1, edit=True) pm.currentTime(refreshFrameInt, edit=True) + # Override transparency if requested. + transparency = instance.data.get("transparency", 0) + if transparency != 0: + preset["viewport2_options"]["transparencyAlgorithm"] = transparency + # Isolate view is requested by having objects in the set besides a # camera. 
if preset.pop("isolate_view", False) and instance.data.get("isolate"): From 91d9eb57981946a3ee948ec3058c71da7e832ac1 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 26 Mar 2022 03:36:43 +0000 Subject: [PATCH 191/196] [Automated] Bump version --- CHANGELOG.md | 33 +++++++++++++++++++++++++++++---- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 31 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f20276cbd7..abe9eaa3ce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,24 +1,49 @@ # Changelog -## [3.9.2-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.9.2-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.1...HEAD) +### 📖 Documentation + +- Docs: Added MongoDB requirements [\#2951](https://github.com/pypeclub/OpenPype/pull/2951) + **🚀 Enhancements** +- Slack: Added configurable maximum file size of review upload to Slack [\#2945](https://github.com/pypeclub/OpenPype/pull/2945) +- NewPublisher: Prepared implementation of optional pyblish plugin [\#2943](https://github.com/pypeclub/OpenPype/pull/2943) +- Workfiles: Open published workfiles [\#2925](https://github.com/pypeclub/OpenPype/pull/2925) - CI: change the version bump logic [\#2919](https://github.com/pypeclub/OpenPype/pull/2919) - Deadline: Add headless argument [\#2916](https://github.com/pypeclub/OpenPype/pull/2916) +- Nuke: Add no-audio Tag [\#2911](https://github.com/pypeclub/OpenPype/pull/2911) - Ftrack: Fill workfile in custom attribute [\#2906](https://github.com/pypeclub/OpenPype/pull/2906) +- Nuke: improving readability [\#2903](https://github.com/pypeclub/OpenPype/pull/2903) - Settings UI: Add simple tooltips for settings entities [\#2901](https://github.com/pypeclub/OpenPype/pull/2901) **🐛 Bug fixes** +- LogViewer: Don't refresh on initialization [\#2949](https://github.com/pypeclub/OpenPype/pull/2949) +- General: anatomy data with correct task short key [\#2947](https://github.com/pypeclub/OpenPype/pull/2947) +- SceneInventory: Fix imports in UI [\#2944](https://github.com/pypeclub/OpenPype/pull/2944) +- Slack: add generic exception [\#2941](https://github.com/pypeclub/OpenPype/pull/2941) +- General: Python specific vendor paths on env injection [\#2939](https://github.com/pypeclub/OpenPype/pull/2939) +- General: More fail safe delete old versions [\#2936](https://github.com/pypeclub/OpenPype/pull/2936) +- Settings UI: Collapsed of collapsible wrapper works as expected [\#2934](https://github.com/pypeclub/OpenPype/pull/2934) +- General: Don't print log record on OSError [\#2926](https://github.com/pypeclub/OpenPype/pull/2926) +- Hiero: Fix import of 'register\_event\_callback' [\#2924](https://github.com/pypeclub/OpenPype/pull/2924) - Ftrack: Missing Ftrack id after editorial publish [\#2905](https://github.com/pypeclub/OpenPype/pull/2905) -- AfterEffects: Fix rendering for single frame in DL [\#2875](https://github.com/pypeclub/OpenPype/pull/2875) **🔀 Refactored code** +- General: Move Attribute Definitions from pipeline [\#2931](https://github.com/pypeclub/OpenPype/pull/2931) +- General: Removed silo references and terminal splash [\#2927](https://github.com/pypeclub/OpenPype/pull/2927) +- General: Move pipeline constants to OpenPype [\#2918](https://github.com/pypeclub/OpenPype/pull/2918) - General: Move formatting and workfile functions [\#2914](https://github.com/pypeclub/OpenPype/pull/2914) +- General: Move remaining plugins from avalon 
[\#2912](https://github.com/pypeclub/OpenPype/pull/2912) + +**Merged pull requests:** + +- Maya: Do not pass `set` to maya commands \(fixes support for older maya versions\) [\#2932](https://github.com/pypeclub/OpenPype/pull/2932) ## [3.9.1](https://github.com/pypeclub/OpenPype/tree/3.9.1) (2022-03-18) @@ -42,6 +67,7 @@ - General: Remove forgotten use of avalon Creator [\#2885](https://github.com/pypeclub/OpenPype/pull/2885) - General: Avoid circular import [\#2884](https://github.com/pypeclub/OpenPype/pull/2884) - Fixes for attaching loaded containers \(\#2837\) [\#2874](https://github.com/pypeclub/OpenPype/pull/2874) +- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826) **🔀 Refactored code** @@ -73,11 +99,11 @@ - General: Color dialog UI fixes [\#2817](https://github.com/pypeclub/OpenPype/pull/2817) - global: letter box calculated on output as last process [\#2812](https://github.com/pypeclub/OpenPype/pull/2812) - Nuke: adding Reformat to baking mov plugin [\#2811](https://github.com/pypeclub/OpenPype/pull/2811) -- Manager: Update all to latest button [\#2805](https://github.com/pypeclub/OpenPype/pull/2805) **🐛 Bug fixes** - General: Missing time function [\#2877](https://github.com/pypeclub/OpenPype/pull/2877) +- AfterEffects: Fix rendering for single frame in DL [\#2875](https://github.com/pypeclub/OpenPype/pull/2875) - Deadline: Fix plugin name for tile assemble [\#2868](https://github.com/pypeclub/OpenPype/pull/2868) - Nuke: gizmo precollect fix [\#2866](https://github.com/pypeclub/OpenPype/pull/2866) - General: Fix hardlink for windows [\#2864](https://github.com/pypeclub/OpenPype/pull/2864) @@ -94,7 +120,6 @@ - Maya: Stop creation of reviews for Cryptomattes [\#2832](https://github.com/pypeclub/OpenPype/pull/2832) - Deadline: Remove recreated event [\#2828](https://github.com/pypeclub/OpenPype/pull/2828) - Deadline: Added missing events folder [\#2827](https://github.com/pypeclub/OpenPype/pull/2827) -- Maya: Deformer node ids validation plugin [\#2826](https://github.com/pypeclub/OpenPype/pull/2826) - Settings: Missing document with OP versions may break start of OpenPype [\#2825](https://github.com/pypeclub/OpenPype/pull/2825) - Deadline: more detailed temp file name for environment json [\#2824](https://github.com/pypeclub/OpenPype/pull/2824) - General: Host name was formed from obsolete code [\#2821](https://github.com/pypeclub/OpenPype/pull/2821) diff --git a/openpype/version.py b/openpype/version.py index 2390309e76..84ea02fd08 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.9.2-nightly.1" +__version__ = "3.9.2-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index 90e264d456..46515b4785 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.9.2-nightly.1" # OpenPype +version = "3.9.2-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 1a01be7ec094585810079c50d4755ca265355dd4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 28 Mar 2022 11:06:50 +0200 Subject: [PATCH 192/196] added option to not log invalid types in base class --- openpype/settings/entities/base_entity.py | 37 +++++++++++++++-------- 1 file changed, 24 insertions(+), 13 deletions(-) diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index 76700d605d..21ee44ae77 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -173,6 +173,10 @@ class BaseItemEntity(BaseEntity): # Entity has set `_project_override_value` (is not NOT_SET) self.had_project_override = False + self._default_log_invalid_types = True + self._studio_log_invalid_types = True + self._project_log_invalid_types = True + # Callbacks that are called on change. # - main current purspose is to register GUI callbacks self.on_change_callbacks = [] @@ -419,7 +423,7 @@ class BaseItemEntity(BaseEntity): raise InvalidValueType(self.valid_value_types, type(value), self.path) # TODO convert to private method - def _check_update_value(self, value, value_source): + def _check_update_value(self, value, value_source, log_invalid_types=True): """Validation of value on update methods. Update methods update data from currently saved settings so it is @@ -447,16 +451,17 @@ class BaseItemEntity(BaseEntity): if new_value is not NOT_SET: return new_value - # Warning log about invalid value type. - self.log.warning( - ( - "{} Got invalid value type for {} values." - " Expected types: {} | Got Type: {} | Value: \"{}\"" - ).format( - self.path, value_source, - self.valid_value_types, type(value), str(value) + if log_invalid_types: + # Warning log about invalid value type. + self.log.warning( + ( + "{} Got invalid value type for {} values." + " Expected types: {} | Got Type: {} | Value: \"{}\"" + ).format( + self.path, value_source, + self.valid_value_types, type(value), str(value) + ) ) - ) return NOT_SET def available_for_role(self, role_name=None): @@ -985,7 +990,7 @@ class ItemEntity(BaseItemEntity): return self.root_item.get_entity_from_path(path) @abstractmethod - def update_default_value(self, parent_values): + def update_default_value(self, parent_values, log_invalid_types=True): """Fill default values on startup or on refresh. Default values stored in `openpype` repository should update all items @@ -995,11 +1000,13 @@ class ItemEntity(BaseItemEntity): Args: parent_values (dict): Values of parent's item. But in case item is used as widget, `parent_values` contain value for item. + log_invalid_types (bool): Log invalid type of value. Used when + entity can have children with same keys and different types. """ pass @abstractmethod - def update_studio_value(self, parent_values): + def update_studio_value(self, parent_values, log_invalid_types=True): """Fill studio override values on startup or on refresh. Set studio value if is not set to NOT_SET, in that case studio @@ -1008,11 +1015,13 @@ class ItemEntity(BaseItemEntity): Args: parent_values (dict): Values of parent's item. But in case item is used as widget, `parent_values` contain value for item. + log_invalid_types (bool): Log invalid type of value. Used when + entity can have children with same keys and different types. 
""" pass @abstractmethod - def update_project_value(self, parent_values): + def update_project_value(self, parent_values, log_invalid_types=True): """Fill project override values on startup, refresh or project change. Set project value if is not set to NOT_SET, in that case project @@ -1021,5 +1030,7 @@ class ItemEntity(BaseItemEntity): Args: parent_values (dict): Values of parent's item. But in case item is used as widget, `parent_values` contain value for item. + log_invalid_types (bool): Log invalid type of value. Used when + entity can have children with same keys and different types. """ pass From 2d73df190a124f72dc0ba28110fb43e3c79a8949 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 28 Mar 2022 11:15:36 +0200 Subject: [PATCH 193/196] specify places when to log and not invalid values --- .../settings/entities/dict_conditional.py | 86 ++++++++++++------ .../entities/dict_immutable_keys_entity.py | 88 +++++++++++++------ .../entities/dict_mutable_keys_entity.py | 38 +++++--- openpype/settings/entities/input_entities.py | 37 +++++--- openpype/settings/entities/item_entities.py | 75 ++++++++++------ openpype/settings/entities/list_entity.py | 32 +++++-- 6 files changed, 242 insertions(+), 114 deletions(-) diff --git a/openpype/settings/entities/dict_conditional.py b/openpype/settings/entities/dict_conditional.py index 19f326aea7..88d2dc8296 100644 --- a/openpype/settings/entities/dict_conditional.py +++ b/openpype/settings/entities/dict_conditional.py @@ -518,12 +518,18 @@ class DictConditionalEntity(ItemEntity): output.update(self._current_metadata) return output - def _prepare_value(self, value): + def _prepare_value(self, value, log_invalid_types): if value is NOT_SET or self.enum_key not in value: return NOT_SET, NOT_SET enum_value = value.get(self.enum_key) if enum_value not in self.non_gui_children: + if log_invalid_types: + self.log.warning( + "{} Unknown enum key in default values: {}".format( + self.path, enum_value + ) + ) return NOT_SET, NOT_SET # Create copy of value before poping values @@ -551,22 +557,25 @@ class DictConditionalEntity(ItemEntity): return value, metadata - def update_default_value(self, value): + def update_default_value(self, value, log_invalid_types=True): """Update default values. Not an api method, should be called by parent. 
""" - value = self._check_update_value(value, "default") + self._default_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "default", log_invalid_types + ) self.has_default_value = value is not NOT_SET # TODO add value validation - value, metadata = self._prepare_value(value) + value, metadata = self._prepare_value(value, log_invalid_types) self._default_metadata = metadata if value is NOT_SET: - self.enum_entity.update_default_value(value) + self.enum_entity.update_default_value(value, log_invalid_types) for children_by_key in self.non_gui_children.values(): for child_obj in children_by_key.values(): - child_obj.update_default_value(value) + child_obj.update_default_value(value, log_invalid_types) return value_keys = set(value.keys()) @@ -574,7 +583,7 @@ class DictConditionalEntity(ItemEntity): expected_keys = set(self.non_gui_children[enum_value].keys()) expected_keys.add(self.enum_key) unknown_keys = value_keys - expected_keys - if unknown_keys: + if unknown_keys and log_invalid_types: self.log.warning( "{} Unknown keys in default values: {}".format( self.path, @@ -582,28 +591,37 @@ class DictConditionalEntity(ItemEntity): ) ) - self.enum_entity.update_default_value(enum_value) - for children_by_key in self.non_gui_children.values(): + self.enum_entity.update_default_value(enum_value, log_invalid_types) + + for enum_key, children_by_key in self.non_gui_children.items(): + _log_invalid_types = log_invalid_types + if _log_invalid_types: + _log_invalid_types = enum_key == enum_value + value_copy = copy.deepcopy(value) for key, child_obj in children_by_key.items(): child_value = value_copy.get(key, NOT_SET) - child_obj.update_default_value(child_value) + child_obj.update_default_value(child_value, _log_invalid_types) - def update_studio_value(self, value): + def update_studio_value(self, value, log_invalid_types=True): """Update studio override values. Not an api method, should be called by parent. 
""" - value = self._check_update_value(value, "studio override") - value, metadata = self._prepare_value(value) + + self._studio_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "studio override", log_invalid_types + ) + value, metadata = self._prepare_value(value, log_invalid_types) self._studio_override_metadata = metadata self.had_studio_override = metadata is not NOT_SET if value is NOT_SET: - self.enum_entity.update_studio_value(value) + self.enum_entity.update_studio_value(value, log_invalid_types) for children_by_key in self.non_gui_children.values(): for child_obj in children_by_key.values(): - child_obj.update_studio_value(value) + child_obj.update_studio_value(value, log_invalid_types) return value_keys = set(value.keys()) @@ -611,7 +629,7 @@ class DictConditionalEntity(ItemEntity): expected_keys = set(self.non_gui_children[enum_value]) expected_keys.add(self.enum_key) unknown_keys = value_keys - expected_keys - if unknown_keys: + if unknown_keys and log_invalid_types: self.log.warning( "{} Unknown keys in studio overrides: {}".format( self.path, @@ -619,28 +637,36 @@ class DictConditionalEntity(ItemEntity): ) ) - self.enum_entity.update_studio_value(enum_value) - for children_by_key in self.non_gui_children.values(): + self.enum_entity.update_studio_value(enum_value, log_invalid_types) + for enum_key, children_by_key in self.non_gui_children.items(): + _log_invalid_types = log_invalid_types + if _log_invalid_types: + _log_invalid_types = enum_key == enum_value + value_copy = copy.deepcopy(value) for key, child_obj in children_by_key.items(): child_value = value_copy.get(key, NOT_SET) - child_obj.update_studio_value(child_value) + child_obj.update_studio_value(child_value, _log_invalid_types) - def update_project_value(self, value): + def update_project_value(self, value, log_invalid_types=True): """Update project override values. Not an api method, should be called by parent. 
""" - value = self._check_update_value(value, "project override") - value, metadata = self._prepare_value(value) + + self._project_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "project override", log_invalid_types + ) + value, metadata = self._prepare_value(value, log_invalid_types) self._project_override_metadata = metadata self.had_project_override = metadata is not NOT_SET if value is NOT_SET: - self.enum_entity.update_project_value(value) + self.enum_entity.update_project_value(value, log_invalid_types) for children_by_key in self.non_gui_children.values(): for child_obj in children_by_key.values(): - child_obj.update_project_value(value) + child_obj.update_project_value(value, log_invalid_types) return value_keys = set(value.keys()) @@ -648,7 +674,7 @@ class DictConditionalEntity(ItemEntity): expected_keys = set(self.non_gui_children[enum_value]) expected_keys.add(self.enum_key) unknown_keys = value_keys - expected_keys - if unknown_keys: + if unknown_keys and log_invalid_types: self.log.warning( "{} Unknown keys in project overrides: {}".format( self.path, @@ -656,12 +682,16 @@ class DictConditionalEntity(ItemEntity): ) ) - self.enum_entity.update_project_value(enum_value) - for children_by_key in self.non_gui_children.values(): + self.enum_entity.update_project_value(enum_value, log_invalid_types) + for enum_key, children_by_key in self.non_gui_children.items(): + _log_invalid_types = log_invalid_types + if _log_invalid_types: + _log_invalid_types = enum_key == enum_value + value_copy = copy.deepcopy(value) for key, child_obj in children_by_key.items(): child_value = value_copy.get(key, NOT_SET) - child_obj.update_project_value(child_value) + child_obj.update_project_value(child_value, _log_invalid_types) def _discard_changes(self, on_change_trigger): self._ignore_child_changes = True diff --git a/openpype/settings/entities/dict_immutable_keys_entity.py b/openpype/settings/entities/dict_immutable_keys_entity.py index 060f8d522e..0209681e95 100644 --- a/openpype/settings/entities/dict_immutable_keys_entity.py +++ b/openpype/settings/entities/dict_immutable_keys_entity.py @@ -414,12 +414,16 @@ class DictImmutableKeysEntity(ItemEntity): return value, metadata - def update_default_value(self, value): + def update_default_value(self, value, log_invalid_types=True): """Update default values. Not an api method, should be called by parent. 
""" - value = self._check_update_value(value, "default") + + self._default_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "default", log_invalid_types + ) self.has_default_value = value is not NOT_SET # TODO add value validation value, metadata = self._prepare_value(value) @@ -427,13 +431,13 @@ class DictImmutableKeysEntity(ItemEntity): if value is NOT_SET: for child_obj in self.non_gui_children.values(): - child_obj.update_default_value(value) + child_obj.update_default_value(value, log_invalid_types) return value_keys = set(value.keys()) expected_keys = set(self.non_gui_children) unknown_keys = value_keys - expected_keys - if unknown_keys: + if unknown_keys and log_invalid_types: self.log.warning( "{} Unknown keys in default values: {}".format( self.path, @@ -443,27 +447,31 @@ class DictImmutableKeysEntity(ItemEntity): for key, child_obj in self.non_gui_children.items(): child_value = value.get(key, NOT_SET) - child_obj.update_default_value(child_value) + child_obj.update_default_value(child_value, log_invalid_types) - def update_studio_value(self, value): + def update_studio_value(self, value, log_invalid_types=True): """Update studio override values. Not an api method, should be called by parent. """ - value = self._check_update_value(value, "studio override") + + self._studio_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "studio override", log_invalid_types + ) value, metadata = self._prepare_value(value) self._studio_override_metadata = metadata self.had_studio_override = metadata is not NOT_SET if value is NOT_SET: for child_obj in self.non_gui_children.values(): - child_obj.update_studio_value(value) + child_obj.update_studio_value(value, log_invalid_types) return value_keys = set(value.keys()) expected_keys = set(self.non_gui_children) unknown_keys = value_keys - expected_keys - if unknown_keys: + if unknown_keys and log_invalid_types: self.log.warning( "{} Unknown keys in studio overrides: {}".format( self.path, @@ -472,27 +480,31 @@ class DictImmutableKeysEntity(ItemEntity): ) for key, child_obj in self.non_gui_children.items(): child_value = value.get(key, NOT_SET) - child_obj.update_studio_value(child_value) + child_obj.update_studio_value(child_value, log_invalid_types) - def update_project_value(self, value): + def update_project_value(self, value, log_invalid_types=True): """Update project override values. Not an api method, should be called by parent. 
""" - value = self._check_update_value(value, "project override") + + self._project_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "project override", log_invalid_types + ) value, metadata = self._prepare_value(value) self._project_override_metadata = metadata self.had_project_override = metadata is not NOT_SET if value is NOT_SET: for child_obj in self.non_gui_children.values(): - child_obj.update_project_value(value) + child_obj.update_project_value(value, log_invalid_types) return value_keys = set(value.keys()) expected_keys = set(self.non_gui_children) unknown_keys = value_keys - expected_keys - if unknown_keys: + if unknown_keys and log_invalid_types: self.log.warning( "{} Unknown keys in project overrides: {}".format( self.path, @@ -502,7 +514,7 @@ class DictImmutableKeysEntity(ItemEntity): for key, child_obj in self.non_gui_children.items(): child_value = value.get(key, NOT_SET) - child_obj.update_project_value(child_value) + child_obj.update_project_value(child_value, log_invalid_types) def _discard_changes(self, on_change_trigger): self._ignore_child_changes = True @@ -694,37 +706,48 @@ class RootsDictEntity(DictImmutableKeysEntity): self._metadata_are_modified = False self._current_metadata = {} - def update_default_value(self, value): + def update_default_value(self, value, log_invalid_types=True): """Update default values. Not an api method, should be called by parent. """ - value = self._check_update_value(value, "default") + + self._default_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "default", log_invalid_types + ) value, _ = self._prepare_value(value) self._default_value = value self._default_metadata = {} self.has_default_value = value is not NOT_SET - def update_studio_value(self, value): + def update_studio_value(self, value, log_invalid_types=True): """Update studio override values. Not an api method, should be called by parent. """ - value = self._check_update_value(value, "studio override") + + self._studio_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "studio override", log_invalid_types + ) value, _ = self._prepare_value(value) self._studio_value = value self._studio_override_metadata = {} self.had_studio_override = value is not NOT_SET - def update_project_value(self, value): + def update_project_value(self, value, log_invalid_types=True): """Update project override values. Not an api method, should be called by parent. """ - value = self._check_update_value(value, "project override") + self._project_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "project override", log_invalid_types + ) value, _metadata = self._prepare_value(value) self._project_value = value @@ -886,37 +909,48 @@ class SyncServerSites(DictImmutableKeysEntity): self._metadata_are_modified = False self._current_metadata = {} - def update_default_value(self, value): + def update_default_value(self, value, log_invalid_types=True): """Update default values. Not an api method, should be called by parent. 
""" - value = self._check_update_value(value, "default") + + self._default_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "default", log_invalid_types + ) value, _ = self._prepare_value(value) self._default_value = value self._default_metadata = {} self.has_default_value = value is not NOT_SET - def update_studio_value(self, value): + def update_studio_value(self, value, log_invalid_types=True): """Update studio override values. Not an api method, should be called by parent. """ - value = self._check_update_value(value, "studio override") + + self._studio_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "studio override", log_invalid_types + ) value, _ = self._prepare_value(value) self._studio_value = value self._studio_override_metadata = {} self.had_studio_override = value is not NOT_SET - def update_project_value(self, value): + def update_project_value(self, value, log_invalid_types=True): """Update project override values. Not an api method, should be called by parent. """ - value = self._check_update_value(value, "project override") + self._project_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "project override", log_invalid_types + ) value, _metadata = self._prepare_value(value) self._project_value = value diff --git a/openpype/settings/entities/dict_mutable_keys_entity.py b/openpype/settings/entities/dict_mutable_keys_entity.py index 6b9c0bc7ed..a0c93b97a7 100644 --- a/openpype/settings/entities/dict_mutable_keys_entity.py +++ b/openpype/settings/entities/dict_mutable_keys_entity.py @@ -393,11 +393,15 @@ class DictMutableKeysEntity(EndpointEntity): value = self.value_on_not_set using_values_from_state = False + log_invalid_types = True if state is OverrideState.PROJECT: + log_invalid_types = self._project_log_invalid_types using_values_from_state = using_project_overrides elif state is OverrideState.STUDIO: + log_invalid_types = self._studio_log_invalid_types using_values_from_state = using_studio_overrides elif state is OverrideState.DEFAULTS: + log_invalid_types = self._default_log_invalid_types using_values_from_state = using_default_values new_value = copy.deepcopy(value) @@ -437,11 +441,11 @@ class DictMutableKeysEntity(EndpointEntity): if not label: label = metadata_labels.get(new_key) - child_entity.update_default_value(_value) + child_entity.update_default_value(_value, log_invalid_types) if using_project_overrides: - child_entity.update_project_value(_value) + child_entity.update_project_value(_value, log_invalid_types) elif using_studio_overrides: - child_entity.update_studio_value(_value) + child_entity.update_studio_value(_value, log_invalid_types) if label: children_label_by_id[child_entity.id] = label @@ -598,8 +602,11 @@ class DictMutableKeysEntity(EndpointEntity): metadata[key] = value.pop(key) return value, metadata - def update_default_value(self, value): - value = self._check_update_value(value, "default") + def update_default_value(self, value, log_invalid_types=True): + self._default_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "default", log_invalid_types + ) has_default_value = value is not NOT_SET if has_default_value: for required_key in self.required_keys: @@ -611,15 +618,21 @@ class DictMutableKeysEntity(EndpointEntity): self._default_value = value self._default_metadata = metadata - def update_studio_value(self, value): - value = self._check_update_value(value, "studio override") + def update_studio_value(self, 
value, log_invalid_types=True): + self._studio_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "studio override", log_invalid_types + ) value, metadata = self._prepare_value(value) self._studio_override_value = value self._studio_override_metadata = metadata self.had_studio_override = value is not NOT_SET - def update_project_value(self, value): - value = self._check_update_value(value, "project override") + def update_project_value(self, value, log_invalid_types=True): + self._project_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "project override", log_invalid_types + ) value, metadata = self._prepare_value(value) self._project_override_value = value self._project_override_metadata = metadata @@ -686,9 +699,12 @@ class DictMutableKeysEntity(EndpointEntity): if not self._can_remove_from_project_override: return + log_invalid_types = True if self._has_studio_override: + log_invalid_types = self._studio_log_invalid_types value = self._studio_override_value elif self.has_default_value: + log_invalid_types = self._default_log_invalid_types value = self._default_value else: value = self.value_on_not_set @@ -709,9 +725,9 @@ class DictMutableKeysEntity(EndpointEntity): for _key, _value in new_value.items(): new_key = self._convert_to_regex_valid_key(_key) child_entity = self._add_key(new_key) - child_entity.update_default_value(_value) + child_entity.update_default_value(_value, log_invalid_types) if self._has_studio_override: - child_entity.update_studio_value(_value) + child_entity.update_studio_value(_value, log_invalid_types) label = metadata_labels.get(_key) if label: diff --git a/openpype/settings/entities/input_entities.py b/openpype/settings/entities/input_entities.py index 7512d7bfcc..3dcd238672 100644 --- a/openpype/settings/entities/input_entities.py +++ b/openpype/settings/entities/input_entities.py @@ -90,18 +90,27 @@ class EndpointEntity(ItemEntity): def require_restart(self): return self.has_unsaved_changes - def update_default_value(self, value): - value = self._check_update_value(value, "default") + def update_default_value(self, value, log_invalid_types=True): + self._default_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "default", log_invalid_types + ) self._default_value = value self.has_default_value = value is not NOT_SET - def update_studio_value(self, value): - value = self._check_update_value(value, "studio override") + def update_studio_value(self, value, log_invalid_types=True): + self._studio_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "studio override", log_invalid_types + ) self._studio_override_value = value self.had_studio_override = bool(value is not NOT_SET) - def update_project_value(self, value): - value = self._check_update_value(value, "project override") + def update_project_value(self, value, log_invalid_types=True): + self._project_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "project override", log_invalid_types + ) self._project_override_value = value self.had_project_override = bool(value is not NOT_SET) @@ -590,22 +599,26 @@ class RawJsonEntity(InputEntity): metadata[key] = value.pop(key) return value, metadata - def update_default_value(self, value): - value = self._check_update_value(value, "default") + def update_default_value(self, value, log_invalid_types=True): + value = self._check_update_value(value, "default", log_invalid_types) self.has_default_value = value 
is not NOT_SET value, metadata = self._prepare_value(value) self._default_value = value self.default_metadata = metadata - def update_studio_value(self, value): - value = self._check_update_value(value, "studio override") + def update_studio_value(self, value, log_invalid_types=True): + value = self._check_update_value( + value, "studio override", log_invalid_types + ) self.had_studio_override = value is not NOT_SET value, metadata = self._prepare_value(value) self._studio_override_value = value self.studio_override_metadata = metadata - def update_project_value(self, value): - value = self._check_update_value(value, "project override") + def update_project_value(self, value, log_invalid_types=True): + value = self._check_update_value( + value, "project override", log_invalid_types + ) self.had_project_override = value is not NOT_SET value, metadata = self._prepare_value(value) self._project_override_value = value diff --git a/openpype/settings/entities/item_entities.py b/openpype/settings/entities/item_entities.py index 9c6f428b97..4cba0b42d9 100644 --- a/openpype/settings/entities/item_entities.py +++ b/openpype/settings/entities/item_entities.py @@ -173,14 +173,17 @@ class PathEntity(ItemEntity): self._ignore_missing_defaults = ignore_missing_defaults self.child_obj.set_override_state(state, ignore_missing_defaults) - def update_default_value(self, value): - self.child_obj.update_default_value(value) + def update_default_value(self, value, log_invalid_types=True): + self._default_log_invalid_types = log_invalid_types + self.child_obj.update_default_value(value, log_invalid_types) - def update_project_value(self, value): - self.child_obj.update_project_value(value) + def update_project_value(self, value, log_invalid_types=True): + self._studio_log_invalid_types = log_invalid_types + self.child_obj.update_project_value(value, log_invalid_types) - def update_studio_value(self, value): - self.child_obj.update_studio_value(value) + def update_studio_value(self, value, log_invalid_types=True): + self._project_log_invalid_types = log_invalid_types + self.child_obj.update_studio_value(value, log_invalid_types) def _discard_changes(self, *args, **kwargs): self.child_obj.discard_changes(*args, **kwargs) @@ -472,9 +475,9 @@ class ListStrictEntity(ItemEntity): self._has_project_override = False - def _check_update_value(self, value, value_type): + def _check_update_value(self, value, value_type, log_invalid_types=True): value = super(ListStrictEntity, self)._check_update_value( - value, value_type + value, value_type, log_invalid_types ) if value is NOT_SET: return value @@ -484,15 +487,16 @@ class ListStrictEntity(ItemEntity): if value_len == child_len: return value - self.log.warning( - ( - "{} Amount of strict list items in {} values is" - " not same as expected. Expected {} items. Got {} items. {}" - ).format( - self.path, value_type, - child_len, value_len, str(value) + if log_invalid_types: + self.log.warning( + ( + "{} Amount of strict list items in {} values is" + " not same as expected. Expected {} items. Got {} items. 
{}" + ).format( + self.path, value_type, + child_len, value_len, str(value) + ) ) - ) if value_len < child_len: # Fill missing values with NOT_SET @@ -504,36 +508,51 @@ class ListStrictEntity(ItemEntity): value.pop(child_len) return value - def update_default_value(self, value): - value = self._check_update_value(value, "default") + def update_default_value(self, value, log_invalid_types=True): + self._default_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "default", log_invalid_types + ) self.has_default_value = value is not NOT_SET if value is NOT_SET: for child_obj in self.children: - child_obj.update_default_value(value) + child_obj.update_default_value(value, log_invalid_types) else: for idx, item_value in enumerate(value): - self.children[idx].update_default_value(item_value) + self.children[idx].update_default_value( + item_value, log_invalid_types + ) - def update_studio_value(self, value): - value = self._check_update_value(value, "studio override") + def update_studio_value(self, value, log_invalid_types=True): + self._studio_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "studio override", log_invalid_types + ) if value is NOT_SET: for child_obj in self.children: - child_obj.update_studio_value(value) + child_obj.update_studio_value(value, log_invalid_types) else: for idx, item_value in enumerate(value): - self.children[idx].update_studio_value(item_value) + self.children[idx].update_studio_value( + item_value, log_invalid_types + ) - def update_project_value(self, value): - value = self._check_update_value(value, "project override") + def update_project_value(self, value, log_invalid_types=True): + self._project_log_invalid_types = log_invalid_types + value = self._check_update_value( + value, "project override", log_invalid_types + ) if value is NOT_SET: for child_obj in self.children: - child_obj.update_project_value(value) + child_obj.update_project_value(value, log_invalid_types) else: for idx, item_value in enumerate(value): - self.children[idx].update_project_value(item_value) + self.children[idx].update_project_value( + item_value, log_invalid_types + ) def reset_callbacks(self): super(ListStrictEntity, self).reset_callbacks() diff --git a/openpype/settings/entities/list_entity.py b/openpype/settings/entities/list_entity.py index 0268c208bb..5d6a64b3ea 100644 --- a/openpype/settings/entities/list_entity.py +++ b/openpype/settings/entities/list_entity.py @@ -325,16 +325,24 @@ class ListEntity(EndpointEntity): for item in value: child_obj = self._add_new_item() - child_obj.update_default_value(item) + child_obj.update_default_value( + item, self._default_log_invalid_types + ) if self._override_state is OverrideState.PROJECT: if self.had_project_override: - child_obj.update_project_value(item) + child_obj.update_project_value( + item, self._project_log_invalid_types + ) elif self.had_studio_override: - child_obj.update_studio_value(item) + child_obj.update_studio_value( + item, self._studio_log_invalid_types + ) elif self._override_state is OverrideState.STUDIO: if self.had_studio_override: - child_obj.update_studio_value(item) + child_obj.update_studio_value( + item, self._studio_log_invalid_types + ) for child_obj in self.children: child_obj.set_override_state( @@ -466,16 +474,24 @@ class ListEntity(EndpointEntity): for item in value: child_obj = self._add_new_item() - child_obj.update_default_value(item) + child_obj.update_default_value( + item, self._default_log_invalid_types + ) if 
self._override_state is OverrideState.PROJECT: if self.had_project_override: - child_obj.update_project_value(item) + child_obj.update_project_value( + item, self._project_log_invalid_types + ) elif self.had_studio_override: - child_obj.update_studio_value(item) + child_obj.update_studio_value( + item, self._studio_log_invalid_types + ) elif self._override_state is OverrideState.STUDIO: if self.had_studio_override: - child_obj.update_studio_value(item) + child_obj.update_studio_value( + item, self._studio_log_invalid_types + ) child_obj.set_override_state( self._override_state, self._ignore_missing_defaults From 5acdf2ab4f8b6cf81510e2893d9b7b18befe852f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 28 Mar 2022 11:22:49 +0200 Subject: [PATCH 194/196] fix line length --- openpype/settings/entities/item_entities.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/item_entities.py b/openpype/settings/entities/item_entities.py index 4cba0b42d9..3b756e4ede 100644 --- a/openpype/settings/entities/item_entities.py +++ b/openpype/settings/entities/item_entities.py @@ -490,8 +490,8 @@ class ListStrictEntity(ItemEntity): if log_invalid_types: self.log.warning( ( - "{} Amount of strict list items in {} values is" - " not same as expected. Expected {} items. Got {} items. {}" + "{} Amount of strict list items in {} values is not same" + " as expected. Expected {} items. Got {} items. {}" ).format( self.path, value_type, child_len, value_len, str(value) From 75f3762e48ab8dd472e8c1c5ebeabe2aac476f02 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 28 Mar 2022 12:34:39 +0200 Subject: [PATCH 195/196] Added current commit of acre That commit was fixing missing license file --- poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index ee7b839b8d..ed2b0dd3c2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,7 +11,7 @@ develop = false type = "git" url = "https://github.com/pypeclub/acre.git" reference = "master" -resolved_reference = "55a7c331e6dc5f81639af50ca4a8cc9d73e9273d" +resolved_reference = "126f7a188cfe36718f707f42ebbc597e86aa86c3" [[package]] name = "aiohttp" From 719b184d8085e6cea54cdae06c12fcf19ac5d5e4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 10:12:37 +0200 Subject: [PATCH 196/196] Added default for review_upload_limit for Slack --- openpype/settings/defaults/project_settings/slack.json | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/settings/defaults/project_settings/slack.json b/openpype/settings/defaults/project_settings/slack.json index d77b8c2208..c156fed08e 100644 --- a/openpype/settings/defaults/project_settings/slack.json +++ b/openpype/settings/defaults/project_settings/slack.json @@ -11,6 +11,7 @@ "task_types": [], "tasks": [], "subsets": [], + "review_upload_limit": 50.0, "channel_messages": [] } ]
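
A note on the pattern running through PATCH 192-194 above: the new log_invalid_types flag lets an entity validate one stored value against several alternative child layouts while only warning for the layout that is actually in use. The conditional dict keeps a full set of children for every enum branch, so the same key can legitimately exist with a different value type under a branch that is not selected, and only the selected branch should report a type mismatch. The sketch below illustrates that guard with a stand-in NOT_SET sentinel, logger and function signature; it is not the OpenPype entity API, and the real _check_update_value also attempts a type conversion before giving up.

import logging

NOT_SET = object()
log = logging.getLogger("settings.entities")


def check_update_value(path, value, value_source, valid_types,
                       log_invalid_types=True):
    """Return value when its type is acceptable, otherwise NOT_SET."""
    # Unset values and values of an expected type pass through unchanged.
    if value is NOT_SET or isinstance(value, tuple(valid_types)):
        return value

    # Only the currently selected branch complains; the other branches
    # pass log_invalid_types=False and drop the value silently.
    if log_invalid_types:
        log.warning(
            '%s Got invalid value type for %s values.'
            ' Expected types: %s | Got Type: %s | Value: "%s"',
            path, value_source, valid_types, type(value), value
        )
    return NOT_SET


# Selected branch warns, non-selected branch stays quiet.
check_update_value("project_settings/publish", 25, "default", [dict])
check_update_value("project_settings/publish", 25, "default", [dict],
                   log_invalid_types=False)

In DictConditionalEntity this is what the enum_key == enum_value check above achieves: children under a non-selected type are still updated from the stored value, but their mismatched types are no longer reported.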