From 893a84526c8308c12cdbc7f915470735a1d8faa0 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Fri, 18 Mar 2022 19:15:07 +0900 Subject: [PATCH 001/171] add new render product for 3delight --- openpype/hosts/maya/api/lib_renderproducts.py | 75 +++++++++++++++++++ 1 file changed, 75 insertions(+) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 0c34998874..69c4eae18e 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1107,6 +1107,81 @@ class RenderProductsRenderman(ARenderProducts): return new_files +class RenderProducts3Delight(ARenderProducts): + """Expected files for Renderman renderer. + + Warning: + This is very rudimentary and needs more love and testing. + """ + + renderer = "_3delight" + + def get_render_products(self): + """Get all AOVs. + + See Also: + :func:`ARenderProducts.get_render_products()` + + """ + cameras = [ + self.sanitize_camera_name(c) + for c in self.get_renderable_cameras() + ] + + if not cameras: + cameras = [ + self.sanitize_camera_name( + self.get_renderable_cameras()[0]) + ] + products = [] + + default_ext = "exr" + + nodes = cmds.listConnections('dlRenderGlobals1') + assert len(nodes) == 1 + node = nodes[0] + + num_layers = cmds.getAttr( + '{}.layerOutput'.format(node), + size=True) + for i in range(num_layers): + output = cmds.getAttr( + '{}.layerOutput[{}]'.format(node, i)) + if not output: + continue + + output_var = cmds.getAttr( + '{}.layerOutputVariables[{}]'.format(node, i)) + output_var_tokens = layerOutputVariable.split('|') + name = output_var_tokens[4] + + for camera in cameras: + product = RenderProduct(productName=name, + ext=default_ext, + camera=camera) + products.append(product) + + return products + + def get_files(self, product, camera): + """Get expected files. 
+ + See Also: + :func:`ARenderProducts.get_files()` + """ + files = super(RenderProducts3Delight, self).get_files(product, camera) + + layer_data = self.layer_data + new_files = [] + for file in files: + new_file = "{}/{}/{}".format( + layer_data["sceneName"], layer_data["layerName"], file + ) + new_files.append(new_file) + + return new_files + + class AOVError(Exception): """Custom exception for determining AOVs.""" From e85ef95b443825badf6b30e598712ba24cd6aeb0 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Fri, 18 Mar 2022 20:00:57 +0900 Subject: [PATCH 002/171] improve 3delight render product class and return it --- openpype/hosts/maya/api/lib_renderproducts.py | 35 ++++++------------- 1 file changed, 11 insertions(+), 24 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 69c4eae18e..97aa8e8957 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -77,6 +77,7 @@ IMAGE_PREFIXES = { "arnold": "defaultRenderGlobals.imageFilePrefix", "renderman": "rmanGlobals.imageFileFormat", "redshift": "defaultRenderGlobals.imageFilePrefix", + "_3delight": "defaultRenderGlobals.imageFilePrefix", } @@ -154,7 +155,8 @@ def get(layer, render_instance=None): "arnold": RenderProductsArnold, "vray": RenderProductsVray, "redshift": RenderProductsRedshift, - "renderman": RenderProductsRenderman + "renderman": RenderProductsRenderman, + "_3delight": RenderProducts3Delight }.get(renderer_name.lower(), None) if renderer is None: raise UnsupportedRendererException( @@ -1137,13 +1139,16 @@ class RenderProducts3Delight(ARenderProducts): default_ext = "exr" - nodes = cmds.listConnections('dlRenderGlobals1') + nodes = cmds.listConnections( + 'dlRenderGlobals1', + type='dlRenderSettings') assert len(nodes) == 1 node = nodes[0] num_layers = cmds.getAttr( - '{}.layerOutput'.format(node), + '{}.layerOutputVariables'.format(node), size=True) + assert num_layers > 0 for i in range(num_layers): output = cmds.getAttr( '{}.layerOutput[{}]'.format(node, i)) @@ -1152,35 +1157,17 @@ class RenderProducts3Delight(ARenderProducts): output_var = cmds.getAttr( '{}.layerOutputVariables[{}]'.format(node, i)) - output_var_tokens = layerOutputVariable.split('|') - name = output_var_tokens[4] + output_var_tokens = output_var.split('|') + aov_name = output_var_tokens[4] for camera in cameras: - product = RenderProduct(productName=name, + product = RenderProduct(productName=aov_name, ext=default_ext, camera=camera) products.append(product) return products - def get_files(self, product, camera): - """Get expected files. 
- - See Also: - :func:`ARenderProducts.get_files()` - """ - files = super(RenderProducts3Delight, self).get_files(product, camera) - - layer_data = self.layer_data - new_files = [] - for file in files: - new_file = "{}/{}/{}".format( - layer_data["sceneName"], layer_data["layerName"], file - ) - new_files.append(new_file) - - return new_files - class AOVError(Exception): """Custom exception for determining AOVs.""" From 3ec621a8efa97bc33c63f33fd87a62a76fa7d0c2 Mon Sep 17 00:00:00 2001 From: Bo Zhou Date: Fri, 18 Mar 2022 20:02:29 +0900 Subject: [PATCH 003/171] add method _set_3delight_settings to render creator --- .../maya/plugins/create/create_render.py | 40 ++++++++++++++++++- 1 file changed, 38 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 9002ae3876..c06fe8a76d 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -77,7 +77,8 @@ class CreateRender(plugin.Creator): 'vray': 'vraySettings.fileNamePrefix', 'arnold': 'defaultRenderGlobals.imageFilePrefix', 'renderman': 'defaultRenderGlobals.imageFilePrefix', - 'redshift': 'defaultRenderGlobals.imageFilePrefix' + 'redshift': 'defaultRenderGlobals.imageFilePrefix', + '_3delight': 'defaultRenderGlobals.imageFilePrefix' } _image_prefixes = { @@ -85,7 +86,8 @@ class CreateRender(plugin.Creator): 'vray': 'maya///', 'arnold': 'maya///{aov_separator}', # noqa 'renderman': 'maya///{aov_separator}', - 'redshift': 'maya///' # noqa + 'redshift': 'maya///', # noqa + '_3delight': 'maya///' # noqa } _aov_chars = { @@ -462,6 +464,8 @@ class CreateRender(plugin.Creator): asset["data"].get("resolutionHeight")) self._set_global_output_settings() + if renderer == "_3delight": + self._set_3delight_settings(asset) def _set_vray_settings(self, asset): # type: (dict) -> None @@ -507,6 +511,38 @@ class CreateRender(plugin.Creator): "{}.height".format(node), asset["data"].get("resolutionHeight")) + def _set_3delight_settings(self, asset): + # type: (dict) -> None + """Sets important settings for 3Delight.""" + nodes = cmds.listConnections( + 'dlRenderGlobals1', + type='dlRenderSettings') + assert len(nodes) == 1 + node = nodes[0] + + # frame range + start_frame = int(cmds.playbackOptions(query=True, + animationStartTime=True)) + end_frame = int(cmds.playbackOptions(query=True, + animationEndTime=True)) + + cmds.setAttr( + "{}.startFrame".format(node), start_frame) + cmds.setAttr( + "{}.endFrame".format(node), end_frame) + + # outputOptionsDefault + cmds.setAttr( + "{}.outputOptionsDefault".format(node), 2) + + # resolution + cmds.setAttr( + "defaultResolution.width", + asset["data"].get("resolutionWidth")) + cmds.setAttr( + "defaultResolution.height", + asset["data"].get("resolutionHeight")) + @staticmethod def _set_global_output_settings(): # enable animation From 87c697b51bd42655ef3936187240b4a2681efaac Mon Sep 17 00:00:00 2001 From: DMO Date: Wed, 22 Jun 2022 13:52:02 +0900 Subject: [PATCH 004/171] Rewrote large portions of the file to handle different nodes that may contain texture files. 
Currently supported are: - file (maya) - aiImage (Arnold) - RedshiftNormalMap (Redshift) - dlTexture (3Delight) - dlTriplanar (3Delight) --- .../publish/collect_multiverse_look.py | 250 +++++++++++------- 1 file changed, 148 insertions(+), 102 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py b/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py index edf40a27a6..4bd2476feb 100644 --- a/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py @@ -21,37 +21,68 @@ COLOUR_SPACES = ['sRGB', 'linear', 'auto'] MIPMAP_EXTENSIONS = ['tdl'] -def get_look_attrs(node): - """Returns attributes of a node that are important for the look. +class _NodeTypeAttrib(object): + """docstring for _NodeType""" - These are the "changed" attributes (those that have edits applied - in the current scene). + def __init__(self, name, fname, computed_fname=None, colour_space=None): + self.name = name + self.fname = fname + self.computed_fname = computed_fname or fname + self.colour_space = colour_space or "colorSpace" - Returns: - list: Attribute names to extract + def get_fname(self, node): + return "{}.{}".format(node, self.fname) + def get_computed_fname(self, node): + return "{}.{}".format(node, self.computed_fname) + + def get_colour_space(self, node): + return "{}.{}".format(node, self.colour_space) + + def __str__(self): + return "_NodeTypeAttrib(name={}, fname={}, " + "computed_fname={}, colour_space={})".format( + self.name, self.fname, self.computed_fname, self.colour_space) + + +NODETYPES = { + "file": [_NodeTypeAttrib("file", "fileTextureName", + "computedFileTextureNamePattern")], + "aiImage": [_NodeTypeAttrib("aiImage", "filename")], + "RedshiftNormalMap": [_NodeTypeAttrib("RedshiftNormalMap", "tex0")], + "dlTexture": [_NodeTypeAttrib("dlTexture", "textureFile", + None, "textureFile_meta_colorspace")], + "dlTriplanar": [_NodeTypeAttrib("dlTriplanar", "colorTexture", + None, "colorTexture_meta_colorspace"), + _NodeTypeAttrib("dlTriplanar", "floatTexture", + None, "floatTexture_meta_colorspace"), + _NodeTypeAttrib("dlTriplanar", "heightTexture", + None, "heightTexture_meta_colorspace")] +} + + +def get_file_paths_for_node(node): + """Gets all the file paths in this node. + + Returns all filepaths that this node references. Some node types only + reference one, but others, like dlTriplanar, can reference 3. + + Args: + node (str): Name of the Maya node + + Returns + list(str): A list with all evaluated maya attributes for filepaths. 
""" - # When referenced get only attributes that are "changed since file open" - # which includes any reference edits, otherwise take *all* user defined - # attributes - is_referenced = cmds.referenceQuery(node, isNodeReferenced=True) - result = cmds.listAttr(node, userDefined=True, - changedSinceFileOpen=is_referenced) or [] - # `cbId` is added when a scene is saved, ignore by default - if "cbId" in result: - result.remove("cbId") + node_type = cmds.nodeType(node) + if node_type not in NODETYPES: + return [] - # For shapes allow render stat changes - if cmds.objectType(node, isAType="shape"): - attrs = cmds.listAttr(node, changedSinceFileOpen=True) or [] - for attr in attrs: - if attr in SHAPE_ATTRS: - result.append(attr) - elif attr.startswith('ai'): - result.append(attr) - - return result + paths = [] + for node_type_attr in NODETYPES[node_type]: + fname = cmds.getAttr("{}.{}".format(node, node_type_attr.fname)) + paths.append(fname) + return paths def node_uses_image_sequence(node): @@ -69,13 +100,29 @@ def node_uses_image_sequence(node): """ # useFrameExtension indicates an explicit image sequence - node_path = get_file_node_path(node).lower() + paths = get_file_node_paths(node) + paths = [path.lower() for path in paths] # The following tokens imply a sequence patterns = ["", "", "", "u_v", ""] lower = texture_pattern.lower() if any(pattern in lower for pattern in patterns): - return texture_pattern + return [texture_pattern] - if cmds.nodeType(node) == 'aiImage': - return cmds.getAttr('{0}.filename'.format(node)) - if cmds.nodeType(node) == 'RedshiftNormalMap': - return cmds.getAttr('{}.tex0'.format(node)) - - # otherwise use fileTextureName - return cmds.getAttr('{0}.fileTextureName'.format(node)) + return get_file_paths_for_node(node) def get_file_node_files(node): @@ -181,15 +222,13 @@ def get_file_node_files(node): """ - path = get_file_node_path(node) - path = cmds.workspace(expandName=path) + paths = get_file_node_paths(node) + paths = [cmds.workspace(expandName=path) for path in paths] if node_uses_image_sequence(node): - glob_pattern = seq_to_glob(path) - return glob.glob(glob_pattern) - elif os.path.exists(path): - return [path] + globs = [glob.glob(seq_to_glob(path)) for path in paths] + return globs else: - return [] + return list(filter(lambda x: os.path.exists(x), paths)) def get_mipmap(fname): @@ -211,6 +250,11 @@ def is_mipmap(fname): class CollectMultiverseLookData(pyblish.api.InstancePlugin): """Collect Multiverse Look + Searches through the overrides finding all material overrides. From there + it extracts the shading group and then finds all texture files in the + shading group network. It also checks for mipmap versions of texture files + and adds them to the resouces to get published. + """ order = pyblish.api.CollectorOrder + 0.2 @@ -258,12 +302,20 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin): shadingGroup), "members": list()} # The SG may reference files, add those too! - history = cmds.listHistory(shadingGroup) - files = cmds.ls(history, type="file", long=True) + history = cmds.listHistory( + shadingGroup, allConnections=True) + + # We need to iterate over node_types since `cmds.ls` may + # error out if we don't have the appropriate plugin loaded. 
+ files = [] + for node_type in NODETYPES.keys(): + files += cmds.ls(history, + type=node_type, + long=True) for f in files: resources = self.collect_resource(f, publishMipMap) - instance.data["resources"].append(resources) + instance.data["resources"] += resources elif isinstance(matOver, multiverse.MaterialSourceUsdPath): # TODO: Handle this later. @@ -284,69 +336,63 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin): dict """ - self.log.debug("processing: {}".format(node)) - if cmds.nodeType(node) not in ["file", "aiImage", "RedshiftNormalMap"]: - self.log.error( - "Unsupported file node: {}".format(cmds.nodeType(node))) + node_type = cmds.nodeType(node) + self.log.debug("processing: {}/{}".format(node, node_type)) + + if not node_type in NODETYPES: + self.log.error("Unsupported file node: {}".format(node_type)) raise AssertionError("Unsupported file node") - if cmds.nodeType(node) == 'file': - self.log.debug(" - file node") - attribute = "{}.fileTextureName".format(node) - computed_attribute = "{}.computedFileTextureNamePattern".format( - node) - elif cmds.nodeType(node) == 'aiImage': - self.log.debug("aiImage node") - attribute = "{}.filename".format(node) - computed_attribute = attribute - elif cmds.nodeType(node) == 'RedshiftNormalMap': - self.log.debug("RedshiftNormalMap node") - attribute = "{}.tex0".format(node) - computed_attribute = attribute + resources = [] + for node_type_attr in NODETYPES[node_type]: + fname_attrib = node_type_attr.get_fname(node) + computed_fname_attrib = node_type_attr.get_computed_fname(node) + colour_space_attrib = node_type_attr.get_colour_space(node) - source = cmds.getAttr(attribute) - self.log.info(" - file source: {}".format(source)) - color_space_attr = "{}.colorSpace".format(node) - try: - color_space = cmds.getAttr(color_space_attr) - except ValueError: - # node doesn't have colorspace attribute + source = cmds.getAttr(fname_attrib) color_space = "Raw" - # Compare with the computed file path, e.g. the one with the - # pattern in it, to generate some logging information about this - # difference - # computed_attribute = "{}.computedFileTextureNamePattern".format(node) - computed_source = cmds.getAttr(computed_attribute) - if source != computed_source: - self.log.debug("Detected computed file pattern difference " - "from original pattern: {0} " - "({1} -> {2})".format(node, - source, - computed_source)) + try: + color_space = cmds.getAttr(colour_space_attrib) + except ValueError: + # node doesn't have colorspace attribute, use "Raw" from before + pass + # Compare with the computed file path, e.g. 
the one with the + # pattern in it, to generate some logging information about this + # difference + # computed_attribute = "{}.computedFileTextureNamePattern".format(node) + computed_source = cmds.getAttr(computed_fname_attrib) + if source != computed_source: + self.log.debug("Detected computed file pattern difference " + "from original pattern: {0} " + "({1} -> {2})".format(node, + source, + computed_source)) - # We replace backslashes with forward slashes because V-Ray - # can't handle the UDIM files with the backslashes in the - # paths as the computed patterns - source = source.replace("\\", "/") + # We replace backslashes with forward slashes because V-Ray + # can't handle the UDIM files with the backslashes in the + # paths as the computed patterns + source = source.replace("\\", "/") - files = get_file_node_files(node) - files = self.handle_files(files, publishMipMap) - if len(files) == 0: - self.log.error("No valid files found from node `%s`" % node) + files = get_file_node_files(node) + files = self.handle_files(files, publishMipMap) + if len(files) == 0: + self.log.error("No valid files found from node `%s`" % node) - self.log.info("collection of resource done:") - self.log.info(" - node: {}".format(node)) - self.log.info(" - attribute: {}".format(attribute)) - self.log.info(" - source: {}".format(source)) - self.log.info(" - file: {}".format(files)) - self.log.info(" - color space: {}".format(color_space)) + self.log.info("collection of resource done:") + self.log.info(" - node: {}".format(node)) + self.log.info(" - attribute: {}".format(fname_attrib)) + self.log.info(" - source: {}".format(source)) + self.log.info(" - file: {}".format(files)) + self.log.info(" - color space: {}".format(color_space)) - # Define the resource - return {"node": node, - "attribute": attribute, - "source": source, # required for resources - "files": files, - "color_space": color_space} # required for resources + # Define the resource + resource = {"node": node, + "attribute": fname_attrib, + "source": source, # required for resources + "files": files, + "color_space": color_space} # required for resources + resources.append(resource) + return resources def handle_files(self, files, publishMipMap): """This will go through all the files and make sure that they are From 2007c759478b08b002a0d0a91816c5cf0e769c3f Mon Sep 17 00:00:00 2001 From: DMO Date: Fri, 24 Jun 2022 15:22:31 +0900 Subject: [PATCH 005/171] Reverting `mvUsd` family back to `usd` so that other software can create standard Usd files and still be imported directly by Multiverse. 
--- openpype/hosts/maya/plugins/create/create_multiverse_usd.py | 2 +- openpype/hosts/maya/plugins/load/load_multiverse_usd.py | 2 +- openpype/hosts/maya/plugins/load/load_reference.py | 3 ++- openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py | 2 +- openpype/plugins/publish/integrate_new.py | 1 - 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py index 5290d5143f..8cd76b5f40 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd.py @@ -6,7 +6,7 @@ class CreateMultiverseUsd(plugin.Creator): name = "mvUsdMain" label = "Multiverse USD Asset" - family = "mvUsd" + family = "usd" icon = "cubes" def __init__(self, *args, **kwargs): diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index 3350dc6ac9..76d7c306a0 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -16,7 +16,7 @@ from openpype.hosts.maya.api.pipeline import containerise class MultiverseUsdLoader(load.LoaderPlugin): """Read USD data in a Multiverse Compound""" - families = ["model", "mvUsd", "mvUsdComposition", "mvUsdOverride", + families = ["model", "usd", "mvUsdComposition", "mvUsdOverride", "pointcache", "animation"] representations = ["usd", "usda", "usdc", "usdz", "abc"] diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index e4355ed3d4..0a2640014c 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -25,7 +25,8 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): "rig", "camerarig", "xgen", - "staticMesh"] + "staticMesh", + "mvLook"] representations = ["ma", "abc", "fbx", "mb"] label = "Reference" diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 3654be7b34..b1aaf9d9ba 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -26,7 +26,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor): label = "Extract Multiverse USD Asset" hosts = ["maya"] - families = ["mvUsd"] + families = ["usd"] scene_type = "usd" file_formats = ["usd", "usda", "usdz"] diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 2471105250..f5ca125189 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -110,7 +110,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "staticMesh", "skeletalMesh", "mvLook", - "mvUsd", "mvUsdComposition", "mvUsdOverride", "simpleUnrealTexture" From ab1c86a96dda5eb92281e810027d8952c91b0a31 Mon Sep 17 00:00:00 2001 From: DMO Date: Tue, 28 Jun 2022 15:40:45 +0900 Subject: [PATCH 006/171] Do not lock nodes, it's not needed and it makes things much harder. 
--- openpype/hosts/maya/plugins/load/load_multiverse_usd.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index 76d7c306a0..24b97db365 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -47,9 +47,6 @@ class MultiverseUsdLoader(load.LoaderPlugin): transform = cmds.listRelatives( shape, parent=True, fullPath=True)[0] - # Lock the shape node so the user cannot delete it. - cmds.lockNode(shape, lock=True) - nodes = [transform, shape] self[:] = nodes From c5fd2a970eab972b87468d5c3f7e796b3c11c2e1 Mon Sep 17 00:00:00 2001 From: DMO Date: Tue, 28 Jun 2022 17:13:18 +0900 Subject: [PATCH 007/171] I was incorrectly globing into an array of arrays instead of single long array. --- .../hosts/maya/plugins/publish/collect_multiverse_look.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py b/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py index 4bd2476feb..b11dbaeba6 100644 --- a/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py @@ -225,7 +225,9 @@ def get_file_node_files(node): paths = get_file_node_paths(node) paths = [cmds.workspace(expandName=path) for path in paths] if node_uses_image_sequence(node): - globs = [glob.glob(seq_to_glob(path)) for path in paths] + globs = [] + for path in paths: + globs += glob.glob(seq_to_glob(path)) return globs else: return list(filter(lambda x: os.path.exists(x), paths)) From 044946e8c5f65a8a4ae308862b2ed5d57494e501 Mon Sep 17 00:00:00 2001 From: DMO Date: Tue, 28 Jun 2022 20:47:28 +0900 Subject: [PATCH 008/171] Strip UsdComp namespaces. Write attributes from mvLook. Formatting. 
--- .../hosts/maya/plugins/create/create_multiverse_usd_comp.py | 2 +- openpype/hosts/maya/plugins/publish/collect_look.py | 3 ++- .../hosts/maya/plugins/publish/collect_multiverse_look.py | 2 +- .../hosts/maya/plugins/publish/extract_multiverse_look.py | 2 +- .../hosts/maya/plugins/publish/validate_mvlook_contents.py | 5 +++-- 5 files changed, 8 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py index ed466a8068..a92969eb9a 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py @@ -17,7 +17,7 @@ class CreateMultiverseUsdComp(plugin.Creator): # Order of `fileFormat` must match extract_multiverse_usd_comp.py self.data["fileFormat"] = ["usda", "usd"] - self.data["stripNamespaces"] = False + self.data["stripNamespaces"] = True self.data["mergeTransformAndShape"] = False self.data["flattenContent"] = False self.data["writeAsCompoundLayers"] = False diff --git a/openpype/hosts/maya/plugins/publish/collect_look.py b/openpype/hosts/maya/plugins/publish/collect_look.py index e8ada57f8f..28c57e04b5 100644 --- a/openpype/hosts/maya/plugins/publish/collect_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_look.py @@ -440,7 +440,8 @@ class CollectLook(pyblish.api.InstancePlugin): for res in self.collect_resources(n): instance.data["resources"].append(res) - self.log.info("Collected resources: {}".format(instance.data["resources"])) + self.log.info("Collected resources: {}".format( + instance.data["resources"])) # Log warning when no relevant sets were retrieved for the look. if ( diff --git a/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py b/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py index b11dbaeba6..4c50e4df27 100644 --- a/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py @@ -256,7 +256,7 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin): it extracts the shading group and then finds all texture files in the shading group network. It also checks for mipmap versions of texture files and adds them to the resouces to get published. 
- + """ order = pyblish.api.CollectorOrder + 0.2 diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py index 82e2b41929..b97314d5a1 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py @@ -73,7 +73,7 @@ class ExtractMultiverseLook(openpype.api.Extractor): "writeAll": False, "writeTransforms": False, "writeVisibility": False, - "writeAttributes": False, + "writeAttributes": True, "writeMaterials": True, "writeVariants": False, "writeVariantsDefinition": False, diff --git a/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py b/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py index bac2c030c8..a755ec1da9 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_mvlook_contents.py @@ -80,13 +80,14 @@ class ValidateMvLookContents(pyblish.api.InstancePlugin): def is_or_has_mipmap(self, fname, files): ext = os.path.splitext(fname)[1][1:] if ext in MIPMAP_EXTENSIONS: - self.log.debug("Is a mipmap '{}'".format(fname)) + self.log.debug(" - Is a mipmap '{}'".format(fname)) return True for colour_space in COLOUR_SPACES: for mipmap_ext in MIPMAP_EXTENSIONS: mipmap_fname = '.'.join([fname, colour_space, mipmap_ext]) if mipmap_fname in files: - self.log.debug("Has a mipmap '{}'".format(fname)) + self.log.debug( + " - Has a mipmap '{}'".format(mipmap_fname)) return True return False From ef2f4f3e4551e38a0c92461794a8fc1fed45717b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 29 Jun 2022 16:45:26 +0200 Subject: [PATCH 009/171] Remove fallback to context `handleStart` and `handleEnd` if instance `frameStart` and `frameEnd` matches context --- .../maya/plugins/publish/collect_instances.py | 43 +++++-------------- 1 file changed, 11 insertions(+), 32 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_instances.py b/openpype/hosts/maya/plugins/publish/collect_instances.py index ad1f794680..6c6819f0a2 100644 --- a/openpype/hosts/maya/plugins/publish/collect_instances.py +++ b/openpype/hosts/maya/plugins/publish/collect_instances.py @@ -74,13 +74,6 @@ class CollectInstances(pyblish.api.ContextPlugin): objectset = cmds.ls("*.id", long=True, type="objectSet", recursive=True, objectsOnly=True) - ctx_frame_start = context.data['frameStart'] - ctx_frame_end = context.data['frameEnd'] - ctx_handle_start = context.data['handleStart'] - ctx_handle_end = context.data['handleEnd'] - ctx_frame_start_handle = context.data['frameStartHandle'] - ctx_frame_end_handle = context.data['frameEndHandle'] - context.data['objectsets'] = objectset for objset in objectset: @@ -156,34 +149,20 @@ class CollectInstances(pyblish.api.ContextPlugin): # Append start frame and end frame to label if present if "frameStart" and "frameEnd" in data: - # if frame range on maya set is the same as full shot range - # adjust the values to match the asset data - if (ctx_frame_start_handle == data["frameStart"] - and ctx_frame_end_handle == data["frameEnd"]): # noqa: W503, E501 - data["frameStartHandle"] = ctx_frame_start_handle - data["frameEndHandle"] = ctx_frame_end_handle - data["frameStart"] = ctx_frame_start - data["frameEnd"] = ctx_frame_end - data["handleStart"] = ctx_handle_start - data["handleEnd"] = ctx_handle_end - - # if there are user values on start and end frame not matching - # the asset, use them - - else: - if "handles" in data: - 
data["handleStart"] = data["handles"] - data["handleEnd"] = data["handles"] - else: - data["handleStart"] = 0 - data["handleEnd"] = 0 - - data["frameStartHandle"] = data["frameStart"] - data["handleStart"] # noqa: E501 - data["frameEndHandle"] = data["frameEnd"] + data["handleEnd"] # noqa: E501 - + # Backwards compatibility for 'handles' data if "handles" in data: + data["handleStart"] = data["handles"] + data["handleEnd"] = data["handles"] data.pop('handles') + # Take handles from context if not set locally on the instance + for key in ["handleStart", "handleEnd"]: + if key not in data: + data[key] = context.data[key] + + data["frameStartHandle"] = data["frameStart"] - data["handleStart"] # noqa: E501 + data["frameEndHandle"] = data["frameEnd"] + data["handleEnd"] # noqa: E501 + label += " [{0}-{1}]".format(int(data["frameStartHandle"]), int(data["frameEndHandle"])) From cb82e66d01c506bd9038acec6904a0a7b39cbb37 Mon Sep 17 00:00:00 2001 From: DMO Date: Wed, 6 Jul 2022 11:30:47 +0900 Subject: [PATCH 010/171] Naming suffixes can be overridden in project settings, lets print out what they *actually* are. --- .../publish/validate_transform_naming_suffix.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py b/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py index 6f5ff24b9c..51561b35e1 100644 --- a/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py +++ b/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py @@ -20,6 +20,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin): - nurbsSurface: _NRB - locator: _LOC - null/group: _GRP + Suffices can also be overriden by project settings. .. warning:: This grabs the first child shape as a reference and doesn't use the @@ -43,6 +44,13 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin): ALLOW_IF_NOT_IN_SUFFIX_TABLE = True + @classmethod + def get_table_for_invalid(cls): + ss = [] + for k,v in cls.SUFFIX_NAMING_TABLE.items(): + ss.append(" - {}: {}".format(k,", ".join(v))) + return "\n".join(ss) + @staticmethod def is_valid_name(node_name, shape_type, SUFFIX_NAMING_TABLE, ALLOW_IF_NOT_IN_SUFFIX_TABLE): @@ -105,5 +113,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin): """ invalid = self.get_invalid(instance) if invalid: + valid = self.get_table_for_invalid() raise ValueError("Incorrectly named geometry " - "transforms: {0}".format(invalid)) + "transforms: {0}, accepted suffixes are: " + "\n{1}".format(invalid, valid)) From 56f13bf388ad42a4a34e533fe5ea529ffd7f6668 Mon Sep 17 00:00:00 2001 From: DMO Date: Wed, 6 Jul 2022 12:39:40 +0900 Subject: [PATCH 011/171] Move alembic publishing options around. Adding more publishing options to the schema and defaults. 
--- .../defaults/project_settings/maya.json | 22 +++--- .../schemas/schema_maya_publish.json | 72 ++++++++++++------- 2 files changed, 62 insertions(+), 32 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index cdd3a62d00..82d5ab20cb 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -401,14 +401,6 @@ "optional": true, "active": true }, - "ExtractAlembic": { - "enabled": true, - "families": [ - "pointcache", - "model", - "vrayproxy" - ] - }, "ValidateRigContents": { "enabled": false, "optional": true, @@ -561,6 +553,20 @@ "optional": true, "active": true, "bake_attributes": [] + }, + "ExtractAlembic": { + "enabled": true, + "families": [ + "pointcache", + "model", + "vrayproxy" + ] + }, + "ExtractAnimation": { + "enabled": true + }, + "ExtractMultiverseUsdAnim": { + "enabled": true } }, "load": { diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 41b681d893..e77bb1a6f8 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -504,30 +504,6 @@ "label": "ValidateUniqueNames" } ] - }, - { - "type": "label", - "label": "Extractors" - }, - { - "type": "dict", - "collapsible": true, - "key": "ExtractAlembic", - "label": "Extract Alembic", - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "key": "families", - "label": "Families", - "type": "list", - "object_type": "text" - } - ] } ] }, @@ -686,6 +662,54 @@ "is_list": true } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "ExtractAlembic", + "label": "Extract Alembic", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "ExtractAnimation", + "label": "Extract Alembic Animation", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "ExtractMultiverseUsdAnim", + "label": "Extract USD Animation", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] } ] } From 53f56853d9519b96115ab92e10aa610d776cd5be Mon Sep 17 00:00:00 2001 From: DMO Date: Wed, 6 Jul 2022 12:43:24 +0900 Subject: [PATCH 012/171] Subclass ExtractMultiverseUsd as ExtractMultiverseUsdAnim to enable animation-specific USD extraction. Adding noNormals to settings to allow skipping normals for meshes that will be sub-divided later. 
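For reference, a minimal sketch (illustrative only, with a made-up frame range, root path and output file) of the flag the new writeNormals toggle ends up driving: when writeNormals is off, the extractor passes the standard Alembic noNormals job argument, which a direct export would use roughly like this, assuming the AbcExport plugin is available:

    from maya import cmds

    cmds.loadPlugin("AbcExport", quiet=True)

    write_normals = False  # value the creator stores on the instance
    job = ("-frameRange 1001 1100 -uvWrite -worldSpace "
           "-root |char_GRP -file /tmp/char.abc")
    if not write_normals:
        # 'noNormals' is the standard Alembic option name used by the extractor.
        job = "-noNormals " + job
    cmds.AbcExport(j=job)
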
--- .../maya/plugins/create/create_animation.py | 3 + .../maya/plugins/publish/extract_animation.py | 4 +- .../plugins/publish/extract_multiverse_usd.py | 70 ++++++++++++++++--- 3 files changed, 65 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_animation.py b/openpype/hosts/maya/plugins/create/create_animation.py index 5cd1f7090a..b9e1f3b7a2 100644 --- a/openpype/hosts/maya/plugins/create/create_animation.py +++ b/openpype/hosts/maya/plugins/create/create_animation.py @@ -42,3 +42,6 @@ class CreateAnimation(plugin.Creator): # Default to not send to farm. self.data["farm"] = False self.data["priority"] = 50 + + # Default to write normals. + self.data["writeNormals"] = True diff --git a/openpype/hosts/maya/plugins/publish/extract_animation.py b/openpype/hosts/maya/plugins/publish/extract_animation.py index abe5ed3bf5..7ce80b4679 100644 --- a/openpype/hosts/maya/plugins/publish/extract_animation.py +++ b/openpype/hosts/maya/plugins/publish/extract_animation.py @@ -63,7 +63,9 @@ class ExtractAnimation(openpype.api.Extractor): "selection": True, "worldSpace": instance.data.get("worldSpace", True), "writeColorSets": instance.data.get("writeColorSets", False), - "writeFaceSets": instance.data.get("writeFaceSets", False) + "writeFaceSets": instance.data.get("writeFaceSets", False), + # 'noNormals' is the standard alembic option name. + "noNormals": not instance.data.get("writeNormals", True) } if not instance.data.get("includeParentHierarchy", True): diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index b1aaf9d9ba..2a99dffa8d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -2,6 +2,7 @@ import os import six from maya import cmds +from maya import mel import openpype.api from openpype.hosts.maya.api.lib import maintained_selection @@ -87,7 +88,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor): return { "stripNamespaces": False, "mergeTransformAndShape": False, - "writeAncestors": True, + "writeAncestors": False, "flattenParentXforms": False, "writeSparseOverrides": False, "useMetaPrimPath": False, @@ -147,6 +148,13 @@ class ExtractMultiverseUsd(openpype.api.Extractor): return options + def get_default_options(self): + self.log.info("ExtractMultiverseUsd get_default_options") + return self.default_options + + def filter_members(self, members): + return members + def process(self, instance): # Load plugin first cmds.loadPlugin("MultiverseForMaya", quiet=True) @@ -161,7 +169,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor): file_path = file_path.replace('\\', '/') # Parse export options - options = self.default_options + options = self.get_default_options() options = self.parse_overrides(instance, options) self.log.info("Export options: {0}".format(options)) @@ -170,27 +178,35 @@ class ExtractMultiverseUsd(openpype.api.Extractor): with maintained_selection(): members = instance.data("setMembers") - self.log.info('Collected object {}'.format(members)) + self.log.info('Collected objects: {}'.format(members)) + members = self.filter_members(members) + if not members: + self.log.error('No members!') + return + self.log.info(' - filtered: {}'.format(members)) import multiverse time_opts = None frame_start = instance.data['frameStart'] frame_end = instance.data['frameEnd'] - handle_start = instance.data['handleStart'] - handle_end = instance.data['handleEnd'] - step = 
instance.data['step'] - fps = instance.data['fps'] if frame_end != frame_start: time_opts = multiverse.TimeOptions() time_opts.writeTimeRange = True + + handle_start = instance.data['handleStart'] + handle_end = instance.data['handleEnd'] + time_opts.frameRange = ( frame_start - handle_start, frame_end + handle_end) - time_opts.frameIncrement = step - time_opts.numTimeSamples = instance.data["numTimeSamples"] - time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"] - time_opts.framePerSecond = fps + time_opts.frameIncrement = instance.data['step'] + time_opts.numTimeSamples = instance.data.get( + 'numTimeSamples', options['numTimeSamples']) + time_opts.timeSamplesSpan = instance.data.get( + 'timeSamplesSpan', options['timeSamplesSpan']) + time_opts.framePerSecond = instance.data.get( + 'fps', mel.eval('currentTimeUnitToFPS()')) asset_write_opts = multiverse.AssetWriteOptions(time_opts) options_discard_keys = { @@ -203,11 +219,15 @@ class ExtractMultiverseUsd(openpype.api.Extractor): 'step', 'fps' } + self.log.debug("Write Options:") for key, value in options.items(): if key in options_discard_keys: continue + + self.log.debug(" - {}={}".format(key, value)) setattr(asset_write_opts, key, value) + self.log.info('WriteAsset: {} / {}'.format(file_path, members)) multiverse.WriteAsset(file_path, members, asset_write_opts) if "representations" not in instance.data: @@ -223,3 +243,31 @@ class ExtractMultiverseUsd(openpype.api.Extractor): self.log.info("Extracted instance {} to {}".format( instance.name, file_path)) + + +class ExtractMultiverseUsdAnim(ExtractMultiverseUsd): + """Extractor for Multiverse USD Animation Sparse Cache data. + + This will extract the sparse cache data from the scene and generate a + USD file with all the animation data. + + Upon publish a .usd sparse cache will be written. + """ + label = "Extract Multiverse USD Animation Sparse Cache" + families = ["animation"] + + def get_default_options(self): + anim_options = self.default_options + anim_options["writeSparseOverrides"] = True + anim_options["stripNamespaces"] = True + return anim_options + + def filter_members(self, members): + out_set = next((i for i in members if i.endswith("out_SET")), None) + + if out_set is None: + self.log.warning("Expecting out_SET") + return None + + members = cmds.ls(cmds.sets(out_set, query=True), long=True) + return members From cdf165ed79c7b9f3939d3b199d0757f9bac74e5b Mon Sep 17 00:00:00 2001 From: DMO Date: Wed, 6 Jul 2022 12:43:46 +0900 Subject: [PATCH 013/171] The mvLook should skip namespaces. --- openpype/hosts/maya/plugins/publish/extract_multiverse_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py index b97314d5a1..8a5d7e4e53 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py @@ -78,7 +78,7 @@ class ExtractMultiverseLook(openpype.api.Extractor): "writeVariants": False, "writeVariantsDefinition": False, "writeActiveState": False, - "writeNamespaces": False, + "writeNamespaces": True, "numTimeSamples": 1, "timeSamplesSpan": 0.0 } From 559210ffb6ffacd68be8227d56b90c036a70c184 Mon Sep 17 00:00:00 2001 From: DMO Date: Wed, 6 Jul 2022 12:44:24 +0900 Subject: [PATCH 014/171] If there's a mixed-attribute on the node, this will fail, just skip it with a warning. 
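As a condensed, standalone illustration of the guard added here (node and attribute names are hypothetical), the collector now checks whether an attribute is a multi before calling getAttr on it and skips it if so:

    from maya import cmds

    def collect_plain_attributes(node, attrs):
        """Return {attr: value}, skipping multi (array) and message attributes."""
        values = {}
        for attr in attrs:
            if not cmds.attributeQuery(attr, node=node, exists=True):
                continue
            if cmds.attributeQuery(attr, node=node, multi=True):
                # Mirrors the new check: multi attributes are not supported yet.
                continue
            plug = "{}.{}".format(node, attr)
            if cmds.getAttr(plug, type=True) == "message":
                continue
            values[attr] = cmds.getAttr(plug)
        return values
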
--- openpype/hosts/maya/plugins/publish/collect_look.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/collect_look.py b/openpype/hosts/maya/plugins/publish/collect_look.py index 28c57e04b5..40edd3b2f4 100644 --- a/openpype/hosts/maya/plugins/publish/collect_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_look.py @@ -549,6 +549,11 @@ class CollectLook(pyblish.api.InstancePlugin): if not cmds.attributeQuery(attr, node=node, exists=True): continue attribute = "{}.{}".format(node, attr) + # We don't support mixed-type attributes yet. + if cmds.attributeQuery(attr, node=node, multi=True): + self.log.warning("Attribute '{}' is mixed-type and is " + "not supported yet.".format(attribute)) + continue if cmds.getAttr(attribute, type=True) == "message": continue node_attributes[attr] = cmds.getAttr(attribute) From 77c45e2b7f110e6939b685b03c613f7feef7f718 Mon Sep 17 00:00:00 2001 From: DMO Date: Wed, 20 Jul 2022 11:04:17 +0900 Subject: [PATCH 015/171] Animation should also write out UsdAttributes. --- openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 2a99dffa8d..40dd5dfe50 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -259,6 +259,7 @@ class ExtractMultiverseUsdAnim(ExtractMultiverseUsd): def get_default_options(self): anim_options = self.default_options anim_options["writeSparseOverrides"] = True + anim_options["writeUsdAttributes"] = True anim_options["stripNamespaces"] = True return anim_options From 8ae92351a7e3e1758cca4a62e4170749ed93a914 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 16 Sep 2022 21:27:00 +0800 Subject: [PATCH 016/171] Import Reference during Publish --- openpype/hosts/maya/plugins/create/create_look.py | 3 +++ openpype/hosts/maya/plugins/publish/extract_look.py | 8 ++++++++ 2 files changed, 11 insertions(+) diff --git a/openpype/hosts/maya/plugins/create/create_look.py b/openpype/hosts/maya/plugins/create/create_look.py index 44e439fe1f..cecdf9f54d 100644 --- a/openpype/hosts/maya/plugins/create/create_look.py +++ b/openpype/hosts/maya/plugins/create/create_look.py @@ -21,6 +21,9 @@ class CreateLook(plugin.Creator): # Whether to automatically convert the textures to .tx upon publish. self.data["maketx"] = self.make_tx + # Enable users to import reference + self.data["importReference"] = False + # Enable users to force a copy. 
# - on Windows is "forceCopy" always changed to `True` because of # windows implementation of hardlinks diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 91b0da75c6..845af0d32d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -255,6 +255,14 @@ class ExtractLook(publish.Extractor): hashes = results["fileHashes"] remap = results["attrRemap"] + # Import Reference if the option is enabled + ref_import = instance.data.get("importReference", True) + if ref_import: + reference_node = cmds.ls(type="reference") + for r in reference_node: + rFile = cmds.referenceQuery(r, f=True) + cmds.file(rFile, importReference=True) + # Extract in correct render layer layer = instance.data.get("renderlayer", "defaultRenderLayer") with lib.renderlayer(layer): From 25279d276d8ef1035aea327c67b49a44ab29d647 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 28 Sep 2022 17:59:39 +0800 Subject: [PATCH 017/171] Import Reference during Publish --- .../hosts/maya/plugins/create/create_look.py | 3 - .../publish/extract_import_reference.py | 122 ++++++++++++++++++ .../maya/plugins/publish/extract_look.py | 8 -- .../defaults/project_settings/maya.json | 3 + .../schemas/schema_maya_publish.json | 16 ++- 5 files changed, 140 insertions(+), 12 deletions(-) create mode 100644 openpype/hosts/maya/plugins/publish/extract_import_reference.py diff --git a/openpype/hosts/maya/plugins/create/create_look.py b/openpype/hosts/maya/plugins/create/create_look.py index cecdf9f54d..44e439fe1f 100644 --- a/openpype/hosts/maya/plugins/create/create_look.py +++ b/openpype/hosts/maya/plugins/create/create_look.py @@ -21,9 +21,6 @@ class CreateLook(plugin.Creator): # Whether to automatically convert the textures to .tx upon publish. self.data["maketx"] = self.make_tx - # Enable users to import reference - self.data["importReference"] = False - # Enable users to force a copy. # - on Windows is "forceCopy" always changed to `True` because of # windows implementation of hardlinks diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py new file mode 100644 index 0000000000..3e44addf6c --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -0,0 +1,122 @@ +import os +import pyblish.api +from openpype.pipeline import publish, legacy_io +from openpype.settings import get_project_settings + + +def _get_project_setting(): + project_name = legacy_io.active_project() + project_setting = get_project_settings(project_name) + maya_enabled = ( + project_setting["maya"]["publish"]["ImportReference"]["enabled"] + ) + use_published = ( + project_setting["deadline"]["publish"]["MayaSubmitDeadline"]["use_published"] + ) + if maya_enabled != use_published: + return False + else: + return use_published + + +class ImportReference(publish.Extractor): + """ + + Extract the scene with imported reference. 
+ The temp scene with imported reference is + published for rendering if this extractor is activated + + """ + + label = "Import Reference" + order = pyblish.api.ExtractorOrder - 0.48 + hosts = ["maya"] + families = ["renderlayer", "workfile"] + active = _get_project_setting() + optional = True + tmp_format = "_tmp" + + def process(self, instance): + from maya import cmds + + ext_mapping = ( + instance.context.data["project_settings"]["maya"]["ext_mapping"] + ) + if ext_mapping: + self.log.info("Looking in settings for scene type ...") + # use extension mapping for first family found + for family in self.families: + try: + self.scene_type = ext_mapping[family] + self.log.info( + "Using {} as scene type".format(self.scene_type)) + break + except KeyError: + # no preset found + pass + + _scene_type = ("mayaAscii" + if self.scene_type == "ma" + else "mayaBinary") + + dir_path = self.staging_dir(instance) + # named the file with imported reference + tmp_name = instance.name + self.tmp_format + m_ref_fname = "{0}.{1}".format(tmp_name, self.scene_type) + + m_ref_path = os.path.join(dir_path, m_ref_fname) + + self.log.info("Performing extraction..") + current = cmds.file(query=True, sceneName=True) + cmds.file(save=True, force=True + + self.log.info("Performing extraction..") + + # create temp scene with imported + # reference for rendering + reference_node = cmds.ls(type="reference") + for r in reference_node: + rFile = cmds.referenceQuery(r, f=True) + if r == "sharedReferenceNode": + cmds.file(rFile, removeReference = True, referenceNode=r) + cmds.file(rFile, importReference=True) + + if current.endswith(self.scene_type): + current_path = os.path.dirname(current) + tmp_path_name = os.path.join(current_path, tmp_name) + cmds.file(rename=tmp_path_name) + cmds.file(save=True, force=True) + + with lib.maintained_selection(): + cmds.select(all=True, noExpand=True) + cmds.file(m_ref_path, + force=True, + typ = _scene_type, + exportSelected=True, + channels=True, + constraints=True, + shader=True, + expressions=True, + constructionHistory=True + ) + + if "files" not in instance.data: + instance.data["files"] = [] + + instance.data["files"].append(m_ref_path) + + if instance.data.get("representations") is None: + instance.data["representations"] = [] + + ref_representation = { + "name": self.scene_type, + "ext": self.scene_type, + "files": os.path.basename(m_ref_fname), + "stagingDir": dir_path + } + instance.data["representations"].append(ref_representation) + + self.log.info("Extracted instance '%s' to : '%s'" % (tmp_name, + m_ref_path)) + + cmds.file(current, open=True) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 845af0d32d..91b0da75c6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -255,14 +255,6 @@ class ExtractLook(publish.Extractor): hashes = results["fileHashes"] remap = results["attrRemap"] - # Import Reference if the option is enabled - ref_import = instance.data.get("importReference", True) - if ref_import: - reference_node = cmds.ls(type="reference") - for r in reference_node: - rFile = cmds.referenceQuery(r, f=True) - cmds.file(rFile, importReference=True) - # Extract in correct render layer layer = instance.data.get("renderlayer", "defaultRenderLayer") with lib.renderlayer(layer): diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 8643297f02..76be3c393e 
100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -762,6 +762,9 @@ } } }, + "ImportReference": { + "enabled": false + }, "ExtractMayaSceneRaw": { "enabled": true, "add_for_families": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 53247f6bd4..4cf84795b5 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -62,7 +62,7 @@ } ] }, - { + { "type": "dict", "collapsible": true, "key": "ValidateFrameRange", @@ -807,6 +807,20 @@ "type": "schema", "name": "schema_maya_capture" }, + { + "type": "dict", + "collapsible": true, + "key": "ImportReference", + "label": "Extract Scenes with Imported Reference", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] + }, { "type": "dict", "collapsible": true, From e5cf620575d535eb5415f8f0b22858c954fb5eb2 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 28 Sep 2022 18:06:50 +0800 Subject: [PATCH 018/171] Import Reference during Publish --- .../hosts/maya/plugins/publish/extract_import_reference.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 3e44addf6c..ac425ee083 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -1,4 +1,5 @@ import os + import pyblish.api from openpype.pipeline import publish, legacy_io from openpype.settings import get_project_settings @@ -11,8 +12,8 @@ def _get_project_setting(): project_setting["maya"]["publish"]["ImportReference"]["enabled"] ) use_published = ( - project_setting["deadline"]["publish"]["MayaSubmitDeadline"]["use_published"] - ) + project_setting["deadline"]["publish"]["MayaSubmitDeadline"]["use_published"] # noqa + ) if maya_enabled != use_published: return False else: From 06b982fd435c2549266cf400e47cbc3c3bb404f0 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 28 Sep 2022 18:09:21 +0800 Subject: [PATCH 019/171] Import Reference during Publish --- .../hosts/maya/plugins/publish/extract_import_reference.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index ac425ee083..feacf072c3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -1,5 +1,5 @@ -import os +import os import pyblish.api from openpype.pipeline import publish, legacy_io from openpype.settings import get_project_settings @@ -10,7 +10,7 @@ def _get_project_setting(): project_setting = get_project_settings(project_name) maya_enabled = ( project_setting["maya"]["publish"]["ImportReference"]["enabled"] - ) + ) use_published = ( project_setting["deadline"]["publish"]["MayaSubmitDeadline"]["use_published"] # noqa ) From 89b7699c21f9c0b55e5a4cdc2755bc242b7e12e9 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 28 Sep 2022 18:10:48 +0800 Subject: [PATCH 020/171] Import Reference during Publish --- 
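Illustrative note (a rough sketch, not applied by this patch): the heart of the new extractor introduced in this series is importing every loaded file reference into a temporary copy of the scene before it is saved out for rendering, roughly:

    from maya import cmds

    # Import all loaded references so the exported temp scene is self-contained.
    for ref_node in cmds.ls(type="reference"):
        if ref_node == "sharedReferenceNode":
            # The extractor handles this node separately (its reference is removed).
            continue
        ref_file = cmds.referenceQuery(ref_node, filename=True)
        cmds.file(ref_file, importReference=True)
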
.../hosts/maya/plugins/publish/extract_import_reference.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index feacf072c3..eb225127f0 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -1,6 +1,7 @@ - import os + import pyblish.api + from openpype.pipeline import publish, legacy_io from openpype.settings import get_project_settings From 2dc186f1911c2b5eebfa98e513a89831a51389ec Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 28 Sep 2022 18:16:05 +0800 Subject: [PATCH 021/171] Import Reference during Publish --- .../hosts/maya/plugins/publish/extract_import_reference.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index eb225127f0..56b757f2b4 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -1,5 +1,3 @@ -import os - import pyblish.api from openpype.pipeline import publish, legacy_io @@ -39,6 +37,7 @@ class ImportReference(publish.Extractor): tmp_format = "_tmp" def process(self, instance): + import os from maya import cmds ext_mapping = ( From bf7cca8586f485fb4507eef0cd050f6cd4cf481e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 28 Sep 2022 18:17:25 +0800 Subject: [PATCH 022/171] Import Reference during Publish --- .../hosts/maya/plugins/publish/extract_import_reference.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 56b757f2b4..eb225127f0 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -1,3 +1,5 @@ +import os + import pyblish.api from openpype.pipeline import publish, legacy_io @@ -37,7 +39,6 @@ class ImportReference(publish.Extractor): tmp_format = "_tmp" def process(self, instance): - import os from maya import cmds ext_mapping = ( From 93c9ec8d16404bd17fd8de7c25c3bc5829402b20 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 28 Sep 2022 18:22:52 +0800 Subject: [PATCH 023/171] Import Reference during Publish --- .../hosts/maya/plugins/publish/extract_import_reference.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index eb225127f0..8be37c91e3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -1,5 +1,7 @@ import os +from maya import cmds + import pyblish.api from openpype.pipeline import publish, legacy_io @@ -39,8 +41,6 @@ class ImportReference(publish.Extractor): tmp_format = "_tmp" def process(self, instance): - from maya import cmds - ext_mapping = ( instance.context.data["project_settings"]["maya"]["ext_mapping"] ) From 3ce41cbae8c7e0bb5b00951f8e7a9927df472da3 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 28 Sep 2022 18:33:57 +0800 Subject: [PATCH 024/171] Import Reference during Publish --- openpype/hosts/maya/plugins/publish/extract_import_reference.py | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 8be37c91e3..2f9e8516d7 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -70,7 +70,7 @@ class ImportReference(publish.Extractor): self.log.info("Performing extraction..") current = cmds.file(query=True, sceneName=True) - cmds.file(save=True, force=True + cmds.file(save=True, force=True) self.log.info("Performing extraction..") From 79e78beb61ef1c5f5bae9042a6b07db419fb79f8 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 28 Sep 2022 18:36:24 +0800 Subject: [PATCH 025/171] Import Reference during Publish --- .../hosts/maya/plugins/publish/extract_import_reference.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 2f9e8516d7..1ed109d720 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -6,6 +6,7 @@ import pyblish.api from openpype.pipeline import publish, legacy_io from openpype.settings import get_project_settings +from openpype.hosts.maya.api import lib def _get_project_setting(): @@ -80,7 +81,7 @@ class ImportReference(publish.Extractor): for r in reference_node: rFile = cmds.referenceQuery(r, f=True) if r == "sharedReferenceNode": - cmds.file(rFile, removeReference = True, referenceNode=r) + cmds.file(rFile, removeReference=True, referenceNode=r) cmds.file(rFile, importReference=True) if current.endswith(self.scene_type): @@ -93,14 +94,14 @@ class ImportReference(publish.Extractor): cmds.select(all=True, noExpand=True) cmds.file(m_ref_path, force=True, - typ = _scene_type, + typ=_scene_type, exportSelected=True, channels=True, constraints=True, shader=True, expressions=True, constructionHistory=True - ) + ) if "files" not in instance.data: instance.data["files"] = [] From 92a80eb24a4e9a849e58ef5ca57d885f9efa396a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 28 Sep 2022 18:38:08 +0800 Subject: [PATCH 026/171] Import Reference during Publish --- openpype/hosts/maya/plugins/publish/extract_import_reference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 1ed109d720..4035e8a9e6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -101,7 +101,7 @@ class ImportReference(publish.Extractor): shader=True, expressions=True, constructionHistory=True - ) + ) if "files" not in instance.data: instance.data["files"] = [] From ab7ed6becbd100c6e9f8960065bdeffc8f609b5b Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 28 Sep 2022 18:39:22 +0800 Subject: [PATCH 027/171] Import Reference during Publish --- .../hosts/maya/plugins/publish/extract_import_reference.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 4035e8a9e6..cde8f67789 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ 
b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -100,8 +100,7 @@ class ImportReference(publish.Extractor): constraints=True, shader=True, expressions=True, - constructionHistory=True - ) + constructionHistory=True) if "files" not in instance.data: instance.data["files"] = [] From ee78e9e67036308e651a9a0005b72a4d16ecce17 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 4 Oct 2022 15:45:17 +0800 Subject: [PATCH 028/171] Import Reference during Publish --- .../publish/extract_import_reference.py | 58 +++++++++---------- .../deadline/abstract_submit_deadline.py | 7 ++- .../defaults/project_settings/deadline.json | 1 + .../defaults/project_settings/maya.json | 3 - .../schema_project_deadline.json | 5 ++ .../schemas/schema_maya_publish.json | 14 ----- 6 files changed, 37 insertions(+), 51 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index cde8f67789..4cbc963cf3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -9,22 +9,16 @@ from openpype.settings import get_project_settings from openpype.hosts.maya.api import lib -def _get_project_setting(): +def _import_reference(): project_name = legacy_io.active_project() project_setting = get_project_settings(project_name) - maya_enabled = ( - project_setting["maya"]["publish"]["ImportReference"]["enabled"] + import_reference = ( + project_setting["deadline"]["publish"]["MayaSubmitDeadline"]["import_reference"] # noqa ) - use_published = ( - project_setting["deadline"]["publish"]["MayaSubmitDeadline"]["use_published"] # noqa - ) - if maya_enabled != use_published: - return False - else: - return use_published + return import_reference -class ImportReference(publish.Extractor): +class ExtractImportReference(publish.Extractor): """ Extract the scene with imported reference. 
@@ -33,12 +27,11 @@ class ImportReference(publish.Extractor): """ - label = "Import Reference" + label = "Extract Import Reference" order = pyblish.api.ExtractorOrder - 0.48 hosts = ["maya"] families = ["renderlayer", "workfile"] - active = _get_project_setting() - optional = True + active = _import_reference() tmp_format = "_tmp" def process(self, instance): @@ -54,9 +47,10 @@ class ImportReference(publish.Extractor): self.log.info( "Using {} as scene type".format(self.scene_type)) break + except KeyError: - # no preset found - pass + # set scene type to ma + self.scene_type = "ma" _scene_type = ("mayaAscii" if self.scene_type == "ma" @@ -64,6 +58,8 @@ class ImportReference(publish.Extractor): dir_path = self.staging_dir(instance) # named the file with imported reference + if instance.name == "Main": + return tmp_name = instance.name + self.tmp_format m_ref_fname = "{0}.{1}".format(tmp_name, self.scene_type) @@ -72,23 +68,20 @@ class ImportReference(publish.Extractor): self.log.info("Performing extraction..") current = cmds.file(query=True, sceneName=True) cmds.file(save=True, force=True) - - self.log.info("Performing extraction..") - # create temp scene with imported # reference for rendering reference_node = cmds.ls(type="reference") for r in reference_node: - rFile = cmds.referenceQuery(r, f=True) + ref_file = cmds.referenceQuery(r, f=True) if r == "sharedReferenceNode": - cmds.file(rFile, removeReference=True, referenceNode=r) - cmds.file(rFile, importReference=True) + cmds.file(ref_file, removeReference=True, referenceNode=r) + return + cmds.file(ref_file, importReference=True) - if current.endswith(self.scene_type): - current_path = os.path.dirname(current) - tmp_path_name = os.path.join(current_path, tmp_name) - cmds.file(rename=tmp_path_name) - cmds.file(save=True, force=True) + cmds.file(rename=m_ref_fname) + cmds.file(save=True, force=True) + tmp_filepath = cmds.file(query=True, sceneName=True) + instance.context.data["currentFile"] = tmp_filepath with lib.maintained_selection(): cmds.select(all=True, noExpand=True) @@ -104,8 +97,7 @@ class ImportReference(publish.Extractor): if "files" not in instance.data: instance.data["files"] = [] - - instance.data["files"].append(m_ref_path) + instance.data["files"].append(m_ref_fname) if instance.data.get("representations") is None: instance.data["representations"] = [] @@ -113,12 +105,14 @@ class ImportReference(publish.Extractor): ref_representation = { "name": self.scene_type, "ext": self.scene_type, - "files": os.path.basename(m_ref_fname), - "stagingDir": dir_path + "files": m_ref_fname, + "stagingDir": os.path.dirname(tmp_filepath) } + instance.data["representations"].append(ref_representation) - self.log.info("Extracted instance '%s' to : '%s'" % (tmp_name, + self.log.info("Extracted instance '%s' to : '%s'" % (m_ref_fname, m_ref_path)) + #re-open the previous scene cmds.file(current, open=True) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 512ff800ee..909a5871e3 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -400,6 +400,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): label = "Submit to Deadline" order = pyblish.api.IntegratorOrder + 0.1 + import_reference = False use_published = True asset_dependencies = False @@ -516,7 +517,6 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): published. 
""" - instance = self._instance workfile_instance = self._get_workfile_instance(instance.context) if workfile_instance is None: @@ -524,7 +524,10 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): # determine published path from Anatomy. template_data = workfile_instance.data.get("anatomyData") - rep = workfile_instance.data.get("representations")[0] + if self.import_reference: + rep = workfile_instance.data.get("representations")[1] + else: + rep = workfile_instance.data.get("representations")[0] template_data["representation"] = rep.get("name") template_data["ext"] = rep.get("ext") template_data["comment"] = None diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index a6e7b4a94a..5f0731fb0c 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -25,6 +25,7 @@ "active": true, "tile_assembler_plugin": "OpenPypeTileAssembler", "use_published": true, + "import_reference": false, "asset_dependencies": true, "priority": 50, "tile_priority": 50, diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 76be3c393e..8643297f02 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -762,9 +762,6 @@ } } }, - "ImportReference": { - "enabled": false - }, "ExtractMayaSceneRaw": { "enabled": true, "add_for_families": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index cd1741ba8b..be95c682c4 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -130,6 +130,11 @@ "key": "use_published", "label": "Use Published scene" }, + { + "type": "boolean", + "key": "import_reference", + "label": "Use Scene with Imported Reference" + }, { "type": "boolean", "key": "asset_dependencies", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 4cf84795b5..07d8f6aea0 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -807,20 +807,6 @@ "type": "schema", "name": "schema_maya_capture" }, - { - "type": "dict", - "collapsible": true, - "key": "ImportReference", - "label": "Extract Scenes with Imported Reference", - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - } - ] - }, { "type": "dict", "collapsible": true, From 6f914a001de08931f84bf1753f0d3ad3bf903593 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 4 Oct 2022 15:46:12 +0800 Subject: [PATCH 029/171] Import Reference during Publish --- openpype/hosts/maya/plugins/publish/extract_import_reference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 4cbc963cf3..fd2687995b 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -114,5 
+114,5 @@ class ExtractImportReference(publish.Extractor): self.log.info("Extracted instance '%s' to : '%s'" % (m_ref_fname, m_ref_path)) - #re-open the previous scene + # re-open the previous scene cmds.file(current, open=True) From df937a8e9ee028142ce4f56ea17c58e166b800ed Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 4 Oct 2022 16:11:26 +0800 Subject: [PATCH 030/171] Import Reference during Publish --- openpype/hosts/maya/plugins/publish/extract_import_reference.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index fd2687995b..92426e97dd 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -32,6 +32,7 @@ class ExtractImportReference(publish.Extractor): hosts = ["maya"] families = ["renderlayer", "workfile"] active = _import_reference() + optional= True tmp_format = "_tmp" def process(self, instance): From dfd30d54601939ac31f72fa417a8bb00f67c5881 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 4 Oct 2022 16:13:21 +0800 Subject: [PATCH 031/171] Import Reference during Publish --- openpype/hosts/maya/plugins/publish/extract_import_reference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 92426e97dd..193048e9dc 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -32,7 +32,7 @@ class ExtractImportReference(publish.Extractor): hosts = ["maya"] families = ["renderlayer", "workfile"] active = _import_reference() - optional= True + optional = True tmp_format = "_tmp" def process(self, instance): From 590496814df787ba33a7bb366654fe14d8da76e3 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 7 Oct 2022 22:07:15 +0800 Subject: [PATCH 032/171] Import Reference during Publish --- .../publish/extract_import_reference.py | 70 ++++++++++++------- 1 file changed, 43 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 193048e9dc..7448cf9966 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -1,9 +1,11 @@ import os +import sys from maya import cmds import pyblish.api +from openpype.lib import run_subprocess from openpype.pipeline import publish, legacy_io from openpype.settings import get_project_settings from openpype.hosts.maya.api import lib @@ -31,7 +33,7 @@ class ExtractImportReference(publish.Extractor): order = pyblish.api.ExtractorOrder - 0.48 hosts = ["maya"] families = ["renderlayer", "workfile"] - active = _import_reference() + active = True optional = True tmp_format = "_tmp" @@ -62,31 +64,48 @@ class ExtractImportReference(publish.Extractor): if instance.name == "Main": return tmp_name = instance.name + self.tmp_format - m_ref_fname = "{0}.{1}".format(tmp_name, self.scene_type) + current_name = cmds.file(query=True, sceneName=True) + ref_scene_name = "{0}.{1}".format(tmp_name, self.scene_type) - m_ref_path = os.path.join(dir_path, m_ref_fname) + reference_path = os.path.join(dir_path, ref_scene_name) self.log.info("Performing extraction..") - current = cmds.file(query=True, 
sceneName=True) - cmds.file(save=True, force=True) - # create temp scene with imported - # reference for rendering - reference_node = cmds.ls(type="reference") - for r in reference_node: - ref_file = cmds.referenceQuery(r, f=True) - if r == "sharedReferenceNode": - cmds.file(ref_file, removeReference=True, referenceNode=r) - return - cmds.file(ref_file, importReference=True) + script = ("import maya.standalone\nmaya.standalone.initialize()\n" + "cmds.file('{current_name}', open=True, force=True)\n" + "reference_node = cmds.ls(type='reference')\n" + "for ref in reference_node:\n" + "\tref_file = cmds.referenceQuery(ref, f=True)\n" + "\tif ref == 'sharedReferenceNode':\n" + "\t\tcmds.file(ref_file, removeReference=True, referenceNode=ref)\n" + "\telse:\n" + "\t\tcmds.file(ref_file, importReference=True)\n" + "try:\n" + "\tcmds.file(rename='{ref_scene_name}')\n" + "except SyntaxError:\n" + "\tcmds.file(rename='{ref_scene_name}')\n" + "cmds.file(save=True, force=True)\n") - cmds.file(rename=m_ref_fname) - cmds.file(save=True, force=True) - tmp_filepath = cmds.file(query=True, sceneName=True) + mayapy_exe = os.path.join(os.getenv("MAYA_LOCATION"), "bin", "mayapy") + if sys.platform == "windows": + mayapy_exe = mayapy_exe + ".exe" + + subprocess_args = [ + mayapy_exe, + "-c", + script.replace("\n", ";") + ] + try: + out = run_subprocess(subprocess_args) + except Exception: + self.log.error("Import reference failed", exc_info=True) + raise + + proj_file_dir = os.path.dirname(current_name) + tmp_filepath = os.path.join(proj_file_dir, ref_scene_name) instance.context.data["currentFile"] = tmp_filepath - with lib.maintained_selection(): cmds.select(all=True, noExpand=True) - cmds.file(m_ref_path, + cmds.file(reference_path, force=True, typ=_scene_type, exportSelected=True, @@ -98,7 +117,7 @@ class ExtractImportReference(publish.Extractor): if "files" not in instance.data: instance.data["files"] = [] - instance.data["files"].append(m_ref_fname) + instance.data["files"].append(ref_scene_name) if instance.data.get("representations") is None: instance.data["representations"] = [] @@ -106,14 +125,11 @@ class ExtractImportReference(publish.Extractor): ref_representation = { "name": self.scene_type, "ext": self.scene_type, - "files": m_ref_fname, - "stagingDir": os.path.dirname(tmp_filepath) + "files": ref_scene_name, + "stagingDir": proj_file_dir } instance.data["representations"].append(ref_representation) - self.log.info("Extracted instance '%s' to : '%s'" % (m_ref_fname, - m_ref_path)) - - # re-open the previous scene - cmds.file(current, open=True) + self.log.info("Extracted instance '%s' to : '%s'" % (ref_scene_name, + reference_path)) From 34b7ba9d3abead3437f7c53ea5d4d3abe3cd5720 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 12 Oct 2022 15:14:14 +0800 Subject: [PATCH 033/171] Import Reference during Publish --- .../publish/extract_import_reference.py | 38 ++++++++++--------- .../deadline/abstract_submit_deadline.py | 2 + .../defaults/project_anatomy/templates.json | 2 +- 3 files changed, 23 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 7448cf9966..4ad3c7756d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -70,20 +70,24 @@ class ExtractImportReference(publish.Extractor): reference_path = os.path.join(dir_path, ref_scene_name) self.log.info("Performing 
extraction..") - script = ("import maya.standalone\nmaya.standalone.initialize()\n" - "cmds.file('{current_name}', open=True, force=True)\n" - "reference_node = cmds.ls(type='reference')\n" - "for ref in reference_node:\n" - "\tref_file = cmds.referenceQuery(ref, f=True)\n" - "\tif ref == 'sharedReferenceNode':\n" - "\t\tcmds.file(ref_file, removeReference=True, referenceNode=ref)\n" - "\telse:\n" - "\t\tcmds.file(ref_file, importReference=True)\n" - "try:\n" - "\tcmds.file(rename='{ref_scene_name}')\n" - "except SyntaxError:\n" - "\tcmds.file(rename='{ref_scene_name}')\n" - "cmds.file(save=True, force=True)\n") + script = f""" + import maya.standalone + maya.standalone.initialize() + cmds.file('{current_name}', open=True, force=True) + reference_node = cmds.ls(type='reference') + for ref in reference_node: + ref_file = cmds.referenceQuery(ref, f=True) + if ref == 'sharedReferenceNode': + cmds.file(ref_file, removeReference=True, referenceNode=ref) + else: + cmds.file(ref_file, importReference=True) + try: + cmds.file(rename='{ref_scene_name}') + except SyntaxError: + cmds.file(rename='{ref_scene_name}') + + cmds.file(save=True, force=True) + """ mayapy_exe = os.path.join(os.getenv("MAYA_LOCATION"), "bin", "mayapy") if sys.platform == "windows": @@ -100,9 +104,7 @@ class ExtractImportReference(publish.Extractor): self.log.error("Import reference failed", exc_info=True) raise - proj_file_dir = os.path.dirname(current_name) - tmp_filepath = os.path.join(proj_file_dir, ref_scene_name) - instance.context.data["currentFile"] = tmp_filepath + instance.context.data["currentFile"] = ref_scene_name with lib.maintained_selection(): cmds.select(all=True, noExpand=True) cmds.file(reference_path, @@ -126,7 +128,7 @@ class ExtractImportReference(publish.Extractor): "name": self.scene_type, "ext": self.scene_type, "files": ref_scene_name, - "stagingDir": proj_file_dir + "stagingDir": os.path.dirname(current_name) } instance.data["representations"].append(ref_representation) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 909a5871e3..1f74b9b19b 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -526,6 +526,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): template_data = workfile_instance.data.get("anatomyData") if self.import_reference: rep = workfile_instance.data.get("representations")[1] + # template_data["workfiletype"] = rep.get("workfiletype") else: rep = workfile_instance.data.get("representations")[0] template_data["representation"] = rep.get("name") @@ -535,6 +536,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): anatomy = instance.context.data['anatomy'] anatomy_filled = anatomy.format(template_data) template_filled = anatomy_filled["publish"]["path"] + # template_filled = anatomy_filled["others"]["mayaWorkfile"]["path"] file_path = os.path.normpath(template_filled) self.log.info("Using published scene for render {}".format(file_path)) diff --git a/openpype/settings/defaults/project_anatomy/templates.json b/openpype/settings/defaults/project_anatomy/templates.json index caf399a903..72d387335d 100644 --- a/openpype/settings/defaults/project_anatomy/templates.json +++ b/openpype/settings/defaults/project_anatomy/templates.json @@ -17,7 +17,7 @@ }, "publish": { "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}", - "file": 
"{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}><_{udim}>.{ext}", + "file": "{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}><_{udim}><_{workfiletype}>.{ext}", "path": "{@folder}/{@file}", "thumbnail": "{thumbnail_root}/{project[name]}/{_id}_{thumbnail_type}.{ext}" }, From de73dd7de6368dbc65e825d325d2058af14633d0 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 14 Oct 2022 21:27:10 +0800 Subject: [PATCH 034/171] Import Reference during Publish --- .../publish/extract_import_reference.py | 86 ++++++++++++------- .../deadline/abstract_submit_deadline.py | 2 - .../defaults/project_anatomy/templates.json | 2 +- 3 files changed, 56 insertions(+), 34 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 4ad3c7756d..6b935ffb73 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -4,6 +4,7 @@ import sys from maya import cmds import pyblish.api +import tempfile from openpype.lib import run_subprocess from openpype.pipeline import publish, legacy_io @@ -33,7 +34,7 @@ class ExtractImportReference(publish.Extractor): order = pyblish.api.ExtractorOrder - 0.48 hosts = ["maya"] families = ["renderlayer", "workfile"] - active = True + active = _import_reference() optional = True tmp_format = "_tmp" @@ -68,43 +69,62 @@ class ExtractImportReference(publish.Extractor): ref_scene_name = "{0}.{1}".format(tmp_name, self.scene_type) reference_path = os.path.join(dir_path, ref_scene_name) + tmp_path = os.path.dirname(current_name) + "/" + ref_scene_name self.log.info("Performing extraction..") - script = f""" - import maya.standalone - maya.standalone.initialize() - cmds.file('{current_name}', open=True, force=True) - reference_node = cmds.ls(type='reference') - for ref in reference_node: - ref_file = cmds.referenceQuery(ref, f=True) - if ref == 'sharedReferenceNode': - cmds.file(ref_file, removeReference=True, referenceNode=ref) - else: - cmds.file(ref_file, importReference=True) - try: - cmds.file(rename='{ref_scene_name}') - except SyntaxError: - cmds.file(rename='{ref_scene_name}') - cmds.file(save=True, force=True) - """ + # This generates script for mayapy to take care of reference + # importing outside current session. It is passing current scene + # name and destination scene name. 
+ script = (""" +# -*- coding: utf-8 -*- +'''Script to import references to given scene.''' +import maya.standalone +maya.standalone.initialize() +# scene names filled by caller +current_name = "{current_name}" +ref_scene_name = "{ref_scene_name}" +print(">>> Opening {{}} ...".format(current_name)) +cmds.file(current_name, open=True, force=True) +reference_node = cmds.ls(type='reference') +print(">>> Processing references") +for ref in reference_node: + ref_file = cmds.referenceQuery(ref, f=True) + print("--- {{}}".format(ref)) + print("--> {{}}".format(ref_file)) + if ref == 'sharedReferenceNode': + cmds.file(ref_file, removeReference=True, referenceNode=ref) + else: + cmds.file(ref_file, importReference=True) +print(">>> Saving scene as {{}}".format(ref_scene_name)) +cmds.file(rename=ref_scene_name) +cmds.file(save=True, force=True) +print("*** Done") + """).format(current_name=current_name, ref_scene_name=tmp_path) mayapy_exe = os.path.join(os.getenv("MAYA_LOCATION"), "bin", "mayapy") if sys.platform == "windows": - mayapy_exe = mayapy_exe + ".exe" + mayapy_exe += ".exe" + mayapy_exe = os.path.normpath(mayapy_exe) + # can't use TemporaryNamedFile as that can't be opened in another + # process until handles are closed by context manager. + with tempfile.TemporaryDirectory() as tmp_dir_name: + tmp_file_name = os.path.join(tmp_dir_name, "import_ref.py") + tmp = open(tmp_file_name, "w+t") + subprocess_args = [ + mayapy_exe, + tmp_file_name + ] + self.log.info("Using temp file: {}".format(tmp.name)) + try: + tmp.write(script) + tmp.close() + run_subprocess(subprocess_args) + except Exception: + self.log.error("Import reference failed", exc_info=True) + raise - subprocess_args = [ - mayapy_exe, - "-c", - script.replace("\n", ";") - ] - try: - out = run_subprocess(subprocess_args) - except Exception: - self.log.error("Import reference failed", exc_info=True) - raise - instance.context.data["currentFile"] = ref_scene_name with lib.maintained_selection(): cmds.select(all=True, noExpand=True) cmds.file(reference_path, @@ -117,6 +137,8 @@ class ExtractImportReference(publish.Extractor): expressions=True, constructionHistory=True) + instance.context.data["currentFile"] = tmp_path + if "files" not in instance.data: instance.data["files"] = [] instance.data["files"].append(ref_scene_name) @@ -128,8 +150,10 @@ class ExtractImportReference(publish.Extractor): "name": self.scene_type, "ext": self.scene_type, "files": ref_scene_name, - "stagingDir": os.path.dirname(current_name) + "stagingDir": os.path.dirname(current_name), + "outputName": "imported" } + self.log.info("%s" % ref_representation) instance.data["representations"].append(ref_representation) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 1f74b9b19b..909a5871e3 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -526,7 +526,6 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): template_data = workfile_instance.data.get("anatomyData") if self.import_reference: rep = workfile_instance.data.get("representations")[1] - # template_data["workfiletype"] = rep.get("workfiletype") else: rep = workfile_instance.data.get("representations")[0] template_data["representation"] = rep.get("name") @@ -536,7 +535,6 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): anatomy = instance.context.data['anatomy'] anatomy_filled = anatomy.format(template_data) template_filled = 
anatomy_filled["publish"]["path"] - # template_filled = anatomy_filled["others"]["mayaWorkfile"]["path"] file_path = os.path.normpath(template_filled) self.log.info("Using published scene for render {}".format(file_path)) diff --git a/openpype/settings/defaults/project_anatomy/templates.json b/openpype/settings/defaults/project_anatomy/templates.json index 72d387335d..caf399a903 100644 --- a/openpype/settings/defaults/project_anatomy/templates.json +++ b/openpype/settings/defaults/project_anatomy/templates.json @@ -17,7 +17,7 @@ }, "publish": { "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}", - "file": "{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}><_{udim}><_{workfiletype}>.{ext}", + "file": "{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}><_{udim}>.{ext}", "path": "{@folder}/{@file}", "thumbnail": "{thumbnail_root}/{project[name]}/{_id}_{thumbnail_type}.{ext}" }, From 76a70b70cf75c83c2be3beae8897c17c9ad80cb0 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 14 Oct 2022 21:28:30 +0800 Subject: [PATCH 035/171] Import Reference during Publish --- openpype/hosts/maya/plugins/publish/extract_import_reference.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 6b935ffb73..e70a27a6f6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -124,7 +124,6 @@ print("*** Done") self.log.error("Import reference failed", exc_info=True) raise - with lib.maintained_selection(): cmds.select(all=True, noExpand=True) cmds.file(reference_path, From b82c3ac7f97d74272c4d991e7ec9a230cda6a5b4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 14 Oct 2022 21:29:51 +0800 Subject: [PATCH 036/171] Import Reference during Publish --- openpype/hosts/maya/plugins/publish/extract_import_reference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index e70a27a6f6..3ec2f3bba4 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -66,7 +66,7 @@ class ExtractImportReference(publish.Extractor): return tmp_name = instance.name + self.tmp_format current_name = cmds.file(query=True, sceneName=True) - ref_scene_name = "{0}.{1}".format(tmp_name, self.scene_type) + ref_scene_name = "{0}.{1}".format(tmp_name, self.scene_type) reference_path = os.path.join(dir_path, ref_scene_name) tmp_path = os.path.dirname(current_name) + "/" + ref_scene_name From ab93c766ea425e6ef7b017465c6905cca014f712 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 25 Oct 2022 20:09:04 +0800 Subject: [PATCH 037/171] Import Reference during Publish --- .../plugins/publish/extract_import_reference.py | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 3ec2f3bba4..fa9e612dad 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -7,20 +7,10 @@ import pyblish.api import tempfile from openpype.lib import run_subprocess -from openpype.pipeline import 
publish, legacy_io -from openpype.settings import get_project_settings +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -def _import_reference(): - project_name = legacy_io.active_project() - project_setting = get_project_settings(project_name) - import_reference = ( - project_setting["deadline"]["publish"]["MayaSubmitDeadline"]["import_reference"] # noqa - ) - return import_reference - - class ExtractImportReference(publish.Extractor): """ @@ -34,10 +24,13 @@ class ExtractImportReference(publish.Extractor): order = pyblish.api.ExtractorOrder - 0.48 hosts = ["maya"] families = ["renderlayer", "workfile"] - active = _import_reference() optional = True tmp_format = "_tmp" + @classmethod + def apply_settings(cls, project_setting, system_settings): #noqa + cls.active = project_setting["deadline"]["publish"]["MayaSubmitDeadline"]["import_reference"] # noqa + def process(self, instance): ext_mapping = ( instance.context.data["project_settings"]["maya"]["ext_mapping"] From fbfdefd8d4b487a161db7843e6f6995c1c45b88d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 25 Oct 2022 20:09:38 +0800 Subject: [PATCH 038/171] Import Reference during Publish --- openpype/hosts/maya/plugins/publish/extract_import_reference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index fa9e612dad..b0b69304ef 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -28,7 +28,7 @@ class ExtractImportReference(publish.Extractor): tmp_format = "_tmp" @classmethod - def apply_settings(cls, project_setting, system_settings): #noqa + def apply_settings(cls, project_setting, system_settings): cls.active = project_setting["deadline"]["publish"]["MayaSubmitDeadline"]["import_reference"] # noqa def process(self, instance): From aeee2a491821133549672a98a0939e402fb7ce12 Mon Sep 17 00:00:00 2001 From: DMO Date: Mon, 31 Oct 2022 14:51:44 +0900 Subject: [PATCH 039/171] Fixed commit for adding proper layered support for USD overrides. 
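Before the Multiverse changes below, a brief recap of the settings hook from the hunks above: moving the lookup into apply_settings resolves the plugin's active flag from project settings when publish plugins are registered, rather than querying settings at import time as the earlier module-level _import_reference() call did. A minimal sketch, reusing the Deadline key shown above:

import pyblish.api

from openpype.pipeline import publish


class ExtractImportReference(publish.Extractor):
    label = "Extract Import Reference"
    order = pyblish.api.ExtractorOrder - 0.48
    hosts = ["maya"]
    families = ["renderlayer", "workfile"]
    optional = True

    @classmethod
    def apply_settings(cls, project_settings, system_settings):
        # Deadline's submitter setting decides whether this extractor runs.
        cls.active = project_settings["deadline"]["publish"][
            "MayaSubmitDeadline"]["import_reference"]

    def process(self, instance):
        # Actual extraction as in the diffs above; omitted in this sketch.
        pass
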
--- .../create/create_multiverse_usd_comp.py | 2 +- .../maya/plugins/load/load_multiverse_usd.py | 32 ++++- .../plugins/load/load_multiverse_usd_over.py | 136 ++++++++++++++++++ 3 files changed, 164 insertions(+), 6 deletions(-) create mode 100644 openpype/hosts/maya/plugins/load/load_multiverse_usd_over.py diff --git a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py index a92969eb9a..ed466a8068 100644 --- a/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/create/create_multiverse_usd_comp.py @@ -17,7 +17,7 @@ class CreateMultiverseUsdComp(plugin.Creator): # Order of `fileFormat` must match extract_multiverse_usd_comp.py self.data["fileFormat"] = ["usda", "usd"] - self.data["stripNamespaces"] = True + self.data["stripNamespaces"] = False self.data["mergeTransformAndShape"] = False self.data["flattenContent"] = False self.data["writeAsCompoundLayers"] = False diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index 24b97db365..13915aa2a9 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- import maya.cmds as cmds +from maya import mel +import os from openpype.pipeline import ( load, @@ -11,6 +13,7 @@ from openpype.hosts.maya.api.lib import ( unique_namespace ) from openpype.hosts.maya.api.pipeline import containerise +from openpype.client import get_representations, get_representation_by_id class MultiverseUsdLoader(load.LoaderPlugin): @@ -26,7 +29,6 @@ class MultiverseUsdLoader(load.LoaderPlugin): color = "orange" def load(self, context, name=None, namespace=None, options=None): - asset = context['asset']['name'] namespace = namespace or unique_namespace( asset + "_", @@ -34,15 +36,16 @@ class MultiverseUsdLoader(load.LoaderPlugin): suffix="_", ) - # Create the shape + # Make sure we can load the plugin cmds.loadPlugin("MultiverseForMaya", quiet=True) + import multiverse + # Create the shape shape = None transform = None with maintained_selection(): cmds.namespace(addNamespace=namespace) with namespaced(namespace, new=False): - import multiverse shape = multiverse.CreateUsdCompound(self.fname) transform = cmds.listRelatives( shape, parent=True, fullPath=True)[0] @@ -67,15 +70,34 @@ class MultiverseUsdLoader(load.LoaderPlugin): shapes = cmds.ls(members, type="mvUsdCompoundShape") assert shapes, "Cannot find mvUsdCompoundShape in container" - path = get_representation_path(representation) + project_name = representation["context"]["project"]["name"] + prev_representation_id = cmds.getAttr("{}.representation".format(node)) + prev_representation = get_representation_by_id(project_name, + prev_representation_id) + prev_path = os.path.normpath(prev_representation["data"]["path"]) + # Make sure we can load the plugin + cmds.loadPlugin("MultiverseForMaya", quiet=True) import multiverse + for shape in shapes: - multiverse.SetUsdCompoundAssetPaths(shape, [path]) + + asset_paths = multiverse.GetUsdCompoundAssetPaths(shape) + asset_paths = [os.path.normpath(p) for p in asset_paths] + + assert asset_paths.count(prev_path) == 1, \ + "Couldn't find matching path (or too many)" + prev_path_idx = asset_paths.index(prev_path) + + path = get_representation_path(representation) + asset_paths[prev_path_idx] = path + + multiverse.SetUsdCompoundAssetPaths(shape, asset_paths) 
cmds.setAttr("{}.representation".format(node), str(representation["_id"]), type="string") + mel.eval('refreshEditorTemplates;') def switch(self, container, representation): self.update(container, representation) diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd_over.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd_over.py new file mode 100644 index 0000000000..080475461b --- /dev/null +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd_over.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +import maya.cmds as cmds +from maya import mel +import os + +import qargparse + +from openpype.pipeline import ( + load, + get_representation_path +) +from openpype.hosts.maya.api.lib import ( + maintained_selection, + namespaced, + unique_namespace +) +from openpype.hosts.maya.api.pipeline import containerise +from openpype.client import get_representations, get_representation_by_id + + +class MultiverseUsdOverLoader(load.LoaderPlugin): + """Reference file""" + + families = ["mvUsdOverride"] + representations = ["usda", "usd", "udsz"] + + label = "Load Usd Override into Compound" + order = -10 + icon = "code-fork" + color = "orange" + + options = [ + qargparse.String( + "Which Compound", + label="Compound", + help="Select which compound to add this as a layer to." + ) + ] + + def load(self, context, name=None, namespace=None, options=None): + asset = context['asset']['name'] + + current_usd = cmds.ls(selection=True, + type="mvUsdCompoundShape", + dag=True, + long=True) + if len(current_usd) != 1: + self.log.error("Current selection invalid: '{}', " + "must contain exactly 1 mvUsdCompoundShape." + "".format(current_usd)) + return + + # Make sure we can load the plugin + cmds.loadPlugin("MultiverseForMaya", quiet=True) + import multiverse + + nodes = current_usd + with maintained_selection(): + multiverse.AddUsdCompoundAssetPath(current_usd[0], self.fname) + + namespace = current_usd[0].split("|")[1].split(":")[0] + + container = containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + cmds.addAttr(container, longName="mvUsdCompoundShape", + niceName="mvUsdCompoundShape", dataType="string") + cmds.setAttr(container + ".mvUsdCompoundShape", + current_usd[0], type="string") + + return container + + def update(self, container, representation): + # type: (dict, dict) -> None + """Update container with specified representation.""" + + cmds.loadPlugin("MultiverseForMaya", quiet=True) + import multiverse + + node = container['objectName'] + assert cmds.objExists(node), "Missing container" + + members = cmds.sets(node, query=True) or [] + shapes = cmds.ls(members, type="mvUsdCompoundShape") + assert shapes, "Cannot find mvUsdCompoundShape in container" + + mvShape = container['mvUsdCompoundShape'] + assert mvShape, "Missing mv source" + + project_name = representation["context"]["project"]["name"] + prev_representation_id = cmds.getAttr("{}.representation".format(node)) + prev_representation = get_representation_by_id(project_name, + prev_representation_id) + prev_path = os.path.normpath(prev_representation["data"]["path"]) + + path = get_representation_path(representation) + + for shape in shapes: + asset_paths = multiverse.GetUsdCompoundAssetPaths(shape) + asset_paths = [os.path.normpath(p) for p in asset_paths] + + assert asset_paths.count(prev_path) == 1, \ + "Couldn't find matching path (or too many)" + prev_path_idx = asset_paths.index(prev_path) + asset_paths[prev_path_idx] = path + 
multiverse.SetUsdCompoundAssetPaths(shape, asset_paths) + + cmds.setAttr("{}.representation".format(node), + str(representation["_id"]), + type="string") + mel.eval('refreshEditorTemplates;') + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + # type: (dict) -> None + """Remove loaded container.""" + # Delete container and its contents + if cmds.objExists(container['objectName']): + members = cmds.sets(container['objectName'], query=True) or [] + cmds.delete([container['objectName']] + members) + + # Remove the namespace, if empty + namespace = container['namespace'] + if cmds.namespace(exists=namespace): + members = cmds.namespaceInfo(namespace, listNamespace=True) + if not members: + cmds.namespace(removeNamespace=namespace) + else: + self.log.warning("Namespace not deleted because it " + "still has members: %s", namespace) From c62527a678d97f9bf0e6be6f4ecec3d30433104b Mon Sep 17 00:00:00 2001 From: DMO Date: Mon, 31 Oct 2022 15:15:46 +0900 Subject: [PATCH 040/171] Removing bad merge. --- openpype/hosts/maya/api/lib_renderproducts.py | 58 ------------------- 1 file changed, 58 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 7b9601cda8..cd204445b7 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -77,7 +77,6 @@ IMAGE_PREFIXES = { "arnold": "defaultRenderGlobals.imageFilePrefix", "renderman": "rmanGlobals.imageFileFormat", "redshift": "defaultRenderGlobals.imageFilePrefix", - "_3delight": "defaultRenderGlobals.imageFilePrefix", "mayahardware2": "defaultRenderGlobals.imageFilePrefix" } @@ -173,7 +172,6 @@ def get(layer, render_instance=None): "redshift": RenderProductsRedshift, "renderman": RenderProductsRenderman, "mayahardware2": RenderProductsMayaHardware - "_3delight": RenderProducts3Delight }.get(renderer_name.lower(), None) if renderer is None: raise UnsupportedRendererException( @@ -1288,62 +1286,6 @@ class RenderProductsMayaHardware(ARenderProducts): for cam in self.get_renderable_cameras(): product = RenderProduct(productName="beauty", ext=ext, camera=cam) products.append(product) -class RenderProducts3Delight(ARenderProducts): - """Expected files for Renderman renderer. - - Warning: - This is very rudimentary and needs more love and testing. - """ - - renderer = "_3delight" - - def get_render_products(self): - """Get all AOVs. 
- - See Also: - :func:`ARenderProducts.get_render_products()` - - """ - cameras = [ - self.sanitize_camera_name(c) - for c in self.get_renderable_cameras() - ] - - if not cameras: - cameras = [ - self.sanitize_camera_name( - self.get_renderable_cameras()[0]) - ] - products = [] - - default_ext = "exr" - - nodes = cmds.listConnections( - 'dlRenderGlobals1', - type='dlRenderSettings') - assert len(nodes) == 1 - node = nodes[0] - - num_layers = cmds.getAttr( - '{}.layerOutputVariables'.format(node), - size=True) - assert num_layers > 0 - for i in range(num_layers): - output = cmds.getAttr( - '{}.layerOutput[{}]'.format(node, i)) - if not output: - continue - - output_var = cmds.getAttr( - '{}.layerOutputVariables[{}]'.format(node, i)) - output_var_tokens = output_var.split('|') - aov_name = output_var_tokens[4] - - for camera in cameras: - product = RenderProduct(productName=aov_name, - ext=default_ext, - camera=camera) - products.append(product) return products From 422a3c4f8ad9e7cbdb590d4c9f5aeb694cbcc698 Mon Sep 17 00:00:00 2001 From: DMO Date: Mon, 31 Oct 2022 15:33:50 +0900 Subject: [PATCH 041/171] Removed wrongly-staged `extract_animation.py` --- .../maya/plugins/publish/extract_animation.py | 113 ------------------ 1 file changed, 113 deletions(-) delete mode 100644 openpype/hosts/maya/plugins/publish/extract_animation.py diff --git a/openpype/hosts/maya/plugins/publish/extract_animation.py b/openpype/hosts/maya/plugins/publish/extract_animation.py deleted file mode 100644 index 3d1f30b640..0000000000 --- a/openpype/hosts/maya/plugins/publish/extract_animation.py +++ /dev/null @@ -1,113 +0,0 @@ -import os - -from maya import cmds - -import openpype.api -from openpype.hosts.maya.api.lib import ( - extract_alembic, - suspended_refresh, - maintained_selection, - iter_visible_nodes_in_range -) - - -class ExtractAnimation(openpype.api.Extractor): - """Produce an alembic of just point positions and normals. - - Positions and normals, uvs, creases are preserved, but nothing more, - for plain and predictable point caches. 
- - Plugin can run locally or remotely (on a farm - if instance is marked with - "farm" it will be skipped in local processing, but processed on farm) - """ - - label = "Extract Animation" - hosts = ["maya"] - families = ["animation"] - targets = ["local", "remote"] - - def process(self, instance): - if instance.data.get("farm"): - self.log.debug("Should be processed on farm, skipping.") - return - - # Collect the out set nodes - out_sets = [node for node in instance if node.endswith("out_SET")] - if len(out_sets) != 1: - raise RuntimeError("Couldn't find exactly one out_SET: " - "{0}".format(out_sets)) - out_set = out_sets[0] - roots = cmds.sets(out_set, query=True) - - # Include all descendants - nodes = roots + cmds.listRelatives(roots, - allDescendents=True, - fullPath=True) or [] - - # Collect the start and end including handles - start = instance.data["frameStartHandle"] - end = instance.data["frameEndHandle"] - - self.log.info("Extracting animation..") - dirname = self.staging_dir(instance) - - parent_dir = self.staging_dir(instance) - filename = "{name}.abc".format(**instance.data) - path = os.path.join(parent_dir, filename) - - options = { - "step": instance.data.get("step", 1.0) or 1.0, - "attr": ["cbId"], - "writeVisibility": True, - "writeCreases": True, - "uvWrite": True, - "selection": True, - "worldSpace": instance.data.get("worldSpace", True), - "writeColorSets": instance.data.get("writeColorSets", False), - "writeFaceSets": instance.data.get("writeFaceSets", False), - # 'noNormals' is the standard alembic option name. - "noNormals": not instance.data.get("writeNormals", True) - } - - if not instance.data.get("includeParentHierarchy", True): - # Set the root nodes if we don't want to include parents - # The roots are to be considered the ones that are the actual - # direct members of the set - options["root"] = roots - - if int(cmds.about(version=True)) >= 2017: - # Since Maya 2017 alembic supports multiple uv sets - write them. - options["writeUVSets"] = True - - if instance.data.get("visibleOnly", False): - # If we only want to include nodes that are visible in the frame - # range then we need to do our own check. Alembic's `visibleOnly` - # flag does not filter out those that are only hidden on some - # frames as it counts "animated" or "connected" visibilities as - # if it's always visible. - nodes = list(iter_visible_nodes_in_range(nodes, - start=start, - end=end)) - - with suspended_refresh(): - with maintained_selection(): - cmds.select(nodes, noExpand=True) - extract_alembic(file=path, - startFrame=float(start), - endFrame=float(end), - **options) - - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'abc', - 'ext': 'abc', - 'files': filename, - "stagingDir": dirname, - } - instance.data["representations"].append(representation) - - instance.context.data["cleanupFullPaths"].append(path) - - self.log.info("Extracted {} to {}".format(instance, dirname)) From 046ed724c802101ad7a4190ae7fb9f3765b907d3 Mon Sep 17 00:00:00 2001 From: DMO Date: Mon, 31 Oct 2022 15:40:25 +0900 Subject: [PATCH 042/171] Addressing valid hound concerns. 
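Before the lint fixes below, a condensed sketch of the update pattern the two Multiverse loaders above share: resolve the file the container currently points at from its stored representation id, then swap that entry for the new representation's path inside the compound's asset-path list so layer order is preserved. The calls mirror the diffs above; the function name is made up and error handling is trimmed.

import os

from maya import cmds
import multiverse

from openpype.client import get_representation_by_id
from openpype.pipeline import get_representation_path


def swap_compound_layer(shape, container_node, project_name, new_repre):
    # Path the container pointed at before this update.
    prev_id = cmds.getAttr("{}.representation".format(container_node))
    prev_repre = get_representation_by_id(project_name, prev_id)
    prev_path = os.path.normpath(prev_repre["data"]["path"])

    # Replace it in place inside the compound's layer stack.
    paths = [os.path.normpath(p)
             for p in multiverse.GetUsdCompoundAssetPaths(shape)]
    paths[paths.index(prev_path)] = get_representation_path(new_repre)
    multiverse.SetUsdCompoundAssetPaths(shape, paths)
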
--- openpype/hosts/maya/plugins/load/load_multiverse_usd.py | 2 +- .../hosts/maya/plugins/load/load_multiverse_usd_over.py | 8 ++------ .../hosts/maya/plugins/publish/collect_multiverse_look.py | 6 +++--- .../plugins/publish/validate_transform_naming_suffix.py | 4 ++-- 4 files changed, 8 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py index 13915aa2a9..9e0d38df46 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd.py @@ -13,7 +13,7 @@ from openpype.hosts.maya.api.lib import ( unique_namespace ) from openpype.hosts.maya.api.pipeline import containerise -from openpype.client import get_representations, get_representation_by_id +from openpype.client import get_representation_by_id class MultiverseUsdLoader(load.LoaderPlugin): diff --git a/openpype/hosts/maya/plugins/load/load_multiverse_usd_over.py b/openpype/hosts/maya/plugins/load/load_multiverse_usd_over.py index 080475461b..8a25508ac2 100644 --- a/openpype/hosts/maya/plugins/load/load_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/load/load_multiverse_usd_over.py @@ -10,12 +10,10 @@ from openpype.pipeline import ( get_representation_path ) from openpype.hosts.maya.api.lib import ( - maintained_selection, - namespaced, - unique_namespace + maintained_selection ) from openpype.hosts.maya.api.pipeline import containerise -from openpype.client import get_representations, get_representation_by_id +from openpype.client import get_representation_by_id class MultiverseUsdOverLoader(load.LoaderPlugin): @@ -38,8 +36,6 @@ class MultiverseUsdOverLoader(load.LoaderPlugin): ] def load(self, context, name=None, namespace=None, options=None): - asset = context['asset']['name'] - current_usd = cmds.ls(selection=True, type="mvUsdCompoundShape", dag=True, diff --git a/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py b/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py index 4c50e4df27..a7cb14855b 100644 --- a/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_multiverse_look.py @@ -253,7 +253,7 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin): """Collect Multiverse Look Searches through the overrides finding all material overrides. From there - it extracts the shading group and then finds all texture files in the + it extracts the shading group and then finds all texture files in the shading group network. It also checks for mipmap versions of texture files and adds them to the resouces to get published. @@ -341,7 +341,7 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin): node_type = cmds.nodeType(node) self.log.debug("processing: {}/{}".format(node, node_type)) - if not node_type in NODETYPES: + if node_type not in NODETYPES: self.log.error("Unsupported file node: {}".format(node_type)) raise AssertionError("Unsupported file node") @@ -361,7 +361,7 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin): # Compare with the computed file path, e.g. 
the one with the # pattern in it, to generate some logging information about this # difference - # computed_attribute = "{}.computedFileTextureNamePattern".format(node) + # computed_attribute = "{}.computedFileTextureNamePattern".format(node) # noqa computed_source = cmds.getAttr(computed_fname_attrib) if source != computed_source: self.log.debug("Detected computed file pattern difference " diff --git a/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py b/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py index 4f2a400d91..65551c8d5e 100644 --- a/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py +++ b/openpype/hosts/maya/plugins/publish/validate_transform_naming_suffix.py @@ -48,8 +48,8 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin): @classmethod def get_table_for_invalid(cls): ss = [] - for k,v in cls.SUFFIX_NAMING_TABLE.items(): - ss.append(" - {}: {}".format(k,", ".join(v))) + for k, v in cls.SUFFIX_NAMING_TABLE.items(): + ss.append(" - {}: {}".format(k, ", ".join(v))) return "\n".join(ss) @staticmethod From ef924a2358b883c8118fca413770881e544cf8d2 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 16 Nov 2022 21:00:17 +0800 Subject: [PATCH 043/171] Import Reference during Publish --- .../publish/extract_import_reference.py | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index b0b69304ef..8e0257dafb 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -79,16 +79,19 @@ current_name = "{current_name}" ref_scene_name = "{ref_scene_name}" print(">>> Opening {{}} ...".format(current_name)) cmds.file(current_name, open=True, force=True) -reference_node = cmds.ls(type='reference') print(">>> Processing references") -for ref in reference_node: - ref_file = cmds.referenceQuery(ref, f=True) - print("--- {{}}".format(ref)) - print("--> {{}}".format(ref_file)) - if ref == 'sharedReferenceNode': - cmds.file(ref_file, removeReference=True, referenceNode=ref) - else: - cmds.file(ref_file, importReference=True) +all_reference = cmds.file(q=True, reference=True) or [] +for ref in all_reference: + if cmds.referenceQuery(ref, f=True): + cmds.file(ref, importReference=True) + + nested_ref = cmds.file(q=True, reference=True) + if nested_ref: + for new_ref in nested_ref: + if new_ref not in all_reference: + all_reference.append(new_ref) + +print(">>> Finish importing references") print(">>> Saving scene as {{}}".format(ref_scene_name)) cmds.file(rename=ref_scene_name) From d068ff481401e4261f3447ddbd52a7bd0cdd06a6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Nov 2022 10:54:46 +0100 Subject: [PATCH 044/171] Don't use legacy_io session in global plugins --- openpype/plugins/publish/cleanup_farm.py | 6 ++---- .../plugins/publish/collect_anatomy_instance_data.py | 3 +-- .../plugins/publish/collect_scene_loaded_versions.py | 7 ++----- openpype/plugins/publish/integrate.py | 11 +++++------ .../plugins/publish/validate_editorial_asset_name.py | 6 +----- 5 files changed, 11 insertions(+), 22 deletions(-) diff --git a/openpype/plugins/publish/cleanup_farm.py b/openpype/plugins/publish/cleanup_farm.py index 2c6c1625bb..b87d4698a2 100644 --- a/openpype/plugins/publish/cleanup_farm.py +++ b/openpype/plugins/publish/cleanup_farm.py @@ -4,8 +4,6 @@ import os import 
shutil import pyblish.api -from openpype.pipeline import legacy_io - class CleanUpFarm(pyblish.api.ContextPlugin): """Cleans up the staging directory after a successful publish. @@ -23,8 +21,8 @@ class CleanUpFarm(pyblish.api.ContextPlugin): def process(self, context): # Get source host from which farm publishing was started - src_host_name = legacy_io.Session.get("AVALON_APP") - self.log.debug("Host name from session is {}".format(src_host_name)) + src_host_name = context.data["hostName"] + self.log.debug("Host name from context is {}".format(src_host_name)) # Skip process if is not in list of source hosts in which this # plugin should run if src_host_name not in self.allowed_hosts: diff --git a/openpype/plugins/publish/collect_anatomy_instance_data.py b/openpype/plugins/publish/collect_anatomy_instance_data.py index 909b49a07d..3858b4725e 100644 --- a/openpype/plugins/publish/collect_anatomy_instance_data.py +++ b/openpype/plugins/publish/collect_anatomy_instance_data.py @@ -32,7 +32,6 @@ from openpype.client import ( get_subsets, get_last_versions ) -from openpype.pipeline import legacy_io class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): @@ -49,7 +48,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): def process(self, context): self.log.info("Collecting anatomy data for all instances.") - project_name = legacy_io.active_project() + project_name = context.data["projectName"] self.fill_missing_asset_docs(context, project_name) self.fill_instance_data_from_asset(context) self.fill_latest_versions(context, project_name) diff --git a/openpype/plugins/publish/collect_scene_loaded_versions.py b/openpype/plugins/publish/collect_scene_loaded_versions.py index 5ff2b46e3b..627d451f58 100644 --- a/openpype/plugins/publish/collect_scene_loaded_versions.py +++ b/openpype/plugins/publish/collect_scene_loaded_versions.py @@ -1,10 +1,7 @@ import pyblish.api from openpype.client import get_representations -from openpype.pipeline import ( - registered_host, - legacy_io, -) +from openpype.pipeline import registered_host class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): @@ -44,7 +41,7 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): for container in containers } - project_name = legacy_io.active_project() + project_name = context.data["projectName"] repre_docs = get_representations( project_name, representation_ids=repre_ids, diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 401270a788..e19c3eee7c 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -25,7 +25,6 @@ from openpype.client import ( ) from openpype.lib import source_hash from openpype.lib.file_transaction import FileTransaction -from openpype.pipeline import legacy_io from openpype.pipeline.publish import ( KnownPublishError, get_publish_template_name, @@ -242,7 +241,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): return filtered_repres def register(self, instance, file_transactions, filtered_repres): - project_name = legacy_io.active_project() + project_name = instance.context["projectName"] instance_stagingdir = instance.data.get("stagingDir") if not instance_stagingdir: @@ -803,11 +802,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): """Return anatomy template name to use for integration""" # Anatomy data is pre-filled by Collectors - - project_name = legacy_io.active_project() + context = instance.context + project_name = context.data["projectName"] # Task can be optional in anatomy 
data - host_name = instance.context.data["hostName"] + host_name = context.data["hostName"] anatomy_data = instance.data["anatomyData"] family = anatomy_data["family"] task_info = anatomy_data.get("task") or {} @@ -818,7 +817,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): family, task_name=task_info.get("name"), task_type=task_info.get("type"), - project_settings=instance.context.data["project_settings"], + project_settings=context.data["project_settings"], logger=self.log ) diff --git a/openpype/plugins/publish/validate_editorial_asset_name.py b/openpype/plugins/publish/validate_editorial_asset_name.py index 694788c414..4f8a1abf2e 100644 --- a/openpype/plugins/publish/validate_editorial_asset_name.py +++ b/openpype/plugins/publish/validate_editorial_asset_name.py @@ -2,7 +2,6 @@ from pprint import pformat import pyblish.api -from openpype.pipeline import legacy_io from openpype.client import get_assets @@ -28,10 +27,7 @@ class ValidateEditorialAssetName(pyblish.api.ContextPlugin): asset_and_parents = self.get_parents(context) self.log.debug("__ asset_and_parents: {}".format(asset_and_parents)) - if not legacy_io.Session: - legacy_io.install() - - project_name = legacy_io.active_project() + project_name = context.data["projectName"] db_assets = list(get_assets( project_name, fields=["name", "data.parents"] )) From 59b4f5823719e168bdf4d6f0ccb33a380e83405b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 1 Dec 2022 12:59:49 +0100 Subject: [PATCH 045/171] OP-4504 - added originalBasename and originalDirname to instance Will be used to fill 'source' (and 'online') template. Source template is used to publish in-situ, eg. without copying possibly massive files (as pointcaches etc.) --- .../hosts/traypublisher/plugins/publish/collect_source.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_source.py b/openpype/hosts/traypublisher/plugins/publish/collect_source.py index 6ff22be13a..3d983a89ee 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_source.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_source.py @@ -1,3 +1,5 @@ +import os.path + import pyblish.api @@ -22,3 +24,9 @@ class CollectSource(pyblish.api.ContextPlugin): self.log.info(( "Source of instance \"{}\" was already set to \"{}\"" ).format(instance.data["name"], source)) + + if not instance.data.get("originalBasename"): + instance.data["originalBasename"] = os.path.basename(source) + + if not instance.data.get("originalDirname"): + instance.data["originalDirname"] = os.path.dirname(source) From 5aed3f9bd817b722f529d9289885a7af1cd155e4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 1 Dec 2022 13:01:54 +0100 Subject: [PATCH 046/171] OP-4504 - added originalDirname to data filling template Will be used to fill 'source' (and 'online') template. 
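For context, a rough sketch of what the template data gains from these two keys (the example path and the simplified path pattern are assumptions for illustration; the actual 'source' template lives in the project anatomy settings):

    import os

    # Assumed example of a file that should be published in place.
    source_file = "P:/projects/proj/shots/sh010/publish/cache_v001.abc"

    template_data = {
        # Same values the collector stores on the instance.
        "originalBasename": os.path.basename(source_file),
        "originalDirname": os.path.dirname(source_file),
    }

    # A "source"-style template can then resolve back to the already
    # existing file, so integration does not need to copy anything.
    print("{originalDirname}/{originalBasename}".format(**template_data))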
--- openpype/plugins/publish/integrate.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 401270a788..a5df678332 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -535,7 +535,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "resolutionHeight": "resolution_height", "fps": "fps", "outputName": "output", - "originalBasename": "originalBasename" + "originalBasename": "originalBasename", + "originalDirname": "originalDirname" }.items(): # Allow to take value from representation # if not found also consider instance.data From 07ee68eea1d13960450a170cc86e8285638d1552 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 1 Dec 2022 13:02:49 +0100 Subject: [PATCH 047/171] OP-4504 - added checks for not overwriting same file Added check for publishing to project folder --- openpype/plugins/publish/integrate.py | 35 ++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index a5df678332..08b59a3574 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -268,6 +268,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): ) instance.data["versionEntity"] = version + anatomy = instance.context.data["anatomy"] + # Get existing representations (if any) existing_repres_by_name = { repre_doc["name"].lower(): repre_doc @@ -291,6 +293,19 @@ class IntegrateAsset(pyblish.api.InstancePlugin): instance) for src, dst in prepared["transfers"]: + if src == dst: + self.log.info( + "Source '{}' same as destination '{}'. Skipping." + .format(src, dst)) + continue + + if not self._is_path_in_project_roots(anatomy.all_root_paths, + dst): + self.log.warning( + "Destination '{}' is not in project folder. Skipping" + .format(dst)) + continue + # todo: add support for hardlink transfers file_transactions.add(src, dst) @@ -340,7 +355,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Compute the resource file infos once (files belonging to the # version instance instead of an individual representation) so # we can re-use those file infos per representation - anatomy = instance.context.data["anatomy"] resource_file_infos = self.get_files_info(resource_destinations, sites=sites, anatomy=anatomy) @@ -889,3 +903,22 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "hash": source_hash(path), "sites": sites } + + def _is_path_in_project_roots(self, roots, file_path): + """Checks if 'file_path' starts with any of the roots. + + Used to check that published path belongs to project, eg. we are not + trying to publish to local only folder. 
+ Args: + roots (list of RootItem): {ROOT_NAME: ROOT_PATH} + file_path (str) + Returns: + (bool) + """ + file_path = str(file_path).replace("\\", "/") + for root_item in roots.values(): + if file_path.startswith(root_item.clean_value): + return True + + return False + From 0aa0080f1122fdafde739258f127056ab37a7620 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 1 Dec 2022 15:36:33 +0100 Subject: [PATCH 048/171] OP-4504 - fixed check file in project folder --- openpype/plugins/publish/integrate.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 08b59a3574..44cee664b5 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -299,7 +299,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): .format(src, dst)) continue - if not self._is_path_in_project_roots(anatomy.all_root_paths, + if not self._is_path_in_project_roots(anatomy.all_root_paths(), dst): self.log.warning( "Destination '{}' is not in project folder. Skipping" @@ -915,9 +915,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): Returns: (bool) """ - file_path = str(file_path).replace("\\", "/") - for root_item in roots.values(): - if file_path.startswith(root_item.clean_value): + file_path = str(file_path).replace("\\", "/").lower() + for root_item in roots: + if file_path.startswith(root_item.lower()): return True return False From 232743854e06ad68eff562272372177437d4059e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 1 Dec 2022 15:58:01 +0100 Subject: [PATCH 049/171] OP-4504 - moved path parsing to global plugin CollectSources should run on all hosts whenever some output is expected. It seems to be best location to put parsing of source file, unless we want to create completely new separate plugin. 
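Roughly, the parsing that moves into the global collector amounts to the following (sketch only; parse_original_paths is a made-up helper name, the real logic runs inline in CollectResourcesPath.process):

    import os

    def parse_original_paths(instance_data):
        # "currentFile" comes from host workfiles, "source" from the
        # Tray Publisher; "currentFile" takes precedence when both exist.
        source_file = instance_data.get("currentFile") or instance_data.get("source")
        if not source_file or not os.path.exists(source_file):
            return
        # Only fill the keys if an earlier collector did not already.
        if not instance_data.get("originalBasename"):
            instance_data["originalBasename"] = os.path.basename(source_file)
        if not instance_data.get("originalDirname"):
            instance_data["originalDirname"] = os.path.dirname(source_file)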
--- openpype/hosts/traypublisher/api/plugin.py | 7 +++++- .../plugins/publish/collect_source.py | 6 ----- .../plugins/publish/collect_resources_path.py | 22 ++++++++++++++++++- 3 files changed, 27 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 75930f0f31..d559853fd1 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -1,4 +1,4 @@ -from openpype.lib.attribute_definitions import FileDef +from openpype.lib.attribute_definitions import FileDef, BoolDef from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS from openpype.pipeline.create import ( Creator, @@ -129,6 +129,11 @@ class SettingsCreator(TrayPublishCreator): single_item=True, label="Reviewable representations", extensions_label="Single reviewable item" + ), + BoolDef( + "publish_prepared", + default=False, + label="Just publish already prepared" ) ] diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_source.py b/openpype/hosts/traypublisher/plugins/publish/collect_source.py index 3d983a89ee..5121452ca8 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_source.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_source.py @@ -24,9 +24,3 @@ class CollectSource(pyblish.api.ContextPlugin): self.log.info(( "Source of instance \"{}\" was already set to \"{}\"" ).format(instance.data["name"], source)) - - if not instance.data.get("originalBasename"): - instance.data["originalBasename"] = os.path.basename(source) - - if not instance.data.get("originalDirname"): - instance.data["originalDirname"] = os.path.dirname(source) diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 00f65b8b67..383cea4a25 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -15,7 +15,11 @@ import pyblish.api class CollectResourcesPath(pyblish.api.InstancePlugin): - """Generate directory path where the files and resources will be stored""" + """Generate directory path where the files and resources will be stored. + + Collects folder name and file name from files, if exists, for in-situ + publishing. 
+ """ label = "Collect Resources Path" order = pyblish.api.CollectorOrder + 0.495 @@ -100,3 +104,19 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): self.log.debug("publishDir: \"{}\"".format(publish_folder)) self.log.debug("resourcesDir: \"{}\"".format(resources_folder)) + + # parse folder name and file name for online and source templates + # currentFile comes from hosts workfiles + # source comes from Publisher + current_file = instance.data.get("currentFile") + source = instance.data.get("source") + source_file = current_file or source + if os.path.exists(source_file): + self.log.debug("Parsing paths for {}".format(source_file)) + if not instance.data.get("originalBasename"): + instance.data["originalBasename"] = \ + os.path.basename(source_file) + + if not instance.data.get("originalDirname"): + instance.data["originalDirname"] = \ + os.path.dirname(source_file) From 499c32110cbf498234032c389585fb02d4aa9d5e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 1 Dec 2022 16:02:47 +0100 Subject: [PATCH 050/171] OP-4504 - revert of unwanted change --- openpype/hosts/traypublisher/api/plugin.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index d559853fd1..75930f0f31 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -1,4 +1,4 @@ -from openpype.lib.attribute_definitions import FileDef, BoolDef +from openpype.lib.attribute_definitions import FileDef from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS from openpype.pipeline.create import ( Creator, @@ -129,11 +129,6 @@ class SettingsCreator(TrayPublishCreator): single_item=True, label="Reviewable representations", extensions_label="Single reviewable item" - ), - BoolDef( - "publish_prepared", - default=False, - label="Just publish already prepared" ) ] From 9c381b4b51e9e017bb4366eb14e647d5f61389a6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 1 Dec 2022 18:37:47 +0100 Subject: [PATCH 051/171] OP-4504 - added logic for originalDirname into integrate Adding originalDirname to all hosts could be an ordeal, adding logic here is simpler, but might not be best solution. 
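A small, self-contained illustration of the intended fallback: when the instance did not collect originalDirname, the instance staging dir is used, and only the part below the project root is kept for the template data. The example values and the hard-coded rootless string are assumptions; in the real code the rootless form comes from get_rootless_path() backed by Anatomy:

    original_dirname = None  # what the instance collected, if anything
    instance_stagingdir = "P:/projects/proj/shots/sh010/work"  # assumed

    directory = original_dirname or instance_stagingdir
    # get_rootless_path(directory) is expected to return something like:
    rootless = "{root[work]}/proj/shots/sh010/work"
    without_root = rootless[rootless.rfind("}") + 2:]
    print(without_root)  # proj/shots/sh010/work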
--- openpype/plugins/publish/integrate.py | 35 +++++++++++++++------------ 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 44cee664b5..b48020860b 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -541,8 +541,25 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_data["representation"] = repre["name"] template_data["ext"] = repre["ext"] + stagingdir = repre.get("stagingDir") + if not stagingdir: + # Fall back to instance staging dir if not explicitly + # set for representation in the instance + self.log.debug(( + "Representation uses instance staging dir: {}" + ).format(instance_stagingdir)) + stagingdir = instance_stagingdir + + if not stagingdir: + raise KnownPublishError( + "No staging directory set for representation: {}".format(repre) + ) + # optionals # retrieve additional anatomy data from representation if exists + if not instance.data.get("originalDirname"): + instance.data["originalDirname"] = stagingdir + for key, anatomy_key in { # Representation Key: Anatomy data key "resolutionWidth": "resolution_width", @@ -561,20 +578,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): if value is not None: template_data[anatomy_key] = value - stagingdir = repre.get("stagingDir") - if not stagingdir: - # Fall back to instance staging dir if not explicitly - # set for representation in the instance - self.log.debug(( - "Representation uses instance staging dir: {}" - ).format(instance_stagingdir)) - stagingdir = instance_stagingdir - - if not stagingdir: - raise KnownPublishError( - "No staging directory set for representation: {}".format(repre) - ) - self.log.debug("Anatomy template name: {}".format(template_name)) anatomy = instance.context.data["anatomy"] publish_template_category = anatomy.templates[template_name] @@ -600,6 +603,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): )) src_collection = src_collections[0] + template_data["originalBasename"] = src_collection.head[:-1] destination_indexes = list(src_collection.indexes) # Use last frame for minimum padding # - that should cover both 'udim' and 'frame' minimum padding @@ -684,7 +688,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): raise KnownPublishError( "This is a bug. 
Representation file name is full path" ) - + if not template_data.get("originalBasename"): + template_data["originalBasename"] = fname # Manage anatomy template data template_data.pop("frame", None) if is_udim: From 1106f8cf5ad6d23051643c83fd894846127ec600 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 1 Dec 2022 18:38:24 +0100 Subject: [PATCH 052/171] OP-4504 - check for None --- openpype/plugins/publish/collect_resources_path.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 383cea4a25..0f55e65c9e 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -111,7 +111,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): current_file = instance.data.get("currentFile") source = instance.data.get("source") source_file = current_file or source - if os.path.exists(source_file): + if source_file and os.path.exists(source_file): self.log.debug("Parsing paths for {}".format(source_file)) if not instance.data.get("originalBasename"): instance.data["originalBasename"] = \ From b67c8e28a9115c16970596dfceaf2ada1e425ea0 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 1 Dec 2022 19:16:39 +0100 Subject: [PATCH 053/171] OP-4504 - cleanup of logic --- openpype/plugins/publish/integrate.py | 43 ++++++++++++++++++--------- 1 file changed, 29 insertions(+), 14 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index b48020860b..dbad90af93 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -293,17 +293,12 @@ class IntegrateAsset(pyblish.api.InstancePlugin): instance) for src, dst in prepared["transfers"]: - if src == dst: - self.log.info( - "Source '{}' same as destination '{}'. Skipping." - .format(src, dst)) + + if self._are_paths_same(src, dst): continue if not self._is_path_in_project_roots(anatomy.all_root_paths(), dst): - self.log.warning( - "Destination '{}' is not in project folder. Skipping" - .format(dst)) continue # todo: add support for hardlink transfers @@ -316,13 +311,20 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # .ma representation. Those destination paths are pre-defined, etc. # todo: should we move or simplify this logic? resource_destinations = set() - for src, dst in instance.data.get("transfers", []): - file_transactions.add(src, dst, mode=FileTransaction.MODE_COPY) - resource_destinations.add(os.path.abspath(dst)) - for src, dst in instance.data.get("hardlinks", []): - file_transactions.add(src, dst, mode=FileTransaction.MODE_HARDLINK) - resource_destinations.add(os.path.abspath(dst)) + file_copy_modes = [ + ("transfers", FileTransaction.MODE_COPY), + ("hardlinks", FileTransaction.MODE_HARDLINK) + ] + for files_type, copy_mode in zip(*file_copy_modes): # unpack + for src, dst in instance.data.get(files_type, []): + if self._are_paths_same(src, dst): + continue + if not self._is_path_in_project_roots(anatomy.all_root_paths(), + dst): + continue + file_transactions.add(src, dst, mode=copy_mode) + resource_destinations.add(os.path.abspath(dst)) # Bulk write to the database # We write the subset and version to the database before the File @@ -924,6 +926,19 @@ class IntegrateAsset(pyblish.api.InstancePlugin): for root_item in roots: if file_path.startswith(root_item.lower()): return True - + self.log.warning( + "Destination '{}' is not in project folder. 
Skipping" + .format(file_path)) return False + def _are_paths_same(self, src, dst): + src = str(src).replace("\\", "/").lower() + dst = str(dst).replace("\\", "/").lower() + + same = src == dst + if same: + self.log.info( + "Source '{}' same as destination '{}'. Skipping." + .format(src, dst)) + return same + From ddfaae8a798c9ba7ea12b56c4547f2e3214c8c6c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 14:28:26 +0100 Subject: [PATCH 054/171] OP-4504 - added source template to defaults Source template is used in-situ publishing, eg. use files at their location, don't copy them anywhere. --- openpype/settings/defaults/project_anatomy/templates.json | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_anatomy/templates.json b/openpype/settings/defaults/project_anatomy/templates.json index 0ac56a4dad..e4814257bc 100644 --- a/openpype/settings/defaults/project_anatomy/templates.json +++ b/openpype/settings/defaults/project_anatomy/templates.json @@ -53,11 +53,17 @@ "file": "{originalBasename}<.{@frame}><_{udim}>.{ext}", "path": "{@folder}/{@file}" }, + "source": { + "folder": "{originalBasename}<.{@frame}><_{udim}>.{ext}", + "file": "{originalDirname}", + "path": "{@folder}/{@file}" + }, "__dynamic_keys_labels__": { "maya2unreal": "Maya to Unreal", "simpleUnrealTextureHero": "Simple Unreal Texture - Hero", "simpleUnrealTexture": "Simple Unreal Texture", - "online": "online" + "online": "online", + "source": "source" } } } \ No newline at end of file From af3ebebb264ae559d98fa296c18da534d3590c66 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 14:47:01 +0100 Subject: [PATCH 055/171] OP-4504 - Hound --- openpype/hosts/traypublisher/plugins/publish/collect_source.py | 2 -- openpype/plugins/publish/integrate.py | 1 - 2 files changed, 3 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_source.py b/openpype/hosts/traypublisher/plugins/publish/collect_source.py index 5121452ca8..6ff22be13a 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_source.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_source.py @@ -1,5 +1,3 @@ -import os.path - import pyblish.api diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index dbad90af93..4c26f28862 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -941,4 +941,3 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "Source '{}' same as destination '{}'. Skipping." .format(src, dst)) return same - From d1ee451b9a547d3ff8d480022efe4d020d9f7bc7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 6 Dec 2022 15:06:21 +0100 Subject: [PATCH 056/171] OP-4504 - update logging --- openpype/plugins/publish/extract_thumbnail_from_source.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py index 8da1213807..1165c80318 100644 --- a/openpype/plugins/publish/extract_thumbnail_from_source.py +++ b/openpype/plugins/publish/extract_thumbnail_from_source.py @@ -76,7 +76,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): def _create_thumbnail(self, context, thumbnail_source): if not thumbnail_source: - self.log.debug("Thumbnail source not filled. Skipping.") + self.log.debug("Thumbnail source on context not filled. 
Skipping.") return if not os.path.exists(thumbnail_source): From c9496bcfe3271631124d557af9166c0a2c3879cc Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 7 Dec 2022 12:22:53 +0100 Subject: [PATCH 057/171] OP-4504 - change boolean test to validation with exception It actually shouldn't allow to publish into non project folder (like artists own c:/ drive). --- openpype/plugins/publish/integrate.py | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index f3683c4214..5e76521550 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -293,11 +293,10 @@ class IntegrateAsset(pyblish.api.InstancePlugin): instance) for src, dst in prepared["transfers"]: - if self._are_paths_same(src, dst): - continue + self._validate_path_in_project_roots(anatomy.all_root_paths(), + dst) - if not self._is_path_in_project_roots(anatomy.all_root_paths(), - dst): + if self._are_paths_same(src, dst): continue # todo: add support for hardlink transfers @@ -317,11 +316,10 @@ class IntegrateAsset(pyblish.api.InstancePlugin): ] for files_type, copy_mode in zip(*file_copy_modes): # unpack for src, dst in instance.data.get(files_type, []): + self._validate_path_in_project_roots(anatomy.all_root_paths(), + dst) if self._are_paths_same(src, dst): continue - if not self._is_path_in_project_roots(anatomy.all_root_paths(), - dst): - continue file_transactions.add(src, dst, mode=copy_mode) resource_destinations.add(os.path.abspath(dst)) @@ -910,7 +908,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "sites": sites } - def _is_path_in_project_roots(self, roots, file_path): + def _validate_path_in_project_roots(self, roots, file_path): """Checks if 'file_path' starts with any of the roots. Used to check that published path belongs to project, eg. we are not @@ -918,17 +916,17 @@ class IntegrateAsset(pyblish.api.InstancePlugin): Args: roots (list of RootItem): {ROOT_NAME: ROOT_PATH} file_path (str) - Returns: - (bool) + Raises + (KnownPublishError) """ file_path = str(file_path).replace("\\", "/").lower() for root_item in roots: if file_path.startswith(root_item.lower()): return True - self.log.warning( - "Destination '{}' is not in project folder. 
Skipping" - .format(file_path)) - return False + raise KnownPublishError(( + "Destination path {} ".format(file_path) + + "must be in project dir" + )) def _are_paths_same(self, src, dst): src = str(src).replace("\\", "/").lower() From 3fabd516ea02b762505377e4d60853e6ebc60c9e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 7 Dec 2022 12:25:16 +0100 Subject: [PATCH 058/171] OP-4504 - weird unpacking not necessary --- openpype/plugins/publish/integrate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 5e76521550..6b359af1d2 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -314,7 +314,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): ("transfers", FileTransaction.MODE_COPY), ("hardlinks", FileTransaction.MODE_HARDLINK) ] - for files_type, copy_mode in zip(*file_copy_modes): # unpack + for files_type, copy_mode in file_copy_modes: for src, dst in instance.data.get(files_type, []): self._validate_path_in_project_roots(anatomy.all_root_paths(), dst) From 0a12a42460cef6770b4761d935cce38a2f6fc1cb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 7 Dec 2022 12:29:44 +0100 Subject: [PATCH 059/171] OP-4504 - use existing method to check if path in project --- openpype/plugins/publish/integrate.py | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 6b359af1d2..ce31831f1e 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -293,8 +293,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): instance) for src, dst in prepared["transfers"]: - self._validate_path_in_project_roots(anatomy.all_root_paths(), - dst) + self._validate_path_in_project_roots(anatomy, dst) if self._are_paths_same(src, dst): continue @@ -316,8 +315,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): ] for files_type, copy_mode in file_copy_modes: for src, dst in instance.data.get(files_type, []): - self._validate_path_in_project_roots(anatomy.all_root_paths(), - dst) + self._validate_path_in_project_roots(anatomy, dst) if self._are_paths_same(src, dst): continue file_transactions.add(src, dst, mode=copy_mode) @@ -908,25 +906,23 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "sites": sites } - def _validate_path_in_project_roots(self, roots, file_path): + def _validate_path_in_project_roots(self, anatomy, file_path): """Checks if 'file_path' starts with any of the roots. Used to check that published path belongs to project, eg. we are not trying to publish to local only folder. 
Args: - roots (list of RootItem): {ROOT_NAME: ROOT_PATH} + anatomy (Anatomy) file_path (str) Raises (KnownPublishError) """ - file_path = str(file_path).replace("\\", "/").lower() - for root_item in roots: - if file_path.startswith(root_item.lower()): - return True - raise KnownPublishError(( - "Destination path {} ".format(file_path) + - "must be in project dir" - )) + found, _ = anatomy.find_root_template_from_path(file_path) + if not found: + raise KnownPublishError(( + "Destination path {} ".format(file_path) + + "must be in project dir" + )) def _are_paths_same(self, src, dst): src = str(src).replace("\\", "/").lower() From cccd9c61ddac217d59480c8fafee4eba8b569412 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 7 Dec 2022 12:34:26 +0100 Subject: [PATCH 060/171] OP-4504 - logging message is wrong as it is called on instances also --- openpype/plugins/publish/extract_thumbnail_from_source.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py index 084815915f..03df1455e2 100644 --- a/openpype/plugins/publish/extract_thumbnail_from_source.py +++ b/openpype/plugins/publish/extract_thumbnail_from_source.py @@ -77,7 +77,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): def _create_thumbnail(self, context, thumbnail_source): if not thumbnail_source: - self.log.debug("Thumbnail source on context not filled. Skipping.") + self.log.debug("Thumbnail source not filled. Skipping.") return if not os.path.exists(thumbnail_source): From 30eca6f6ca3487bee2b758e85cce6dbeb0d26354 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 7 Dec 2022 14:10:52 +0100 Subject: [PATCH 061/171] OP-4504 - fix default source template --- openpype/settings/defaults/project_anatomy/templates.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_anatomy/templates.json b/openpype/settings/defaults/project_anatomy/templates.json index e4814257bc..32230e0625 100644 --- a/openpype/settings/defaults/project_anatomy/templates.json +++ b/openpype/settings/defaults/project_anatomy/templates.json @@ -54,8 +54,8 @@ "path": "{@folder}/{@file}" }, "source": { - "folder": "{originalBasename}<.{@frame}><_{udim}>.{ext}", - "file": "{originalDirname}", + "folder": "{root[work]}/{originalDirname}", + "file": "{originalBasename}<.{@frame}><_{udim}>.{ext}", "path": "{@folder}/{@file}" }, "__dynamic_keys_labels__": { From 4c53a77a83fd4d79825f11339d7561c23f3797f4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 7 Dec 2022 17:48:59 +0100 Subject: [PATCH 062/171] OP-4504 - make root comparison case insensitive for windows find_root_template_from_path tries to find root in passed path with case sensitivity, on Windows it doesn't make sense C:// == c://. Keep all other paths case sensitive. 
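The intended comparison, as a self-contained sketch (the helper name and the sample paths are made up for illustration; the actual change sits in RootItem.find_root_template_from_path):

    def matches_root(root_os, root_path, path):
        # Lowercase both sides only for the "windows" root entry;
        # every other platform stays case sensitive.
        if root_os == "windows":
            root_path = root_path.lower()
            path = path.lower()
        return path.startswith(root_path)

    print(matches_root("windows", "C:/projects", "c:/Projects/ep01/sh010"))  # True
    print(matches_root("linux", "/mnt/Projects", "/mnt/projects/ep01"))      # False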
--- openpype/pipeline/anatomy.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index 908dc2b187..a50f8f67bb 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -1106,17 +1106,21 @@ class RootItem(FormatObject): result = False output = str(path) - root_paths = list(self.cleaned_data.values()) mod_path = self.clean_path(path) - for root_path in root_paths: + for root_os, root_path in self.cleaned_data.items(): # Skip empty paths if not root_path: continue - if mod_path.startswith(root_path): + _mod_path = mod_path # reset to original cleaned value + if root_os == "windows": + root_path = root_path.lower() + _mod_path = _mod_path.lower() + + if _mod_path.startswith(root_path): result = True replacement = "{" + self.full_key() + "}" - output = replacement + mod_path[len(root_path):] + output = replacement + _mod_path[len(root_path):] break return (result, output) @@ -1206,6 +1210,7 @@ class Roots: Raises: ValueError: When roots are not entered and can't be loaded. """ + print("!roots::{}".format(roots)) if roots is None: log.debug( "Looking for matching root in path \"{}\".".format(path) @@ -1216,10 +1221,12 @@ class Roots: raise ValueError("Roots are not set. Can't find path.") if isinstance(roots, RootItem): + print("here") return roots.find_root_template_from_path(path) for root_name, _root in roots.items(): - success, result = self.find_root_template_from_path(path, _root) + print("root::{}".format(_root)) + success, result = self.find_root_template_from_path(path.lower(), _root) if success: log.info("Found match in root \"{}\".".format(root_name)) return success, result From 726c8f2cc12f5362fc255f2b269c5b356794b0e6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 7 Dec 2022 17:51:41 +0100 Subject: [PATCH 063/171] OP-4504 - fix resolving of originalDirname If instance has originalDirname collected, all repres should use this as a target folder (that allows copying transient items from temporary folders into folder where source item comes from). --- openpype/plugins/publish/integrate.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index ce31831f1e..45710c8f41 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -554,17 +554,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # optionals # retrieve additional anatomy data from representation if exists - if not instance.data.get("originalDirname"): - instance.data["originalDirname"] = stagingdir - for key, anatomy_key in { # Representation Key: Anatomy data key "resolutionWidth": "resolution_width", "resolutionHeight": "resolution_height", "fps": "fps", "outputName": "output", - "originalBasename": "originalBasename", - "originalDirname": "originalDirname" + "originalBasename": "originalBasename" }.items(): # Allow to take value from representation # if not found also consider instance.data @@ -582,6 +578,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin): is_udim = bool(repre.get("udim")) + # store as originalDirname only original value without project root + # if instance collected originalDirname it should be used for all repre + # useful to storing transient items, eg. 
thumbnails, from temp to final + original_directory = instance.data.get("originalDirname") or stagingdir + _rootless = self.get_rootless_path(anatomy, original_directory) + without_root = _rootless[_rootless.rfind('}')+2:] + template_data["originalDirname"] = without_root + is_sequence_representation = isinstance(files, (list, tuple)) if is_sequence_representation: # Collection of files (sequence) @@ -685,8 +689,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): raise KnownPublishError( "This is a bug. Representation file name is full path" ) - if not template_data.get("originalBasename"): - template_data["originalBasename"] = fname + template_data["originalBasename"] = fname # Manage anatomy template data template_data.pop("frame", None) if is_udim: @@ -917,8 +920,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): Raises (KnownPublishError) """ - found, _ = anatomy.find_root_template_from_path(file_path) - if not found: + path = self.get_rootless_path(anatomy, file_path) + if not path: raise KnownPublishError(( "Destination path {} ".format(file_path) + "must be in project dir" From a1f85d3978e0ad87c13c8f415b9b91a375429a67 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 7 Dec 2022 17:57:10 +0100 Subject: [PATCH 064/171] OP-4504 - use always stagingDir from instance instead of repre Representation stagingDir might be in temporary folders (for thumbnails etc.), use value from instance as a backup instead. --- openpype/plugins/publish/integrate.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 45710c8f41..1b79b5b858 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -581,7 +581,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # store as originalDirname only original value without project root # if instance collected originalDirname it should be used for all repre # useful to storing transient items, eg. thumbnails, from temp to final - original_directory = instance.data.get("originalDirname") or stagingdir + original_directory = ( + instance.data.get("originalDirname") or instance_stagingdir) _rootless = self.get_rootless_path(anatomy, original_directory) without_root = _rootless[_rootless.rfind('}')+2:] template_data["originalDirname"] = without_root @@ -694,7 +695,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_data.pop("frame", None) if is_udim: template_data["udim"] = repre["udim"][0] - # Construct destination filepath from template anatomy_filled = anatomy.format(template_data) template_filled = anatomy_filled[template_name]["path"] From d8ed8998b2274699760a88f99bb4d09a97fd1d51 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 7 Dec 2022 18:00:24 +0100 Subject: [PATCH 065/171] OP-4504 - removed unwanted lower Removed logging --- openpype/pipeline/anatomy.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py index a50f8f67bb..969e1570fb 100644 --- a/openpype/pipeline/anatomy.py +++ b/openpype/pipeline/anatomy.py @@ -1210,7 +1210,6 @@ class Roots: Raises: ValueError: When roots are not entered and can't be loaded. """ - print("!roots::{}".format(roots)) if roots is None: log.debug( "Looking for matching root in path \"{}\".".format(path) @@ -1221,12 +1220,10 @@ class Roots: raise ValueError("Roots are not set. 
Can't find path.") if isinstance(roots, RootItem): - print("here") return roots.find_root_template_from_path(path) for root_name, _root in roots.items(): - print("root::{}".format(_root)) - success, result = self.find_root_template_from_path(path.lower(), _root) + success, result = self.find_root_template_from_path(path, _root) if success: log.info("Found match in root \"{}\".".format(root_name)) return success, result From 98f45c24a6a697ae533a48c17cc5ebc9b7930dbf Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 7 Dec 2022 18:02:54 +0100 Subject: [PATCH 066/171] OP-4504 - Hound --- openpype/plugins/publish/integrate.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 1b79b5b858..7ef279d787 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -584,7 +584,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): original_directory = ( instance.data.get("originalDirname") or instance_stagingdir) _rootless = self.get_rootless_path(anatomy, original_directory) - without_root = _rootless[_rootless.rfind('}')+2:] + relative_path_start = _rootless.rfind('}') + 2 + without_root = _rootless[relative_path_start:] template_data["originalDirname"] = without_root is_sequence_representation = isinstance(files, (list, tuple)) @@ -923,8 +924,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): path = self.get_rootless_path(anatomy, file_path) if not path: raise KnownPublishError(( - "Destination path {} ".format(file_path) + - "must be in project dir" + "Destination path {} ".format(file_path) + + "must be in project dir" )) def _are_paths_same(self, src, dst): From fc10b26ea0610dc5d32ed7c2ce285dac8fdef9eb Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 8 Dec 2022 11:20:24 +0000 Subject: [PATCH 067/171] Implemented creator and extractor for uasset --- .../unreal/plugins/create/create_uasset.py | 61 +++++++++++++++++++ .../unreal/plugins/publish/extract_uasset.py | 45 ++++++++++++++ openpype/plugins/publish/integrate.py | 3 +- 3 files changed, 108 insertions(+), 1 deletion(-) create mode 100644 openpype/hosts/unreal/plugins/create/create_uasset.py create mode 100644 openpype/hosts/unreal/plugins/publish/extract_uasset.py diff --git a/openpype/hosts/unreal/plugins/create/create_uasset.py b/openpype/hosts/unreal/plugins/create/create_uasset.py new file mode 100644 index 0000000000..ee584ac00c --- /dev/null +++ b/openpype/hosts/unreal/plugins/create/create_uasset.py @@ -0,0 +1,61 @@ +"""Create UAsset.""" +from pathlib import Path + +import unreal + +from openpype.hosts.unreal.api import pipeline +from openpype.pipeline import LegacyCreator + + +class CreateUAsset(LegacyCreator): + """UAsset.""" + + name = "UAsset" + label = "UAsset" + family = "uasset" + icon = "cube" + + root = "/Game/OpenPype" + suffix = "_INS" + + def __init__(self, *args, **kwargs): + super(CreateUAsset, self).__init__(*args, **kwargs) + + def process(self): + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + subset = self.data["subset"] + path = f"{self.root}/PublishInstances/" + + unreal.EditorAssetLibrary.make_directory(path) + + selection = [] + if (self.options or {}).get("useSelection"): + sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() + selection = [a.get_path_name() for a in sel_objects] + + if len(selection) != 1: + raise RuntimeError("Please select only one object.") + + obj = selection[0] + + asset = 
ar.get_asset_by_object_path(obj).get_asset() + sys_path = unreal.SystemLibrary.get_system_path(asset) + + if not sys_path: + raise RuntimeError( + f"{Path(obj).name} is not on the disk. Likely it needs to" + "be saved first.") + + if Path(sys_path).suffix != ".uasset": + raise RuntimeError(f"{Path(sys_path).name} is not a UAsset.") + + unreal.log("selection: {}".format(selection)) + container_name = f"{subset}{self.suffix}" + pipeline.create_publish_instance( + instance=container_name, path=path) + + data = self.data.copy() + data["members"] = selection + + pipeline.imprint(f"{path}/{container_name}", data) diff --git a/openpype/hosts/unreal/plugins/publish/extract_uasset.py b/openpype/hosts/unreal/plugins/publish/extract_uasset.py new file mode 100644 index 0000000000..99279e38a1 --- /dev/null +++ b/openpype/hosts/unreal/plugins/publish/extract_uasset.py @@ -0,0 +1,45 @@ +from pathlib import Path +import shutil + +import unreal +from unreal import EditorLevelLibrary as ell +from unreal import EditorAssetLibrary as eal + +from openpype.client import get_representation_by_name +from openpype.pipeline import legacy_io, publish + + +class ExtractUAsset(publish.Extractor): + """Extract a UAsset.""" + + label = "Extract UAsset" + hosts = ["unreal"] + families = ["uasset"] + optional = True + + def process(self, instance): + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + self.log.info("Performing extraction..") + + staging_dir = self.staging_dir(instance) + filename = "{}.uasset".format(instance.name) + + obj = instance[0] + + asset = ar.get_asset_by_object_path(obj).get_asset() + sys_path = unreal.SystemLibrary.get_system_path(asset) + filename = Path(sys_path).name + + shutil.copy(sys_path, staging_dir) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'uasset', + 'ext': 'uasset', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 6a85a87129..6efff8440c 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -130,7 +130,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "mvUsdComposition", "mvUsdOverride", "simpleUnrealTexture", - "online" + "online", + "uasset" ] default_template_name = "publish" From 2b566bb594229d452dde7cb871239e07fac9366c Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 8 Dec 2022 11:20:47 +0000 Subject: [PATCH 068/171] Implemented balidator to check if the uasset has any dependency --- .../publish/validate_no_dependencies.py | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 openpype/hosts/unreal/plugins/publish/validate_no_dependencies.py diff --git a/openpype/hosts/unreal/plugins/publish/validate_no_dependencies.py b/openpype/hosts/unreal/plugins/publish/validate_no_dependencies.py new file mode 100644 index 0000000000..b7f42a772b --- /dev/null +++ b/openpype/hosts/unreal/plugins/publish/validate_no_dependencies.py @@ -0,0 +1,40 @@ +import unreal + +import pyblish.api + + +class ValidateNoDependencies(pyblish.api.InstancePlugin): + """Ensure that the uasset has no dependencies + + The uasset is checked for dependencies. If there are any, the instance + cannot be published. 
+ """ + + order = pyblish.api.ValidatorOrder + label = "Check no dependencies" + families = ["uasset"] + hosts = ["unreal"] + optional = True + + def process(self, instance): + ar = unreal.AssetRegistryHelpers.get_asset_registry() + all_dependencies = [] + + for obj in instance[:]: + asset = ar.get_asset_by_object_path(obj) + dependencies = ar.get_dependencies( + asset.package_name, + unreal.AssetRegistryDependencyOptions( + include_soft_package_references=True, + include_hard_package_references=True, + include_searchable_names=False, + include_soft_management_references=False, + include_hard_management_references=False + )) + if dependencies: + for dep in dependencies: + all_dependencies.append(str(dep)) + + if all_dependencies: + raise RuntimeError( + f"Dependencies found: {all_dependencies}") From ee3d88756cbb75deb966fab37c8369d8a4d9df6d Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 8 Dec 2022 13:00:31 +0000 Subject: [PATCH 069/171] Implemented loading --- .../hosts/unreal/plugins/load/load_uasset.py | 91 +++++++++++++++++++ 1 file changed, 91 insertions(+) create mode 100644 openpype/hosts/unreal/plugins/load/load_uasset.py diff --git a/openpype/hosts/unreal/plugins/load/load_uasset.py b/openpype/hosts/unreal/plugins/load/load_uasset.py new file mode 100644 index 0000000000..e3f967c43d --- /dev/null +++ b/openpype/hosts/unreal/plugins/load/load_uasset.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +"""Load UAsset.""" +from pathlib import Path +import shutil + +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID +) +from openpype.hosts.unreal.api import plugin +from openpype.hosts.unreal.api import pipeline as unreal_pipeline +import unreal # noqa + + +class UAssetLoader(plugin.Loader): + """Load UAsset.""" + + families = ["uasset"] + label = "Load UAsset" + representations = ["uasset"] + icon = "cube" + color = "orange" + + def load(self, context, name, namespace, options): + """Load and containerise representation into Content Browser. + + Args: + context (dict): application context + name (str): subset name + namespace (str): in Unreal this is basically path to container. + This is not passed here, so namespace is set + by `containerise()` because only then we know + real path. + options (dict): Those would be data to be imprinted. This is not + used now, data are imprinted by `containerise()`. 
+ + Returns: + list(str): list of container content + """ + + # Create directory for asset and OpenPype container + root = "/Game/OpenPype/Assets" + if options and options.get("asset_dir"): + root = options["asset_dir"] + asset = context.get('asset').get('name') + suffix = "_CON" + if asset: + asset_name = "{}_{}".format(asset, name) + else: + asset_name = "{}".format(name) + + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, container_name = tools.create_unique_asset_name( + "{}/{}/{}".format(root, asset, name), suffix="") + + container_name += suffix + + unreal.EditorAssetLibrary.make_directory(asset_dir) + + # Create Asset Container + container = unreal_pipeline.create_container( + container=container_name, path=asset_dir) + + container_path = unreal.SystemLibrary.get_system_path(container) + destination_path = Path(container_path).parent.as_posix() + + shutil.copy(self.fname, destination_path) + + data = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) + + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + return asset_content From a847626aac962d0f954b14a8de0e341793092d69 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 8 Dec 2022 16:12:48 +0000 Subject: [PATCH 070/171] Don't use container path to get destination folder --- .../hosts/unreal/plugins/load/load_uasset.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_uasset.py b/openpype/hosts/unreal/plugins/load/load_uasset.py index e3f967c43d..76c4de1fbe 100644 --- a/openpype/hosts/unreal/plugins/load/load_uasset.py +++ b/openpype/hosts/unreal/plugins/load/load_uasset.py @@ -40,8 +40,6 @@ class UAssetLoader(plugin.Loader): # Create directory for asset and OpenPype container root = "/Game/OpenPype/Assets" - if options and options.get("asset_dir"): - root = options["asset_dir"] asset = context.get('asset').get('name') suffix = "_CON" if asset: @@ -57,15 +55,17 @@ class UAssetLoader(plugin.Loader): unreal.EditorAssetLibrary.make_directory(asset_dir) - # Create Asset Container - container = unreal_pipeline.create_container( - container=container_name, path=asset_dir) - - container_path = unreal.SystemLibrary.get_system_path(container) - destination_path = Path(container_path).parent.as_posix() + destination_path = asset_dir.replace( + "/Game", + Path(unreal.Paths.project_content_dir()).as_posix(), + 1) shutil.copy(self.fname, destination_path) + # Create Asset Container + unreal_pipeline.create_container( + container=container_name, path=asset_dir) + data = { "schema": "openpype:container-2.0", "id": AVALON_CONTAINER_ID, From bc574e5e5e8174c69528e3834465de677b857115 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 8 Dec 2022 17:23:10 +0000 Subject: [PATCH 071/171] Implemented update and remove --- .../hosts/unreal/plugins/load/load_uasset.py | 56 ++++++++++++++++++- 1 file changed, 55 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/plugins/load/load_uasset.py 
b/openpype/hosts/unreal/plugins/load/load_uasset.py index 76c4de1fbe..eccfc7b445 100644 --- a/openpype/hosts/unreal/plugins/load/load_uasset.py +++ b/openpype/hosts/unreal/plugins/load/load_uasset.py @@ -60,7 +60,7 @@ class UAssetLoader(plugin.Loader): Path(unreal.Paths.project_content_dir()).as_posix(), 1) - shutil.copy(self.fname, destination_path) + shutil.copy(self.fname, f"{destination_path}/{name}.uasset") # Create Asset Container unreal_pipeline.create_container( @@ -89,3 +89,57 @@ class UAssetLoader(plugin.Loader): unreal.EditorAssetLibrary.save_asset(a) return asset_content + + def update(self, container, representation): + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + asset_dir = container["namespace"] + name = representation["context"]["subset"] + + destination_path = asset_dir.replace( + "/Game", + Path(unreal.Paths.project_content_dir()).as_posix(), + 1) + + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=False, include_folder=True + ) + + for asset in asset_content: + obj = ar.get_asset_by_object_path(asset).get_asset() + if not obj.get_class().get_name() == 'AssetContainer': + unreal.EditorAssetLibrary.delete_asset(asset) + + update_filepath = get_representation_path(representation) + + shutil.copy(update_filepath, f"{destination_path}/{name}.uasset") + + container_path = "{}/{}".format(container["namespace"], + container["objectName"]) + # update metadata + unreal_pipeline.imprint( + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) + + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + def remove(self, container): + path = container["namespace"] + parent_path = Path(path).parent.as_posix() + + unreal.EditorAssetLibrary.delete_directory(path) + + asset_content = unreal.EditorAssetLibrary.list_assets( + parent_path, recursive=False + ) + + if len(asset_content) == 0: + unreal.EditorAssetLibrary.delete_directory(parent_path) From 976ba9b6ce5396da651d08b032fab052856fd3f4 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 8 Dec 2022 17:28:26 +0000 Subject: [PATCH 072/171] Hound fixes --- openpype/hosts/unreal/plugins/publish/extract_uasset.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/openpype/hosts/unreal/plugins/publish/extract_uasset.py b/openpype/hosts/unreal/plugins/publish/extract_uasset.py index 99279e38a1..89d779d368 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_uasset.py +++ b/openpype/hosts/unreal/plugins/publish/extract_uasset.py @@ -2,11 +2,8 @@ from pathlib import Path import shutil import unreal -from unreal import EditorLevelLibrary as ell -from unreal import EditorAssetLibrary as eal -from openpype.client import get_representation_by_name -from openpype.pipeline import legacy_io, publish +from openpype.pipeline import publish class ExtractUAsset(publish.Extractor): From fe0336c4359c519dd787aea5f5683e4fa871c171 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 10:34:01 +0100 Subject: [PATCH 073/171] OP-4504 - removed path comparison function Obsolete as it is part of file_transaction file --- openpype/plugins/publish/integrate.py | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 0a885733bd..041f7b1b19 100644 --- a/openpype/plugins/publish/integrate.py 
+++ b/openpype/plugins/publish/integrate.py @@ -297,9 +297,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): for src, dst in prepared["transfers"]: self._validate_path_in_project_roots(anatomy, dst) - if self._are_paths_same(src, dst): - continue - # todo: add support for hardlink transfers file_transactions.add(src, dst) @@ -318,8 +315,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): for files_type, copy_mode in file_copy_modes: for src, dst in instance.data.get(files_type, []): self._validate_path_in_project_roots(anatomy, dst) - if self._are_paths_same(src, dst): - continue + file_transactions.add(src, dst, mode=copy_mode) resource_destinations.add(os.path.abspath(dst)) @@ -929,14 +925,3 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "Destination path {} ".format(file_path) + "must be in project dir" )) - - def _are_paths_same(self, src, dst): - src = str(src).replace("\\", "/").lower() - dst = str(dst).replace("\\", "/").lower() - - same = src == dst - if same: - self.log.info( - "Source '{}' same as destination '{}'. Skipping." - .format(src, dst)) - return same From e026a27b9380467eed8ae2d2ecc082394f77cc2d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 12 Dec 2022 12:11:33 +0100 Subject: [PATCH 074/171] hiero: fixing thumbnail if multillayer exr --- openpype/hosts/hiero/plugins/publish/extract_thumbnail.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py b/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py index e64aa89b26..5ca79dc1dc 100644 --- a/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py @@ -41,7 +41,7 @@ class ExtractThumnail(publish.Extractor): track_item_name, thumb_frame, ".png") thumb_path = os.path.join(staging_dir, thumb_file) - thumbnail = track_item.thumbnail(thumb_frame).save( + thumbnail = track_item.thumbnail(thumb_frame, "colour").save( thumb_path, format='png' ) From d18fc94c01bb2044ef209658f4cab32f8cd87ee8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 12:41:26 +0100 Subject: [PATCH 075/171] OP-4504 - fix for deadline publishing --- openpype/plugins/publish/integrate.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 041f7b1b19..ce37a53c65 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -581,10 +581,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # useful to storing transient items, eg. 
thumbnails, from temp to final original_directory = ( instance.data.get("originalDirname") or instance_stagingdir) - _rootless = self.get_rootless_path(anatomy, original_directory) - relative_path_start = _rootless.rfind('}') + 2 - without_root = _rootless[relative_path_start:] - template_data["originalDirname"] = without_root + if original_directory: + _rootless = self.get_rootless_path(anatomy, original_directory) + relative_path_start = _rootless.rfind('}') + 2 + without_root = _rootless[relative_path_start:] + template_data["originalDirname"] = without_root is_sequence_representation = isinstance(files, (list, tuple)) if is_sequence_representation: From 8a267f6c340bc31d4c15dd5d90bc7c534d1a03af Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 16:18:25 +0100 Subject: [PATCH 076/171] OP-4504 - handle originalDirname only if in template --- openpype/plugins/publish/integrate.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index ce37a53c65..4692cefe4d 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -295,8 +295,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): instance) for src, dst in prepared["transfers"]: - self._validate_path_in_project_roots(anatomy, dst) - # todo: add support for hardlink transfers file_transactions.add(src, dst) @@ -576,13 +574,21 @@ class IntegrateAsset(pyblish.api.InstancePlugin): is_udim = bool(repre.get("udim")) - # store as originalDirname only original value without project root - # if instance collected originalDirname it should be used for all repre - # useful to storing transient items, eg. thumbnails, from temp to final - original_directory = ( - instance.data.get("originalDirname") or instance_stagingdir) - if original_directory: + # handle publish in place + if "originalDirname" in template: + # store as originalDirname only original value without project root + # if instance collected originalDirname is present, it should be + # used for all represe + # from temp to final + original_directory = ( + instance.data.get("originalDirname") or instance_stagingdir) + _rootless = self.get_rootless_path(anatomy, original_directory) + if _rootless == original_directory: + raise KnownPublishError(( + "Destination path '{}' ".format(original_directory) + + "must be in project dir" + )) relative_path_start = _rootless.rfind('}') + 2 without_root = _rootless[relative_path_start:] template_data["originalDirname"] = without_root @@ -923,6 +929,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): path = self.get_rootless_path(anatomy, file_path) if not path: raise KnownPublishError(( - "Destination path {} ".format(file_path) + + "Destination path '{}' ".format(file_path) + "must be in project dir" )) From d9e6bedf3b7fbf3c30825df4aff91a4965bd7d7c Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 13 Dec 2022 10:35:15 +0000 Subject: [PATCH 077/171] Do not check soft references --- .../hosts/unreal/plugins/publish/validate_no_dependencies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/plugins/publish/validate_no_dependencies.py b/openpype/hosts/unreal/plugins/publish/validate_no_dependencies.py index b7f42a772b..79d54306c4 100644 --- a/openpype/hosts/unreal/plugins/publish/validate_no_dependencies.py +++ b/openpype/hosts/unreal/plugins/publish/validate_no_dependencies.py @@ -25,7 +25,7 @@ class 
ValidateNoDependencies(pyblish.api.InstancePlugin): dependencies = ar.get_dependencies( asset.package_name, unreal.AssetRegistryDependencyOptions( - include_soft_package_references=True, + include_soft_package_references=False, include_hard_package_references=True, include_searchable_names=False, include_soft_management_references=False, From bf10a77fb6f1dcde764020de3588e54391df9f4a Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 13 Dec 2022 10:50:38 +0000 Subject: [PATCH 078/171] Check only dependencies that are in the Content folder We ignore native dependencies and dependencies from plugins --- .../hosts/unreal/plugins/publish/validate_no_dependencies.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/plugins/publish/validate_no_dependencies.py b/openpype/hosts/unreal/plugins/publish/validate_no_dependencies.py index 79d54306c4..c760129550 100644 --- a/openpype/hosts/unreal/plugins/publish/validate_no_dependencies.py +++ b/openpype/hosts/unreal/plugins/publish/validate_no_dependencies.py @@ -33,7 +33,8 @@ class ValidateNoDependencies(pyblish.api.InstancePlugin): )) if dependencies: for dep in dependencies: - all_dependencies.append(str(dep)) + if str(dep).startswith("/Game/"): + all_dependencies.append(str(dep)) if all_dependencies: raise RuntimeError( From 9bf00f9cfebb4c5b0ec6586672308d1a74cd6376 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 14 Dec 2022 14:41:52 +0100 Subject: [PATCH 079/171] OP-4504 - added collector for originalDirname --- .../plugins/publish/collect_resources_path.py | 16 ------- .../publish/collect_source_for_source.py | 43 +++++++++++++++++++ 2 files changed, 43 insertions(+), 16 deletions(-) create mode 100644 openpype/plugins/publish/collect_source_for_source.py diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index ea86ab93b4..dcd80fbbdf 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -106,19 +106,3 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): self.log.debug("publishDir: \"{}\"".format(publish_folder)) self.log.debug("resourcesDir: \"{}\"".format(resources_folder)) - - # parse folder name and file name for online and source templates - # currentFile comes from hosts workfiles - # source comes from Publisher - current_file = instance.data.get("currentFile") - source = instance.data.get("source") - source_file = current_file or source - if source_file and os.path.exists(source_file): - self.log.debug("Parsing paths for {}".format(source_file)) - if not instance.data.get("originalBasename"): - instance.data["originalBasename"] = \ - os.path.basename(source_file) - - if not instance.data.get("originalDirname"): - instance.data["originalDirname"] = \ - os.path.dirname(source_file) diff --git a/openpype/plugins/publish/collect_source_for_source.py b/openpype/plugins/publish/collect_source_for_source.py new file mode 100644 index 0000000000..345daa6fe8 --- /dev/null +++ b/openpype/plugins/publish/collect_source_for_source.py @@ -0,0 +1,43 @@ +""" +Requires: + instance -> currentFile + instance -> source + +Provides: + instance -> originalBasename + instance -> originalDirname +""" + +import os +import copy + +import pyblish.api + + +class CollectSourceForSource(pyblish.api.InstancePlugin): + """Collects source location of file for instance. + + Used for 'source' template name which handles in place publishing. 
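# Illustrative sketch (not part of this patch series): how the
# 'originalDirname' collected by this plugin is consumed downstream. The
# integrate.py changes above trim the project root off the anatomy
# "rootless" path before it is written into template data. The path value
# below is a made-up example.
_rootless = "{root[work]}/demo/shots/sh010/publish"
relative_path_start = _rootless.rfind('}') + 2   # index just past "}/"
without_root = _rootless[relative_path_start:]
assert without_root == "demo/shots/sh010/publish"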
+ For this kind of publishing files are present with correct file name + pattern and correct location. + """ + + label = "Collect Source" + order = pyblish.api.CollectorOrder + 0.495 + + def process(self, instance): + # parse folder name and file name for online and source templates + # currentFile comes from hosts workfiles + # source comes from Publisher + current_file = instance.data.get("currentFile") + source = instance.data.get("source") + source_file = current_file or source + if source_file and os.path.exists(source_file): + self.log.debug("Parsing paths for {}".format(source_file)) + if not instance.data.get("originalBasename"): + instance.data["originalBasename"] = \ + os.path.basename(source_file) + + if not instance.data.get("originalDirname"): + instance.data["originalDirname"] = \ + os.path.dirname(source_file) From 3cf3fd65b1fe61ee339caf5fc69352e3c9ab1851 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 14 Dec 2022 14:42:51 +0100 Subject: [PATCH 080/171] OP-4504 - added validator for source template Check is output template is 'source' if originalDirname is collected and if it is inside of project --- .../plugins/publish/validate_publish_dir.py | 69 +++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 openpype/plugins/publish/validate_publish_dir.py diff --git a/openpype/plugins/publish/validate_publish_dir.py b/openpype/plugins/publish/validate_publish_dir.py new file mode 100644 index 0000000000..03fc47347d --- /dev/null +++ b/openpype/plugins/publish/validate_publish_dir.py @@ -0,0 +1,69 @@ +import pyblish.api +from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline.publish import ( + KnownPublishError, + get_publish_template_name, +) + + +class ValidatePublishDir(pyblish.api.InstancePlugin): + """Validates if 'publishDir' is a project directory + + 'publishDir' is collected based on publish templates. In specific cases + ('source' template) source folder of items is used as a 'publishDir', this + validates if it is inside any project dir for the project. + (eg. files are not published from local folder, unaccessible for studio' + + """ + + order = ValidateContentsOrder + label = "Validate publish dir" + + checked_template_names = ["source"] + # validate instances might have interim family, needs to be mapped to final + family_mapping = { + "renderLayer": "render", + "renderLocal": "render" + } + + def process(self, instance): + + template_name = self._get_template_name_from_instance(instance) + + if template_name not in self.checked_template_names: + return + + original_dirname = instance.data.get("originalDirname") + if not original_dirname: + raise KnownPublishError("Instance meant for in place publishing." + " Its 'originalDirname' must be collected." + " Contact OP developer to modify collector" + ) + + anatomy = instance.context.data["anatomy"] + + success, _ = anatomy.find_root_template_from_path(original_dirname) + self.log.info(_) + if not success: + raise KnownPublishError( + "Path '{}' not in project folder.".format(original_dirname) + + " Please publish from inside of project folder." 
+ ) + + def _get_template_name_from_instance(self, instance): + project_name = instance.context.data["projectName"] + host_name = instance.context.data["hostName"] + anatomy_data = instance.data["anatomyData"] + family = anatomy_data["family"] + family = self.family_mapping.get("family") or family + task_info = anatomy_data.get("task") or {} + + return get_publish_template_name( + project_name, + host_name, + family, + task_name=task_info.get("name"), + task_type=task_info.get("type"), + project_settings=instance.context.data["project_settings"], + logger=self.log + ) From d9a7d5cb802d1aec7ab71db7f576b2dde09c5f15 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 14 Dec 2022 14:46:01 +0100 Subject: [PATCH 081/171] OP-4504 - Hound --- openpype/plugins/publish/collect_source_for_source.py | 1 - openpype/plugins/publish/validate_publish_dir.py | 8 ++++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/openpype/plugins/publish/collect_source_for_source.py b/openpype/plugins/publish/collect_source_for_source.py index 345daa6fe8..fd08d28c74 100644 --- a/openpype/plugins/publish/collect_source_for_source.py +++ b/openpype/plugins/publish/collect_source_for_source.py @@ -9,7 +9,6 @@ Provides: """ import os -import copy import pyblish.api diff --git a/openpype/plugins/publish/validate_publish_dir.py b/openpype/plugins/publish/validate_publish_dir.py index 03fc47347d..eabf4810f3 100644 --- a/openpype/plugins/publish/validate_publish_dir.py +++ b/openpype/plugins/publish/validate_publish_dir.py @@ -35,10 +35,10 @@ class ValidatePublishDir(pyblish.api.InstancePlugin): original_dirname = instance.data.get("originalDirname") if not original_dirname: - raise KnownPublishError("Instance meant for in place publishing." - " Its 'originalDirname' must be collected." - " Contact OP developer to modify collector" - ) + raise KnownPublishError( + "Instance meant for in place publishing." + " Its 'originalDirname' must be collected." + " Contact OP developer to modify collector.") anatomy = instance.context.data["anatomy"] From a3969f8d1a6c901cfa2abb6dd8871527004724cf Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Dec 2022 12:51:11 +0100 Subject: [PATCH 082/171] Update openpype/plugins/publish/validate_publish_dir.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- openpype/plugins/publish/validate_publish_dir.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/validate_publish_dir.py b/openpype/plugins/publish/validate_publish_dir.py index eabf4810f3..e375fadf49 100644 --- a/openpype/plugins/publish/validate_publish_dir.py +++ b/openpype/plugins/publish/validate_publish_dir.py @@ -45,7 +45,7 @@ class ValidatePublishDir(pyblish.api.InstancePlugin): success, _ = anatomy.find_root_template_from_path(original_dirname) self.log.info(_) if not success: - raise KnownPublishError( + raise PublishValidationError( "Path '{}' not in project folder.".format(original_dirname) + " Please publish from inside of project folder." 
) From ee58c0ce48dc7230a18a575e7089aeaf20616eaf Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Dec 2022 13:15:14 +0100 Subject: [PATCH 083/171] OP-4504 - changed to XMLPublishError --- .../publish/help/validate_publish_dir.xml | 31 +++++++++++++++++++ .../plugins/publish/validate_publish_dir.py | 21 ++++++++----- 2 files changed, 44 insertions(+), 8 deletions(-) create mode 100644 openpype/plugins/publish/help/validate_publish_dir.xml diff --git a/openpype/plugins/publish/help/validate_publish_dir.xml b/openpype/plugins/publish/help/validate_publish_dir.xml new file mode 100644 index 0000000000..9f62b264bf --- /dev/null +++ b/openpype/plugins/publish/help/validate_publish_dir.xml @@ -0,0 +1,31 @@ + + + +Source directory not collected + +## Source directory not collected + +Instance is marked for in place publishing. Its 'originalDirname' must be collected. Contact OP developer to modify collector. + + + +### __Detailed Info__ (optional) + +In place publishing uses source directory and file name in resulting path and file name of published item. For this instance + all required metadata weren't filled. This is not recoverable error, unless instance itself is removed. + Collector for this instance must be updated for instance to be published. + + + +Source file not in project dir + +## Source file not in project dir + +Path '{original_dirname}' not in project folder. Please publish from inside of project folder. + +### How to repair? + +Restart publish after you moved source file into project directory. + + + \ No newline at end of file diff --git a/openpype/plugins/publish/validate_publish_dir.py b/openpype/plugins/publish/validate_publish_dir.py index e375fadf49..2f41127548 100644 --- a/openpype/plugins/publish/validate_publish_dir.py +++ b/openpype/plugins/publish/validate_publish_dir.py @@ -1,7 +1,7 @@ import pyblish.api from openpype.pipeline.publish import ValidateContentsOrder from openpype.pipeline.publish import ( - KnownPublishError, + PublishXmlValidationError, get_publish_template_name, ) @@ -35,20 +35,25 @@ class ValidatePublishDir(pyblish.api.InstancePlugin): original_dirname = instance.data.get("originalDirname") if not original_dirname: - raise KnownPublishError( + raise PublishXmlValidationError( + self, "Instance meant for in place publishing." " Its 'originalDirname' must be collected." - " Contact OP developer to modify collector.") + " Contact OP developer to modify collector." + ) anatomy = instance.context.data["anatomy"] success, _ = anatomy.find_root_template_from_path(original_dirname) - self.log.info(_) + + formatting_data = { + "original_dirname": original_dirname, + } + msg = "Path '{}' not in project folder.".format(original_dirname) + \ + " Please publish from inside of project folder." if not success: - raise PublishValidationError( - "Path '{}' not in project folder.".format(original_dirname) + - " Please publish from inside of project folder." 
- ) + raise PublishXmlValidationError(self, msg, key="not_in_dir", + formatting_data=formatting_data) def _get_template_name_from_instance(self, instance): project_name = instance.context.data["projectName"] From da274a7c8db1df524efd29a8a00cd54d2f30022a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Dec 2022 16:28:49 +0100 Subject: [PATCH 084/171] OP-4504 - safer comparison of two paths --- openpype/lib/file_transaction.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index f265b8815c..4ebede0174 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -92,7 +92,9 @@ class FileTransaction(object): def process(self): # Backup any existing files for dst, (src, _) in self._transfers.items(): - if dst == src or not os.path.exists(dst): + self.log.debug("Checking file ... {} -> {}".format(src, dst)) + path_same = self._same_paths(src, dst) + if path_same or not os.path.exists(dst): continue # Backup original file @@ -105,7 +107,8 @@ class FileTransaction(object): # Copy the files to transfer for dst, (src, opts) in self._transfers.items(): - if dst == src: + path_same = self._same_paths(src, dst) + if path_same: self.log.debug( "Source and destionation are same files {} -> {}".format( src, dst)) @@ -182,3 +185,10 @@ class FileTransaction(object): else: self.log.critical("An unexpected error occurred.") six.reraise(*sys.exc_info()) + + def _same_paths(self, src, dst): + # handles same paths but with C:/project vs c:/project + if os.path.exists(src) and os.path.exists(dst): + return os.path.samefile(src, dst) + + return False \ No newline at end of file From b6873a063eaac8f8e3d1463cd0f82681ae3c7f6e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Dec 2022 16:29:56 +0100 Subject: [PATCH 085/171] OP-4504 - added '_thumb' suffix to thumbnail Without it thumbnail would overwrite source file --- openpype/plugins/publish/extract_thumbnail.py | 2 +- openpype/plugins/publish/extract_thumbnail_from_source.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index 14b43beae8..a3c428fe97 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -91,7 +91,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): full_input_path = os.path.join(src_staging, input_file) self.log.info("input {}".format(full_input_path)) filename = os.path.splitext(input_file)[0] - jpeg_file = filename + ".jpg" + jpeg_file = filename + "_thumb.jpg" full_output_path = os.path.join(dst_staging, jpeg_file) if oiio_supported: diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py index 03df1455e2..a92f762cde 100644 --- a/openpype/plugins/publish/extract_thumbnail_from_source.py +++ b/openpype/plugins/publish/extract_thumbnail_from_source.py @@ -100,7 +100,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): self.log.info("Thumbnail source: {}".format(thumbnail_source)) src_basename = os.path.basename(thumbnail_source) - dst_filename = os.path.splitext(src_basename)[0] + ".jpg" + dst_filename = os.path.splitext(src_basename)[0] + "_thumb.jpg" full_output_path = os.path.join(dst_staging, dst_filename) if oiio_supported: From e3866dff5abd2022d37d90f0da4eb634e35c995e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Dec 2022 
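# Illustrative sketch (not part of this patch series): why the _same_paths
# helper added to FileTransaction above prefers os.path.samefile over a
# plain string comparison. "C:/project/a.exr" and "c:\\project\\a.exr" are
# the same file on Windows; samefile resolves both when the files exist,
# and the string comparison is only a fallback for not-yet-existing paths.
import os

def same_paths(src, dst):
    if os.path.exists(src) and os.path.exists(dst):
        return os.path.samefile(src, dst)
    return src == dst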
16:37:45 +0100 Subject: [PATCH 086/171] OP-4504 - remove check for existence For sequences source contains `%d` placeholder --- openpype/plugins/publish/collect_source_for_source.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_source_for_source.py b/openpype/plugins/publish/collect_source_for_source.py index fd08d28c74..aa94238b4f 100644 --- a/openpype/plugins/publish/collect_source_for_source.py +++ b/openpype/plugins/publish/collect_source_for_source.py @@ -31,7 +31,7 @@ class CollectSourceForSource(pyblish.api.InstancePlugin): current_file = instance.data.get("currentFile") source = instance.data.get("source") source_file = current_file or source - if source_file and os.path.exists(source_file): + if source_file: self.log.debug("Parsing paths for {}".format(source_file)) if not instance.data.get("originalBasename"): instance.data["originalBasename"] = \ From 3c09dfc80e003021b930b28288664eedff82002e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 15 Dec 2022 16:38:19 +0100 Subject: [PATCH 087/171] OP-4504 - remove extension from originalBasename It would produce weird concatenation of extensions. --- openpype/plugins/publish/integrate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 4692cefe4d..94789bb778 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -696,7 +696,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): raise KnownPublishError( "This is a bug. Representation file name is full path" ) - template_data["originalBasename"] = fname + template_data["originalBasename"], _ = os.path.splitext(fname) # Manage anatomy template data template_data.pop("frame", None) if is_udim: From 5d8ddb6e55cca8a3bc7bb058bc87080f329fc156 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 19 Dec 2022 12:15:09 +0100 Subject: [PATCH 088/171] OP-4504 - added explicit check Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/lib/file_transaction.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 4ebede0174..cba361a8d4 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -191,4 +191,4 @@ class FileTransaction(object): if os.path.exists(src) and os.path.exists(dst): return os.path.samefile(src, dst) - return False \ No newline at end of file + return src == dst From b1c834245072e99126a16d9e714dba8903dc5b95 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 19 Dec 2022 14:13:19 +0100 Subject: [PATCH 089/171] fix access to 'projectName' --- openpype/plugins/publish/integrate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 5da4c76539..b8f5c4eedb 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -243,7 +243,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): return filtered_repres def register(self, instance, file_transactions, filtered_repres): - project_name = instance.context["projectName"] + project_name = instance.context.data["projectName"] instance_stagingdir = instance.data.get("stagingDir") if not instance_stagingdir: From 3d2bfbf5a7447d7894338c12a9b2d41c9c3a363c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 20 Dec 2022 
12:08:29 +0100 Subject: [PATCH 090/171] traypublisher: multiple sequences enhancement --- .../plugins/create/create_editorial.py | 73 +++++++++++-------- 1 file changed, 42 insertions(+), 31 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 28a115629e..205403d33e 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -239,35 +239,38 @@ or updating already created. Publishing will create OTIO file. sequence_path_data = pre_create_data["sequence_filepath_data"] media_path_data = pre_create_data["media_filepaths_data"] - sequence_path = self._get_path_from_file_data(sequence_path_data) + sequence_paths = self._get_path_from_file_data( + sequence_path_data, multi=True) media_path = self._get_path_from_file_data(media_path_data) - # get otio timeline - otio_timeline = self._create_otio_timeline( - sequence_path, fps) + for index, seq_path in enumerate(sequence_paths): + # get otio timeline + otio_timeline = self._create_otio_timeline( + seq_path, fps) - # Create all clip instances - clip_instance_properties.update({ - "fps": fps, - "parent_asset_name": asset_name, - "variant": instance_data["variant"] - }) + # Create all clip instances + clip_instance_properties.update({ + "fps": fps, + "parent_asset_name": asset_name, + "variant": instance_data["variant"] + }) - # create clip instances - self._get_clip_instances( - otio_timeline, - media_path, - clip_instance_properties, - family_presets=allowed_family_presets + # create clip instances + self._get_clip_instances( + otio_timeline, + media_path, + clip_instance_properties, + family_presets=allowed_family_presets - ) + ) - # create otio editorial instance - self._create_otio_instance( - subset_name, instance_data, - sequence_path, media_path, - otio_timeline - ) + # create otio editorial instance + self._create_otio_instance( + subset_name + str(index), + instance_data, + seq_path, media_path, + otio_timeline + ) def _create_otio_instance( self, @@ -320,11 +323,12 @@ or updating already created. Publishing will create OTIO file. self.log.info(f"kwargs: {kwargs}") return otio.adapters.read_from_file(sequence_path, **kwargs) - def _get_path_from_file_data(self, file_path_data): + def _get_path_from_file_data(self, file_path_data, multi=False): """Converting creator path data to single path string Args: file_path_data (FileDefItem): creator path data inputs + multi (bool): switch to multiple files mode Raises: FileExistsError: in case nothing had been set @@ -332,16 +336,23 @@ or updating already created. Publishing will create OTIO file. 
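# Illustrative sketch (not part of this patch series): the FileDefItem-like
# data this method receives and how it becomes full sequence paths when
# multi=True. The dictionaries below are made-up examples.
import os

file_path_data = [
    {"directory": "/mnt/editorial/day01", "filenames": ["cut_v001.edl"]},
    {"directory": "/mnt/editorial/day02", "filenames": ["cut_v002.edl"]},
]
return_path_list = [
    os.path.join(item["directory"], item["filenames"][0])
    for item in file_path_data
]
# multi=True returns both paths; multi=False returns only the first one.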
Returns: str: path string """ - # TODO: just temporarly solving only one media file - if isinstance(file_path_data, list): - file_path_data = file_path_data.pop() + return_path_list = [] - if len(file_path_data["filenames"]) == 0: + self.log.debug(f"type: {type(file_path_data)}") + self.log.debug(f"file_path_data: {file_path_data}") + + if isinstance(file_path_data, list): + return_path_list = [ + os.path.join(f["directory"], f["filenames"][0]) + for f in file_path_data + ] + self.log.debug(f"return_path_list: {return_path_list}") + + if not return_path_list: raise FileExistsError( f"File path was not added: {file_path_data}") - return os.path.join( - file_path_data["directory"], file_path_data["filenames"][0]) + return return_path_list if multi else return_path_list[0] def _get_clip_instances( self, @@ -833,7 +844,7 @@ or updating already created. Publishing will create OTIO file. ".fcpxml" ], allow_sequences=False, - single_item=True, + single_item=False, label="Sequence file", ), FileDef( From 409db0fb831fc7447c8acfeb565ce06d57f5a162 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 21 Dec 2022 11:48:27 +0100 Subject: [PATCH 091/171] use qtpy in resolve instead of Qt.py --- openpype/hosts/resolve/api/menu.py | 2 +- openpype/hosts/resolve/api/plugin.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/resolve/api/menu.py b/openpype/hosts/resolve/api/menu.py index 86b292105a..eeb9e65dec 100644 --- a/openpype/hosts/resolve/api/menu.py +++ b/openpype/hosts/resolve/api/menu.py @@ -1,7 +1,7 @@ import os import sys -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.tools.utils import host_tools diff --git a/openpype/hosts/resolve/api/plugin.py b/openpype/hosts/resolve/api/plugin.py index 0ed7beee59..77e30149fd 100644 --- a/openpype/hosts/resolve/api/plugin.py +++ b/openpype/hosts/resolve/api/plugin.py @@ -2,7 +2,7 @@ import re import uuid import qargparse -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.settings import get_current_project_settings from openpype.pipeline.context_tools import get_current_project_asset From 95b15a9f00bbcd5f7bd0e241e3818ee6b6d64c9d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Dec 2022 11:44:35 +0100 Subject: [PATCH 092/171] global,nuke,maya: on demand placeholder removal preset attribute --- .../maya/api/workfile_template_builder.py | 2 +- .../nuke/api/workfile_template_builder.py | 15 ++++-- .../workfile/workfile_template_builder.py | 49 +++++++++++++++---- .../schema_templated_workfile_build.json | 11 ++++- 4 files changed, 62 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index ef043ed0f4..1d3f1cf568 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -28,7 +28,7 @@ class MayaTemplateBuilder(AbstractTemplateBuilder): Args: path (str): A path to current template (usually given by - get_template_path implementation) + get_template_preset implementation) Returns: bool: Wether the template was succesfully imported or not diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 7a2e442e32..60bf906fbe 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -40,7 +40,7 @@ class NukeTemplateBuilder(AbstractTemplateBuilder): Args: path 
(str): A path to current template (usually given by - get_template_path implementation) + get_template_preset implementation) Returns: bool: Wether the template was succesfully imported or not @@ -273,6 +273,15 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): placeholder.data["nb_children"] += 1 reset_selection() + + # remove placeholders marked as delete + if ( + placeholder.data.get("delete") + and not placeholder.data.get("keep_placeholder") + ): + self.log.debug("Deleting node: {}".format(placeholder_node.name())) + nuke.delete(placeholder_node) + # go back to root group nuke.root().begin() @@ -454,12 +463,12 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): ) for node in placeholder_node.dependent(): for idx in range(node.inputs()): - if node.input(idx) == placeholder_node: + if node.input(idx) == placeholder_node and output_node: node.setInput(idx, output_node) for node in placeholder_node.dependencies(): for idx in range(placeholder_node.inputs()): - if placeholder_node.input(idx) == node: + if placeholder_node.input(idx) == node and input_node: input_node.setInput(0, node) def _create_sib_copies(self, placeholder): diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 582657c735..f6a4ab51cb 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -401,7 +401,12 @@ class AbstractTemplateBuilder(object): key=lambda i: i.order )) - def build_template(self, template_path=None, level_limit=None): + def build_template( + self, + template_path=None, + level_limit=None, + keep_placeholders=None + ): """Main callback for building workfile from template path. Todo: @@ -410,16 +415,22 @@ class AbstractTemplateBuilder(object): Args: template_path (str): Path to a template file with placeholders. - Template from settings 'get_template_path' used when not + Template from settings 'get_template_preset' used when not passed. level_limit (int): Limit of populate loops. Related to 'populate_scene_placeholders' method. """ + template_preset = self.get_template_preset() if template_path is None: - template_path = self.get_template_path() + template_path = template_preset["path"] + + if keep_placeholders is None: + keep_placeholders = template_preset["placeholder_keep"] + self.import_template(template_path) - self.populate_scene_placeholders(level_limit) + self.populate_scene_placeholders( + level_limit, keep_placeholders) def rebuild_template(self): """Go through existing placeholders in scene and update them. @@ -489,7 +500,9 @@ class AbstractTemplateBuilder(object): plugin = plugins_by_identifier[identifier] plugin.prepare_placeholders(placeholders) - def populate_scene_placeholders(self, level_limit=None): + def populate_scene_placeholders( + self, level_limit=None, keep_placeholders=None + ): """Find placeholders in scene using plugins and process them. This should happen after 'import_template'. @@ -541,6 +554,11 @@ class AbstractTemplateBuilder(object): " is already in progress." 
)) continue + + # add flag for keeping placeholders in scene + # after they are processed + placeholder.data["keep_placeholder"] = keep_placeholders + filtered_placeholders.append(placeholder) self._prepare_placeholders(filtered_placeholders) @@ -599,8 +617,8 @@ class AbstractTemplateBuilder(object): ["profiles"] ) - def get_template_path(self): - """Unified way how template path is received usign settings. + def get_template_preset(self): + """Unified way how template preset is received usign settings. Method is dependent on '_get_build_profiles' which should return filter profiles to resolve path to a template. Default implementation looks @@ -637,6 +655,13 @@ class AbstractTemplateBuilder(object): ).format(task_name, task_type, host_name)) path = profile["path"] + + # switch to remove placeholders after they are used + placeholder_keep = profile.get("placeholder_keep") + # backward compatibility, since default is True + if placeholder_keep is not False: + placeholder_keep = True + if not path: raise TemplateLoadFailed(( "Template path is not set.\n" @@ -657,7 +682,10 @@ class AbstractTemplateBuilder(object): if path and os.path.exists(path): self.log.info("Found template at: '{}'".format(path)) - return path + return { + "path": path, + "placeholder_keep": placeholder_keep + } solved_path = None while True: @@ -683,7 +711,10 @@ class AbstractTemplateBuilder(object): self.log.info("Found template at: '{}'".format(solved_path)) - return solved_path + return { + "path": solved_path, + "placeholder_keep": placeholder_keep + } @six.add_metaclass(ABCMeta) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json index 99a29beb27..1826734291 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json @@ -25,8 +25,15 @@ { "key": "path", "label": "Path to template", - "type": "text", - "object_type": "text" + "type": "path", + "multiplatform": false, + "multipath": false + }, + { + "key": "placeholder_keep", + "label": "Keep placeholders", + "type": "boolean", + "default": true } ] } From 011cd8f2e4e9536ce59194668d91b92712484ea1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Dec 2022 11:45:38 +0100 Subject: [PATCH 093/171] nuke: remove update template menu item --- openpype/hosts/nuke/api/pipeline.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index fb707ca44c..918598c04f 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -217,10 +217,6 @@ def _install_menu(): "Build Workfile from template", lambda: build_workfile_template() ) - menu_template.addCommand( - "Update Workfile", - lambda: update_workfile_template() - ) menu_template.addSeparator() menu_template.addCommand( "Create Place Holder", From 90303d4137d3dfca49035ac1c878dbb830f42b42 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Dec 2022 11:48:46 +0100 Subject: [PATCH 094/171] global: updating docstrings --- openpype/pipeline/workfile/workfile_template_builder.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index f6a4ab51cb..2850175bc9 100644 --- 
a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -419,6 +419,9 @@ class AbstractTemplateBuilder(object): passed. level_limit (int): Limit of populate loops. Related to 'populate_scene_placeholders' method. + keep_placeholders (bool): Add flag to placeholder data for + hosts to decide if they want to remove + placeholder after it is used. """ template_preset = self.get_template_preset() @@ -518,6 +521,9 @@ class AbstractTemplateBuilder(object): Args: level_limit (int): Level of loops that can happen. Default is 1000. + keep_placeholders (bool): Add flag to placeholder data for + hosts to decide if they want to remove + placeholder after it is used. """ if not self.placeholder_plugins: From 212b372c03dad76e7aa1a7b83148ea78dda5611e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Dec 2022 11:49:18 +0100 Subject: [PATCH 095/171] nuke: make `get_group_io_nodes` soft fail --- openpype/hosts/nuke/api/lib.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index a066bbcdcf..2fdf446357 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2865,10 +2865,11 @@ def get_group_io_nodes(nodes): break if input_node is None: - raise ValueError("No Input found") + log.warning("No Input found") if output_node is None: - raise ValueError("No Output found") + log.warning("No Output found") + return input_node, output_node From 843f6e8cd036452fd8f98dc88b70abdf6b06fdd3 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 22 Dec 2022 16:10:07 +0100 Subject: [PATCH 096/171] :art: improve online family functionality --- .../plugins/create/create_online.py | 31 ++++++++++++++++--- .../plugins/publish/collect_online_file.py | 8 ++++- .../plugins/publish/validate_online_file.py | 2 ++ openpype/plugins/publish/extract_thumbnail.py | 2 +- 4 files changed, 37 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py index 19f956a50e..096172d581 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_online.py +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -7,13 +7,14 @@ exists under selected asset. """ from pathlib import Path -from openpype.client import get_subset_by_name, get_asset_by_name -from openpype.lib.attribute_definitions import FileDef +# from openpype.client import get_subset_by_name, get_asset_by_name +from openpype.lib.attribute_definitions import FileDef, BoolDef, UILabelDef from openpype.pipeline import ( CreatedInstance, CreatorError ) from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator +from typing import Union class OnlineCreator(TrayPublishCreator): @@ -23,7 +24,11 @@ class OnlineCreator(TrayPublishCreator): label = "Online" family = "online" description = "Publish file retaining its original file name" - extensions = [".mov", ".mp4", ".mxf", ".m4v", ".mpg"] + extensions = [".mov", ".mp4", ".mxf", ".m4v", ".mpg", ".exr", ".dpx", ".tif", ".png", ".jpg"] + + def __init__(self, *args, **kwargs): + super(OnlineCreator, self).__init__(*args, **kwargs) + self._original_path: Union[str, None] = None def get_detail_description(self): return """# Create file retaining its original file name. 
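# Illustrative sketch (not part of this patch series): the backward-compatible
# default applied to the new "placeholder_keep" key in the workfile build
# profiles changed earlier in this patch set. Older project settings have no
# such key, so anything other than an explicit False must behave as True.
# The profile below is a made-up example.
profile = {"path": "{root[work]}/templates/comp_template.nk"}

placeholder_keep = profile.get("placeholder_keep")
if placeholder_keep is not False:   # missing (None) or True -> keep
    placeholder_keep = True

template_preset = {"path": profile["path"], "placeholder_keep": placeholder_keep}
assert template_preset["placeholder_keep"] is True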
@@ -49,13 +54,17 @@ class OnlineCreator(TrayPublishCreator): origin_basename = Path(files[0]).stem + # disable check for existing subset with the same name + """ asset = get_asset_by_name( self.project_name, instance_data["asset"], fields=["_id"]) + if get_subset_by_name( self.project_name, origin_basename, asset["_id"], fields=["_id"]): raise CreatorError(f"subset with {origin_basename} already " "exists in selected asset") + """ instance_data["originalBasename"] = origin_basename subset_name = origin_basename @@ -69,15 +78,29 @@ class OnlineCreator(TrayPublishCreator): instance_data, self) self._store_new_instance(new_instance) + def get_instance_attr_defs(self): + return [ + BoolDef( + "add_review_family", + default=True, + label="Review" + ) + ] + def get_pre_create_attr_defs(self): return [ FileDef( "representation_file", folders=False, extensions=self.extensions, - allow_sequences=False, + allow_sequences=True, single_item=True, label="Representation", + ), + BoolDef( + "add_review_family", + default=True, + label="Review" ) ] diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py index a3f86afa13..05b00e9516 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py @@ -12,12 +12,18 @@ class CollectOnlineFile(pyblish.api.InstancePlugin): def process(self, instance): file = Path(instance.data["creator_attributes"]["path"]) + review = instance.data["creator_attributes"]["add_review_family"] + instance.data["review"] = review + if "review" not in instance.data["families"]: + instance.data["families"].append("review") + self.log.info(f"Adding review: {review}") instance.data["representations"].append( { "name": file.suffix.lstrip("."), "ext": file.suffix.lstrip("."), "files": file.name, - "stagingDir": file.parent.as_posix() + "stagingDir": file.parent.as_posix(), + "tags": ["review"] if review else [] } ) diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py index 12b2e72ced..2db865ca2b 100644 --- a/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py +++ b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py @@ -20,6 +20,8 @@ class ValidateOnlineFile(OptionalPyblishPluginMixin, optional = True def process(self, instance): + if not self.is_active(instance.data): + return project_name = instance.context.data["projectName"] asset_id = instance.data["assetEntity"]["_id"] subset = get_subset_by_name( diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index 14b43beae8..14c6a21ed0 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -19,7 +19,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): order = pyblish.api.ExtractorOrder families = [ "imagesequence", "render", "render2d", "prerender", - "source", "clip", "take" + "source", "clip", "take", "online" ] hosts = ["shell", "fusion", "resolve", "traypublisher"] enabled = False From 64afc35cd2ea98a2c340a23c6a2a2c0158a2e216 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 22 Dec 2022 16:13:37 +0100 Subject: [PATCH 097/171] :rotating_light: some hound fixes --- .../hosts/traypublisher/plugins/create/create_online.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git 
a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py index 096172d581..1a366bcff5 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_online.py +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -8,7 +8,7 @@ exists under selected asset. from pathlib import Path # from openpype.client import get_subset_by_name, get_asset_by_name -from openpype.lib.attribute_definitions import FileDef, BoolDef, UILabelDef +from openpype.lib.attribute_definitions import FileDef, BoolDef from openpype.pipeline import ( CreatedInstance, CreatorError @@ -24,7 +24,8 @@ class OnlineCreator(TrayPublishCreator): label = "Online" family = "online" description = "Publish file retaining its original file name" - extensions = [".mov", ".mp4", ".mxf", ".m4v", ".mpg", ".exr", ".dpx", ".tif", ".png", ".jpg"] + extensions = [".mov", ".mp4", ".mxf", ".m4v", ".mpg", ".exr", + ".dpx", ".tif", ".png", ".jpg"] def __init__(self, *args, **kwargs): super(OnlineCreator, self).__init__(*args, **kwargs) @@ -58,7 +59,7 @@ class OnlineCreator(TrayPublishCreator): """ asset = get_asset_by_name( self.project_name, instance_data["asset"], fields=["_id"]) - + if get_subset_by_name( self.project_name, origin_basename, asset["_id"], fields=["_id"]): From c86ebf1e93257ad29647797de57111bb1544fe08 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 23 Dec 2022 10:05:51 +0800 Subject: [PATCH 098/171] only allows the loaded reference to be imported reference --- openpype/hosts/maya/plugins/publish/extract_import_reference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index 8e0257dafb..b77740ae13 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -82,7 +82,7 @@ cmds.file(current_name, open=True, force=True) print(">>> Processing references") all_reference = cmds.file(q=True, reference=True) or [] for ref in all_reference: - if cmds.referenceQuery(ref, f=True): + if cmds.referenceQuery(ref, f=True, il=True): cmds.file(ref, importReference=True) nested_ref = cmds.file(q=True, reference=True) From f5cb893dc1b28b62b8132796bf5037d1236d6341 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 12:16:54 +0100 Subject: [PATCH 099/171] global: creator plugin abstraction for workfile builder template --- openpype/hosts/nuke/api/pipeline.py | 4 +- .../workfile/workfile_template_builder.py | 191 +++++++++++++++++- 2 files changed, 192 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 918598c04f..bdf12b7dc4 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -35,6 +35,7 @@ from .lib import ( ) from .workfile_template_builder import ( NukePlaceholderLoadPlugin, + NukePlaceholderCreatePlugin, build_workfile_template, update_workfile_template, create_placeholder, @@ -139,7 +140,8 @@ def _show_workfiles(): def get_workfile_build_placeholder_plugins(): return [ - NukePlaceholderLoadPlugin + NukePlaceholderLoadPlugin, + NukePlaceholderCreatePlugin ] diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 2850175bc9..d85d6b50dd 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ 
b/openpype/pipeline/workfile/workfile_template_builder.py @@ -42,7 +42,10 @@ from openpype.pipeline.load import ( get_contexts_for_repre_docs, load_with_repre_context, ) -from openpype.pipeline.create import get_legacy_creator_by_name +from openpype.pipeline.create import ( + get_legacy_creator_by_name, + discover_legacy_creator_plugins +) class TemplateNotFound(Exception): @@ -235,7 +238,7 @@ class AbstractTemplateBuilder(object): def get_creators_by_name(self): if self._creators_by_name is None: - self._creators_by_name = get_legacy_creator_by_name() + self._creators_by_name = discover_legacy_creator_plugins() return self._creators_by_name def get_shared_data(self, key): @@ -1463,6 +1466,165 @@ class PlaceholderLoadMixin(object): pass +class PlaceholderCreateMixin(object): + """Mixin prepared for creating placeholder plugins. + + Implementation prepares options for placeholders with + 'get_create_plugin_options'. + + For placeholder population is implemented 'populate_create_placeholder'. + + PlaceholderItem can have implemented methods: + - 'create_failed' - called when creating of an instance failed + - 'create_succeed' - called when creating of an instance succeeded + """ + + def get_create_plugin_options(self, options=None): + """Unified attribute definitions for create placeholder. + + Common function for placeholder plugins used for creating of + publishable instances. Use it with 'get_placeholder_options'. + + Args: + plugin (PlaceholderPlugin): Plugin used for creating of + publish instances. + options (Dict[str, Any]): Already available options which are used + as defaults for attributes. + + Returns: + List[AbtractAttrDef]: Attribute definitions common for create + plugins. + """ + + creators_by_name = self.builder.get_creators_by_name() + creator_items = [ + (creator_name, creator.label or creator_name) + for creator_name, creator in creators_by_name.items() + ] + + creator_items = list(sorted(creator_items, key=lambda i: i[1])) + options = options or {} + return [ + attribute_definitions.UISeparatorDef(), + attribute_definitions.UILabelDef("Main attributes"), + attribute_definitions.UISeparatorDef(), + + attribute_definitions.EnumDef( + "creator", + label="Creator", + default=options.get("creator"), + items=creator_items, + tooltip=( + "Creator" + "\nDefines what OpenPype creator will be used to" + " create publishable instance." + "\nUseable creator depends on current host's creator list." + "\nField is case sensitive." + ) + ), + attribute_definitions.TextDef( + "create_variant", + label="Variant", + default=options.get("create_variant"), + placeholder='Main', + tooltip=( + "Creator" + "\nDefines variant name which will be use for " + "\ncompiling of subset name." + ) + ), + attribute_definitions.UISeparatorDef(), + attribute_definitions.NumberDef( + "order", + label="Order", + default=options.get("order") or 0, + decimals=0, + minimum=0, + maximum=999, + tooltip=( + "Order" + "\nOrder defines creating instance priority (0 to 999)" + "\nPriority rule is : \"lowest is first to load\"." + ) + ) + ] + + def populate_create_placeholder(self, placeholder): + """Create placeholder is going to create matching publishabe instance. + + Args: + placeholder (PlaceholderItem): Placeholder item with information + about requested publishable instance. 
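# Illustrative sketch (not part of this patch series): the placeholder data
# the create mixin reads and, roughly, what the creator-driven subset naming
# amounts to. The formatting below is a simplified stand-in for the real
# creator_plugin.get_subset_name(), which also uses task and asset context.
placeholder_data = {"creator": "CreateRender", "create_variant": "Main"}

family = "render"  # family of the hypothetical creator named above
variant = placeholder_data["create_variant"]
subset_name = "{}{}{}".format(family, variant[0].upper(), variant[1:])
assert subset_name == "renderMain"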
+ """ + creator_name = placeholder.data["creator"] + create_variant = placeholder.data["create_variant"] + + creator_plugin = get_legacy_creator_by_name(creator_name) + + # create subset name + project_name = legacy_io.Session["AVALON_PROJECT"] + task_name = legacy_io.Session["AVALON_TASK"] + asset_name = legacy_io.Session["AVALON_ASSET"] + + # get asset id + asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) + assert asset_doc, "No current asset found in Session" + asset_id = asset_doc['_id'] + + subset_name = creator_plugin.get_subset_name( + create_variant, + task_name, + asset_id, + project_name + ) + + creator_data = { + "creator_name": creator_name, + "create_variant": create_variant, + "subset_name": subset_name, + "creator_plugin": creator_plugin + } + + # compile subset name from variant + try: + creator_instance = creator_plugin( + subset_name, + asset_name + ).process() + + except Exception: + failed = True + self.create_failed(placeholder, creator_data) + + else: + failed = False + self.create_succeed(placeholder, creator_instance) + + self.cleanup_placeholder(placeholder, failed) + + def create_failed(self, placeholder, creator_data): + if hasattr(placeholder, "create_failed"): + placeholder.create_failed(creator_data) + + def create_succeed(self, placeholder, creator_instance): + if hasattr(placeholder, "create_succeed"): + placeholder.create_succeed(creator_instance) + + def cleanup_placeholder(self, placeholder, failed): + """Cleanup placeholder after load of single representation. + + Can be called multiple times during placeholder item populating and is + called even if loading failed. + + Args: + placeholder (PlaceholderItem): Item which was just used to load + representation. + failed (bool): Loading of representation failed. + """ + + pass + + class LoadPlaceholderItem(PlaceholderItem): """PlaceholderItem for plugin which is loading representations. @@ -1486,3 +1648,28 @@ class LoadPlaceholderItem(PlaceholderItem): def load_failed(self, representation): self._failed_representations.append(representation) + + +class CreatePlaceholderItem(PlaceholderItem): + """PlaceholderItem for plugin which is creating publish instance. + + Connected to 'PlaceholderCreateMixin'. 
+ """ + + def __init__(self, *args, **kwargs): + super(CreatePlaceholderItem, self).__init__(*args, **kwargs) + self._failed_created_publish_instances = [] + + def get_errors(self): + if not self._failed_representations: + return [] + message = ( + "Failed to create {} instance using Creator {}" + ).format( + len(self._failed_created_publish_instances), + self.data["creator"] + ) + return [message] + + def create_failed(self, creator_data): + self._failed_created_publish_instances.append(creator_data) From dfb3d142aa765e27a4fc2eb3c861e70ad57fb464 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 12:17:27 +0100 Subject: [PATCH 100/171] nuke: workfile builder template creator plugin implementation --- .../nuke/api/workfile_template_builder.py | 410 +++++++++++++++++- 1 file changed, 409 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 60bf906fbe..5e9e5fcdce 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -7,7 +7,9 @@ from openpype.pipeline.workfile.workfile_template_builder import ( AbstractTemplateBuilder, PlaceholderPlugin, LoadPlaceholderItem, + CreatePlaceholderItem, PlaceholderLoadMixin, + PlaceholderCreateMixin ) from openpype.tools.workfile_template_build import ( WorkfileBuildPlaceholderDialog, @@ -32,7 +34,7 @@ PLACEHOLDER_SET = "PLACEHOLDERS_SET" class NukeTemplateBuilder(AbstractTemplateBuilder): - """Concrete implementation of AbstractTemplateBuilder for maya""" + """Concrete implementation of AbstractTemplateBuilder for nuke""" def import_template(self, path): """Import template into current scene. @@ -544,6 +546,412 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): siblings_input.setInput(0, copy_output) +class NukePlaceholderCreatePlugin( + NukePlaceholderPlugin, PlaceholderCreateMixin +): + identifier = "nuke.create" + label = "Nuke create" + + def _parse_placeholder_node_data(self, node): + placeholder_data = super( + NukePlaceholderCreatePlugin, self + )._parse_placeholder_node_data(node) + + node_knobs = node.knobs() + nb_children = 0 + if "nb_children" in node_knobs: + nb_children = int(node_knobs["nb_children"].getValue()) + placeholder_data["nb_children"] = nb_children + + siblings = [] + if "siblings" in node_knobs: + siblings = node_knobs["siblings"].values() + placeholder_data["siblings"] = siblings + + node_full_name = node.fullName() + placeholder_data["group_name"] = node_full_name.rpartition(".")[0] + placeholder_data["last_loaded"] = [] + placeholder_data["delete"] = False + return placeholder_data + + def collect_placeholders(self): + output = [] + scene_placeholders = self._collect_scene_placeholders() + for node_name, node in scene_placeholders.items(): + plugin_identifier_knob = node.knob("plugin_identifier") + if ( + plugin_identifier_knob is None + or plugin_identifier_knob.getValue() != self.identifier + ): + continue + + placeholder_data = self._parse_placeholder_node_data(node) + # TODO do data validations and maybe updgrades if are invalid + output.append( + CreatePlaceholderItem(node_name, placeholder_data, self) + ) + + return output + + def populate_placeholder(self, placeholder): + self.populate_create_placeholder(placeholder) + + def repopulate_placeholder(self, placeholder): + self.populate_create_placeholder(placeholder) + + def get_placeholder_options(self, options=None): + return 
self.get_create_plugin_options(options) + + def cleanup_placeholder(self, placeholder, failed): + # deselect all selected nodes + placeholder_node = nuke.toNode(placeholder.scene_identifier) + + # getting the latest nodes added + nodes_init = placeholder.data["nodes_init"] + nodes_created = list(set(nuke.allNodes()) - set(nodes_init)) + self.log.debug("Created nodes: {}".format(nodes_created)) + if not nodes_created: + return + + placeholder.data["delete"] = True + + nodes_created = self._move_to_placeholder_group( + placeholder, nodes_created + ) + placeholder.data["last_created"] = nodes_created + refresh_nodes(nodes_created) + + # positioning of the created nodes + min_x, min_y, _, _ = get_extreme_positions(nodes_created) + for node in nodes_created: + xpos = (node.xpos() - min_x) + placeholder_node.xpos() + ypos = (node.ypos() - min_y) + placeholder_node.ypos() + node.setXYpos(xpos, ypos) + refresh_nodes(nodes_created) + + # fix the problem of z_order for backdrops + self._fix_z_order(placeholder) + self._imprint_siblings(placeholder) + + if placeholder.data["nb_children"] == 0: + # save initial nodes postions and dimensions, update them + # and set inputs and outputs of created nodes + + self._imprint_inits() + self._update_nodes(placeholder, nuke.allNodes(), nodes_created) + self._set_created_connections(placeholder) + + elif placeholder.data["siblings"]: + # create copies of placeholder siblings for the new created nodes, + # set their inputs and outpus and update all nodes positions and + # dimensions and siblings names + + siblings = get_nodes_by_names(placeholder.data["siblings"]) + refresh_nodes(siblings) + copies = self._create_sib_copies(placeholder) + new_nodes = list(copies.values()) # copies nodes + self._update_nodes(new_nodes, nodes_created) + placeholder_node.removeKnob(placeholder_node.knob("siblings")) + new_nodes_name = get_names_from_nodes(new_nodes) + imprint(placeholder_node, {"siblings": new_nodes_name}) + self._set_copies_connections(placeholder, copies) + + self._update_nodes( + nuke.allNodes(), + new_nodes + nodes_created, + 20 + ) + + new_siblings = get_names_from_nodes(new_nodes) + placeholder.data["siblings"] = new_siblings + + else: + # if the placeholder doesn't have siblings, the created + # nodes will be placed in a free space + + xpointer, ypointer = find_free_space_to_paste_nodes( + nodes_created, direction="bottom", offset=200 + ) + node = nuke.createNode("NoOp") + reset_selection() + nuke.delete(node) + for node in nodes_created: + xpos = (node.xpos() - min_x) + xpointer + ypos = (node.ypos() - min_y) + ypointer + node.setXYpos(xpos, ypos) + + placeholder.data["nb_children"] += 1 + reset_selection() + + # remove placeholders marked as delete + if ( + placeholder.data.get("delete") + and not placeholder.data.get("keep_placeholder") + ): + self.log.debug("Deleting node: {}".format(placeholder_node.name())) + nuke.delete(placeholder_node) + + # go back to root group + nuke.root().begin() + + def _move_to_placeholder_group(self, placeholder, nodes_created): + """ + opening the placeholder's group and copying created nodes in it. 
+ + Returns : + nodes_created (list): the new list of pasted nodes + """ + groups_name = placeholder.data["group_name"] + reset_selection() + select_nodes(nodes_created) + if groups_name: + with node_tempfile() as filepath: + nuke.nodeCopy(filepath) + for node in nuke.selectedNodes(): + nuke.delete(node) + group = nuke.toNode(groups_name) + group.begin() + nuke.nodePaste(filepath) + nodes_created = nuke.selectedNodes() + return nodes_created + + def _fix_z_order(self, placeholder): + """Fix the problem of z_order when a backdrop is create.""" + + nodes_created = placeholder.data["last_created"] + created_backdrops = [] + bd_orders = set() + for node in nodes_created: + if isinstance(node, nuke.BackdropNode): + created_backdrops.append(node) + bd_orders.add(node.knob("z_order").getValue()) + + if not bd_orders: + return + + sib_orders = set() + for node_name in placeholder.data["siblings"]: + node = nuke.toNode(node_name) + if isinstance(node, nuke.BackdropNode): + sib_orders.add(node.knob("z_order").getValue()) + + if not sib_orders: + return + + min_order = min(bd_orders) + max_order = max(sib_orders) + for backdrop_node in created_backdrops: + z_order = backdrop_node.knob("z_order").getValue() + backdrop_node.knob("z_order").setValue( + z_order + max_order - min_order + 1) + + def _imprint_siblings(self, placeholder): + """ + - add siblings names to placeholder attributes (nodes created with it) + - add Id to the attributes of all the other nodes + """ + + created_nodes = placeholder.data["last_created"] + created_nodes_set = set(created_nodes) + data = {"repre_id": str(placeholder.data["last_repre_id"])} + + for node in created_nodes: + node_knobs = node.knobs() + if "builder_type" not in node_knobs: + # save the id of representation for all imported nodes + imprint(node, data) + node.knob("repre_id").setVisible(False) + refresh_node(node) + continue + + if ( + "is_placeholder" not in node_knobs + or ( + "is_placeholder" in node_knobs + and node.knob("is_placeholder").value() + ) + ): + siblings = list(created_nodes_set - {node}) + siblings_name = get_names_from_nodes(siblings) + siblings = {"siblings": siblings_name} + imprint(node, siblings) + + def _imprint_inits(self): + """Add initial positions and dimensions to the attributes""" + + for node in nuke.allNodes(): + refresh_node(node) + imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) + node.knob("x_init").setVisible(False) + node.knob("y_init").setVisible(False) + width = node.screenWidth() + height = node.screenHeight() + if "bdwidth" in node.knobs(): + imprint(node, {"w_init": width, "h_init": height}) + node.knob("w_init").setVisible(False) + node.knob("h_init").setVisible(False) + refresh_node(node) + + def _update_nodes( + self, placeholder, nodes, considered_nodes, offset_y=None + ): + """Adjust backdrop nodes dimensions and positions. + + Considering some nodes sizes. 
+ + Args: + nodes (list): list of nodes to update + considered_nodes (list): list of nodes to consider while updating + positions and dimensions + offset (int): distance between copies + """ + + placeholder_node = nuke.toNode(placeholder.scene_identifier) + + min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) + + diff_x = diff_y = 0 + contained_nodes = [] # for backdrops + + if offset_y is None: + width_ph = placeholder_node.screenWidth() + height_ph = placeholder_node.screenHeight() + diff_y = max_y - min_y - height_ph + diff_x = max_x - min_x - width_ph + contained_nodes = [placeholder_node] + min_x = placeholder_node.xpos() + min_y = placeholder_node.ypos() + else: + siblings = get_nodes_by_names(placeholder.data["siblings"]) + minX, _, maxX, _ = get_extreme_positions(siblings) + diff_y = max_y - min_y + 20 + diff_x = abs(max_x - min_x - maxX + minX) + contained_nodes = considered_nodes + + if diff_y <= 0 and diff_x <= 0: + return + + for node in nodes: + refresh_node(node) + + if ( + node == placeholder_node + or node in considered_nodes + ): + continue + + if ( + not isinstance(node, nuke.BackdropNode) + or ( + isinstance(node, nuke.BackdropNode) + and not set(contained_nodes) <= set(node.getNodes()) + ) + ): + if offset_y is None and node.xpos() >= min_x: + node.setXpos(node.xpos() + diff_x) + + if node.ypos() >= min_y: + node.setYpos(node.ypos() + diff_y) + + else: + width = node.screenWidth() + height = node.screenHeight() + node.knob("bdwidth").setValue(width + diff_x) + node.knob("bdheight").setValue(height + diff_y) + + refresh_node(node) + + def _set_created_connections(self, placeholder): + """ + set inputs and outputs of created nodes""" + + placeholder_node = nuke.toNode(placeholder.scene_identifier) + input_node, output_node = get_group_io_nodes( + placeholder.data["last_created"] + ) + for node in placeholder_node.dependent(): + for idx in range(node.inputs()): + if node.input(idx) == placeholder_node and output_node: + node.setInput(idx, output_node) + + for node in placeholder_node.dependencies(): + for idx in range(placeholder_node.inputs()): + if placeholder_node.input(idx) == node and input_node: + input_node.setInput(0, node) + + def _create_sib_copies(self, placeholder): + """ creating copies of the palce_holder siblings (the ones who were + created with it) for the new nodes added + + Returns : + copies (dict) : with copied nodes names and their copies + """ + + copies = {} + siblings = get_nodes_by_names(placeholder.data["siblings"]) + for node in siblings: + new_node = duplicate_node(node) + + x_init = int(new_node.knob("x_init").getValue()) + y_init = int(new_node.knob("y_init").getValue()) + new_node.setXYpos(x_init, y_init) + if isinstance(new_node, nuke.BackdropNode): + w_init = new_node.knob("w_init").getValue() + h_init = new_node.knob("h_init").getValue() + new_node.knob("bdwidth").setValue(w_init) + new_node.knob("bdheight").setValue(h_init) + refresh_node(node) + + if "repre_id" in node.knobs().keys(): + node.removeKnob(node.knob("repre_id")) + copies[node.name()] = new_node + return copies + + def _set_copies_connections(self, placeholder, copies): + """Set inputs and outputs of the copies. + + Args: + copies (dict): Copied nodes by their names. 
+ """ + + last_input, last_output = get_group_io_nodes( + placeholder.data["last_created"] + ) + siblings = get_nodes_by_names(placeholder.data["siblings"]) + siblings_input, siblings_output = get_group_io_nodes(siblings) + copy_input = copies[siblings_input.name()] + copy_output = copies[siblings_output.name()] + + for node_init in siblings: + if node_init == siblings_output: + continue + + node_copy = copies[node_init.name()] + for node in node_init.dependent(): + for idx in range(node.inputs()): + if node.input(idx) != node_init: + continue + + if node in siblings: + copies[node.name()].setInput(idx, node_copy) + else: + last_input.setInput(0, node_copy) + + for node in node_init.dependencies(): + for idx in range(node_init.inputs()): + if node_init.input(idx) != node: + continue + + if node_init == siblings_input: + copy_input.setInput(idx, node) + elif node in siblings: + node_copy.setInput(idx, copies[node.name()]) + else: + node_copy.setInput(idx, last_output) + + siblings_input.setInput(0, copy_output) + + def build_workfile_template(*args): builder = NukeTemplateBuilder(registered_host()) builder.build_template() From 013ee5660af7dcb2b88499edf8d4d6dfcf3a259c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 12:17:39 +0100 Subject: [PATCH 101/171] fix typo --- openpype/hosts/nuke/plugins/load/load_backdrop.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/load/load_backdrop.py b/openpype/hosts/nuke/plugins/load/load_backdrop.py index 164ab6f9f4..d1fb763500 100644 --- a/openpype/hosts/nuke/plugins/load/load_backdrop.py +++ b/openpype/hosts/nuke/plugins/load/load_backdrop.py @@ -28,7 +28,7 @@ class LoadBackdropNodes(load.LoaderPlugin): representations = ["nk"] families = ["workfile", "nukenodes"] - label = "Iport Nuke Nodes" + label = "Import Nuke Nodes" order = 0 icon = "eye" color = "white" From 410ed90cb33348ee38525a3045612ef2a19f1970 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 12:17:54 +0100 Subject: [PATCH 102/171] fix typo --- openpype/pipeline/create/creator_plugins.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index bb5ce00452..8500dd1e22 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -608,7 +608,7 @@ def discover_legacy_creator_plugins(): plugin.apply_settings(project_settings, system_settings) except Exception: log.warning( - "Failed to apply settings to loader {}".format( + "Failed to apply settings to creator {}".format( plugin.__name__ ), exc_info=True From ae709afaaf85ca6bd1d6d74476ea8c561d550eec Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 23 Dec 2022 12:25:01 +0100 Subject: [PATCH 103/171] Added dynamic message to Slack notification Artist can now add additional message, specific per instance and publish, if they are using Publisher. 
--- .../plugins/publish/collect_slack_family.py | 23 +++++++++++++++++-- .../plugins/publish/integrate_slack_api.py | 11 +++++---- website/docs/module_slack.md | 6 +++++ 3 files changed, 34 insertions(+), 6 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/slack/plugins/publish/collect_slack_family.py index 27e899d59a..b3e7bbdcec 100644 --- a/openpype/modules/slack/plugins/publish/collect_slack_family.py +++ b/openpype/modules/slack/plugins/publish/collect_slack_family.py @@ -1,10 +1,12 @@ import pyblish.api from openpype.lib.profiles_filtering import filter_profiles -from openpype.pipeline import legacy_io +from openpype.lib import attribute_definitions +from openpype.pipeline import OpenPypePyblishPluginMixin -class CollectSlackFamilies(pyblish.api.InstancePlugin): +class CollectSlackFamilies(pyblish.api.InstancePlugin, + OpenPypePyblishPluginMixin): """Collect family for Slack notification Expects configured profile in @@ -17,6 +19,18 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin): profiles = None + @classmethod + def get_attribute_defs(cls): + return [ + attribute_definitions.TextDef( + # Key under which it will be stored + "additional_message", + # Use plugin label as label for attribute + label="Additional Slack message", + placeholder="" + ) + ] + def process(self, instance): task_data = instance.data["anatomyData"].get("task", {}) family = self.main_family_from_instance(instance) @@ -55,6 +69,11 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin): ["token"]) instance.data["slack_token"] = slack_token + attribute_values = self.get_attr_values_from_data(instance.data) + additional_message = attribute_values.get("additional_message") + if additional_message: + instance.data["slack_additional_message"] = additional_message + def main_family_from_instance(self, instance): # TODO yank from integrate """Returns main family of entered instance.""" family = instance.data.get("family") diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 0cd5ec9de8..d94ecb02e4 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -31,11 +31,14 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): review_path = self._get_review_path(instance) publish_files = set() + message = '' + additional_message = instance.data.get("slack_additional_message") + if additional_message: + message = "{} \n".format(additional_message) for message_profile in instance.data["slack_channel_message_profiles"]: - message = self._get_filled_message(message_profile["message"], - instance, - review_path) - self.log.debug("message:: {}".format(message)) + message += self._get_filled_message(message_profile["message"], + instance, + review_path) if not message: return diff --git a/website/docs/module_slack.md b/website/docs/module_slack.md index 3a2842da63..2bfd7cb562 100644 --- a/website/docs/module_slack.md +++ b/website/docs/module_slack.md @@ -94,6 +94,12 @@ Few keys also have Capitalized and UPPERCASE format. 
Values will be modified acc Here you can find review {review_filepath} ``` +##### Dynamic message for artists +If artists uses host with implemented Publisher (new UI for publishing, implemented in Tray Publisher, Adobe products etc), it is possible for +them to add additional message (notification for specific users for example, artists must provide proper user id with '@'). +Additional message will be sent only if at least one profile, eg. one target channel is configured. +All available template keys (see higher) could be used here as a placeholder too. + #### Message retention Currently no purging of old messages is implemented in Openpype. Admins of Slack should set their own retention of messages and files per channel. (see https://slack.com/help/articles/203457187-Customize-message-and-file-retention-policies) From c6c08fd4ccaf779c2569139ab83f8fb16fe6c785 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 12:46:14 +0100 Subject: [PATCH 104/171] global: fix creator plugin discovery --- .../pipeline/workfile/workfile_template_builder.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index d85d6b50dd..07a1f3ec58 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -238,7 +238,14 @@ class AbstractTemplateBuilder(object): def get_creators_by_name(self): if self._creators_by_name is None: - self._creators_by_name = discover_legacy_creator_plugins() + self._creators_by_name = {} + for creator in discover_legacy_creator_plugins(): + creator_name = creator.__name__ + if creator_name in self._creators_by_name: + raise KeyError( + "Duplicated creator name {} !".format(creator_name) + ) + self._creators_by_name[creator_name] = creator return self._creators_by_name def get_shared_data(self, key): @@ -1497,6 +1504,8 @@ class PlaceholderCreateMixin(object): """ creators_by_name = self.builder.get_creators_by_name() + print(creators_by_name) + creator_items = [ (creator_name, creator.label or creator_name) for creator_name, creator in creators_by_name.items() From c16a5289e49286bb7c65be04a9629f846cab58ce Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 23 Dec 2022 14:09:03 +0100 Subject: [PATCH 105/171] OP-4470 - better handle missing keys Message might contain {placeholder} which are not collected. Previously it would fail without sending message. Now missing keys are double escaped {{}}. 
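
A short worked example of the escaping described above; the template and data are made up, and the real plugin finds the unknown keys with a regex (see _escape_missing_keys in the diff below), but the effect is the same.

    # The template references 'review_filepath', which was never collected.
    message_templ = "{subset} published, review: {review_filepath}"
    fill_data = {"subset": "renderMain"}

    # Escaping turns the unknown placeholder into a double-braced literal...
    escaped = message_templ.replace("{review_filepath}", "{{review_filepath}}")

    # ...so format() no longer raises KeyError and the text stays readable.
    print(escaped.format(**fill_data))
    # renderMain published, review: {review_filepath}
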
--- .../plugins/publish/integrate_slack_api.py | 28 ++++++++++++++++--- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 0cd5ec9de8..9122c1c5ed 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -1,4 +1,5 @@ import os +import re import six import pyblish.api import copy @@ -132,14 +133,14 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): fill_key = "task[{}]".format(key) fill_pairs.append((fill_key, value)) - self.log.debug("fill_pairs ::{}".format(fill_pairs)) multiple_case_variants = prepare_template_data(fill_pairs) fill_data.update(multiple_case_variants) - - message = None + message = '' try: - message = message_templ.format(**fill_data) + message = self._escape_missing_keys(message_templ, fill_data).\ + format(**fill_data) except Exception: + # shouldn't happen self.log.warning( "Some keys are missing in {}".format(message_templ), exc_info=True) @@ -263,3 +264,22 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): msg = " - application must added to channel '{}'.".format(channel) error_str += msg + " Ask Slack admin." return error_str + + def _escape_missing_keys(self, message, fill_data): + """Double escapes placeholder which are missing in 'fill_data'""" + placeholder_keys = re.findall("\{([^}]+)\}", message) + + fill_keys = [] + for key, value in fill_data.items(): + fill_keys.append(key) + if isinstance(value, dict): + for child_key in value.keys(): + fill_keys.append("{}[{}]".format(key, child_key)) + + not_matched = set(placeholder_keys) - set(fill_keys) + + for not_matched_item in not_matched: + message = message.replace("{}".format(not_matched_item), + "{{{}}}".format(not_matched_item)) + + return message From 3a41d6a72158af0707d58f146b834076ed386b13 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 23 Dec 2022 14:13:00 +0100 Subject: [PATCH 106/171] OP-4470 - safer handling of review path 'published_path' might be missing. Thumbnail path was fixed previously, this one was missed. 
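
The fallback the diff below implements, reduced to a minimal sketch; the representation dict is illustrative and, like the plugin, it assumes a single-file representation where "files" is a plain string.

    import os

    def guess_review_path(repre):
        # Prefer the path written by the integrator; if it is missing, fall
        # back to the file still sitting in the staging directory.
        path = repre.get("published_path")
        if not path:
            path = os.path.join(repre["stagingDir"], repre["files"])
        return path if os.path.exists(path) else None
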
--- .../slack/plugins/publish/integrate_slack_api.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 9122c1c5ed..c4d6b27726 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -163,17 +163,21 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): def _get_review_path(self, instance): """Returns abs url for review if present in instance repres""" - published_path = None + review_path = None for repre in instance.data.get("representations", []): tags = repre.get('tags', []) if (repre.get("review") or "review" in tags or "burnin" in tags): - if os.path.exists(repre["published_path"]): - published_path = repre["published_path"] + repre_review_path = ( + repre.get("published_path") or + os.path.join(repre["stagingDir"], repre["files"]) + ) + if os.path.exists(repre_review_path): + review_path = repre_review_path if "burnin" in tags: # burnin has precedence if exists break - return published_path + return review_path def _python2_call(self, token, channel, message, publish_files): from slackclient import SlackClient From 81de2cf0c902671104de22f7768ed37ac8ed5c39 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 14:25:41 +0100 Subject: [PATCH 107/171] global: fix _repr_ pformat printing --- openpype/pipeline/workfile/workfile_template_builder.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 07a1f3ec58..630a11e4b5 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -984,7 +984,7 @@ class PlaceholderItem(object): def __init__(self, scene_identifier, data, plugin): self._log = None - self._scene_identifier = scene_identifier + self.name = scene_identifier self._data = data self._plugin = plugin @@ -1062,7 +1062,7 @@ class PlaceholderItem(object): @property def scene_identifier(self): - return self._scene_identifier + return self.name @property def finished(self): From 4f7e4fcac3ffbddc258247941b063f25746ef174 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 14:26:28 +0100 Subject: [PATCH 108/171] global: add _before_instance_create function for storing created nodes --- openpype/pipeline/workfile/workfile_template_builder.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 630a11e4b5..dce36eca82 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -1594,6 +1594,8 @@ class PlaceholderCreateMixin(object): "creator_plugin": creator_plugin } + self._before_instance_create(placeholder) + # compile subset name from variant try: creator_instance = creator_plugin( @@ -1633,6 +1635,11 @@ class PlaceholderCreateMixin(object): pass + def _before_instance_create(self, placeholder): + """Can be overriden. Is called before instance is created.""" + + pass + class LoadPlaceholderItem(PlaceholderItem): """PlaceholderItem for plugin which is loading representations. 
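
The _before_instance_create hook added above gives host integrations a place to snapshot scene state before the creator plugin runs; the next patch wires it up for Nuke. A generic sketch of such an override follows, assuming both base classes live in the module this patch modifies, with the node-listing helper standing in for the host's own API.

    from openpype.pipeline.workfile.workfile_template_builder import (
        PlaceholderCreateMixin,
        PlaceholderPlugin,
    )


    def list_scene_nodes():
        """Stand-in for the host's own 'list all nodes' call."""
        return []


    class MyHostPlaceholderCreatePlugin(PlaceholderCreateMixin,
                                        PlaceholderPlugin):
        """Hypothetical host plugin using the new pre-create hook."""

        def _before_instance_create(self, placeholder):
            # Remember what already exists so the nodes created by the
            # creator plugin can be told apart afterwards.
            placeholder.data["nodes_init"] = list_scene_nodes()
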
From 5924dcc1f5d80b532fa30137cf1992b89d71d092 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 14:27:32 +0100 Subject: [PATCH 109/171] nuke: implementing _before_instance_create funtion --- openpype/hosts/nuke/api/workfile_template_builder.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 5e9e5fcdce..33dcdab749 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -574,6 +574,9 @@ class NukePlaceholderCreatePlugin( placeholder_data["delete"] = False return placeholder_data + def _before_instance_create(self, placeholder): + placeholder.data["nodes_init"] = nuke.allNodes() + def collect_placeholders(self): output = [] scene_placeholders = self._collect_scene_placeholders() From c06315d5d6d2ce567e152f25600b23fc2b7c2b47 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 14:28:03 +0100 Subject: [PATCH 110/171] nuke: fixing logic for creator placeholder plugin processing --- openpype/hosts/nuke/api/workfile_template_builder.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 33dcdab749..973e15b192 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -76,8 +76,7 @@ class NukePlaceholderPlugin(PlaceholderPlugin): node_knobs = node.knobs() if ( - "builder_type" not in node_knobs - or "is_placeholder" not in node_knobs + "is_placeholder" not in node_knobs or not node.knob("is_placeholder").value() ): continue @@ -756,16 +755,9 @@ class NukePlaceholderCreatePlugin( created_nodes = placeholder.data["last_created"] created_nodes_set = set(created_nodes) - data = {"repre_id": str(placeholder.data["last_repre_id"])} for node in created_nodes: node_knobs = node.knobs() - if "builder_type" not in node_knobs: - # save the id of representation for all imported nodes - imprint(node, data) - node.knob("repre_id").setVisible(False) - refresh_node(node) - continue if ( "is_placeholder" not in node_knobs From c0157e5787a822064238847a4b2a7e5bac71970c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 23 Dec 2022 17:34:47 +0100 Subject: [PATCH 111/171] remove todo --- openpype/hosts/nuke/api/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/workfile_template_builder.py b/openpype/hosts/nuke/api/workfile_template_builder.py index 973e15b192..1b81f24e86 100644 --- a/openpype/hosts/nuke/api/workfile_template_builder.py +++ b/openpype/hosts/nuke/api/workfile_template_builder.py @@ -588,7 +588,7 @@ class NukePlaceholderCreatePlugin( continue placeholder_data = self._parse_placeholder_node_data(node) - # TODO do data validations and maybe updgrades if are invalid + output.append( CreatePlaceholderItem(node_name, placeholder_data, self) ) From f5842d91bd49cc955f49ed53388efd565b84f0a6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Dec 2022 12:18:11 +0100 Subject: [PATCH 112/171] rename variable 'max_len' to 'message_len' --- openpype/widgets/message_window.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/widgets/message_window.py b/openpype/widgets/message_window.py index 94e51f5d4f..8301f5e8f2 100644 --- a/openpype/widgets/message_window.py +++ 
b/openpype/widgets/message_window.py @@ -105,16 +105,18 @@ class ScrollMessageBox(QtWidgets.QDialog): content_widget = QtWidgets.QWidget(self) scroll_widget.setWidget(content_widget) - max_len = 0 + message_len = 0 content_layout = QtWidgets.QVBoxLayout(content_widget) for message in messages: label_widget = QtWidgets.QLabel(message, content_widget) content_layout.addWidget(label_widget) - max_len = max(max_len, len(message)) + message_len = max(message_len, len(message)) # guess size of scrollable area max_width = QtWidgets.QApplication.desktop().availableGeometry().width - scroll_widget.setMinimumWidth(min(max_width, max_len * 6)) + scroll_widget.setMinimumWidth( + min(max_width, message_len * 6) + ) layout.addWidget(scroll_widget) if not cancelable: # if no specific buttons OK only From a941aabc049ee3ec59ef829dbd58c1abca937868 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Dec 2022 12:21:05 +0100 Subject: [PATCH 113/171] call the width method to get the value --- openpype/widgets/message_window.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/widgets/message_window.py b/openpype/widgets/message_window.py index 8301f5e8f2..2b186475ee 100644 --- a/openpype/widgets/message_window.py +++ b/openpype/widgets/message_window.py @@ -113,7 +113,7 @@ class ScrollMessageBox(QtWidgets.QDialog): message_len = max(message_len, len(message)) # guess size of scrollable area - max_width = QtWidgets.QApplication.desktop().availableGeometry().width + max_width = QtWidgets.QApplication.desktop().availableGeometry().width() scroll_widget.setMinimumWidth( min(max_width, message_len * 6) ) From f3c13e7669c149ef4b652820a2f72f4553484ea2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Dec 2022 12:26:25 +0100 Subject: [PATCH 114/171] fix too long line --- openpype/widgets/message_window.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/widgets/message_window.py b/openpype/widgets/message_window.py index 2b186475ee..a44df2ec8e 100644 --- a/openpype/widgets/message_window.py +++ b/openpype/widgets/message_window.py @@ -113,7 +113,8 @@ class ScrollMessageBox(QtWidgets.QDialog): message_len = max(message_len, len(message)) # guess size of scrollable area - max_width = QtWidgets.QApplication.desktop().availableGeometry().width() + desktop = QtWidgets.QApplication.desktop() + max_width = desktop.availableGeometry().width() scroll_widget.setMinimumWidth( min(max_width, message_len * 6) ) From efab124e0f721e03367710f1800bb6b0a8f9ca1e Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 31 Dec 2022 03:27:31 +0000 Subject: [PATCH 115/171] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 40abb9e9fd..4fbe5a3608 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.10-nightly.4" +__version__ = "3.14.10-nightly.5" From df62f315cc60f72ff401992b2853b8fc47e1cd31 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 3 Jan 2023 23:45:51 +0800 Subject: [PATCH 116/171] drop out the force flag in referenceQuery --- openpype/hosts/maya/plugins/publish/extract_import_reference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index b77740ae13..ce284d16fb 100644 --- 
a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -82,7 +82,7 @@ cmds.file(current_name, open=True, force=True) print(">>> Processing references") all_reference = cmds.file(q=True, reference=True) or [] for ref in all_reference: - if cmds.referenceQuery(ref, f=True, il=True): + if cmds.referenceQuery(ref, il=True): cmds.file(ref, importReference=True) nested_ref = cmds.file(q=True, reference=True) From 7a372d1b1cd481610a044a552e155db62ae8adbb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 3 Jan 2023 17:05:47 +0100 Subject: [PATCH 117/171] Added possibility to mention users or groups --- .../plugins/publish/integrate_slack_api.py | 271 ++++++++++++++---- 1 file changed, 213 insertions(+), 58 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index d94ecb02e4..8d34521194 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -3,6 +3,9 @@ import six import pyblish.api import copy from datetime import datetime +import re +from abc import ABCMeta, abstractmethod +import time from openpype.client import OpenPypeMongoConnection from openpype.lib.plugin_tools import prepare_template_data @@ -33,6 +36,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): publish_files = set() message = '' additional_message = instance.data.get("slack_additional_message") + token = instance.data["slack_token"] if additional_message: message = "{} \n".format(additional_message) for message_profile in instance.data["slack_channel_message_profiles"]: @@ -52,18 +56,16 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): project = instance.context.data["anatomyData"]["project"]["code"] for channel in message_profile["channels"]: if six.PY2: - msg_id, file_ids = \ - self._python2_call(instance.data["slack_token"], - channel, - message, - publish_files) + client = SlackPython2Operations(token, self.log) else: - msg_id, file_ids = \ - self._python3_call(instance.data["slack_token"], - channel, - message, - publish_files) + client = SlackPython3Operations(token, self.log) + users, groups = client.get_users_and_groups() + message = self._translate_users(message, users, groups) + + msg_id, file_ids = client.send_message(channel, + message, + publish_files) if not msg_id: return @@ -177,15 +179,211 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): break return published_path - def _python2_call(self, token, channel, message, publish_files): - from slackclient import SlackClient + def _get_user_id(self, users, user_name): + """Returns internal slack id for user name""" + user_id = None + for user in users: + if (not user.get("deleted") and + (user_name.lower() == user["name"].lower() or + user_name.lower() == user["real_name"])): + user_id = user["id"] + break + return user_id + + def _get_group_id(self, groups, group_name): + """Returns internal group id for string name""" + group_id = None + for group in groups: + if (not group.get("date_delete") and + (group_name.lower() == group["name"].lower() or + group_name.lower() == group["handle"])): + group_id = group["id"] + break + return group_id + + def _translate_users(self, message, users, groups): + matches = re.findall("(?".format(slack_id) + else: + slack_id = self._get_group_id(groups, user_name) + if slack_id: + mention = "".format(slack_id) + if mention: + message = 
message.replace(orig_user, mention) + + return message + + +@six.add_metaclass(ABCMeta) +class AbstractSlackOperations: + + @abstractmethod + def _get_users_list(self): + """Return response with user list, different methods Python 2 vs 3""" + raise NotImplementedError + + @abstractmethod + def _get_usergroups_list(self): + """Return response with user list, different methods Python 2 vs 3""" + raise NotImplementedError + + @abstractmethod + def get_users_and_groups(self): + """Return users and groups, different retry in Python 2 vs 3""" + raise NotImplementedError + + @abstractmethod + def send_message(self, channel, message, publish_files): + """Sends message to channel, different methods in Python 2 vs 3""" + pass + + def _get_users(self): + """Parse users.list response into list of users (dicts)""" + first = True + next_page = None + users = [] + while first or next_page: + response = self._get_users_list() + first = False + next_page = response.get("response_metadata").get("next_cursor") + for user in response.get("members"): + users.append(user) + + return users + + def _get_groups(self): + """Parses usergroups.list response into list of groups (dicts)""" + response = self._get_usergroups_list() + groups = [] + for group in response.get("usergroups"): + groups.append(group) + return groups + + def _enrich_error(self, error_str, channel): + """Enhance known errors with more helpful notations.""" + if 'not_in_channel' in error_str: + # there is no file.write.public scope, app must be explicitly in + # the channel + msg = " - application must added to channel '{}'.".format(channel) + error_str += msg + " Ask Slack admin." + return error_str + + +class SlackPython3Operations(AbstractSlackOperations): + + def __init__(self, token, log): + from slack_sdk import WebClient + + self.client = WebClient(token=token) + self.log = log + + def _get_users_list(self): + return self.client.users_list() + + def _get_usergroups_list(self): + return self.client.usergroups_list() + + def get_users_and_groups(self): + from slack_sdk.errors import SlackApiError + while True: + try: + users = self._get_users() + groups = self._get_groups() + break + except SlackApiError as e: + retry_after = e.response.headers.get("Retry-After") + if retry_after: + print( + "Rate limit hit, sleeping for {}".format(retry_after)) + time.sleep(int(retry_after)) + else: + raise e + + return users, groups + + def send_message(self, channel, message, publish_files): + from slack_sdk.errors import SlackApiError + try: + attachment_str = "\n\n Attachment links: \n" + file_ids = [] + for published_file in publish_files: + response = self.client.files_upload( + file=published_file, + filename=os.path.basename(published_file)) + attachment_str += "\n<{}|{}>".format( + response["file"]["permalink"], + os.path.basename(published_file)) + file_ids.append(response["file"]["id"]) + + if publish_files: + message += attachment_str + + message = self.translate_users(message) + + response = self.client.chat_postMessage( + channel=channel, + text=message + ) + return response.data["ts"], file_ids + except SlackApiError as e: + # # You will get a SlackApiError if "ok" is False + error_str = self._enrich_error(str(e.response["error"]), channel) + self.log.warning("Error happened {}".format(error_str)) + except Exception as e: + error_str = self._enrich_error(str(e), channel) + self.log.warning("Not SlackAPI error", exc_info=True) + + return None, [] + + +class SlackPython2Operations(AbstractSlackOperations): + + def __init__(self, token, log): + 
from slackclient import SlackClient + + self.client = SlackClient(token=token) + self.log = log + + def _get_users_list(self): + return self.client.api_call("users.list") + + def _get_usergroups_list(self): + return self.client.api_call("usergroups.list") + + def get_users_and_groups(self): + while True: + try: + users = self._get_users() + groups = self._get_groups() + break + except Exception as e: + retry_after = e.response.headers.get("Retry-After") + if retry_after: + print( + "Rate limit hit, sleeping for {}".format(retry_after)) + time.sleep(int(retry_after)) + else: + raise e + + return users, groups + + def send_message(self, channel, message, publish_files): try: - client = SlackClient(token) attachment_str = "\n\n Attachment links: \n" file_ids = [] for p_file in publish_files: with open(p_file, 'rb') as pf: - response = client.api_call( + response = self.client.api_call( "files.upload", file=pf, channel=channel, @@ -206,7 +404,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): if publish_files: message += attachment_str - response = client.api_call( + response = self.client.api_call( "chat.postMessage", channel=channel, text=message @@ -223,46 +421,3 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): self.log.warning("Error happened: {}".format(error_str)) return None, [] - - def _python3_call(self, token, channel, message, publish_files): - from slack_sdk import WebClient - from slack_sdk.errors import SlackApiError - try: - client = WebClient(token=token) - attachment_str = "\n\n Attachment links: \n" - file_ids = [] - for published_file in publish_files: - response = client.files_upload( - file=published_file, - filename=os.path.basename(published_file)) - attachment_str += "\n<{}|{}>".format( - response["file"]["permalink"], - os.path.basename(published_file)) - file_ids.append(response["file"]["id"]) - - if publish_files: - message += attachment_str - - response = client.chat_postMessage( - channel=channel, - text=message - ) - return response.data["ts"], file_ids - except SlackApiError as e: - # You will get a SlackApiError if "ok" is False - error_str = self._enrich_error(str(e.response["error"]), channel) - self.log.warning("Error happened {}".format(error_str)) - except Exception as e: - error_str = self._enrich_error(str(e), channel) - self.log.warning("Not SlackAPI error", exc_info=True) - - return None, [] - - def _enrich_error(self, error_str, channel): - """Enhance known errors with more helpful notations.""" - if 'not_in_channel' in error_str: - # there is no file.write.public scope, app must be explicitly in - # the channel - msg = " - application must added to channel '{}'.".format(channel) - error_str += msg + " Ask Slack admin." 
- return error_str From 1ee50975c0e61acac51f7f76b905a420e95487c6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 3 Jan 2023 17:14:41 +0100 Subject: [PATCH 118/171] Fix wrong position of method --- .../plugins/publish/integrate_slack_api.py | 38 +++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index a010d08a82..fc5342177d 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -228,6 +228,25 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): return message + def _escape_missing_keys(self, message, fill_data): + """Double escapes placeholder which are missing in 'fill_data'""" + placeholder_keys = re.findall("\{([^}]+)\}", message) + + fill_keys = [] + for key, value in fill_data.items(): + fill_keys.append(key) + if isinstance(value, dict): + for child_key in value.keys(): + fill_keys.append("{}[{}]".format(key, child_key)) + + not_matched = set(placeholder_keys) - set(fill_keys) + + for not_matched_item in not_matched: + message = message.replace("{}".format(not_matched_item), + "{{{}}}".format(not_matched_item)) + + return message + @six.add_metaclass(ABCMeta) class AbstractSlackOperations: @@ -283,25 +302,6 @@ class AbstractSlackOperations: error_str += msg + " Ask Slack admin." return error_str - def _escape_missing_keys(self, message, fill_data): - """Double escapes placeholder which are missing in 'fill_data'""" - placeholder_keys = re.findall("\{([^}]+)\}", message) - - fill_keys = [] - for key, value in fill_data.items(): - fill_keys.append(key) - if isinstance(value, dict): - for child_key in value.keys(): - fill_keys.append("{}[{}]".format(key, child_key)) - - not_matched = set(placeholder_keys) - set(fill_keys) - - for not_matched_item in not_matched: - message = message.replace("{}".format(not_matched_item), - "{{{}}}".format(not_matched_item)) - - return message - class SlackPython3Operations(AbstractSlackOperations): From f80fe3fb938488009974cd931b285ea8564d7b2b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 3 Jan 2023 17:14:57 +0100 Subject: [PATCH 119/171] Fix obsolete call of method --- openpype/modules/slack/plugins/publish/integrate_slack_api.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index fc5342177d..803a07f5d2 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -352,8 +352,6 @@ class SlackPython3Operations(AbstractSlackOperations): if publish_files: message += attachment_str - message = self.translate_users(message) - response = self.client.chat_postMessage( channel=channel, text=message From 394c678299f26967ba64b87a7c8684c1d0419191 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 3 Jan 2023 17:19:50 +0100 Subject: [PATCH 120/171] Do not throw exception if user or group list error Skip notification, publish shouldn't fail because of this. 
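
The intent, reduced to a sketch with placeholder objects: if pulling Slack users or groups fails for any reason, publishing should continue and only the @mention translation is lost.

    def safe_users_and_groups(slack_ops, log):
        # Any failure here must not break the publish; fall back to empty
        # lists so the message is still sent, just without mentions.
        try:
            return slack_ops.get_users_and_groups()
        except Exception:
            log.warning(
                "Cannot pull user info, mentions won't work", exc_info=True)
            return [], []
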
--- .../modules/slack/plugins/publish/integrate_slack_api.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 803a07f5d2..f18b927c98 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -331,7 +331,9 @@ class SlackPython3Operations(AbstractSlackOperations): "Rate limit hit, sleeping for {}".format(retry_after)) time.sleep(int(retry_after)) else: - raise e + self.log.warning("Cannot pull user info, " + "mentions won't work", exc_info=True) + return [], [] return users, groups @@ -395,7 +397,9 @@ class SlackPython2Operations(AbstractSlackOperations): "Rate limit hit, sleeping for {}".format(retry_after)) time.sleep(int(retry_after)) else: - raise e + self.log.warning("Cannot pull user info, " + "mentions won't work", exc_info=True) + return [], [] return users, groups From 61ef7479e8bbb842378229e87b2187924a368c7a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 3 Jan 2023 17:21:14 +0100 Subject: [PATCH 121/171] =?UTF-8?q?=F0=9F=94=A7=20pass=20mongo=20url=20as?= =?UTF-8?q?=20default?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/settings/defaults/project_settings/deadline.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index 6e1c0f3540..527f5c0d24 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -2,7 +2,7 @@ "deadline_servers": [], "publish": { "CollectDefaultDeadlineServer": { - "pass_mongo_url": false + "pass_mongo_url": true }, "CollectDeadlinePools": { "primary_pool": "", From 68fe82323883c8035f3831820681174980649802 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 3 Jan 2023 17:53:00 +0100 Subject: [PATCH 122/171] Fix resolving of user_id Display name or real_name could be used also. --- .../slack/plugins/publish/integrate_slack_api.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index f18b927c98..577ead9667 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -187,10 +187,13 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): def _get_user_id(self, users, user_name): """Returns internal slack id for user name""" user_id = None + user_name_lower = user_name.lower() for user in users: if (not user.get("deleted") and - (user_name.lower() == user["name"].lower() or - user_name.lower() == user["real_name"])): + (user_name_lower == user["name"].lower() or + # bots dont have display_name + user_name_lower == user.get("display_name", '').lower() or + user_name_lower == user.get("real_name", '').lower())): user_id = user["id"] break return user_id @@ -208,8 +211,9 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): def _translate_users(self, message, users, groups): matches = re.findall("(? 
Date: Tue, 3 Jan 2023 17:58:28 +0100 Subject: [PATCH 123/171] Updated documentation --- website/docs/module_slack.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/website/docs/module_slack.md b/website/docs/module_slack.md index 2bfd7cb562..1999912fdc 100644 --- a/website/docs/module_slack.md +++ b/website/docs/module_slack.md @@ -100,6 +100,10 @@ them to add additional message (notification for specific users for example, art Additional message will be sent only if at least one profile, eg. one target channel is configured. All available template keys (see higher) could be used here as a placeholder too. +#### User or group notifications +Message template or dynamic data could contain user or group notification, it must be in format @artist.name, '@John Doe' or "@admin group" for display name containing space. +If value prefixed with @ is not resolved and Slack user is not found, message will contain same value (not translated by Slack into link and proper mention.) + #### Message retention Currently no purging of old messages is implemented in Openpype. Admins of Slack should set their own retention of messages and files per channel. (see https://slack.com/help/articles/203457187-Customize-message-and-file-retention-policies) From f1111a99bda950cb79ff36675f12c34146f05ddf Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 4 Jan 2023 03:28:26 +0000 Subject: [PATCH 124/171] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 4fbe5a3608..ae514e371e 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.10-nightly.5" +__version__ = "3.14.10-nightly.6" From b2e8ea6fb80faeef67cbae6db212422c43cb0b58 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 4 Jan 2023 11:36:12 +0100 Subject: [PATCH 125/171] Hound --- .../modules/slack/plugins/publish/integrate_slack_api.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 577ead9667..97182ffd9b 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -4,7 +4,6 @@ import six import pyblish.api import copy from datetime import datetime -import re from abc import ABCMeta, abstractmethod import time @@ -210,8 +209,9 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): return group_id def _translate_users(self, message, users, groups): - matches = re.findall("(? format.""" + matches = re.findall(r"(? 
Date: Wed, 4 Jan 2023 12:44:07 +0100 Subject: [PATCH 126/171] Fix - search pattern Updated to use user profile --- .../modules/slack/plugins/publish/integrate_slack_api.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 97182ffd9b..02197a6d01 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -191,8 +191,10 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): if (not user.get("deleted") and (user_name_lower == user["name"].lower() or # bots dont have display_name - user_name_lower == user.get("display_name", '').lower() or - user_name_lower == user.get("real_name", '').lower())): + user_name_lower == user["profile"].get("display_name", + '').lower() or + user_name_lower == user["profile"].get("real_name", + '').lower())): user_id = user["id"] break return user_id @@ -210,7 +212,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): def _translate_users(self, message, users, groups): """Replace all occurences of @mentions with proper <@name> format.""" - matches = re.findall(r"(? Date: Wed, 4 Jan 2023 12:51:47 +0100 Subject: [PATCH 127/171] Fix - cannot pull response from ordinary exception --- .../slack/plugins/publish/integrate_slack_api.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 02197a6d01..bb5cd40936 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -397,15 +397,9 @@ class SlackPython2Operations(AbstractSlackOperations): groups = self._get_groups() break except Exception as e: - retry_after = e.response.headers.get("Retry-After") - if retry_after: - print( - "Rate limit hit, sleeping for {}".format(retry_after)) - time.sleep(int(retry_after)) - else: - self.log.warning("Cannot pull user info, " - "mentions won't work", exc_info=True) - return [], [] + self.log.warning("Cannot pull user info, " + "mentions won't work", exc_info=True) + return [], [] return users, groups From dff87d4e1cfabafb4ec40c46a1cbc48c851f661f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 4 Jan 2023 12:52:32 +0100 Subject: [PATCH 128/171] Hound --- openpype/modules/slack/plugins/publish/integrate_slack_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index bb5cd40936..21069e0b13 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -396,7 +396,7 @@ class SlackPython2Operations(AbstractSlackOperations): users = self._get_users() groups = self._get_groups() break - except Exception as e: + except Exception: self.log.warning("Cannot pull user info, " "mentions won't work", exc_info=True) return [], [] From 0fd51e8a5b5f2068ea2e2970617e4f79fd58b5b0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 4 Jan 2023 14:21:58 +0100 Subject: [PATCH 129/171] traypublisher: adding single file subset name condition --- .../hosts/traypublisher/plugins/create/create_editorial.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git 
a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 205403d33e..614cf9dbca 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -264,9 +264,14 @@ or updating already created. Publishing will create OTIO file. ) + # alter subset name if multiple files + subset_name_edit = subset_name + if len(sequence_paths) > 1: + subset_name_edit = subset_name + str(index) + # create otio editorial instance self._create_otio_instance( - subset_name + str(index), + subset_name_edit, instance_data, seq_path, media_path, otio_timeline From 40cf2956fd968c8ffaf47c584c69705a852e4bc6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 4 Jan 2023 14:26:15 +0100 Subject: [PATCH 130/171] Update openpype/pipeline/workfile/workfile_template_builder.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/pipeline/workfile/workfile_template_builder.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index dce36eca82..24b0cc81f1 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -1504,7 +1504,6 @@ class PlaceholderCreateMixin(object): """ creators_by_name = self.builder.get_creators_by_name() - print(creators_by_name) creator_items = [ (creator_name, creator.label or creator_name) From e69f3539eaeac23d0663e71f83c9d463cbc50699 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 4 Jan 2023 14:26:26 +0100 Subject: [PATCH 131/171] Update openpype/pipeline/workfile/workfile_template_builder.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/pipeline/workfile/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 24b0cc81f1..a834ca0e21 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -675,7 +675,7 @@ class AbstractTemplateBuilder(object): # switch to remove placeholders after they are used placeholder_keep = profile.get("placeholder_keep") # backward compatibility, since default is True - if placeholder_keep is not False: + if placeholder_keep is None: placeholder_keep = True if not path: From b01322645ae14e74b09d4125fcac5a8160d76c57 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 4 Jan 2023 14:44:04 +0100 Subject: [PATCH 132/171] Correctly repair frame range with handle attributes if `handleStart` and `handleEnd` available on instance --- .../plugins/publish/validate_frame_range.py | 33 +++++++++++++++---- 1 file changed, 27 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_frame_range.py b/openpype/hosts/maya/plugins/publish/validate_frame_range.py index 5e50ae72cd..dec2f00700 100644 --- a/openpype/hosts/maya/plugins/publish/validate_frame_range.py +++ b/openpype/hosts/maya/plugins/publish/validate_frame_range.py @@ -92,10 +92,31 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): """ Repair instance container to match asset data. 
""" - cmds.setAttr( - "{}.frameStart".format(instance.data["name"]), - instance.context.data.get("frameStartHandle")) - cmds.setAttr( - "{}.frameEnd".format(instance.data["name"]), - instance.context.data.get("frameEndHandle")) + node = instance.data["name"] + context = instance.context + + frame_start_handle = int(context.data.get("frameStartHandle")) + frame_end_handle = int(context.data.get("frameEndHandle")) + handle_start = int(context.data.get("handleStart")) + handle_end = int(context.data.get("handleEnd")) + frame_start = int(context.data.get("frameStart")) + frame_end = int(context.data.get("frameEnd")) + + # Start + if cmds.attributeQuery("handleStart", node=node, exists=True): + cmds.setAttr("{}.handleStart".format(node), handle_start) + cmds.setAttr("{}.frameStart".format(node), frame_start) + else: + # Include start handle in frame start if no separate handleStart + # attribute exists on the node + cmds.setAttr("{}.frameStart".format(node), frame_start_handle) + + # End + if cmds.attributeQuery("handleEnd", node=node, exists=True): + cmds.setAttr("{}.handleEnd".format(node), handle_end) + cmds.setAttr("{}.frameEnd".format(node), frame_end) + else: + # Include end handle in frame end if no separate handleEnd + # attribute exists on the node + cmds.setAttr("{}.frameEnd".format(node), frame_end_handle) From 0a4ed0988c0a359bbec6b6929d9073215af3cdea Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 4 Jan 2023 14:45:00 +0100 Subject: [PATCH 133/171] Do not force instance handleStart and handleEnd to zero if not `handles` in data --- openpype/hosts/maya/plugins/publish/collect_instances.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_instances.py b/openpype/hosts/maya/plugins/publish/collect_instances.py index ad1f794680..75bc935143 100644 --- a/openpype/hosts/maya/plugins/publish/collect_instances.py +++ b/openpype/hosts/maya/plugins/publish/collect_instances.py @@ -174,9 +174,6 @@ class CollectInstances(pyblish.api.ContextPlugin): if "handles" in data: data["handleStart"] = data["handles"] data["handleEnd"] = data["handles"] - else: - data["handleStart"] = 0 - data["handleEnd"] = 0 data["frameStartHandle"] = data["frameStart"] - data["handleStart"] # noqa: E501 data["frameEndHandle"] = data["frameEnd"] + data["handleEnd"] # noqa: E501 From e44f585aa63c32ebb6913426626fdb4c3fd0a008 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 4 Jan 2023 15:05:36 +0100 Subject: [PATCH 134/171] OP-4490 - safer resolving if site is active --- openpype/modules/sync_server/providers/dropbox.py | 2 +- openpype/modules/sync_server/providers/gdrive.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/sync_server/providers/dropbox.py b/openpype/modules/sync_server/providers/dropbox.py index 3515aee93f..a517e7d847 100644 --- a/openpype/modules/sync_server/providers/dropbox.py +++ b/openpype/modules/sync_server/providers/dropbox.py @@ -165,7 +165,7 @@ class DropboxHandler(AbstractProvider): Returns: (boolean) """ - return self.presets["enabled"] and self.dbx is not None + return self.presets.get("enabled") and self.dbx is not None @classmethod def get_configurable_items(cls): diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index 297a5c9fec..4e24fe41d2 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -119,7 +119,7 @@ class GDriveHandler(AbstractProvider): Returns: (boolean) """ - 
return self.presets["enabled"] and self.service is not None + return self.presets.get("enabled") and self.service is not None @classmethod def get_system_settings_schema(cls): From 8527554c2d3bafd1a7eb796219fe7cefebcf749b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 4 Jan 2023 15:07:36 +0100 Subject: [PATCH 135/171] OP-4490 - fixed unnecessary checks Configured sites were checked all the time even if they weren't used. Now it checks only sites that are set for project. --- openpype/modules/sync_server/sync_server.py | 67 ++++++--------------- 1 file changed, 17 insertions(+), 50 deletions(-) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index d0a40a60ff..d1ca69a31c 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -169,7 +169,7 @@ def resolve_paths(module, file_path, project_name, return local_file_path, remote_file_path -def site_is_working(module, project_name, site_name): +def _site_is_working(module, project_name, site_name, site_config): """ Confirm that 'site_name' is configured correctly for 'project_name'. @@ -179,54 +179,17 @@ def site_is_working(module, project_name, site_name): module (SyncServerModule) project_name(string): site_name(string): + site_config (dict): configuration for site from Settings Returns (bool) """ - if _get_configured_sites(module, project_name).get(site_name): - return True - return False + provider = module.get_provider_for_site(site=site_name) + handler = lib.factory.get_provider(provider, + project_name, + site_name, + presets=site_config) - -def _get_configured_sites(module, project_name): - """ - Loops through settings and looks for configured sites and checks - its handlers for particular 'project_name'. 
- - Args: - project_setting(dict): dictionary from Settings - only_project_name(string, optional): only interested in - particular project - Returns: - (dict of dict) - {'ProjectA': {'studio':True, 'gdrive':False}} - """ - settings = module.get_sync_project_setting(project_name) - return _get_configured_sites_from_setting(module, project_name, settings) - - -def _get_configured_sites_from_setting(module, project_name, project_setting): - if not project_setting.get("enabled"): - return {} - - initiated_handlers = {} - configured_sites = {} - all_sites = module._get_default_site_configs() - all_sites.update(project_setting.get("sites")) - for site_name, config in all_sites.items(): - provider = module.get_provider_for_site(site=site_name) - handler = initiated_handlers.get((provider, site_name)) - if not handler: - handler = lib.factory.get_provider(provider, - project_name, - site_name, - presets=config) - initiated_handlers[(provider, site_name)] = \ - handler - - if handler.is_active(): - configured_sites[site_name] = True - - return configured_sites + return handler.is_active() class SyncServerThread(threading.Thread): @@ -288,7 +251,8 @@ class SyncServerThread(threading.Thread): for project_name in enabled_projects: preset = self.module.sync_project_settings[project_name] - local_site, remote_site = self._working_sites(project_name) + local_site, remote_site = self._working_sites(project_name, + preset) if not all([local_site, remote_site]): continue @@ -464,7 +428,7 @@ class SyncServerThread(threading.Thread): self.timer.cancel() self.timer = None - def _working_sites(self, project_name): + def _working_sites(self, project_name, sync_config): if self.module.is_project_paused(project_name): self.log.debug("Both sites same, skipping") return None, None @@ -476,9 +440,12 @@ class SyncServerThread(threading.Thread): local_site, remote_site)) return None, None - configured_sites = _get_configured_sites(self.module, project_name) - if not all([local_site in configured_sites, - remote_site in configured_sites]): + local_site_config = sync_config.get('sites')[local_site] + remote_site_config = sync_config.get('sites')[remote_site] + if not all([_site_is_working(self.module, project_name, local_site, + local_site_config), + _site_is_working(self.module, project_name, remote_site, + remote_site_config)]): self.log.debug( "Some of the sites {} - {} is not working properly".format( local_site, remote_site From 307a10e123ad0c177248b144f5a2aadc2c503491 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 4 Jan 2023 15:35:43 +0100 Subject: [PATCH 136/171] Implement validate frame range repair for Render Layers - fix #3302 --- .../plugins/publish/validate_frame_range.py | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/validate_frame_range.py b/openpype/hosts/maya/plugins/publish/validate_frame_range.py index dec2f00700..d86925184e 100644 --- a/openpype/hosts/maya/plugins/publish/validate_frame_range.py +++ b/openpype/hosts/maya/plugins/publish/validate_frame_range.py @@ -5,6 +5,11 @@ from openpype.pipeline.publish import ( RepairAction, ValidateContentsOrder, ) +from openpype.hosts.maya.api.lib_rendersetup import ( + get_attr_overrides, + get_attr_in_layer, +) +from maya.app.renderSetup.model.override import AbsOverride class ValidateFrameRange(pyblish.api.InstancePlugin): @@ -93,6 +98,11 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): Repair instance container to match asset data. 
""" + if "renderlayer" in instance.data.get("families"): + # Special behavior for renderlayers + cls.repair_renderlayer(instance) + return + node = instance.data["name"] context = instance.context @@ -120,3 +130,53 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): # Include end handle in frame end if no separate handleEnd # attribute exists on the node cmds.setAttr("{}.frameEnd".format(node), frame_end_handle) + + @classmethod + def repair_renderlayer(cls, instance): + """Apply frame range in render settings""" + + layer = instance.data["setMembers"] + context = instance.context + + start_attr = "defaultRenderGlobals.startFrame" + end_attr = "defaultRenderGlobals.endFrame" + + frame_start_handle = int(context.data.get("frameStartHandle")) + frame_end_handle = int(context.data.get("frameEndHandle")) + + cls._set_attr_in_layer(start_attr, layer, frame_start_handle) + cls._set_attr_in_layer(end_attr, layer, frame_end_handle) + + @classmethod + def _set_attr_in_layer(cls, node_attr, layer, value): + + if get_attr_in_layer(node_attr, layer=layer) == value: + # Already ok. This can happen if you have multiple renderlayers + # validated and there are no frame range overrides. The first + # layer's repair would have fixed the global value already + return + + overrides = list(get_attr_overrides(node_attr, layer=layer)) + if overrides: + # We set the last absolute override if it is an absolute override + # otherwise we'll add an Absolute override + last_override = overrides[-1][1] + if not isinstance(last_override, AbsOverride): + collection = last_override.parent() + node, attr = node_attr.split(".", 1) + last_override = collection.createAbsoluteOverride(node, attr) + + cls.log.debug("Setting {attr} absolute override in " + "layer '{layer}': {value}".format(layer=layer, + attr=node_attr, + value=value)) + cmds.setAttr(last_override.name() + ".attrValue", value) + + else: + # Set the attribute directly + # (Note that this will set the global attribute) + cls.log.debug("Setting global {attr}: {value}".format( + attr=node_attr, + value=value + )) + cmds.setAttr(node_attr, value) From e353095c24e219914f41b323feba9214f85c38fb Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 4 Jan 2023 15:36:19 +0100 Subject: [PATCH 137/171] Only apply deadline attributes when Deadline is enabled (fixes Create Render with Deadline module disabled) --- .../maya/plugins/create/create_render.py | 42 +++++++++++-------- 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index a3e1272652..8375149442 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -72,15 +72,19 @@ class CreateRender(plugin.Creator): def __init__(self, *args, **kwargs): """Constructor.""" super(CreateRender, self).__init__(*args, **kwargs) - deadline_settings = get_system_settings()["modules"]["deadline"] - if not deadline_settings["enabled"]: - self.deadline_servers = {} - return + + # Defaults self._project_settings = get_project_settings( legacy_io.Session["AVALON_PROJECT"]) if self._project_settings["maya"]["RenderSettings"]["apply_render_settings"]: # noqa lib_rendersettings.RenderSettings().set_default_renderer_settings() + + # Deadline-only manager = ModulesManager() + deadline_settings = get_system_settings()["modules"]["deadline"] + if not deadline_settings["enabled"]: + self.deadline_servers = {} + return self.deadline_module = 
manager.modules_by_name["deadline"] try: default_servers = deadline_settings["deadline_urls"] @@ -193,8 +197,6 @@ class CreateRender(plugin.Creator): pool_names = [] default_priority = 50 - self.server_aliases = list(self.deadline_servers.keys()) - self.data["deadlineServers"] = self.server_aliases self.data["suspendPublishJob"] = False self.data["review"] = True self.data["extendFrames"] = False @@ -233,6 +235,9 @@ class CreateRender(plugin.Creator): raise RuntimeError("Both Deadline and Muster are enabled") if deadline_enabled: + self.server_aliases = list(self.deadline_servers.keys()) + self.data["deadlineServers"] = self.server_aliases + try: deadline_url = self.deadline_servers["default"] except KeyError: @@ -254,6 +259,19 @@ class CreateRender(plugin.Creator): default_priority) self.data["tile_priority"] = tile_priority + pool_setting = (self._project_settings["deadline"] + ["publish"] + ["CollectDeadlinePools"]) + primary_pool = pool_setting["primary_pool"] + self.data["primaryPool"] = self._set_default_pool(pool_names, + primary_pool) + # We add a string "-" to allow the user to not + # set any secondary pools + pool_names = ["-"] + pool_names + secondary_pool = pool_setting["secondary_pool"] + self.data["secondaryPool"] = self._set_default_pool(pool_names, + secondary_pool) + if muster_enabled: self.log.info(">>> Loading Muster credentials ...") self._load_credentials() @@ -273,18 +291,6 @@ class CreateRender(plugin.Creator): self.log.info(" - pool: {}".format(pool["name"])) pool_names.append(pool["name"]) - pool_setting = (self._project_settings["deadline"] - ["publish"] - ["CollectDeadlinePools"]) - primary_pool = pool_setting["primary_pool"] - self.data["primaryPool"] = self._set_default_pool(pool_names, - primary_pool) - # We add a string "-" to allow the user to not - # set any secondary pools - pool_names = ["-"] + pool_names - secondary_pool = pool_setting["secondary_pool"] - self.data["secondaryPool"] = self._set_default_pool(pool_names, - secondary_pool) self.options = {"useSelection": False} # Force no content def _set_default_pool(self, pool_names, pool_value): From d3f09c075badd52f3faa9d4fe41678dd1abf1d9b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 4 Jan 2023 15:39:13 +0100 Subject: [PATCH 138/171] OP-4490 - Hound --- openpype/modules/sync_server/sync_server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index d1ca69a31c..85b0774e90 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -443,9 +443,9 @@ class SyncServerThread(threading.Thread): local_site_config = sync_config.get('sites')[local_site] remote_site_config = sync_config.get('sites')[remote_site] if not all([_site_is_working(self.module, project_name, local_site, - local_site_config), + local_site_config), _site_is_working(self.module, project_name, remote_site, - remote_site_config)]): + remote_site_config)]): self.log.debug( "Some of the sites {} - {} is not working properly".format( local_site, remote_site From ddb6ae8a5a38e2a408a701c47bc5484c485dbbd7 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 5 Jan 2023 12:25:55 +0800 Subject: [PATCH 139/171] import reference during publish --- .../plugins/publish/extract_import_reference.py | 16 ++++++---------- .../modules/deadline/abstract_submit_deadline.py | 4 ++-- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git 
a/openpype/hosts/maya/plugins/publish/extract_import_reference.py b/openpype/hosts/maya/plugins/publish/extract_import_reference.py index ce284d16fb..51c82dde92 100644 --- a/openpype/hosts/maya/plugins/publish/extract_import_reference.py +++ b/openpype/hosts/maya/plugins/publish/extract_import_reference.py @@ -105,17 +105,13 @@ print("*** Done") # can't use TemporaryNamedFile as that can't be opened in another # process until handles are closed by context manager. with tempfile.TemporaryDirectory() as tmp_dir_name: - tmp_file_name = os.path.join(tmp_dir_name, "import_ref.py") - tmp = open(tmp_file_name, "w+t") - subprocess_args = [ - mayapy_exe, - tmp_file_name - ] - self.log.info("Using temp file: {}".format(tmp.name)) - try: + tmp_script_path = os.path.join(tmp_dir_name, "import_ref.py") + self.log.info("Using script file: {}".format(tmp_script_path)) + with open(tmp_script_path, "wt") as tmp: tmp.write(script) - tmp.close() - run_subprocess(subprocess_args) + + try: + run_subprocess([mayapy_exe, tmp_script_path]) except Exception: self.log.error("Import reference failed", exc_info=True) raise diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 909a5871e3..155a647ff6 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -525,9 +525,9 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): # determine published path from Anatomy. template_data = workfile_instance.data.get("anatomyData") if self.import_reference: - rep = workfile_instance.data.get("representations")[1] + rep = workfile_instance.data["representations"][1] else: - rep = workfile_instance.data.get("representations")[0] + rep = workfile_instance.data["representations"][0] template_data["representation"] = rep.get("name") template_data["ext"] = rep.get("ext") template_data["comment"] = None From 5c6d86e06b84c464d0bafb640d5ef428f3d60650 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Jan 2023 10:35:14 +0100 Subject: [PATCH 140/171] maya: fix typo in template builder --- openpype/hosts/maya/api/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index 1d3f1cf568..3416c98793 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -240,7 +240,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): cmds.setAttr(node + ".hiddenInOutliner", True) def load_succeed(self, placeholder, container): - self._parent_in_hierarhchy(placeholder, container) + self._parent_in_hierarchy(placeholder, container) def _parent_in_hierarchy(self, placeholder, container): """Parent loaded container to placeholder's parent. 
From 20b2b50bac741d82b454b207d83c1ca5bda849a3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Jan 2023 10:35:54 +0100 Subject: [PATCH 141/171] global: adding project anatomy data for formating --- openpype/pipeline/workfile/workfile_template_builder.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index a834ca0e21..390a5759fc 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -691,7 +691,14 @@ class AbstractTemplateBuilder(object): key: value for key, value in os.environ.items() } + fill_data["root"] = anatomy.roots + fill_data["project"] = { + "name": project_name, + "code": anatomy["attributes"]["code"] + } + + result = StringTemplate.format_template(path, fill_data) if result.solved: path = result.normalized() From 673e7a735928408dcc7eae463aba62af2a53744a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Fri, 6 Jan 2023 10:38:36 +0100 Subject: [PATCH 142/171] Update openpype/pipeline/workfile/workfile_template_builder.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/pipeline/workfile/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 390a5759fc..58f152591f 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -1517,7 +1517,7 @@ class PlaceholderCreateMixin(object): for creator_name, creator in creators_by_name.items() ] - creator_items = list(sorted(creator_items, key=lambda i: i[1])) + creator_items.sort(key=lambda i: i[1]) options = options or {} return [ attribute_definitions.UISeparatorDef(), From d6004c26462e3f0400674ce1fbc3b6936c46b5cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Fri, 6 Jan 2023 10:39:43 +0100 Subject: [PATCH 143/171] Update openpype/pipeline/workfile/workfile_template_builder.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/pipeline/workfile/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 58f152591f..4fa45cdf30 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -1574,7 +1574,7 @@ class PlaceholderCreateMixin(object): creator_name = placeholder.data["creator"] create_variant = placeholder.data["create_variant"] - creator_plugin = get_legacy_creator_by_name(creator_name) + creator_plugin = self.builder.get_creators_by_name()[creator_name] # create subset name project_name = legacy_io.Session["AVALON_PROJECT"] From c071140a6fcc8525b78f7f6b3bce8303bc011505 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Jan 2023 10:54:06 +0100 Subject: [PATCH 144/171] PR comments --- .../workfile/workfile_template_builder.py | 25 +++++++++++-------- .../schema_templated_workfile_build.json | 2 +- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 4fa45cdf30..e3821bb4d7 100644 --- 
a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -43,7 +43,6 @@ from openpype.pipeline.load import ( load_with_repre_context, ) from openpype.pipeline.create import ( - get_legacy_creator_by_name, discover_legacy_creator_plugins ) @@ -439,7 +438,7 @@ class AbstractTemplateBuilder(object): template_path = template_preset["path"] if keep_placeholders is None: - keep_placeholders = template_preset["placeholder_keep"] + keep_placeholders = template_preset["keep_placeholder"] self.import_template(template_path) self.populate_scene_placeholders( @@ -673,10 +672,10 @@ class AbstractTemplateBuilder(object): path = profile["path"] # switch to remove placeholders after they are used - placeholder_keep = profile.get("placeholder_keep") + keep_placeholder = profile.get("keep_placeholder") # backward compatibility, since default is True - if placeholder_keep is None: - placeholder_keep = True + if keep_placeholder is None: + keep_placeholder = True if not path: raise TemplateLoadFailed(( @@ -707,7 +706,7 @@ class AbstractTemplateBuilder(object): self.log.info("Found template at: '{}'".format(path)) return { "path": path, - "placeholder_keep": placeholder_keep + "keep_placeholder": keep_placeholder } solved_path = None @@ -736,7 +735,7 @@ class AbstractTemplateBuilder(object): return { "path": solved_path, - "placeholder_keep": placeholder_keep + "keep_placeholder": keep_placeholder } @@ -991,7 +990,7 @@ class PlaceholderItem(object): def __init__(self, scene_identifier, data, plugin): self._log = None - self.name = scene_identifier + self._scene_identifier = scene_identifier self._data = data self._plugin = plugin @@ -1056,7 +1055,13 @@ class PlaceholderItem(object): return self._log def __repr__(self): - return "< {} {} >".format(self.__class__.__name__, self.name) + name = None + if hasattr("name", self): + name = self.name + if hasattr("_scene_identifier ", self): + name = self._scene_identifier + + return "< {} {} >".format(self.__class__.__name__, name) @property def order(self): @@ -1069,7 +1074,7 @@ class PlaceholderItem(object): @property def scene_identifier(self): - return self.name + return self._scene_identifier @property def finished(self): diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json index 1826734291..b244460bbf 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_templated_workfile_build.json @@ -30,7 +30,7 @@ "multipath": false }, { - "key": "placeholder_keep", + "key": "keep_placeholder", "label": "Keep placeholders", "type": "boolean", "default": true From cd2324f07e48e03b36a99918d3110e6d505757f4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 6 Jan 2023 15:42:47 +0100 Subject: [PATCH 145/171] fix how host ip is received --- .../ftrack/ftrack_server/event_server_cli.py | 20 ++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 20c5ab24a8..9adc784224 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -169,6 +169,22 @@ def legacy_server(ftrack_url): time.sleep(1) +def get_host_ip(): + 
host_name = socket.gethostname() + try: + return socket.gethostbyname(host_name) + except Exception: + pass + + try: + import ipaddress + return socket.gethostbyname(str(ipaddress.ip_address(8888))) + + except Exception: + pass + return None + + def main_loop(ftrack_url): """ This is main loop of event handling. @@ -245,11 +261,13 @@ def main_loop(ftrack_url): ) host_name = socket.gethostname() + host_ip = get_host_ip() + main_info = [ ["created_at", datetime.datetime.now().strftime("%Y.%m.%d %H:%M:%S")], ["Username", getpass.getuser()], ["Host Name", host_name], - ["Host IP", socket.gethostbyname(host_name)], + ["Host IP", host_ip or "N/A"], ["OpenPype executable", get_openpype_execute_args()[-1]], ["OpenPype version", get_openpype_version() or "N/A"], ["OpenPype build version", get_build_version() or "N/A"] From e1edb76f731097f9887c096db0fdcb05aaed0616 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 6 Jan 2023 17:48:05 +0100 Subject: [PATCH 146/171] use 'get_host_ip' on more places --- .../action_where_run_ask.py | 4 ++-- .../ftrack/ftrack_server/event_server_cli.py | 17 +---------------- openpype/modules/ftrack/ftrack_server/lib.py | 19 ++++++++++++++++++- .../ftrack/scripts/sub_event_status.py | 7 ++++--- 4 files changed, 25 insertions(+), 22 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py b/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py index 0d69913996..65d1b42d82 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py +++ b/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py @@ -3,6 +3,7 @@ import socket import getpass from openpype_modules.ftrack.lib import BaseAction +from openpype_modules.ftrack.ftrack_server.lib import get_host_ip class ActionWhereIRun(BaseAction): @@ -53,8 +54,7 @@ class ActionWhereIRun(BaseAction): try: host_name = socket.gethostname() msgs["Hostname"] = host_name - host_ip = socket.gethostbyname(host_name) - msgs["IP"] = host_ip + msgs["IP"] = get_host_ip() or "N/A" except Exception: pass diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 9adc784224..25ebad6658 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -26,6 +26,7 @@ from openpype_modules.ftrack import ( ) from openpype_modules.ftrack.lib import credentials from openpype_modules.ftrack.ftrack_server import socket_thread +from openpype_modules.ftrack.ftrack_server.lib import get_host_ip class MongoPermissionsError(Exception): @@ -169,22 +170,6 @@ def legacy_server(ftrack_url): time.sleep(1) -def get_host_ip(): - host_name = socket.gethostname() - try: - return socket.gethostbyname(host_name) - except Exception: - pass - - try: - import ipaddress - return socket.gethostbyname(str(ipaddress.ip_address(8888))) - - except Exception: - pass - return None - - def main_loop(ftrack_url): """ This is main loop of event handling. 
diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index c8143f739c..61d3bfa259 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -9,8 +9,9 @@ import time import queue import collections import appdirs -import pymongo +import socket +import pymongo import requests import ftrack_api import ftrack_api.session @@ -32,6 +33,22 @@ TOPIC_STATUS_SERVER = "openpype.event.server.status" TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result" +def get_host_ip(): + host_name = socket.gethostname() + try: + return socket.gethostbyname(host_name) + except Exception: + pass + + try: + import ipaddress + return socket.gethostbyname(str(ipaddress.ip_address(8888))) + + except Exception: + pass + return None + + class SocketBaseEventHub(ftrack_api.event.hub.EventHub): hearbeat_msg = b"hearbeat" diff --git a/openpype/modules/ftrack/scripts/sub_event_status.py b/openpype/modules/ftrack/scripts/sub_event_status.py index eb3f63c04b..dc5836e7f2 100644 --- a/openpype/modules/ftrack/scripts/sub_event_status.py +++ b/openpype/modules/ftrack/scripts/sub_event_status.py @@ -15,7 +15,8 @@ from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, StatusEventHub, TOPIC_STATUS_SERVER, - TOPIC_STATUS_SERVER_RESULT + TOPIC_STATUS_SERVER_RESULT, + get_host_ip ) from openpype.lib import ( Logger, @@ -29,10 +30,10 @@ log = Logger.get_logger("Event storer") action_identifier = ( "event.server.status" + os.environ["FTRACK_EVENT_SUB_ID"] ) -host_ip = socket.gethostbyname(socket.gethostname()) +host_ip = get_host_ip() action_data = { "label": "OpenPype Admin", - "variant": "- Event server Status ({})".format(host_ip), + "variant": "- Event server Status ({})".format(host_ip or "IP N/A"), "description": "Get Infromation about event server", "actionIdentifier": action_identifier } From 1c530c0cb4c35353b7b697905d79c5628c705ff3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 6 Jan 2023 18:09:17 +0100 Subject: [PATCH 147/171] skip ipaddress way to receive ip address --- openpype/modules/ftrack/ftrack_server/lib.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index 61d3bfa259..eb64063fab 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -40,12 +40,6 @@ def get_host_ip(): except Exception: pass - try: - import ipaddress - return socket.gethostbyname(str(ipaddress.ip_address(8888))) - - except Exception: - pass return None From 823f661c47d254e6560dd33945a942d0e92a55c0 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 7 Jan 2023 03:27:59 +0000 Subject: [PATCH 148/171] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index ae514e371e..732682dd60 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.10-nightly.6" +__version__ = "3.14.10-nightly.7" From 4cc66395c5a60c82cd862165c849591a112180f0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Jan 2023 06:31:59 +0000 Subject: [PATCH 149/171] Bump json5 from 1.0.1 to 1.0.2 in /website Bumps [json5](https://github.com/json5/json5) from 1.0.1 to 1.0.2. 
- [Release notes](https://github.com/json5/json5/releases) - [Changelog](https://github.com/json5/json5/blob/main/CHANGELOG.md) - [Commits](https://github.com/json5/json5/compare/v1.0.1...v1.0.2) --- updated-dependencies: - dependency-name: json5 dependency-type: indirect ... Signed-off-by: dependabot[bot] --- website/yarn.lock | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 220a489dfa..9af21c7500 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -4740,9 +4740,9 @@ json-schema-traverse@^1.0.0: integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== json5@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" - integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + version "1.0.2" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== dependencies: minimist "^1.2.0" @@ -5154,16 +5154,11 @@ minimatch@^3.0.4: dependencies: brace-expansion "^1.1.7" -minimist@^1.2.0: +minimist@^1.2.0, minimist@^1.2.5: version "1.2.7" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== -minimist@^1.2.5: - version "1.2.6" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" - integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== - mkdirp@^0.5.5: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" From e37bff0eb5389c99d7e1e9ba04853a398ce73d8b Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 9 Jan 2023 12:44:21 +0100 Subject: [PATCH 150/171] :recycle: remove unused method --- openpype/hosts/traypublisher/plugins/create/create_online.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py index 1a366bcff5..c751801340 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_online.py +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -27,10 +27,6 @@ class OnlineCreator(TrayPublishCreator): extensions = [".mov", ".mp4", ".mxf", ".m4v", ".mpg", ".exr", ".dpx", ".tif", ".png", ".jpg"] - def __init__(self, *args, **kwargs): - super(OnlineCreator, self).__init__(*args, **kwargs) - self._original_path: Union[str, None] = None - def get_detail_description(self): return """# Create file retaining its original file name. 
From ba2646580916fca6224c7b72c1a43ee776bf3dd5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 9 Jan 2023 12:45:05 +0100 Subject: [PATCH 151/171] :recycle: remove unused import --- openpype/hosts/traypublisher/plugins/create/create_online.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py index c751801340..199fae6d2c 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_online.py +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -14,7 +14,6 @@ from openpype.pipeline import ( CreatorError ) from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator -from typing import Union class OnlineCreator(TrayPublishCreator): From 6cfab9bd312ede9aa08cece6fffa0bb0e03c74f3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 9 Jan 2023 17:12:46 +0100 Subject: [PATCH 152/171] OP-4706 - add vn tag for ffmpeg This tag ignores any graphic data from secondary audio input. This solves an issue where audio stream should be used from mp4/mov. Without it ffmpeg uses graphical data from this input too. --- openpype/plugins/publish/extract_review.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 9310923a9f..dcb43d7fa2 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -1038,6 +1038,9 @@ class ExtractReview(pyblish.api.InstancePlugin): # Set audio duration audio_in_args.append("-to {:0.10f}".format(audio_duration)) + # Ignore video data from audio input + audio_in_args.append("-vn") + # Add audio input path audio_in_args.append("-i {}".format( path_to_subprocess_arg(audio["filename"]) From 2cf9b1ee6371d740c178451d1d65c6e7637df03c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 10 Jan 2023 00:28:10 +0800 Subject: [PATCH 153/171] use current file for the scene rendering if the reference is imported --- openpype/modules/deadline/abstract_submit_deadline.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 155a647ff6..f6750bc0f2 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -425,7 +425,11 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): file_path = None if self.use_published: - file_path = self.from_published_scene() + if not self.import_reference: + file_path = self.from_published_scene() + else: + self.log.info("use the scene with imported reference for rendering") # noqa + file_path = context.data["currentFile"] # fallback if nothing was set if not file_path: From 9301bf03fac16278f13eb094bc9116e8916328f4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 10 Jan 2023 00:30:18 +0800 Subject: [PATCH 154/171] use current file for the scene rendering if the reference is imported --- openpype/modules/deadline/abstract_submit_deadline.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index f6750bc0f2..648eb77007 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -528,10 +528,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): # determine 
published path from Anatomy. template_data = workfile_instance.data.get("anatomyData") - if self.import_reference: - rep = workfile_instance.data["representations"][1] - else: - rep = workfile_instance.data["representations"][0] + rep = workfile_instance.data["representations"][0] template_data["representation"] = rep.get("name") template_data["ext"] = rep.get("ext") template_data["comment"] = None From 59f051d06596f63cbb861938b1a7c26d2d992e05 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 9 Jan 2023 18:30:10 +0100 Subject: [PATCH 155/171] :bug: fix instance collection --- openpype/hosts/unreal/plugins/publish/collect_instances.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/plugins/publish/collect_instances.py b/openpype/hosts/unreal/plugins/publish/collect_instances.py index 1f25cbde7d..6696eacb6a 100644 --- a/openpype/hosts/unreal/plugins/publish/collect_instances.py +++ b/openpype/hosts/unreal/plugins/publish/collect_instances.py @@ -26,8 +26,8 @@ class CollectInstances(pyblish.api.ContextPlugin): ar = unreal.AssetRegistryHelpers.get_asset_registry() class_name = ["/Script/OpenPype", - "AssetContainer"] if UNREAL_VERSION.major == 5 and \ - UNREAL_VERSION.minor > 0 else "OpenPypePublishInstance" # noqa + "OpenPypePublishInstance"] if UNREAL_VERSION.major == 5 and \ + UNREAL_VERSION.minor > 0 else "OpenPypePublishInstance" # noqa instance_containers = ar.get_assets_by_class(class_name, True) for container_data in instance_containers: From ea65afdbc176e3daf4ffcf1fb95939d39ed1e0b5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 9 Jan 2023 18:36:14 +0100 Subject: [PATCH 156/171] :rotating_light: hound fix --- .../hosts/unreal/plugins/publish/collect_instances.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/unreal/plugins/publish/collect_instances.py b/openpype/hosts/unreal/plugins/publish/collect_instances.py index 6696eacb6a..27b711cad6 100644 --- a/openpype/hosts/unreal/plugins/publish/collect_instances.py +++ b/openpype/hosts/unreal/plugins/publish/collect_instances.py @@ -25,9 +25,13 @@ class CollectInstances(pyblish.api.ContextPlugin): def process(self, context): ar = unreal.AssetRegistryHelpers.get_asset_registry() - class_name = ["/Script/OpenPype", - "OpenPypePublishInstance"] if UNREAL_VERSION.major == 5 and \ - UNREAL_VERSION.minor > 0 else "OpenPypePublishInstance" # noqa + class_name = [ + "/Script/OpenPype", + "OpenPypePublishInstance" + ] if ( + UNREAL_VERSION.major == 5 + and UNREAL_VERSION.minor > 0 + ) else "OpenPypePublishInstance" # noqa instance_containers = ar.get_assets_by_class(class_name, True) for container_data in instance_containers: From b539ba029990c25318cc98f5776de75eda7ccaf4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Jan 2023 22:16:16 +0100 Subject: [PATCH 157/171] traypublisher: multiple edl workflow with multilayer otio timeline --- openpype/hosts/traypublisher/api/editorial.py | 5 -- .../plugins/create/create_editorial.py | 67 +++++++------------ .../plugins/publish/collect_shot_instances.py | 5 -- 3 files changed, 23 insertions(+), 54 deletions(-) diff --git a/openpype/hosts/traypublisher/api/editorial.py b/openpype/hosts/traypublisher/api/editorial.py index 7c392ef508..293db542a9 100644 --- a/openpype/hosts/traypublisher/api/editorial.py +++ b/openpype/hosts/traypublisher/api/editorial.py @@ -171,7 +171,6 @@ class ShotMetadataSolver: _index == 0 and parents[-1]["entity_name"] == parent_name ): - self.log.debug(f" skipping : 
{parent_name}") continue # in case first parent is project then start parents from start @@ -179,7 +178,6 @@ class ShotMetadataSolver: _index == 0 and parent_token_type == "Project" ): - self.log.debug("rebuilding parents from scratch") project_parent = parents[0] parents = [project_parent] continue @@ -189,8 +187,6 @@ class ShotMetadataSolver: "entity_name": parent_name }) - self.log.debug(f"__ parents: {parents}") - return parents def _create_hierarchy_path(self, parents): @@ -297,7 +293,6 @@ class ShotMetadataSolver: Returns: (str, dict): shot name and hierarchy data """ - self.log.info(f"_ source_data: {source_data}") tasks = {} asset_doc = source_data["selected_asset_doc"] diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py index 614cf9dbca..d1086a1ff3 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py +++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py @@ -1,6 +1,5 @@ import os from copy import deepcopy -from pprint import pformat import opentimelineio as otio from openpype.client import ( get_asset_by_name, @@ -13,9 +12,7 @@ from openpype.hosts.traypublisher.api.plugin import ( from openpype.hosts.traypublisher.api.editorial import ( ShotMetadataSolver ) - from openpype.pipeline import CreatedInstance - from openpype.lib import ( get_ffprobe_data, convert_ffprobe_fps_value, @@ -70,14 +67,12 @@ class EditorialClipInstanceCreatorBase(HiddenTrayPublishCreator): host_name = "traypublisher" def create(self, instance_data, source_data=None): - self.log.info(f"instance_data: {instance_data}") subset_name = instance_data["subset"] # Create new instance new_instance = CreatedInstance( self.family, subset_name, instance_data, self ) - self.log.info(f"instance_data: {pformat(new_instance.data)}") self._store_new_instance(new_instance) @@ -223,8 +218,6 @@ or updating already created. Publishing will create OTIO file. asset_name = instance_data["asset"] asset_doc = get_asset_by_name(self.project_name, asset_name) - self.log.info(pre_create_data["fps"]) - if pre_create_data["fps"] == "from_selection": # get asset doc data attributes fps = asset_doc["data"]["fps"] @@ -243,7 +236,8 @@ or updating already created. Publishing will create OTIO file. sequence_path_data, multi=True) media_path = self._get_path_from_file_data(media_path_data) - for index, seq_path in enumerate(sequence_paths): + first_otio_timeline = None + for seq_path in sequence_paths: # get otio timeline otio_timeline = self._create_otio_timeline( seq_path, fps) @@ -260,22 +254,22 @@ or updating already created. Publishing will create OTIO file. otio_timeline, media_path, clip_instance_properties, - family_presets=allowed_family_presets - + allowed_family_presets, + os.path.basename(seq_path), + first_otio_timeline ) - # alter subset name if multiple files - subset_name_edit = subset_name - if len(sequence_paths) > 1: - subset_name_edit = subset_name + str(index) + if not first_otio_timeline: + # assing otio timeline for multi file to layer + first_otio_timeline = otio_timeline - # create otio editorial instance - self._create_otio_instance( - subset_name_edit, - instance_data, - seq_path, media_path, - otio_timeline - ) + # create otio editorial instance + self._create_otio_instance( + subset_name, + instance_data, + seq_path, media_path, + first_otio_timeline + ) def _create_otio_instance( self, @@ -325,7 +319,6 @@ or updating already created. Publishing will create OTIO file. 
kwargs["rate"] = fps kwargs["ignore_timecode_mismatch"] = True - self.log.info(f"kwargs: {kwargs}") return otio.adapters.read_from_file(sequence_path, **kwargs) def _get_path_from_file_data(self, file_path_data, multi=False): @@ -343,15 +336,12 @@ or updating already created. Publishing will create OTIO file. """ return_path_list = [] - self.log.debug(f"type: {type(file_path_data)}") - self.log.debug(f"file_path_data: {file_path_data}") if isinstance(file_path_data, list): return_path_list = [ os.path.join(f["directory"], f["filenames"][0]) for f in file_path_data ] - self.log.debug(f"return_path_list: {return_path_list}") if not return_path_list: raise FileExistsError( @@ -364,7 +354,9 @@ or updating already created. Publishing will create OTIO file. otio_timeline, media_path, instance_data, - family_presets + family_presets, + sequence_file_name, + first_otio_timeline=None ): """Helping function fro creating clip instance @@ -384,17 +376,15 @@ or updating already created. Publishing will create OTIO file. media_data = self._get_media_source_metadata(media_path) for track in tracks: - self.log.debug(f"track.name: {track.name}") + track.name = f"{sequence_file_name} - {otio_timeline.name}" try: track_start_frame = ( abs(track.source_range.start_time.value) ) - self.log.debug(f"track_start_frame: {track_start_frame}") track_start_frame -= self.timeline_frame_start except AttributeError: track_start_frame = 0 - self.log.debug(f"track_start_frame: {track_start_frame}") for clip in track.each_child(): if not self._validate_clip_for_processing(clip): @@ -416,10 +406,6 @@ or updating already created. Publishing will create OTIO file. "instance_label": None, "instance_id": None } - self.log.info(( - "Creating subsets from presets: \n" - f"{pformat(family_presets)}" - )) for _fpreset in family_presets: # exclude audio family if no audio stream @@ -435,7 +421,10 @@ or updating already created. Publishing will create OTIO file. deepcopy(base_instance_data), parenting_data ) - self.log.debug(f"{pformat(dict(instance.data))}") + + # add track to first otioTimeline if it is in input args + if first_otio_timeline: + first_otio_timeline.tracks.append(deepcopy(track)) def _restore_otio_source_range(self, otio_clip): """Infusing source range. @@ -476,7 +465,6 @@ or updating already created. Publishing will create OTIO file. target_url=media_path, available_range=available_range ) - otio_clip.media_reference = media_reference def _get_media_source_metadata(self, path): @@ -497,7 +485,6 @@ or updating already created. Publishing will create OTIO file. media_data = get_ffprobe_data( path, self.log ) - self.log.debug(f"__ media_data: {pformat(media_data)}") # get video stream data video_stream = media_data["streams"][0] @@ -605,9 +592,6 @@ or updating already created. Publishing will create OTIO file. # get variant name from preset or from inharitance _variant_name = preset.get("variant") or variant_name - self.log.debug(f"__ family: {family}") - self.log.debug(f"__ preset: {preset}") - # subset name subset_name = "{}{}".format( family, _variant_name.capitalize() @@ -738,17 +722,13 @@ or updating already created. Publishing will create OTIO file. 
clip_in += track_start_frame clip_out = otio_clip.range_in_parent().end_time_inclusive().value clip_out += track_start_frame - self.log.info(f"clip_in: {clip_in} | clip_out: {clip_out}") # add offset in case there is any - self.log.debug(f"__ timeline_offset: {timeline_offset}") if timeline_offset: clip_in += timeline_offset clip_out += timeline_offset clip_duration = otio_clip.duration().value - self.log.info(f"clip duration: {clip_duration}") - source_in = otio_clip.trimmed_range().start_time.value source_out = source_in + clip_duration @@ -778,7 +758,6 @@ or updating already created. Publishing will create OTIO file. Returns: list: lit of dict with preset items """ - self.log.debug(f"__ pre_create_data: {pre_create_data}") return [ {"family": "shot"}, *[ diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py index 716f73022e..78c1f14e4e 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py @@ -33,8 +33,6 @@ class CollectShotInstance(pyblish.api.InstancePlugin): ] def process(self, instance): - self.log.debug(pformat(instance.data)) - creator_identifier = instance.data["creator_identifier"] if "editorial" not in creator_identifier: return @@ -82,7 +80,6 @@ class CollectShotInstance(pyblish.api.InstancePlugin): ] otio_clip = clips.pop() - self.log.debug(f"__ otioclip.parent: {otio_clip.parent}") return otio_clip @@ -172,7 +169,6 @@ class CollectShotInstance(pyblish.api.InstancePlugin): } parents = instance.data.get('parents', []) - self.log.debug(f"parents: {pformat(parents)}") actual = {name: in_info} @@ -190,7 +186,6 @@ class CollectShotInstance(pyblish.api.InstancePlugin): # adding hierarchy context to instance context.data["hierarchyContext"] = final_context - self.log.debug(pformat(final_context)) def _update_dict(self, ex_dict, new_dict): """ Recursion function From fb886125393d93b1216ece77e987823998401afa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 10 Jan 2023 10:44:52 +0100 Subject: [PATCH 158/171] added 'install' method with docstring to 'HostBase' --- openpype/host/host.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/openpype/host/host.py b/openpype/host/host.py index 99f7868727..94416bb39a 100644 --- a/openpype/host/host.py +++ b/openpype/host/host.py @@ -76,6 +76,18 @@ class HostBase(object): pass + def install(self): + """Install host specific functionality. + + This is where should be added menu with tools, registered callbacks + and other host integration initialization. + + It is called automatically when 'openpype.pipeline.install_host' is + triggered. 
+ """ + + pass + @property def log(self): if self._log is None: From 4d0f954785ed03012247263805bff0715ec8d536 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 10 Jan 2023 16:29:51 +0100 Subject: [PATCH 159/171] :bug: fix missing maintained_selection call --- openpype/hosts/unreal/api/__init__.py | 2 ++ openpype/hosts/unreal/api/pipeline.py | 14 ++++++++++++++ 2 files changed, 16 insertions(+) diff --git a/openpype/hosts/unreal/api/__init__.py b/openpype/hosts/unreal/api/__init__.py index 3f96d8ac6f..ca9db259e6 100644 --- a/openpype/hosts/unreal/api/__init__.py +++ b/openpype/hosts/unreal/api/__init__.py @@ -18,6 +18,7 @@ from .pipeline import ( show_tools_popup, instantiate, UnrealHost, + maintained_selection ) __all__ = [ @@ -36,4 +37,5 @@ __all__ = [ "show_tools_popup", "instantiate", "UnrealHost", + "maintained_selection" ] diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index ca5a42cd82..2081c8fd13 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -2,6 +2,7 @@ import os import logging from typing import List +from contextlib import contextmanager import semver import pyblish.api @@ -447,3 +448,16 @@ def get_subsequences(sequence: unreal.LevelSequence): if subscene_track is not None and subscene_track.get_sections(): return subscene_track.get_sections() return [] + + +@contextmanager +def maintained_selection(): + """Stub to be either implemented or replaced. + + This is needed for old publisher implementation, but + it is not supported (yet) in UE. + """ + try: + yield + finally: + pass From 48d43f7fea8817af9bfbf906be451cff2d562b5a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 10 Jan 2023 18:16:31 +0100 Subject: [PATCH 160/171] OP-4679 - check comment on context Check for context must be preserved until old Pyblish is completely eradicated as comment could be filled after collection phase, therefore not caught by collector. --- .../modules/ftrack/plugins/publish/integrate_ftrack_note.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index 6776509dda..2aecd97591 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -45,7 +45,8 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): host_name = context.data["hostName"] app_name = context.data["appName"] app_label = context.data["appLabel"] - comment = instance.data["comment"] + # context comment is fallback until old Pyblish is removed + comment = instance.data["comment"] or context.data.get("comment") if not comment: self.log.info("Comment is not set.") else: From 59e2bd2bdf0c738aca7fcfdd76473753c16c97b0 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 10 Jan 2023 18:17:15 +0100 Subject: [PATCH 161/171] OP-4679 - updated logging Printing what is actually missing is more helpful than only full data. 
--- .../modules/ftrack/plugins/publish/integrate_ftrack_note.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index 2aecd97591..994df304ad 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -130,8 +130,8 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): if not note_text.solved: self.log.warning(( "Note template require more keys then can be provided." - "\nTemplate: {}\nData: {}" - ).format(template, format_data)) + "\nTemplate: {}\nMissing values for keys:{}\nData: {}" + ).format(template, note_text.missing_keys, format_data)) continue if not note_text: From e5b3520d6c0bebc40cb5be8bd23476002b6d14db Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 11 Jan 2023 03:28:47 +0000 Subject: [PATCH 162/171] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 732682dd60..67cce01ec0 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.10-nightly.7" +__version__ = "3.14.10-nightly.8" From c0cabcc6ecd4a6bf5dac2188c93f090f912c3c72 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 11 Jan 2023 12:46:56 +0100 Subject: [PATCH 163/171] changelog for release 3.14.10 --- CHANGELOG.md | 53 ++++++++++++++++++++++++++++++++++++++++++++++++++++ HISTORY.md | 52 +++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 105 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f9820dec45..530622f491 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,58 @@ # Changelog +## [3.14.10](https://github.com/ynput/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/ynput/OpenPype/compare/3.14.9...HEAD) + +**🆕 New features** + +- Global | Nuke: Creator placeholders in workfile template builder [\#4266](https://github.com/ynput/OpenPype/pull/4266) +- Slack: Added dynamic message [\#4265](https://github.com/ynput/OpenPype/pull/4265) +- Blender: Workfile Loader [\#4234](https://github.com/ynput/OpenPype/pull/4234) +- Unreal: Publishing and Loading for UAssets [\#4198](https://github.com/ynput/OpenPype/pull/4198) +- Publish: register publishes without copying them [\#4157](https://github.com/ynput/OpenPype/pull/4157) + +**🚀 Enhancements** + +- General: Added install method with docstring to HostBase [\#4298](https://github.com/ynput/OpenPype/pull/4298) +- Traypublisher: simple editorial multiple edl [\#4248](https://github.com/ynput/OpenPype/pull/4248) +- General: Extend 'IPluginPaths' to have more available methods [\#4214](https://github.com/ynput/OpenPype/pull/4214) +- Refactorization of folder coloring [\#4211](https://github.com/ynput/OpenPype/pull/4211) +- Flame - loading multilayer with controlled layer names [\#4204](https://github.com/ynput/OpenPype/pull/4204) + +**🐛 Bug fixes** + +- Unreal: fix missing `maintained_selection` call [\#4300](https://github.com/ynput/OpenPype/pull/4300) +- Ftrack: Fix receive of host ip on MacOs [\#4288](https://github.com/ynput/OpenPype/pull/4288) +- SiteSync: sftp connection failing when shouldnt be tested [\#4278](https://github.com/ynput/OpenPype/pull/4278) +- Deadline: fix default value for passing mongo url [\#4275](https://github.com/ynput/OpenPype/pull/4275) +- Scene Manager: 
Fix variable name [\#4268](https://github.com/ynput/OpenPype/pull/4268) +- Slack: notification fails because of missing published path [\#4264](https://github.com/ynput/OpenPype/pull/4264) +- hiero: creator gui with min max [\#4257](https://github.com/ynput/OpenPype/pull/4257) +- NiceCheckbox: Fix checker positioning in Python 2 [\#4253](https://github.com/ynput/OpenPype/pull/4253) +- Publisher: Fix 'CreatorType' not equal for Python 2 DCCs [\#4249](https://github.com/ynput/OpenPype/pull/4249) +- Deadline: fix dependencies [\#4242](https://github.com/ynput/OpenPype/pull/4242) +- Houdini: hotfix instance data access [\#4236](https://github.com/ynput/OpenPype/pull/4236) +- bugfix/image plane load error [\#4222](https://github.com/ynput/OpenPype/pull/4222) +- Hiero: thumbnail from multilayer exr [\#4209](https://github.com/ynput/OpenPype/pull/4209) + +**🔀 Refactored code** + +- Resolve: Use qtpy in Resolve [\#4254](https://github.com/ynput/OpenPype/pull/4254) +- Houdini: Use qtpy in Houdini [\#4252](https://github.com/ynput/OpenPype/pull/4252) +- Max: Use qtpy in Max [\#4251](https://github.com/ynput/OpenPype/pull/4251) +- Maya: Use qtpy in Maya [\#4250](https://github.com/ynput/OpenPype/pull/4250) +- Hiero: Use qtpy in Hiero [\#4240](https://github.com/ynput/OpenPype/pull/4240) +- Nuke: Use qtpy in Nuke [\#4239](https://github.com/ynput/OpenPype/pull/4239) +- Flame: Use qtpy in flame [\#4238](https://github.com/ynput/OpenPype/pull/4238) +- General: Legacy io not used in global plugins [\#4134](https://github.com/ynput/OpenPype/pull/4134) + +**Merged pull requests:** + +- Bump json5 from 1.0.1 to 1.0.2 in /website [\#4292](https://github.com/ynput/OpenPype/pull/4292) +- Maya: Fix validate frame range repair + fix create render with deadline disabled [\#4279](https://github.com/ynput/OpenPype/pull/4279) + + ## [3.14.9](https://github.com/pypeclub/OpenPype/tree/3.14.9) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.8...3.14.9) diff --git a/HISTORY.md b/HISTORY.md index f24e95b2e1..88b50c67dd 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,57 @@ # Changelog +## [3.14.10](https://github.com/ynput/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/ynput/OpenPype/compare/3.14.9...3.14.10) + +**🆕 New features** + +- Global | Nuke: Creator placeholders in workfile template builder [\#4266](https://github.com/ynput/OpenPype/pull/4266) +- Slack: Added dynamic message [\#4265](https://github.com/ynput/OpenPype/pull/4265) +- Blender: Workfile Loader [\#4234](https://github.com/ynput/OpenPype/pull/4234) +- Unreal: Publishing and Loading for UAssets [\#4198](https://github.com/ynput/OpenPype/pull/4198) +- Publish: register publishes without copying them [\#4157](https://github.com/ynput/OpenPype/pull/4157) + +**🚀 Enhancements** + +- General: Added install method with docstring to HostBase [\#4298](https://github.com/ynput/OpenPype/pull/4298) +- Traypublisher: simple editorial multiple edl [\#4248](https://github.com/ynput/OpenPype/pull/4248) +- General: Extend 'IPluginPaths' to have more available methods [\#4214](https://github.com/ynput/OpenPype/pull/4214) +- Refactorization of folder coloring [\#4211](https://github.com/ynput/OpenPype/pull/4211) +- Flame - loading multilayer with controlled layer names [\#4204](https://github.com/ynput/OpenPype/pull/4204) + +**🐛 Bug fixes** + +- Unreal: fix missing `maintained_selection` call [\#4300](https://github.com/ynput/OpenPype/pull/4300) +- Ftrack: Fix receive of host ip on MacOs 
[\#4288](https://github.com/ynput/OpenPype/pull/4288) +- SiteSync: sftp connection failing when shouldnt be tested [\#4278](https://github.com/ynput/OpenPype/pull/4278) +- Deadline: fix default value for passing mongo url [\#4275](https://github.com/ynput/OpenPype/pull/4275) +- Scene Manager: Fix variable name [\#4268](https://github.com/ynput/OpenPype/pull/4268) +- Slack: notification fails because of missing published path [\#4264](https://github.com/ynput/OpenPype/pull/4264) +- hiero: creator gui with min max [\#4257](https://github.com/ynput/OpenPype/pull/4257) +- NiceCheckbox: Fix checker positioning in Python 2 [\#4253](https://github.com/ynput/OpenPype/pull/4253) +- Publisher: Fix 'CreatorType' not equal for Python 2 DCCs [\#4249](https://github.com/ynput/OpenPype/pull/4249) +- Deadline: fix dependencies [\#4242](https://github.com/ynput/OpenPype/pull/4242) +- Houdini: hotfix instance data access [\#4236](https://github.com/ynput/OpenPype/pull/4236) +- bugfix/image plane load error [\#4222](https://github.com/ynput/OpenPype/pull/4222) +- Hiero: thumbnail from multilayer exr [\#4209](https://github.com/ynput/OpenPype/pull/4209) + +**🔀 Refactored code** + +- Resolve: Use qtpy in Resolve [\#4254](https://github.com/ynput/OpenPype/pull/4254) +- Houdini: Use qtpy in Houdini [\#4252](https://github.com/ynput/OpenPype/pull/4252) +- Max: Use qtpy in Max [\#4251](https://github.com/ynput/OpenPype/pull/4251) +- Maya: Use qtpy in Maya [\#4250](https://github.com/ynput/OpenPype/pull/4250) +- Hiero: Use qtpy in Hiero [\#4240](https://github.com/ynput/OpenPype/pull/4240) +- Nuke: Use qtpy in Nuke [\#4239](https://github.com/ynput/OpenPype/pull/4239) +- Flame: Use qtpy in flame [\#4238](https://github.com/ynput/OpenPype/pull/4238) +- General: Legacy io not used in global plugins [\#4134](https://github.com/ynput/OpenPype/pull/4134) + +**Merged pull requests:** + +- Bump json5 from 1.0.1 to 1.0.2 in /website [\#4292](https://github.com/ynput/OpenPype/pull/4292) +- Maya: Fix validate frame range repair + fix create render with deadline disabled [\#4279](https://github.com/ynput/OpenPype/pull/4279) + ## [3.14.9](https://github.com/pypeclub/OpenPype/tree/3.14.9) From 56813ee8ddf70c14acad8c19b8b4d2834281eb15 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 11 Jan 2023 12:18:43 +0000 Subject: [PATCH 164/171] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 67cce01ec0..c050cdafda 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.10-nightly.8" +__version__ = "3.14.10-nightly.9" From 4720208cfd6ec587d532a8c77c62de8009de85fe Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 11 Jan 2023 12:23:22 +0000 Subject: [PATCH 165/171] [Automated] Release --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index c050cdafda..129f7f684b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.10-nightly.9" +__version__ = "3.14.10" From 12e29805249bdb31a77c8c9249e2f96c0df56080 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 11 Jan 2023 15:03:55 +0100 Subject: [PATCH 166/171] OP-4679 - push adding comment to instance to Extractor phase Pyblish allows modifying comment after collect phase, eg. 
collector wouldn't collect it. Should be pushed back to Collect phase after Pyblish is eradicated. --- openpype/plugins/publish/collect_comment.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index 12579cd957..5be04731ac 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -73,7 +73,9 @@ class CollectComment( """ label = "Collect Instance Comment" - order = pyblish.api.CollectorOrder + 0.49 + # TODO change to CollectorOrder after Pyblish is purged + # Pyblish allows modifying comment after collect phase + order = pyblish.api.ExtractorOrder - 0.49 def process(self, context): context_comment = self.cleanup_comment(context.data.get("comment")) From f854de46295da4d96c007335421647f78b2e6b36 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 11 Jan 2023 15:06:03 +0100 Subject: [PATCH 167/171] OP-4679 - revert back fallback Availability of comment on instance has been resolved by bumping up order of CollectComment. --- .../modules/ftrack/plugins/publish/integrate_ftrack_note.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index 994df304ad..6e82897d89 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -45,8 +45,7 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): host_name = context.data["hostName"] app_name = context.data["appName"] app_label = context.data["appLabel"] - # context comment is fallback until old Pyblish is removed - comment = instance.data["comment"] or context.data.get("comment") + comment = instance.data["comment"] if not comment: self.log.info("Comment is not set.") else: From 3752c8dfa6e1c3b4cd43216fcb784502ba7cb965 Mon Sep 17 00:00:00 2001 From: Toke Jepsen Date: Fri, 13 Jan 2023 09:45:32 +0000 Subject: [PATCH 168/171] Fix run_documentation.ps1 --- tools/run_documentation.ps1 | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/run_documentation.ps1 b/tools/run_documentation.ps1 index a3e3a9b8dd..d5459f0d2c 100644 --- a/tools/run_documentation.ps1 +++ b/tools/run_documentation.ps1 @@ -43,4 +43,5 @@ $openpype_root = (Get-Item $script_dir).parent.FullName Set-Location $openpype_root/website -& yarn run start +& yarn install +& yarn start From b6e849bc73a073d27133b3ae7f3f0d1e20dbd98a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Fri, 13 Jan 2023 15:06:46 +0100 Subject: [PATCH 169/171] :bug: fix animation family contamination --- .../hosts/maya/plugins/publish/extract_multiverse_usd.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 27f676e86c..0af748f7b6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -156,6 +156,14 @@ class ExtractMultiverseUsd(publish.Extractor): return members def process(self, instance): + # check if there is "usd" in families because of + # `ExtractMultiverseUsdAnim` that inherits this and should + # run on animation family too. 
+ families = set(instance.data["families"]) + families.add(instance.data["family"]) + if "usd" not in families: + return + # Load plugin first cmds.loadPlugin("MultiverseForMaya", quiet=True) From a947c4f7a12f1ef90560b642fc7c2d6f1df1fea8 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 14 Jan 2023 03:27:57 +0000 Subject: [PATCH 170/171] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 129f7f684b..ed7da82afb 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.10" +__version__ = "3.14.11-nightly.1" From 8db210fce0b0e34988c386239f51b79d2b949072 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 16 Jan 2023 10:45:18 +0100 Subject: [PATCH 171/171] :recycle: use pyblish matching --- .../maya/plugins/publish/extract_multiverse_usd.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 0af748f7b6..4399eacda1 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -4,6 +4,7 @@ import six from maya import cmds from maya import mel +import pyblish.api from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection @@ -156,13 +157,6 @@ class ExtractMultiverseUsd(publish.Extractor): return members def process(self, instance): - # check if there is "usd" in families because of - # `ExtractMultiverseUsdAnim` that inherits this and should - # run on animation family too. - families = set(instance.data["families"]) - families.add(instance.data["family"]) - if "usd" not in families: - return # Load plugin first cmds.loadPlugin("MultiverseForMaya", quiet=True) @@ -262,7 +256,8 @@ class ExtractMultiverseUsdAnim(ExtractMultiverseUsd): Upon publish a .usd sparse cache will be written. """ label = "Extract Multiverse USD Animation Sparse Cache" - families = ["animation"] + families = ["animation", "usd"] + match = pyblish.api.Subset def get_default_options(self): anim_options = self.default_options