diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py index c28042b6ae..ebd4b8f944 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py @@ -24,7 +24,7 @@ class AERenderInstance(RenderInstance): class CollectAERender(publish.AbstractCollectRender): - order = pyblish.api.CollectorOrder + 0.405 + order = pyblish.api.CollectorOrder + 0.100 label = "Collect After Effects Render Layers" hosts = ["aftereffects"] @@ -145,6 +145,7 @@ class CollectAERender(publish.AbstractCollectRender): if "review" in instance.families: # to skip ExtractReview locally instance.families.remove("review") + instance.deadline = inst.data.get("deadline") instances.append(instance) diff --git a/client/ayon_core/hosts/fusion/api/lib.py b/client/ayon_core/hosts/fusion/api/lib.py index 08722463e1..7f7d20010d 100644 --- a/client/ayon_core/hosts/fusion/api/lib.py +++ b/client/ayon_core/hosts/fusion/api/lib.py @@ -169,7 +169,7 @@ def validate_comp_prefs(comp=None, force_repair=False): def _on_repair(): attributes = dict() for key, comp_key, _label in validations: - value = folder_value[key] + value = folder_attributes[key] comp_key_full = "Comp.FrameFormat.{}".format(comp_key) attributes[comp_key_full] = value comp.SetPrefs(attributes) diff --git a/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py b/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py index 7a2844d5db..95d029aad4 100644 --- a/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py @@ -115,6 +115,7 @@ class CollectFusionRender( if "review" in instance.families: # to skip ExtractReview locally instance.families.remove("review") + instance.deadline = inst.data.get("deadline") instances.append(instance) diff --git a/client/ayon_core/hosts/harmony/plugins/publish/collect_farm_render.py b/client/ayon_core/hosts/harmony/plugins/publish/collect_farm_render.py index 156e2ac6ba..c63eb114e5 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/collect_farm_render.py +++ b/client/ayon_core/hosts/harmony/plugins/publish/collect_farm_render.py @@ -177,7 +177,10 @@ class CollectFarmRender(publish.AbstractCollectRender): outputFormat=info[1], outputStartFrame=info[3], leadingZeros=info[2], - ignoreFrameHandleCheck=True + ignoreFrameHandleCheck=True, + # TODO: inst is not available, must be determined; fix when + # reworking to Publisher + # deadline=inst.data.get("deadline") ) render_instance.context = context diff --git a/client/ayon_core/hosts/hiero/api/workio.py b/client/ayon_core/hosts/hiero/api/workio.py index 4c2416ca38..6e8fc20172 100644 --- a/client/ayon_core/hosts/hiero/api/workio.py +++ b/client/ayon_core/hosts/hiero/api/workio.py @@ -51,13 +51,12 @@ def open_file(filepath): project = hiero.core.projects()[-1] - # open project file - hiero.core.openProject(filepath.replace(os.path.sep, "/")) - - # close previous project - project.close() - - + # Close previous project if it's different from the current project.
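# (Illustration with hypothetical paths: on Windows `filepath` may arrive
# as "C:\\shows\\ep01\\ep01.hrox" while `project.path()` reports forward
# slashes, so both sides are normalized to "/" before comparing.)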
+ filepath = filepath.replace(os.path.sep, "/") + if project.path().replace(os.path.sep, "/") != filepath: + # open project file + hiero.core.openProject(filepath) + project.close() return True diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py b/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py index 82cad71c3e..6f5de20ae0 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py @@ -76,11 +76,11 @@ class FbxModelLoader(load.LoaderPlugin): for fbx_object in current_fbx_objects: fbx_object.name = f"{namespace}:{fbx_object.name}" fbx_objects.append(fbx_object) - fbx_transform = f"{fbx_object.name}.transform" + fbx_transform = f"{fbx_object}.transform" if fbx_transform in transform_data.keys(): fbx_object.pos = transform_data[fbx_transform] or 0 fbx_object.scale = transform_data[ - f"{fbx_object.name}.scale"] or 0 + f"{fbx_object}.scale"] or 0 with maintained_selection(): rt.Select(node) diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_obj.py b/client/ayon_core/hosts/max/plugins/load/load_model_obj.py index 38f2cdf43c..a9119259df 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_obj.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_obj.py @@ -67,11 +67,11 @@ class ObjLoader(load.LoaderPlugin): selections = rt.GetCurrentSelection() for selection in selections: selection.name = f"{namespace}:{selection.name}" - selection_transform = f"{selection.name}.transform" + selection_transform = f"{selection}.transform" if selection_transform in transform_data.keys(): selection.pos = transform_data[selection_transform] or 0 selection.scale = transform_data[ - f"{selection.name}.scale"] or 0 + f"{selection}.scale"] or 0 update_custom_attribute_data(node, selections) with maintained_selection(): rt.Select(node) diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_usd.py b/client/ayon_core/hosts/max/plugins/load/load_model_usd.py index 2b946eb2aa..2ed5d64a18 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_usd.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_usd.py @@ -95,11 +95,11 @@ class ModelUSDLoader(load.LoaderPlugin): for children in asset.Children: children.name = f"{namespace}:{children.name}" usd_objects.append(children) - children_transform = f"{children.name}.transform" + children_transform = f"{children}.transform" if children_transform in transform_data.keys(): children.pos = transform_data[children_transform] or 0 children.scale = transform_data[ - f"{children.name}.scale"] or 0 + f"{children}.scale"] or 0 asset.name = f"{namespace}:{asset.name}" usd_objects.append(asset) diff --git a/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py b/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py index 2efb7c7f62..47690f84e9 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py +++ b/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py @@ -92,10 +92,10 @@ class OxAbcLoader(load.LoaderPlugin): abc.Parent = container abc.name = f"{namespace}:{abc.name}" ox_abc_objects.append(abc) - ox_transform = f"{abc.name}.transform" + ox_transform = f"{abc}.transform" if ox_transform in transform_data.keys(): abc.pos = transform_data[ox_transform] or 0 - abc.scale = transform_data[f"{abc.name}.scale"] or 0 + abc.scale = transform_data[f"{abc}.scale"] or 0 update_custom_attribute_data(node, ox_abc_objects) lib.imprint( container["instance_node"], diff --git 
a/client/ayon_core/hosts/max/startup/startup.ms b/client/ayon_core/hosts/max/startup/startup.ms index 2dfe53a6a5..c5b4f0e526 100644 --- a/client/ayon_core/hosts/max/startup/startup.ms +++ b/client/ayon_core/hosts/max/startup/startup.ms @@ -12,4 +12,4 @@ max create mode python.ExecuteFile startup -) \ No newline at end of file +) diff --git a/client/ayon_core/hosts/maya/api/alembic.py b/client/ayon_core/hosts/maya/api/alembic.py new file mode 100644 index 0000000000..bf887df4c7 --- /dev/null +++ b/client/ayon_core/hosts/maya/api/alembic.py @@ -0,0 +1,305 @@ +import json +import logging +import os + +from maya import cmds # noqa + +from ayon_core.hosts.maya.api.lib import evaluation + +log = logging.getLogger(__name__) + +# The maya alembic export types +ALEMBIC_ARGS = { + "attr": (list, tuple), + "attrPrefix": (list, tuple), + "autoSubd": bool, + "dataFormat": str, + "endFrame": float, + "eulerFilter": bool, + "frameRange": str, # "start end"; overrides startFrame & endFrame + "frameRelativeSample": float, + "melPerFrameCallback": str, + "melPostJobCallback": str, + "noNormals": bool, + "preRoll": bool, + "preRollStartFrame": int, + "pythonPerFrameCallback": str, + "pythonPostJobCallback": str, + "renderableOnly": bool, + "root": (list, tuple), + "selection": bool, + "startFrame": float, + "step": float, + "stripNamespaces": bool, + "userAttr": (list, tuple), + "userAttrPrefix": (list, tuple), + "uvWrite": bool, + "uvsOnly": bool, + "verbose": bool, + "wholeFrameGeo": bool, + "worldSpace": bool, + "writeColorSets": bool, + "writeCreases": bool, # Maya 2015 Ext1+ + "writeFaceSets": bool, + "writeUVSets": bool, # Maya 2017+ + "writeVisibility": bool, +} + + +def extract_alembic( + file, + attr=None, + attrPrefix=None, + dataFormat="ogawa", + endFrame=None, + eulerFilter=True, + frameRange="", + noNormals=False, + preRoll=False, + preRollStartFrame=0, + renderableOnly=False, + root=None, + selection=True, + startFrame=None, + step=1.0, + stripNamespaces=True, + uvWrite=True, + verbose=False, + wholeFrameGeo=False, + worldSpace=False, + writeColorSets=False, + writeCreases=False, + writeFaceSets=False, + writeUVSets=False, + writeVisibility=False +): + """Extract a single Alembic Cache. + + This extracts an Alembic cache using the `-selection` flag to minimize + the extracted content to solely what was Collected into the instance. + + Arguments: + file (str): The filepath to write the alembic file to. + + attr (list of str, optional): A specific geometric attribute to write + out. Defaults to []. + + attrPrefix (list of str, optional): Prefix filter for determining which + geometric attributes to write out. Defaults to ["ABC_"]. + + dataFormat (str): The data format to use for the cache, + defaults to "ogawa" + + endFrame (float): End frame of output. Ignored if `frameRange` + provided. + + eulerFilter (bool): When on, X, Y, and Z rotation data is filtered with + an Euler filter. Euler filtering helps resolve irregularities in + rotations especially if X, Y, and Z rotations exceed 360 degrees. + Defaults to True. + + frameRange (tuple or str): Two-tuple with start and end frame or a + string formatted as: "startFrame endFrame". This argument + overrides `startFrame` and `endFrame` arguments. + + noNormals (bool): When on, normal data from the original polygon + objects is not included in the exported Alembic cache file. + + preRoll (bool): This frame range will not be sampled. + Defaults to False. + + preRollStartFrame (float): The frame to start scene + evaluation at. 
This is used to set the starting frame for time + dependent translations and can be used to evaluate run-up that + isn't actually translated. Defaults to 0. + + renderableOnly (bool): When on, any non-renderable nodes or hierarchy, + such as hidden objects, are not included in the Alembic file. + Defaults to False. + + root (list of str): Maya dag path which will be parented to + the root of the Alembic file. Defaults to [], which means the + entire scene will be written out. + + selection (bool): Write out all selected nodes from the + active selection list that are descendants of the roots specified + with -root. Defaults to True. + + startFrame (float): Start frame of output. Ignored if `frameRange` + provided. + + step (float): The time interval (expressed in frames) at + which the frame range is sampled. Additional samples around each + frame can be specified with -frs. Defaults to 1.0. + + stripNamespaces (bool): When on, any namespaces associated with the + exported objects are removed from the Alembic file. For example, an + object with the namespace taco:foo:bar appears as bar in the + Alembic file. + + uvWrite (bool): When on, UV data from polygon meshes and subdivision + objects are written to the Alembic file. Only the current UV map is + included. + + verbose (bool): When on, outputs frame number information to the + Script Editor or output window during extraction. + + wholeFrameGeo (bool): Data for geometry will only be written + out on whole frames. Defaults to False. + + worldSpace (bool): When on, the top node in the node hierarchy is + stored as world space. By default, these nodes are stored as local + space. Defaults to False. + + writeColorSets (bool): Write all color sets on MFnMeshes as + color 3 or color 4 indexed geometry parameters with face varying + scope. Defaults to False. + + writeCreases (bool): If the mesh has crease edges or crease + vertices, the mesh (OPolyMesh) would now be written out as an OSubD + and crease info will be stored in the Alembic file. Otherwise, + creases info won't be preserved in Alembic file unless a custom + Boolean attribute SubDivisionMesh has been added to mesh node and + its value is true. Defaults to False. + + writeFaceSets (bool): Write all Face sets on MFnMeshes. + Defaults to False. + + writeUVSets (bool): Write all uv sets on MFnMeshes as vector + 2 indexed geometry parameters with face varying scope. Defaults to + False. + + writeVisibility (bool): Visibility state will be stored in + the Alembic file. Otherwise everything written out is treated as + visible. Defaults to False. + """ + + # Ensure alembic exporter is loaded + cmds.loadPlugin('AbcExport', quiet=True) + + # Alembic Exporter requires forward slashes + file = file.replace('\\', '/') + + # Ensure list arguments are valid. + attr = attr or [] + attrPrefix = attrPrefix or [] + root = root or [] + + # Pass the start and end frame on as `frameRange` so that it + # never conflicts with that argument + if not frameRange: + # Fallback to maya timeline if no start or end frame provided.
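# A minimal usage sketch of the precedence implemented below (file path and
# node names are hypothetical): an explicit `frameRange` always wins over
# `startFrame`/`endFrame`, and a two-tuple is accepted as well.
#
#   extract_alembic("/tmp/out.abc", root=["|pCube1"],
#                   startFrame=1.0, endFrame=24.0)      # frames 1-24
#   extract_alembic("/tmp/out.abc", root=["|pCube1"],
#                   frameRange="1 24", startFrame=5.0)  # startFrame ignored
#   extract_alembic("/tmp/out.abc", root=["|pCube1"],
#                   frameRange=(1, 24))                 # tuple becomes "1 24"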
+ if startFrame is None: + startFrame = cmds.playbackOptions(query=True, + animationStartTime=True) + if endFrame is None: + endFrame = cmds.playbackOptions(query=True, + animationEndTime=True) + + # Ensure valid types are converted to frame range + assert isinstance(startFrame, ALEMBIC_ARGS["startFrame"]) + assert isinstance(endFrame, ALEMBIC_ARGS["endFrame"]) + frameRange = "{0} {1}".format(startFrame, endFrame) + else: + # Allow conversion from tuple for `frameRange` + if isinstance(frameRange, (list, tuple)): + assert len(frameRange) == 2 + frameRange = "{0} {1}".format(frameRange[0], frameRange[1]) + + # Assemble options + options = { + "selection": selection, + "frameRange": frameRange, + "eulerFilter": eulerFilter, + "noNormals": noNormals, + "preRoll": preRoll, + "renderableOnly": renderableOnly, + "uvWrite": uvWrite, + "writeColorSets": writeColorSets, + "writeFaceSets": writeFaceSets, + "wholeFrameGeo": wholeFrameGeo, + "worldSpace": worldSpace, + "writeVisibility": writeVisibility, + "writeUVSets": writeUVSets, + "writeCreases": writeCreases, + "dataFormat": dataFormat, + "step": step, + "attr": attr, + "attrPrefix": attrPrefix, + "stripNamespaces": stripNamespaces, + "verbose": verbose, + "preRollStartFrame": preRollStartFrame + } + + # Validate options + for key, value in options.copy().items(): + + # Discard unknown options + if key not in ALEMBIC_ARGS: + log.warning("extract_alembic() does not support option '%s'. " + "Flag will be ignored..", key) + options.pop(key) + continue + + # Validate value type + valid_types = ALEMBIC_ARGS[key] + if not isinstance(value, valid_types): + raise TypeError("Alembic option unsupported type: " + "{0} (expected {1})".format(value, valid_types)) + + # Ignore empty values, like an empty string, since they mess up how + # job arguments are built + if isinstance(value, (list, tuple)): + value = [x for x in value if x.strip()] + + # Ignore option completely if no values remaining + if not value: + options.pop(key) + continue + + options[key] = value + + # The `writeCreases` argument was changed to `autoSubd` in Maya 2018+ + maya_version = int(cmds.about(version=True)) + if maya_version >= 2018: + options['autoSubd'] = options.pop('writeCreases', False) + + # Format the job string from options + job_args = list() + for key, value in options.items(): + if isinstance(value, (list, tuple)): + for entry in value: + job_args.append("-{} {}".format(key, entry)) + elif isinstance(value, bool): + # Add only when state is set to True + if value: + job_args.append("-{0}".format(key)) + else: + job_args.append("-{0} {1}".format(key, value)) + + job_str = " ".join(job_args) + job_str += ' -file "%s"' % file + + # Ensure output directory exists + parent_dir = os.path.dirname(file) + if not os.path.exists(parent_dir): + os.makedirs(parent_dir) + + if verbose: + log.debug("Preparing Alembic export with options: %s", + json.dumps(options, indent=4)) + log.debug("Extracting Alembic with job arguments: %s", job_str) + + # Perform extraction + print("Alembic Job Arguments : {}".format(job_str)) + + # Disable the parallel evaluation temporarily to ensure no buggy + # exports are made. 
(PLN-31) + # TODO: Make sure this actually fixes the issues + with evaluation("off"): + cmds.AbcExport(j=job_str, verbose=verbose) + + if verbose: + log.debug("Extracted Alembic to: %s", file) + + return file diff --git a/client/ayon_core/hosts/maya/api/lib.py b/client/ayon_core/hosts/maya/api/lib.py index b8c9bedc60..2b41ffc06c 100644 --- a/client/ayon_core/hosts/maya/api/lib.py +++ b/client/ayon_core/hosts/maya/api/lib.py @@ -70,37 +70,6 @@ DEFAULT_MATRIX = [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0] -# The maya alembic export types -_alembic_options = { - "startFrame": float, - "endFrame": float, - "frameRange": str, # "start end"; overrides startFrame & endFrame - "eulerFilter": bool, - "frameRelativeSample": float, - "noNormals": bool, - "renderableOnly": bool, - "step": float, - "stripNamespaces": bool, - "uvWrite": bool, - "wholeFrameGeo": bool, - "worldSpace": bool, - "writeVisibility": bool, - "writeColorSets": bool, - "writeFaceSets": bool, - "writeCreases": bool, # Maya 2015 Ext1+ - "writeUVSets": bool, # Maya 2017+ - "dataFormat": str, - "root": (list, tuple), - "attr": (list, tuple), - "attrPrefix": (list, tuple), - "userAttr": (list, tuple), - "melPerFrameCallback": str, - "melPostJobCallback": str, - "pythonPerFrameCallback": str, - "pythonPostJobCallback": str, - "selection": bool -} - INT_FPS = {15, 24, 25, 30, 48, 50, 60, 44100, 48000} FLOAT_FPS = {23.98, 23.976, 29.97, 47.952, 59.94} @@ -1330,7 +1299,7 @@ def is_visible(node, override_enabled = cmds.getAttr('{}.overrideEnabled'.format(node)) override_visibility = cmds.getAttr('{}.overrideVisibility'.format( node)) - if override_enabled and override_visibility: + if override_enabled and not override_visibility: return False if parentHidden: @@ -1346,178 +1315,6 @@ def is_visible(node, return True - -def extract_alembic(file, - startFrame=None, - endFrame=None, - selection=True, - uvWrite=True, - eulerFilter=True, - dataFormat="ogawa", - verbose=False, - **kwargs): - """Extract a single Alembic Cache. - - This extracts an Alembic cache using the `-selection` flag to minimize - the extracted content to solely what was Collected into the instance. - - Arguments: - - startFrame (float): Start frame of output. Ignored if `frameRange` - provided. - - endFrame (float): End frame of output. Ignored if `frameRange` - provided. - - frameRange (tuple or str): Two-tuple with start and end frame or a - string formatted as: "startFrame endFrame". This argument - overrides `startFrame` and `endFrame` arguments. - - dataFormat (str): The data format to use for the cache, - defaults to "ogawa" - - verbose (bool): When on, outputs frame number information to the - Script Editor or output window during extraction. - - noNormals (bool): When on, normal data from the original polygon - objects is not included in the exported Alembic cache file. - - renderableOnly (bool): When on, any non-renderable nodes or hierarchy, - such as hidden objects, are not included in the Alembic file. - Defaults to False. - - stripNamespaces (bool): When on, any namespaces associated with the - exported objects are removed from the Alembic file. For example, an - object with the namespace taco:foo:bar appears as bar in the - Alembic file. - - uvWrite (bool): When on, UV data from polygon meshes and subdivision - objects are written to the Alembic file. Only the current UV map is - included. - - worldSpace (bool): When on, the top node in the node hierarchy is - stored as world space. By default, these nodes are stored as local - space. 
Defaults to False. - - eulerFilter (bool): When on, X, Y, and Z rotation data is filtered with - an Euler filter. Euler filtering helps resolve irregularities in - rotations especially if X, Y, and Z rotations exceed 360 degrees. - Defaults to True. - - """ - - # Ensure alembic exporter is loaded - cmds.loadPlugin('AbcExport', quiet=True) - - # Alembic Exporter requires forward slashes - file = file.replace('\\', '/') - - # Pass the start and end frame on as `frameRange` so that it - # never conflicts with that argument - if "frameRange" not in kwargs: - # Fallback to maya timeline if no start or end frame provided. - if startFrame is None: - startFrame = cmds.playbackOptions(query=True, - animationStartTime=True) - if endFrame is None: - endFrame = cmds.playbackOptions(query=True, - animationEndTime=True) - - # Ensure valid types are converted to frame range - assert isinstance(startFrame, _alembic_options["startFrame"]) - assert isinstance(endFrame, _alembic_options["endFrame"]) - kwargs["frameRange"] = "{0} {1}".format(startFrame, endFrame) - else: - # Allow conversion from tuple for `frameRange` - frame_range = kwargs["frameRange"] - if isinstance(frame_range, (list, tuple)): - assert len(frame_range) == 2 - kwargs["frameRange"] = "{0} {1}".format(frame_range[0], - frame_range[1]) - - # Assemble options - options = { - "selection": selection, - "uvWrite": uvWrite, - "eulerFilter": eulerFilter, - "dataFormat": dataFormat - } - options.update(kwargs) - - # Validate options - for key, value in options.copy().items(): - - # Discard unknown options - if key not in _alembic_options: - log.warning("extract_alembic() does not support option '%s'. " - "Flag will be ignored..", key) - options.pop(key) - continue - - # Validate value type - valid_types = _alembic_options[key] - if not isinstance(value, valid_types): - raise TypeError("Alembic option unsupported type: " - "{0} (expected {1})".format(value, valid_types)) - - # Ignore empty values, like an empty string, since they mess up how - # job arguments are built - if isinstance(value, (list, tuple)): - value = [x for x in value if x.strip()] - - # Ignore option completely if no values remaining - if not value: - options.pop(key) - continue - - options[key] = value - - # The `writeCreases` argument was changed to `autoSubd` in Maya 2018+ - maya_version = int(cmds.about(version=True)) - if maya_version >= 2018: - options['autoSubd'] = options.pop('writeCreases', False) - - # Format the job string from options - job_args = list() - for key, value in options.items(): - if isinstance(value, (list, tuple)): - for entry in value: - job_args.append("-{} {}".format(key, entry)) - elif isinstance(value, bool): - # Add only when state is set to True - if value: - job_args.append("-{0}".format(key)) - else: - job_args.append("-{0} {1}".format(key, value)) - - job_str = " ".join(job_args) - job_str += ' -file "%s"' % file - - # Ensure output directory exists - parent_dir = os.path.dirname(file) - if not os.path.exists(parent_dir): - os.makedirs(parent_dir) - - if verbose: - log.debug("Preparing Alembic export with options: %s", - json.dumps(options, indent=4)) - log.debug("Extracting Alembic with job arguments: %s", job_str) - - # Perform extraction - print("Alembic Job Arguments : {}".format(job_str)) - - # Disable the parallel evaluation temporarily to ensure no buggy - # exports are made. 
(PLN-31) - # TODO: Make sure this actually fixes the issues - with evaluation("off"): - cmds.AbcExport(j=job_str, verbose=verbose) - - if verbose: - log.debug("Extracted Alembic to: %s", file) - - return file - - # region ID def get_id_required_nodes(referenced_nodes=False, nodes=None, @@ -4415,3 +4212,23 @@ def create_rig_animation_instance( variant=namespace, pre_create_data={"use_selection": True} ) + + +def get_node_index_under_parent(node: str) -> int: + """Return the index of a DAG node under its parent. + + Arguments: + node (str): A DAG Node path. + + Returns: + int: The DAG node's index under its parent or world + + """ + node = cmds.ls(node, long=True)[0] # enforce long names + parent = node.rsplit("|", 1)[0] + if not parent: + return cmds.ls(assemblies=True, long=True).index(node) + else: + return cmds.listRelatives(parent, + children=True, + fullPath=True).index(node) diff --git a/client/ayon_core/hosts/maya/api/workfile_template_builder.py b/client/ayon_core/hosts/maya/api/workfile_template_builder.py index cfd416b708..f4f9a34983 100644 --- a/client/ayon_core/hosts/maya/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/maya/api/workfile_template_builder.py @@ -1,3 +1,5 @@ +import json + from maya import cmds from ayon_core.pipeline import ( @@ -8,13 +10,15 @@ from ayon_core.pipeline import ( ) from ayon_core.pipeline.workfile.workfile_template_builder import ( TemplateAlreadyImported, - AbstractTemplateBuilder + AbstractTemplateBuilder, + PlaceholderPlugin, + PlaceholderItem, ) from ayon_core.tools.workfile_template_build import ( WorkfileBuildPlaceholderDialog, ) -from .lib import get_main_window +from .lib import read, imprint, get_main_window PLACEHOLDER_SET = "PLACEHOLDERS_SET" @@ -86,6 +90,162 @@ class MayaTemplateBuilder(AbstractTemplateBuilder): return True +class MayaPlaceholderPlugin(PlaceholderPlugin): + """Base Placeholder Plugin for Maya with one unified cache. + + Creates a locator as placeholder node, which during populate provides + all of its attributes defined on the locator's transform in + `placeholder.data` and where `placeholder.scene_identifier` is the + full path to the node. + + Inherited classes must still implement `populate_placeholder` + + """ + + use_selection_as_parent = True + item_class = PlaceholderItem + + def _create_placeholder_name(self, placeholder_data): + return self.identifier.replace(".", "_") + + def _collect_scene_placeholders(self): + nodes_by_identifier = self.builder.get_shared_populate_data( + "placeholder_nodes" + ) + if nodes_by_identifier is None: + # Cache placeholder data to shared data + nodes = cmds.ls("*.plugin_identifier", long=True, objectsOnly=True) + + nodes_by_identifier = {} + for node in nodes: + identifier = cmds.getAttr("{}.plugin_identifier".format(node)) + nodes_by_identifier.setdefault(identifier, []).append(node) + + # Set the cache + self.builder.set_shared_populate_data( + "placeholder_nodes", nodes_by_identifier + ) + + return nodes_by_identifier
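# A hedged illustration of the cache built above (node paths and
# identifier names are hypothetical): a scene with two imprinted locators
# could yield
#
#   {
#       "maya.create.placeholder": ["|templates|locator1"],
#       "maya.load.placeholder": ["|templates|locator2"],
#   }
#
# so all placeholder plugins in one build share a single `cmds.ls` scan.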
+ + def create_placeholder(self, placeholder_data): + + parent = None + if self.use_selection_as_parent: + selection = cmds.ls(selection=True) + if len(selection) > 1: + raise ValueError( + "More than one node is selected. " + "Please select only one to define the parent." + ) + parent = selection[0] if selection else None + + placeholder_data["plugin_identifier"] = self.identifier + placeholder_name = self._create_placeholder_name(placeholder_data) + + placeholder = cmds.spaceLocator(name=placeholder_name)[0] + if parent: + placeholder = cmds.parent(placeholder, parent)[0] + + self.imprint(placeholder, placeholder_data) + + def update_placeholder(self, placeholder_item, placeholder_data): + node_name = placeholder_item.scene_identifier + + changed_values = {} + for key, value in placeholder_data.items(): + if value != placeholder_item.data.get(key): + changed_values[key] = value + + # Delete attributes to ensure we imprint new data with correct type + for key, value in changed_values.items(): + placeholder_item.data[key] = value + if cmds.attributeQuery(key, node=node_name, exists=True): + attribute = "{}.{}".format(node_name, key) + cmds.deleteAttr(attribute) + + self.imprint(node_name, changed_values) + + def collect_placeholders(self): + placeholders = [] + nodes_by_identifier = self._collect_scene_placeholders() + for node in nodes_by_identifier.get(self.identifier, []): + # TODO do data validations and maybe upgrades if they are invalid + placeholder_data = self.read(node) + placeholders.append( + self.item_class(scene_identifier=node, + data=placeholder_data, + plugin=self) + ) + + return placeholders + + def post_placeholder_process(self, placeholder, failed): + """Cleanup placeholder after load of its corresponding representations. + + Hide placeholder, add them to placeholder set. + Used only by PlaceholderCreateMixin and PlaceholderLoadMixin + + Args: + placeholder (PlaceholderItem): Item which was just used to load + representation. + failed (bool): Loading of representation failed. + """ + # Hide placeholder and add them to placeholder set + node = placeholder.scene_identifier + + # If we just populate the placeholders from current scene, the + # placeholder set will not be created so account for that. + if not cmds.objExists(PLACEHOLDER_SET): + cmds.sets(name=PLACEHOLDER_SET, empty=True) + + cmds.sets(node, addElement=PLACEHOLDER_SET) + cmds.hide(node) + cmds.setAttr("{}.hiddenInOutliner".format(node), True) + + def delete_placeholder(self, placeholder): + """Remove placeholder if building was successful + + Used only by PlaceholderCreateMixin and PlaceholderLoadMixin. + """ + node = placeholder.scene_identifier + + # To avoid that deleting a placeholder node will have Maya delete + # any objectSets the node was a member of we will first remove it + # from any sets it was a member of. This way the `PLACEHOLDERS_SET` + # will survive long enough + sets = cmds.listSets(o=node) or [] + for object_set in sets: + cmds.sets(node, remove=object_set) + + cmds.delete(node) + + def imprint(self, node, data): + """Imprint call for placeholder node""" + + # Complicated data that can't be represented as flat maya attributes + # we write to json strings, e.g. multiselection EnumDef + for key, value in data.items(): + if isinstance(value, (list, tuple, dict)): + data[key] = "JSON::{}".format(json.dumps(value)) + + imprint(node, data)
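# A small sketch of the imprint/read round trip for complex values (the
# node name is hypothetical): list, tuple and dict values survive as flat
# string attributes.
#
#   imprint("placeholder1", {"products": ["model", "rig"]})
#   # stores: placeholder1.products = 'JSON::["model", "rig"]'
#   read("placeholder1")
#   # -> {"products": ["model", "rig"]} (prefix detected, json.loads applied)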
+ + def read(self, node): + """Read call for placeholder node""" + + data = read(node) + + # Complicated data that can't be represented as flat maya attributes + # we read from json strings, e.g. multiselection EnumDef + for key, value in data.items(): + if isinstance(value, str) and value.startswith("JSON::"): + value = value[len("JSON::"):] # strip off the JSON:: prefix + data[key] = json.loads(value) + + return data + + def build_workfile_template(*args): builder = MayaTemplateBuilder(registered_host()) builder.build_template() diff --git a/client/ayon_core/hosts/maya/plugins/create/create_animation.py b/client/ayon_core/hosts/maya/plugins/create/create_animation.py deleted file mode 100644 index f30d9aba81..0000000000 --- a/client/ayon_core/hosts/maya/plugins/create/create_animation.py +++ /dev/null @@ -1,89 +0,0 @@ -from ayon_core.hosts.maya.api import ( - lib, - plugin -) -from ayon_core.lib import ( - BoolDef, - TextDef -) - - -class CreateAnimation(plugin.MayaHiddenCreator): - """Animation output for character rigs - - We hide the animation creator from the UI since the creation of it is - automated upon loading a rig. There's an inventory action to recreate it - for loaded rigs if by chance someone deleted the animation instance. - """ - identifier = "io.openpype.creators.maya.animation" - name = "animationDefault" - label = "Animation" - product_type = "animation" - icon = "male" - - write_color_sets = False - write_face_sets = False - include_parent_hierarchy = False - include_user_defined_attributes = False - - def get_instance_attr_defs(self): - - defs = lib.collect_animation_defs() - - defs.extend([ - BoolDef("writeColorSets", - label="Write vertex colors", - tooltip="Write vertex colors with the geometry", - default=self.write_color_sets), - BoolDef("writeFaceSets", - label="Write face sets", - tooltip="Write face sets with the geometry", - default=self.write_face_sets), - BoolDef("writeNormals", - label="Write normals", - tooltip="Write normals with the deforming geometry", - default=True), - BoolDef("renderableOnly", - label="Renderable Only", - tooltip="Only export renderable visible shapes", - default=False), - BoolDef("visibleOnly", - label="Visible Only", - tooltip="Only export dag objects visible during " - "frame range", - default=False), - BoolDef("includeParentHierarchy", - label="Include Parent Hierarchy", - tooltip="Whether to include parent hierarchy of nodes in " - "the publish instance", - default=self.include_parent_hierarchy), - BoolDef("worldSpace", - label="World-Space Export", - default=True), - BoolDef("includeUserDefinedAttributes", - label="Include User Defined Attributes", - default=self.include_user_defined_attributes), - TextDef("attr", - label="Custom Attributes", - default="", - placeholder="attr1, attr2"), - TextDef("attrPrefix", - label="Custom Attributes Prefix", - placeholder="prefix1, prefix2") - ]) - - # TODO: Implement these on a Deadline plug-in instead? - """ - # Default to not send to farm. - self.data["farm"] = False - self.data["priority"] = 50 - """ - - return defs - - def apply_settings(self, project_settings): - super(CreateAnimation, self).apply_settings(project_settings) - # Hardcoding creator to be enabled due to existing settings would - # disable the creator causing the creator plugin to not be - # discoverable. - self.enabled = True
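The new create_animation_pointcache.py introduced below replaces both deleted creators. As a hedged sketch (attribute names taken from the plugin, values invented), this is the shape of the migration its convert_legacy_alembic_creator_attributes performs on instance data:

node_data = {
    "creator_attributes": {
        "worldSpace": True,  # legacy Alembic flag, moves out
        "step": 1.0,         # stays a creator attribute
    },
    "publish_attributes": {},
}

# After convert_legacy_alembic_creator_attributes(node_data, "ExtractAlembic")
# the Alembic flags live under the extractor's publish attributes instead:
#
#   {
#       "creator_attributes": {"step": 1.0},
#       "publish_attributes": {"ExtractAlembic": {"worldSpace": True}},
#   }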
diff --git a/client/ayon_core/hosts/maya/plugins/create/create_animation_pointcache.py b/client/ayon_core/hosts/maya/plugins/create/create_animation_pointcache.py new file mode 100644 index 0000000000..08d50a1ab8 --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/create/create_animation_pointcache.py @@ -0,0 +1,139 @@ +from maya import cmds + +from ayon_core.hosts.maya.api import lib, plugin + +from ayon_core.lib import ( + BoolDef, + NumberDef, +) +from ayon_core.pipeline import CreatedInstance + + +def _get_animation_attr_defs(cls): + """Get Animation generic definitions.""" + defs = lib.collect_animation_defs() + defs.extend( + [ + BoolDef("farm", label="Submit to Farm"), + NumberDef("priority", label="Farm job Priority", default=50), + BoolDef("refresh", label="Refresh viewport during export"), + BoolDef( + "includeParentHierarchy", + label="Include Parent Hierarchy", + tooltip=( + "Whether to include parent hierarchy of nodes in the " + "publish instance." + ) + ), + BoolDef( + "includeUserDefinedAttributes", + label="Include User Defined Attributes", + tooltip=( + "Whether to include all custom maya attributes found " + "on nodes as attributes in the Alembic data." + ) + ), + ] + ) + + return defs + + +def convert_legacy_alembic_creator_attributes(node_data, class_name): + """This is a legacy transfer of creator attributes to publish attributes + for ExtractAlembic/ExtractAnimation plugin. + """ + publish_attributes = node_data["publish_attributes"] + + if class_name in publish_attributes: + return node_data + + attributes = [ + "attr", + "attrPrefix", + "visibleOnly", + "writeColorSets", + "writeFaceSets", + "writeNormals", + "renderableOnly", + "worldSpace" + ] + plugin_attributes = {} + for attr in attributes: + if attr not in node_data["creator_attributes"]: + continue + value = node_data["creator_attributes"].pop(attr) + + plugin_attributes[attr] = value + + publish_attributes[class_name] = plugin_attributes + + return node_data + + +class CreateAnimation(plugin.MayaHiddenCreator): + """Animation output for character rigs + + We hide the animation creator from the UI since the creation of it is + automated upon loading a rig. There's an inventory action to recreate it + for loaded rigs if by chance someone deleted the animation instance.
+ """ + + identifier = "io.openpype.creators.maya.animation" + name = "animationDefault" + label = "Animation" + product_type = "animation" + icon = "male" + + write_color_sets = False + write_face_sets = False + include_parent_hierarchy = False + include_user_defined_attributes = False + + def read_instance_node(self, node): + node_data = super(CreateAnimation, self).read_instance_node(node) + node_data = convert_legacy_alembic_creator_attributes( + node_data, "ExtractAnimation" + ) + return node_data + + def get_instance_attr_defs(self): + defs = super(CreateAnimation, self).get_instance_attr_defs() + defs += _get_animation_attr_defs(self) + return defs + + +class CreatePointCache(plugin.MayaCreator): + """Alembic pointcache for animated data""" + + identifier = "io.openpype.creators.maya.pointcache" + label = "Pointcache" + product_type = "pointcache" + icon = "gears" + write_color_sets = False + write_face_sets = False + include_user_defined_attributes = False + + def read_instance_node(self, node): + node_data = super(CreatePointCache, self).read_instance_node(node) + node_data = convert_legacy_alembic_creator_attributes( + node_data, "ExtractAlembic" + ) + return node_data + + def get_instance_attr_defs(self): + defs = super(CreatePointCache, self).get_instance_attr_defs() + defs += _get_animation_attr_defs(self) + return defs + + def create(self, product_name, instance_data, pre_create_data): + instance = super(CreatePointCache, self).create( + product_name, instance_data, pre_create_data + ) + instance_node = instance.get("instance_node") + + # For Arnold standin proxy + proxy_set = cmds.sets(name=instance_node + "_proxy_SET", empty=True) + cmds.sets(proxy_set, forceElement=instance_node) diff --git a/client/ayon_core/hosts/maya/plugins/create/create_arnold_scene_source.py b/client/ayon_core/hosts/maya/plugins/create/create_arnold_scene_source.py index dc0ffb02c1..e321c13ca0 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_arnold_scene_source.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_arnold_scene_source.py @@ -1,3 +1,5 @@ +from maya import cmds + from ayon_core.hosts.maya.api import ( lib, plugin @@ -87,16 +89,24 @@ class CreateArnoldSceneSource(plugin.MayaCreator): return defs + +class CreateArnoldSceneSourceProxy(CreateArnoldSceneSource): + """Arnold Scene Source Proxy + + This product type facilitates working with proxy geometry in the viewport. 
+ """ + + identifier = "io.openpype.creators.maya.assproxy" + label = "Arnold Scene Source Proxy" + product_type = "assProxy" + icon = "cube" + def create(self, product_name, instance_data, pre_create_data): - - from maya import cmds - instance = super(CreateArnoldSceneSource, self).create( product_name, instance_data, pre_create_data ) instance_node = instance.get("instance_node") - content = cmds.sets(name=instance_node + "_content_SET", empty=True) proxy = cmds.sets(name=instance_node + "_proxy_SET", empty=True) - cmds.sets([content, proxy], forceElement=instance_node) + cmds.sets([proxy], forceElement=instance_node) diff --git a/client/ayon_core/hosts/maya/plugins/create/create_pointcache.py b/client/ayon_core/hosts/maya/plugins/create/create_pointcache.py deleted file mode 100644 index 05e3a1a29f..0000000000 --- a/client/ayon_core/hosts/maya/plugins/create/create_pointcache.py +++ /dev/null @@ -1,88 +0,0 @@ -from maya import cmds - -from ayon_core.hosts.maya.api import ( - lib, - plugin -) -from ayon_core.lib import ( - BoolDef, - TextDef -) - - -class CreatePointCache(plugin.MayaCreator): - """Alembic pointcache for animated data""" - - identifier = "io.openpype.creators.maya.pointcache" - label = "Pointcache" - product_type = "pointcache" - icon = "gears" - write_color_sets = False - write_face_sets = False - include_user_defined_attributes = False - - def get_instance_attr_defs(self): - - defs = lib.collect_animation_defs() - - defs.extend([ - BoolDef("writeColorSets", - label="Write vertex colors", - tooltip="Write vertex colors with the geometry", - default=False), - BoolDef("writeFaceSets", - label="Write face sets", - tooltip="Write face sets with the geometry", - default=False), - BoolDef("renderableOnly", - label="Renderable Only", - tooltip="Only export renderable visible shapes", - default=False), - BoolDef("visibleOnly", - label="Visible Only", - tooltip="Only export dag objects visible during " - "frame range", - default=False), - BoolDef("includeParentHierarchy", - label="Include Parent Hierarchy", - tooltip="Whether to include parent hierarchy of nodes in " - "the publish instance", - default=False), - BoolDef("worldSpace", - label="World-Space Export", - default=True), - BoolDef("refresh", - label="Refresh viewport during export", - default=False), - BoolDef("includeUserDefinedAttributes", - label="Include User Defined Attributes", - default=self.include_user_defined_attributes), - TextDef("attr", - label="Custom Attributes", - default="", - placeholder="attr1, attr2"), - TextDef("attrPrefix", - label="Custom Attributes Prefix", - default="", - placeholder="prefix1, prefix2") - ]) - - # TODO: Implement these on a Deadline plug-in instead? - """ - # Default to not send to farm. 
- self.data["farm"] = False - self.data["priority"] = 50 - """ - - return defs - - def create(self, product_name, instance_data, pre_create_data): - - instance = super(CreatePointCache, self).create( - product_name, instance_data, pre_create_data - ) - instance_node = instance.get("instance_node") - - # For Arnold standin proxy - proxy_set = cmds.sets(name=instance_node + "_proxy_SET", empty=True) - cmds.sets(proxy_set, forceElement=instance_node) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py b/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py index 4b7d2f42ab..ae3b68965a 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py @@ -12,6 +12,7 @@ from ayon_core.hosts.maya.api.lib import ( unique_namespace, get_attribute_input, maintained_selection, + get_fps_for_current_context ) from ayon_core.hosts.maya.api.pipeline import containerise from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type @@ -29,7 +30,13 @@ class ArnoldStandinLoader(load.LoaderPlugin): """Load as Arnold standin""" product_types = { - "ass", "animation", "model", "proxyAbc", "pointcache", "usd" + "ass", + "assProxy", + "animation", + "model", + "proxyAbc", + "pointcache", + "usd" } representations = {"ass", "abc", "usda", "usdc", "usd"} @@ -95,8 +102,10 @@ class ArnoldStandinLoader(load.LoaderPlugin): sequence = is_sequence(os.listdir(os.path.dirname(repre_path))) cmds.setAttr(standin_shape + ".useFrameExtension", sequence) - fps = float(version_attributes.get("fps")) or 25 - cmds.setAttr(standin_shape + ".abcFPS", fps) + fps = ( + version_attributes.get("fps") or get_fps_for_current_context() + ) + cmds.setAttr(standin_shape + ".abcFPS", float(fps)) nodes = [root, standin, standin_shape] if operator is not None: @@ -128,6 +137,18 @@ class ArnoldStandinLoader(load.LoaderPlugin): proxy_path = "/".join([os.path.dirname(path), proxy_basename]) return proxy_basename, proxy_path + def _update_operators(self, string_replace_operator, proxy_basename, path): + cmds.setAttr( + string_replace_operator + ".match", + proxy_basename.split(".")[0], + type="string" + ) + cmds.setAttr( + string_replace_operator + ".replace", + os.path.basename(path).split(".")[0], + type="string" + ) + def _setup_proxy(self, shape, path, namespace): proxy_basename, proxy_path = self._get_proxy_path(path) @@ -150,16 +171,7 @@ class ArnoldStandinLoader(load.LoaderPlugin): "*.(@node=='{}')".format(node_type), type="string" ) - cmds.setAttr( - string_replace_operator + ".match", - proxy_basename, - type="string" - ) - cmds.setAttr( - string_replace_operator + ".replace", - os.path.basename(path), - type="string" - ) + self._update_operators(string_replace_operator, proxy_basename, path) cmds.connectAttr( string_replace_operator + ".out", @@ -194,18 +206,9 @@ class ArnoldStandinLoader(load.LoaderPlugin): path = get_representation_path(repre_entity) proxy_basename, proxy_path = self._get_proxy_path(path) - # Whether there is proxy or so, we still update the string operator. + # Whether there is proxy or not, we still update the string operator. # If no proxy exists, the string operator won't replace anything. 
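# (Hypothetical filenames) For a representation path ending in
# "asset_v002.ass" next to a proxy file "asset_proxy.ass", the operator is
# now set to match "asset_proxy" and replace with "asset_v002"; the
# extension is stripped via split(".")[0], presumably so frame-padded
# sequence names keep matching.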
- cmds.setAttr( - string_replace_operator + ".match", - proxy_basename, - type="string" - ) - cmds.setAttr( - string_replace_operator + ".replace", - os.path.basename(path), - type="string" - ) + self._update_operators(string_replace_operator, proxy_basename, path) dso_path = path if os.path.exists(proxy_path): diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_animation.py b/client/ayon_core/hosts/maya/plugins/publish/collect_animation.py index 2ab6511ece..391c80c84e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_animation.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_animation.py @@ -58,4 +58,3 @@ class CollectAnimationOutputGeometry(pyblish.api.InstancePlugin): if instance.data.get("farm"): instance.data["families"].append("publish.farm") - diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py b/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py index 0db89bee31..fb71e128eb 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py @@ -10,21 +10,23 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin): # Offset to be after renderable camera collection. order = pyblish.api.CollectorOrder + 0.2 label = "Collect Arnold Scene Source" - families = ["ass"] + families = ["ass", "assProxy"] def process(self, instance): - objsets = instance.data["setMembers"] + instance.data["members"] = [] + for set_member in instance.data["setMembers"]: + if cmds.nodeType(set_member) != "objectSet": + instance.data["members"].extend(self.get_hierarchy(set_member)) + continue - for objset in objsets: - objset = str(objset) - members = cmds.sets(objset, query=True) + members = cmds.sets(set_member, query=True) members = cmds.ls(members, long=True) if members is None: - self.log.warning("Skipped empty instance: \"%s\" " % objset) + self.log.warning( + "Skipped empty instance: \"%s\" " % set_member + ) continue - if objset.endswith("content_SET"): - instance.data["contentMembers"] = self.get_hierarchy(members) - if objset.endswith("proxy_SET"): + if set_member.endswith("proxy_SET"): instance.data["proxy"] = self.get_hierarchy(members) # Use camera in object set if present else default to render globals @@ -33,7 +35,7 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin): renderable = [c for c in cameras if cmds.getAttr("%s.renderable" % c)] if renderable: camera = renderable[0] - for node in instance.data["contentMembers"]: + for node in instance.data["members"]: camera_shapes = cmds.listRelatives( node, shapes=True, type="camera" ) @@ -46,18 +48,11 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin): self.log.debug("data: {}".format(instance.data)) def get_hierarchy(self, nodes): - """Return nodes with all their children. 
- - Arguments: - nodes (List[str]): List of nodes to collect children hierarchy for - - Returns: - list: Input nodes with their children hierarchy - - """ + """Return nodes with all their children""" nodes = cmds.ls(nodes, long=True) if not nodes: return [] - - children = get_all_children(nodes, ignore_intermediate_objects=True) - return list(children.union(nodes)) + children = get_all_children(nodes) + # Make sure nodes merged with children only + # contains unique entries + return list(set(nodes + list(children))) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_user_defined_attributes.py b/client/ayon_core/hosts/maya/plugins/publish/collect_user_defined_attributes.py index 16fef2e168..3d586d48fb 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_user_defined_attributes.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_user_defined_attributes.py @@ -14,7 +14,9 @@ class CollectUserDefinedAttributes(pyblish.api.InstancePlugin): def process(self, instance): # Collect user defined attributes. - if not instance.data.get("includeUserDefinedAttributes", False): + if not instance.data["creator_attributes"].get( + "includeUserDefinedAttributes" + ): return if "out_hierarchy" in instance.data: diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_arnold_scene_source.py b/client/ayon_core/hosts/maya/plugins/publish/extract_arnold_scene_source.py index ed8f2ad40c..fb4c41f1de 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_arnold_scene_source.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_arnold_scene_source.py @@ -17,8 +17,7 @@ class ExtractArnoldSceneSource(publish.Extractor): families = ["ass"] asciiAss = False - def process(self, instance): - staging_dir = self.staging_dir(instance) + def _pre_process(self, instance, staging_dir): file_path = os.path.join(staging_dir, "{}.ass".format(instance.name)) # Mask @@ -70,24 +69,38 @@ class ExtractArnoldSceneSource(publish.Extractor): "mask": mask } - filenames, nodes_by_id = self._extract( - instance.data["contentMembers"], attribute_data, kwargs - ) - if "representations" not in instance.data: instance.data["representations"] = [] + return attribute_data, kwargs + + def process(self, instance): + staging_dir = self.staging_dir(instance) + attribute_data, kwargs = self._pre_process(instance, staging_dir) + + filenames = self._extract( + instance.data["members"], attribute_data, kwargs + ) + + self._post_process( + instance, filenames, staging_dir, kwargs["startFrame"] + ) + + def _post_process(self, instance, filenames, staging_dir, frame_start): + nodes_by_id = self._nodes_by_id(instance[:]) representation = { "name": "ass", "ext": "ass", "files": filenames if len(filenames) > 1 else filenames[0], "stagingDir": staging_dir, - "frameStart": kwargs["startFrame"] + "frameStart": frame_start } instance.data["representations"].append(representation) - json_path = os.path.join(staging_dir, "{}.json".format(instance.name)) + json_path = os.path.join( + staging_dir, "{}.json".format(instance.name) + ) with open(json_path, "w") as f: json.dump(nodes_by_id, f) @@ -104,13 +117,68 @@ class ExtractArnoldSceneSource(publish.Extractor): "Extracted instance {} to: {}".format(instance.name, staging_dir) ) - # Extract proxy. 
- if not instance.data.get("proxy", []): - return + def _nodes_by_id(self, nodes): + nodes_by_id = defaultdict(list) - kwargs["filename"] = file_path.replace(".ass", "_proxy.ass") + for node in nodes: + id = lib.get_id(node) - filenames, _ = self._extract( + if id is None: + continue + + # Converting Maya hierarchy separator "|" to Arnold separator "/". + nodes_by_id[id].append(node.replace("|", "/")) + + return nodes_by_id + + def _extract(self, nodes, attribute_data, kwargs): + filenames = [] + with lib.attribute_values(attribute_data): + with lib.maintained_selection(): + self.log.debug( + "Writing: {}".format(nodes) + ) + cmds.select(nodes, noExpand=True) + + self.log.debug( + "Extracting ass sequence with: {}".format(kwargs) + ) + + exported_files = cmds.arnoldExportAss(**kwargs) + + for file in exported_files: + filenames.append(os.path.split(file)[1]) + + self.log.debug("Exported: {}".format(filenames)) + + return filenames + + +class ExtractArnoldSceneSourceProxy(ExtractArnoldSceneSource): + """Extract the content of the instance to an Arnold Scene Source file.""" + + label = "Extract Arnold Scene Source Proxy" + hosts = ["maya"] + families = ["assProxy"] + asciiAss = True + + def process(self, instance): + staging_dir = self.staging_dir(instance) + attribute_data, kwargs = self._pre_process(instance, staging_dir) + + filenames, _ = self._duplicate_extract( + instance.data["members"], attribute_data, kwargs + ) + + self._post_process( + instance, filenames, staging_dir, kwargs["startFrame"] + ) + + kwargs["filename"] = os.path.join( + staging_dir, "{}_proxy.ass".format(instance.name) + ) + + filenames, _ = self._duplicate_extract( instance.data["proxy"], attribute_data, kwargs ) @@ -125,12 +193,11 @@ class ExtractArnoldSceneSource(publish.Extractor): instance.data["representations"].append(representation) - def _extract(self, nodes, attribute_data, kwargs): + def _duplicate_extract(self, nodes, attribute_data, kwargs): self.log.debug( "Writing {} with:\n{}".format(kwargs["filename"], kwargs) ) filenames = [] - nodes_by_id = defaultdict(list) # Duplicating nodes so they are direct children of the world. This # makes the hierarchy of any exported ass file the same. with lib.delete_after() as delete_bin: @@ -147,7 +214,9 @@ class ExtractArnoldSceneSource(publish.Extractor): if not shapes: continue - duplicate_transform = cmds.duplicate(node)[0] + basename = cmds.duplicate(node)[0] + parents = cmds.ls(node, long=True)[0].split("|")[:-1] + duplicate_transform = "|".join(parents + [basename]) if cmds.listRelatives(duplicate_transform, parent=True): duplicate_transform = cmds.parent( @@ -172,28 +241,7 @@ class ExtractArnoldSceneSource(publish.Extractor): duplicate_nodes.extend(shapes) delete_bin.append(duplicate_transform) - # Copy cbId to mtoa_constant. - for node in duplicate_nodes: - # Converting Maya hierarchy separator "|" to Arnold - # separator "/". 
- nodes_by_id[lib.get_id(node)].append(node.replace("|", "/")) - - with lib.attribute_values(attribute_data): - with lib.maintained_selection(): - self.log.debug( - "Writing: {}".format(duplicate_nodes) - ) - cmds.select(duplicate_nodes, noExpand=True) - - self.log.debug( - "Extracting ass sequence with: {}".format(kwargs) - ) - - exported_files = cmds.arnoldExportAss(**kwargs) - - for file in exported_files: - filenames.append(os.path.split(file)[1]) - - self.log.debug("Exported: {}".format(filenames)) + nodes_by_id = self._nodes_by_id(duplicate_nodes) + filenames = self._extract(duplicate_nodes, attribute_data, kwargs) return filenames, nodes_by_id diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_assembly.py b/client/ayon_core/hosts/maya/plugins/publish/extract_assembly.py index 2c23f9b752..5f51dc38cb 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_assembly.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_assembly.py @@ -2,7 +2,7 @@ import os import json from ayon_core.pipeline import publish -from ayon_core.hosts.maya.api.lib import extract_alembic +from ayon_core.hosts.maya.api.alembic import extract_alembic from maya import cmds diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_pointcache.py b/client/ayon_core/hosts/maya/plugins/publish/extract_pointcache.py index 5de72f7674..d7f9594374 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_pointcache.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_pointcache.py @@ -1,17 +1,28 @@ import os +from collections import OrderedDict from maya import cmds from ayon_core.pipeline import publish +from ayon_core.hosts.maya.api.alembic import extract_alembic from ayon_core.hosts.maya.api.lib import ( - extract_alembic, suspended_refresh, maintained_selection, iter_visible_nodes_in_range ) +from ayon_core.lib import ( + BoolDef, + TextDef, + NumberDef, + EnumDef, + UISeparatorDef, + UILabelDef, +) +from ayon_core.pipeline.publish import AYONPyblishPluginMixin +from ayon_core.pipeline import KnownPublishError -class ExtractAlembic(publish.Extractor): +class ExtractAlembic(publish.Extractor, AYONPyblishPluginMixin): """Produce an alembic of just point positions and normals. 
Positions and normals, uvs, creases are preserved, but nothing more, @@ -27,8 +38,35 @@ class ExtractAlembic(publish.Extractor): targets = ["local", "remote"] # From settings + attr = [] + attrPrefix = [] + autoSubd = False bake_attributes = [] bake_attribute_prefixes = [] + dataFormat = "ogawa" + eulerFilter = False + melPerFrameCallback = "" + melPostJobCallback = "" + overrides = [] + preRoll = False + preRollStartFrame = 0 + pythonPerFrameCallback = "" + pythonPostJobCallback = "" + renderableOnly = False + stripNamespaces = True + uvsOnly = False + uvWrite = False + userAttr = "" + userAttrPrefix = "" + verbose = False + visibleOnly = False + wholeFrameGeo = False + worldSpace = True + writeColorSets = False + writeFaceSets = False + writeNormals = True + writeUVSets = False + writeVisibility = False def process(self, instance): if instance.data.get("farm"): @@ -41,16 +79,38 @@ class ExtractAlembic(publish.Extractor): start = float(instance.data.get("frameStartHandle", 1)) end = float(instance.data.get("frameEndHandle", 1)) - attrs = instance.data.get("attr", "").split(";") - attrs = [value for value in attrs if value.strip()] + attribute_values = self.get_attr_values_from_data( + instance.data + ) + + attrs = [ + attr.strip() + for attr in attribute_values.get("attr", "").split(";") + if attr.strip() + ] attrs += instance.data.get("userDefinedAttributes", []) attrs += self.bake_attributes attrs += ["cbId"] - attr_prefixes = instance.data.get("attrPrefix", "").split(";") - attr_prefixes = [value for value in attr_prefixes if value.strip()] + attr_prefixes = [ + attr.strip() + for attr in attribute_values.get("attrPrefix", "").split(";") + if attr.strip() + ] attr_prefixes += self.bake_attribute_prefixes + user_attrs = [ + attr.strip() + for attr in attribute_values.get("userAttr", "").split(";") + if attr.strip() + ] + + user_attr_prefixes = [ + attr.strip() + for attr in attribute_values.get("userAttrPrefix", "").split(";") + if attr.strip() + ] + self.log.debug("Extracting pointcache..") dirname = self.staging_dir(instance) @@ -58,28 +118,83 @@ class ExtractAlembic(publish.Extractor): filename = "{name}.abc".format(**instance.data) path = os.path.join(parent_dir, filename) - options = { - "step": instance.data.get("step", 1.0), - "attr": attrs, - "attrPrefix": attr_prefixes, - "writeVisibility": True, - "writeCreases": True, - "writeColorSets": instance.data.get("writeColorSets", False), - "writeFaceSets": instance.data.get("writeFaceSets", False), - "uvWrite": True, - "selection": True, - "worldSpace": instance.data.get("worldSpace", True) - } - + root = None if not instance.data.get("includeParentHierarchy", True): # Set the root nodes if we don't want to include parents # The roots are to be considered the ones that are the actual # direct members of the set - options["root"] = roots + root = roots - if int(cmds.about(version=True)) >= 2017: - # Since Maya 2017 alembic supports multiple uv sets - write them. 
- options["writeUVSets"] = True + kwargs = { + "file": path, + "attr": attrs, + "attrPrefix": attr_prefixes, + "userAttr": user_attrs, + "userAttrPrefix": user_attr_prefixes, + "dataFormat": attribute_values.get("dataFormat", self.dataFormat), + "endFrame": end, + "eulerFilter": attribute_values.get( + "eulerFilter", self.eulerFilter + ), + "preRoll": attribute_values.get("preRoll", self.preRoll), + "preRollStartFrame": attribute_values.get( + "preRollStartFrame", self.preRollStartFrame + ), + "renderableOnly": attribute_values.get( + "renderableOnly", self.renderableOnly + ), + "root": root, + "selection": True, + "startFrame": start, + "step": instance.data.get( + "creator_attributes", {} + ).get("step", 1.0), + "stripNamespaces": attribute_values.get( + "stripNamespaces", self.stripNamespaces + ), + "uvWrite": attribute_values.get("uvWrite", self.uvWrite), + "verbose": attribute_values.get("verbose", self.verbose), + "wholeFrameGeo": attribute_values.get( + "wholeFrameGeo", self.wholeFrameGeo + ), + "worldSpace": attribute_values.get("worldSpace", self.worldSpace), + "writeColorSets": attribute_values.get( + "writeColorSets", self.writeColorSets + ), + "writeCreases": attribute_values.get( + "writeCreases", self.writeCreases + ), + "writeFaceSets": attribute_values.get( + "writeFaceSets", self.writeFaceSets + ), + "writeUVSets": attribute_values.get( + "writeUVSets", self.writeUVSets + ), + "writeVisibility": attribute_values.get( + "writeVisibility", self.writeVisibility + ), + "autoSubd": attribute_values.get( + "autoSubd", self.autoSubd + ), + "uvsOnly": attribute_values.get( + "uvsOnly", self.uvsOnly + ), + "writeNormals": attribute_values.get( + "writeNormals", self.writeNormals + ), + "melPerFrameCallback": attribute_values.get( + "melPerFrameCallback", self.melPerFrameCallback + ), + "melPostJobCallback": attribute_values.get( + "melPostJobCallback", self.melPostJobCallback + ), + "pythonPerFrameCallback": attribute_values.get( + "pythonPerFrameCallback", self.pythonPostJobCallback + ), + "pythonPostJobCallback": attribute_values.get( + "pythonPostJobCallback", self.pythonPostJobCallback + ) + } if instance.data.get("visibleOnly", False): # If we only want to include nodes that are visible in the frame @@ -87,20 +202,19 @@ class ExtractAlembic(publish.Extractor): # flag does not filter out those that are only hidden on some # frames as it counts "animated" or "connected" visibilities as # if it's always visible. 
- nodes = list(iter_visible_nodes_in_range(nodes, - start=start, - end=end)) + nodes = list( + iter_visible_nodes_in_range(nodes, start=start, end=end) + ) suspend = not instance.data.get("refresh", False) with suspended_refresh(suspend=suspend): with maintained_selection(): cmds.select(nodes, noExpand=True) - extract_alembic( - file=path, - startFrame=start, - endFrame=end, - **options + self.log.debug( + "Running `extract_alembic` with the keyword arguments: " + "{}".format(kwargs) ) + extract_alembic(**kwargs) if "representations" not in instance.data: instance.data["representations"] = [] @@ -124,21 +238,17 @@ class ExtractAlembic(publish.Extractor): return path = path.replace(".abc", "_proxy.abc") + kwargs["file"] = path if not instance.data.get("includeParentHierarchy", True): # Set the root nodes if we don't want to include parents # The roots are to be considered the ones that are the actual # direct members of the set - options["root"] = instance.data["proxyRoots"] + kwargs["root"] = instance.data["proxyRoots"] with suspended_refresh(suspend=suspend): with maintained_selection(): cmds.select(instance.data["proxy"]) - extract_alembic( - file=path, - startFrame=start, - endFrame=end, - **options - ) + extract_alembic(**kwargs) representation = { "name": "proxy", @@ -152,24 +262,274 @@ class ExtractAlembic(publish.Extractor): def get_members_and_roots(self, instance): return instance[:], instance.data.get("setMembers") + @classmethod + def get_attribute_defs(cls): + if not cls.overrides: + return [] + + override_defs = OrderedDict({ + "autoSubd": BoolDef( + "autoSubd", + label="Auto Subd", + default=cls.autoSubd, + tooltip=( + "If this flag is present and the mesh has crease edges, " + "crease vertices or holes, the mesh (OPolyMesh) would now " + "be written out as an OSubD and crease info will be stored" + " in the Alembic file. Otherwise, creases info won't be " + "preserved in Alembic file unless a custom Boolean " + "attribute SubDivisionMesh has been added to mesh node and" + " its value is true." + ) + ), + "eulerFilter": BoolDef( + "eulerFilter", + label="Euler Filter", + default=cls.eulerFilter, + tooltip="Apply Euler filter while sampling rotations." + ), + "renderableOnly": BoolDef( + "renderableOnly", + label="Renderable Only", + default=cls.renderableOnly, + tooltip="Only export renderable visible shapes." + ), + "stripNamespaces": BoolDef( + "stripNamespaces", + label="Strip Namespaces", + default=cls.stripNamespaces, + tooltip=( + "Namespaces will be stripped off of the node before being " + "written to Alembic." + ) + ), + "uvsOnly": BoolDef( + "uvsOnly", + label="UVs Only", + default=cls.uvsOnly, + tooltip=( + "If this flag is present, only uv data for PolyMesh and " + "SubD shapes will be written to the Alembic file." + ) + ), + "uvWrite": BoolDef( + "uvWrite", + label="UV Write", + default=cls.uvWrite, + tooltip=( + "Uv data for PolyMesh and SubD shapes will be written to " + "the Alembic file." + ) + ), + "verbose": BoolDef( + "verbose", + label="Verbose", + default=cls.verbose, + tooltip="Prints the current frame that is being evaluated." + ), + "visibleOnly": BoolDef( + "visibleOnly", + label="Visible Only", + default=cls.visibleOnly, + tooltip="Only export dag objects visible during frame range." + ), + "wholeFrameGeo": BoolDef( + "wholeFrameGeo", + label="Whole Frame Geo", + default=cls.wholeFrameGeo, + tooltip=( + "Data for geometry will only be written out on whole " + "frames." 
+ ) + ), + "worldSpace": BoolDef( + "worldSpace", + label="World Space", + default=cls.worldSpace, + tooltip="Any root nodes will be stored in world space." + ), + "writeColorSets": BoolDef( + "writeColorSets", + label="Write Color Sets", + default=cls.writeColorSets, + tooltip="Write vertex colors with the geometry." + ), + "writeFaceSets": BoolDef( + "writeFaceSets", + label="Write Face Sets", + default=cls.writeFaceSets, + tooltip="Write face sets with the geometry." + ), + "writeNormals": BoolDef( + "writeNormals", + label="Write Normals", + default=cls.writeNormals, + tooltip="Write normals with the deforming geometry." + ), + "writeUVSets": BoolDef( + "writeUVSets", + label="Write UV Sets", + default=cls.writeUVSets, + tooltip=( + "Write all uv sets on MFnMeshes as vector 2 indexed " + "geometry parameters with face varying scope." + ) + ), + "writeVisibility": BoolDef( + "writeVisibility", + label="Write Visibility", + default=cls.writeVisibility, + tooltip=( + "Visibility state will be stored in the Alembic file. " + "Otherwise everything written out is treated as visible." + ) + ), + "preRoll": BoolDef( + "preRoll", + label="Pre Roll", + default=cls.preRoll, + tooltip="This frame range will not be sampled." + ), + "preRollStartFrame": NumberDef( + "preRollStartFrame", + label="Pre Roll Start Frame", + tooltip=( + "The frame to start scene evaluation at. This is used" + " to set the starting frame for time dependent " + "translations and can be used to evaluate run-up that" + " isn't actually translated." + ), + default=cls.preRollStartFrame + ), + "dataFormat": EnumDef( + "dataFormat", + label="Data Format", + items=["ogawa", "HDF"], + default=cls.dataFormat, + tooltip="The data format to use to write the file." + ), + "attr": TextDef( + "attr", + label="Custom Attributes", + placeholder="attr1; attr2; ...", + default=cls.attr, + tooltip=( + "Attributes matching by name will be included in the " + "Alembic export. Attributes should be separated by " + "semi-colon `;`" + ) + ), + "attrPrefix": TextDef( + "attrPrefix", + label="Custom Attributes Prefix", + placeholder="prefix1; prefix2; ...", + default=cls.attrPrefix, + tooltip=( + "Attributes starting with these prefixes will be included " + "in the Alembic export. Attributes should be separated by " + "semi-colon `;`" + ) + ), + "userAttr": TextDef( + "userAttr", + label="User Attr", + placeholder="attr1; attr2; ...", + default=cls.userAttr, + tooltip=( + "Attributes matching by name will be included in the " + "Alembic export. Attributes should be separated by " + "semi-colon `;`" + ) + ), + "userAttrPrefix": TextDef( + "userAttrPrefix", + label="User Attr Prefix", + placeholder="prefix1; prefix2; ...", + default=cls.userAttrPrefix, + tooltip=( + "Attributes starting with these prefixes will be included " + "in the Alembic export. Attributes should be separated by " + "semi-colon `;`" + ) + ), + "melPerFrameCallback": TextDef( + "melPerFrameCallback", + label="Mel Per Frame Callback", + default=cls.melPerFrameCallback, + tooltip=( + "When each frame (and the static frame) is evaluated the " + "string specified is evaluated as a Mel command." + ) + ), + "melPostJobCallback": TextDef( + "melPostJobCallback", + label="Mel Post Job Callback", + default=cls.melPostJobCallback, + tooltip=( + "When the translation has finished the string specified " + "is evaluated as a Mel command." 
+ ) + ), + "pythonPerFrameCallback": TextDef( + "pythonPerFrameCallback", + label="Python Per Frame Callback", + default=cls.pythonPerFrameCallback, + tooltip=( + "When each frame (and the static frame) is evaluated the " + "string specified is evaluated as a python command." + ) + ), + "pythonPostJobCallback": TextDef( + "pythonPostJobCallback", + label="Python Post Job Callback", + default=cls.pythonPostJobCallback, + tooltip=( + "When the translation has finished the string specified " + "is evaluated as a python command." + ) + ) + }) + + defs = super(ExtractAlembic, cls).get_attribute_defs() + + defs.extend([ + UISeparatorDef("sep_alembic_options"), + UILabelDef("Alembic Options"), + ]) + + # The Arguments that can be modified by the Publisher + overrides = set(cls.overrides) + for key, value in override_defs.items(): + if key not in overrides: + continue + + defs.append(value) + + defs.append( + UISeparatorDef("sep_alembic_options_end") + ) + + return defs + class ExtractAnimation(ExtractAlembic): - label = "Extract Animation" + label = "Extract Animation (Alembic)" families = ["animation"] def get_members_and_roots(self, instance): - # Collect the out set nodes out_sets = [node for node in instance if node.endswith("out_SET")] if len(out_sets) != 1: - raise RuntimeError("Couldn't find exactly one out_SET: " - "{0}".format(out_sets)) + raise KnownPublishError( + "Couldn't find exactly one out_SET: {0}".format(out_sets) + ) out_set = out_sets[0] - roots = cmds.sets(out_set, query=True) + roots = cmds.sets(out_set, query=True) or [] # Include all descendants - nodes = roots + cmds.listRelatives(roots, - allDescendents=True, - fullPath=True) or [] + nodes = roots + nodes += cmds.listRelatives( + roots, allDescendents=True, fullPath=True + ) or [] return nodes, roots diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_proxy_abc.py b/client/ayon_core/hosts/maya/plugins/publish/extract_proxy_abc.py index 3637a58614..5aefdfc33a 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -3,8 +3,8 @@ import os from maya import cmds from ayon_core.pipeline import publish +from ayon_core.hosts.maya.api.alembic import extract_alembic from ayon_core.hosts.maya.api.lib import ( - extract_alembic, suspended_refresh, maintained_selection, iter_visible_nodes_in_range diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py index 1a389f3d33..b5cc7745a1 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py @@ -5,8 +5,8 @@ import os from maya import cmds # noqa from ayon_core.pipeline import publish +from ayon_core.hosts.maya.api.alembic import extract_alembic from ayon_core.hosts.maya.api.lib import ( - extract_alembic, suspended_refresh, maintained_selection ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py b/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py index d799486184..54d295b479 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py @@ -5,7 +5,7 @@ import copy from maya import cmds import pyblish.api -from ayon_core.hosts.maya.api.lib import extract_alembic +from ayon_core.hosts.maya.api.alembic
import extract_alembic from ayon_core.pipeline import publish diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_alembic_options_defaults.py b/client/ayon_core/hosts/maya/plugins/publish/validate_alembic_options_defaults.py new file mode 100644 index 0000000000..5197100406 --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_alembic_options_defaults.py @@ -0,0 +1,98 @@ +import pyblish.api + +from ayon_core.pipeline import OptionalPyblishPluginMixin +from ayon_core.pipeline.publish import RepairAction, PublishValidationError + + +class ValidateAlembicDefaultsPointcache( + pyblish.api.InstancePlugin, OptionalPyblishPluginMixin +): + """Validate the attributes on the instance are defaults. + + The defaults are defined in the project settings. + """ + + order = pyblish.api.ValidatorOrder + families = ["pointcache"] + hosts = ["maya"] + label = "Validate Alembic Options Defaults" + actions = [RepairAction] + optional = True + + plugin_name = "ExtractAlembic" + + @classmethod + def _get_settings(cls, context): + maya_settings = context.data["project_settings"]["maya"] + settings = maya_settings["publish"][cls.plugin_name] + return settings + + @classmethod + def _get_publish_attributes(cls, instance): + attributes = instance.data["publish_attributes"][cls.plugin_name] + return attributes + + def process(self, instance): + if not self.is_active(instance.data): + return + + settings = self._get_settings(instance.context) + + attributes = self._get_publish_attributes(instance) + + msg = ( + "Alembic Extract setting \"{}\" is not the default value:" + "\nCurrent: {}" + "\nDefault Value: {}\n" + ) + errors = [] + for key, value in attributes.items(): + default_value = settings[key] + + # Lists are best compared sorted since we can't rely on the order + # of the items. + if isinstance(value, list): + value = sorted(value) + default_value = sorted(default_value) + + if value != default_value: + errors.append(msg.format(key, value, default_value)) + + if errors: + raise PublishValidationError("\n".join(errors)) + + @classmethod + def repair(cls, instance): + # Find create instance twin. + create_context = instance.context.data["create_context"] + create_instance = create_context.get_instance_by_id( + instance.data["instance_id"] + ) + + # Set the settings values on the create context then save to workfile. + plugin_name = cls.plugin_name + attributes = cls._get_publish_attributes(instance) + settings = cls._get_settings(instance.context) + create_publish_attributes = create_instance.data["publish_attributes"] + for key in attributes: + create_publish_attributes[plugin_name][key] = settings[key] + + create_context.save_changes() + + +class ValidateAlembicDefaultsAnimation( + ValidateAlembicDefaultsPointcache +): + """Validate the attributes on the instance are defaults. + + The defaults are defined in the project settings.
+ """ + label = "Validate Alembic Options Defaults" + families = ["animation"] + plugin_name = "ExtractAnimation" diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_arnold_scene_source.py b/client/ayon_core/hosts/maya/plugins/publish/validate_arnold_scene_source.py index 92b4922492..8574b3ecc8 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_arnold_scene_source.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_arnold_scene_source.py @@ -1,30 +1,56 @@ +from maya import cmds + import pyblish.api + from ayon_core.pipeline.publish import ( ValidateContentsOrder, PublishValidationError ) +from ayon_core.hosts.maya.api.lib import is_visible class ValidateArnoldSceneSource(pyblish.api.InstancePlugin): """Validate Arnold Scene Source. - We require at least 1 root node/parent for the meshes. This is to ensure we - can duplicate the nodes and preserve the names. + Ensure no nodes are hidden. + """ - If using proxies we need the nodes to share the same names and not be + order = ValidateContentsOrder + hosts = ["maya"] + families = ["ass", "assProxy"] + label = "Validate Arnold Scene Source" + + def process(self, instance): + # Validate against having nodes hidden, which will result in the + # extraction to ignore the node. + nodes = instance.data["members"] + instance.data.get("proxy", []) + nodes = [x for x in nodes if cmds.objectType(x, isAType='dagNode')] + hidden_nodes = [ + x for x in nodes if not is_visible(x, intermediateObject=False) + ] + if hidden_nodes: + raise PublishValidationError( + "Found hidden nodes:\n\n{}\n\nPlease unhide for" + " publishing.".format("\n".join(hidden_nodes)) + ) + + +class ValidateArnoldSceneSourceProxy(pyblish.api.InstancePlugin): + """Validate Arnold Scene Source Proxy. + + When using proxies we need the nodes to share the same names and not be parent to the world. This ends up needing at least two groups with content nodes and proxy nodes in another. """ order = ValidateContentsOrder hosts = ["maya"] - families = ["ass"] - label = "Validate Arnold Scene Source" + families = ["assProxy"] + label = "Validate Arnold Scene Source Proxy" def _get_nodes_by_name(self, nodes): ungrouped_nodes = [] nodes_by_name = {} parents = [] - same_named_nodes = {} for node in nodes: node_split = node.split("|") if len(node_split) == 2: @@ -35,33 +61,16 @@ class ValidateArnoldSceneSource(pyblish.api.InstancePlugin): parents.append(parent) node_name = node.rsplit("|", 1)[-1].rsplit(":", 1)[-1] - - # Check for same same nodes, which can happen in different - # hierarchies. - if node_name in nodes_by_name: - try: - same_named_nodes[node_name].append(node) - except KeyError: - same_named_nodes[node_name] = [ - nodes_by_name[node_name], node - ] - nodes_by_name[node_name] = node - if same_named_nodes: - message = "Found nodes with the same name:" - for name, nodes in same_named_nodes.items(): - message += "\n\n\"{}\":\n{}".format(name, "\n".join(nodes)) - - raise PublishValidationError(message) - return ungrouped_nodes, nodes_by_name, parents def process(self, instance): + # Validate against nodes directly parented to world. ungrouped_nodes = [] nodes, content_nodes_by_name, content_parents = ( - self._get_nodes_by_name(instance.data["contentMembers"]) + self._get_nodes_by_name(instance.data["members"]) ) ungrouped_nodes.extend(nodes) @@ -70,24 +79,21 @@ class ValidateArnoldSceneSource(pyblish.api.InstancePlugin): ) ungrouped_nodes.extend(nodes) - # Validate against nodes directly parented to world. 
if ungrouped_nodes: raise PublishValidationError( "Found nodes parented to the world: {}\n" "All nodes need to be grouped.".format(ungrouped_nodes) ) - # Proxy validation. - if not instance.data.get("proxy", []): - return - # Validate for content and proxy nodes amount being the same. - if len(instance.data["contentMembers"]) != len(instance.data["proxy"]): + if len(instance.data["members"]) != len(instance.data["proxy"]): raise PublishValidationError( "Amount of content nodes ({}) and proxy nodes ({}) needs to " - "be the same.".format( - len(instance.data["contentMembers"]), - len(instance.data["proxy"]) + "be the same.\nContent nodes: {}\nProxy nodes: {}".format( + len(instance.data["members"]), + len(instance.data["proxy"]), + instance.data["members"], + instance.data["proxy"] ) ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_arnold_scene_source_cbid.py b/client/ayon_core/hosts/maya/plugins/publish/validate_arnold_scene_source_cbid.py index a9d896952d..e5dbe178fc 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_arnold_scene_source_cbid.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_arnold_scene_source_cbid.py @@ -17,7 +17,7 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin, order = ValidateContentsOrder hosts = ["maya"] - families = ["ass"] + families = ["assProxy"] label = "Validate Arnold Scene Source CBID" actions = [RepairAction] optional = False @@ -40,15 +40,11 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin, @classmethod def get_invalid_couples(cls, instance): - content_nodes_by_name = cls._get_nodes_by_name( - instance.data["contentMembers"] - ) - proxy_nodes_by_name = cls._get_nodes_by_name( - instance.data.get("proxy", []) - ) + nodes_by_name = cls._get_nodes_by_name(instance.data["members"]) + proxy_nodes_by_name = cls._get_nodes_by_name(instance.data["proxy"]) invalid_couples = [] - for content_name, content_node in content_nodes_by_name.items(): + for content_name, content_node in nodes_by_name.items(): proxy_node = proxy_nodes_by_name.get(content_name, None) if not proxy_node: @@ -70,7 +66,7 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin, if not self.is_active(instance.data): return # Proxy validation. - if not instance.data.get("proxy", []): + if not instance.data["proxy"]: return # Validate for proxy nodes sharing the same cbId as content nodes.
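Reviewer note: the new defaults validator above boils down to one comparison rule, namely that publisher-supplied values are diffed against the project-settings defaults, with lists compared sorted so that item order alone cannot fail the check. A minimal standalone sketch of that comparison, outside of pyblish (the function name and sample data are illustrative, not the plugin API):

```python
# Sketch of the default-value comparison used by the defaults validator:
# publisher values are diffed against project-settings defaults; lists are
# compared sorted, so reordered items do not raise false positives.
def find_non_default_values(attributes, settings):
    errors = []
    for key, value in attributes.items():
        default_value = settings[key]
        if isinstance(value, list):
            value = sorted(value)
            default_value = sorted(default_value)
        if value != default_value:
            errors.append(
                'Alembic Extract setting "{}" is not the default value:'
                "\nCurrent: {}\nDefault Value: {}".format(
                    key, value, default_value
                )
            )
    return errors


# 'eulerFilter' differs and is reported; the reordered 'attr' list passes.
print(find_non_default_values(
    {"eulerFilter": True, "attr": ["b", "a"]},
    {"eulerFilter": False, "attr": ["a", "b"]},
))
```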
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_rendersettings.py b/client/ayon_core/hosts/maya/plugins/publish/validate_rendersettings.py index 78a247b3f2..7badfdc027 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_rendersettings.py @@ -10,6 +10,7 @@ from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, PublishValidationError, + OptionalPyblishPluginMixin ) from ayon_core.hosts.maya.api import lib from ayon_core.hosts.maya.api.lib_rendersettings import RenderSettings @@ -37,7 +38,8 @@ def get_redshift_image_format_labels(): return mel.eval("{0}={0}".format(var)) -class ValidateRenderSettings(pyblish.api.InstancePlugin): +class ValidateRenderSettings(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validates the global render settings * File Name Prefix must start with: `` @@ -55,7 +57,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): * Frame Padding must be: * default: 4 - * Animation must be toggle on, in Render Settings - Common tab: + * Animation must be toggled on, in Render Settings - Common tab: * vray: Animation on standard of specific * arnold: Frame / Animation ext: Any choice without "(Single Frame)" * redshift: Animation toggled on @@ -67,10 +69,11 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): """ order = ValidateContentsOrder - label = "Render Settings" + label = "Validate Render Settings" hosts = ["maya"] families = ["renderlayer"] actions = [RepairAction] + optional = True ImagePrefixes = { 'mentalray': 'defaultRenderGlobals.imageFilePrefix', @@ -112,6 +115,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): DEFAULT_PREFIX = "//_" def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: diff --git a/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py b/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py index cf4a350c36..b07c7e9a70 100644 --- a/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py +++ b/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py @@ -1,87 +1,48 @@ -import json - from maya import cmds from ayon_core.pipeline.workfile.workfile_template_builder import ( - PlaceholderPlugin, - LoadPlaceholderItem, PlaceholderLoadMixin, + LoadPlaceholderItem ) from ayon_core.hosts.maya.api.lib import ( - read, - imprint, - get_reference_node + get_container_transforms, + get_node_parent, + get_node_index_under_parent +) +from ayon_core.hosts.maya.api.workfile_template_builder import ( + MayaPlaceholderPlugin, ) -from ayon_core.hosts.maya.api.workfile_template_builder import PLACEHOLDER_SET -class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): +class MayaPlaceholderLoadPlugin(MayaPlaceholderPlugin, PlaceholderLoadMixin): identifier = "maya.load" label = "Maya load" - def _collect_scene_placeholders(self): - # Cache placeholder data to shared data - placeholder_nodes = self.builder.get_shared_populate_data( - "placeholder_nodes" - ) - if placeholder_nodes is None: - attributes = cmds.ls("*.plugin_identifier", long=True) - placeholder_nodes = {} - for attribute in attributes: - node_name = attribute.rpartition(".")[0] - placeholder_nodes[node_name] = ( - self._parse_placeholder_node_data(node_name) - ) - - self.builder.set_shared_populate_data( - "placeholder_nodes", placeholder_nodes - ) - return placeholder_nodes - - def 
_parse_placeholder_node_data(self, node_name): - placeholder_data = read(node_name) - parent_name = ( - cmds.getAttr(node_name + ".parent", asString=True) - or node_name.rpartition("|")[0] - or "" - ) - if parent_name: - siblings = cmds.listRelatives(parent_name, children=True) - else: - siblings = cmds.ls(assemblies=True) - node_shortname = node_name.rpartition("|")[2] - current_index = cmds.getAttr(node_name + ".index", asString=True) - if current_index < 0: - current_index = siblings.index(node_shortname) - - placeholder_data.update({ - "parent": parent_name, - "index": current_index - }) - return placeholder_data + item_class = LoadPlaceholderItem def _create_placeholder_name(self, placeholder_data): - placeholder_name_parts = placeholder_data["builder_type"].split("_") - pos = 1 + # Split builder type: context_assets, linked_assets, all_assets + prefix, suffix = placeholder_data["builder_type"].split("_", 1) + parts = [prefix] + + # add family if any placeholder_product_type = placeholder_data.get("product_type") if placeholder_product_type is None: placeholder_product_type = placeholder_data.get("family") if placeholder_product_type: - placeholder_name_parts.insert(pos, placeholder_product_type) - pos += 1 + parts.append(placeholder_product_type) # add loader arguments if any loader_args = placeholder_data["loader_args"] if loader_args: - loader_args = json.loads(loader_args.replace('\'', '\"')) - values = [v for v in loader_args.values()] - for value in values: - placeholder_name_parts.insert(pos, value) - pos += 1 + loader_args = eval(loader_args) + for value in loader_args.values(): + parts.append(str(value)) - placeholder_name = "_".join(placeholder_name_parts) + parts.append(suffix) + placeholder_name = "_".join(parts) return placeholder_name.capitalize() @@ -104,68 +65,6 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): ) return loaded_representation_ids - def create_placeholder(self, placeholder_data): - selection = cmds.ls(selection=True) - if len(selection) > 1: - raise ValueError("More then one item are selected") - - parent = selection[0] if selection else None - - placeholder_data["plugin_identifier"] = self.identifier - - placeholder_name = self._create_placeholder_name(placeholder_data) - - placeholder = cmds.spaceLocator(name=placeholder_name)[0] - if parent: - placeholder = cmds.parent(placeholder, selection[0])[0] - - imprint(placeholder, placeholder_data) - - # Add helper attributes to keep placeholder info - cmds.addAttr( - placeholder, - longName="parent", - hidden=True, - dataType="string" - ) - cmds.addAttr( - placeholder, - longName="index", - hidden=True, - attributeType="short", - defaultValue=-1 - ) - - cmds.setAttr(placeholder + ".parent", "", type="string") - - def update_placeholder(self, placeholder_item, placeholder_data): - node_name = placeholder_item.scene_identifier - new_values = {} - for key, value in placeholder_data.items(): - placeholder_value = placeholder_item.data.get(key) - if value != placeholder_value: - new_values[key] = value - placeholder_item.data[key] = value - - for key in new_values.keys(): - cmds.deleteAttr(node_name + "." 
+ key) - - imprint(node_name, new_values) - - def collect_placeholders(self): - output = [] - scene_placeholders = self._collect_scene_placeholders() - for node_name, placeholder_data in scene_placeholders.items(): - if placeholder_data.get("plugin_identifier") != self.identifier: - continue - - # TODO do data validations and maybe upgrades if they are invalid - output.append( - LoadPlaceholderItem(node_name, placeholder_data, self) - ) - - return output - def populate_placeholder(self, placeholder): self.populate_load_placeholder(placeholder) @@ -176,25 +75,6 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): def get_placeholder_options(self, options=None): return self.get_load_plugin_options(options) - def post_placeholder_process(self, placeholder, failed): - """Cleanup placeholder after load of its corresponding representations. - - Args: - placeholder (PlaceholderItem): Item which was just used to load - representation. - failed (bool): Loading of representation failed. - """ - # Hide placeholder and add them to placeholder set - node = placeholder.scene_identifier - - cmds.sets(node, addElement=PLACEHOLDER_SET) - cmds.hide(node) - cmds.setAttr(node + ".hiddenInOutliner", True) - - def delete_placeholder(self, placeholder): - """Remove placeholder if building was successful""" - cmds.delete(placeholder.scene_identifier) - def load_succeed(self, placeholder, container): self._parent_in_hierarchy(placeholder, container) @@ -210,56 +90,43 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): if not container: return - roots = cmds.sets(container, q=True) or [] - ref_node = None - try: - ref_node = get_reference_node(roots) - except AssertionError as e: - self.log.info(e.args[0]) + # TODO: This currently returns only a single root but a loaded scene + # could technically load more than a single root + container_root = get_container_transforms(container, root=True) - nodes_to_parent = [] - for root in roots: - if ref_node: - ref_root = cmds.referenceQuery(root, nodes=True)[0] - ref_root = ( - cmds.listRelatives(ref_root, parent=True, path=True) or - [ref_root] - ) - nodes_to_parent.extend(ref_root) - continue - if root.endswith("_RN"): - # Backwards compatibility for hardcoded reference names. 
- refRoot = cmds.referenceQuery(root, n=True)[0] - refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot] - nodes_to_parent.extend(refRoot) - elif root not in cmds.listSets(allSets=True): - nodes_to_parent.append(root) + # Bugfix: The get_container_transforms does not recognize the load + # reference group currently + # TODO: Remove this when it does + parent = get_node_parent(container_root) + if parent: + container_root = parent + roots = [container_root] - elif not cmds.sets(root, q=True): - return + # Add the loaded roots to the holding sets if they exist + holding_sets = cmds.listSets(object=placeholder.scene_identifier) or [] + for holding_set in holding_sets: + cmds.sets(roots, forceElement=holding_set) - # Move loaded nodes to correct index in outliner hierarchy + # Parent the roots to the place of the placeholder locator and match + # its matrix placeholder_form = cmds.xform( placeholder.scene_identifier, - q=True, + query=True, matrix=True, worldSpace=True ) - scene_parent = cmds.listRelatives( - placeholder.scene_identifier, parent=True, fullPath=True - ) - for node in set(nodes_to_parent): - cmds.reorder(node, front=True) - cmds.reorder(node, relative=placeholder.data["index"]) - cmds.xform(node, matrix=placeholder_form, ws=True) - if scene_parent: - cmds.parent(node, scene_parent) - else: - if cmds.listRelatives(node, parent=True): - cmds.parent(node, world=True) + scene_parent = get_node_parent(placeholder.scene_identifier) + for node in set(roots): + cmds.xform(node, matrix=placeholder_form, worldSpace=True) - holding_sets = cmds.listSets(object=placeholder.scene_identifier) - if not holding_sets: - return - for holding_set in holding_sets: - cmds.sets(roots, forceElement=holding_set) + if scene_parent != get_node_parent(node): + if scene_parent: + node = cmds.parent(node, scene_parent)[0] + else: + node = cmds.parent(node, world=True)[0] + + # Move loaded nodes in index order next to their placeholder node + cmds.reorder(node, back=True) + index = get_node_index_under_parent(placeholder.scene_identifier) + cmds.reorder(node, front=True) + cmds.reorder(node, relative=index + 1) diff --git a/client/ayon_core/hosts/unreal/ue_workers.py b/client/ayon_core/hosts/unreal/ue_workers.py index e3f8729c2e..256c0557be 100644 --- a/client/ayon_core/hosts/unreal/ue_workers.py +++ b/client/ayon_core/hosts/unreal/ue_workers.py @@ -260,11 +260,11 @@ class UEProjectGenerationWorker(UEWorker): self.failed.emit(msg, return_code) raise RuntimeError(msg) - # ensure we have PySide2 installed in engine + # ensure we have PySide2/6 installed in engine self.progress.emit(0) self.stage_begin.emit( - (f"Checking PySide2 installation... {stage_count} " + (f"Checking Qt bindings installation... 
{stage_count} " f" out of {stage_count}")) python_path = None if platform.system().lower() == "windows": @@ -287,11 +287,30 @@ class UEProjectGenerationWorker(UEWorker): msg = f"Unreal Python not found at {python_path}" self.failed.emit(msg, 1) raise RuntimeError(msg) - pyside_cmd = [python_path.as_posix(), - "-m", - "pip", - "install", - "pyside2"] + + pyside_version = "PySide2" + ue_version = self.ue_version.split(".") + if int(ue_version[0]) == 5 and int(ue_version[1]) >= 4: + # Use PySide6 6.6.3 because 6.7.0 had a bug + # - 'QPushButton' can't be added to 'QBoxLayout' + pyside_version = "PySide6==6.6.3" + + site_packages_prefix = python_path.parent.as_posix() + + pyside_cmd = [ + python_path.as_posix(), + "-m", "pip", + "install", + "--ignore-installed", + pyside_version, + + ] + + if platform.system().lower() == "windows": + pyside_cmd += ["--target", site_packages_prefix] + + print(f"--- Installing {pyside_version} ...") + print(" ".join(pyside_cmd)) pyside_install = subprocess.Popen(pyside_cmd, stdout=subprocess.PIPE, @@ -306,8 +325,8 @@ class UEProjectGenerationWorker(UEWorker): return_code = pyside_install.wait() if return_code and return_code != 0: - msg = ("Failed to create the project! " - "The installation of PySide2 has failed!") + msg = (f"Failed to create the project! {return_code} " + f"The installation of {pyside_version} has failed!: {pyside_install}") self.failed.emit(msg, return_code) raise RuntimeError(msg) diff --git a/client/ayon_core/lib/__init__.py b/client/ayon_core/lib/__init__.py index 408262ca42..e436396c6c 100644 --- a/client/ayon_core/lib/__init__.py +++ b/client/ayon_core/lib/__init__.py @@ -27,6 +27,10 @@ from .local_settings import ( get_openpype_username, ) from .ayon_connection import initialize_ayon_connection +from .cache import ( + CacheItem, + NestedCacheItem, +) from .events import ( emit_event, register_event_callback @@ -157,6 +161,9 @@ __all__ = [ "initialize_ayon_connection", + "CacheItem", + "NestedCacheItem", + "emit_event", "register_event_callback", diff --git a/client/ayon_core/lib/cache.py b/client/ayon_core/lib/cache.py new file mode 100644 index 0000000000..dc83520f76 --- /dev/null +++ b/client/ayon_core/lib/cache.py @@ -0,0 +1,250 @@ +import time +import collections + +InitInfo = collections.namedtuple( + "InitInfo", + ["default_factory", "lifetime"] +) + + +def _default_factory_func(): + return None + + +class CacheItem: + """Simple cache item with lifetime and default factory for default value. + + Default factory should return default value that is used on init + and on reset. + + Args: + default_factory (Optional[callable]): Function that returns default + value used on init and on reset. + lifetime (Optional[int]): Lifetime of the cache data in seconds. + Default lifetime is 120 seconds. + + """ + def __init__(self, default_factory=None, lifetime=None): + if lifetime is None: + lifetime = 120 + self._lifetime = lifetime + self._last_update = None + if default_factory is None: + default_factory = _default_factory_func + self._default_factory = default_factory + self._data = default_factory() + + @property + def is_valid(self): + """Is cache valid to use. + + Return: + bool: True if cache is valid, False otherwise. + + """ + if self._last_update is None: + return False + + return (time.time() - self._last_update) < self._lifetime + + def set_lifetime(self, lifetime): + """Change lifetime of cache item. + + Args: + lifetime (int): Lifetime of the cache data in seconds. 
+ """ + + self._lifetime = lifetime + + def set_invalid(self): + """Set cache as invalid.""" + + self._last_update = None + + def reset(self): + """Set cache as invalid and reset data.""" + + self._last_update = None + self._data = self._default_factory() + + def get_data(self): + """Receive cached data. + + Returns: + Any: Any data that are cached. + + """ + return self._data + + def update_data(self, data): + """Update cache data. + + Args: + data (Any): Any data that are cached. + + """ + self._data = data + self._last_update = time.time() + + +class NestedCacheItem: + """Helper for cached items stored in nested structure. + + Example: + >>> cache = NestedCacheItem(levels=2, default_factory=lambda: 0) + >>> cache["a"]["b"].is_valid + False + >>> cache["a"]["b"].get_data() + 0 + >>> cache["a"]["b"] = 1 + >>> cache["a"]["b"].is_valid + True + >>> cache["a"]["b"].get_data() + 1 + >>> cache.reset() + >>> cache["a"]["b"].is_valid + False + + Args: + levels (int): Number of nested levels where read cache is stored. + default_factory (Optional[callable]): Function that returns default + value used on init and on reset. + lifetime (Optional[int]): Lifetime of the cache data in seconds. + Default value is based on default value of 'CacheItem'. + _init_info (Optional[InitInfo]): Private argument. Init info for + nested cache where created from parent item. + + """ + def __init__( + self, levels=1, default_factory=None, lifetime=None, _init_info=None + ): + if levels < 1: + raise ValueError("Nested levels must be greater than 0") + self._data_by_key = {} + if _init_info is None: + _init_info = InitInfo(default_factory, lifetime) + self._init_info = _init_info + self._levels = levels + + def __getitem__(self, key): + """Get cached data. + + Args: + key (str): Key of the cache item. + + Returns: + Union[NestedCacheItem, CacheItem]: Cache item. + + """ + cache = self._data_by_key.get(key) + if cache is None: + if self._levels > 1: + cache = NestedCacheItem( + levels=self._levels - 1, + _init_info=self._init_info + ) + else: + cache = CacheItem( + self._init_info.default_factory, + self._init_info.lifetime + ) + self._data_by_key[key] = cache + return cache + + def __setitem__(self, key, value): + """Update cached data. + + Args: + key (str): Key of the cache item. + value (Any): Any data that are cached. + + """ + if self._levels > 1: + raise AttributeError(( + "{} does not support '__setitem__'. Lower nested level by {}" + ).format(self.__class__.__name__, self._levels - 1)) + cache = self[key] + cache.update_data(value) + + def get(self, key): + """Get cached data. + + Args: + key (str): Key of the cache item. + + Returns: + Union[NestedCacheItem, CacheItem]: Cache item. + + """ + return self[key] + + def cached_count(self): + """Amount of cached items. + + Returns: + int: Amount of cached items. + + """ + return len(self._data_by_key) + + def clear_key(self, key): + """Clear cached item by key. + + Args: + key (str): Key of the cache item. + + """ + self._data_by_key.pop(key, None) + + def clear_invalid(self): + """Clear all invalid cache items. + + Note: + To clear all cache items use 'reset'. 
+ + """ + changed = {} + children_are_nested = self._levels > 1 + for key, cache in tuple(self._data_by_key.items()): + if children_are_nested: + output = cache.clear_invalid() + if output: + changed[key] = output + if not cache.cached_count(): + self._data_by_key.pop(key) + elif not cache.is_valid: + changed[key] = cache.get_data() + self._data_by_key.pop(key) + return changed + + def reset(self): + """Reset cache. + + Note: + To clear only invalid cache items use 'clear_invalid'. + + """ + self._data_by_key = {} + + def set_lifetime(self, lifetime): + """Change lifetime of all children cache items. + + Args: + lifetime (int): Lifetime of the cache data in seconds. + + """ + self._init_info.lifetime = lifetime + for cache in self._data_by_key.values(): + cache.set_lifetime(lifetime) + + @property + def is_valid(self): + """Raise reasonable error when called on wrong level. + + Raises: + AttributeError: If called on nested cache item. + + """ + raise AttributeError(( + "{} does not support 'is_valid'. Lower nested level by '{}'" + ).format(self.__class__.__name__, self._levels)) diff --git a/client/ayon_core/modules/deadline/__init__.py b/client/ayon_core/modules/deadline/__init__.py index 5631e501d8..683d8dbe4a 100644 --- a/client/ayon_core/modules/deadline/__init__.py +++ b/client/ayon_core/modules/deadline/__init__.py @@ -1,6 +1,8 @@ from .deadline_module import DeadlineModule +from .version import __version__ __all__ = ( "DeadlineModule", + "__version__" ) diff --git a/client/ayon_core/modules/deadline/abstract_submit_deadline.py b/client/ayon_core/modules/deadline/abstract_submit_deadline.py index 2e0518ae20..00e51100bc 100644 --- a/client/ayon_core/modules/deadline/abstract_submit_deadline.py +++ b/client/ayon_core/modules/deadline/abstract_submit_deadline.py @@ -49,6 +49,10 @@ def requests_post(*args, **kwargs): if 'verify' not in kwargs: kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa + + auth = kwargs.get("auth") + if auth: + kwargs["auth"] = tuple(auth) # explicit cast to tuple # add 10sec timeout before bailing out kwargs['timeout'] = 10 return requests.post(*args, **kwargs) @@ -70,6 +74,9 @@ def requests_get(*args, **kwargs): if 'verify' not in kwargs: kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa + auth = kwargs.get("auth") + if auth: + kwargs["auth"] = tuple(auth) # add 10sec timeout before bailing out kwargs['timeout'] = 10 return requests.get(*args, **kwargs) @@ -434,9 +441,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin, """Plugin entry point.""" self._instance = instance context = instance.context - self._deadline_url = context.data.get("defaultDeadline") - self._deadline_url = instance.data.get( - "deadlineUrl", self._deadline_url) + self._deadline_url = instance.data["deadline"]["url"] assert self._deadline_url, "Requires Deadline Webservice URL" @@ -460,7 +465,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin, self.plugin_info = self.get_plugin_info() self.aux_files = self.get_aux_files() - job_id = self.process_submission() + auth = instance.data["deadline"]["auth"] + job_id = self.process_submission(auth) self.log.info("Submitted job to Deadline: {}.".format(job_id)) # TODO: Find a way that's more generic and not render type specific @@ -473,10 +479,10 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin, job_info=render_job_info, plugin_info=render_plugin_info ) - render_job_id = self.submit(payload) + render_job_id = self.submit(payload, auth) 
self.log.info("Render job id: %s", render_job_id) - def process_submission(self): + def process_submission(self, auth=None): """Process data for submission. This takes Deadline JobInfo, PluginInfo, AuxFile, creates payload @@ -487,7 +493,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin, """ payload = self.assemble_payload() - return self.submit(payload) + return self.submit(payload, auth) @abstractmethod def get_job_info(self): @@ -577,7 +583,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin, "AuxFiles": aux_files or self.aux_files } - def submit(self, payload): + def submit(self, payload, auth): """Submit payload to Deadline API end-point. This takes payload in the form of JSON file and POST it to @@ -585,6 +591,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin, Args: payload (dict): dict to become json in deadline submission. + auth (tuple): (username, password) Returns: str: resulting Deadline job id. @@ -594,7 +601,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin, """ url = "{}/api/jobs".format(self._deadline_url) - response = requests_post(url, json=payload) + response = requests_post(url, json=payload, + auth=auth) if not response.ok: self.log.error("Submission failed!") self.log.error(response.status_code) diff --git a/client/ayon_core/modules/deadline/deadline_module.py b/client/ayon_core/modules/deadline/deadline_module.py index c0ba83477e..b1089bbfe2 100644 --- a/client/ayon_core/modules/deadline/deadline_module.py +++ b/client/ayon_core/modules/deadline/deadline_module.py @@ -19,23 +19,23 @@ class DeadlineModule(AYONAddon, IPluginPaths): def initialize(self, studio_settings): # This module is always enabled - deadline_urls = {} + deadline_servers_info = {} enabled = self.name in studio_settings if enabled: deadline_settings = studio_settings[self.name] - deadline_urls = { - url_item["name"]: url_item["value"] + deadline_servers_info = { + url_item["name"]: url_item for url_item in deadline_settings["deadline_urls"] } - if enabled and not deadline_urls: + if enabled and not deadline_servers_info: enabled = False self.log.warning(( "Deadline Webservice URLs are not specified. Disabling addon." )) self.enabled = enabled - self.deadline_urls = deadline_urls + self.deadline_servers_info = deadline_servers_info def get_plugin_paths(self): """Deadline plugin paths.""" @@ -45,13 +45,15 @@ class DeadlineModule(AYONAddon, IPluginPaths): } @staticmethod - def get_deadline_pools(webservice, log=None): + def get_deadline_pools(webservice, auth=None, log=None): """Get pools from Deadline. Args: webservice (str): Server url. - log (Logger) + auth (Optional[Tuple[str, str]]): Tuple containing username, + password + log (Optional[Logger]): Logger to log errors to, if provided. Returns: - list: Pools. + List[str]: Pools. Throws: RuntimeError: If deadline webservice is unreachable. 
@@ -63,7 +65,10 @@ class DeadlineModule(AYONAddon, IPluginPaths): argument = "{}/api/pools?NamesOnly=true".format(webservice) try: - response = requests_get(argument) + kwargs = {} + if auth: + kwargs["auth"] = auth + response = requests_get(argument, **kwargs) except requests.exceptions.ConnectionError as exc: msg = 'Cannot connect to DL web service {}'.format(webservice) log.error(msg) diff --git a/client/ayon_core/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/client/ayon_core/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py index ea4b7a213e..22022831a0 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py +++ b/client/ayon_core/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py @@ -13,17 +13,45 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): """Collect Deadline Webservice URL from instance.""" # Run before collect_render. - order = pyblish.api.CollectorOrder + 0.005 + order = pyblish.api.CollectorOrder + 0.225 label = "Deadline Webservice from the Instance" - families = ["rendering", "renderlayer"] - hosts = ["maya"] + targets = ["local"] + families = ["render", + "rendering", + "render.farm", + "renderFarm", + "renderlayer", + "maxrender", + "usdrender", + "redshift_rop", + "arnold_rop", + "mantra_rop", + "karma_rop", + "vray_rop", + "publish.hou", + "image"] # for Fusion def process(self, instance): - instance.data["deadlineUrl"] = self._collect_deadline_url(instance) - instance.data["deadlineUrl"] = \ - instance.data["deadlineUrl"].strip().rstrip("/") + if not instance.data.get("farm"): + self.log.debug("Should not be processed on farm, skipping.") + return + + if not instance.data.get("deadline"): + instance.data["deadline"] = {} + + # todo: separate logic should be removed, all hosts should use the + # same logic + host_name = instance.context.data["hostName"] + if host_name == "maya": + deadline_url = self._collect_deadline_url(instance) + else: + deadline_url = (instance.data.get("deadlineUrl") or # backwards + instance.data.get("deadline", {}).get("url")) + if deadline_url: + instance.data["deadline"]["url"] = deadline_url.strip().rstrip("/") + else: + instance.data["deadline"]["url"] = instance.context.data["deadline"]["defaultUrl"] # noqa self.log.debug( - "Using {} for submission.".format(instance.data["deadlineUrl"])) + "Using {} for submission".format(instance.data["deadline"]["url"])) def _collect_deadline_url(self, render_instance): # type: (pyblish.api.Instance) -> str @@ -49,13 +77,13 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): ["project_settings"] ["deadline"] ) - - default_server = render_instance.context.data["defaultDeadline"] + default_server_url = (render_instance.context.data["deadline"] + ["defaultUrl"]) # QUESTION How and where is this set? Should it be removed? instance_server = render_instance.data.get("deadlineServers") if not instance_server: self.log.debug("Using default server.") - return default_server + return default_server_url # Get instance server as string.
if isinstance(instance_server, int): @@ -66,7 +94,7 @@ default_servers = { url_item["name"]: url_item["value"] for url_item in deadline_settings["deadline_urls"] } project_servers = ( render_instance.context.data diff --git a/client/ayon_core/modules/deadline/plugins/publish/collect_default_deadline_server.py b/client/ayon_core/modules/deadline/plugins/publish/collect_default_deadline_server.py index b7ca227b01..9238e0ed95 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/collect_default_deadline_server.py +++ b/client/ayon_core/modules/deadline/plugins/publish/collect_default_deadline_server.py @@ -18,10 +18,9 @@ class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin): """ # Run before collect_deadline_server_instance. - order = pyblish.api.CollectorOrder + 0.0025 + order = pyblish.api.CollectorOrder + 0.200 label = "Default Deadline Webservice" - - pass_mongo_url = False + targets = ["local"] def process(self, context): try: @@ -33,15 +32,17 @@ class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin): deadline_settings = context.data["project_settings"]["deadline"] deadline_server_name = deadline_settings["deadline_server"] - deadline_webservice = None + dl_server_info = None if deadline_server_name: - deadline_webservice = deadline_module.deadline_urls.get( + dl_server_info = deadline_module.deadline_servers_info.get( deadline_server_name) - default_deadline_webservice = deadline_module.deadline_urls["default"] - deadline_webservice = ( - deadline_webservice - or default_deadline_webservice - ) + if dl_server_info: + deadline_url = dl_server_info["value"] + else: + default_dl_server_info = deadline_module.deadline_servers_info["default"] + deadline_url = default_dl_server_info["value"] - context.data["defaultDeadline"] = deadline_webservice.strip().rstrip("/") # noqa + context.data["deadline"] = {} + context.data["deadline"]["defaultUrl"] = ( + deadline_url.strip().rstrip("/")) diff --git a/client/ayon_core/modules/deadline/plugins/publish/collect_user_credentials.py b/client/ayon_core/modules/deadline/plugins/publish/collect_user_credentials.py new file mode 100644 index 0000000000..5d03523c89 --- /dev/null +++ b/client/ayon_core/modules/deadline/plugins/publish/collect_user_credentials.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +"""Collect user credentials + +Requires: + context -> project_settings + instance.data["deadline"]["url"] + +Provides: + instance.data["deadline"] -> require_authentication (bool) + instance.data["deadline"] -> auth (tuple (str, str)) - + (username, password) or None +""" +import pyblish.api + +from ayon_api import get_server_api_connection +from ayon_core.modules.deadline.deadline_module import DeadlineModule +from ayon_core.modules.deadline import __version__ + + +class CollectDeadlineUserCredentials(pyblish.api.InstancePlugin): + """Collects user name and password for artist if DL requires authentication + """ + order = pyblish.api.CollectorOrder + 0.250 + label = "Collect Deadline User Credentials" + + targets = ["local"] + hosts = ["aftereffects", + "blender", + "fusion", + "harmony", + "nuke", + "maya", + "max", + "houdini"] + + families = ["render", + "rendering", + "render.farm", + "renderFarm", + "renderlayer", + "maxrender", + "usdrender", + "redshift_rop", + "arnold_rop", + "mantra_rop", + "karma_rop", + "vray_rop", + "publish.hou"] + + def process(self, instance): + if not instance.data.get("farm"):
self.log.debug("Should not be processed on farm, skipping.") + return + + collected_deadline_url = instance.data["deadline"]["url"] + if not collected_deadline_url: + raise ValueError("Instance doesn't have '[deadline][url]'.") + context_data = instance.context.data + deadline_settings = context_data["project_settings"]["deadline"] + + deadline_server_name = None + # deadline url might be set directly from instance, need to find + # metadata for it + for deadline_info in deadline_settings["deadline_urls"]: + dl_settings_url = deadline_info["value"].strip().rstrip("/") + if dl_settings_url == collected_deadline_url: + deadline_server_name = deadline_info["name"] + break + + if not deadline_server_name: + raise ValueError(f"Collected {collected_deadline_url} doesn't " + "match any site configured in Studio Settings") + + instance.data["deadline"]["require_authentication"] = ( + deadline_info["require_authentication"] + ) + instance.data["deadline"]["auth"] = None + + if not deadline_info["require_authentication"]: + return + # TODO import 'get_addon_site_settings' when available + # in public 'ayon_api' + local_settings = get_server_api_connection().get_addon_site_settings( + DeadlineModule.name, __version__) + local_settings = local_settings["local_settings"] + for server_info in local_settings: + if deadline_server_name == server_info["server_name"]: + instance.data["deadline"]["auth"] = (server_info["username"], + server_info["password"]) diff --git a/client/ayon_core/modules/deadline/plugins/publish/help/validate_deadline_connection.xml b/client/ayon_core/modules/deadline/plugins/publish/help/validate_deadline_connection.xml new file mode 100644 index 0000000000..eec05df08a --- /dev/null +++ b/client/ayon_core/modules/deadline/plugins/publish/help/validate_deadline_connection.xml @@ -0,0 +1,17 @@ + + + + Deadline Authentication + +## Deadline authentication is required + +This project has set in Settings that Deadline requires authentication. + +### How to repair? + +Please go to Ayon Server > Site Settings and provide your Deadline username and password. +In some cases the password may be empty if Deadline is configured to allow that. Ask your administrator. 
+ + + + \ No newline at end of file diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py index ab342c1a9d..f5805beb5c 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py @@ -174,7 +174,8 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, instance.data["toBeRenderedOn"] = "deadline" payload = self.assemble_payload() - return self.submit(payload) + return self.submit(payload, + auth=instance.data["deadline"]["auth"]) def from_published_scene(self): """ diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_celaction_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_celaction_deadline.py index 1fae23c9b2..2220442dac 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -2,9 +2,10 @@ import os import re import json import getpass -import requests import pyblish.api +from openpype_modules.deadline.abstract_submit_deadline import requests_post + class CelactionSubmitDeadline(pyblish.api.InstancePlugin): """Submit CelAction2D scene to Deadline @@ -30,11 +31,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): context = instance.context - # get default deadline webservice url from deadline module - deadline_url = instance.context.data["defaultDeadline"] - # if custom one is set in instance, use that - if instance.data.get("deadlineUrl"): - deadline_url = instance.data.get("deadlineUrl") + deadline_url = instance.data["deadline"]["url"] assert deadline_url, "Requires Deadline Webservice URL" self.deadline_url = "{}/api/jobs".format(deadline_url) @@ -197,7 +194,8 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): self.log.debug("__ expectedFiles: `{}`".format( instance.data["expectedFiles"])) - response = requests.post(self.deadline_url, json=payload) + response = requests_post(self.deadline_url, json=payload, + auth=instance.data["deadline"]["auth"]) if not response.ok: self.log.error( diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py index e3a4cd8030..e9b93a47cd 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py @@ -2,17 +2,13 @@ import os import json import getpass -import requests - import pyblish.api +from openpype_modules.deadline.abstract_submit_deadline import requests_post from ayon_core.pipeline.publish import ( AYONPyblishPluginMixin ) -from ayon_core.lib import ( - BoolDef, - NumberDef, -) +from ayon_core.lib import NumberDef class FusionSubmitDeadline( @@ -64,11 +60,6 @@ class FusionSubmitDeadline( decimals=0, minimum=1, maximum=10 - ), - BoolDef( - "suspend_publish", - default=False, - label="Suspend publish" ) ] @@ -80,10 +71,6 @@ class FusionSubmitDeadline( attribute_values = self.get_attr_values_from_data( instance.data) - # add suspend_publish attributeValue to instance data - instance.data["suspend_publish"] = attribute_values[ - "suspend_publish"] - context = instance.context key = "__hasRun{}".format(self.__class__.__name__) @@ -94,11 +81,7 @@ class FusionSubmitDeadline( from
ayon_core.hosts.fusion.api.lib import get_frame_path - # get default deadline webservice url from deadline module - deadline_url = instance.context.data["defaultDeadline"] - # if custom one is set in instance, use that - if instance.data.get("deadlineUrl"): - deadline_url = instance.data.get("deadlineUrl") + deadline_url = instance.data["deadline"]["url"] assert deadline_url, "Requires Deadline Webservice URL" # Collect all saver instances in context that are to be rendered @@ -258,7 +241,8 @@ class FusionSubmitDeadline( # E.g. http://192.168.0.1:8082/api/jobs url = "{}/api/jobs".format(deadline_url) - response = requests.post(url, json=payload) + auth = instance.data["deadline"]["auth"] + response = requests_post(url, json=payload, auth=auth) if not response.ok: raise Exception(response.text) diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index 6952604293..597a3cfc55 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -10,7 +10,6 @@ from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo from ayon_core.lib import ( is_in_tests, - BoolDef, TextDef, NumberDef ) @@ -90,11 +89,6 @@ class HoudiniSubmitDeadline( @classmethod def get_attribute_defs(cls): return [ - BoolDef( - "suspend_publish", - default=False, - label="Suspend publish" - ), NumberDef( "priority", label="Priority", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py index cba05f6948..e9f6c382c5 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py @@ -187,11 +187,13 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, payload_data, project_settings) job_infos, plugin_infos = payload for job_info, plugin_info in zip(job_infos, plugin_infos): - self.submit(self.assemble_payload(job_info, plugin_info)) + self.submit(self.assemble_payload(job_info, plugin_info), + instance.data["deadline"]["auth"]) else: payload = self._use_published_name(payload_data, project_settings) job_info, plugin_info = payload - self.submit(self.assemble_payload(job_info, plugin_info)) + self.submit(self.assemble_payload(job_info, plugin_info), + instance.data["deadline"]["auth"]) def _use_published_name(self, data, project_settings): # Not all hosts can import these modules. 
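Note for review: the Deadline submit and validate plugins touched in this patch all consume the same per-instance structure that is filled in during collection. A minimal sketch of the expected shape (values are illustrative; "auth" stays None until matching credentials are found in the user's Site Settings):

    # Sketch only: shape of the collected Deadline info consumed as
    # instance.data["deadline"][...] by the plugins in this patch.
    instance.data["deadline"] = {
        # Webservice root collected from Studio Settings (no trailing slash)
        "url": "http://127.0.0.1:8082",
        # Mirrors 'require_authentication' of the matched server entry
        "require_authentication": False,
        # None, or a (username, password) tuple from the user's Site Settings
        "auth": None,
    }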
diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py index 0300b12104..250dc8b7ea 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -292,7 +292,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, return plugin_payload - def process_submission(self): + def process_submission(self, auth=None): from maya import cmds instance = self._instance @@ -332,7 +332,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, if "vrayscene" in instance.data["families"]: self.log.debug("Submitting V-Ray scene render..") vray_export_payload = self._get_vray_export_payload(payload_data) - export_job = self.submit(vray_export_payload) + export_job = self.submit(vray_export_payload, + instance.data["deadline"]["auth"]) payload = self._get_vray_render_payload(payload_data) @@ -351,7 +352,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, else: # Submit main render job job_info, plugin_info = payload - self.submit(self.assemble_payload(job_info, plugin_info)) + self.submit(self.assemble_payload(job_info, plugin_info), + instance.data["deadline"]["auth"]) def _tile_render(self, payload): """Submit as tile render per frame with dependent assembly jobs.""" @@ -451,7 +453,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, # Submit frame tile jobs frame_tile_job_id = {} for frame, tile_job_payload in frame_payloads.items(): - job_id = self.submit(tile_job_payload) + job_id = self.submit(tile_job_payload, + instance.data["deadline"]["auth"]) frame_tile_job_id[frame] = job_id # Define assembly payloads @@ -559,7 +562,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "submitting assembly job {} of {}".format(i + 1, num_assemblies) ) - assembly_job_id = self.submit(payload) + assembly_job_id = self.submit(payload, + instance.data["deadline"]["auth"]) assembly_job_ids.append(assembly_job_id) instance.data["assemblySubmissionJobs"] = assembly_job_ids diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py index d70cb75bf3..ef744ae1e1 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -4,9 +4,9 @@ import json import getpass from datetime import datetime -import requests import pyblish.api +from openpype_modules.deadline.abstract_submit_deadline import requests_post from ayon_core.pipeline.publish import ( AYONPyblishPluginMixin ) @@ -76,11 +76,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, default=cls.use_gpu, label="Use GPU" ), - BoolDef( - "suspend_publish", - default=False, - label="Suspend publish" - ), BoolDef( "workfile_dependency", default=cls.workfile_dependency, @@ -100,20 +95,12 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, instance.data["attributeValues"] = self.get_attr_values_from_data( instance.data) - # add suspend_publish attributeValue to instance data - instance.data["suspend_publish"] = instance.data["attributeValues"][ - "suspend_publish"] - families = instance.data["families"] node = instance.data["transientData"]["node"] context = instance.context - # get default deadline webservice url from deadline 
module - deadline_url = instance.context.data["defaultDeadline"] - # if custom one is set in instance, use that - if instance.data.get("deadlineUrl"): - deadline_url = instance.data.get("deadlineUrl") + deadline_url = instance.data["deadline"]["url"] assert deadline_url, "Requires Deadline Webservice URL" self.deadline_url = "{}/api/jobs".format(deadline_url) @@ -436,7 +423,9 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, self.log.debug("__ expectedFiles: `{}`".format( instance.data["expectedFiles"])) - response = requests.post(self.deadline_url, json=payload, timeout=10) + auth = instance.data["deadline"]["auth"] + response = requests_post(self.deadline_url, json=payload, timeout=10, + auth=auth) if not response.ok: raise Exception(response.text) diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py index 4e4657d886..ce15eda9a0 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py @@ -5,10 +5,10 @@ import json import re from copy import deepcopy -import requests import ayon_api import pyblish.api +from openpype_modules.deadline.abstract_submit_deadline import requests_post from ayon_core.pipeline import publish from ayon_core.lib import EnumDef, is_in_tests from ayon_core.pipeline.version_start import get_versioning_start @@ -147,9 +147,6 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, instance_settings = self.get_attr_values_from_data(instance.data) initial_status = instance_settings.get("publishJobState", "Active") - # TODO: Remove this backwards compatibility of `suspend_publish` - if instance.data.get("suspend_publish"): - initial_status = "Suspended" args = [ "--headless", @@ -212,7 +209,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, self.log.debug("Submitting Deadline publish job ...") url = "{}/api/jobs".format(self.deadline_url) - response = requests.post(url, json=payload, timeout=10) + auth = instance.data["deadline"]["auth"] + response = requests_post(url, json=payload, timeout=10, + auth=auth) if not response.ok: raise Exception(response.text) @@ -344,11 +343,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, deadline_publish_job_id = None if submission_type == "deadline": - # get default deadline webservice url from deadline module - self.deadline_url = instance.context.data["defaultDeadline"] - # if custom one is set in instance, use that - if instance.data.get("deadlineUrl"): - self.deadline_url = instance.data.get("deadlineUrl") + self.deadline_url = instance.data["deadline"]["url"] assert self.deadline_url, "Requires Deadline Webservice URL" deadline_publish_job_id = \ @@ -356,7 +351,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, # Inject deadline url to instances. 
for inst in instances: - inst["deadlineUrl"] = self.deadline_url + inst["deadline"] = instance.data["deadline"] # publish job file publish_job = { diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py index 8def9cc63c..06dd62e18b 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py @@ -5,11 +5,11 @@ import json import re from copy import deepcopy -import requests import clique import ayon_api import pyblish.api +from openpype_modules.deadline.abstract_submit_deadline import requests_post from ayon_core.pipeline import publish from ayon_core.lib import EnumDef, is_in_tests from ayon_core.pipeline.version_start import get_versioning_start @@ -88,9 +88,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, hosts = ["fusion", "max", "maya", "nuke", "houdini", "celaction", "aftereffects", "harmony", "blender"] - families = ["render.farm", "render.frames_farm", - "prerender.farm", "prerender.frames_farm", - "renderlayer", "imagesequence", + families = ["render", "render.farm", "render.frames_farm", + "prerender", "prerender.farm", "prerender.frames_farm", + "renderlayer", "imagesequence", "image", "vrayscene", "maxrender", "arnold_rop", "mantra_rop", "karma_rop", "vray_rop", @@ -224,9 +224,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, instance_settings = self.get_attr_values_from_data(instance.data) initial_status = instance_settings.get("publishJobState", "Active") - # TODO: Remove this backwards compatibility of `suspend_publish` - if instance.data.get("suspend_publish"): - initial_status = "Suspended" args = [ "--headless", @@ -306,7 +303,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, self.log.debug("Submitting Deadline publish job ...") url = "{}/api/jobs".format(self.deadline_url) - response = requests.post(url, json=payload, timeout=10) + auth = instance.data["deadline"]["auth"] + response = requests_post(url, json=payload, timeout=10, + auth=auth) if not response.ok: raise Exception(response.text) @@ -314,7 +313,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, return deadline_publish_job_id - def process(self, instance): # type: (pyblish.api.Instance) -> None """Process plugin. @@ -461,18 +459,17 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, } # get default deadline webservice url from deadline module - self.deadline_url = instance.context.data["defaultDeadline"] - # if custom one is set in instance, use that - if instance.data.get("deadlineUrl"): - self.deadline_url = instance.data.get("deadlineUrl") + self.deadline_url = instance.data["deadline"]["url"] assert self.deadline_url, "Requires Deadline Webservice URL" deadline_publish_job_id = \ self._submit_deadline_post_job(instance, render_job, instances) - # Inject deadline url to instances.
+ # Inject deadline info to instances so they can query Deadline for job id and overrides for inst in instances: - inst["deadlineUrl"] = self.deadline_url + inst["deadline"] = instance.data["deadline"] # publish job file publish_job = { diff --git a/client/ayon_core/modules/deadline/plugins/publish/validate_deadline_connection.py b/client/ayon_core/modules/deadline/plugins/publish/validate_deadline_connection.py index a7b300beff..8fffd47786 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/validate_deadline_connection.py +++ b/client/ayon_core/modules/deadline/plugins/publish/validate_deadline_connection.py @@ -1,5 +1,7 @@ import pyblish.api +from ayon_core.pipeline import PublishXmlValidationError + from openpype_modules.deadline.abstract_submit_deadline import requests_get @@ -8,27 +10,42 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin): label = "Validate Deadline Web Service" order = pyblish.api.ValidatorOrder - hosts = ["maya", "nuke"] - families = ["renderlayer", "render"] + hosts = ["maya", "nuke", "aftereffects", "harmony", "fusion"] + families = ["renderlayer", "render", "render.farm"] # cache responses = {} def process(self, instance): - # get default deadline webservice url from deadline module - deadline_url = instance.context.data["defaultDeadline"] - # if custom one is set in instance, use that - if instance.data.get("deadlineUrl"): - deadline_url = instance.data.get("deadlineUrl") - self.log.debug( - "We have deadline URL on instance {}".format(deadline_url) - ) + if not instance.data.get("farm"): + self.log.debug("Should not be processed on farm, skipping.") + return + + deadline_url = instance.data["deadline"]["url"] assert deadline_url, "Requires Deadline Webservice URL" + kwargs = {} + if instance.data["deadline"]["require_authentication"]: + auth = instance.data["deadline"]["auth"] + kwargs["auth"] = auth + + if not auth or not auth[0]: + raise PublishXmlValidationError( + self, + "Deadline requires authentication. " + "At least username is required to be set in " + "Site Settings.") + if deadline_url not in self.responses: + self.responses[deadline_url] = requests_get(deadline_url, **kwargs) response = self.responses[deadline_url] + if response.status_code == 401: + raise PublishXmlValidationError( + self, + "Deadline requires authentication. " + "Provided credentials are not working. "
" + "Please change them in Site Settings") assert response.ok, "Response must be ok" assert response.text.startswith("Deadline Web Service "), ( "Web service did not respond with 'Deadline Web Service'" diff --git a/client/ayon_core/modules/deadline/plugins/publish/validate_deadline_pools.py b/client/ayon_core/modules/deadline/plugins/publish/validate_deadline_pools.py index 2feb044cf1..5094b3deaf 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/validate_deadline_pools.py +++ b/client/ayon_core/modules/deadline/plugins/publish/validate_deadline_pools.py @@ -37,8 +37,9 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin, self.log.debug("Skipping local instance.") return - deadline_url = self.get_deadline_url(instance) - pools = self.get_pools(deadline_url) + deadline_url = instance.data["deadline"]["url"] + pools = self.get_pools(deadline_url, + instance.data["deadline"].get("auth")) invalid_pools = {} primary_pool = instance.data.get("primaryPool") @@ -61,22 +62,18 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin, formatting_data={"pools_str": ", ".join(pools)} ) - def get_deadline_url(self, instance): - # get default deadline webservice url from deadline module - deadline_url = instance.context.data["defaultDeadline"] - if instance.data.get("deadlineUrl"): - # if custom one is set in instance, use that - deadline_url = instance.data.get("deadlineUrl") - return deadline_url - - def get_pools(self, deadline_url): + def get_pools(self, deadline_url, auth): if deadline_url not in self.pools_per_url: self.log.debug( "Querying available pools for Deadline url: {}".format( deadline_url) ) pools = DeadlineModule.get_deadline_pools(deadline_url, + auth=auth, log=self.log) + # some DL return "none" as a pool name + if not "none" in pools: + pools.append("none") self.log.info("Available pools: {}".format(pools)) self.pools_per_url[deadline_url] = pools diff --git a/client/ayon_core/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py b/client/ayon_core/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py index 6263526d5c..83e867408c 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py +++ b/client/ayon_core/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py @@ -199,16 +199,16 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin): (dict): Job info from Deadline """ - # get default deadline webservice url from deadline module - deadline_url = instance.context.data["defaultDeadline"] - # if custom one is set in instance, use that - if instance.data.get("deadlineUrl"): - deadline_url = instance.data.get("deadlineUrl") + deadline_url = instance.data["deadline"]["url"] assert deadline_url, "Requires Deadline Webservice URL" url = "{}/api/jobs?JobID={}".format(deadline_url, job_id) try: - response = requests_get(url) + kwargs = {} + auth = instance.data["deadline"]["auth"] + if auth: + kwargs["auth"] = auth + response = requests_get(url, **kwargs) except requests.exceptions.ConnectionError: self.log.error("Deadline is not accessible at " "{}".format(deadline_url)) diff --git a/client/ayon_core/modules/deadline/version.py b/client/ayon_core/modules/deadline/version.py new file mode 100644 index 0000000000..569b1212f7 --- /dev/null +++ b/client/ayon_core/modules/deadline/version.py @@ -0,0 +1 @@ +__version__ = "0.1.10" diff --git a/client/ayon_core/pipeline/anatomy/anatomy.py b/client/ayon_core/pipeline/anatomy/anatomy.py index 2aa8eeddbc..98bbaa9bdc 100644 --- 
a/client/ayon_core/pipeline/anatomy/anatomy.py +++ b/client/ayon_core/pipeline/anatomy/anatomy.py @@ -3,11 +3,16 @@ import re import copy import platform import collections -import time import ayon_api -from ayon_core.lib import Logger, get_local_site_id, StringTemplate +from ayon_core.lib import ( + Logger, + get_local_site_id, + StringTemplate, + CacheItem, + NestedCacheItem, +) from ayon_core.addon import AddonsManager from .exceptions import RootCombinationError, ProjectNotSet @@ -397,62 +402,11 @@ class BaseAnatomy(object): ) -class CacheItem: - """Helper to cache data. - - Helper does not handle refresh of data and does not mark data as outdated. - Who uses the object should check of outdated state on his own will. - """ - - default_lifetime = 10 - - def __init__(self, lifetime=None): - self._data = None - self._cached = None - self._lifetime = lifetime or self.default_lifetime - - @property - def data(self): - """Cached data/object. - - Returns: - Any: Whatever was cached. - """ - - return self._data - - @property - def is_outdated(self): - """Item has outdated cache. - - Lifetime of cache item expired or was not yet set. - - Returns: - bool: Item is outdated. - """ - - if self._cached is None: - return True - return (time.time() - self._cached) > self._lifetime - - def update_data(self, data): - """Update cache of data. - - Args: - data (Any): Data to cache. - """ - - self._data = data - self._cached = time.time() - - class Anatomy(BaseAnatomy): - _sitesync_addon_cache = CacheItem() - _project_cache = collections.defaultdict(CacheItem) - _default_site_id_cache = collections.defaultdict(CacheItem) - _root_overrides_cache = collections.defaultdict( - lambda: collections.defaultdict(CacheItem) - ) + _project_cache = NestedCacheItem(lifetime=10) + _sitesync_addon_cache = CacheItem(lifetime=60) + _default_site_id_cache = NestedCacheItem(lifetime=60) + _root_overrides_cache = NestedCacheItem(2, lifetime=60) def __init__( self, project_name=None, site_name=None, project_entity=None @@ -477,18 +431,18 @@ class Anatomy(BaseAnatomy): @classmethod def get_project_entity_from_cache(cls, project_name): project_cache = cls._project_cache[project_name] - if project_cache.is_outdated: + if not project_cache.is_valid: project_cache.update_data(ayon_api.get_project(project_name)) - return copy.deepcopy(project_cache.data) + return copy.deepcopy(project_cache.get_data()) @classmethod def get_sitesync_addon(cls): - if cls._sitesync_addon_cache.is_outdated: + if not cls._sitesync_addon_cache.is_valid: manager = AddonsManager() cls._sitesync_addon_cache.update_data( manager.get_enabled_addon("sitesync") ) - return cls._sitesync_addon_cache.data + return cls._sitesync_addon_cache.get_data() @classmethod def _get_studio_roots_overrides(cls, project_name): @@ -533,14 +487,14 @@ class Anatomy(BaseAnatomy): elif not site_name: # Use sync server to receive active site name project_cache = cls._default_site_id_cache[project_name] - if project_cache.is_outdated: + if not project_cache.is_valid: project_cache.update_data( sitesync_addon.get_active_site_type(project_name) ) - site_name = project_cache.data + site_name = project_cache.get_data() site_cache = cls._root_overrides_cache[project_name][site_name] - if site_cache.is_outdated: + if not site_cache.is_valid: if site_name == "studio": # Handle studio root overrides without sync server # - studio root overrides can be done even without sync server @@ -553,4 +507,4 @@ class Anatomy(BaseAnatomy): project_name, site_name ) 
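+ # Cache the fetched overrides so repeated Anatomy constructions reuse them within the cache lifetime.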
site_cache.update_data(roots_overrides) - return site_cache.data + return site_cache.get_data() diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index b8618738fb..dd005d250c 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -1987,12 +1987,12 @@ class CreateContext: "Folder '{}' was not found".format(folder_path) ) - task_name = None if task_entity is None: - task_name = self.get_current_task_name() - task_entity = ayon_api.get_task_by_name( - project_name, folder_entity["id"], task_name - ) + current_task_name = self.get_current_task_name() + if current_task_name: + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], current_task_name + ) if pre_create_data is None: pre_create_data = {} @@ -2018,7 +2018,7 @@ class CreateContext: instance_data = { "folderPath": folder_entity["path"], - "task": task_name, + "task": task_entity["name"] if task_entity else None, "productType": creator.product_type, "variant": variant } diff --git a/client/ayon_core/pipeline/farm/pyblish_functions.py b/client/ayon_core/pipeline/farm/pyblish_functions.py index eb6f8569d9..72deee185e 100644 --- a/client/ayon_core/pipeline/farm/pyblish_functions.py +++ b/client/ayon_core/pipeline/farm/pyblish_functions.py @@ -225,6 +225,7 @@ def create_skeleton_instance( instance_skeleton_data = { "productType": product_type, "productName": data["productName"], + "task": data["task"], "families": families, "folderPath": data["folderPath"], "frameStart": time_data.start, diff --git a/client/ayon_core/pipeline/publish/abstract_collect_render.py b/client/ayon_core/pipeline/publish/abstract_collect_render.py index c50dc16380..17cab876b6 100644 --- a/client/ayon_core/pipeline/publish/abstract_collect_render.py +++ b/client/ayon_core/pipeline/publish/abstract_collect_render.py @@ -80,6 +80,7 @@ class RenderInstance(object): anatomyData = attr.ib(default=None) outputDir = attr.ib(default=None) context = attr.ib(default=None) + deadline = attr.ib(default=None) # The source instance the data of this render instance should merge into source_instance = attr.ib(default=None, type=pyblish.api.Instance) @@ -215,13 +216,12 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): # add additional data data = self.add_additional_data(data) - render_instance_dict = attr.asdict(render_instance) - # Merge into source instance if provided, otherwise create instance - instance = render_instance_dict.pop("source_instance", None) + instance = render_instance.source_instance if instance is None: instance = context.create_instance(render_instance.name) + render_instance_dict = attr.asdict(render_instance) instance.data.update(render_instance_dict) instance.data.update(data) diff --git a/client/ayon_core/plugins/publish/integrate.py b/client/ayon_core/plugins/publish/integrate.py index 764168edd3..9ae96e1a20 100644 --- a/client/ayon_core/plugins/publish/integrate.py +++ b/client/ayon_core/plugins/publish/integrate.py @@ -42,7 +42,7 @@ def prepare_changes(old_entity, new_entity): Returns: dict[str, Any]: Changes that have new entity. 
- + """ changes = {} for key in set(new_entity.keys()): @@ -121,6 +121,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "setdress", "layout", "ass", + "assProxy", "vdbcache", "scene", "vrayproxy", diff --git a/client/ayon_core/resources/app_icons/3de4.png b/client/ayon_core/resources/app_icons/3de4.png new file mode 100644 index 0000000000..bd0fe40d37 Binary files /dev/null and b/client/ayon_core/resources/app_icons/3de4.png differ diff --git a/client/ayon_core/tools/common_models/cache.py b/client/ayon_core/tools/common_models/cache.py index 221a14160c..59b727728f 100644 --- a/client/ayon_core/tools/common_models/cache.py +++ b/client/ayon_core/tools/common_models/cache.py @@ -1,239 +1,31 @@ -import time -import collections +import warnings -InitInfo = collections.namedtuple( - "InitInfo", - ["default_factory", "lifetime"] +from ayon_core.lib import CacheItem as _CacheItem +from ayon_core.lib import NestedCacheItem as _NestedCacheItem + + +# Cache classes were moved to `ayon_core.lib.cache` +class CacheItem(_CacheItem): + def __init__(self, *args, **kwargs): + warnings.warn( + "Used 'CacheItem' from deprecated location " + "'ayon_core.tools.common_models', use 'ayon_core.lib' instead.", + DeprecationWarning, + ) + super().__init__(*args, **kwargs) + + +class NestedCacheItem(_NestedCacheItem): + def __init__(self, *args, **kwargs): + warnings.warn( + "Used 'NestedCacheItem' from deprecated location " + "'ayon_core.tools.common_models', use 'ayon_core.lib' instead.", + DeprecationWarning, + ) + super().__init__(*args, **kwargs) + + +__all__ = ( + "CacheItem", + "NestedCacheItem", ) - - -def _default_factory_func(): - return None - - -class CacheItem: - """Simple cache item with lifetime and default value. - - Args: - default_factory (Optional[callable]): Function that returns default - value used on init and on reset. - lifetime (Optional[int]): Lifetime of the cache data in seconds. - """ - - def __init__(self, default_factory=None, lifetime=None): - if lifetime is None: - lifetime = 120 - self._lifetime = lifetime - self._last_update = None - if default_factory is None: - default_factory = _default_factory_func - self._default_factory = default_factory - self._data = default_factory() - - @property - def is_valid(self): - """Is cache valid to use. - - Return: - bool: True if cache is valid, False otherwise. - """ - - if self._last_update is None: - return False - - return (time.time() - self._last_update) < self._lifetime - - def set_lifetime(self, lifetime): - """Change lifetime of cache item. - - Args: - lifetime (int): Lifetime of the cache data in seconds. - """ - - self._lifetime = lifetime - - def set_invalid(self): - """Set cache as invalid.""" - - self._last_update = None - - def reset(self): - """Set cache as invalid and reset data.""" - - self._last_update = None - self._data = self._default_factory() - - def get_data(self): - """Receive cached data. - - Returns: - Any: Any data that are cached. - """ - - return self._data - - def update_data(self, data): - self._data = data - self._last_update = time.time() - - -class NestedCacheItem: - """Helper for cached items stored in nested structure. 
- - Example: - >>> cache = NestedCacheItem(levels=2, default_factory=lambda: 0) - >>> cache["a"]["b"].is_valid - False - >>> cache["a"]["b"].get_data() - 0 - >>> cache["a"]["b"] = 1 - >>> cache["a"]["b"].is_valid - True - >>> cache["a"]["b"].get_data() - 1 - >>> cache.reset() - >>> cache["a"]["b"].is_valid - False - - Args: - levels (int): Number of nested levels where read cache is stored. - default_factory (Optional[callable]): Function that returns default - value used on init and on reset. - lifetime (Optional[int]): Lifetime of the cache data in seconds. - _init_info (Optional[InitInfo]): Private argument. Init info for - nested cache where created from parent item. - """ - - def __init__( - self, levels=1, default_factory=None, lifetime=None, _init_info=None - ): - if levels < 1: - raise ValueError("Nested levels must be greater than 0") - self._data_by_key = {} - if _init_info is None: - _init_info = InitInfo(default_factory, lifetime) - self._init_info = _init_info - self._levels = levels - - def __getitem__(self, key): - """Get cached data. - - Args: - key (str): Key of the cache item. - - Returns: - Union[NestedCacheItem, CacheItem]: Cache item. - """ - - cache = self._data_by_key.get(key) - if cache is None: - if self._levels > 1: - cache = NestedCacheItem( - levels=self._levels - 1, - _init_info=self._init_info - ) - else: - cache = CacheItem( - self._init_info.default_factory, - self._init_info.lifetime - ) - self._data_by_key[key] = cache - return cache - - def __setitem__(self, key, value): - """Update cached data. - - Args: - key (str): Key of the cache item. - value (Any): Any data that are cached. - """ - - if self._levels > 1: - raise AttributeError(( - "{} does not support '__setitem__'. Lower nested level by {}" - ).format(self.__class__.__name__, self._levels - 1)) - cache = self[key] - cache.update_data(value) - - def get(self, key): - """Get cached data. - - Args: - key (str): Key of the cache item. - - Returns: - Union[NestedCacheItem, CacheItem]: Cache item. - """ - - return self[key] - - def cached_count(self): - """Amount of cached items. - - Returns: - int: Amount of cached items. - """ - - return len(self._data_by_key) - - def clear_key(self, key): - """Clear cached item by key. - - Args: - key (str): Key of the cache item. - """ - - self._data_by_key.pop(key, None) - - def clear_invalid(self): - """Clear all invalid cache items. - - Note: - To clear all cache items use 'reset'. - """ - - changed = {} - children_are_nested = self._levels > 1 - for key, cache in tuple(self._data_by_key.items()): - if children_are_nested: - output = cache.clear_invalid() - if output: - changed[key] = output - if not cache.cached_count(): - self._data_by_key.pop(key) - elif not cache.is_valid: - changed[key] = cache.get_data() - self._data_by_key.pop(key) - return changed - - def reset(self): - """Reset cache. - - Note: - To clear only invalid cache items use 'clear_invalid'. - """ - - self._data_by_key = {} - - def set_lifetime(self, lifetime): - """Change lifetime of all children cache items. - - Args: - lifetime (int): Lifetime of the cache data in seconds. - """ - - self._init_info.lifetime = lifetime - for cache in self._data_by_key.values(): - cache.set_lifetime(lifetime) - - @property - def is_valid(self): - """Raise reasonable error when called on wront level. - - Raises: - AttributeError: If called on nested cache item. - """ - - raise AttributeError(( - "{} does not support 'is_valid'. 
Lower nested level by '{}'" - ).format(self.__class__.__name__, self._levels)) diff --git a/client/ayon_core/tools/common_models/hierarchy.py b/client/ayon_core/tools/common_models/hierarchy.py index d8b28f020d..78b8a7f492 100644 --- a/client/ayon_core/tools/common_models/hierarchy.py +++ b/client/ayon_core/tools/common_models/hierarchy.py @@ -6,8 +6,7 @@ import ayon_api import six from ayon_core.style import get_default_entity_icon_color - -from .cache import NestedCacheItem +from ayon_core.lib import NestedCacheItem HIERARCHY_MODEL_SENDER = "hierarchy.model" diff --git a/client/ayon_core/tools/common_models/projects.py b/client/ayon_core/tools/common_models/projects.py index e30561000e..19a38bee21 100644 --- a/client/ayon_core/tools/common_models/projects.py +++ b/client/ayon_core/tools/common_models/projects.py @@ -5,8 +5,7 @@ import ayon_api import six from ayon_core.style import get_default_entity_icon_color - -from .cache import CacheItem +from ayon_core.lib import CacheItem PROJECTS_MODEL_SENDER = "projects.model" diff --git a/client/ayon_core/tools/common_models/thumbnails.py b/client/ayon_core/tools/common_models/thumbnails.py index 1c3aadc49f..6d14783b9a 100644 --- a/client/ayon_core/tools/common_models/thumbnails.py +++ b/client/ayon_core/tools/common_models/thumbnails.py @@ -5,7 +5,7 @@ import collections import ayon_api import appdirs -from .cache import NestedCacheItem +from ayon_core.lib import NestedCacheItem FileInfo = collections.namedtuple( "FileInfo", diff --git a/client/ayon_core/tools/loader/models/actions.py b/client/ayon_core/tools/loader/models/actions.py index ad2993af50..cfe91cadab 100644 --- a/client/ayon_core/tools/loader/models/actions.py +++ b/client/ayon_core/tools/loader/models/actions.py @@ -6,6 +6,7 @@ import uuid import ayon_api +from ayon_core.lib import NestedCacheItem from ayon_core.pipeline.load import ( discover_loader_plugins, ProductLoaderPlugin, @@ -17,7 +18,6 @@ from ayon_core.pipeline.load import ( LoadError, IncompatibleLoaderError, ) -from ayon_core.tools.common_models import NestedCacheItem from ayon_core.tools.loader.abstract import ActionItem ACTIONS_MODEL_SENDER = "actions.model" diff --git a/client/ayon_core/tools/loader/models/products.py b/client/ayon_core/tools/loader/models/products.py index 812446a012..a3bbc30a09 100644 --- a/client/ayon_core/tools/loader/models/products.py +++ b/client/ayon_core/tools/loader/models/products.py @@ -5,8 +5,8 @@ import arrow import ayon_api from ayon_api.operations import OperationsSession +from ayon_core.lib import NestedCacheItem from ayon_core.style import get_default_entity_icon_color -from ayon_core.tools.common_models import NestedCacheItem from ayon_core.tools.loader.abstract import ( ProductTypeItem, ProductItem, diff --git a/client/ayon_core/tools/loader/models/sitesync.py b/client/ayon_core/tools/loader/models/sitesync.py index 987510905b..02504c2ad3 100644 --- a/client/ayon_core/tools/loader/models/sitesync.py +++ b/client/ayon_core/tools/loader/models/sitesync.py @@ -2,9 +2,8 @@ import collections from ayon_api import get_representations, get_versions_links -from ayon_core.lib import Logger +from ayon_core.lib import Logger, NestedCacheItem from ayon_core.addon import AddonsManager -from ayon_core.tools.common_models import NestedCacheItem from ayon_core.tools.loader.abstract import ActionItem DOWNLOAD_IDENTIFIER = "sitesync.download" diff --git a/pyproject.toml b/pyproject.toml index c1f6ddfb0b..4726bef41a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -106,7 +106,7 @@ line-ending = 
"auto" [tool.codespell] # Ignore words that are not in the dictionary. -ignore-words-list = "ayon,ynput,parms,parm,hda,developpement" +ignore-words-list = "ayon,ynput,parms,parm,hda,developpement,ue" skip = "./.*,./package/*,*/vendor/*,*/unreal/integration/*,*/aftereffects/api/extension/js/libs/*" count = true diff --git a/server_addon/applications/client/ayon_applications/addon.py b/server_addon/applications/client/ayon_applications/addon.py index 0f1b68af0e..a8eaa46cad 100644 --- a/server_addon/applications/client/ayon_applications/addon.py +++ b/server_addon/applications/client/ayon_applications/addon.py @@ -110,6 +110,26 @@ class ApplicationsAddon(AYONAddon, IPluginPaths): ] } + def launch_application( + self, app_name, project_name, folder_path, task_name + ): + """Launch application. + + Args: + app_name (str): Full application name e.g. 'maya/2024'. + project_name (str): Project name. + folder_path (str): Folder path. + task_name (str): Task name. + + """ + app_manager = self.get_applications_manager() + return app_manager.launch( + app_name, + project_name=project_name, + folder_path=folder_path, + task_name=task_name, + ) + # --- CLI --- def cli(self, addon_click_group): main_group = click_wrap.group( @@ -134,6 +154,17 @@ class ApplicationsAddon(AYONAddon, IPluginPaths): default=None ) ) + ( + main_group.command( + self._cli_launch_applications, + name="launch", + help="Launch application" + ) + .option("--app", required=True, help="Application name") + .option("--project", required=True, help="Project name") + .option("--folder", required=True, help="Folder path") + .option("--task", required=True, help="Task name") + ) # Convert main command to click object and add it to parent group addon_click_group.add_command( main_group.to_click_obj() @@ -171,3 +202,15 @@ class ApplicationsAddon(AYONAddon, IPluginPaths): with open(output_json_path, "w") as file_stream: json.dump(env, file_stream, indent=4) + + def _cli_launch_applications(self, project, folder, task, app): + """Launch application. + + Args: + project (str): Project name. + folder (str): Folder path. + task (str): Task name. + app (str): Full application name e.g. 'maya/2024'. 
+ + """ + self.launch_application(app, project, folder, task) diff --git a/server_addon/applications/client/ayon_applications/utils.py b/server_addon/applications/client/ayon_applications/utils.py index 234fa6c683..185779a949 100644 --- a/server_addon/applications/client/ayon_applications/utils.py +++ b/server_addon/applications/client/ayon_applications/utils.py @@ -281,13 +281,20 @@ def prepare_app_environments( app.environment ] + task_entity = data.get("task_entity") folder_entity = data.get("folder_entity") # Add tools environments groups_by_name = {} tool_by_group_name = collections.defaultdict(dict) - if folder_entity: - # Make sure each tool group can be added only once - for key in folder_entity["attrib"].get("tools") or []: + tools = None + if task_entity: + tools = task_entity["attrib"].get("tools") + + if tools is None and folder_entity: + tools = folder_entity["attrib"].get("tools") + + if tools: + for key in tools: tool = app.manager.tools.get(key) if not tool or not tool.is_valid_for_app(app): continue diff --git a/server_addon/applications/package.py b/server_addon/applications/package.py index 43a301b7c2..983749355e 100644 --- a/server_addon/applications/package.py +++ b/server_addon/applications/package.py @@ -1,6 +1,6 @@ name = "applications" title = "Applications" -version = "0.2.0" +version = "0.2.2" ayon_server_version = ">=1.0.7" ayon_launcher_version = ">=1.0.2" diff --git a/server_addon/applications/server/applications.json b/server_addon/applications/server/applications.json index e4b72fdff9..84b7fa33cf 100644 --- a/server_addon/applications/server/applications.json +++ b/server_addon/applications/server/applications.json @@ -1271,6 +1271,28 @@ } ] }, + "equalizer": { + "enabled": true, + "label": "3DEqualizer", + "icon": "{}/app_icons/3de4.png", + "host_name": "equalizer", + "environment": "{}", + "variants": [ + { + "name": "7-1v2", + "label": "7.1v2", + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\3DE4_win64_r7.1v2\\bin\\3DE4.exe" + ], + "darwin": [], + "linux": [] + }, + "environment": "{}" + } + ] + }, "additional_apps": [] } } diff --git a/server_addon/applications/server/settings.py b/server_addon/applications/server/settings.py index 5743e9f471..b77686cee0 100644 --- a/server_addon/applications/server/settings.py +++ b/server_addon/applications/server/settings.py @@ -190,6 +190,8 @@ class ApplicationsSettings(BaseSettingsModel): default_factory=AppGroupWithPython, title="OpenRV") zbrush: AppGroup = SettingsField( default_factory=AppGroupWithPython, title="Zbrush") + equalizer: AppGroup = SettingsField( + default_factory=AppGroupWithPython, title="3DEqualizer") additional_apps: list[AdditionalAppGroup] = SettingsField( default_factory=list, title="Additional Applications") diff --git a/server_addon/deadline/package.py b/server_addon/deadline/package.py index 944797fea6..25ba1c1166 100644 --- a/server_addon/deadline/package.py +++ b/server_addon/deadline/package.py @@ -1,3 +1,3 @@ name = "deadline" title = "Deadline" -version = "0.1.10" +version = "0.1.11" diff --git a/server_addon/deadline/server/__init__.py b/server_addon/deadline/server/__init__.py index e7dcb7d347..8d2dc152cd 100644 --- a/server_addon/deadline/server/__init__.py +++ b/server_addon/deadline/server/__init__.py @@ -2,11 +2,13 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .settings import DeadlineSettings, DEFAULT_VALUES +from .settings import DeadlineSettings, DEFAULT_VALUES, DeadlineSiteSettings class 
Deadline(BaseServerAddon): settings_model: Type[DeadlineSettings] = DeadlineSettings + site_settings_model: Type[DeadlineSiteSettings] = DeadlineSiteSettings + async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/deadline/server/settings/__init__.py b/server_addon/deadline/server/settings/__init__.py index 0307862afa..d25c0fb330 100644 --- a/server_addon/deadline/server/settings/__init__.py +++ b/server_addon/deadline/server/settings/__init__.py @@ -2,9 +2,11 @@ from .main import ( DeadlineSettings, DEFAULT_VALUES, ) +from .site_settings import DeadlineSiteSettings __all__ = ( "DeadlineSettings", + "DeadlineSiteSettings", "DEFAULT_VALUES", ) diff --git a/server_addon/deadline/server/settings/main.py b/server_addon/deadline/server/settings/main.py index 21a314cd2f..5d42b9b1ef 100644 --- a/server_addon/deadline/server/settings/main.py +++ b/server_addon/deadline/server/settings/main.py @@ -15,12 +15,6 @@ from .publish_plugins import ( ) -class ServerListSubmodel(BaseSettingsModel): - _layout = "compact" - name: str = SettingsField(title="Name") - value: str = SettingsField(title="Value") - - async def defined_deadline_ws_name_enum_resolver( addon: "BaseServerAddon", settings_variant: str = "production", @@ -32,25 +26,40 @@ async def defined_deadline_ws_name_enum_resolver( settings = await addon.get_studio_settings(variant=settings_variant) - ws_urls = [] + ws_server_name = [] for deadline_url_item in settings.deadline_urls: - ws_urls.append(deadline_url_item.name) + ws_server_name.append(deadline_url_item.name) - return ws_urls + return ws_server_name + +class ServerItemSubmodel(BaseSettingsModel): + """Connection info about configured DL servers.""" + _layout = "compact" + name: str = SettingsField(title="Name") + value: str = SettingsField(title="Url") + require_authentication: bool = SettingsField( + False, + title="Require authentication") + ssl: bool = SettingsField(False, + title="SSL") class DeadlineSettings(BaseSettingsModel): - deadline_urls: list[ServerListSubmodel] = SettingsField( + # configured DL servers + deadline_urls: list[ServerItemSubmodel] = SettingsField( default_factory=list, - title="System Deadline Webservice URLs", + title="System Deadline Webservice Info", scope=["studio"], ) + + # name(key) of selected server for project deadline_server: str = SettingsField( - title="Project deadline server", + title="Project Deadline server name", section="---", scope=["project"], enum_resolver=defined_deadline_ws_name_enum_resolver ) + publish: PublishPluginsModel = SettingsField( default_factory=PublishPluginsModel, title="Publish Plugins", @@ -62,11 +71,14 @@ class DeadlineSettings(BaseSettingsModel): return value + DEFAULT_VALUES = { "deadline_urls": [ { "name": "default", - "value": "http://127.0.0.1:8082" + "value": "http://127.0.0.1:8082", + "require_authentication": False, + "ssl": False } ], "deadline_server": "default", diff --git a/server_addon/deadline/server/settings/publish_plugins.py b/server_addon/deadline/server/settings/publish_plugins.py index 9f69143e37..784ad2560b 100644 --- a/server_addon/deadline/server/settings/publish_plugins.py +++ b/server_addon/deadline/server/settings/publish_plugins.py @@ -191,7 +191,6 @@ class NukeSubmitDeadlineModel(BaseSettingsModel): @validator( "limit_groups", - "env_allowed_keys", "env_search_replace_values") def validate_unique_names(cls, value): ensure_unique_names(value) diff --git a/server_addon/deadline/server/settings/site_settings.py 
b/server_addon/deadline/server/settings/site_settings.py new file mode 100644 index 0000000000..a77a6edc7e --- /dev/null +++ b/server_addon/deadline/server/settings/site_settings.py @@ -0,0 +1,26 @@ +from ayon_server.settings import ( + BaseSettingsModel, + SettingsField, +) +from .main import defined_deadline_ws_name_enum_resolver + + +class CredentialPerServerModel(BaseSettingsModel): + """Provide credentials for configured DL servers""" + _layout = "expanded" + server_name: str = SettingsField("", + title="DL server name", + enum_resolver=defined_deadline_ws_name_enum_resolver) + username: str = SettingsField("", + title="Username") + password: str = SettingsField("", + title="Password") + + +class DeadlineSiteSettings(BaseSettingsModel): + local_settings: list[CredentialPerServerModel] = SettingsField( + default_factory=list, + title="Local settings", + description="Please provide credentials for configured Deadline servers", + ) + diff --git a/server_addon/maya/package.py b/server_addon/maya/package.py index 00f28d901e..5c6ce923aa 100644 --- a/server_addon/maya/package.py +++ b/server_addon/maya/package.py @@ -1,3 +1,3 @@ name = "maya" title = "Maya" -version = "0.1.16" +version = "0.1.17" diff --git a/server_addon/maya/server/settings/publishers.py b/server_addon/maya/server/settings/publishers.py index 27288053a2..bc38d5f746 100644 --- a/server_addon/maya/server/settings/publishers.py +++ b/server_addon/maya/server/settings/publishers.py @@ -35,6 +35,51 @@ def angular_unit_enum(): ] + +def extract_alembic_data_format_enum(): + return [ + {"label": "ogawa", "value": "ogawa"}, + {"label": "HDF", "value": "HDF"} + ] + + +def extract_alembic_overrides_enum(): + return [ + {"label": "Custom Attributes", "value": "attr"}, + {"label": "Custom Attributes Prefix", "value": "attrPrefix"}, + {"label": "Auto Subd", "value": "autoSubd"}, + {"label": "Data Format", "value": "dataFormat"}, + {"label": "Euler Filter", "value": "eulerFilter"}, + {"label": "Mel Per Frame Callback", "value": "melPerFrameCallback"}, + {"label": "Mel Post Job Callback", "value": "melPostJobCallback"}, + {"label": "Pre Roll", "value": "preRoll"}, + {"label": "Pre Roll Start Frame", "value": "preRollStartFrame"}, + { + "label": "Python Per Frame Callback", + "value": "pythonPerFrameCallback" + }, + { + "label": "Python Post Job Callback", + "value": "pythonPostJobCallback" + }, + {"label": "Renderable Only", "value": "renderableOnly"}, + {"label": "Strip Namespaces", "value": "stripNamespaces"}, + {"label": "User Attr", "value": "userAttr"}, + {"label": "User Attr Prefix", "value": "userAttrPrefix"}, + {"label": "UV Write", "value": "uvWrite"}, + {"label": "UVs Only", "value": "uvsOnly"}, + {"label": "Verbose", "value": "verbose"}, + {"label": "Visible Only", "value": "visibleOnly"}, + {"label": "Whole Frame Geo", "value": "wholeFrameGeo"}, + {"label": "World Space", "value": "worldSpace"}, + {"label": "Write Color Sets", "value": "writeColorSets"}, + {"label": "Write Creases", "value": "writeCreases"}, + {"label": "Write Face Sets", "value": "writeFaceSets"}, + {"label": "Write Normals", "value": "writeNormals"}, + {"label": "Write UV Sets", "value": "writeUVSets"}, + {"label": "Write Visibility", "value": "writeVisibility"} + ] + + class BasicValidateModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") @@ -184,7 +229,7 @@ class ValidateAttributesModel(BaseSettingsModel): if not success: raise BadRequestException( - "The attibutes can't be parsed as json
object" + "The attributes can't be parsed as json object" ) return value @@ -220,7 +265,7 @@ class ValidateUnrealStaticMeshNameModel(BaseSettingsModel): enabled: bool = SettingsField(title="ValidateUnrealStaticMeshName") optional: bool = SettingsField(title="Optional") validate_mesh: bool = SettingsField(title="Validate mesh names") - validate_collision: bool = SettingsField(title="Validate collison names") + validate_collision: bool = SettingsField(title="Validate collision names") class ValidateCycleErrorModel(BaseSettingsModel): @@ -243,7 +288,7 @@ class ValidatePluginPathAttributesModel(BaseSettingsModel): and the node attribute is abc_file """ - enabled: bool = True + enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") attribute: list[ValidatePluginPathAttributesAttrModel] = SettingsField( @@ -265,6 +310,9 @@ class RendererAttributesModel(BaseSettingsModel): class ValidateRenderSettingsModel(BaseSettingsModel): + enabled: bool = SettingsField(title="Enabled") + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") arnold_render_attributes: list[RendererAttributesModel] = SettingsField( default_factory=list, title="Arnold Render Attributes") vray_render_attributes: list[RendererAttributesModel] = SettingsField( @@ -299,6 +347,115 @@ class ExtractAlembicModel(BaseSettingsModel): families: list[str] = SettingsField( default_factory=list, title="Families") + autoSubd: bool = SettingsField( + title="Auto Subd", + description=( + "If this flag is present and the mesh has crease edges, crease " + "vertices or holes, the mesh (OPolyMesh) would now be written out " + "as an OSubD and crease info will be stored in the Alembic file. " + "Otherwise, creases info won't be preserved in Alembic file unless" + " a custom Boolean attribute SubDivisionMesh has been added to " + "mesh node and its value is true." + ) + ) + eulerFilter: bool = SettingsField( + title="Euler Filter", + description="Apply Euler filter while sampling rotations." + ) + renderableOnly: bool = SettingsField( + title="Renderable Only", + description="Only export renderable visible shapes." + ) + stripNamespaces: bool = SettingsField( + title="Strip Namespaces", + description=( + "Namespaces will be stripped off of the node before being written " + "to Alembic." + ) + ) + uvsOnly: bool = SettingsField( + title="UVs Only", + description=( + "If this flag is present, only uv data for PolyMesh and SubD " + "shapes will be written to the Alembic file." + ) + ) + uvWrite: bool = SettingsField( + title="UV Write", + description=( + "Uv data for PolyMesh and SubD shapes will be written to the " + "Alembic file." + ) + ) + verbose: bool = SettingsField( + title="Verbose", + description="Prints the current frame that is being evaluated." + ) + visibleOnly: bool = SettingsField( + title="Visible Only", + description="Only export dag objects visible during frame range." + ) + wholeFrameGeo: bool = SettingsField( + title="Whole Frame Geo", + description=( + "Data for geometry will only be written out on whole frames." + ) + ) + worldSpace: bool = SettingsField( + title="World Space", + description="Any root nodes will be stored in world space." + ) + writeColorSets: bool = SettingsField( + title="Write Color Sets", + description="Write vertex colors with the geometry." + ) + writeFaceSets: bool = SettingsField( + title="Write Face Sets", + description="Write face sets with the geometry." 
+ ) + writeNormals: bool = SettingsField( + title="Write Normals", + description="Write normals with the deforming geometry." + ) + writeUVSets: bool = SettingsField( + title="Write UV Sets", + description=( + "Write all uv sets on MFnMeshes as vector 2 indexed geometry " + "parameters with face varying scope." + ) + ) + writeVisibility: bool = SettingsField( + title="Write Visibility", + description=( + "Visibility state will be stored in the Alembic file. Otherwise " + "everything written out is treated as visible." + ) + ) + preRoll: bool = SettingsField( + title="Pre Roll", + description=( + "When enabled, the pre roll start frame is used to begin the " + "evaluation of the mesh. Frames from the pre roll start frame to " + "the alembic start frame will not be written to disk. This can be " + "used for simulation run up." + ) + ) + preRollStartFrame: int = SettingsField( + title="Pre Roll Start Frame", + description=( + "The frame to start scene evaluation at. This is used to set the " + "starting frame for time dependent translations and can be used to" + " evaluate run-up that isn't actually translated.\n" + "NOTE: Pre Roll needs to be enabled for this start frame " + "to be considered." + ) + ) + dataFormat: str = SettingsField( + enum_resolver=extract_alembic_data_format_enum, + title="Data Format", + description="The data format to use to write the file." + ) bake_attributes: list[str] = SettingsField( default_factory=list, title="Bake Attributes", description="List of attributes that will be included in the alembic " @@ -309,6 +466,73 @@ class ExtractAlembicModel(BaseSettingsModel): description="List of attribute prefixes for attributes that will be " "included in the alembic export.", ) + attr: str = SettingsField( + title="Custom Attributes", + placeholder="attr1;attr2", + description=( + "Attributes matching by name will be included in the Alembic " + "export. Attributes should be separated by semi-colon `;`" + ) + ) + attrPrefix: str = SettingsField( + title="Custom Attributes Prefix", + placeholder="prefix1;prefix2", + description=( + "Attributes starting with these prefixes will be included in the " + "Alembic export. Attributes should be separated by semi-colon `;`" + ) + ) + userAttr: str = SettingsField( + title="User Attr", + placeholder="attr1;attr2", + description=( + "Attributes matching by name will be included in the Alembic " + "export. Attributes should be separated by semi-colon `;`" + ) + ) + userAttrPrefix: str = SettingsField( + title="User Attr Prefix", + placeholder="prefix1;prefix2", + description=( + "Attributes starting with these prefixes will be included in the " + "Alembic export. Attributes should be separated by semi-colon `;`" + ) + ) + melPerFrameCallback: str = SettingsField( + title="Mel Per Frame Callback", + description=( + "When each frame (and the static frame) is evaluated the string " + "specified is evaluated as a Mel command." + ) + ) + melPostJobCallback: str = SettingsField( + title="Mel Post Job Callback", + description=( + "When the translation has finished the string specified is " + "evaluated as a Mel command." + ) + ) + pythonPerFrameCallback: str = SettingsField( + title="Python Per Frame Callback", + description=( + "When each frame (and the static frame) is evaluated the string " + "specified is evaluated as a python command."
+ ) + pythonPostJobCallback: str = SettingsField( + title="Python Post Job Callback", + description=( + "When the translation has finished the string specified is " + "evaluated as a python command." + ) + ) + overrides: list[str] = SettingsField( + enum_resolver=extract_alembic_overrides_enum, + title="Exposed Overrides", + description=( + "Expose the attributes in this list to the user when publishing." + ) + ) class ExtractObjModel(BaseSettingsModel): @@ -392,7 +616,7 @@ class ExtractGPUCacheModel(BaseSettingsModel): title="Optimize Animations For Motion Blur" ) writeMaterials: bool = SettingsField(title="Write Materials") - useBaseTessellation: bool = SettingsField(title="User Base Tesselation") + useBaseTessellation: bool = SettingsField(title="Use Base Tessellation") class PublishersModel(BaseSettingsModel): @@ -668,15 +892,19 @@ class PublishersModel(BaseSettingsModel): default_factory=BasicValidateModel, title="Validate Alembic Visible Node", ) + ValidateAlembicDefaultsPointcache: BasicValidateModel = SettingsField( + default_factory=BasicValidateModel, + title="Validate Alembic Defaults Pointcache" + ) + ValidateAlembicDefaultsAnimation: BasicValidateModel = SettingsField( + default_factory=BasicValidateModel, + title="Validate Alembic Defaults Animation" + ) ExtractProxyAlembic: ExtractProxyAlembicModel = SettingsField( default_factory=ExtractProxyAlembicModel, title="Extract Proxy Alembic", section="Model Extractors", ) - ExtractAlembic: ExtractAlembicModel = SettingsField( - default_factory=ExtractAlembicModel, - title="Extract Alembic", - ) ExtractObj: ExtractObjModel = SettingsField( default_factory=ExtractObjModel, title="Extract OBJ" @@ -811,6 +1039,10 @@ class PublishersModel(BaseSettingsModel): default_factory=ExtractModelModel, title="Extract Model (Maya Scene)" ) + ExtractAlembic: ExtractAlembicModel = SettingsField( + default_factory=ExtractAlembicModel, + title="Extract Alembic" + ) DEFAULT_SUFFIX_NAMING = { @@ -942,6 +1174,9 DEFAULT_PUBLISH_SETTINGS = { "ValidateRenderSettings": { + "enabled": True, + "active": True, + "optional": False, "arnold_render_attributes": [], "vray_render_attributes": [], "redshift_render_attributes": [], @@ -1200,16 +1435,6 @@ DEFAULT_PUBLISH_SETTINGS = { "proxyAbc" ] }, - "ExtractAlembic": { - "enabled": True, - "families": [ - "pointcache", - "model", - "vrayproxy.alembic" - ], - "bake_attributes": [], - "bake_attribute_prefixes": [] - }, "ExtractObj": { "enabled": False, "optional": True, @@ -1330,6 +1555,16 @@ DEFAULT_PUBLISH_SETTINGS = { "optional": False, "validate_shapes": True }, + "ValidateAlembicDefaultsPointcache": { + "enabled": True, + "optional": True, + "active": True + }, + "ValidateAlembicDefaultsAnimation": { + "enabled": True, + "optional": True, + "active": True + }, "ExtractPlayblast": DEFAULT_PLAYBLAST_SETTING, "ExtractMayaSceneRaw": { "enabled": True, @@ -1371,6 +1606,52 @@ DEFAULT_PUBLISH_SETTINGS = { "ExtractModel": { "enabled": True, "optional": True, - "active": True, + "active": True + }, + "ExtractAlembic": { + "enabled": True, + "families": [ + "pointcache", + "model", + "vrayproxy.alembic" + ], + "attr": "", + "attrPrefix": "", + "autoSubd": False, + "bake_attributes": [], + "bake_attribute_prefixes": [], + "dataFormat": "ogawa", + "eulerFilter": False, + "melPerFrameCallback": "", + "melPostJobCallback": "", + "overrides": [ + "attr", + "attrPrefix", + "renderableOnly", + "visibleOnly", + "worldSpace", + "writeColorSets", + "writeFaceSets", + "writeNormals" + ], + "preRoll": False,
"preRollStartFrame": 0, + "pythonPerFrameCallback": "", + "pythonPostJobCallback": "", + "renderableOnly": False, + "stripNamespaces": True, + "uvsOnly": False, + "uvWrite": False, + "userAttr": "", + "userAttrPrefix": "", + "verbose": False, + "visibleOnly": False, + "wholeFrameGeo": False, + "worldSpace": True, + "writeColorSets": False, + "writeFaceSets": False, + "writeNormals": True, + "writeUVSets": False, + "writeVisibility": False } }