diff --git a/client/ayon_core/hosts/fusion/api/lib.py b/client/ayon_core/hosts/fusion/api/lib.py index 08722463e1..7f7d20010d 100644 --- a/client/ayon_core/hosts/fusion/api/lib.py +++ b/client/ayon_core/hosts/fusion/api/lib.py @@ -169,7 +169,7 @@ def validate_comp_prefs(comp=None, force_repair=False): def _on_repair(): attributes = dict() for key, comp_key, _label in validations: - value = folder_value[key] + value = folder_attributes[key] comp_key_full = "Comp.FrameFormat.{}".format(comp_key) attributes[comp_key_full] = value comp.SetPrefs(attributes) diff --git a/client/ayon_core/hosts/hiero/api/workio.py b/client/ayon_core/hosts/hiero/api/workio.py index 4c2416ca38..6e8fc20172 100644 --- a/client/ayon_core/hosts/hiero/api/workio.py +++ b/client/ayon_core/hosts/hiero/api/workio.py @@ -51,13 +51,12 @@ def open_file(filepath): project = hiero.core.projects()[-1] - # open project file - hiero.core.openProject(filepath.replace(os.path.sep, "/")) - - # close previous project - project.close() - - + # Close previous project if its different to the current project. + filepath = filepath.replace(os.path.sep, "/") + if project.path().replace(os.path.sep, "/") != filepath: + # open project file + hiero.core.openProject(filepath) + project.close() return True diff --git a/client/ayon_core/hosts/maya/api/lib.py b/client/ayon_core/hosts/maya/api/lib.py index 525c156fe1..2b41ffc06c 100644 --- a/client/ayon_core/hosts/maya/api/lib.py +++ b/client/ayon_core/hosts/maya/api/lib.py @@ -4212,3 +4212,23 @@ def create_rig_animation_instance( variant=namespace, pre_create_data={"use_selection": True} ) + + +def get_node_index_under_parent(node: str) -> int: + """Return the index of a DAG node under its parent. + + Arguments: + node (str): A DAG Node path. + + Returns: + int: The DAG node's index under its parents or world + + """ + node = cmds.ls(node, long=True)[0] # enforce long names + parent = node.rsplit("|", 1)[0] + if not parent: + return cmds.ls(assemblies=True, long=True).index(node) + else: + return cmds.listRelatives(parent, + children=True, + fullPath=True).index(node) diff --git a/client/ayon_core/hosts/maya/api/workfile_template_builder.py b/client/ayon_core/hosts/maya/api/workfile_template_builder.py index cfd416b708..f4f9a34983 100644 --- a/client/ayon_core/hosts/maya/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/maya/api/workfile_template_builder.py @@ -1,3 +1,5 @@ +import json + from maya import cmds from ayon_core.pipeline import ( @@ -8,13 +10,15 @@ from ayon_core.pipeline import ( ) from ayon_core.pipeline.workfile.workfile_template_builder import ( TemplateAlreadyImported, - AbstractTemplateBuilder + AbstractTemplateBuilder, + PlaceholderPlugin, + PlaceholderItem, ) from ayon_core.tools.workfile_template_build import ( WorkfileBuildPlaceholderDialog, ) -from .lib import get_main_window +from .lib import read, imprint, get_main_window PLACEHOLDER_SET = "PLACEHOLDERS_SET" @@ -86,6 +90,162 @@ class MayaTemplateBuilder(AbstractTemplateBuilder): return True +class MayaPlaceholderPlugin(PlaceholderPlugin): + """Base Placeholder Plugin for Maya with one unified cache. + + Creates a locator as placeholder node, which during populate provide + all of its attributes defined on the locator's transform in + `placeholder.data` and where `placeholder.scene_identifier` is the + full path to the node. 
+ + Inherited classes must still implement `populate_placeholder` + + """ + + use_selection_as_parent = True + item_class = PlaceholderItem + + def _create_placeholder_name(self, placeholder_data): + return self.identifier.replace(".", "_") + + def _collect_scene_placeholders(self): + nodes_by_identifier = self.builder.get_shared_populate_data( + "placeholder_nodes" + ) + if nodes_by_identifier is None: + # Cache placeholder data to shared data + nodes = cmds.ls("*.plugin_identifier", long=True, objectsOnly=True) + + nodes_by_identifier = {} + for node in nodes: + identifier = cmds.getAttr("{}.plugin_identifier".format(node)) + nodes_by_identifier.setdefault(identifier, []).append(node) + + # Set the cache + self.builder.set_shared_populate_data( + "placeholder_nodes", nodes_by_identifier + ) + + return nodes_by_identifier + + def create_placeholder(self, placeholder_data): + + parent = None + if self.use_selection_as_parent: + selection = cmds.ls(selection=True) + if len(selection) > 1: + raise ValueError( + "More than one node is selected. " + "Please select only one to define the parent." + ) + parent = selection[0] if selection else None + + placeholder_data["plugin_identifier"] = self.identifier + placeholder_name = self._create_placeholder_name(placeholder_data) + + placeholder = cmds.spaceLocator(name=placeholder_name)[0] + if parent: + placeholder = cmds.parent(placeholder, selection[0])[0] + + self.imprint(placeholder, placeholder_data) + + def update_placeholder(self, placeholder_item, placeholder_data): + node_name = placeholder_item.scene_identifier + + changed_values = {} + for key, value in placeholder_data.items(): + if value != placeholder_item.data.get(key): + changed_values[key] = value + + # Delete attributes to ensure we imprint new data with correct type + for key in changed_values.keys(): + placeholder_item.data[key] = value + if cmds.attributeQuery(key, node=node_name, exists=True): + attribute = "{}.{}".format(node_name, key) + cmds.deleteAttr(attribute) + + self.imprint(node_name, changed_values) + + def collect_placeholders(self): + placeholders = [] + nodes_by_identifier = self._collect_scene_placeholders() + for node in nodes_by_identifier.get(self.identifier, []): + # TODO do data validations and maybe upgrades if they are invalid + placeholder_data = self.read(node) + placeholders.append( + self.item_class(scene_identifier=node, + data=placeholder_data, + plugin=self) + ) + + return placeholders + + def post_placeholder_process(self, placeholder, failed): + """Cleanup placeholder after load of its corresponding representations. + + Hide placeholder, add them to placeholder set. + Used only by PlaceholderCreateMixin and PlaceholderLoadMixin + + Args: + placeholder (PlaceholderItem): Item which was just used to load + representation. + failed (bool): Loading of representation failed. + """ + # Hide placeholder and add them to placeholder set + node = placeholder.scene_identifier + + # If we just populate the placeholders from current scene, the + # placeholder set will not be created so account for that. + if not cmds.objExists(PLACEHOLDER_SET): + cmds.sets(name=PLACEHOLDER_SET, empty=True) + + cmds.sets(node, addElement=PLACEHOLDER_SET) + cmds.hide(node) + cmds.setAttr("{}.hiddenInOutliner".format(node), True) + + def delete_placeholder(self, placeholder): + """Remove placeholder if building was successful + + Used only by PlaceholderCreateMixin and PlaceholderLoadMixin. 
+ """ + node = placeholder.scene_identifier + + # To avoid that deleting a placeholder node will have Maya delete + # any objectSets the node was a member of we will first remove it + # from any sets it was a member of. This way the `PLACEHOLDERS_SET` + # will survive long enough + sets = cmds.listSets(o=node) or [] + for object_set in sets: + cmds.sets(node, remove=object_set) + + cmds.delete(node) + + def imprint(self, node, data): + """Imprint call for placeholder node""" + + # Complicated data that can't be represented as flat maya attributes + # we write to json strings, e.g. multiselection EnumDef + for key, value in data.items(): + if isinstance(value, (list, tuple, dict)): + data[key] = "JSON::{}".format(json.dumps(value)) + + imprint(node, data) + + def read(self, node): + """Read call for placeholder node""" + + data = read(node) + + # Complicated data that can't be represented as flat maya attributes + # we read from json strings, e.g. multiselection EnumDef + for key, value in data.items(): + if isinstance(value, str) and value.startswith("JSON::"): + value = value[len("JSON::"):] # strip of JSON:: prefix + data[key] = json.loads(value) + + return data + + def build_workfile_template(*args): builder = MayaTemplateBuilder(registered_host()) builder.build_template() diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_assembly.py b/client/ayon_core/hosts/maya/plugins/publish/extract_assembly.py index 2c23f9b752..5f51dc38cb 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_assembly.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_assembly.py @@ -2,7 +2,7 @@ import os import json from ayon_core.pipeline import publish -from ayon_core.hosts.maya.api.lib import extract_alembic +from ayon_core.hosts.maya.api.alembic import extract_alembic from maya import cmds diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_proxy_abc.py b/client/ayon_core/hosts/maya/plugins/publish/extract_proxy_abc.py index 3637a58614..5aefdfc33a 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -3,8 +3,8 @@ import os from maya import cmds from ayon_core.pipeline import publish +from ayon_core.hosts.maya.api.alembic import extract_alembic from ayon_core.hosts.maya.api.lib import ( - extract_alembic, suspended_refresh, maintained_selection, iter_visible_nodes_in_range diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py index 1a389f3d33..b5cc7745a1 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py @@ -5,8 +5,8 @@ import os from maya import cmds # noqa from ayon_core.pipeline import publish +from ayon_core.hosts.maya.api.alembic import extract_alembic from ayon_core.hosts.maya.api.lib import ( - extract_alembic, suspended_refresh, maintained_selection ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_rendersettings.py b/client/ayon_core/hosts/maya/plugins/publish/validate_rendersettings.py index 78a247b3f2..7badfdc027 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_rendersettings.py @@ -10,6 +10,7 @@ from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, PublishValidationError, + 
OptionalPyblishPluginMixin ) from ayon_core.hosts.maya.api import lib from ayon_core.hosts.maya.api.lib_rendersettings import RenderSettings @@ -37,7 +38,8 @@ def get_redshift_image_format_labels(): return mel.eval("{0}={0}".format(var)) -class ValidateRenderSettings(pyblish.api.InstancePlugin): +class ValidateRenderSettings(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validates the global render settings * File Name Prefix must start with: `` @@ -55,7 +57,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): * Frame Padding must be: * default: 4 - * Animation must be toggle on, in Render Settings - Common tab: + * Animation must be toggled on, in Render Settings - Common tab: * vray: Animation on standard of specific * arnold: Frame / Animation ext: Any choice without "(Single Frame)" * redshift: Animation toggled on @@ -67,10 +69,11 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): """ order = ValidateContentsOrder - label = "Render Settings" + label = "Validate Render Settings" hosts = ["maya"] families = ["renderlayer"] actions = [RepairAction] + optional = True ImagePrefixes = { 'mentalray': 'defaultRenderGlobals.imageFilePrefix', @@ -112,6 +115,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): DEFAULT_PREFIX = "//_" def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: diff --git a/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py b/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py index 5e73933722..b07c7e9a70 100644 --- a/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py +++ b/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py @@ -1,87 +1,48 @@ -import json - from maya import cmds from ayon_core.pipeline.workfile.workfile_template_builder import ( - PlaceholderPlugin, - LoadPlaceholderItem, PlaceholderLoadMixin, + LoadPlaceholderItem ) from ayon_core.hosts.maya.api.lib import ( - read, - imprint, - get_reference_node + get_container_transforms, + get_node_parent, + get_node_index_under_parent +) +from ayon_core.hosts.maya.api.workfile_template_builder import ( + MayaPlaceholderPlugin, ) -from ayon_core.hosts.maya.api.workfile_template_builder import PLACEHOLDER_SET -class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): +class MayaPlaceholderLoadPlugin(MayaPlaceholderPlugin, PlaceholderLoadMixin): identifier = "maya.load" label = "Maya load" - def _collect_scene_placeholders(self): - # Cache placeholder data to shared data - placeholder_nodes = self.builder.get_shared_populate_data( - "placeholder_nodes" - ) - if placeholder_nodes is None: - attributes = cmds.ls("*.plugin_identifier", long=True) - placeholder_nodes = {} - for attribute in attributes: - node_name = attribute.rpartition(".")[0] - placeholder_nodes[node_name] = ( - self._parse_placeholder_node_data(node_name) - ) - - self.builder.set_shared_populate_data( - "placeholder_nodes", placeholder_nodes - ) - return placeholder_nodes - - def _parse_placeholder_node_data(self, node_name): - placeholder_data = read(node_name) - parent_name = ( - cmds.getAttr(node_name + ".parent", asString=True) - or node_name.rpartition("|")[0] - or "" - ) - if parent_name: - siblings = cmds.listRelatives(parent_name, children=True) - else: - siblings = cmds.ls(assemblies=True) - node_shortname = node_name.rpartition("|")[2] - current_index = cmds.getAttr(node_name + ".index", asString=True) - if current_index < 0: - 
current_index = siblings.index(node_shortname) - - placeholder_data.update({ - "parent": parent_name, - "index": current_index - }) - return placeholder_data + item_class = LoadPlaceholderItem def _create_placeholder_name(self, placeholder_data): - placeholder_name_parts = placeholder_data["builder_type"].split("_") - pos = 1 + # Split builder type: context_assets, linked_assets, all_assets + prefix, suffix = placeholder_data["builder_type"].split("_", 1) + parts = [prefix] + + # add family if any placeholder_product_type = placeholder_data.get("product_type") if placeholder_product_type is None: placeholder_product_type = placeholder_data.get("family") if placeholder_product_type: - placeholder_name_parts.insert(pos, placeholder_product_type) - pos += 1 + parts.append(placeholder_product_type) # add loader arguments if any loader_args = placeholder_data["loader_args"] if loader_args: - loader_args = json.loads(loader_args.replace('\'', '\"')) - values = [v for v in loader_args.values()] - for value in values: - placeholder_name_parts.insert(pos, value) - pos += 1 + loader_args = eval(loader_args) + for value in loader_args.values(): + parts.append(str(value)) - placeholder_name = "_".join(placeholder_name_parts) + parts.append(suffix) + placeholder_name = "_".join(parts) return placeholder_name.capitalize() @@ -104,68 +65,6 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): ) return loaded_representation_ids - def create_placeholder(self, placeholder_data): - selection = cmds.ls(selection=True) - if len(selection) > 1: - raise ValueError("More then one item are selected") - - parent = selection[0] if selection else None - - placeholder_data["plugin_identifier"] = self.identifier - - placeholder_name = self._create_placeholder_name(placeholder_data) - - placeholder = cmds.spaceLocator(name=placeholder_name)[0] - if parent: - placeholder = cmds.parent(placeholder, selection[0])[0] - - imprint(placeholder, placeholder_data) - - # Add helper attributes to keep placeholder info - cmds.addAttr( - placeholder, - longName="parent", - hidden=True, - dataType="string" - ) - cmds.addAttr( - placeholder, - longName="index", - hidden=True, - attributeType="short", - defaultValue=-1 - ) - - cmds.setAttr(placeholder + ".parent", "", type="string") - - def update_placeholder(self, placeholder_item, placeholder_data): - node_name = placeholder_item.scene_identifier - new_values = {} - for key, value in placeholder_data.items(): - placeholder_value = placeholder_item.data.get(key) - if value != placeholder_value: - new_values[key] = value - placeholder_item.data[key] = value - - for key in new_values.keys(): - cmds.deleteAttr(node_name + "." 
+ key) - - imprint(node_name, new_values) - - def collect_placeholders(self): - output = [] - scene_placeholders = self._collect_scene_placeholders() - for node_name, placeholder_data in scene_placeholders.items(): - if placeholder_data.get("plugin_identifier") != self.identifier: - continue - - # TODO do data validations and maybe upgrades if they are invalid - output.append( - LoadPlaceholderItem(node_name, placeholder_data, self) - ) - - return output - def populate_placeholder(self, placeholder): self.populate_load_placeholder(placeholder) @@ -176,30 +75,6 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): def get_placeholder_options(self, options=None): return self.get_load_plugin_options(options) - def post_placeholder_process(self, placeholder, failed): - """Cleanup placeholder after load of its corresponding representations. - - Args: - placeholder (PlaceholderItem): Item which was just used to load - representation. - failed (bool): Loading of representation failed. - """ - # Hide placeholder and add them to placeholder set - node = placeholder.scene_identifier - - # If we just populate the placeholders from current scene, the - # placeholder set will not be created so account for that. - if not cmds.objExists(PLACEHOLDER_SET): - cmds.sets(name=PLACEHOLDER_SET, empty=True) - - cmds.sets(node, addElement=PLACEHOLDER_SET) - cmds.hide(node) - cmds.setAttr(node + ".hiddenInOutliner", True) - - def delete_placeholder(self, placeholder): - """Remove placeholder if building was successful""" - cmds.delete(placeholder.scene_identifier) - def load_succeed(self, placeholder, container): self._parent_in_hierarchy(placeholder, container) @@ -215,56 +90,43 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): if not container: return - roots = cmds.sets(container, q=True) or [] - ref_node = None - try: - ref_node = get_reference_node(roots) - except AssertionError as e: - self.log.info(e.args[0]) + # TODO: This currently returns only a single root but a loaded scene + # could technically load more than a single root + container_root = get_container_transforms(container, root=True) - nodes_to_parent = [] - for root in roots: - if ref_node: - ref_root = cmds.referenceQuery(root, nodes=True)[0] - ref_root = ( - cmds.listRelatives(ref_root, parent=True, path=True) or - [ref_root] - ) - nodes_to_parent.extend(ref_root) - continue - if root.endswith("_RN"): - # Backwards compatibility for hardcoded reference names. 
- refRoot = cmds.referenceQuery(root, n=True)[0] - refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot] - nodes_to_parent.extend(refRoot) - elif root not in cmds.listSets(allSets=True): - nodes_to_parent.append(root) + # Bugfix: The get_container_transforms does not recognize the load + # reference group currently + # TODO: Remove this when it does + parent = get_node_parent(container_root) + if parent: + container_root = parent + roots = [container_root] - elif not cmds.sets(root, q=True): - return + # Add the loaded roots to the holding sets if they exist + holding_sets = cmds.listSets(object=placeholder.scene_identifier) or [] + for holding_set in holding_sets: + cmds.sets(roots, forceElement=holding_set) - # Move loaded nodes to correct index in outliner hierarchy + # Parent the roots to the place of the placeholder locator and match + # its matrix placeholder_form = cmds.xform( placeholder.scene_identifier, - q=True, + query=True, matrix=True, worldSpace=True ) - scene_parent = cmds.listRelatives( - placeholder.scene_identifier, parent=True, fullPath=True - ) - for node in set(nodes_to_parent): - cmds.reorder(node, front=True) - cmds.reorder(node, relative=placeholder.data["index"]) - cmds.xform(node, matrix=placeholder_form, ws=True) - if scene_parent: - cmds.parent(node, scene_parent) - else: - if cmds.listRelatives(node, parent=True): - cmds.parent(node, world=True) + scene_parent = get_node_parent(placeholder.scene_identifier) + for node in set(roots): + cmds.xform(node, matrix=placeholder_form, worldSpace=True) - holding_sets = cmds.listSets(object=placeholder.scene_identifier) - if not holding_sets: - return - for holding_set in holding_sets: - cmds.sets(roots, forceElement=holding_set) + if scene_parent != get_node_parent(node): + if scene_parent: + node = cmds.parent(node, scene_parent)[0] + else: + node = cmds.parent(node, world=True)[0] + + # Move loaded nodes in index order next to their placeholder node + cmds.reorder(node, back=True) + index = get_node_index_under_parent(placeholder.scene_identifier) + cmds.reorder(node, front=True) + cmds.reorder(node, relative=index + 1) diff --git a/client/ayon_core/hosts/unreal/ue_workers.py b/client/ayon_core/hosts/unreal/ue_workers.py index e3f8729c2e..256c0557be 100644 --- a/client/ayon_core/hosts/unreal/ue_workers.py +++ b/client/ayon_core/hosts/unreal/ue_workers.py @@ -260,11 +260,11 @@ class UEProjectGenerationWorker(UEWorker): self.failed.emit(msg, return_code) raise RuntimeError(msg) - # ensure we have PySide2 installed in engine + # ensure we have PySide2/6 installed in engine self.progress.emit(0) self.stage_begin.emit( - (f"Checking PySide2 installation... {stage_count} " + (f"Checking Qt bindings installation... 
{stage_count} " f" out of {stage_count}")) python_path = None if platform.system().lower() == "windows": @@ -287,11 +287,30 @@ class UEProjectGenerationWorker(UEWorker): msg = f"Unreal Python not found at {python_path}" self.failed.emit(msg, 1) raise RuntimeError(msg) - pyside_cmd = [python_path.as_posix(), - "-m", - "pip", - "install", - "pyside2"] + + pyside_version = "PySide2" + ue_version = self.ue_version.split(".") + if int(ue_version[0]) == 5 and int(ue_version[1]) >= 4: + # Use PySide6 6.6.3 because 6.7.0 had a bug + # - 'QPushButton' can't be added to 'QBoxLayout' + pyside_version = "PySide6==6.6.3" + + site_packages_prefix = python_path.parent.as_posix() + + pyside_cmd = [ + python_path.as_posix(), + "-m", "pip", + "install", + "--ignore-installed", + pyside_version, + + ] + + if platform.system().lower() == "windows": + pyside_cmd += ["--target", site_packages_prefix] + + print(f"--- Installing {pyside_version} ...") + print(" ".join(pyside_cmd)) pyside_install = subprocess.Popen(pyside_cmd, stdout=subprocess.PIPE, @@ -306,8 +325,8 @@ class UEProjectGenerationWorker(UEWorker): return_code = pyside_install.wait() if return_code and return_code != 0: - msg = ("Failed to create the project! " - "The installation of PySide2 has failed!") + msg = (f"Failed to create the project! {return_code} " + f"The installation of {pyside_version} has failed!: {pyside_install}") self.failed.emit(msg, return_code) raise RuntimeError(msg) diff --git a/client/ayon_core/lib/__init__.py b/client/ayon_core/lib/__init__.py index 408262ca42..e436396c6c 100644 --- a/client/ayon_core/lib/__init__.py +++ b/client/ayon_core/lib/__init__.py @@ -27,6 +27,10 @@ from .local_settings import ( get_openpype_username, ) from .ayon_connection import initialize_ayon_connection +from .cache import ( + CacheItem, + NestedCacheItem, +) from .events import ( emit_event, register_event_callback @@ -157,6 +161,9 @@ __all__ = [ "initialize_ayon_connection", + "CacheItem", + "NestedCacheItem", + "emit_event", "register_event_callback", diff --git a/client/ayon_core/lib/cache.py b/client/ayon_core/lib/cache.py new file mode 100644 index 0000000000..dc83520f76 --- /dev/null +++ b/client/ayon_core/lib/cache.py @@ -0,0 +1,250 @@ +import time +import collections + +InitInfo = collections.namedtuple( + "InitInfo", + ["default_factory", "lifetime"] +) + + +def _default_factory_func(): + return None + + +class CacheItem: + """Simple cache item with lifetime and default factory for default value. + + Default factory should return default value that is used on init + and on reset. + + Args: + default_factory (Optional[callable]): Function that returns default + value used on init and on reset. + lifetime (Optional[int]): Lifetime of the cache data in seconds. + Default lifetime is 120 seconds. + + """ + def __init__(self, default_factory=None, lifetime=None): + if lifetime is None: + lifetime = 120 + self._lifetime = lifetime + self._last_update = None + if default_factory is None: + default_factory = _default_factory_func + self._default_factory = default_factory + self._data = default_factory() + + @property + def is_valid(self): + """Is cache valid to use. + + Return: + bool: True if cache is valid, False otherwise. + + """ + if self._last_update is None: + return False + + return (time.time() - self._last_update) < self._lifetime + + def set_lifetime(self, lifetime): + """Change lifetime of cache item. + + Args: + lifetime (int): Lifetime of the cache data in seconds. 
+ """ + + self._lifetime = lifetime + + def set_invalid(self): + """Set cache as invalid.""" + + self._last_update = None + + def reset(self): + """Set cache as invalid and reset data.""" + + self._last_update = None + self._data = self._default_factory() + + def get_data(self): + """Receive cached data. + + Returns: + Any: Any data that are cached. + + """ + return self._data + + def update_data(self, data): + """Update cache data. + + Args: + data (Any): Any data that are cached. + + """ + self._data = data + self._last_update = time.time() + + +class NestedCacheItem: + """Helper for cached items stored in nested structure. + + Example: + >>> cache = NestedCacheItem(levels=2, default_factory=lambda: 0) + >>> cache["a"]["b"].is_valid + False + >>> cache["a"]["b"].get_data() + 0 + >>> cache["a"]["b"] = 1 + >>> cache["a"]["b"].is_valid + True + >>> cache["a"]["b"].get_data() + 1 + >>> cache.reset() + >>> cache["a"]["b"].is_valid + False + + Args: + levels (int): Number of nested levels where read cache is stored. + default_factory (Optional[callable]): Function that returns default + value used on init and on reset. + lifetime (Optional[int]): Lifetime of the cache data in seconds. + Default value is based on default value of 'CacheItem'. + _init_info (Optional[InitInfo]): Private argument. Init info for + nested cache where created from parent item. + + """ + def __init__( + self, levels=1, default_factory=None, lifetime=None, _init_info=None + ): + if levels < 1: + raise ValueError("Nested levels must be greater than 0") + self._data_by_key = {} + if _init_info is None: + _init_info = InitInfo(default_factory, lifetime) + self._init_info = _init_info + self._levels = levels + + def __getitem__(self, key): + """Get cached data. + + Args: + key (str): Key of the cache item. + + Returns: + Union[NestedCacheItem, CacheItem]: Cache item. + + """ + cache = self._data_by_key.get(key) + if cache is None: + if self._levels > 1: + cache = NestedCacheItem( + levels=self._levels - 1, + _init_info=self._init_info + ) + else: + cache = CacheItem( + self._init_info.default_factory, + self._init_info.lifetime + ) + self._data_by_key[key] = cache + return cache + + def __setitem__(self, key, value): + """Update cached data. + + Args: + key (str): Key of the cache item. + value (Any): Any data that are cached. + + """ + if self._levels > 1: + raise AttributeError(( + "{} does not support '__setitem__'. Lower nested level by {}" + ).format(self.__class__.__name__, self._levels - 1)) + cache = self[key] + cache.update_data(value) + + def get(self, key): + """Get cached data. + + Args: + key (str): Key of the cache item. + + Returns: + Union[NestedCacheItem, CacheItem]: Cache item. + + """ + return self[key] + + def cached_count(self): + """Amount of cached items. + + Returns: + int: Amount of cached items. + + """ + return len(self._data_by_key) + + def clear_key(self, key): + """Clear cached item by key. + + Args: + key (str): Key of the cache item. + + """ + self._data_by_key.pop(key, None) + + def clear_invalid(self): + """Clear all invalid cache items. + + Note: + To clear all cache items use 'reset'. 
+ + """ + changed = {} + children_are_nested = self._levels > 1 + for key, cache in tuple(self._data_by_key.items()): + if children_are_nested: + output = cache.clear_invalid() + if output: + changed[key] = output + if not cache.cached_count(): + self._data_by_key.pop(key) + elif not cache.is_valid: + changed[key] = cache.get_data() + self._data_by_key.pop(key) + return changed + + def reset(self): + """Reset cache. + + Note: + To clear only invalid cache items use 'clear_invalid'. + + """ + self._data_by_key = {} + + def set_lifetime(self, lifetime): + """Change lifetime of all children cache items. + + Args: + lifetime (int): Lifetime of the cache data in seconds. + + """ + self._init_info.lifetime = lifetime + for cache in self._data_by_key.values(): + cache.set_lifetime(lifetime) + + @property + def is_valid(self): + """Raise reasonable error when called on wrong level. + + Raises: + AttributeError: If called on nested cache item. + + """ + raise AttributeError(( + "{} does not support 'is_valid'. Lower nested level by '{}'" + ).format(self.__class__.__name__, self._levels)) diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py index e3a4cd8030..1eba00a17f 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py @@ -9,10 +9,7 @@ import pyblish.api from ayon_core.pipeline.publish import ( AYONPyblishPluginMixin ) -from ayon_core.lib import ( - BoolDef, - NumberDef, -) +from ayon_core.lib import NumberDef class FusionSubmitDeadline( @@ -64,11 +61,6 @@ class FusionSubmitDeadline( decimals=0, minimum=1, maximum=10 - ), - BoolDef( - "suspend_publish", - default=False, - label="Suspend publish" ) ] @@ -80,10 +72,6 @@ class FusionSubmitDeadline( attribute_values = self.get_attr_values_from_data( instance.data) - # add suspend_publish attributeValue to instance data - instance.data["suspend_publish"] = attribute_values[ - "suspend_publish"] - context = instance.context key = "__hasRun{}".format(self.__class__.__name__) diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index 6952604293..597a3cfc55 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -10,7 +10,6 @@ from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo from ayon_core.lib import ( is_in_tests, - BoolDef, TextDef, NumberDef ) @@ -90,11 +89,6 @@ class HoudiniSubmitDeadline( @classmethod def get_attribute_defs(cls): return [ - BoolDef( - "suspend_publish", - default=False, - label="Suspend publish" - ), NumberDef( "priority", label="Priority", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py index d70cb75bf3..52a2ad122b 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -76,11 +76,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, default=cls.use_gpu, label="Use GPU" ), - BoolDef( - "suspend_publish", - default=False, - 
label="Suspend publish" - ), BoolDef( "workfile_dependency", default=cls.workfile_dependency, @@ -100,10 +95,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, instance.data["attributeValues"] = self.get_attr_values_from_data( instance.data) - # add suspend_publish attributeValue to instance data - instance.data["suspend_publish"] = instance.data["attributeValues"][ - "suspend_publish"] - families = instance.data["families"] node = instance.data["transientData"]["node"] diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py index 4e4657d886..ad7e5b8ed1 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py @@ -147,9 +147,6 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, instance_settings = self.get_attr_values_from_data(instance.data) initial_status = instance_settings.get("publishJobState", "Active") - # TODO: Remove this backwards compatibility of `suspend_publish` - if instance.data.get("suspend_publish"): - initial_status = "Suspended" args = [ "--headless", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py index 8def9cc63c..99a5f94cf1 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py @@ -88,9 +88,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, hosts = ["fusion", "max", "maya", "nuke", "houdini", "celaction", "aftereffects", "harmony", "blender"] - families = ["render.farm", "render.frames_farm", - "prerender.farm", "prerender.frames_farm", - "renderlayer", "imagesequence", + families = ["render", "render.farm", "render.frames_farm", + "prerender", "prerender.farm", "prerender.frames_farm", + "renderlayer", "imagesequence", "image", "vrayscene", "maxrender", "arnold_rop", "mantra_rop", "karma_rop", "vray_rop", @@ -224,9 +224,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, instance_settings = self.get_attr_values_from_data(instance.data) initial_status = instance_settings.get("publishJobState", "Active") - # TODO: Remove this backwards compatibility of `suspend_publish` - if instance.data.get("suspend_publish"): - initial_status = "Suspended" args = [ "--headless", @@ -314,7 +311,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, return deadline_publish_job_id - def process(self, instance): # type: (pyblish.api.Instance) -> None """Process plugin. diff --git a/client/ayon_core/pipeline/anatomy/anatomy.py b/client/ayon_core/pipeline/anatomy/anatomy.py index 2aa8eeddbc..98bbaa9bdc 100644 --- a/client/ayon_core/pipeline/anatomy/anatomy.py +++ b/client/ayon_core/pipeline/anatomy/anatomy.py @@ -3,11 +3,16 @@ import re import copy import platform import collections -import time import ayon_api -from ayon_core.lib import Logger, get_local_site_id, StringTemplate +from ayon_core.lib import ( + Logger, + get_local_site_id, + StringTemplate, + CacheItem, + NestedCacheItem, +) from ayon_core.addon import AddonsManager from .exceptions import RootCombinationError, ProjectNotSet @@ -397,62 +402,11 @@ class BaseAnatomy(object): ) -class CacheItem: - """Helper to cache data. - - Helper does not handle refresh of data and does not mark data as outdated. 
- Who uses the object should check of outdated state on his own will. - """ - - default_lifetime = 10 - - def __init__(self, lifetime=None): - self._data = None - self._cached = None - self._lifetime = lifetime or self.default_lifetime - - @property - def data(self): - """Cached data/object. - - Returns: - Any: Whatever was cached. - """ - - return self._data - - @property - def is_outdated(self): - """Item has outdated cache. - - Lifetime of cache item expired or was not yet set. - - Returns: - bool: Item is outdated. - """ - - if self._cached is None: - return True - return (time.time() - self._cached) > self._lifetime - - def update_data(self, data): - """Update cache of data. - - Args: - data (Any): Data to cache. - """ - - self._data = data - self._cached = time.time() - - class Anatomy(BaseAnatomy): - _sitesync_addon_cache = CacheItem() - _project_cache = collections.defaultdict(CacheItem) - _default_site_id_cache = collections.defaultdict(CacheItem) - _root_overrides_cache = collections.defaultdict( - lambda: collections.defaultdict(CacheItem) - ) + _project_cache = NestedCacheItem(lifetime=10) + _sitesync_addon_cache = CacheItem(lifetime=60) + _default_site_id_cache = NestedCacheItem(lifetime=60) + _root_overrides_cache = NestedCacheItem(2, lifetime=60) def __init__( self, project_name=None, site_name=None, project_entity=None @@ -477,18 +431,18 @@ class Anatomy(BaseAnatomy): @classmethod def get_project_entity_from_cache(cls, project_name): project_cache = cls._project_cache[project_name] - if project_cache.is_outdated: + if not project_cache.is_valid: project_cache.update_data(ayon_api.get_project(project_name)) - return copy.deepcopy(project_cache.data) + return copy.deepcopy(project_cache.get_data()) @classmethod def get_sitesync_addon(cls): - if cls._sitesync_addon_cache.is_outdated: + if not cls._sitesync_addon_cache.is_valid: manager = AddonsManager() cls._sitesync_addon_cache.update_data( manager.get_enabled_addon("sitesync") ) - return cls._sitesync_addon_cache.data + return cls._sitesync_addon_cache.get_data() @classmethod def _get_studio_roots_overrides(cls, project_name): @@ -533,14 +487,14 @@ class Anatomy(BaseAnatomy): elif not site_name: # Use sync server to receive active site name project_cache = cls._default_site_id_cache[project_name] - if project_cache.is_outdated: + if not project_cache.is_valid: project_cache.update_data( sitesync_addon.get_active_site_type(project_name) ) - site_name = project_cache.data + site_name = project_cache.get_data() site_cache = cls._root_overrides_cache[project_name][site_name] - if site_cache.is_outdated: + if not site_cache.is_valid: if site_name == "studio": # Handle studio root overrides without sync server # - studio root overrides can be done even without sync server @@ -553,4 +507,4 @@ class Anatomy(BaseAnatomy): project_name, site_name ) site_cache.update_data(roots_overrides) - return site_cache.data + return site_cache.get_data() diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index b8618738fb..dd005d250c 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -1987,12 +1987,12 @@ class CreateContext: "Folder '{}' was not found".format(folder_path) ) - task_name = None if task_entity is None: - task_name = self.get_current_task_name() - task_entity = ayon_api.get_task_by_name( - project_name, folder_entity["id"], task_name - ) + current_task_name = self.get_current_task_name() + if current_task_name: + task_entity 
= ayon_api.get_task_by_name( + project_name, folder_entity["id"], current_task_name + ) if pre_create_data is None: pre_create_data = {} @@ -2018,7 +2018,7 @@ class CreateContext: instance_data = { "folderPath": folder_entity["path"], - "task": task_name, + "task": task_entity["name"] if task_entity else None, "productType": creator.product_type, "variant": variant } diff --git a/client/ayon_core/resources/app_icons/3de4.png b/client/ayon_core/resources/app_icons/3de4.png new file mode 100644 index 0000000000..bd0fe40d37 Binary files /dev/null and b/client/ayon_core/resources/app_icons/3de4.png differ diff --git a/client/ayon_core/tools/common_models/cache.py b/client/ayon_core/tools/common_models/cache.py index 221a14160c..59b727728f 100644 --- a/client/ayon_core/tools/common_models/cache.py +++ b/client/ayon_core/tools/common_models/cache.py @@ -1,239 +1,31 @@ -import time -import collections +import warnings -InitInfo = collections.namedtuple( - "InitInfo", - ["default_factory", "lifetime"] +from ayon_core.lib import CacheItem as _CacheItem +from ayon_core.lib import NestedCacheItem as _NestedCacheItem + + +# Cache classes were moved to `ayon_core.lib.cache` +class CacheItem(_CacheItem): + def __init__(self, *args, **kwargs): + warnings.warn( + "Used 'CacheItem' from deprecated location " + "'ayon_core.tools.common_models', use 'ayon_core.lib' instead.", + DeprecationWarning, + ) + super().__init__(*args, **kwargs) + + +class NestedCacheItem(_NestedCacheItem): + def __init__(self, *args, **kwargs): + warnings.warn( + "Used 'NestedCacheItem' from deprecated location " + "'ayon_core.tools.common_models', use 'ayon_core.lib' instead.", + DeprecationWarning, + ) + super().__init__(*args, **kwargs) + + +__all__ = ( + "CacheItem", + "NestedCacheItem", ) - - -def _default_factory_func(): - return None - - -class CacheItem: - """Simple cache item with lifetime and default value. - - Args: - default_factory (Optional[callable]): Function that returns default - value used on init and on reset. - lifetime (Optional[int]): Lifetime of the cache data in seconds. - """ - - def __init__(self, default_factory=None, lifetime=None): - if lifetime is None: - lifetime = 120 - self._lifetime = lifetime - self._last_update = None - if default_factory is None: - default_factory = _default_factory_func - self._default_factory = default_factory - self._data = default_factory() - - @property - def is_valid(self): - """Is cache valid to use. - - Return: - bool: True if cache is valid, False otherwise. - """ - - if self._last_update is None: - return False - - return (time.time() - self._last_update) < self._lifetime - - def set_lifetime(self, lifetime): - """Change lifetime of cache item. - - Args: - lifetime (int): Lifetime of the cache data in seconds. - """ - - self._lifetime = lifetime - - def set_invalid(self): - """Set cache as invalid.""" - - self._last_update = None - - def reset(self): - """Set cache as invalid and reset data.""" - - self._last_update = None - self._data = self._default_factory() - - def get_data(self): - """Receive cached data. - - Returns: - Any: Any data that are cached. - """ - - return self._data - - def update_data(self, data): - self._data = data - self._last_update = time.time() - - -class NestedCacheItem: - """Helper for cached items stored in nested structure. 
- - Example: - >>> cache = NestedCacheItem(levels=2, default_factory=lambda: 0) - >>> cache["a"]["b"].is_valid - False - >>> cache["a"]["b"].get_data() - 0 - >>> cache["a"]["b"] = 1 - >>> cache["a"]["b"].is_valid - True - >>> cache["a"]["b"].get_data() - 1 - >>> cache.reset() - >>> cache["a"]["b"].is_valid - False - - Args: - levels (int): Number of nested levels where read cache is stored. - default_factory (Optional[callable]): Function that returns default - value used on init and on reset. - lifetime (Optional[int]): Lifetime of the cache data in seconds. - _init_info (Optional[InitInfo]): Private argument. Init info for - nested cache where created from parent item. - """ - - def __init__( - self, levels=1, default_factory=None, lifetime=None, _init_info=None - ): - if levels < 1: - raise ValueError("Nested levels must be greater than 0") - self._data_by_key = {} - if _init_info is None: - _init_info = InitInfo(default_factory, lifetime) - self._init_info = _init_info - self._levels = levels - - def __getitem__(self, key): - """Get cached data. - - Args: - key (str): Key of the cache item. - - Returns: - Union[NestedCacheItem, CacheItem]: Cache item. - """ - - cache = self._data_by_key.get(key) - if cache is None: - if self._levels > 1: - cache = NestedCacheItem( - levels=self._levels - 1, - _init_info=self._init_info - ) - else: - cache = CacheItem( - self._init_info.default_factory, - self._init_info.lifetime - ) - self._data_by_key[key] = cache - return cache - - def __setitem__(self, key, value): - """Update cached data. - - Args: - key (str): Key of the cache item. - value (Any): Any data that are cached. - """ - - if self._levels > 1: - raise AttributeError(( - "{} does not support '__setitem__'. Lower nested level by {}" - ).format(self.__class__.__name__, self._levels - 1)) - cache = self[key] - cache.update_data(value) - - def get(self, key): - """Get cached data. - - Args: - key (str): Key of the cache item. - - Returns: - Union[NestedCacheItem, CacheItem]: Cache item. - """ - - return self[key] - - def cached_count(self): - """Amount of cached items. - - Returns: - int: Amount of cached items. - """ - - return len(self._data_by_key) - - def clear_key(self, key): - """Clear cached item by key. - - Args: - key (str): Key of the cache item. - """ - - self._data_by_key.pop(key, None) - - def clear_invalid(self): - """Clear all invalid cache items. - - Note: - To clear all cache items use 'reset'. - """ - - changed = {} - children_are_nested = self._levels > 1 - for key, cache in tuple(self._data_by_key.items()): - if children_are_nested: - output = cache.clear_invalid() - if output: - changed[key] = output - if not cache.cached_count(): - self._data_by_key.pop(key) - elif not cache.is_valid: - changed[key] = cache.get_data() - self._data_by_key.pop(key) - return changed - - def reset(self): - """Reset cache. - - Note: - To clear only invalid cache items use 'clear_invalid'. - """ - - self._data_by_key = {} - - def set_lifetime(self, lifetime): - """Change lifetime of all children cache items. - - Args: - lifetime (int): Lifetime of the cache data in seconds. - """ - - self._init_info.lifetime = lifetime - for cache in self._data_by_key.values(): - cache.set_lifetime(lifetime) - - @property - def is_valid(self): - """Raise reasonable error when called on wront level. - - Raises: - AttributeError: If called on nested cache item. - """ - - raise AttributeError(( - "{} does not support 'is_valid'. 
Lower nested level by '{}'" - ).format(self.__class__.__name__, self._levels)) diff --git a/client/ayon_core/tools/common_models/hierarchy.py b/client/ayon_core/tools/common_models/hierarchy.py index d8b28f020d..78b8a7f492 100644 --- a/client/ayon_core/tools/common_models/hierarchy.py +++ b/client/ayon_core/tools/common_models/hierarchy.py @@ -6,8 +6,7 @@ import ayon_api import six from ayon_core.style import get_default_entity_icon_color - -from .cache import NestedCacheItem +from ayon_core.lib import NestedCacheItem HIERARCHY_MODEL_SENDER = "hierarchy.model" diff --git a/client/ayon_core/tools/common_models/projects.py b/client/ayon_core/tools/common_models/projects.py index e30561000e..19a38bee21 100644 --- a/client/ayon_core/tools/common_models/projects.py +++ b/client/ayon_core/tools/common_models/projects.py @@ -5,8 +5,7 @@ import ayon_api import six from ayon_core.style import get_default_entity_icon_color - -from .cache import CacheItem +from ayon_core.lib import CacheItem PROJECTS_MODEL_SENDER = "projects.model" diff --git a/client/ayon_core/tools/common_models/thumbnails.py b/client/ayon_core/tools/common_models/thumbnails.py index 1c3aadc49f..6d14783b9a 100644 --- a/client/ayon_core/tools/common_models/thumbnails.py +++ b/client/ayon_core/tools/common_models/thumbnails.py @@ -5,7 +5,7 @@ import collections import ayon_api import appdirs -from .cache import NestedCacheItem +from ayon_core.lib import NestedCacheItem FileInfo = collections.namedtuple( "FileInfo", diff --git a/client/ayon_core/tools/loader/models/actions.py b/client/ayon_core/tools/loader/models/actions.py index ad2993af50..cfe91cadab 100644 --- a/client/ayon_core/tools/loader/models/actions.py +++ b/client/ayon_core/tools/loader/models/actions.py @@ -6,6 +6,7 @@ import uuid import ayon_api +from ayon_core.lib import NestedCacheItem from ayon_core.pipeline.load import ( discover_loader_plugins, ProductLoaderPlugin, @@ -17,7 +18,6 @@ from ayon_core.pipeline.load import ( LoadError, IncompatibleLoaderError, ) -from ayon_core.tools.common_models import NestedCacheItem from ayon_core.tools.loader.abstract import ActionItem ACTIONS_MODEL_SENDER = "actions.model" diff --git a/client/ayon_core/tools/loader/models/products.py b/client/ayon_core/tools/loader/models/products.py index 812446a012..a3bbc30a09 100644 --- a/client/ayon_core/tools/loader/models/products.py +++ b/client/ayon_core/tools/loader/models/products.py @@ -5,8 +5,8 @@ import arrow import ayon_api from ayon_api.operations import OperationsSession +from ayon_core.lib import NestedCacheItem from ayon_core.style import get_default_entity_icon_color -from ayon_core.tools.common_models import NestedCacheItem from ayon_core.tools.loader.abstract import ( ProductTypeItem, ProductItem, diff --git a/client/ayon_core/tools/loader/models/sitesync.py b/client/ayon_core/tools/loader/models/sitesync.py index 987510905b..02504c2ad3 100644 --- a/client/ayon_core/tools/loader/models/sitesync.py +++ b/client/ayon_core/tools/loader/models/sitesync.py @@ -2,9 +2,8 @@ import collections from ayon_api import get_representations, get_versions_links -from ayon_core.lib import Logger +from ayon_core.lib import Logger, NestedCacheItem from ayon_core.addon import AddonsManager -from ayon_core.tools.common_models import NestedCacheItem from ayon_core.tools.loader.abstract import ActionItem DOWNLOAD_IDENTIFIER = "sitesync.download" diff --git a/pyproject.toml b/pyproject.toml index c1f6ddfb0b..4726bef41a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -106,7 +106,7 @@ line-ending = 
"auto" [tool.codespell] # Ignore words that are not in the dictionary. -ignore-words-list = "ayon,ynput,parms,parm,hda,developpement" +ignore-words-list = "ayon,ynput,parms,parm,hda,developpement,ue" skip = "./.*,./package/*,*/vendor/*,*/unreal/integration/*,*/aftereffects/api/extension/js/libs/*" count = true diff --git a/server_addon/applications/client/ayon_applications/addon.py b/server_addon/applications/client/ayon_applications/addon.py index 0f1b68af0e..a8eaa46cad 100644 --- a/server_addon/applications/client/ayon_applications/addon.py +++ b/server_addon/applications/client/ayon_applications/addon.py @@ -110,6 +110,26 @@ class ApplicationsAddon(AYONAddon, IPluginPaths): ] } + def launch_application( + self, app_name, project_name, folder_path, task_name + ): + """Launch application. + + Args: + app_name (str): Full application name e.g. 'maya/2024'. + project_name (str): Project name. + folder_path (str): Folder path. + task_name (str): Task name. + + """ + app_manager = self.get_applications_manager() + return app_manager.launch( + app_name, + project_name=project_name, + folder_path=folder_path, + task_name=task_name, + ) + # --- CLI --- def cli(self, addon_click_group): main_group = click_wrap.group( @@ -134,6 +154,17 @@ class ApplicationsAddon(AYONAddon, IPluginPaths): default=None ) ) + ( + main_group.command( + self._cli_launch_applications, + name="launch", + help="Launch application" + ) + .option("--app", required=True, help="Application name") + .option("--project", required=True, help="Project name") + .option("--folder", required=True, help="Folder path") + .option("--task", required=True, help="Task name") + ) # Convert main command to click object and add it to parent group addon_click_group.add_command( main_group.to_click_obj() @@ -171,3 +202,15 @@ class ApplicationsAddon(AYONAddon, IPluginPaths): with open(output_json_path, "w") as file_stream: json.dump(env, file_stream, indent=4) + + def _cli_launch_applications(self, project, folder, task, app): + """Launch application. + + Args: + project (str): Project name. + folder (str): Folder path. + task (str): Task name. + app (str): Full application name e.g. 'maya/2024'. 
+ + """ + self.launch_application(app, project, folder, task) diff --git a/server_addon/applications/package.py b/server_addon/applications/package.py index 43a301b7c2..500f609fc6 100644 --- a/server_addon/applications/package.py +++ b/server_addon/applications/package.py @@ -1,6 +1,6 @@ name = "applications" title = "Applications" -version = "0.2.0" +version = "0.2.1" ayon_server_version = ">=1.0.7" ayon_launcher_version = ">=1.0.2" diff --git a/server_addon/applications/server/applications.json b/server_addon/applications/server/applications.json index e4b72fdff9..84b7fa33cf 100644 --- a/server_addon/applications/server/applications.json +++ b/server_addon/applications/server/applications.json @@ -1271,6 +1271,28 @@ } ] }, + "equalizer": { + "enabled": true, + "label": "3DEqualizer", + "icon": "{}/app_icons/3de4.png", + "host_name": "equalizer", + "environment": "{}", + "variants": [ + { + "name": "7-1v2", + "label": "7.1v2", + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\3DE4_win64_r7.1v2\\bin\\3DE4.exe" + ], + "darwin": [], + "linux": [] + }, + "environment": "{}" + } + ] + }, "additional_apps": [] } } diff --git a/server_addon/applications/server/settings.py b/server_addon/applications/server/settings.py index 5743e9f471..b77686cee0 100644 --- a/server_addon/applications/server/settings.py +++ b/server_addon/applications/server/settings.py @@ -190,6 +190,8 @@ class ApplicationsSettings(BaseSettingsModel): default_factory=AppGroupWithPython, title="OpenRV") zbrush: AppGroup = SettingsField( default_factory=AppGroupWithPython, title="Zbrush") + equalizer: AppGroup = SettingsField( + default_factory=AppGroupWithPython, title="3DEqualizer") additional_apps: list[AdditionalAppGroup] = SettingsField( default_factory=list, title="Additional Applications") diff --git a/server_addon/deadline/package.py b/server_addon/deadline/package.py index 944797fea6..25ba1c1166 100644 --- a/server_addon/deadline/package.py +++ b/server_addon/deadline/package.py @@ -1,3 +1,3 @@ name = "deadline" title = "Deadline" -version = "0.1.10" +version = "0.1.11" diff --git a/server_addon/deadline/server/settings/publish_plugins.py b/server_addon/deadline/server/settings/publish_plugins.py index 9f69143e37..784ad2560b 100644 --- a/server_addon/deadline/server/settings/publish_plugins.py +++ b/server_addon/deadline/server/settings/publish_plugins.py @@ -191,7 +191,6 @@ class NukeSubmitDeadlineModel(BaseSettingsModel): @validator( "limit_groups", - "env_allowed_keys", "env_search_replace_values") def validate_unique_names(cls, value): ensure_unique_names(value) diff --git a/server_addon/maya/server/settings/publishers.py b/server_addon/maya/server/settings/publishers.py index 8dcffbb59a..bc38d5f746 100644 --- a/server_addon/maya/server/settings/publishers.py +++ b/server_addon/maya/server/settings/publishers.py @@ -229,7 +229,7 @@ class ValidateAttributesModel(BaseSettingsModel): if not success: raise BadRequestException( - "The attibutes can't be parsed as json object" + "The attributes can't be parsed as json object" ) return value @@ -265,7 +265,7 @@ class ValidateUnrealStaticMeshNameModel(BaseSettingsModel): enabled: bool = SettingsField(title="ValidateUnrealStaticMeshName") optional: bool = SettingsField(title="Optional") validate_mesh: bool = SettingsField(title="Validate mesh names") - validate_collision: bool = SettingsField(title="Validate collison names") + validate_collision: bool = SettingsField(title="Validate collision names") class 
ValidateCycleErrorModel(BaseSettingsModel): @@ -288,7 +288,7 @@ class ValidatePluginPathAttributesModel(BaseSettingsModel): and the node attribute is abc_file """ - enabled: bool = True + enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") attribute: list[ValidatePluginPathAttributesAttrModel] = SettingsField( @@ -310,6 +310,9 @@ class RendererAttributesModel(BaseSettingsModel): class ValidateRenderSettingsModel(BaseSettingsModel): + enabled: bool = SettingsField(title="Enabled") + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") arnold_render_attributes: list[RendererAttributesModel] = SettingsField( default_factory=list, title="Arnold Render Attributes") vray_render_attributes: list[RendererAttributesModel] = SettingsField( @@ -613,7 +616,7 @@ class ExtractGPUCacheModel(BaseSettingsModel): title="Optimize Animations For Motion Blur" ) writeMaterials: bool = SettingsField(title="Write Materials") - useBaseTessellation: bool = SettingsField(title="User Base Tesselation") + useBaseTessellation: bool = SettingsField(title="User Based Tessellation") class PublishersModel(BaseSettingsModel): @@ -1171,6 +1174,9 @@ DEFAULT_PUBLISH_SETTINGS = { ] }, "ValidateRenderSettings": { + "enabled": True, + "active": True, + "optional": False, "arnold_render_attributes": [], "vray_render_attributes": [], "redshift_render_attributes": [],
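
A minimal usage sketch of the CacheItem/NestedCacheItem helpers relocated into ayon_core/lib/cache.py above, mirroring the 'Anatomy._project_cache' pattern from this changeset. The 'fetch_project' function and the project name are illustrative stand-ins, not part of the diff.

from ayon_core.lib import CacheItem, NestedCacheItem


def fetch_project(project_name):
    # Illustrative stand-in for an expensive lookup such as
    # 'ayon_api.get_project(project_name)'.
    return {"name": project_name}


class ProjectCache:
    # One cache item per project name, refreshed at most every 10 seconds,
    # mirroring 'Anatomy._project_cache' in the hunk above. A flat
    # 'CacheItem(lifetime=...)' works the same way without the key level.
    _project_cache = NestedCacheItem(lifetime=10)

    @classmethod
    def get_project(cls, project_name):
        cache = cls._project_cache[project_name]
        if not cache.is_valid:
            cache.update_data(fetch_project(project_name))
        return cache.get_data()


print(ProjectCache.get_project("demo_Commercial"))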
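
A hedged sketch of calling the new 'ApplicationsAddon.launch_application' API added above. Resolving the addon through 'AddonsManager().get_enabled_addon(...)' follows the pattern used elsewhere in this diff (see 'Anatomy.get_sitesync_addon'); the project, folder and task values are placeholders.

from ayon_core.addon import AddonsManager

addon = AddonsManager().get_enabled_addon("applications")
if addon is not None:
    addon.launch_application(
        "maya/2024",                  # full application name, e.g. 'maya/2024'
        project_name="MyProject",     # placeholder values for illustration
        folder_path="/shots/sh0010",
        task_name="animation",
    )

The same operation is exposed on the addon CLI group through the new 'launch' command with the --app, --project, --folder and --task options introduced in this changeset.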