From f1aed92d795b92ac2ec15a52e91a9ae37535da87 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 5 Jul 2024 15:21:41 +0200 Subject: [PATCH 01/28] Enable asset contributions to write AYON Entity URIs --- .../publish/extract_usd_layer_contributions.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py b/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py index 162b7d3d41..58cffcd59a 100644 --- a/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py +++ b/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py @@ -561,6 +561,8 @@ class ExtractUSDLayerContribution(publish.Extractor): label = "Extract USD Layer Contributions (Asset/Shot)" order = pyblish.api.ExtractorOrder + 0.45 + use_ayon_entity_uri = True + def process(self, instance): folder_path = instance.data["folderPath"] @@ -578,7 +580,8 @@ class ExtractUSDLayerContribution(publish.Extractor): contributions = instance.data.get("usd_contributions", []) for contribution in sorted(contributions, key=attrgetter("order")): - path = get_instance_uri_path(contribution.instance) + path = get_instance_uri_path(contribution.instance, + resolve=not self.use_ayon_entity_uri) if isinstance(contribution, VariantContribution): # Add contribution as a reference inside a variant self.log.debug(f"Adding variant: {contribution}") @@ -720,6 +723,8 @@ class ExtractUSDAssetContribution(publish.Extractor): label = "Extract USD Asset/Shot Contributions" order = ExtractUSDLayerContribution.order + 0.01 + use_ayon_entity_uri = True + def process(self, instance): folder_path = instance.data["folderPath"] @@ -795,15 +800,15 @@ class ExtractUSDAssetContribution(publish.Extractor): layer_id = layer_instance.data["usd_layer_id"] order = layer_instance.data["usd_layer_order"] - path = get_instance_uri_path(instance=layer_instance) + path = get_instance_uri_path(instance=layer_instance, + resolve=not self.use_ayon_entity_uri) add_ordered_sublayer(target_layer, contribution_path=path, layer_id=layer_id, order=order, # Add the sdf argument metadata which allows # us to later detect whether another path - # has the same layer id, so we can replace it - # it. + # has the same layer id, so we can replace it. add_sdf_arguments_metadata=True) # Save the file From 104e03b13d2185b97ac926992abb4f13ac9756b8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 5 Jul 2024 15:59:55 +0200 Subject: [PATCH 02/28] Add toggle to settings --- server/settings/publish_plugins.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index 36bb3f7340..930fa9d0a3 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -84,6 +84,17 @@ class CollectUSDLayerContributionsModel(BaseSettingsModel): return value +class AyonEntityURIModel(BaseSettingsModel): + use_ayon_entity_uri: bool = SettingsField( + title="Use AYON Entity URI", + description=( + "When enabled the USD paths written using the contribution " + "workflow will use ayon entity URIs instead of resolved published " + "paths. You can only load these if you use the AYON USD Resolver." 
+ ) + ) + + class PluginStateByHostModelProfile(BaseSettingsModel): _layout = "expanded" # Filtering @@ -857,6 +868,14 @@ class PublishPuginsModel(BaseSettingsModel): default_factory=ExtractBurninModel, title="Extract Burnin" ) + ExtractUSDAssetContribution: AyonEntityURIModel = SettingsField( + default_factory=AyonEntityURIModel, + title="Extract USD Asset Contribution", + ) + ExtractUSDLayerContribution: AyonEntityURIModel = SettingsField( + default_factory=AyonEntityURIModel, + title="Extract USD Layer Contribution", + ) PreIntegrateThumbnails: PreIntegrateThumbnailsModel = SettingsField( default_factory=PreIntegrateThumbnailsModel, title="Override Integrate Thumbnail Representations" @@ -1161,6 +1180,12 @@ DEFAULT_PUBLISH_VALUES = { } ] }, + "ExtractUSDAssetContribution": { + "use_ayon_entity_uri": True, + }, + "ExtractUSDLayerContribution": { + "use_ayon_entity_uri": True, + }, "PreIntegrateThumbnails": { "enabled": True, "integrate_profiles": [] From 86066660e854011d839a2412f1868b32a0e3b8cf Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 5 Jul 2024 16:00:15 +0200 Subject: [PATCH 03/28] Change default state to False --- server/settings/publish_plugins.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index 930fa9d0a3..c1c6bc42a5 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -1181,10 +1181,10 @@ DEFAULT_PUBLISH_VALUES = { ] }, "ExtractUSDAssetContribution": { - "use_ayon_entity_uri": True, + "use_ayon_entity_uri": False, }, "ExtractUSDLayerContribution": { - "use_ayon_entity_uri": True, + "use_ayon_entity_uri": False, }, "PreIntegrateThumbnails": { "enabled": True, From db069448df569a165fd3521a4a4353d83bf4862d Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 16 Jul 2024 16:59:43 +0200 Subject: [PATCH 04/28] python module tools do not support python 2 anymore --- client/ayon_core/lib/python_module_tools.py | 137 ++++++-------------- 1 file changed, 38 insertions(+), 99 deletions(-) diff --git a/client/ayon_core/lib/python_module_tools.py b/client/ayon_core/lib/python_module_tools.py index cb6e4c14c4..d146e069a9 100644 --- a/client/ayon_core/lib/python_module_tools.py +++ b/client/ayon_core/lib/python_module_tools.py @@ -5,43 +5,30 @@ import importlib import inspect import logging -import six - log = logging.getLogger(__name__) def import_filepath(filepath, module_name=None): """Import python file as python module. - Python 2 and Python 3 compatibility. - Args: - filepath(str): Path to python file. - module_name(str): Name of loaded module. Only for Python 3. By default + filepath (str): Path to python file. + module_name (str): Name of loaded module. Only for Python 3. By default is filled with filename of filepath. 
+ """ if module_name is None: module_name = os.path.splitext(os.path.basename(filepath))[0] - # Make sure it is not 'unicode' in Python 2 - module_name = str(module_name) - # Prepare module object where content of file will be parsed module = types.ModuleType(module_name) module.__file__ = filepath - if six.PY3: - # Use loader so module has full specs - module_loader = importlib.machinery.SourceFileLoader( - module_name, filepath - ) - module_loader.exec_module(module) - else: - # Execute module code and store content to module - with open(filepath) as _stream: - # Execute content and store it to module object - six.exec_(_stream.read(), module.__dict__) - + # Use loader so module has full specs + module_loader = importlib.machinery.SourceFileLoader( + module_name, filepath + ) + module_loader.exec_module(module) return module @@ -139,35 +126,31 @@ def classes_from_module(superclass, module): return classes -def _import_module_from_dirpath_py2(dirpath, module_name, dst_module_name): - """Import passed dirpath as python module using `imp`.""" +def import_module_from_dirpath(dirpath, folder_name, dst_module_name=None): + """Import passed directory as a python module. + + Imported module can be assigned as a child attribute of already loaded + module from `sys.modules` if has support of `setattr`. That is not default + behavior of python modules so parent module must be a custom module with + that ability. + + It is not possible to reimport already cached module. If you need to + reimport module you have to remove it from caches manually. + + Args: + dirpath (str): Parent directory path of loaded folder. + folder_name (str): Folder name which should be imported inside passed + directory. + dst_module_name (str): Parent module name under which can be loaded + module added. + + """ + # Import passed dirpath as python module if dst_module_name: - full_module_name = "{}.{}".format(dst_module_name, module_name) + full_module_name = "{}.{}".format(dst_module_name, folder_name) dst_module = sys.modules[dst_module_name] else: - full_module_name = module_name - dst_module = None - - if full_module_name in sys.modules: - return sys.modules[full_module_name] - - import imp - - fp, pathname, description = imp.find_module(module_name, [dirpath]) - module = imp.load_module(full_module_name, fp, pathname, description) - if dst_module is not None: - setattr(dst_module, module_name, module) - - return module - - -def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name): - """Import passed dirpath as python module using Python 3 modules.""" - if dst_module_name: - full_module_name = "{}.{}".format(dst_module_name, module_name) - dst_module = sys.modules[dst_module_name] - else: - full_module_name = module_name + full_module_name = folder_name dst_module = None # Skip import if is already imported @@ -191,7 +174,7 @@ def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name): # Store module to destination module and `sys.modules` # WARNING this mus be done before module execution if dst_module is not None: - setattr(dst_module, module_name, module) + setattr(dst_module, folder_name, module) sys.modules[full_module_name] = module @@ -201,37 +184,6 @@ def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name): return module -def import_module_from_dirpath(dirpath, folder_name, dst_module_name=None): - """Import passed directory as a python module. - - Python 2 and 3 compatible. 
- - Imported module can be assigned as a child attribute of already loaded - module from `sys.modules` if has support of `setattr`. That is not default - behavior of python modules so parent module must be a custom module with - that ability. - - It is not possible to reimport already cached module. If you need to - reimport module you have to remove it from caches manually. - - Args: - dirpath(str): Parent directory path of loaded folder. - folder_name(str): Folder name which should be imported inside passed - directory. - dst_module_name(str): Parent module name under which can be loaded - module added. - """ - if six.PY3: - module = _import_module_from_dirpath_py3( - dirpath, folder_name, dst_module_name - ) - else: - module = _import_module_from_dirpath_py2( - dirpath, folder_name, dst_module_name - ) - return module - - def is_func_signature_supported(func, *args, **kwargs): """Check if a function signature supports passed args and kwargs. @@ -275,25 +227,12 @@ def is_func_signature_supported(func, *args, **kwargs): Returns: bool: Function can pass in arguments. + """ - - if hasattr(inspect, "signature"): - # Python 3 using 'Signature' object where we try to bind arg - # or kwarg. Using signature is recommended approach based on - # documentation. - sig = inspect.signature(func) - try: - sig.bind(*args, **kwargs) - return True - except TypeError: - pass - - else: - # In Python 2 'signature' is not available so 'getcallargs' is used - # - 'getcallargs' is marked as deprecated since Python 3.0 - try: - inspect.getcallargs(func, *args, **kwargs) - return True - except TypeError: - pass + sig = inspect.signature(func) + try: + sig.bind(*args, **kwargs) + return True + except TypeError: + pass return False From 6e7d6201c969b8f3147db9f724efd03c836cf17a Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 16 Jul 2024 17:00:15 +0200 Subject: [PATCH 05/28] use WeakMethod from weakref --- client/ayon_core/lib/events.py | 3 +- client/ayon_core/lib/python_2_comp.py | 53 +++++++-------------------- 2 files changed, 14 insertions(+), 42 deletions(-) diff --git a/client/ayon_core/lib/events.py b/client/ayon_core/lib/events.py index 774790b80a..9a3d1edfd4 100644 --- a/client/ayon_core/lib/events.py +++ b/client/ayon_core/lib/events.py @@ -8,7 +8,6 @@ import logging import weakref from uuid import uuid4 -from .python_2_comp import WeakMethod from .python_module_tools import is_func_signature_supported @@ -18,7 +17,7 @@ class MissingEventSystem(Exception): def _get_func_ref(func): if inspect.ismethod(func): - return WeakMethod(func) + return weakref.WeakMethod(func) return weakref.ref(func) diff --git a/client/ayon_core/lib/python_2_comp.py b/client/ayon_core/lib/python_2_comp.py index 091c51a6f6..900db59062 100644 --- a/client/ayon_core/lib/python_2_comp.py +++ b/client/ayon_core/lib/python_2_comp.py @@ -1,44 +1,17 @@ +# Deprecated file +# - the file container 'WeakMethod' implementation for Python 2 which is not +# needed anymore. 
+import warnings import weakref -WeakMethod = getattr(weakref, "WeakMethod", None) +WeakMethod = weakref.WeakMethod -if WeakMethod is None: - class _WeakCallable: - def __init__(self, obj, func): - self.im_self = obj - self.im_func = func - - def __call__(self, *args, **kws): - if self.im_self is None: - return self.im_func(*args, **kws) - else: - return self.im_func(self.im_self, *args, **kws) - - - class WeakMethod: - """ Wraps a function or, more importantly, a bound method in - a way that allows a bound method's object to be GCed, while - providing the same interface as a normal weak reference. """ - - def __init__(self, fn): - try: - self._obj = weakref.ref(fn.im_self) - self._meth = fn.im_func - except AttributeError: - # It's not a bound method - self._obj = None - self._meth = fn - - def __call__(self): - if self._dead(): - return None - return _WeakCallable(self._getobj(), self._meth) - - def _dead(self): - return self._obj is not None and self._obj() is None - - def _getobj(self): - if self._obj is None: - return None - return self._obj() +warnings.warn( + ( + "'ayon_core.lib.python_2_comp' is deprecated." + "Please use 'weakref.WeakMethod'." + ), + DeprecationWarning, + stacklevel=2 +) From e731dd7064a52035cfec4f8d233344507e2b3a6c Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 16 Jul 2024 17:00:30 +0200 Subject: [PATCH 06/28] don't handle py2 vs. py3 imports --- client/ayon_core/lib/local_settings.py | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/client/ayon_core/lib/local_settings.py b/client/ayon_core/lib/local_settings.py index 54432265d9..00e551d119 100644 --- a/client/ayon_core/lib/local_settings.py +++ b/client/ayon_core/lib/local_settings.py @@ -3,26 +3,10 @@ import os import json import platform +import configparser from datetime import datetime from abc import ABC, abstractmethod - -# disable lru cache in Python 2 -try: - from functools import lru_cache -except ImportError: - def lru_cache(maxsize): - def max_size(func): - def wrapper(*args, **kwargs): - value = func(*args, **kwargs) - return value - return wrapper - return max_size - -# ConfigParser was renamed in python3 to configparser -try: - import configparser -except ImportError: - import ConfigParser as configparser +from functools import lru_cache import appdirs import ayon_api From 11641c996e880d555e12d6320462d8ed11350b68 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 16 Jul 2024 17:05:22 +0200 Subject: [PATCH 07/28] do not inherit from object by default --- client/ayon_core/lib/attribute_definitions.py | 2 +- client/ayon_core/lib/events.py | 7 +++---- client/ayon_core/lib/file_transaction.py | 2 +- client/ayon_core/lib/path_templates.py | 4 ++-- 4 files changed, 7 insertions(+), 8 deletions(-) diff --git a/client/ayon_core/lib/attribute_definitions.py b/client/ayon_core/lib/attribute_definitions.py index 360d47ea17..7e022f6dba 100644 --- a/client/ayon_core/lib/attribute_definitions.py +++ b/client/ayon_core/lib/attribute_definitions.py @@ -577,7 +577,7 @@ class BoolDef(AbstractAttrDef): return self.default -class FileDefItem(object): +class FileDefItem: def __init__( self, directory, filenames, frames=None, template=None ): diff --git a/client/ayon_core/lib/events.py b/client/ayon_core/lib/events.py index 9a3d1edfd4..2601bc1cf4 100644 --- a/client/ayon_core/lib/events.py +++ b/client/ayon_core/lib/events.py @@ -122,7 +122,7 @@ class weakref_partial: ) -class 
EventCallback(object): +class EventCallback: """Callback registered to a topic. The callback function is registered to a topic. Topic is a string which @@ -379,8 +379,7 @@ class EventCallback(object): self._partial_func = None -# Inherit from 'object' for Python 2 hosts -class Event(object): +class Event: """Base event object. Can be used for any event because is not specific. Only required argument @@ -487,7 +486,7 @@ class Event(object): return obj -class EventSystem(object): +class EventSystem: """Encapsulate event handling into an object. System wraps registered callbacks and triggered events into single object, diff --git a/client/ayon_core/lib/file_transaction.py b/client/ayon_core/lib/file_transaction.py index 47b10dd994..a502403958 100644 --- a/client/ayon_core/lib/file_transaction.py +++ b/client/ayon_core/lib/file_transaction.py @@ -22,7 +22,7 @@ class DuplicateDestinationError(ValueError): """ -class FileTransaction(object): +class FileTransaction: """File transaction with rollback options. The file transaction is a three-step process. diff --git a/client/ayon_core/lib/path_templates.py b/client/ayon_core/lib/path_templates.py index 01a6985a25..ccd36796c1 100644 --- a/client/ayon_core/lib/path_templates.py +++ b/client/ayon_core/lib/path_templates.py @@ -38,7 +38,7 @@ class TemplateUnsolved(Exception): ) -class StringTemplate(object): +class StringTemplate: """String that can be formatted.""" def __init__(self, template): if not isinstance(template, str): @@ -410,7 +410,7 @@ class TemplatePartResult: self._invalid_types[key] = type(value) -class FormatObject(object): +class FormatObject: """Object that can be used for formatting. This is base that is valid for to be used in 'StringTemplate' value. From b281d5be049406574efa9d6ff3632ca775824e3a Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 23 Jul 2024 17:21:45 +0200 Subject: [PATCH 08/28] don't crash the plugin file because of missing functions --- .../extract_usd_layer_contributions.py | 44 +++++++++++++------ 1 file changed, 30 insertions(+), 14 deletions(-) diff --git a/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py b/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py index 162b7d3d41..dbd26c24c9 100644 --- a/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py +++ b/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py @@ -4,7 +4,10 @@ import os from typing import Dict import pyblish.api -from pxr import Sdf +try: + from pxr import Sdf +except ImportError: + Sdf = None from ayon_core.lib import ( TextDef, @@ -13,21 +16,24 @@ from ayon_core.lib import ( UILabelDef, EnumDef ) -from ayon_core.pipeline.usdlib import ( - get_or_define_prim_spec, - add_ordered_reference, - variant_nested_prim_path, - setup_asset_layer, - add_ordered_sublayer, - set_layer_defaults -) +try: + from ayon_core.pipeline.usdlib import ( + get_or_define_prim_spec, + add_ordered_reference, + variant_nested_prim_path, + setup_asset_layer, + add_ordered_sublayer, + set_layer_defaults + ) +except ImportError: + pass from ayon_core.pipeline.entity_uri import ( construct_ayon_entity_uri, parse_ayon_entity_uri ) from ayon_core.pipeline.load.utils import get_representation_path_by_names from ayon_core.pipeline.publish.lib import get_instance_expected_output_path -from ayon_core.pipeline import publish +from ayon_core.pipeline import publish, KnownPublishError # This global toggle is here mostly for debugging purposes and should usually @@ -555,6 
+561,16 @@ class CollectUSDLayerContributionsHoudiniLook(CollectUSDLayerContributions): return defs +class ValidateUSDDependencies(pyblish.api.InstancePlugin): + families = ["usdLayer"] + + order = pyblish.api.ValidatorOrder + + def process(self, instance): + if Sdf is None: + raise KnownPublishError("USD library 'Sdf' is not available.") + + class ExtractUSDLayerContribution(publish.Extractor): families = ["usdLayer"] @@ -652,14 +668,14 @@ class ExtractUSDLayerContribution(publish.Extractor): ) def remove_previous_reference_contribution(self, - prim_spec: Sdf.PrimSpec, + prim_spec: "Sdf.PrimSpec", instance: pyblish.api.Instance): # Remove existing contributions of the same product - ignoring # the picked version and representation. We assume there's only ever # one version of a product you want to have referenced into a Prim. remove_indices = set() for index, ref in enumerate(prim_spec.referenceList.prependedItems): - ref: Sdf.Reference # type hint + ref: "Sdf.Reference" uri = ref.customData.get("ayon_uri") if uri and self.instance_match_ayon_uri(instance, uri): @@ -674,8 +690,8 @@ class ExtractUSDLayerContribution(publish.Extractor): ] def add_reference_contribution(self, - layer: Sdf.Layer, - prim_path: Sdf.Path, + layer: "Sdf.Layer", + prim_path: "Sdf.Path", filepath: str, contribution: VariantContribution): instance = contribution.instance From 073ab83e78279d4bc51a09e452ff64403193fd1b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 26 Jul 2024 15:39:59 +0200 Subject: [PATCH 09/28] Write the version instead of "latest" into the AYON URI --- .../plugins/publish/extract_usd_layer_contributions.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py b/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py index 58cffcd59a..7ed129a127 100644 --- a/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py +++ b/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py @@ -138,13 +138,14 @@ def get_instance_uri_path( folder_path = instance.data["folderPath"] product_name = instance.data["productName"] project_name = context.data["projectName"] + version_name = instance.data["version"] # Get the layer's published path path = construct_ayon_entity_uri( project_name=project_name, folder_path=folder_path, product=product_name, - version="latest", + version=version_name, representation_name="usd" ) From fe8b57f1d3c98508a55e38e1d4bdfd5423889326 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:08:09 +0200 Subject: [PATCH 10/28] moved exceptions to single file --- client/ayon_core/pipeline/create/__init__.py | 16 ++- client/ayon_core/pipeline/create/context.py | 128 ++---------------- .../pipeline/create/creator_plugins.py | 10 -- .../ayon_core/pipeline/create/exceptions.py | 114 ++++++++++++++++ 4 files changed, 140 insertions(+), 128 deletions(-) create mode 100644 client/ayon_core/pipeline/create/exceptions.py diff --git a/client/ayon_core/pipeline/create/__init__.py b/client/ayon_core/pipeline/create/__init__.py index da9cafad5a..68e173d6b9 100644 --- a/client/ayon_core/pipeline/create/__init__.py +++ b/client/ayon_core/pipeline/create/__init__.py @@ -4,6 +4,20 @@ from .constants import ( PRE_CREATE_THUMBNAIL_KEY, DEFAULT_VARIANT_VALUE, ) +from .exceptions import ( + UnavailableSharedData, + ImmutableKeyError, + HostMissRequiredMethod, + ConvertorsOperationFailed, + ConvertorsFindFailed, + 
ConvertorsConversionFailed, + CreatorError, + CreatorsCreateFailed, + CreatorsCollectionFailed, + CreatorsSaveFailed, + CreatorsRemoveFailed, + CreatorsOperationFailed, +) from .utils import ( get_last_versions_for_instances, @@ -17,8 +31,6 @@ from .product_name import ( ) from .creator_plugins import ( - CreatorError, - BaseCreator, Creator, AutoCreator, diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 1c64d22733..0dd8ed1bd1 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -29,12 +29,23 @@ from ayon_core.pipeline import ( ) from ayon_core.pipeline.plugin_discover import DiscoverResult +from .exceptions import ( + CreatorError, + ImmutableKeyError, + CreatorsCreateFailed, + CreatorsCollectionFailed, + CreatorsSaveFailed, + CreatorsRemoveFailed, + ConvertorsFindFailed, + ConvertorsConversionFailed, + UnavailableSharedData, + HostMissRequiredMethod, +) from .creator_plugins import ( Creator, AutoCreator, discover_creator_plugins, discover_convertor_plugins, - CreatorError, ) # Changes of instances and context are send as tuple of 2 information @@ -42,68 +53,6 @@ UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) _NOT_SET = object() -class UnavailableSharedData(Exception): - """Shared data are not available at the moment when are accessed.""" - pass - - -class ImmutableKeyError(TypeError): - """Accessed key is immutable so does not allow changes or removals.""" - - def __init__(self, key, msg=None): - self.immutable_key = key - if not msg: - msg = "Key \"{}\" is immutable and does not allow changes.".format( - key - ) - super(ImmutableKeyError, self).__init__(msg) - - -class HostMissRequiredMethod(Exception): - """Host does not have implemented required functions for creation.""" - - def __init__(self, host, missing_methods): - self.missing_methods = missing_methods - self.host = host - joined_methods = ", ".join( - ['"{}"'.format(name) for name in missing_methods] - ) - dirpath = os.path.dirname( - os.path.normpath(inspect.getsourcefile(host)) - ) - dirpath_parts = dirpath.split(os.path.sep) - host_name = dirpath_parts.pop(-1) - if host_name == "api": - host_name = dirpath_parts.pop(-1) - - msg = "Host \"{}\" does not have implemented method/s {}".format( - host_name, joined_methods - ) - super(HostMissRequiredMethod, self).__init__(msg) - - -class ConvertorsOperationFailed(Exception): - def __init__(self, msg, failed_info): - super(ConvertorsOperationFailed, self).__init__(msg) - self.failed_info = failed_info - - -class ConvertorsFindFailed(ConvertorsOperationFailed): - def __init__(self, failed_info): - msg = "Failed to find incompatible products" - super(ConvertorsFindFailed, self).__init__( - msg, failed_info - ) - - -class ConvertorsConversionFailed(ConvertorsOperationFailed): - def __init__(self, failed_info): - msg = "Failed to convert incompatible products" - super(ConvertorsConversionFailed, self).__init__( - msg, failed_info - ) - - def prepare_failed_convertor_operation_info(identifier, exc_info): exc_type, exc_value, exc_traceback = exc_info formatted_traceback = "".join(traceback.format_exception( @@ -117,59 +66,6 @@ def prepare_failed_convertor_operation_info(identifier, exc_info): } -class CreatorsOperationFailed(Exception): - """Raised when a creator process crashes in 'CreateContext'. - - The exception contains information about the creator and error. 
The data - are prepared using 'prepare_failed_creator_operation_info' and can be - serialized using json. - - Usage is for UI purposes which may not have access to exceptions directly - and would not have ability to catch exceptions 'per creator'. - - Args: - msg (str): General error message. - failed_info (list[dict[str, Any]]): List of failed creators with - exception message and optionally formatted traceback. - """ - - def __init__(self, msg, failed_info): - super(CreatorsOperationFailed, self).__init__(msg) - self.failed_info = failed_info - - -class CreatorsCollectionFailed(CreatorsOperationFailed): - def __init__(self, failed_info): - msg = "Failed to collect instances" - super(CreatorsCollectionFailed, self).__init__( - msg, failed_info - ) - - -class CreatorsSaveFailed(CreatorsOperationFailed): - def __init__(self, failed_info): - msg = "Failed update instance changes" - super(CreatorsSaveFailed, self).__init__( - msg, failed_info - ) - - -class CreatorsRemoveFailed(CreatorsOperationFailed): - def __init__(self, failed_info): - msg = "Failed to remove instances" - super(CreatorsRemoveFailed, self).__init__( - msg, failed_info - ) - - -class CreatorsCreateFailed(CreatorsOperationFailed): - def __init__(self, failed_info): - msg = "Failed to create instances" - super(CreatorsCreateFailed, self).__init__( - msg, failed_info - ) - - def prepare_failed_creator_operation_info( identifier, label, exc_info, add_traceback=True ): diff --git a/client/ayon_core/pipeline/create/creator_plugins.py b/client/ayon_core/pipeline/create/creator_plugins.py index 624f1c9588..1e09eb62a1 100644 --- a/client/ayon_core/pipeline/create/creator_plugins.py +++ b/client/ayon_core/pipeline/create/creator_plugins.py @@ -26,16 +26,6 @@ if TYPE_CHECKING: from .context import CreateContext, CreatedInstance, UpdateData # noqa: F401 -class CreatorError(Exception): - """Should be raised when creator failed because of known issue. - - Message of error should be user readable. - """ - - def __init__(self, message): - super(CreatorError, self).__init__(message) - - class ProductConvertorPlugin(ABC): """Helper for conversion of instances created using legacy creators. 
diff --git a/client/ayon_core/pipeline/create/exceptions.py b/client/ayon_core/pipeline/create/exceptions.py new file mode 100644 index 0000000000..24264840cb --- /dev/null +++ b/client/ayon_core/pipeline/create/exceptions.py @@ -0,0 +1,114 @@ +import os +import inspect + + +class UnavailableSharedData(Exception): + """Shared data are not available at the moment when are accessed.""" + pass + + +class ImmutableKeyError(TypeError): + """Accessed key is immutable so does not allow changes or removals.""" + + def __init__(self, key, msg=None): + self.immutable_key = key + if not msg: + msg = "Key \"{}\" is immutable and does not allow changes.".format( + key + ) + super().__init__(msg) + + +class HostMissRequiredMethod(Exception): + """Host does not have implemented required functions for creation.""" + + def __init__(self, host, missing_methods): + self.missing_methods = missing_methods + self.host = host + joined_methods = ", ".join( + ['"{}"'.format(name) for name in missing_methods] + ) + dirpath = os.path.dirname( + os.path.normpath(inspect.getsourcefile(host)) + ) + dirpath_parts = dirpath.split(os.path.sep) + host_name = dirpath_parts.pop(-1) + if host_name == "api": + host_name = dirpath_parts.pop(-1) + + msg = "Host \"{}\" does not have implemented method/s {}".format( + host_name, joined_methods + ) + super().__init__(msg) + + +class ConvertorsOperationFailed(Exception): + def __init__(self, msg, failed_info): + super().__init__(msg) + self.failed_info = failed_info + + +class ConvertorsFindFailed(ConvertorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed to find incompatible products" + super().__init__(msg, failed_info) + + +class ConvertorsConversionFailed(ConvertorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed to convert incompatible products" + super().__init__(msg, failed_info) + + +class CreatorError(Exception): + """Should be raised when creator failed because of known issue. + + Message of error should be artist friendly. + """ + pass + + +class CreatorsOperationFailed(Exception): + """Raised when a creator process crashes in 'CreateContext'. + + The exception contains information about the creator and error. The data + are prepared using 'prepare_failed_creator_operation_info' and can be + serialized using json. + + Usage is for UI purposes which may not have access to exceptions directly + and would not have ability to catch exceptions 'per creator'. + + Args: + msg (str): General error message. + failed_info (list[dict[str, Any]]): List of failed creators with + exception message and optionally formatted traceback. 
+ """ + + def __init__(self, msg, failed_info): + super().__init__(msg) + self.failed_info = failed_info + + +class CreatorsCollectionFailed(CreatorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed to collect instances" + super().__init__(msg, failed_info) + + +class CreatorsSaveFailed(CreatorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed update instance changes" + super().__init__(msg, failed_info) + + +class CreatorsRemoveFailed(CreatorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed to remove instances" + super().__init__(msg, failed_info) + + +class CreatorsCreateFailed(CreatorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed to create instances" + super().__init__(msg, failed_info) + From 558cc13cdc10942334d5815829792eb078d67c27 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:10:15 +0200 Subject: [PATCH 11/28] move 'TrackChangesItem' to separate file --- client/ayon_core/pipeline/create/changes.py | 313 +++++++++++++++++++ client/ayon_core/pipeline/create/context.py | 314 +------------------- 2 files changed, 314 insertions(+), 313 deletions(-) create mode 100644 client/ayon_core/pipeline/create/changes.py diff --git a/client/ayon_core/pipeline/create/changes.py b/client/ayon_core/pipeline/create/changes.py new file mode 100644 index 0000000000..217478ee30 --- /dev/null +++ b/client/ayon_core/pipeline/create/changes.py @@ -0,0 +1,313 @@ +import copy + +_EMPTY_VALUE = object() + + +class TrackChangesItem(object): + """Helper object to track changes in data. + + Has access to full old and new data and will create deep copy of them, + so it is not needed to create copy before passed in. + + Can work as a dictionary if old or new value is a dictionary. In + that case received object is another object of 'TrackChangesItem'. + + Goal is to be able to get old or new value as was or only changed values + or get information about removed/changed keys, and all of that on + any "dictionary level". + + ``` + # Example of possible usages + >>> old_value = { + ... "key_1": "value_1", + ... "key_2": { + ... "key_sub_1": 1, + ... "key_sub_2": { + ... "enabled": True + ... } + ... }, + ... "key_3": "value_2" + ... } + >>> new_value = { + ... "key_1": "value_1", + ... "key_2": { + ... "key_sub_2": { + ... "enabled": False + ... }, + ... "key_sub_3": 3 + ... }, + ... "key_3": "value_3" + ... } + + >>> changes = TrackChangesItem(old_value, new_value) + >>> changes.changed + True + + >>> changes["key_2"]["key_sub_1"].new_value is None + True + + >>> list(sorted(changes.changed_keys)) + ['key_2', 'key_3'] + + >>> changes["key_2"]["key_sub_2"]["enabled"].changed + True + + >>> changes["key_2"].removed_keys + {'key_sub_1'} + + >>> list(sorted(changes["key_2"].available_keys)) + ['key_sub_1', 'key_sub_2', 'key_sub_3'] + + >>> changes.new_value == new_value + True + + # Get only changed values + only_changed_new_values = { + key: changes[key].new_value + for key in changes.changed_keys + } + ``` + + Args: + old_value (Any): Old value. + new_value (Any): New value. 
+ """ + + def __init__(self, old_value, new_value): + self._changed = old_value != new_value + # Resolve if value is '_EMPTY_VALUE' after comparison of the values + if old_value is _EMPTY_VALUE: + old_value = None + if new_value is _EMPTY_VALUE: + new_value = None + self._old_value = copy.deepcopy(old_value) + self._new_value = copy.deepcopy(new_value) + + self._old_is_dict = isinstance(old_value, dict) + self._new_is_dict = isinstance(new_value, dict) + + self._old_keys = None + self._new_keys = None + self._available_keys = None + self._removed_keys = None + + self._changed_keys = None + + self._sub_items = None + + def __getitem__(self, key): + """Getter looks into subitems if object is dictionary.""" + + if self._sub_items is None: + self._prepare_sub_items() + return self._sub_items[key] + + def __bool__(self): + """Boolean of object is if old and new value are the same.""" + + return self._changed + + def get(self, key, default=None): + """Try to get sub item.""" + + if self._sub_items is None: + self._prepare_sub_items() + return self._sub_items.get(key, default) + + @property + def old_value(self): + """Get copy of old value. + + Returns: + Any: Whatever old value was. + """ + + return copy.deepcopy(self._old_value) + + @property + def new_value(self): + """Get copy of new value. + + Returns: + Any: Whatever new value was. + """ + + return copy.deepcopy(self._new_value) + + @property + def changed(self): + """Value changed. + + Returns: + bool: If data changed. + """ + + return self._changed + + @property + def is_dict(self): + """Object can be used as dictionary. + + Returns: + bool: When can be used that way. + """ + + return self._old_is_dict or self._new_is_dict + + @property + def changes(self): + """Get changes in raw data. + + This method should be used only if 'is_dict' value is 'True'. + + Returns: + Dict[str, Tuple[Any, Any]]: Changes are by key in tuple + (, ). If 'is_dict' is 'False' then + output is always empty dictionary. + """ + + output = {} + if not self.is_dict: + return output + + old_value = self.old_value + new_value = self.new_value + for key in self.changed_keys: + _old = None + _new = None + if self._old_is_dict: + _old = old_value.get(key) + if self._new_is_dict: + _new = new_value.get(key) + output[key] = (_old, _new) + return output + + # Methods/properties that can be used when 'is_dict' is 'True' + @property + def old_keys(self): + """Keys from old value. + + Empty set is returned if old value is not a dict. + + Returns: + Set[str]: Keys from old value. + """ + + if self._old_keys is None: + self._prepare_keys() + return set(self._old_keys) + + @property + def new_keys(self): + """Keys from new value. + + Empty set is returned if old value is not a dict. + + Returns: + Set[str]: Keys from new value. + """ + + if self._new_keys is None: + self._prepare_keys() + return set(self._new_keys) + + @property + def changed_keys(self): + """Keys that has changed from old to new value. + + Empty set is returned if both old and new value are not a dict. + + Returns: + Set[str]: Keys of changed keys. + """ + + if self._changed_keys is None: + self._prepare_sub_items() + return set(self._changed_keys) + + @property + def available_keys(self): + """All keys that are available in old and new value. + + Empty set is returned if both old and new value are not a dict. + Output is Union of 'old_keys' and 'new_keys'. + + Returns: + Set[str]: All keys from old and new value. 
+ """ + + if self._available_keys is None: + self._prepare_keys() + return set(self._available_keys) + + @property + def removed_keys(self): + """Key that are not available in new value but were in old value. + + Returns: + Set[str]: All removed keys. + """ + + if self._removed_keys is None: + self._prepare_sub_items() + return set(self._removed_keys) + + def _prepare_keys(self): + old_keys = set() + new_keys = set() + if self._old_is_dict and self._new_is_dict: + old_keys = set(self._old_value.keys()) + new_keys = set(self._new_value.keys()) + + elif self._old_is_dict: + old_keys = set(self._old_value.keys()) + + elif self._new_is_dict: + new_keys = set(self._new_value.keys()) + + self._old_keys = old_keys + self._new_keys = new_keys + self._available_keys = old_keys | new_keys + self._removed_keys = old_keys - new_keys + + def _prepare_sub_items(self): + sub_items = {} + changed_keys = set() + + old_keys = self.old_keys + new_keys = self.new_keys + new_value = self.new_value + old_value = self.old_value + if self._old_is_dict and self._new_is_dict: + for key in self.available_keys: + item = TrackChangesItem( + old_value.get(key), new_value.get(key) + ) + sub_items[key] = item + if item.changed or key not in old_keys or key not in new_keys: + changed_keys.add(key) + + elif self._old_is_dict: + old_keys = set(old_value.keys()) + available_keys = set(old_keys) + changed_keys = set(available_keys) + for key in available_keys: + # NOTE Use '_EMPTY_VALUE' because old value could be 'None' + # which would result in "unchanged" item + sub_items[key] = TrackChangesItem( + old_value.get(key), _EMPTY_VALUE + ) + + elif self._new_is_dict: + new_keys = set(new_value.keys()) + available_keys = set(new_keys) + changed_keys = set(available_keys) + for key in available_keys: + # NOTE Use '_EMPTY_VALUE' because new value could be 'None' + # which would result in "unchanged" item + sub_items[key] = TrackChangesItem( + _EMPTY_VALUE, new_value.get(key) + ) + + self._sub_items = sub_items + self._changed_keys = changed_keys diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 0dd8ed1bd1..6f802a5a6e 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -41,6 +41,7 @@ from .exceptions import ( UnavailableSharedData, HostMissRequiredMethod, ) +from .changes import TrackChangesItem from .creator_plugins import ( Creator, AutoCreator, @@ -84,319 +85,6 @@ def prepare_failed_creator_operation_info( } -_EMPTY_VALUE = object() - - -class TrackChangesItem(object): - """Helper object to track changes in data. - - Has access to full old and new data and will create deep copy of them, - so it is not needed to create copy before passed in. - - Can work as a dictionary if old or new value is a dictionary. In - that case received object is another object of 'TrackChangesItem'. - - Goal is to be able to get old or new value as was or only changed values - or get information about removed/changed keys, and all of that on - any "dictionary level". - - ``` - # Example of possible usages - >>> old_value = { - ... "key_1": "value_1", - ... "key_2": { - ... "key_sub_1": 1, - ... "key_sub_2": { - ... "enabled": True - ... } - ... }, - ... "key_3": "value_2" - ... } - >>> new_value = { - ... "key_1": "value_1", - ... "key_2": { - ... "key_sub_2": { - ... "enabled": False - ... }, - ... "key_sub_3": 3 - ... }, - ... "key_3": "value_3" - ... 
} - - >>> changes = TrackChangesItem(old_value, new_value) - >>> changes.changed - True - - >>> changes["key_2"]["key_sub_1"].new_value is None - True - - >>> list(sorted(changes.changed_keys)) - ['key_2', 'key_3'] - - >>> changes["key_2"]["key_sub_2"]["enabled"].changed - True - - >>> changes["key_2"].removed_keys - {'key_sub_1'} - - >>> list(sorted(changes["key_2"].available_keys)) - ['key_sub_1', 'key_sub_2', 'key_sub_3'] - - >>> changes.new_value == new_value - True - - # Get only changed values - only_changed_new_values = { - key: changes[key].new_value - for key in changes.changed_keys - } - ``` - - Args: - old_value (Any): Old value. - new_value (Any): New value. - """ - - def __init__(self, old_value, new_value): - self._changed = old_value != new_value - # Resolve if value is '_EMPTY_VALUE' after comparison of the values - if old_value is _EMPTY_VALUE: - old_value = None - if new_value is _EMPTY_VALUE: - new_value = None - self._old_value = copy.deepcopy(old_value) - self._new_value = copy.deepcopy(new_value) - - self._old_is_dict = isinstance(old_value, dict) - self._new_is_dict = isinstance(new_value, dict) - - self._old_keys = None - self._new_keys = None - self._available_keys = None - self._removed_keys = None - - self._changed_keys = None - - self._sub_items = None - - def __getitem__(self, key): - """Getter looks into subitems if object is dictionary.""" - - if self._sub_items is None: - self._prepare_sub_items() - return self._sub_items[key] - - def __bool__(self): - """Boolean of object is if old and new value are the same.""" - - return self._changed - - def get(self, key, default=None): - """Try to get sub item.""" - - if self._sub_items is None: - self._prepare_sub_items() - return self._sub_items.get(key, default) - - @property - def old_value(self): - """Get copy of old value. - - Returns: - Any: Whatever old value was. - """ - - return copy.deepcopy(self._old_value) - - @property - def new_value(self): - """Get copy of new value. - - Returns: - Any: Whatever new value was. - """ - - return copy.deepcopy(self._new_value) - - @property - def changed(self): - """Value changed. - - Returns: - bool: If data changed. - """ - - return self._changed - - @property - def is_dict(self): - """Object can be used as dictionary. - - Returns: - bool: When can be used that way. - """ - - return self._old_is_dict or self._new_is_dict - - @property - def changes(self): - """Get changes in raw data. - - This method should be used only if 'is_dict' value is 'True'. - - Returns: - Dict[str, Tuple[Any, Any]]: Changes are by key in tuple - (, ). If 'is_dict' is 'False' then - output is always empty dictionary. - """ - - output = {} - if not self.is_dict: - return output - - old_value = self.old_value - new_value = self.new_value - for key in self.changed_keys: - _old = None - _new = None - if self._old_is_dict: - _old = old_value.get(key) - if self._new_is_dict: - _new = new_value.get(key) - output[key] = (_old, _new) - return output - - # Methods/properties that can be used when 'is_dict' is 'True' - @property - def old_keys(self): - """Keys from old value. - - Empty set is returned if old value is not a dict. - - Returns: - Set[str]: Keys from old value. - """ - - if self._old_keys is None: - self._prepare_keys() - return set(self._old_keys) - - @property - def new_keys(self): - """Keys from new value. - - Empty set is returned if old value is not a dict. - - Returns: - Set[str]: Keys from new value. 
- """ - - if self._new_keys is None: - self._prepare_keys() - return set(self._new_keys) - - @property - def changed_keys(self): - """Keys that has changed from old to new value. - - Empty set is returned if both old and new value are not a dict. - - Returns: - Set[str]: Keys of changed keys. - """ - - if self._changed_keys is None: - self._prepare_sub_items() - return set(self._changed_keys) - - @property - def available_keys(self): - """All keys that are available in old and new value. - - Empty set is returned if both old and new value are not a dict. - Output is Union of 'old_keys' and 'new_keys'. - - Returns: - Set[str]: All keys from old and new value. - """ - - if self._available_keys is None: - self._prepare_keys() - return set(self._available_keys) - - @property - def removed_keys(self): - """Key that are not available in new value but were in old value. - - Returns: - Set[str]: All removed keys. - """ - - if self._removed_keys is None: - self._prepare_sub_items() - return set(self._removed_keys) - - def _prepare_keys(self): - old_keys = set() - new_keys = set() - if self._old_is_dict and self._new_is_dict: - old_keys = set(self._old_value.keys()) - new_keys = set(self._new_value.keys()) - - elif self._old_is_dict: - old_keys = set(self._old_value.keys()) - - elif self._new_is_dict: - new_keys = set(self._new_value.keys()) - - self._old_keys = old_keys - self._new_keys = new_keys - self._available_keys = old_keys | new_keys - self._removed_keys = old_keys - new_keys - - def _prepare_sub_items(self): - sub_items = {} - changed_keys = set() - - old_keys = self.old_keys - new_keys = self.new_keys - new_value = self.new_value - old_value = self.old_value - if self._old_is_dict and self._new_is_dict: - for key in self.available_keys: - item = TrackChangesItem( - old_value.get(key), new_value.get(key) - ) - sub_items[key] = item - if item.changed or key not in old_keys or key not in new_keys: - changed_keys.add(key) - - elif self._old_is_dict: - old_keys = set(old_value.keys()) - available_keys = set(old_keys) - changed_keys = set(available_keys) - for key in available_keys: - # NOTE Use '_EMPTY_VALUE' because old value could be 'None' - # which would result in "unchanged" item - sub_items[key] = TrackChangesItem( - old_value.get(key), _EMPTY_VALUE - ) - - elif self._new_is_dict: - new_keys = set(new_value.keys()) - available_keys = set(new_keys) - changed_keys = set(available_keys) - for key in available_keys: - # NOTE Use '_EMPTY_VALUE' because new value could be 'None' - # which would result in "unchanged" item - sub_items[key] = TrackChangesItem( - _EMPTY_VALUE, new_value.get(key) - ) - - self._sub_items = sub_items - self._changed_keys = changed_keys - - class InstanceMember: """Representation of instance member. 
From f41a830f437d000c0f83f44765c6a5e00b25a137 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:17:27 +0200 Subject: [PATCH 12/28] move structure classes to separate file --- client/ayon_core/pipeline/create/__init__.py | 23 +- client/ayon_core/pipeline/create/context.py | 866 +---------------- .../ayon_core/pipeline/create/structures.py | 870 ++++++++++++++++++ 3 files changed, 889 insertions(+), 870 deletions(-) create mode 100644 client/ayon_core/pipeline/create/structures.py diff --git a/client/ayon_core/pipeline/create/__init__.py b/client/ayon_core/pipeline/create/__init__.py index 68e173d6b9..bb05bc6a09 100644 --- a/client/ayon_core/pipeline/create/__init__.py +++ b/client/ayon_core/pipeline/create/__init__.py @@ -18,7 +18,7 @@ from .exceptions import ( CreatorsRemoveFailed, CreatorsOperationFailed, ) - +from .structures import CreatedInstance from .utils import ( get_last_versions_for_instances, get_next_versions_for_instances, @@ -48,10 +48,7 @@ from .creator_plugins import ( cache_and_get_instances, ) -from .context import ( - CreatedInstance, - CreateContext -) +from .context import CreateContext from .legacy_create import ( LegacyCreator, @@ -65,6 +62,21 @@ __all__ = ( "PRE_CREATE_THUMBNAIL_KEY", "DEFAULT_VARIANT_VALUE", + "UnavailableSharedData", + "ImmutableKeyError", + "HostMissRequiredMethod", + "ConvertorsOperationFailed", + "ConvertorsFindFailed", + "ConvertorsConversionFailed", + "CreatorError", + "CreatorsCreateFailed", + "CreatorsCollectionFailed", + "CreatorsSaveFailed", + "CreatorsRemoveFailed", + "CreatorsOperationFailed", + + "CreatedInstance", + "get_last_versions_for_instances", "get_next_versions_for_instances", @@ -90,7 +102,6 @@ __all__ = ( "cache_and_get_instances", - "CreatedInstance", "CreateContext", "LegacyCreator", diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 6f802a5a6e..a11bc311dc 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -5,7 +5,6 @@ import logging import traceback import collections import inspect -from uuid import uuid4 from contextlib import contextmanager from typing import Optional @@ -16,22 +15,14 @@ import ayon_api from ayon_core.settings import get_project_settings from ayon_core.lib import is_func_signature_supported from ayon_core.lib.attribute_definitions import ( - UnknownDef, - serialize_attr_defs, - deserialize_attr_defs, get_default_values, ) from ayon_core.host import IPublishHost, IWorkfileHost -from ayon_core.pipeline import ( - Anatomy, - AYON_INSTANCE_ID, - AVALON_INSTANCE_ID, -) +from ayon_core.pipeline import Anatomy from ayon_core.pipeline.plugin_discover import DiscoverResult from .exceptions import ( CreatorError, - ImmutableKeyError, CreatorsCreateFailed, CreatorsCollectionFailed, CreatorsSaveFailed, @@ -42,6 +33,7 @@ from .exceptions import ( HostMissRequiredMethod, ) from .changes import TrackChangesItem +from .structures import PublishAttributes, ConvertorItem from .creator_plugins import ( Creator, AutoCreator, @@ -85,860 +77,6 @@ def prepare_failed_creator_operation_info( } -class InstanceMember: - """Representation of instance member. - - TODO: - Implement and use! 
- """ - - def __init__(self, instance, name): - self.instance = instance - - instance.add_members(self) - - self.name = name - self._actions = [] - - def add_action(self, label, callback): - self._actions.append({ - "label": label, - "callback": callback - }) - - -class AttributeValues(object): - """Container which keep values of Attribute definitions. - - Goal is to have one object which hold values of attribute definitions for - single instance. - - Has dictionary like methods. Not all of them are allowed all the time. - - Args: - attr_defs(AbstractAttrDef): Definitions of value type and properties. - values(dict): Values after possible conversion. - origin_data(dict): Values loaded from host before conversion. - """ - - def __init__(self, attr_defs, values, origin_data=None): - if origin_data is None: - origin_data = copy.deepcopy(values) - self._origin_data = origin_data - - attr_defs_by_key = { - attr_def.key: attr_def - for attr_def in attr_defs - if attr_def.is_value_def - } - for key, value in values.items(): - if key not in attr_defs_by_key: - new_def = UnknownDef(key, label=key, default=value) - attr_defs.append(new_def) - attr_defs_by_key[key] = new_def - - self._attr_defs = attr_defs - self._attr_defs_by_key = attr_defs_by_key - - self._data = {} - for attr_def in attr_defs: - value = values.get(attr_def.key) - if value is not None: - self._data[attr_def.key] = value - - def __setitem__(self, key, value): - if key not in self._attr_defs_by_key: - raise KeyError("Key \"{}\" was not found.".format(key)) - - old_value = self._data.get(key) - if old_value == value: - return - self._data[key] = value - - def __getitem__(self, key): - if key not in self._attr_defs_by_key: - return self._data[key] - return self._data.get(key, self._attr_defs_by_key[key].default) - - def __contains__(self, key): - return key in self._attr_defs_by_key - - def get(self, key, default=None): - if key in self._attr_defs_by_key: - return self[key] - return default - - def keys(self): - return self._attr_defs_by_key.keys() - - def values(self): - for key in self._attr_defs_by_key.keys(): - yield self._data.get(key) - - def items(self): - for key in self._attr_defs_by_key.keys(): - yield key, self._data.get(key) - - def update(self, value): - for _key, _value in dict(value): - self[_key] = _value - - def pop(self, key, default=None): - value = self._data.pop(key, default) - # Remove attribute definition if is 'UnknownDef' - # - gives option to get rid of unknown values - attr_def = self._attr_defs_by_key.get(key) - if isinstance(attr_def, UnknownDef): - self._attr_defs_by_key.pop(key) - self._attr_defs.remove(attr_def) - return value - - def reset_values(self): - self._data = {} - - def mark_as_stored(self): - self._origin_data = copy.deepcopy(self._data) - - @property - def attr_defs(self): - """Pointer to attribute definitions. - - Returns: - List[AbstractAttrDef]: Attribute definitions. - """ - - return list(self._attr_defs) - - @property - def origin_data(self): - return copy.deepcopy(self._origin_data) - - def data_to_store(self): - """Create new dictionary with data to store. - - Returns: - Dict[str, Any]: Attribute values that should be stored. - """ - - output = {} - for key in self._data: - output[key] = self[key] - - for key, attr_def in self._attr_defs_by_key.items(): - if key not in output: - output[key] = attr_def.default - return output - - def get_serialized_attr_defs(self): - """Serialize attribute definitions to json serializable types. 
- - Returns: - List[Dict[str, Any]]: Serialized attribute definitions. - """ - - return serialize_attr_defs(self._attr_defs) - - -class CreatorAttributeValues(AttributeValues): - """Creator specific attribute values of an instance. - - Args: - instance (CreatedInstance): Instance for which are values hold. - """ - - def __init__(self, instance, *args, **kwargs): - self.instance = instance - super(CreatorAttributeValues, self).__init__(*args, **kwargs) - - -class PublishAttributeValues(AttributeValues): - """Publish plugin specific attribute values. - - Values are for single plugin which can be on `CreatedInstance` - or context values stored on `CreateContext`. - - Args: - publish_attributes(PublishAttributes): Wrapper for multiple publish - attributes is used as parent object. - """ - - def __init__(self, publish_attributes, *args, **kwargs): - self.publish_attributes = publish_attributes - super(PublishAttributeValues, self).__init__(*args, **kwargs) - - @property - def parent(self): - return self.publish_attributes.parent - - -class PublishAttributes: - """Wrapper for publish plugin attribute definitions. - - Cares about handling attribute definitions of multiple publish plugins. - Keep information about attribute definitions and their values. - - Args: - parent(CreatedInstance, CreateContext): Parent for which will be - data stored and from which are data loaded. - origin_data(dict): Loaded data by plugin class name. - attr_plugins(Union[List[pyblish.api.Plugin], None]): List of publish - plugins that may have defined attribute definitions. - """ - - def __init__(self, parent, origin_data, attr_plugins=None): - self.parent = parent - self._origin_data = copy.deepcopy(origin_data) - - attr_plugins = attr_plugins or [] - self.attr_plugins = attr_plugins - - self._data = copy.deepcopy(origin_data) - self._plugin_names_order = [] - self._missing_plugins = [] - - self.set_publish_plugins(attr_plugins) - - def __getitem__(self, key): - return self._data[key] - - def __contains__(self, key): - return key in self._data - - def keys(self): - return self._data.keys() - - def values(self): - return self._data.values() - - def items(self): - return self._data.items() - - def pop(self, key, default=None): - """Remove or reset value for plugin. - - Plugin values are reset to defaults if plugin is available but - data of plugin which was not found are removed. - - Args: - key(str): Plugin name. - default: Default value if plugin was not found. 
- """ - - if key not in self._data: - return default - - if key in self._missing_plugins: - self._missing_plugins.remove(key) - removed_item = self._data.pop(key) - return removed_item.data_to_store() - - value_item = self._data[key] - # Prepare value to return - output = value_item.data_to_store() - # Reset values - value_item.reset_values() - return output - - def plugin_names_order(self): - """Plugin names order by their 'order' attribute.""" - - for name in self._plugin_names_order: - yield name - - def mark_as_stored(self): - self._origin_data = copy.deepcopy(self.data_to_store()) - - def data_to_store(self): - """Convert attribute values to "data to store".""" - - output = {} - for key, attr_value in self._data.items(): - output[key] = attr_value.data_to_store() - return output - - @property - def origin_data(self): - return copy.deepcopy(self._origin_data) - - def set_publish_plugins(self, attr_plugins): - """Set publish plugins attribute definitions.""" - - self._plugin_names_order = [] - self._missing_plugins = [] - self.attr_plugins = attr_plugins or [] - - origin_data = self._origin_data - data = self._data - self._data = {} - added_keys = set() - for plugin in attr_plugins: - output = plugin.convert_attribute_values(data) - if output is not None: - data = output - attr_defs = plugin.get_attribute_defs() - if not attr_defs: - continue - - key = plugin.__name__ - added_keys.add(key) - self._plugin_names_order.append(key) - - value = data.get(key) or {} - orig_value = copy.deepcopy(origin_data.get(key) or {}) - self._data[key] = PublishAttributeValues( - self, attr_defs, value, orig_value - ) - - for key, value in data.items(): - if key not in added_keys: - self._missing_plugins.append(key) - self._data[key] = PublishAttributeValues( - self, [], value, value - ) - - def serialize_attributes(self): - return { - "attr_defs": { - plugin_name: attrs_value.get_serialized_attr_defs() - for plugin_name, attrs_value in self._data.items() - }, - "plugin_names_order": self._plugin_names_order, - "missing_plugins": self._missing_plugins - } - - def deserialize_attributes(self, data): - self._plugin_names_order = data["plugin_names_order"] - self._missing_plugins = data["missing_plugins"] - - attr_defs = deserialize_attr_defs(data["attr_defs"]) - - origin_data = self._origin_data - data = self._data - self._data = {} - - added_keys = set() - for plugin_name, attr_defs_data in attr_defs.items(): - attr_defs = deserialize_attr_defs(attr_defs_data) - value = data.get(plugin_name) or {} - orig_value = copy.deepcopy(origin_data.get(plugin_name) or {}) - self._data[plugin_name] = PublishAttributeValues( - self, attr_defs, value, orig_value - ) - - for key, value in data.items(): - if key not in added_keys: - self._missing_plugins.append(key) - self._data[key] = PublishAttributeValues( - self, [], value, value - ) - - -class CreatedInstance: - """Instance entity with data that will be stored to workfile. - - I think `data` must be required argument containing all minimum information - about instance like "folderPath" and "task" and all data used for filling - product name as creators may have custom data for product name filling. - - Notes: - Object have 2 possible initialization. One using 'creator' object which - is recommended for api usage. Second by passing information about - creator. - - Args: - product_type (str): Product type that will be created. - product_name (str): Name of product that will be created. 
- data (Dict[str, Any]): Data used for filling product name or override - data from already existing instance. - creator (Union[BaseCreator, None]): Creator responsible for instance. - creator_identifier (str): Identifier of creator plugin. - creator_label (str): Creator plugin label. - group_label (str): Default group label from creator plugin. - creator_attr_defs (List[AbstractAttrDef]): Attribute definitions from - creator. - """ - - # Keys that can't be changed or removed from data after loading using - # creator. - # - 'creator_attributes' and 'publish_attributes' can change values of - # their individual children but not on their own - __immutable_keys = ( - "id", - "instance_id", - "product_type", - "creator_identifier", - "creator_attributes", - "publish_attributes" - ) - - def __init__( - self, - product_type, - product_name, - data, - creator=None, - creator_identifier=None, - creator_label=None, - group_label=None, - creator_attr_defs=None, - ): - if creator is not None: - creator_identifier = creator.identifier - group_label = creator.get_group_label() - creator_label = creator.label - creator_attr_defs = creator.get_instance_attr_defs() - - self._creator_label = creator_label - self._group_label = group_label or creator_identifier - - # Instance members may have actions on them - # TODO implement members logic - self._members = [] - - # Data that can be used for lifetime of object - self._transient_data = {} - - # Create a copy of passed data to avoid changing them on the fly - data = copy.deepcopy(data or {}) - - # Pop dictionary values that will be converted to objects to be able - # catch changes - orig_creator_attributes = data.pop("creator_attributes", None) or {} - orig_publish_attributes = data.pop("publish_attributes", None) or {} - - # Store original value of passed data - self._orig_data = copy.deepcopy(data) - - # Pop 'productType' and 'productName' to prevent unexpected changes - data.pop("productType", None) - data.pop("productName", None) - # Backwards compatibility with OpenPype instances - data.pop("family", None) - data.pop("subset", None) - - asset_name = data.pop("asset", None) - if "folderPath" not in data: - data["folderPath"] = asset_name - - # QUESTION Does it make sense to have data stored as ordered dict? - self._data = collections.OrderedDict() - # QUESTION Do we need this "id" information on instance? 
- item_id = data.get("id") - # TODO use only 'AYON_INSTANCE_ID' when all hosts support it - if item_id not in {AYON_INSTANCE_ID, AVALON_INSTANCE_ID}: - item_id = AVALON_INSTANCE_ID - self._data["id"] = item_id - self._data["productType"] = product_type - self._data["productName"] = product_name - self._data["active"] = data.get("active", True) - self._data["creator_identifier"] = creator_identifier - - # Pop from source data all keys that are defined in `_data` before - # this moment and through their values away - # - they should be the same and if are not then should not change - # already set values - for key in self._data.keys(): - if key in data: - data.pop(key) - - self._data["variant"] = self._data.get("variant") or "" - # Stored creator specific attribute values - # {key: value} - creator_values = copy.deepcopy(orig_creator_attributes) - - self._data["creator_attributes"] = CreatorAttributeValues( - self, - list(creator_attr_defs), - creator_values, - orig_creator_attributes - ) - - # Stored publish specific attribute values - # {: {key: value}} - # - must be set using 'set_publish_plugins' - self._data["publish_attributes"] = PublishAttributes( - self, orig_publish_attributes, None - ) - if data: - self._data.update(data) - - if not self._data.get("instance_id"): - self._data["instance_id"] = str(uuid4()) - - self._folder_is_valid = self.has_set_folder - self._task_is_valid = self.has_set_task - - def __str__(self): - return ( - " {data}" - ).format( - creator_identifier=self.creator_identifier, - product={"name": self.product_name, "type": self.product_type}, - data=str(self._data) - ) - - # --- Dictionary like methods --- - def __getitem__(self, key): - return self._data[key] - - def __contains__(self, key): - return key in self._data - - def __setitem__(self, key, value): - # Validate immutable keys - if key not in self.__immutable_keys: - self._data[key] = value - - elif value != self._data.get(key): - # Raise exception if key is immutable and value has changed - raise ImmutableKeyError(key) - - def get(self, key, default=None): - return self._data.get(key, default) - - def pop(self, key, *args, **kwargs): - # Raise exception if is trying to pop key which is immutable - if key in self.__immutable_keys: - raise ImmutableKeyError(key) - - self._data.pop(key, *args, **kwargs) - - def keys(self): - return self._data.keys() - - def values(self): - return self._data.values() - - def items(self): - return self._data.items() - # ------ - - @property - def product_type(self): - return self._data["productType"] - - @property - def product_name(self): - return self._data["productName"] - - @property - def label(self): - label = self._data.get("label") - if not label: - label = self.product_name - return label - - @property - def group_label(self): - label = self._data.get("group") - if label: - return label - return self._group_label - - @property - def origin_data(self): - output = copy.deepcopy(self._orig_data) - output["creator_attributes"] = self.creator_attributes.origin_data - output["publish_attributes"] = self.publish_attributes.origin_data - return output - - @property - def creator_identifier(self): - return self._data["creator_identifier"] - - @property - def creator_label(self): - return self._creator_label or self.creator_identifier - - @property - def id(self): - """Instance identifier. - - Returns: - str: UUID of instance. - """ - - return self._data["instance_id"] - - @property - def data(self): - """Legacy access to data. - - Access to data is needed to modify values. 
- - Returns: - CreatedInstance: Object can be used as dictionary but with - validations of immutable keys. - """ - - return self - - @property - def transient_data(self): - """Data stored for lifetime of instance object. - - These data are not stored to scene and will be lost on object - deletion. - - Can be used to store objects. In some host implementations is not - possible to reference to object in scene with some unique identifier - (e.g. node in Fusion.). In that case it is handy to store the object - here. Should be used that way only if instance data are stored on the - node itself. - - Returns: - Dict[str, Any]: Dictionary object where you can store data related - to instance for lifetime of instance object. - """ - - return self._transient_data - - def changes(self): - """Calculate and return changes.""" - - return TrackChangesItem(self.origin_data, self.data_to_store()) - - def mark_as_stored(self): - """Should be called when instance data are stored. - - Origin data are replaced by current data so changes are cleared. - """ - - orig_keys = set(self._orig_data.keys()) - for key, value in self._data.items(): - orig_keys.discard(key) - if key in ("creator_attributes", "publish_attributes"): - continue - self._orig_data[key] = copy.deepcopy(value) - - for key in orig_keys: - self._orig_data.pop(key) - - self.creator_attributes.mark_as_stored() - self.publish_attributes.mark_as_stored() - - @property - def creator_attributes(self): - return self._data["creator_attributes"] - - @property - def creator_attribute_defs(self): - """Attribute definitions defined by creator plugin. - - Returns: - List[AbstractAttrDef]: Attribute definitions. - """ - - return self.creator_attributes.attr_defs - - @property - def publish_attributes(self): - return self._data["publish_attributes"] - - def data_to_store(self): - """Collect data that contain json parsable types. - - It is possible to recreate the instance using these data. - - Todos: - We probably don't need OrderedDict. When data are loaded they - are not ordered anymore. - - Returns: - OrderedDict: Ordered dictionary with instance data. - """ - - output = collections.OrderedDict() - for key, value in self._data.items(): - if key in ("creator_attributes", "publish_attributes"): - continue - output[key] = value - - output["creator_attributes"] = self.creator_attributes.data_to_store() - output["publish_attributes"] = self.publish_attributes.data_to_store() - - return output - - @classmethod - def from_existing(cls, instance_data, creator): - """Convert instance data from workfile to CreatedInstance. - - Args: - instance_data (Dict[str, Any]): Data in a structure ready for - 'CreatedInstance' object. - creator (BaseCreator): Creator plugin which is creating the - instance of for which the instance belong. - """ - - instance_data = copy.deepcopy(instance_data) - - product_type = instance_data.get("productType") - if product_type is None: - product_type = instance_data.get("family") - if product_type is None: - product_type = creator.product_type - product_name = instance_data.get("productName") - if product_name is None: - product_name = instance_data.get("subset") - - return cls( - product_type, product_name, instance_data, creator - ) - - def set_publish_plugins(self, attr_plugins): - """Set publish plugins with attribute definitions. - - This method should be called only from 'CreateContext'. 
- - Args: - attr_plugins (List[pyblish.api.Plugin]): Pyblish plugins which - inherit from 'AYONPyblishPluginMixin' and may contain - attribute definitions. - """ - - self.publish_attributes.set_publish_plugins(attr_plugins) - - def add_members(self, members): - """Currently unused method.""" - - for member in members: - if member not in self._members: - self._members.append(member) - - def serialize_for_remote(self): - """Serialize object into data to be possible recreated object. - - Returns: - Dict[str, Any]: Serialized data. - """ - - creator_attr_defs = self.creator_attributes.get_serialized_attr_defs() - publish_attributes = self.publish_attributes.serialize_attributes() - return { - "data": self.data_to_store(), - "orig_data": self.origin_data, - "creator_attr_defs": creator_attr_defs, - "publish_attributes": publish_attributes, - "creator_label": self._creator_label, - "group_label": self._group_label, - } - - @classmethod - def deserialize_on_remote(cls, serialized_data): - """Convert instance data to CreatedInstance. - - This is fake instance in remote process e.g. in UI process. The creator - is not a full creator and should not be used for calling methods when - instance is created from this method (matters on implementation). - - Args: - serialized_data (Dict[str, Any]): Serialized data for remote - recreating. Should contain 'data' and 'orig_data'. - """ - - instance_data = copy.deepcopy(serialized_data["data"]) - creator_identifier = instance_data["creator_identifier"] - - product_type = instance_data["productType"] - product_name = instance_data.get("productName", None) - - creator_label = serialized_data["creator_label"] - group_label = serialized_data["group_label"] - creator_attr_defs = deserialize_attr_defs( - serialized_data["creator_attr_defs"] - ) - publish_attributes = serialized_data["publish_attributes"] - - obj = cls( - product_type, - product_name, - instance_data, - creator_identifier=creator_identifier, - creator_label=creator_label, - group_label=group_label, - creator_attr_defs=creator_attr_defs - ) - obj._orig_data = serialized_data["orig_data"] - obj.publish_attributes.deserialize_attributes(publish_attributes) - - return obj - - # Context validation related methods/properties - @property - def has_set_folder(self): - """Folder path is set in data.""" - - return "folderPath" in self._data - - @property - def has_set_task(self): - """Task name is set in data.""" - - return "task" in self._data - - @property - def has_valid_context(self): - """Context data are valid for publishing.""" - - return self.has_valid_folder and self.has_valid_task - - @property - def has_valid_folder(self): - """Folder set in context exists in project.""" - - if not self.has_set_folder: - return False - return self._folder_is_valid - - @property - def has_valid_task(self): - """Task set in context exists in project.""" - - if not self.has_set_task: - return False - return self._task_is_valid - - def set_folder_invalid(self, invalid): - # TODO replace with `set_folder_path` - self._folder_is_valid = not invalid - - def set_task_invalid(self, invalid): - # TODO replace with `set_task_name` - self._task_is_valid = not invalid - - -class ConvertorItem(object): - """Item representing convertor plugin. - - Args: - identifier (str): Identifier of convertor. - label (str): Label which will be shown in UI. 
- """ - - def __init__(self, identifier, label): - self._id = str(uuid4()) - self.identifier = identifier - self.label = label - - @property - def id(self): - return self._id - - def to_data(self): - return { - "id": self.id, - "identifier": self.identifier, - "label": self.label - } - - @classmethod - def from_data(cls, data): - obj = cls(data["identifier"], data["label"]) - obj._id = data["id"] - return obj - - class CreateContext: """Context of instance creation. diff --git a/client/ayon_core/pipeline/create/structures.py b/client/ayon_core/pipeline/create/structures.py new file mode 100644 index 0000000000..7fe854c4fc --- /dev/null +++ b/client/ayon_core/pipeline/create/structures.py @@ -0,0 +1,870 @@ +import copy +import collections +from uuid import uuid4 + +from ayon_core.lib.attribute_definitions import ( + UnknownDef, + serialize_attr_defs, + deserialize_attr_defs, +) +from ayon_core.pipeline import ( + AYON_INSTANCE_ID, + AVALON_INSTANCE_ID, +) + +from .exceptions import ImmutableKeyError +from .changes import TrackChangesItem + + +class ConvertorItem(object): + """Item representing convertor plugin. + + Args: + identifier (str): Identifier of convertor. + label (str): Label which will be shown in UI. + """ + + def __init__(self, identifier, label): + self._id = str(uuid4()) + self.identifier = identifier + self.label = label + + @property + def id(self): + return self._id + + def to_data(self): + return { + "id": self.id, + "identifier": self.identifier, + "label": self.label + } + + @classmethod + def from_data(cls, data): + obj = cls(data["identifier"], data["label"]) + obj._id = data["id"] + return obj + + +class InstanceMember: + """Representation of instance member. + + TODO: + Implement and use! + """ + + def __init__(self, instance, name): + self.instance = instance + + instance.add_members(self) + + self.name = name + self._actions = [] + + def add_action(self, label, callback): + self._actions.append({ + "label": label, + "callback": callback + }) + + +class AttributeValues(object): + """Container which keep values of Attribute definitions. + + Goal is to have one object which hold values of attribute definitions for + single instance. + + Has dictionary like methods. Not all of them are allowed all the time. + + Args: + attr_defs(AbstractAttrDef): Definitions of value type and properties. + values(dict): Values after possible conversion. + origin_data(dict): Values loaded from host before conversion. 
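    Example:
        Editor's illustration, not part of the original patch; it assumes
        the 'NumberDef' and 'BoolDef' attribute definition classes from
        'ayon_core.lib' and constructs the base class directly:

            attr_defs = [
                NumberDef("frames", default=1),
                BoolDef("use_gpu", default=True),
            ]
            values = AttributeValues(attr_defs, {"frames": 5})
            values["frames"]        # -> 5
            values["use_gpu"]       # -> True (default from the definition)
            values["frames"] = 10
            values.data_to_store()  # -> {"frames": 10, "use_gpu": True}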
+ """ + + def __init__(self, attr_defs, values, origin_data=None): + if origin_data is None: + origin_data = copy.deepcopy(values) + self._origin_data = origin_data + + attr_defs_by_key = { + attr_def.key: attr_def + for attr_def in attr_defs + if attr_def.is_value_def + } + for key, value in values.items(): + if key not in attr_defs_by_key: + new_def = UnknownDef(key, label=key, default=value) + attr_defs.append(new_def) + attr_defs_by_key[key] = new_def + + self._attr_defs = attr_defs + self._attr_defs_by_key = attr_defs_by_key + + self._data = {} + for attr_def in attr_defs: + value = values.get(attr_def.key) + if value is not None: + self._data[attr_def.key] = value + + def __setitem__(self, key, value): + if key not in self._attr_defs_by_key: + raise KeyError("Key \"{}\" was not found.".format(key)) + + old_value = self._data.get(key) + if old_value == value: + return + self._data[key] = value + + def __getitem__(self, key): + if key not in self._attr_defs_by_key: + return self._data[key] + return self._data.get(key, self._attr_defs_by_key[key].default) + + def __contains__(self, key): + return key in self._attr_defs_by_key + + def get(self, key, default=None): + if key in self._attr_defs_by_key: + return self[key] + return default + + def keys(self): + return self._attr_defs_by_key.keys() + + def values(self): + for key in self._attr_defs_by_key.keys(): + yield self._data.get(key) + + def items(self): + for key in self._attr_defs_by_key.keys(): + yield key, self._data.get(key) + + def update(self, value): + for _key, _value in dict(value): + self[_key] = _value + + def pop(self, key, default=None): + value = self._data.pop(key, default) + # Remove attribute definition if is 'UnknownDef' + # - gives option to get rid of unknown values + attr_def = self._attr_defs_by_key.get(key) + if isinstance(attr_def, UnknownDef): + self._attr_defs_by_key.pop(key) + self._attr_defs.remove(attr_def) + return value + + def reset_values(self): + self._data = {} + + def mark_as_stored(self): + self._origin_data = copy.deepcopy(self._data) + + @property + def attr_defs(self): + """Pointer to attribute definitions. + + Returns: + List[AbstractAttrDef]: Attribute definitions. + """ + + return list(self._attr_defs) + + @property + def origin_data(self): + return copy.deepcopy(self._origin_data) + + def data_to_store(self): + """Create new dictionary with data to store. + + Returns: + Dict[str, Any]: Attribute values that should be stored. + """ + + output = {} + for key in self._data: + output[key] = self[key] + + for key, attr_def in self._attr_defs_by_key.items(): + if key not in output: + output[key] = attr_def.default + return output + + def get_serialized_attr_defs(self): + """Serialize attribute definitions to json serializable types. + + Returns: + List[Dict[str, Any]]: Serialized attribute definitions. + """ + + return serialize_attr_defs(self._attr_defs) + + +class CreatorAttributeValues(AttributeValues): + """Creator specific attribute values of an instance. + + Args: + instance (CreatedInstance): Instance for which are values hold. + """ + + def __init__(self, instance, *args, **kwargs): + self.instance = instance + super(CreatorAttributeValues, self).__init__(*args, **kwargs) + + +class PublishAttributeValues(AttributeValues): + """Publish plugin specific attribute values. + + Values are for single plugin which can be on `CreatedInstance` + or context values stored on `CreateContext`. 
+ + Args: + publish_attributes(PublishAttributes): Wrapper for multiple publish + attributes is used as parent object. + """ + + def __init__(self, publish_attributes, *args, **kwargs): + self.publish_attributes = publish_attributes + super(PublishAttributeValues, self).__init__(*args, **kwargs) + + @property + def parent(self): + return self.publish_attributes.parent + + +class PublishAttributes: + """Wrapper for publish plugin attribute definitions. + + Cares about handling attribute definitions of multiple publish plugins. + Keep information about attribute definitions and their values. + + Args: + parent(CreatedInstance, CreateContext): Parent for which will be + data stored and from which are data loaded. + origin_data(dict): Loaded data by plugin class name. + attr_plugins(Union[List[pyblish.api.Plugin], None]): List of publish + plugins that may have defined attribute definitions. + """ + + def __init__(self, parent, origin_data, attr_plugins=None): + self.parent = parent + self._origin_data = copy.deepcopy(origin_data) + + attr_plugins = attr_plugins or [] + self.attr_plugins = attr_plugins + + self._data = copy.deepcopy(origin_data) + self._plugin_names_order = [] + self._missing_plugins = [] + + self.set_publish_plugins(attr_plugins) + + def __getitem__(self, key): + return self._data[key] + + def __contains__(self, key): + return key in self._data + + def keys(self): + return self._data.keys() + + def values(self): + return self._data.values() + + def items(self): + return self._data.items() + + def pop(self, key, default=None): + """Remove or reset value for plugin. + + Plugin values are reset to defaults if plugin is available but + data of plugin which was not found are removed. + + Args: + key(str): Plugin name. + default: Default value if plugin was not found. 
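        Example:
            Editor's illustration with hypothetical plugin names, not part
            of the original patch:

                # Plugin is available -> values are reset to defaults and
                # the previously stored values are returned.
                publish_attributes.pop("CollectHypotheticalFrames")

                # Plugin was not found ("missing") -> its stored values are
                # removed completely and returned.
                publish_attributes.pop("SomeRemovedLegacyPlugin")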
+ """ + + if key not in self._data: + return default + + if key in self._missing_plugins: + self._missing_plugins.remove(key) + removed_item = self._data.pop(key) + return removed_item.data_to_store() + + value_item = self._data[key] + # Prepare value to return + output = value_item.data_to_store() + # Reset values + value_item.reset_values() + return output + + def plugin_names_order(self): + """Plugin names order by their 'order' attribute.""" + + for name in self._plugin_names_order: + yield name + + def mark_as_stored(self): + self._origin_data = copy.deepcopy(self.data_to_store()) + + def data_to_store(self): + """Convert attribute values to "data to store".""" + + output = {} + for key, attr_value in self._data.items(): + output[key] = attr_value.data_to_store() + return output + + @property + def origin_data(self): + return copy.deepcopy(self._origin_data) + + def set_publish_plugins(self, attr_plugins): + """Set publish plugins attribute definitions.""" + + self._plugin_names_order = [] + self._missing_plugins = [] + self.attr_plugins = attr_plugins or [] + + origin_data = self._origin_data + data = self._data + self._data = {} + added_keys = set() + for plugin in attr_plugins: + output = plugin.convert_attribute_values(data) + if output is not None: + data = output + attr_defs = plugin.get_attribute_defs() + if not attr_defs: + continue + + key = plugin.__name__ + added_keys.add(key) + self._plugin_names_order.append(key) + + value = data.get(key) or {} + orig_value = copy.deepcopy(origin_data.get(key) or {}) + self._data[key] = PublishAttributeValues( + self, attr_defs, value, orig_value + ) + + for key, value in data.items(): + if key not in added_keys: + self._missing_plugins.append(key) + self._data[key] = PublishAttributeValues( + self, [], value, value + ) + + def serialize_attributes(self): + return { + "attr_defs": { + plugin_name: attrs_value.get_serialized_attr_defs() + for plugin_name, attrs_value in self._data.items() + }, + "plugin_names_order": self._plugin_names_order, + "missing_plugins": self._missing_plugins + } + + def deserialize_attributes(self, data): + self._plugin_names_order = data["plugin_names_order"] + self._missing_plugins = data["missing_plugins"] + + attr_defs = deserialize_attr_defs(data["attr_defs"]) + + origin_data = self._origin_data + data = self._data + self._data = {} + + added_keys = set() + for plugin_name, attr_defs_data in attr_defs.items(): + attr_defs = deserialize_attr_defs(attr_defs_data) + value = data.get(plugin_name) or {} + orig_value = copy.deepcopy(origin_data.get(plugin_name) or {}) + self._data[plugin_name] = PublishAttributeValues( + self, attr_defs, value, orig_value + ) + + for key, value in data.items(): + if key not in added_keys: + self._missing_plugins.append(key) + self._data[key] = PublishAttributeValues( + self, [], value, value + ) + + +class CreatedInstance: + """Instance entity with data that will be stored to workfile. + + I think `data` must be required argument containing all minimum information + about instance like "folderPath" and "task" and all data used for filling + product name as creators may have custom data for product name filling. + + Notes: + Object have 2 possible initialization. One using 'creator' object which + is recommended for api usage. Second by passing information about + creator. + + Args: + product_type (str): Product type that will be created. + product_name (str): Name of product that will be created. 
+ data (Dict[str, Any]): Data used for filling product name or override + data from already existing instance. + creator (Union[BaseCreator, None]): Creator responsible for instance. + creator_identifier (str): Identifier of creator plugin. + creator_label (str): Creator plugin label. + group_label (str): Default group label from creator plugin. + creator_attr_defs (List[AbstractAttrDef]): Attribute definitions from + creator. + """ + + # Keys that can't be changed or removed from data after loading using + # creator. + # - 'creator_attributes' and 'publish_attributes' can change values of + # their individual children but not on their own + __immutable_keys = ( + "id", + "instance_id", + "product_type", + "creator_identifier", + "creator_attributes", + "publish_attributes" + ) + + def __init__( + self, + product_type, + product_name, + data, + creator=None, + creator_identifier=None, + creator_label=None, + group_label=None, + creator_attr_defs=None, + ): + if creator is not None: + creator_identifier = creator.identifier + group_label = creator.get_group_label() + creator_label = creator.label + creator_attr_defs = creator.get_instance_attr_defs() + + self._creator_label = creator_label + self._group_label = group_label or creator_identifier + + # Instance members may have actions on them + # TODO implement members logic + self._members = [] + + # Data that can be used for lifetime of object + self._transient_data = {} + + # Create a copy of passed data to avoid changing them on the fly + data = copy.deepcopy(data or {}) + + # Pop dictionary values that will be converted to objects to be able + # catch changes + orig_creator_attributes = data.pop("creator_attributes", None) or {} + orig_publish_attributes = data.pop("publish_attributes", None) or {} + + # Store original value of passed data + self._orig_data = copy.deepcopy(data) + + # Pop 'productType' and 'productName' to prevent unexpected changes + data.pop("productType", None) + data.pop("productName", None) + # Backwards compatibility with OpenPype instances + data.pop("family", None) + data.pop("subset", None) + + asset_name = data.pop("asset", None) + if "folderPath" not in data: + data["folderPath"] = asset_name + + # QUESTION Does it make sense to have data stored as ordered dict? + self._data = collections.OrderedDict() + # QUESTION Do we need this "id" information on instance? 
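        # Editor's note (descriptive comment, not part of the original
        # patch): the "id" value marks the data as a publishable instance.
        # Scenes created by older hosts may still carry the legacy Avalon
        # identifier, so any unrecognized value falls back to
        # 'AVALON_INSTANCE_ID' for backwards compatibility, while new hosts
        # are expected to write 'AYON_INSTANCE_ID'.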
+ item_id = data.get("id") + # TODO use only 'AYON_INSTANCE_ID' when all hosts support it + if item_id not in {AYON_INSTANCE_ID, AVALON_INSTANCE_ID}: + item_id = AVALON_INSTANCE_ID + self._data["id"] = item_id + self._data["productType"] = product_type + self._data["productName"] = product_name + self._data["active"] = data.get("active", True) + self._data["creator_identifier"] = creator_identifier + + # Pop from source data all keys that are defined in `_data` before + # this moment and through their values away + # - they should be the same and if are not then should not change + # already set values + for key in self._data.keys(): + if key in data: + data.pop(key) + + self._data["variant"] = self._data.get("variant") or "" + # Stored creator specific attribute values + # {key: value} + creator_values = copy.deepcopy(orig_creator_attributes) + + self._data["creator_attributes"] = CreatorAttributeValues( + self, + list(creator_attr_defs), + creator_values, + orig_creator_attributes + ) + + # Stored publish specific attribute values + # {: {key: value}} + # - must be set using 'set_publish_plugins' + self._data["publish_attributes"] = PublishAttributes( + self, orig_publish_attributes, None + ) + if data: + self._data.update(data) + + if not self._data.get("instance_id"): + self._data["instance_id"] = str(uuid4()) + + self._folder_is_valid = self.has_set_folder + self._task_is_valid = self.has_set_task + + def __str__(self): + return ( + " {data}" + ).format( + creator_identifier=self.creator_identifier, + product={"name": self.product_name, "type": self.product_type}, + data=str(self._data) + ) + + # --- Dictionary like methods --- + def __getitem__(self, key): + return self._data[key] + + def __contains__(self, key): + return key in self._data + + def __setitem__(self, key, value): + # Validate immutable keys + if key not in self.__immutable_keys: + self._data[key] = value + + elif value != self._data.get(key): + # Raise exception if key is immutable and value has changed + raise ImmutableKeyError(key) + + def get(self, key, default=None): + return self._data.get(key, default) + + def pop(self, key, *args, **kwargs): + # Raise exception if is trying to pop key which is immutable + if key in self.__immutable_keys: + raise ImmutableKeyError(key) + + self._data.pop(key, *args, **kwargs) + + def keys(self): + return self._data.keys() + + def values(self): + return self._data.values() + + def items(self): + return self._data.items() + # ------ + + @property + def product_type(self): + return self._data["productType"] + + @property + def product_name(self): + return self._data["productName"] + + @property + def label(self): + label = self._data.get("label") + if not label: + label = self.product_name + return label + + @property + def group_label(self): + label = self._data.get("group") + if label: + return label + return self._group_label + + @property + def origin_data(self): + output = copy.deepcopy(self._orig_data) + output["creator_attributes"] = self.creator_attributes.origin_data + output["publish_attributes"] = self.publish_attributes.origin_data + return output + + @property + def creator_identifier(self): + return self._data["creator_identifier"] + + @property + def creator_label(self): + return self._creator_label or self.creator_identifier + + @property + def id(self): + """Instance identifier. + + Returns: + str: UUID of instance. + """ + + return self._data["instance_id"] + + @property + def data(self): + """Legacy access to data. + + Access to data is needed to modify values. 
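        For illustration (editor's addition, not part of the original
        patch), both forms below go through the same immutable-key checks
        because the property returns the instance itself:

            instance.data["comment"] = "Reviewed"    # same as instance["comment"] = ...
            instance.data["instance_id"] = "other"   # raises ImmutableKeyError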
+ + Returns: + CreatedInstance: Object can be used as dictionary but with + validations of immutable keys. + """ + + return self + + @property + def transient_data(self): + """Data stored for lifetime of instance object. + + These data are not stored to scene and will be lost on object + deletion. + + Can be used to store objects. In some host implementations is not + possible to reference to object in scene with some unique identifier + (e.g. node in Fusion.). In that case it is handy to store the object + here. Should be used that way only if instance data are stored on the + node itself. + + Returns: + Dict[str, Any]: Dictionary object where you can store data related + to instance for lifetime of instance object. + """ + + return self._transient_data + + def changes(self): + """Calculate and return changes.""" + + return TrackChangesItem(self.origin_data, self.data_to_store()) + + def mark_as_stored(self): + """Should be called when instance data are stored. + + Origin data are replaced by current data so changes are cleared. + """ + + orig_keys = set(self._orig_data.keys()) + for key, value in self._data.items(): + orig_keys.discard(key) + if key in ("creator_attributes", "publish_attributes"): + continue + self._orig_data[key] = copy.deepcopy(value) + + for key in orig_keys: + self._orig_data.pop(key) + + self.creator_attributes.mark_as_stored() + self.publish_attributes.mark_as_stored() + + @property + def creator_attributes(self): + return self._data["creator_attributes"] + + @property + def creator_attribute_defs(self): + """Attribute definitions defined by creator plugin. + + Returns: + List[AbstractAttrDef]: Attribute definitions. + """ + + return self.creator_attributes.attr_defs + + @property + def publish_attributes(self): + return self._data["publish_attributes"] + + def data_to_store(self): + """Collect data that contain json parsable types. + + It is possible to recreate the instance using these data. + + Todos: + We probably don't need OrderedDict. When data are loaded they + are not ordered anymore. + + Returns: + OrderedDict: Ordered dictionary with instance data. + """ + + output = collections.OrderedDict() + for key, value in self._data.items(): + if key in ("creator_attributes", "publish_attributes"): + continue + output[key] = value + + output["creator_attributes"] = self.creator_attributes.data_to_store() + output["publish_attributes"] = self.publish_attributes.data_to_store() + + return output + + @classmethod + def from_existing(cls, instance_data, creator): + """Convert instance data from workfile to CreatedInstance. + + Args: + instance_data (Dict[str, Any]): Data in a structure ready for + 'CreatedInstance' object. + creator (BaseCreator): Creator plugin which is creating the + instance of for which the instance belong. + """ + + instance_data = copy.deepcopy(instance_data) + + product_type = instance_data.get("productType") + if product_type is None: + product_type = instance_data.get("family") + if product_type is None: + product_type = creator.product_type + product_name = instance_data.get("productName") + if product_name is None: + product_name = instance_data.get("subset") + + return cls( + product_type, product_name, instance_data, creator + ) + + def set_publish_plugins(self, attr_plugins): + """Set publish plugins with attribute definitions. + + This method should be called only from 'CreateContext'. 
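        A rough sketch of the expected call site (editor's addition; the
        exact filtering is done by 'CreateContext'):

            attr_plugins = [
                plugin for plugin in publish_plugins
                if issubclass(plugin, AYONPyblishPluginMixin)
            ]
            instance.set_publish_plugins(attr_plugins)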
+ + Args: + attr_plugins (List[pyblish.api.Plugin]): Pyblish plugins which + inherit from 'AYONPyblishPluginMixin' and may contain + attribute definitions. + """ + + self.publish_attributes.set_publish_plugins(attr_plugins) + + def add_members(self, members): + """Currently unused method.""" + + for member in members: + if member not in self._members: + self._members.append(member) + + def serialize_for_remote(self): + """Serialize object into data to be possible recreated object. + + Returns: + Dict[str, Any]: Serialized data. + """ + + creator_attr_defs = self.creator_attributes.get_serialized_attr_defs() + publish_attributes = self.publish_attributes.serialize_attributes() + return { + "data": self.data_to_store(), + "orig_data": self.origin_data, + "creator_attr_defs": creator_attr_defs, + "publish_attributes": publish_attributes, + "creator_label": self._creator_label, + "group_label": self._group_label, + } + + @classmethod + def deserialize_on_remote(cls, serialized_data): + """Convert instance data to CreatedInstance. + + This is fake instance in remote process e.g. in UI process. The creator + is not a full creator and should not be used for calling methods when + instance is created from this method (matters on implementation). + + Args: + serialized_data (Dict[str, Any]): Serialized data for remote + recreating. Should contain 'data' and 'orig_data'. + """ + + instance_data = copy.deepcopy(serialized_data["data"]) + creator_identifier = instance_data["creator_identifier"] + + product_type = instance_data["productType"] + product_name = instance_data.get("productName", None) + + creator_label = serialized_data["creator_label"] + group_label = serialized_data["group_label"] + creator_attr_defs = deserialize_attr_defs( + serialized_data["creator_attr_defs"] + ) + publish_attributes = serialized_data["publish_attributes"] + + obj = cls( + product_type, + product_name, + instance_data, + creator_identifier=creator_identifier, + creator_label=creator_label, + group_label=group_label, + creator_attr_defs=creator_attr_defs + ) + obj._orig_data = serialized_data["orig_data"] + obj.publish_attributes.deserialize_attributes(publish_attributes) + + return obj + + # Context validation related methods/properties + @property + def has_set_folder(self): + """Folder path is set in data.""" + + return "folderPath" in self._data + + @property + def has_set_task(self): + """Task name is set in data.""" + + return "task" in self._data + + @property + def has_valid_context(self): + """Context data are valid for publishing.""" + + return self.has_valid_folder and self.has_valid_task + + @property + def has_valid_folder(self): + """Folder set in context exists in project.""" + + if not self.has_set_folder: + return False + return self._folder_is_valid + + @property + def has_valid_task(self): + """Task set in context exists in project.""" + + if not self.has_set_task: + return False + return self._task_is_valid + + def set_folder_invalid(self, invalid): + # TODO replace with `set_folder_path` + self._folder_is_valid = not invalid + + def set_task_invalid(self, invalid): + # TODO replace with `set_task_name` + self._task_is_valid = not invalid From b0abbf36fb57d6238a1ee8a29669301c8697ede8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:19:57 +0200 Subject: [PATCH 13/28] moved exceptions from product name too --- client/ayon_core/pipeline/create/__init__.py | 6 ++++-- client/ayon_core/pipeline/create/exceptions.py | 13 +++++++++++++ 
client/ayon_core/pipeline/create/product_name.py | 15 +-------------- 3 files changed, 18 insertions(+), 16 deletions(-) diff --git a/client/ayon_core/pipeline/create/__init__.py b/client/ayon_core/pipeline/create/__init__.py index bb05bc6a09..fa8d639c6f 100644 --- a/client/ayon_core/pipeline/create/__init__.py +++ b/client/ayon_core/pipeline/create/__init__.py @@ -17,6 +17,8 @@ from .exceptions import ( CreatorsSaveFailed, CreatorsRemoveFailed, CreatorsOperationFailed, + TaskNotSetError, + TemplateFillError, ) from .structures import CreatedInstance from .utils import ( @@ -25,7 +27,6 @@ from .utils import ( ) from .product_name import ( - TaskNotSetError, get_product_name, get_product_name_template, ) @@ -74,13 +75,14 @@ __all__ = ( "CreatorsSaveFailed", "CreatorsRemoveFailed", "CreatorsOperationFailed", + "TaskNotSetError", + "TemplateFillError", "CreatedInstance", "get_last_versions_for_instances", "get_next_versions_for_instances", - "TaskNotSetError", "get_product_name", "get_product_name_template", diff --git a/client/ayon_core/pipeline/create/exceptions.py b/client/ayon_core/pipeline/create/exceptions.py index 24264840cb..8910d3fa09 100644 --- a/client/ayon_core/pipeline/create/exceptions.py +++ b/client/ayon_core/pipeline/create/exceptions.py @@ -112,3 +112,16 @@ class CreatorsCreateFailed(CreatorsOperationFailed): msg = "Failed to create instances" super().__init__(msg, failed_info) + +class TaskNotSetError(KeyError): + def __init__(self, msg=None): + if not msg: + msg = "Creator's product name template requires task name." + super().__init__(msg) + + +class TemplateFillError(Exception): + def __init__(self, msg=None): + if not msg: + msg = "Creator's product name template is missing key value." + super().__init__(msg) diff --git a/client/ayon_core/pipeline/create/product_name.py b/client/ayon_core/pipeline/create/product_name.py index 8a08bdc36c..0c6fb70169 100644 --- a/client/ayon_core/pipeline/create/product_name.py +++ b/client/ayon_core/pipeline/create/product_name.py @@ -4,20 +4,7 @@ from ayon_core.settings import get_project_settings from ayon_core.lib import filter_profiles, prepare_template_data from .constants import DEFAULT_PRODUCT_TEMPLATE - - -class TaskNotSetError(KeyError): - def __init__(self, msg=None): - if not msg: - msg = "Creator's product name template requires task name." - super(TaskNotSetError, self).__init__(msg) - - -class TemplateFillError(Exception): - def __init__(self, msg=None): - if not msg: - msg = "Creator's product name template is missing key value." 
- super(TemplateFillError, self).__init__(msg) +from .exceptions import TaskNotSetError, TemplateFillError def get_product_name_template( From 5d4e086978e7411b0705fcb340c3e8bf9ba2f9ff Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:21:50 +0200 Subject: [PATCH 14/28] remove python 2 compatibility --- client/ayon_core/pipeline/create/changes.py | 2 +- client/ayon_core/pipeline/create/creator_plugins.py | 2 +- client/ayon_core/pipeline/create/legacy_create.py | 2 +- client/ayon_core/pipeline/create/structures.py | 8 ++++---- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/client/ayon_core/pipeline/create/changes.py b/client/ayon_core/pipeline/create/changes.py index 217478ee30..c8b81cac48 100644 --- a/client/ayon_core/pipeline/create/changes.py +++ b/client/ayon_core/pipeline/create/changes.py @@ -3,7 +3,7 @@ import copy _EMPTY_VALUE = object() -class TrackChangesItem(object): +class TrackChangesItem: """Helper object to track changes in data. Has access to full old and new data and will create deep copy of them, diff --git a/client/ayon_core/pipeline/create/creator_plugins.py b/client/ayon_core/pipeline/create/creator_plugins.py index 1e09eb62a1..61c10ee736 100644 --- a/client/ayon_core/pipeline/create/creator_plugins.py +++ b/client/ayon_core/pipeline/create/creator_plugins.py @@ -644,7 +644,7 @@ class Creator(BaseCreator): cls._get_default_variant_wrap, cls._set_default_variant_wrap ) - super(Creator, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) @property def show_order(self): diff --git a/client/ayon_core/pipeline/create/legacy_create.py b/client/ayon_core/pipeline/create/legacy_create.py index fc24bcf934..ec9b23ac62 100644 --- a/client/ayon_core/pipeline/create/legacy_create.py +++ b/client/ayon_core/pipeline/create/legacy_create.py @@ -14,7 +14,7 @@ from ayon_core.pipeline.constants import AVALON_INSTANCE_ID from .product_name import get_product_name -class LegacyCreator(object): +class LegacyCreator: """Determine how assets are created""" label = None product_type = None diff --git a/client/ayon_core/pipeline/create/structures.py b/client/ayon_core/pipeline/create/structures.py index 7fe854c4fc..41c130214d 100644 --- a/client/ayon_core/pipeline/create/structures.py +++ b/client/ayon_core/pipeline/create/structures.py @@ -16,7 +16,7 @@ from .exceptions import ImmutableKeyError from .changes import TrackChangesItem -class ConvertorItem(object): +class ConvertorItem: """Item representing convertor plugin. Args: @@ -69,7 +69,7 @@ class InstanceMember: }) -class AttributeValues(object): +class AttributeValues: """Container which keep values of Attribute definitions. 
Goal is to have one object which hold values of attribute definitions for @@ -210,7 +210,7 @@ class CreatorAttributeValues(AttributeValues): def __init__(self, instance, *args, **kwargs): self.instance = instance - super(CreatorAttributeValues, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) class PublishAttributeValues(AttributeValues): @@ -226,7 +226,7 @@ class PublishAttributeValues(AttributeValues): def __init__(self, publish_attributes, *args, **kwargs): self.publish_attributes = publish_attributes - super(PublishAttributeValues, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) @property def parent(self): From 7ef60ad4c91af830376d3ffeccd825dd5133e3f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:22:03 +0200 Subject: [PATCH 15/28] fix 'update' method --- client/ayon_core/pipeline/create/structures.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/client/ayon_core/pipeline/create/structures.py b/client/ayon_core/pipeline/create/structures.py index 41c130214d..4f7caa6e11 100644 --- a/client/ayon_core/pipeline/create/structures.py +++ b/client/ayon_core/pipeline/create/structures.py @@ -112,10 +112,7 @@ class AttributeValues: if key not in self._attr_defs_by_key: raise KeyError("Key \"{}\" was not found.".format(key)) - old_value = self._data.get(key) - if old_value == value: - return - self._data[key] = value + self.update({key: value}) def __getitem__(self, key): if key not in self._attr_defs_by_key: @@ -142,8 +139,12 @@ class AttributeValues: yield key, self._data.get(key) def update(self, value): - for _key, _value in dict(value): - self[_key] = _value + changes = {} + for _key, _value in dict(value).items(): + if _key in self._data and self._data.get(_key) == _value: + continue + self._data[_key] = _value + changes[_key] = _value def pop(self, key, default=None): value = self._data.pop(key, default) From d5602cb89a3123dcc5d7b457b40e652db9133c78 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:27:30 +0200 Subject: [PATCH 16/28] simpler import --- client/ayon_core/pipeline/create/context.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index a11bc311dc..76eb620b4d 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -14,9 +14,7 @@ import ayon_api from ayon_core.settings import get_project_settings from ayon_core.lib import is_func_signature_supported -from ayon_core.lib.attribute_definitions import ( - get_default_values, -) +from ayon_core.lib.attribute_definitions import get_default_values from ayon_core.host import IPublishHost, IWorkfileHost from ayon_core.pipeline import Anatomy from ayon_core.pipeline.plugin_discover import DiscoverResult From 4b8b57e39a47cf446b9368f6a8de146d0de04b6c Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:27:41 +0200 Subject: [PATCH 17/28] remove unecessary line --- client/ayon_core/pipeline/create/context.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 76eb620b4d..f5ba7b4774 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -373,7 +373,6 @@ class CreateContext: 
self._current_task_entity = task_entity return copy.deepcopy(self._current_task_entity) - def get_current_workfile_path(self): """Workfile path which was opened on context reset. From 3296079df689acff6d2d950043428b3ebaaa1b91 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:27:50 +0200 Subject: [PATCH 18/28] make sure exc_info is defined --- client/ayon_core/pipeline/create/context.py | 1 + 1 file changed, 1 insertion(+) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index f5ba7b4774..2326a829e3 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -871,6 +871,7 @@ class CreateContext: add_traceback = False result = None fail_info = None + exc_info = None success = False try: From 04b37b83c64e294da9255698c01c227fe443cb29 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:27:59 +0200 Subject: [PATCH 19/28] fix bulk processing --- client/ayon_core/pipeline/create/context.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 2326a829e3..b3a46bb778 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -965,9 +965,11 @@ class CreateContext: finally: self._bulk_counter -= 1 - # Trigger validation if there is no more context manager for bulk - # instance validation - if self._bulk_counter == 0: + # Trigger validation if there is no more context manager for bulk + # instance validation + if self._bulk_counter != 0: + return + ( self._bulk_instances_to_process, instances_to_validate From 714f58fbd692dd31f5446c8f4b1270dcd0cb0b32 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 26 Aug 2024 11:10:54 +0200 Subject: [PATCH 20/28] excape parenthesis for shell --- client/ayon_core/plugins/publish/extract_review.py | 7 +++++++ client/ayon_core/plugins/publish/extract_review_slate.py | 7 +++++++ client/ayon_core/plugins/publish/extract_thumbnail.py | 8 ++++++++ 3 files changed, 22 insertions(+) diff --git a/client/ayon_core/plugins/publish/extract_review.py b/client/ayon_core/plugins/publish/extract_review.py index c2793f98a2..b2531ebae9 100644 --- a/client/ayon_core/plugins/publish/extract_review.py +++ b/client/ayon_core/plugins/publish/extract_review.py @@ -454,6 +454,13 @@ class ExtractReview(pyblish.api.InstancePlugin): raise NotImplementedError subprcs_cmd = " ".join(ffmpeg_args) + if os.getenv("SHELL") in ("/bin/bash", "/bin/sh"): + # Escape parentheses for bash + subprcs_cmd = ( + subprcs_cmd + .replace("(", "\\(") + .replace(")", "\\)") + ) # run subprocess self.log.debug("Executing: {}".format(subprcs_cmd)) diff --git a/client/ayon_core/plugins/publish/extract_review_slate.py b/client/ayon_core/plugins/publish/extract_review_slate.py index 35f55e275c..01a65e89ae 100644 --- a/client/ayon_core/plugins/publish/extract_review_slate.py +++ b/client/ayon_core/plugins/publish/extract_review_slate.py @@ -269,6 +269,13 @@ class ExtractReviewSlate(publish.Extractor): " ".join(output_args) ] slate_subprocess_cmd = " ".join(slate_args) + if os.getenv("SHELL") in ("/bin/bash", "/bin/sh"): + # Escape parentheses for bash + slate_subprocess_cmd = ( + slate_subprocess_cmd + .replace("(", "\\(") + .replace(")", "\\)") + ) # run slate generation subprocess 
self.log.debug( diff --git a/client/ayon_core/plugins/publish/extract_thumbnail.py b/client/ayon_core/plugins/publish/extract_thumbnail.py index d1b6e4e0cc..328cb308b9 100644 --- a/client/ayon_core/plugins/publish/extract_thumbnail.py +++ b/client/ayon_core/plugins/publish/extract_thumbnail.py @@ -455,6 +455,14 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): # output file jpeg_items.append(path_to_subprocess_arg(dst_path)) subprocess_command = " ".join(jpeg_items) + if os.getenv("SHELL") in ("/bin/bash", "/bin/sh"): + # Escape parentheses for bash + subprocess_command = ( + subprocess_command + .replace("(", "\\(") + .replace(")", "\\)") + ) + try: run_subprocess( subprocess_command, shell=True, logger=self.log From b309ec6967cb33070413117d98af95eb188f6508 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 27 Aug 2024 14:55:28 +0200 Subject: [PATCH 21/28] Match the defaults from settings - so that if for whatever reason settings do not exist (e.g. dev mode using older addon on server in bundle) that it still runs backwards compatible and has this disabled by default --- .../plugins/publish/extract_usd_layer_contributions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py b/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py index 7ed129a127..8b58f447d6 100644 --- a/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py +++ b/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py @@ -562,7 +562,7 @@ class ExtractUSDLayerContribution(publish.Extractor): label = "Extract USD Layer Contributions (Asset/Shot)" order = pyblish.api.ExtractorOrder + 0.45 - use_ayon_entity_uri = True + use_ayon_entity_uri = False def process(self, instance): @@ -724,7 +724,7 @@ class ExtractUSDAssetContribution(publish.Extractor): label = "Extract USD Asset/Shot Contributions" order = ExtractUSDLayerContribution.order + 0.01 - use_ayon_entity_uri = True + use_ayon_entity_uri = False def process(self, instance): From 911ef45a458a51a90d5e5caee2058748cfc68b67 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 28 Aug 2024 10:33:48 +0200 Subject: [PATCH 22/28] handle escape in 'run_subprocess' --- client/ayon_core/lib/execute.py | 14 ++++++++++++++ client/ayon_core/plugins/publish/extract_review.py | 7 ------- .../plugins/publish/extract_review_slate.py | 7 ------- .../ayon_core/plugins/publish/extract_thumbnail.py | 7 ------- 4 files changed, 14 insertions(+), 21 deletions(-) diff --git a/client/ayon_core/lib/execute.py b/client/ayon_core/lib/execute.py index bc55c27bd8..4e6cb415e7 100644 --- a/client/ayon_core/lib/execute.py +++ b/client/ayon_core/lib/execute.py @@ -108,6 +108,20 @@ def run_subprocess(*args, **kwargs): | getattr(subprocess, "CREATE_NO_WINDOW", 0) ) + # Escape parentheses for bash + if ( + kwargs.get("shell") is True + and len(args) == 1 + and isinstance(args[0], str) + and os.getenv("SHELL") in ("/bin/bash", "/bin/sh") + ): + new_arg = ( + args[0] + .replace("(", "\\(") + .replace(")", "\\)") + ) + args = (new_arg, ) + # Get environents from kwarg or use current process environments if were # not passed. 
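    # Editor's note (illustrative, not part of the original patch): with
    # "shell=True" bash treats bare parentheses as special syntax, so a
    # command containing e.g. a path like "review (v001).mov" or an ffmpeg
    # filter expression with "(" would typically abort with a shell syntax
    # error unless the parentheses are escaped as "\(" and "\)" above.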
env = kwargs.get("env") or os.environ diff --git a/client/ayon_core/plugins/publish/extract_review.py b/client/ayon_core/plugins/publish/extract_review.py index b2531ebae9..c2793f98a2 100644 --- a/client/ayon_core/plugins/publish/extract_review.py +++ b/client/ayon_core/plugins/publish/extract_review.py @@ -454,13 +454,6 @@ class ExtractReview(pyblish.api.InstancePlugin): raise NotImplementedError subprcs_cmd = " ".join(ffmpeg_args) - if os.getenv("SHELL") in ("/bin/bash", "/bin/sh"): - # Escape parentheses for bash - subprcs_cmd = ( - subprcs_cmd - .replace("(", "\\(") - .replace(")", "\\)") - ) # run subprocess self.log.debug("Executing: {}".format(subprcs_cmd)) diff --git a/client/ayon_core/plugins/publish/extract_review_slate.py b/client/ayon_core/plugins/publish/extract_review_slate.py index 01a65e89ae..35f55e275c 100644 --- a/client/ayon_core/plugins/publish/extract_review_slate.py +++ b/client/ayon_core/plugins/publish/extract_review_slate.py @@ -269,13 +269,6 @@ class ExtractReviewSlate(publish.Extractor): " ".join(output_args) ] slate_subprocess_cmd = " ".join(slate_args) - if os.getenv("SHELL") in ("/bin/bash", "/bin/sh"): - # Escape parentheses for bash - slate_subprocess_cmd = ( - slate_subprocess_cmd - .replace("(", "\\(") - .replace(")", "\\)") - ) # run slate generation subprocess self.log.debug( diff --git a/client/ayon_core/plugins/publish/extract_thumbnail.py b/client/ayon_core/plugins/publish/extract_thumbnail.py index 328cb308b9..4ffabf6028 100644 --- a/client/ayon_core/plugins/publish/extract_thumbnail.py +++ b/client/ayon_core/plugins/publish/extract_thumbnail.py @@ -455,13 +455,6 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): # output file jpeg_items.append(path_to_subprocess_arg(dst_path)) subprocess_command = " ".join(jpeg_items) - if os.getenv("SHELL") in ("/bin/bash", "/bin/sh"): - # Escape parentheses for bash - subprocess_command = ( - subprocess_command - .replace("(", "\\(") - .replace(")", "\\)") - ) try: run_subprocess( From 102ec52dd0c88d466e33adc947f3df1ffff33ee0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 28 Aug 2024 17:30:18 +0200 Subject: [PATCH 23/28] add exceptions moved to different file --- client/ayon_core/pipeline/create/context.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index b3a46bb778..69103159c6 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -39,6 +39,13 @@ from .creator_plugins import ( discover_convertor_plugins, ) +# Import of exceptions that were moved to different file +from .exceptions import ( + ImmutableKeyError, + CreatorsOperationFailed, + ConvertorsOperationFailed, +) # noqa: F401 + # Changes of instances and context are send as tuple of 2 information UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) _NOT_SET = object() From f414d73f7dcd5d4359795154028c3ab355cd5dd3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 28 Aug 2024 17:58:34 +0200 Subject: [PATCH 24/28] use shorter import --- client/ayon_core/tools/publisher/abstract.py | 2 +- client/ayon_core/tools/publisher/models/create.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/client/ayon_core/tools/publisher/abstract.py b/client/ayon_core/tools/publisher/abstract.py index 768f4b052f..ce9c6ac1ed 100644 --- a/client/ayon_core/tools/publisher/abstract.py 
+++ b/client/ayon_core/tools/publisher/abstract.py
@@ -14,7 +14,7 @@ from typing import (
 from ayon_core.lib import AbstractAttrDef
 from ayon_core.host import HostBase
 from ayon_core.pipeline.create import CreateContext, CreatedInstance
-from ayon_core.pipeline.create.context import ConvertorItem
+from ayon_core.pipeline.create import ConvertorItem
 from ayon_core.tools.common_models import (
     FolderItem,
     TaskItem,
diff --git a/client/ayon_core/tools/publisher/models/create.py b/client/ayon_core/tools/publisher/models/create.py
index ab2bf07614..9fe114f4bd 100644
--- a/client/ayon_core/tools/publisher/models/create.py
+++ b/client/ayon_core/tools/publisher/models/create.py
@@ -18,7 +18,7 @@ from ayon_core.pipeline.create import (
     CreateContext,
     CreatedInstance,
 )
-from ayon_core.pipeline.create.context import (
+from ayon_core.pipeline.create import (
     CreatorsOperationFailed,
     ConvertorsOperationFailed,
     ConvertorItem,

From a4ed32e3e114480e0fe54624b664b04c90d21e92 Mon Sep 17 00:00:00 2001
From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com>
Date: Wed, 28 Aug 2024 18:12:56 +0200
Subject: [PATCH 25/28] added structures to init file

---
 client/ayon_core/pipeline/create/__init__.py | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)

diff --git a/client/ayon_core/pipeline/create/__init__.py b/client/ayon_core/pipeline/create/__init__.py
index fa8d639c6f..ced43528eb 100644
--- a/client/ayon_core/pipeline/create/__init__.py
+++ b/client/ayon_core/pipeline/create/__init__.py
@@ -20,7 +20,14 @@ from .exceptions import (
     TaskNotSetError,
     TemplateFillError,
 )
-from .structures import CreatedInstance
+from .structures import (
+    CreatedInstance,
+    ConvertorItem,
+    AttributeValues,
+    CreatorAttributeValues,
+    PublishAttributeValues,
+    PublishAttributes,
+)
 from .utils import (
     get_last_versions_for_instances,
     get_next_versions_for_instances,
@@ -79,6 +86,11 @@ __all__ = (
     "TemplateFillError",

     "CreatedInstance",
+    "ConvertorItem",
+    "AttributeValues",
+    "CreatorAttributeValues",
+    "PublishAttributeValues",
+    "PublishAttributes",

     "get_last_versions_for_instances",
     "get_next_versions_for_instances",

From e33f8670fd9cc3f1c3c99cfad8d989b5c0cd6cf6 Mon Sep 17 00:00:00 2001
From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com>
Date: Wed, 28 Aug 2024 18:13:06 +0200
Subject: [PATCH 26/28] fake import classes from structures too

---
 client/ayon_core/pipeline/create/context.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py
index 69103159c6..71ba3b7799 100644
--- a/client/ayon_core/pipeline/create/context.py
+++ b/client/ayon_core/pipeline/create/context.py
@@ -39,12 +39,18 @@ from .creator_plugins import (
     discover_convertor_plugins,
 )

-# Import of exceptions that were moved to different file
+# Import of functions and classes that were moved to different file
+# TODO Should be removed in future release - Added 24/08/28, 0.4.3-dev.1
 from .exceptions import (
     ImmutableKeyError,
     CreatorsOperationFailed,
     ConvertorsOperationFailed,
 ) # noqa: F401
+from .structures import (
+    AttributeValues,
+    CreatorAttributeValues,
+    PublishAttributeValues,
+) # noqa: F401

 # Changes of instances and context are send as tuple of 2 information
 UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"])

From 0ce2bf6633bdfc1ac7ef6b6ec9fbcf2c8cd64066 Mon Sep 17 00:00:00 2001
From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com>
Date: Wed, 28 Aug 2024 18:25:40 +0200
Subject: [PATCH 27/28] changed noqa location --- client/ayon_core/pipeline/create/context.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 71ba3b7799..3f067427fa 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -42,15 +42,15 @@ from .creator_plugins import ( # Import of functions and classes that were moved to different file # TODO Should be removed in future release - Added 24/08/28, 0.4.3-dev.1 from .exceptions import ( - ImmutableKeyError, - CreatorsOperationFailed, - ConvertorsOperationFailed, -) # noqa: F401 + ImmutableKeyError, # noqa: F401 + CreatorsOperationFailed, # noqa: F401 + ConvertorsOperationFailed, # noqa: F401 +) from .structures import ( - AttributeValues, - CreatorAttributeValues, - PublishAttributeValues, -) # noqa: F401 + AttributeValues, # noqa: F401 + CreatorAttributeValues, # noqa: F401 + PublishAttributeValues, # noqa: F401 +) # Changes of instances and context are send as tuple of 2 information UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) From 237e17b658d2cf016614e8c8ac30a69a2e0208e0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 28 Aug 2024 18:34:20 +0200 Subject: [PATCH 28/28] merge import --- client/ayon_core/tools/publisher/abstract.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/client/ayon_core/tools/publisher/abstract.py b/client/ayon_core/tools/publisher/abstract.py index ce9c6ac1ed..362fa38882 100644 --- a/client/ayon_core/tools/publisher/abstract.py +++ b/client/ayon_core/tools/publisher/abstract.py @@ -13,8 +13,11 @@ from typing import ( from ayon_core.lib import AbstractAttrDef from ayon_core.host import HostBase -from ayon_core.pipeline.create import CreateContext, CreatedInstance -from ayon_core.pipeline.create import ConvertorItem +from ayon_core.pipeline.create import ( + CreateContext, + CreatedInstance, + ConvertorItem, +) from ayon_core.tools.common_models import ( FolderItem, TaskItem,
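
Patches 23 through 28 add up to a conventional backwards-compatibility pattern for moving definitions between modules: the new modules (exceptions.py, structures.py) own the classes, the old module (context.py) re-imports them so existing deep imports keep working, the package __init__.py and its __all__ expose the same names, and callers then switch to the shorter package-level import. Because a "# noqa" comment only applies to its own physical line, moving the marker onto each imported name rather than the closing parenthesis is presumably what patch 27 corrects. The sketch below condenses that layout using a hypothetical package called mypkg; only the final before/after import pair reuses the real ayon_core paths from patches 24 and 28, everything else is illustrative rather than the actual AYON code.

    # mypkg/exceptions.py: new home of a moved class.
    class CreatorsOperationFailed(Exception):
        """Illustrative placeholder, not the real AYON class body."""


    # mypkg/context.py: the old location keeps the name importable for
    # callers that still do `from mypkg.context import ...`. The per-name
    # noqa marker suppresses the "imported but unused" warning (F401) on
    # exactly the line the linter reports it.
    from .exceptions import (
        CreatorsOperationFailed,  # noqa: F401
    )


    # mypkg/__init__.py: expose the moved class at package level so the
    # shorter import works.
    from .exceptions import CreatorsOperationFailed

    __all__ = ("CreatorsOperationFailed",)


    # Downstream code, before (deep import into an implementation module):
    from ayon_core.pipeline.create.context import ConvertorItem

    # Downstream code, after (package-level import, as in patches 24 and 28):
    from ayon_core.pipeline.create import (
        CreateContext,
        CreatedInstance,
        ConvertorItem,
    )

Keeping the old import path alive while the TODO added in patch 26 marks it for removal gives downstream code at least one release cycle to migrate before the compatibility shims disappear.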