Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 12:54:40 +01:00)

Commit b1db949ecc: Merge branch 'develop' into enhancement/initial-support-for-folder-in-product-name

# Conflicts:
#   client/ayon_core/pipeline/create/product_name.py

29 changed files with 1360 additions and 246 deletions
@@ -185,9 +185,14 @@ class IPluginPaths(AYONInterface):
        """
        return self._get_plugin_paths_by_type("inventory")

    def get_loader_action_plugin_paths(self) -> list[str]:
    def get_loader_action_plugin_paths(
        self, host_name: Optional[str]
    ) -> list[str]:
        """Receive loader action plugin paths.

        Args:
            host_name (Optional[str]): Current host name.

        Returns:
            list[str]: Paths to loader action plugins.
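For addon developers, the updated interface can be satisfied with something like the sketch below. The addon class, identifier and paths are invented for illustration; only the method signature mirrors the change above, and `AYONAddon`/`IPluginPaths` are assumed to be importable from `ayon_core.addon` as elsewhere in this diff.

```python
from typing import Optional

from ayon_core.addon import AYONAddon, IPluginPaths


class ExampleAddon(AYONAddon, IPluginPaths):
    """Hypothetical addon implementing the new optional host filter."""

    name = "example_addon"

    def get_loader_action_plugin_paths(
        self, host_name: Optional[str]
    ) -> list[str]:
        # Generic plugins are always returned; a host-specific folder is
        # only added when a host name is known.
        paths = ["/studio/addons/example_addon/plugins/loader"]
        if host_name:
            paths.append(
                f"/studio/addons/example_addon/plugins/loader/{host_name}"
            )
        return paths
```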
@@ -1232,6 +1232,14 @@ def oiio_color_convert(
    # Handle the different conversion cases
    # Source view and display are known
    if source_view and source_display:
        color_convert_args = None
        ocio_display_args = None
        oiio_cmd.extend([
            "--ociodisplay:inverse=1:subimages=0",
            source_display,
            source_view,
        ])

        if target_colorspace:
            # This is a two-step conversion process since there's no direct
            # display/view to colorspace command

@@ -1241,22 +1249,25 @@ def oiio_color_convert(
        elif source_display != target_display or source_view != target_view:
            # Complete display/view pair conversion
            # - go through a reference space
            color_convert_args = (target_display, target_view)
            ocio_display_args = (target_display, target_view)
        else:
            color_convert_args = None
            logger.debug(
                "Source and target display/view pairs are identical."
                " No color conversion needed."
            )

        if color_convert_args:
            # Use colorconvert for colorspace target
            oiio_cmd.extend([
                "--ociodisplay:inverse=1:subimages=0",
                source_display,
                source_view,
                "--colorconvert:subimages=0",
                *color_convert_args
            ])
        elif ocio_display_args:
            # Use ociodisplay for display/view target
            oiio_cmd.extend([
                "--ociodisplay:subimages=0",
                *ocio_display_args
            ])

    elif target_colorspace:
        # Standard color space to color space conversion
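For orientation, a rough sketch of the argument list this branching is aiming at for a display/view to display/view conversion. The display and view names are invented, and the surrounding command setup is simplified; this is not the exact code path above.

```python
# Hypothetical source and target pairs.
source_display, source_view = "ACES", "sRGB"
target_display, target_view = "ACES", "Rec.709"

ocio_display_args = (target_display, target_view)

oiio_cmd = ["oiiotool", "input.exr"]
# Undo the source display/view to get back to the reference space ...
oiio_cmd.extend([
    "--ociodisplay:inverse=1:subimages=0",
    source_display,
    source_view,
])
# ... then apply the target display/view.
oiio_cmd.extend([
    "--ociodisplay:subimages=0",
    *ocio_display_args,
])
oiio_cmd.extend(["-o", "output.exr"])
```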
@@ -1281,24 +1292,6 @@ def oiio_color_convert(
    run_subprocess(oiio_cmd, logger=logger)


def split_cmd_args(in_args):
    """Makes sure all entered arguments are separated in individual items.

    Split each argument string with " -" to identify if string contains
    one or more arguments.
    Args:
        in_args (list): of arguments ['-n', '-d uint10']
    Returns
        (list): ['-n', '-d', 'unint10']
    """
    splitted_args = []
    for arg in in_args:
        if not arg.strip():
            continue
        splitted_args.extend(arg.split(" "))
    return splitted_args


def get_rescaled_command_arguments(
    application,
    input_path,
@@ -70,7 +70,7 @@ from dataclasses import dataclass
import ayon_api

from ayon_core import AYON_CORE_ROOT
from ayon_core.lib import StrEnum, Logger
from ayon_core.lib import StrEnum, Logger, is_func_signature_supported
from ayon_core.host import AbstractHost
from ayon_core.addon import AddonsManager, IPluginPaths
from ayon_core.settings import get_studio_settings, get_project_settings

@@ -752,6 +752,7 @@ class LoaderActionsContext:

    def _get_plugins(self) -> dict[str, LoaderActionPlugin]:
        if self._plugins is None:
            host_name = self.get_host_name()
            addons_manager = self.get_addons_manager()
            all_paths = [
                os.path.join(AYON_CORE_ROOT, "plugins", "loader")

@@ -759,7 +760,24 @@ class LoaderActionsContext:
            for addon in addons_manager.addons:
                if not isinstance(addon, IPluginPaths):
                    continue
                paths = addon.get_loader_action_plugin_paths()

                try:
                    if is_func_signature_supported(
                        addon.get_loader_action_plugin_paths,
                        host_name
                    ):
                        paths = addon.get_loader_action_plugin_paths(
                            host_name
                        )
                    else:
                        paths = addon.get_loader_action_plugin_paths()
                except Exception:
                    self._log.warning(
                        "Failed to get plugin paths for addon",
                        exc_info=True
                    )
                    continue

                if paths:
                    all_paths.extend(paths)
@@ -7,6 +7,7 @@ import platform
import tempfile
import warnings
from copy import deepcopy
from dataclasses import dataclass

import ayon_api


@@ -26,6 +27,18 @@ from ayon_core.pipeline.load import get_representation_path_with_anatomy
log = Logger.get_logger(__name__)


@dataclass
class ConfigData:
    """OCIO Config to use in a certain context.

    When enabled and no path/template are set, it will be considered invalid
    and will error on OCIO path not found. Enabled must be False to explicitly
    allow OCIO to be disabled."""
    path: str = ""
    template: str = ""
    enabled: bool = True


class CachedData:
    remapping = {}
    has_compatible_ocio_package = None
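The dataclass replaces the dict-or-None values returned before. A minimal sketch of how callers are expected to treat it, mirroring the `get_imageio_config_preset` change further below; the paths here are made up.

```python
config_data = ConfigData(
    path="/configs/aces_1.3/config.ocio",
    template="{root[work]}/configs/aces_1.3/config.ocio",
)

if not config_data.enabled:
    # A profile explicitly disabled OCIO management.
    result = {}
elif not config_data.path:
    # Enabled but nothing resolved is treated as a configuration error.
    raise FileExistsError("No OCIO config found in settings.")
else:
    result = {"path": config_data.path, "template": config_data.template}
```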
@ -710,7 +723,7 @@ def _get_config_path_from_profile_data(
|
|||
template_data (dict[str, Any]): Template data.
|
||||
|
||||
Returns:
|
||||
dict[str, str]: Config data with path and template.
|
||||
ConfigData: Config data with path and template.
|
||||
"""
|
||||
template = profile[profile_type]
|
||||
result = StringTemplate.format_strict_template(
|
||||
|
|
@ -719,12 +732,12 @@ def _get_config_path_from_profile_data(
|
|||
normalized_path = str(result.normalized())
|
||||
if not os.path.exists(normalized_path):
|
||||
log.warning(f"Path was not found '{normalized_path}'.")
|
||||
return None
|
||||
return ConfigData() # Return invalid config data
|
||||
|
||||
return {
|
||||
"path": normalized_path,
|
||||
"template": template
|
||||
}
|
||||
return ConfigData(
|
||||
path=normalized_path,
|
||||
template=template
|
||||
)
|
||||
|
||||
|
||||
def _get_global_config_data(
|
||||
|
|
@ -735,7 +748,7 @@ def _get_global_config_data(
|
|||
imageio_global,
|
||||
folder_id,
|
||||
log,
|
||||
):
|
||||
) -> ConfigData:
|
||||
"""Get global config data.
|
||||
|
||||
Global config from core settings is using profiles that are based on
|
||||
|
|
@ -759,8 +772,7 @@ def _get_global_config_data(
|
|||
log (logging.Logger): Logger object.
|
||||
|
||||
Returns:
|
||||
Union[dict[str, str], None]: Config data with path and template
|
||||
or None.
|
||||
ConfigData: Config data with path and template.
|
||||
|
||||
"""
|
||||
task_name = task_type = None
|
||||
|
|
@ -779,12 +791,14 @@ def _get_global_config_data(
|
|||
)
|
||||
if profile is None:
|
||||
log.info(f"No config profile matched filters {str(filter_values)}")
|
||||
return None
|
||||
return ConfigData(enabled=False)
|
||||
|
||||
profile_type = profile["type"]
|
||||
if profile_type in ("builtin_path", "custom_path"):
|
||||
if profile_type in {"builtin_path", "custom_path"}:
|
||||
return _get_config_path_from_profile_data(
|
||||
profile, profile_type, template_data)
|
||||
elif profile_type == "disabled":
|
||||
return ConfigData(enabled=False)
|
||||
|
||||
# TODO decide if this is the right name for representation
|
||||
repre_name = "ocioconfig"
|
||||
|
|
@ -798,7 +812,7 @@ def _get_global_config_data(
|
|||
"Colorspace OCIO config path cannot be set. "
|
||||
"Profile is set to published product but `Product name` is empty."
|
||||
)
|
||||
return None
|
||||
return ConfigData()
|
||||
|
||||
folder_info = template_data.get("folder")
|
||||
if not folder_info:
|
||||
|
|
@ -819,7 +833,7 @@ def _get_global_config_data(
|
|||
)
|
||||
if not folder_entity:
|
||||
log.warning(f"Folder entity '{folder_path}' was not found..")
|
||||
return None
|
||||
return ConfigData()
|
||||
folder_id = folder_entity["id"]
|
||||
|
||||
product_entities_by_name = {
|
||||
|
|
@ -855,7 +869,7 @@ def _get_global_config_data(
|
|||
log.info(
|
||||
f"Product '{product_name}' does not have available any versions."
|
||||
)
|
||||
return None
|
||||
return ConfigData()
|
||||
|
||||
# Find 'ocioconfig' representation entity
|
||||
repre_entity = ayon_api.get_representation_by_name(
|
||||
|
|
@ -868,15 +882,15 @@ def _get_global_config_data(
|
|||
f"Representation '{repre_name}'"
|
||||
f" not found on product '{product_name}'."
|
||||
)
|
||||
return None
|
||||
return ConfigData()
|
||||
|
||||
path = get_representation_path_with_anatomy(repre_entity, anatomy)
|
||||
template = repre_entity["attrib"]["template"]
|
||||
|
||||
return {
|
||||
"path": path,
|
||||
"template": template,
|
||||
}
|
||||
return ConfigData(
|
||||
path=path,
|
||||
template=template
|
||||
)
|
||||
|
||||
|
||||
def get_imageio_config_preset(
|
||||
|
|
@@ -1015,13 +1029,19 @@ get_imageio_config_preset(
        host_ocio_config["filepath"], template_data
    )

    if not config_data:
    if not config_data.enabled:
        return {}  # OCIO management disabled

    if not config_data.path:
        raise FileExistsError(
            "No OCIO config found in settings. It is"
            " either missing or there is typo in path inputs"
        )

    return config_data
    return {
        "path": config_data.path,
        "template": config_data.template,
    }


def _get_host_config_data(templates, template_data):
@@ -1,4 +1,5 @@
"""Package to handle compatibility checks for pipeline components."""
import ayon_api


def is_product_base_type_supported() -> bool:

@@ -13,4 +14,7 @@ def is_product_base_type_supported() -> bool:
        bool: True if product base types are supported, False otherwise.

    """
    return False
    if not hasattr(ayon_api, "is_product_base_type_supported"):
        return False
    return ayon_api.is_product_base_type_supported()
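A sketch of how pipeline code can branch on this check before writing the new key. It assumes the helper lives under `ayon_core.pipeline.compatibility`, as the module docstring suggests; the payload is illustrative only.

```python
from ayon_core.pipeline.compatibility import is_product_base_type_supported

product_data = {"productType": "render"}
if is_product_base_type_supported():
    # Only send the base type when both ayon_api and the server know it.
    product_data["productBaseType"] = "render"
```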
@@ -15,6 +15,7 @@ from typing import (
    Any,
    Callable,
)
from warnings import warn

import pyblish.logic
import pyblish.api

@@ -752,13 +753,13 @@ class CreateContext:
        manual_creators = {}
        report = discover_creator_plugins(return_report=True)
        self.creator_discover_result = report
        for creator_class in report.plugins:
            if inspect.isabstract(creator_class):
                self.log.debug(
                    "Skipping abstract Creator {}".format(str(creator_class))
                )
                continue
        for creator_class in report.abstract_plugins:
            self.log.debug(
                "Skipping abstract Creator '%s'",
                str(creator_class)
            )

        for creator_class in report.plugins:
            creator_identifier = creator_class.identifier
            if creator_identifier in creators:
                self.log.warning(

@@ -772,19 +773,17 @@ class CreateContext:
                creator_class.host_name
                and creator_class.host_name != self.host_name
            ):
                self.log.info((
                    "Creator's host name \"{}\""
                    " is not supported for current host \"{}\""
                ).format(creator_class.host_name, self.host_name))
                self.log.info(
                    (
                        'Creator\'s host name "{}"'
                        ' is not supported for current host "{}"'
                    ).format(creator_class.host_name, self.host_name)
                )
                continue

            # TODO report initialization error
            try:
                creator = creator_class(
                    project_settings,
                    self,
                    self.headless
                )
                creator = creator_class(project_settings, self, self.headless)
            except Exception:
                self.log.error(
                    f"Failed to initialize plugin: {creator_class}",

@@ -792,6 +791,19 @@ class CreateContext:
                )
                continue

            if not creator.product_base_type:
                message = (
                    f"Provided creator {creator!r} doesn't have "
                    "product base type attribute defined. This will be "
                    "required in future."
                )
                warn(
                    message,
                    DeprecationWarning,
                    stacklevel=2
                )
                self.log.warning(message)

            if not creator.enabled:
                disabled_creators[creator_identifier] = creator
                continue

@@ -1289,8 +1301,12 @@ class CreateContext:
            "folderPath": folder_entity["path"],
            "task": task_entity["name"] if task_entity else None,
            "productType": creator.product_type,
            # Add product base type if supported. Fallback to product type
            "productBaseType": (
                creator.product_base_type or creator.product_type),
            "variant": variant
        }

        if active is not None:
            if not isinstance(active, bool):
                self.log.warning(
@ -1,20 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import copy
|
||||
import collections
|
||||
from typing import TYPE_CHECKING, Optional, Dict, Any
|
||||
"""Creator plugins for the create process."""
|
||||
from __future__ import annotations
|
||||
|
||||
import collections
|
||||
import copy
|
||||
import os
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import TYPE_CHECKING, Any, Dict, Optional
|
||||
|
||||
from ayon_core.lib import Logger, get_version_from_path
|
||||
from ayon_core.pipeline.plugin_discover import (
|
||||
deregister_plugin,
|
||||
deregister_plugin_path,
|
||||
discover,
|
||||
register_plugin,
|
||||
register_plugin_path,
|
||||
deregister_plugin,
|
||||
deregister_plugin_path
|
||||
)
|
||||
from ayon_core.pipeline.staging_dir import get_staging_dir_info, StagingDir
|
||||
from ayon_core.pipeline.staging_dir import StagingDir, get_staging_dir_info
|
||||
|
||||
from .constants import DEFAULT_VARIANT_VALUE
|
||||
from .product_name import get_product_name
|
||||
|
|
@ -23,6 +24,7 @@ from .structures import CreatedInstance
|
|||
|
||||
if TYPE_CHECKING:
|
||||
from ayon_core.lib import AbstractAttrDef
|
||||
|
||||
# Avoid cyclic imports
|
||||
from .context import CreateContext, UpdateData # noqa: F401
|
||||
|
||||
|
|
@ -66,7 +68,6 @@ class ProductConvertorPlugin(ABC):
|
|||
Returns:
|
||||
logging.Logger: Logger with name of the plugin.
|
||||
"""
|
||||
|
||||
if self._log is None:
|
||||
self._log = Logger.get_logger(self.__class__.__name__)
|
||||
return self._log
|
||||
|
|
@ -82,9 +83,8 @@ class ProductConvertorPlugin(ABC):
|
|||
|
||||
Returns:
|
||||
str: Converted identifier unique for all converters in host.
|
||||
"""
|
||||
|
||||
pass
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def find_instances(self):
|
||||
|
|
@ -94,14 +94,10 @@ class ProductConvertorPlugin(ABC):
|
|||
convert.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def convert(self):
|
||||
"""Conversion code."""
|
||||
|
||||
pass
|
||||
|
||||
@property
|
||||
def create_context(self):
|
||||
"""Quick access to create context.
|
||||
|
|
@ -109,7 +105,6 @@ class ProductConvertorPlugin(ABC):
|
|||
Returns:
|
||||
CreateContext: Context which initialized the plugin.
|
||||
"""
|
||||
|
||||
return self._create_context
|
||||
|
||||
@property
|
||||
|
|
@ -122,7 +117,6 @@ class ProductConvertorPlugin(ABC):
|
|||
Raises:
|
||||
UnavailableSharedData: When called out of collection phase.
|
||||
"""
|
||||
|
||||
return self._create_context.collection_shared_data
|
||||
|
||||
def add_convertor_item(self, label):
|
||||
|
|
@ -131,12 +125,10 @@ class ProductConvertorPlugin(ABC):
|
|||
Args:
|
||||
label (str): Label of item which will show in UI.
|
||||
"""
|
||||
|
||||
self._create_context.add_convertor_item(self.identifier, label)
|
||||
|
||||
def remove_convertor_item(self):
|
||||
"""Remove legacy item from create context when conversion finished."""
|
||||
|
||||
self._create_context.remove_convertor_item(self.identifier)
|
||||
|
||||
|
||||
|
|
@ -155,7 +147,6 @@ class BaseCreator(ABC):
|
|||
create_context (CreateContext): Context which initialized creator.
|
||||
headless (bool): Running in headless mode.
|
||||
"""
|
||||
|
||||
# Label shown in UI
|
||||
label = None
|
||||
group_label = None
|
||||
|
|
@ -219,7 +210,6 @@ class BaseCreator(ABC):
|
|||
Returns:
|
||||
Optional[dict[str, Any]]: Settings values or None.
|
||||
"""
|
||||
|
||||
settings = project_settings.get(category_name)
|
||||
if not settings:
|
||||
return None
|
||||
|
|
@ -265,7 +255,6 @@ class BaseCreator(ABC):
|
|||
Args:
|
||||
project_settings (dict[str, Any]): Project settings.
|
||||
"""
|
||||
|
||||
settings_category = self.settings_category
|
||||
if not settings_category:
|
||||
return
|
||||
|
|
@ -277,18 +266,17 @@ class BaseCreator(ABC):
|
|||
project_settings, settings_category, settings_name
|
||||
)
|
||||
if settings is None:
|
||||
self.log.debug("No settings found for {}".format(cls_name))
|
||||
self.log.debug(f"No settings found for {cls_name}")
|
||||
return
|
||||
|
||||
for key, value in settings.items():
|
||||
# Log out attributes that are not defined on plugin object
|
||||
# - those may be potential dangerous typos in settings
|
||||
if not hasattr(self, key):
|
||||
self.log.debug((
|
||||
"Applying settings to unknown attribute '{}' on '{}'."
|
||||
).format(
|
||||
self.log.debug(
|
||||
"Applying settings to unknown attribute '%s' on '%s'.",
|
||||
key, cls_name
|
||||
))
|
||||
)
|
||||
setattr(self, key, value)
|
||||
|
||||
def register_callbacks(self):
|
||||
|
|
@@ -297,23 +285,39 @@ class BaseCreator(ABC):
        Default implementation does nothing. It can be overridden to register
        callbacks for creator.
        """
        pass

    @property
    def identifier(self):
        """Identifier of creator (must be unique).

        Default implementation returns plugin's product type.
        """
        Default implementation returns plugin's product base type,
        or falls back to product type if product base type is not set.

        return self.product_type
        """
        identifier = self.product_base_type
        if not identifier:
            identifier = self.product_type
        return identifier

    @property
    @abstractmethod
    def product_type(self):
        """Family that plugin represents."""

        pass
    @property
    def product_base_type(self) -> Optional[str]:
        """Base product type that plugin represents.

        Todo (antirotor): This should be required in future - it
            should be made abstract then.

        Returns:
            Optional[str]: Base product type that plugin represents.
                If not set, it is assumed that the creator plugin is obsolete
                and does not support product base type.

        """
        return None

    @property
    def project_name(self):
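A creator plugin opting into the new attribute could look roughly like the sketch below. The identifier, labels and product type values are made up, and the remaining abstract methods are omitted; `Creator` is assumed to be importable from `ayon_core.pipeline`.

```python
from ayon_core.pipeline import Creator


class CreateExampleRender(Creator):
    """Hypothetical creator declaring a product base type."""

    identifier = "example.create.render"
    label = "Example Render"
    product_type = "render_scene"
    # Broader base type used for the identifier fallback, grouping and
    # template filtering; a plain class attribute overrides the property.
    product_base_type = "render"

    def create(self, product_name, instance_data, pre_create_data):
        # Host-specific creation logic would go here.
        pass
```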
@ -322,7 +326,6 @@ class BaseCreator(ABC):
|
|||
Returns:
|
||||
str: Name of a project.
|
||||
"""
|
||||
|
||||
return self.create_context.project_name
|
||||
|
||||
@property
|
||||
|
|
@ -332,7 +335,6 @@ class BaseCreator(ABC):
|
|||
Returns:
|
||||
Anatomy: Project anatomy object.
|
||||
"""
|
||||
|
||||
return self.create_context.project_anatomy
|
||||
|
||||
@property
|
||||
|
|
@ -344,13 +346,14 @@ class BaseCreator(ABC):
|
|||
|
||||
Default implementation use attributes in this order:
|
||||
- 'group_label' -> 'label' -> 'identifier'
|
||||
Keep in mind that 'identifier' use 'product_type' by default.
|
||||
|
||||
Keep in mind that 'identifier' uses 'product_base_type' by default.
|
||||
|
||||
Returns:
|
||||
str: Group label that can be used for grouping of instances in UI.
|
||||
Group label can be overridden by instance itself.
|
||||
"""
|
||||
Group label can be overridden by the instance itself.
|
||||
|
||||
"""
|
||||
if self._cached_group_label is None:
|
||||
label = self.identifier
|
||||
if self.group_label:
|
||||
|
|
@ -367,7 +370,6 @@ class BaseCreator(ABC):
|
|||
Returns:
|
||||
logging.Logger: Logger with name of the plugin.
|
||||
"""
|
||||
|
||||
if self._log is None:
|
||||
self._log = Logger.get_logger(self.__class__.__name__)
|
||||
return self._log
|
||||
|
|
@@ -376,7 +378,8 @@ class BaseCreator(ABC):
        self,
        product_name: str,
        data: Dict[str, Any],
        product_type: Optional[str] = None
        product_type: Optional[str] = None,
        product_base_type: Optional[str] = None
    ) -> CreatedInstance:
        """Create instance and add instance to context.

@@ -385,6 +388,8 @@ class BaseCreator(ABC):
            data (Dict[str, Any]): Instance data.
            product_type (Optional[str]): Product type, object attribute
                'product_type' is used if not passed.
            product_base_type (Optional[str]): Product base type, object
                attribute 'product_base_type' is used if not passed.

        Returns:
            CreatedInstance: Created instance.

@@ -392,11 +397,16 @@ class BaseCreator(ABC):
        """
        if product_type is None:
            product_type = self.product_type

        if not product_base_type and not self.product_base_type:
            product_base_type = product_type

        instance = CreatedInstance(
            product_type,
            product_name,
            data,
            product_type=product_type,
            product_name=product_name,
            data=data,
            creator=self,
            product_base_type=product_base_type,
        )
        self._add_instance_to_context(instance)
        return instance
@ -412,7 +422,6 @@ class BaseCreator(ABC):
|
|||
Args:
|
||||
instance (CreatedInstance): New created instance.
|
||||
"""
|
||||
|
||||
self.create_context.creator_adds_instance(instance)
|
||||
|
||||
def _remove_instance_from_context(self, instance):
|
||||
|
|
@ -425,7 +434,6 @@ class BaseCreator(ABC):
|
|||
Args:
|
||||
instance (CreatedInstance): Instance which should be removed.
|
||||
"""
|
||||
|
||||
self.create_context.creator_removed_instance(instance)
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -437,8 +445,6 @@ class BaseCreator(ABC):
|
|||
implementation
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def collect_instances(self):
|
||||
"""Collect existing instances related to this creator plugin.
|
||||
|
|
@ -464,8 +470,6 @@ class BaseCreator(ABC):
|
|||
```
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def update_instances(self, update_list):
|
||||
"""Store changes of existing instances so they can be recollected.
|
||||
|
|
@ -475,8 +479,6 @@ class BaseCreator(ABC):
|
|||
contain changed instance and it's changes.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def remove_instances(self, instances):
|
||||
"""Method called on instance removal.
|
||||
|
|
@ -489,14 +491,11 @@ class BaseCreator(ABC):
|
|||
removed.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
def get_icon(self):
|
||||
"""Icon of creator (product type).
|
||||
|
||||
Can return path to image file or awesome icon name.
|
||||
"""
|
||||
|
||||
return self.icon
|
||||
|
||||
def get_dynamic_data(
|
||||
|
|
@ -512,19 +511,18 @@ class BaseCreator(ABC):
|
|||
|
||||
These may be dynamically created based on current context of workfile.
|
||||
"""
|
||||
|
||||
return {}
|
||||
|
||||
def get_product_name(
|
||||
self,
|
||||
project_name,
|
||||
folder_entity,
|
||||
task_entity,
|
||||
variant,
|
||||
host_name=None,
|
||||
instance=None,
|
||||
project_entity=None,
|
||||
):
|
||||
project_name: str,
|
||||
folder_entity: dict[str, Any],
|
||||
task_entity: Optional[dict[str, Any]],
|
||||
variant: str,
|
||||
host_name: Optional[str] = None,
|
||||
instance: Optional[CreatedInstance] = None,
|
||||
project_entity: Optional[dict[str, Any]] = None,
|
||||
) -> str:
|
||||
"""Return product name for passed context.
|
||||
|
||||
Method is also called on product name update. In that case origin
|
||||
|
|
@ -563,8 +561,9 @@ class BaseCreator(ABC):
|
|||
project_name,
|
||||
folder_entity=folder_entity,
|
||||
task_entity=task_entity,
|
||||
host_name=host_name,
|
||||
product_base_type=self.product_base_type,
|
||||
product_type=self.product_type,
|
||||
host_name=host_name,
|
||||
variant=variant,
|
||||
dynamic_data=dynamic_data,
|
||||
project_settings=self.project_settings,
|
||||
|
|
@ -578,15 +577,15 @@ class BaseCreator(ABC):
|
|||
and values are stored to metadata for future usage and for publishing
|
||||
purposes.
|
||||
|
||||
NOTE:
|
||||
Convert method should be implemented which should care about updating
|
||||
keys/values when plugin attributes change.
|
||||
Note:
|
||||
Convert method should be implemented which should care about
|
||||
updating keys/values when plugin attributes change.
|
||||
|
||||
Returns:
|
||||
list[AbstractAttrDef]: Attribute definitions that can be tweaked
|
||||
for created instance.
|
||||
"""
|
||||
|
||||
"""
|
||||
return self.instance_attr_defs
|
||||
|
||||
def get_attr_defs_for_instance(self, instance):
|
||||
|
|
@ -609,12 +608,10 @@ class BaseCreator(ABC):
|
|||
Raises:
|
||||
UnavailableSharedData: When called out of collection phase.
|
||||
"""
|
||||
|
||||
return self.create_context.collection_shared_data
|
||||
|
||||
def set_instance_thumbnail_path(self, instance_id, thumbnail_path=None):
|
||||
"""Set path to thumbnail for instance."""
|
||||
|
||||
self.create_context.thumbnail_paths_by_instance_id[instance_id] = (
|
||||
thumbnail_path
|
||||
)
|
||||
|
|
@ -635,7 +632,6 @@ class BaseCreator(ABC):
|
|||
Returns:
|
||||
dict[str, int]: Next versions by instance id.
|
||||
"""
|
||||
|
||||
return get_next_versions_for_instances(
|
||||
self.create_context.project_name, instances
|
||||
)
|
||||
|
|
@ -702,7 +698,6 @@ class Creator(BaseCreator):
|
|||
int: Order in which is creator shown (less == earlier). By default
|
||||
is using Creator's 'order' or processing.
|
||||
"""
|
||||
|
||||
return self.order
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -717,11 +712,9 @@ class Creator(BaseCreator):
|
|||
pre_create_data(dict): Data based on pre creation attributes.
|
||||
Those may affect how creator works.
|
||||
"""
|
||||
|
||||
# instance = CreatedInstance(
|
||||
# self.product_type, product_name, instance_data
|
||||
# )
|
||||
pass
|
||||
|
||||
def get_description(self):
|
||||
"""Short description of product type and plugin.
|
||||
|
|
@ -729,7 +722,6 @@ class Creator(BaseCreator):
|
|||
Returns:
|
||||
str: Short description of product type.
|
||||
"""
|
||||
|
||||
return self.description
|
||||
|
||||
def get_detail_description(self):
|
||||
|
|
@ -740,7 +732,6 @@ class Creator(BaseCreator):
|
|||
Returns:
|
||||
str: Detailed description of product type for artist.
|
||||
"""
|
||||
|
||||
return self.detailed_description
|
||||
|
||||
def get_default_variants(self):
|
||||
|
|
@ -754,7 +745,6 @@ class Creator(BaseCreator):
|
|||
Returns:
|
||||
list[str]: Whisper variants for user input.
|
||||
"""
|
||||
|
||||
return copy.deepcopy(self.default_variants)
|
||||
|
||||
def get_default_variant(self, only_explicit=False):
|
||||
|
|
@ -774,7 +764,6 @@ class Creator(BaseCreator):
|
|||
Returns:
|
||||
str: Variant value.
|
||||
"""
|
||||
|
||||
if only_explicit or self._default_variant:
|
||||
return self._default_variant
|
||||
|
||||
|
|
@ -795,7 +784,6 @@ class Creator(BaseCreator):
|
|||
Returns:
|
||||
str: Variant value.
|
||||
"""
|
||||
|
||||
return self.get_default_variant()
|
||||
|
||||
def _set_default_variant_wrap(self, variant):
|
||||
|
|
@ -807,7 +795,6 @@ class Creator(BaseCreator):
|
|||
Args:
|
||||
variant (str): New default variant value.
|
||||
"""
|
||||
|
||||
self._default_variant = variant
|
||||
|
||||
default_variant = property(
|
||||
|
|
@ -957,7 +944,6 @@ class AutoCreator(BaseCreator):
|
|||
|
||||
def remove_instances(self, instances):
|
||||
"""Skip removal."""
|
||||
pass
|
||||
|
||||
|
||||
def discover_creator_plugins(*args, **kwargs):
|
||||
|
|
@ -1015,7 +1001,6 @@ def cache_and_get_instances(creator, shared_key, list_instances_func):
|
|||
dict[str, dict[str, Any]]: Cached instances by creator identifier from
|
||||
result of passed function.
|
||||
"""
|
||||
|
||||
if shared_key not in creator.collection_shared_data:
|
||||
value = collections.defaultdict(list)
|
||||
for instance in list_instances_func():
|
||||
|
|
|
|||
|
|
@@ -1,8 +1,10 @@
"""Functions for handling product names."""
from __future__ import annotations

import warnings
from functools import wraps
from typing import Optional, Any, overload
from typing import Any, Optional, Union, overload
from warnings import warn

import ayon_api
from ayon_core.lib import (

@@ -22,14 +24,15 @@ log = Logger.get_logger(__name__)


def get_product_name_template(
    project_name,
    product_type,
    task_name,
    task_type,
    host_name,
    default_template=None,
    project_settings=None
):
    project_name: str,
    product_type: str,
    task_name: Optional[str],
    task_type: Optional[str],
    host_name: str,
    default_template: Optional[str] = None,
    project_settings: Optional[dict[str, Any]] = None,
    product_base_type: Optional[str] = None
) -> str:
    """Get product name template based on passed context.

    Args:

@@ -37,26 +40,32 @@ def get_product_name_template(
        product_type (str): Product type for which the product name is
            calculated.
        host_name (str): Name of host in which the product name is calculated.
        task_name (str): Name of task in which context the product is created.
        task_type (str): Type of task in which context the product is created.
        task_name (Optional[str]): Name of task in which context the
            product is created.
        task_type (Optional[str]): Type of task in which context the
            product is created.
        default_template (Optional[str]): Default template which is used if
            settings won't find any matching possibility. Constant
            'DEFAULT_PRODUCT_TEMPLATE' is used if not defined.
        project_settings (Optional[dict[str, Any]]): Prepared settings for
            project. Settings are queried if not passed.
    """
        product_base_type (Optional[str]): Base type of product.

    Returns:
        str: Product name template.

    """
    if project_settings is None:
        project_settings = get_project_settings(project_name)
    tools_settings = project_settings["core"]["tools"]
    profiles = tools_settings["creator"]["product_name_profiles"]
    filtering_criteria = {
        "product_types": product_type,
        "hosts": host_name,
        "tasks": task_name,
        "task_types": task_type
        "host_names": host_name,
        "task_names": task_name,
        "task_types": task_type,
        "product_base_types": product_base_type,
    }

    matching_profile = filter_profiles(profiles, filtering_criteria)
    template = None
    if matching_profile:
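A hedged example of resolving a template with the new keyword; the project, host and task values are invented.

```python
from ayon_core.pipeline.create.product_name import get_product_name_template

template = get_product_name_template(
    project_name="demo_project",
    product_type="render_scene",
    task_name="lighting",
    task_type="Lighting",
    host_name="maya",
    product_base_type="render",
)
# With a matching profile this could return something like
# "{product[basetype]}{Task[name]}{Variant}", which get_product_name()
# then fills through prepare_template_data().
```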
@ -92,6 +101,7 @@ def _get_product_name_old(
|
|||
project_settings: Optional[dict[str, Any]] = None,
|
||||
product_type_filter: Optional[str] = None,
|
||||
project_entity: Optional[dict[str, Any]] = None,
|
||||
product_base_type: Optional[str] = None,
|
||||
) -> TemplateResult:
|
||||
warnings.warn(
|
||||
"Used deprecated 'task_name' and 'task_type' arguments."
|
||||
|
|
@ -103,13 +113,14 @@ def _get_product_name_old(
|
|||
return StringTemplate("").format({})
|
||||
|
||||
template = get_product_name_template(
|
||||
project_name,
|
||||
product_type_filter or product_type,
|
||||
task_name,
|
||||
task_type,
|
||||
host_name,
|
||||
project_name=project_name,
|
||||
product_type=product_type_filter or product_type,
|
||||
task_name=task_name,
|
||||
task_type=task_type,
|
||||
host_name=host_name,
|
||||
default_template=default_template,
|
||||
project_settings=project_settings
|
||||
project_settings=project_settings,
|
||||
product_base_type=product_base_type,
|
||||
)
|
||||
|
||||
template_low = template.lower()
|
||||
|
|
@@ -140,12 +151,22 @@ def _get_product_name_old(
    task_short = task_types_by_name.get(task_type, {}).get("shortName")
    task_value["short"] = task_short

    fill_pairs = {
    if not product_base_type and "{product[basetype]}" in template.lower():
        warn(
            "You have Product base type in product name template, "
            "but it is not provided by the creator, please update your "
            "creation code to include it. It will be required in "
            "the future.",
            DeprecationWarning,
            stacklevel=2)

    fill_pairs: dict[str, Union[str, dict[str, str]]] = {
        "variant": variant,
        "family": product_type,
        "task": task_value,
        "product": {
            "type": product_type
            "type": product_type,
            "basetype": product_base_type or product_type,
        }
    }

@ -160,10 +181,11 @@ def _get_product_name_old(
|
|||
data=prepare_template_data(fill_pairs)
|
||||
)
|
||||
except KeyError as exp:
|
||||
raise TemplateFillError(
|
||||
"Value for {} key is missing in template '{}'."
|
||||
" Available values are {}".format(str(exp), template, fill_pairs)
|
||||
msg = (
|
||||
f"Value for {exp} key is missing in template '{template}'."
|
||||
f" Available values are {fill_pairs}"
|
||||
)
|
||||
raise TemplateFillError(msg) from exp
|
||||
|
||||
|
||||
def _backwards_compatibility_product_name(func):
|
||||
|
|
@ -198,9 +220,9 @@ def _backwards_compatibility_product_name(func):
|
|||
if "folder_entity" in kwargs or "task_entity" in kwargs:
|
||||
return func(*args, **kwargs)
|
||||
|
||||
# Using more than 6 positional arguments is not allowed
|
||||
# Using more than 7 positional arguments is not allowed
|
||||
# in the new function
|
||||
if len(args) > 6:
|
||||
if len(args) > 7:
|
||||
return _get_product_name_old(*args, **kwargs)
|
||||
|
||||
if len(args) > 1:
|
||||
|
|
@ -332,15 +354,16 @@ def get_product_name(
|
|||
project_name: str,
|
||||
folder_entity: dict[str, Any],
|
||||
task_entity: Optional[dict[str, Any]],
|
||||
host_name: str,
|
||||
product_base_type: str,
|
||||
product_type: str,
|
||||
host_name: str,
|
||||
variant: str,
|
||||
*,
|
||||
default_template: Optional[str] = None,
|
||||
dynamic_data: Optional[dict[str, Any]] = None,
|
||||
project_settings: Optional[dict[str, Any]] = None,
|
||||
product_type_filter: Optional[str] = None,
|
||||
project_entity: Optional[dict[str, Any]] = None,
|
||||
default_template: Optional[str] = None,
|
||||
product_base_type_filter: Optional[str] = None,
|
||||
) -> TemplateResult:
|
||||
"""Calculate product name based on passed context and AYON settings.
|
||||
|
||||
|
|
@ -357,20 +380,21 @@ def get_product_name(
|
|||
folder_entity (Optional[dict[str, Any]]): Folder entity.
|
||||
task_entity (Optional[dict[str, Any]]): Task entity.
|
||||
host_name (str): Host name.
|
||||
product_base_type (str): Product base type.
|
||||
product_type (str): Product type.
|
||||
variant (str): In most of the cases it is user input during creation.
|
||||
default_template (Optional[str]): Default template if any profile does
|
||||
not match passed context. Constant 'DEFAULT_PRODUCT_TEMPLATE'
|
||||
is used if is not passed.
|
||||
dynamic_data (Optional[dict[str, Any]]): Dynamic data specific for
|
||||
a creator which creates instance.
|
||||
project_settings (Optional[dict[str, Any]]): Prepared settings
|
||||
for project. Settings are queried if not passed.
|
||||
product_type_filter (Optional[str]): Use different product type for
|
||||
product template filtering. Value of `product_type` is used when
|
||||
not passed.
|
||||
project_entity (Optional[dict[str, Any]]): Project entity used when
|
||||
task short name is required by template.
|
||||
default_template (Optional[str]): Default template if any profile does
|
||||
not match passed context. Constant 'DEFAULT_PRODUCT_TEMPLATE'
|
||||
is used if is not passed.
|
||||
product_base_type_filter (Optional[str]): Use different product base
|
||||
type for product template filtering. Value of
|
||||
`product_base_type_filter` is used when not passed.
|
||||
|
||||
Returns:
|
||||
TemplateResult: Product name.
|
||||
|
|
@ -390,13 +414,14 @@ def get_product_name(
|
|||
task_type = task_entity["taskType"]
|
||||
|
||||
template = get_product_name_template(
|
||||
project_name,
|
||||
product_type_filter or product_type,
|
||||
task_name,
|
||||
task_type,
|
||||
host_name,
|
||||
project_name=project_name,
|
||||
product_base_type=product_base_type_filter or product_base_type,
|
||||
product_type=product_type,
|
||||
task_name=task_name,
|
||||
task_type=task_type,
|
||||
host_name=host_name,
|
||||
default_template=default_template,
|
||||
project_settings=project_settings
|
||||
project_settings=project_settings,
|
||||
)
|
||||
|
||||
template_low = template.lower()
|
||||
|
|
@ -421,8 +446,8 @@ def get_product_name(
|
|||
if project_entity is None:
|
||||
project_entity = ayon_api.get_project(project_name)
|
||||
task_types_by_name = {
|
||||
task["name"]: task for task in
|
||||
project_entity["taskTypes"]
|
||||
task["name"]: task
|
||||
for task in project_entity["taskTypes"]
|
||||
}
|
||||
task_short = task_types_by_name.get(task_type, {}).get("shortName")
|
||||
task_value["short"] = task_short
|
||||
|
|
@ -433,7 +458,8 @@ def get_product_name(
|
|||
"family": product_type,
|
||||
"task": task_value,
|
||||
"product": {
|
||||
"type": product_type
|
||||
"type": product_type,
|
||||
"basetype": product_base_type,
|
||||
}
|
||||
}
|
||||
if folder_entity:
|
||||
|
|
@ -453,7 +479,8 @@ def get_product_name(
|
|||
data=prepare_template_data(fill_pairs)
|
||||
)
|
||||
except KeyError as exp:
|
||||
raise TemplateFillError(
|
||||
msg = (
|
||||
f"Value for {exp} key is missing in template '{template}'."
|
||||
f" Available values are {fill_pairs}"
|
||||
)
|
||||
raise TemplateFillError(msg)
|
||||
|
|
|
|||
|
|
@ -11,6 +11,8 @@ from ayon_core.lib.attribute_definitions import (
|
|||
serialize_attr_defs,
|
||||
deserialize_attr_defs,
|
||||
)
|
||||
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
AYON_INSTANCE_ID,
|
||||
AVALON_INSTANCE_ID,
|
||||
|
|
@ -480,6 +482,10 @@ class CreatedInstance:
|
|||
data (Dict[str, Any]): Data used for filling product name or override
|
||||
data from already existing instance.
|
||||
creator (BaseCreator): Creator responsible for instance.
|
||||
product_base_type (Optional[str]): Product base type that will be
|
||||
created. If not provided then product base type is taken from
|
||||
creator plugin. If creator does not have product base type then
|
||||
deprecation warning is raised.
|
||||
"""
|
||||
|
||||
# Keys that can't be changed or removed from data after loading using
|
||||
|
|
@@ -490,6 +496,7 @@ class CreatedInstance:
        "id",
        "instance_id",
        "productType",
        "productBaseType",
        "creator_identifier",
        "creator_attributes",
        "publish_attributes"

@@ -509,7 +516,13 @@
        data: Dict[str, Any],
        creator: "BaseCreator",
        transient_data: Optional[Dict[str, Any]] = None,
        product_base_type: Optional[str] = None
    ):
        """Initialize CreatedInstance."""
        # fallback to product type for backward compatibility
        if not product_base_type:
            product_base_type = creator.product_base_type or product_type

        self._creator = creator
        creator_identifier = creator.identifier
        group_label = creator.get_group_label()

@@ -562,6 +575,9 @@ class CreatedInstance:
        self._data["id"] = item_id
        self._data["productType"] = product_type
        self._data["productName"] = product_name

        self._data["productBaseType"] = product_base_type

        self._data["active"] = data.get("active", True)
        self._data["creator_identifier"] = creator_identifier
@@ -253,6 +253,19 @@ def create_skeleton_instance(
        "reuseLastVersion": data.get("reuseLastVersion", False),
    }

    # Pass on the OCIO metadata of what the source display and view are
    # so that the farm can correctly set up color management.
    if "sceneDisplay" in data and "sceneView" in data:
        instance_skeleton_data["sceneDisplay"] = data["sceneDisplay"]
        instance_skeleton_data["sceneView"] = data["sceneView"]
    elif "colorspaceDisplay" in data and "colorspaceView" in data:
        # Backwards compatibility for sceneDisplay and sceneView
        instance_skeleton_data["colorspaceDisplay"] = data["colorspaceDisplay"]
        instance_skeleton_data["colorspaceView"] = data["colorspaceView"]
    if "sourceDisplay" in data and "sourceView" in data:
        instance_skeleton_data["sourceDisplay"] = data["sourceDisplay"]
        instance_skeleton_data["sourceView"] = data["sourceView"]

    if data.get("renderlayer"):
        instance_skeleton_data["renderlayer"] = data["renderlayer"]

@@ -589,7 +602,6 @@ def create_instances_for_aov(
    """
    # we cannot attach AOVs to other products as we consider every
    # AOV product of its own.

    log = Logger.get_logger("farm_publishing")

    # if there are product to attach to and more than one AOV,

@@ -612,8 +624,8 @@ def create_instances_for_aov(
        additional_data.update({
            "colorspaceConfig": colorspace_config,
            # Display/View are optional
            "display": instance.data.get("colorspaceDisplay"),
            "view": instance.data.get("colorspaceView")
            "display": instance.data.get("sourceDisplay"),
            "view": instance.data.get("sourceView")
        })

        # Get templated path from absolute config path.
@@ -122,7 +122,8 @@ def get_publish_template_name(
    task_type,
    project_settings=None,
    hero=False,
    logger=None
    product_base_type: Optional[str] = None,
    logger=None,
):
    """Get template name which should be used for passed context.


@@ -140,17 +141,29 @@ def get_publish_template_name(
        task_type (str): Task type on which is instance working.
        project_settings (Dict[str, Any]): Prepared project settings.
        hero (bool): Template is for hero version publishing.
        product_base_type (Optional[str]): Product type for which should
            be found template.
        logger (logging.Logger): Custom logger used for 'filter_profiles'
            function.

    Returns:
        str: Template name which should be used for integration.
    """
    if not product_base_type:
        msg = (
            "Argument 'product_base_type' is not provided to"
            " 'get_publish_template_name' function. This argument"
            " will be required in future versions."
        )
        warnings.warn(msg, DeprecationWarning)
        if logger:
            logger.warning(msg)

    template = None
    filter_criteria = {
        "hosts": host_name,
        "product_types": product_type,
        "product_base_types": product_base_type,
        "task_names": task_name,
        "task_types": task_type,
    }
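A call that already passes the soon-to-be-required argument might look like this; the project and task context values are invented, and the imports assume the usual `ayon_core.pipeline.publish` and `ayon_core.settings` exports.

```python
from ayon_core.settings import get_project_settings
from ayon_core.pipeline.publish import get_publish_template_name

project_settings = get_project_settings("demo_project")
template_name = get_publish_template_name(
    project_name="demo_project",
    host_name="nuke",
    product_type="render_scene",
    task_name="compositing",
    task_type="Compositing",
    project_settings=project_settings,
    product_base_type="render",
)
```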
@ -812,7 +825,22 @@ def replace_with_published_scene_path(instance, replace_in_path=True):
|
|||
template_data["comment"] = None
|
||||
|
||||
anatomy = instance.context.data["anatomy"]
|
||||
template = anatomy.get_template_item("publish", "default", "path")
|
||||
project_name = anatomy.project_name
|
||||
task_name = task_type = None
|
||||
task_entity = instance.data.get("taskEntity")
|
||||
if task_entity:
|
||||
task_name = task_entity["name"]
|
||||
task_type = task_entity["taskType"]
|
||||
project_settings = instance.context.data["project_settings"]
|
||||
template_name = get_publish_template_name(
|
||||
project_name=project_name,
|
||||
host_name=instance.context.data["hostName"],
|
||||
product_type=workfile_instance.data["productType"],
|
||||
task_name=task_name,
|
||||
task_type=task_type,
|
||||
project_settings=project_settings,
|
||||
)
|
||||
template = anatomy.get_template_item("publish", template_name, "path")
|
||||
template_filled = template.format_strict(template_data)
|
||||
file_path = os.path.normpath(template_filled)
|
||||
|
||||
|
|
|
|||
|
|
@@ -684,3 +684,20 @@ def get_sdf_format_args(path):
    """Return SDF_FORMAT_ARGS parsed to `dict`"""
    _raw_path, data = Sdf.Layer.SplitIdentifier(path)
    return data


def get_standard_default_prim_name(folder_path: str) -> str:
    """Return the AYON-specified default prim name for a folder path.

    This is used e.g. for the default prim in AYON USD Contribution workflows.
    """
    folder_name: str = folder_path.rsplit("/", 1)[-1]

    # Prim names are not allowed to start with a digit in USD. Authoring them
    # would mean generating essentially garbage data and may result in
    # unexpected behavior in certain USD or DCC versions, like failure to
    # refresh in usdview or crashes in Houdini 21.
    if folder_name and folder_name[0].isdigit():
        folder_name = f"_{folder_name}"

    return folder_name
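Illustrative calls of the new helper (the folder paths are hypothetical):

```python
# Expected results follow the function above: last path segment, with a
# leading underscore when it starts with a digit.
get_standard_default_prim_name("/assets/characters/hero")  # -> "hero"
get_standard_default_prim_name("/shots/sq010/0010")        # -> "_0010"
```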
|||
|
|
@ -87,15 +87,19 @@ class ExtractOIIOTranscode(publish.Extractor):
|
|||
profile_output_defs = profile["outputs"]
|
||||
new_representations = []
|
||||
repres = instance.data["representations"]
|
||||
for idx, repre in enumerate(list(repres)):
|
||||
# target space, display and view might be defined upstream
|
||||
# TODO: address https://github.com/ynput/ayon-core/pull/1268#discussion_r2156555474
|
||||
# Implement upstream logic to handle target_colorspace,
|
||||
# target_display, target_view in other DCCs
|
||||
target_colorspace = False
|
||||
target_display = instance.data.get("colorspaceDisplay")
|
||||
target_view = instance.data.get("colorspaceView")
|
||||
|
||||
scene_display = instance.data.get(
|
||||
"sceneDisplay",
|
||||
# Backward compatibility
|
||||
instance.data.get("colorspaceDisplay")
|
||||
)
|
||||
scene_view = instance.data.get(
|
||||
"sceneView",
|
||||
# Backward compatibility
|
||||
instance.data.get("colorspaceView")
|
||||
)
|
||||
|
||||
for idx, repre in enumerate(list(repres)):
|
||||
self.log.debug("repre ({}): `{}`".format(idx + 1, repre["name"]))
|
||||
if not self._repre_is_valid(repre):
|
||||
continue
|
||||
|
|
@ -142,24 +146,18 @@ class ExtractOIIOTranscode(publish.Extractor):
|
|||
|
||||
transcoding_type = output_def["transcoding_type"]
|
||||
|
||||
# NOTE: we use colorspace_data as the fallback values for
|
||||
# the target colorspace.
|
||||
# Set target colorspace/display/view based on transcoding type
|
||||
target_colorspace = None
|
||||
target_view = None
|
||||
target_display = None
|
||||
if transcoding_type == "colorspace":
|
||||
# TODO: Should we fallback to the colorspace
|
||||
# (which used as source above) ?
|
||||
# or should we compute the target colorspace from
|
||||
# current view and display ?
|
||||
target_colorspace = (output_def["colorspace"] or
|
||||
colorspace_data.get("colorspace"))
|
||||
target_colorspace = output_def["colorspace"]
|
||||
elif transcoding_type == "display_view":
|
||||
display_view = output_def["display_view"]
|
||||
target_view = (
|
||||
display_view["view"]
|
||||
or colorspace_data.get("view"))
|
||||
target_display = (
|
||||
display_view["display"]
|
||||
or colorspace_data.get("display")
|
||||
)
|
||||
# If empty values are provided in output definition,
|
||||
# fallback to scene display/view that is collected from DCC
|
||||
target_view = display_view["view"] or scene_view
|
||||
target_display = display_view["display"] or scene_display
|
||||
|
||||
# both could be already collected by DCC,
|
||||
# but could be overwritten when transcoding
|
||||
|
|
|
|||
client/ayon_core/plugins/publish/extract_oiio_postprocess.py (new file, 353 lines)

@@ -0,0 +1,353 @@
|
|||
from __future__ import annotations
|
||||
from typing import Any, Optional
|
||||
import os
|
||||
import copy
|
||||
import clique
|
||||
import pyblish.api
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
publish,
|
||||
get_temp_dir
|
||||
)
|
||||
from ayon_core.lib import (
|
||||
is_oiio_supported,
|
||||
get_oiio_tool_args,
|
||||
run_subprocess
|
||||
)
|
||||
from ayon_core.lib.transcoding import IMAGE_EXTENSIONS
|
||||
from ayon_core.lib.profiles_filtering import filter_profiles
|
||||
|
||||
|
||||
class ExtractOIIOPostProcess(publish.Extractor):
|
||||
"""Process representations through `oiiotool` with profile defined
|
||||
settings so that e.g. color space conversions can be applied or images
|
||||
could be converted to scanline, resized, etc. regardless of colorspace
|
||||
data.
|
||||
"""
|
||||
|
||||
label = "OIIO Post Process"
|
||||
order = pyblish.api.ExtractorOrder + 0.020
|
||||
|
||||
settings_category = "core"
|
||||
|
||||
optional = True
|
||||
|
||||
# Supported extensions
|
||||
supported_exts = {ext.lstrip(".") for ext in IMAGE_EXTENSIONS}
|
||||
|
||||
# Configurable by Settings
|
||||
profiles = None
|
||||
options = None
|
||||
|
||||
def process(self, instance):
|
||||
if instance.data.get("farm"):
|
||||
self.log.debug("Should be processed on farm, skipping.")
|
||||
return
|
||||
|
||||
if not self.profiles:
|
||||
self.log.debug("No profiles present for OIIO Post Process")
|
||||
return
|
||||
|
||||
if not instance.data.get("representations"):
|
||||
self.log.debug("No representations, skipping.")
|
||||
return
|
||||
|
||||
if not is_oiio_supported():
|
||||
self.log.warning("OIIO not supported, no transcoding possible.")
|
||||
return
|
||||
|
||||
new_representations = []
|
||||
for idx, repre in enumerate(list(instance.data["representations"])):
|
||||
self.log.debug("repre ({}): `{}`".format(idx + 1, repre["name"]))
|
||||
if not self._repre_is_valid(repre):
|
||||
continue
|
||||
|
||||
# We check profile per representation name and extension because
|
||||
# it's included in the profile check. As such, an instance may have
|
||||
# a different profile applied per representation.
|
||||
profile = self._get_profile(
|
||||
instance,
|
||||
repre
|
||||
)
|
||||
if not profile:
|
||||
continue
|
||||
|
||||
# Get representation files to convert
|
||||
if isinstance(repre["files"], list):
|
||||
repre_files_to_convert = copy.deepcopy(repre["files"])
|
||||
else:
|
||||
repre_files_to_convert = [repre["files"]]
|
||||
|
||||
added_representations = False
|
||||
added_review = False
|
||||
|
||||
# Process each output definition
|
||||
for output_def in profile["outputs"]:
|
||||
|
||||
# Local copy to avoid accidental mutable changes
|
||||
files_to_convert = list(repre_files_to_convert)
|
||||
|
||||
output_name = output_def["name"]
|
||||
new_repre = copy.deepcopy(repre)
|
||||
|
||||
original_staging_dir = new_repre["stagingDir"]
|
||||
new_staging_dir = get_temp_dir(
|
||||
project_name=instance.context.data["projectName"],
|
||||
use_local_temp=True,
|
||||
)
|
||||
new_repre["stagingDir"] = new_staging_dir
|
||||
|
||||
output_extension = output_def["extension"]
|
||||
output_extension = output_extension.replace('.', '')
|
||||
self._rename_in_representation(new_repre,
|
||||
files_to_convert,
|
||||
output_name,
|
||||
output_extension)
|
||||
|
||||
sequence_files = self._translate_to_sequence(files_to_convert)
|
||||
self.log.debug("Files to convert: {}".format(sequence_files))
|
||||
for file_name in sequence_files:
|
||||
if isinstance(file_name, clique.Collection):
|
||||
# Convert to filepath that can be directly converted
|
||||
# by oiio like `frame.1001-1025%04d.exr`
|
||||
file_name: str = file_name.format(
|
||||
"{head}{range}{padding}{tail}"
|
||||
)
|
||||
|
||||
self.log.debug("Transcoding file: `{}`".format(file_name))
|
||||
input_path = os.path.join(original_staging_dir,
|
||||
file_name)
|
||||
output_path = self._get_output_file_path(input_path,
|
||||
new_staging_dir,
|
||||
output_extension)
|
||||
|
||||
# TODO: Support formatting with dynamic keys from the
|
||||
# representation, like e.g. colorspace config, display,
|
||||
# view, etc.
|
||||
input_arguments: list[str] = output_def.get(
|
||||
"input_arguments", []
|
||||
)
|
||||
output_arguments: list[str] = output_def.get(
|
||||
"output_arguments", []
|
||||
)
|
||||
|
||||
# Prepare subprocess arguments
|
||||
oiio_cmd = get_oiio_tool_args(
|
||||
"oiiotool",
|
||||
*input_arguments,
|
||||
input_path,
|
||||
*output_arguments,
|
||||
"-o",
|
||||
output_path
|
||||
)
|
||||
|
||||
self.log.debug(
|
||||
"Conversion command: {}".format(" ".join(oiio_cmd)))
|
||||
run_subprocess(oiio_cmd, logger=self.log)
|
||||
|
||||
# cleanup temporary transcoded files
|
||||
for file_name in new_repre["files"]:
|
||||
transcoded_file_path = os.path.join(new_staging_dir,
|
||||
file_name)
|
||||
instance.context.data["cleanupFullPaths"].append(
|
||||
transcoded_file_path)
|
||||
|
||||
custom_tags = output_def.get("custom_tags")
|
||||
if custom_tags:
|
||||
if new_repre.get("custom_tags") is None:
|
||||
new_repre["custom_tags"] = []
|
||||
new_repre["custom_tags"].extend(custom_tags)
|
||||
|
||||
# Add additional tags from output definition to representation
|
||||
if new_repre.get("tags") is None:
|
||||
new_repre["tags"] = []
|
||||
for tag in output_def["tags"]:
|
||||
if tag not in new_repre["tags"]:
|
||||
new_repre["tags"].append(tag)
|
||||
|
||||
if tag == "review":
|
||||
added_review = True
|
||||
|
||||
# If there is only 1 file outputted then convert list to
|
||||
# string, because that'll indicate that it is not a sequence.
|
||||
if len(new_repre["files"]) == 1:
|
||||
new_repre["files"] = new_repre["files"][0]
|
||||
|
||||
# If the source representation has "review" tag, but it's not
|
||||
# part of the output definition tags, then both the
|
||||
# representations will be transcoded in ExtractReview and
|
||||
# their outputs will clash in integration.
|
||||
if "review" in repre.get("tags", []):
|
||||
added_review = True
|
||||
|
||||
new_representations.append(new_repre)
|
||||
added_representations = True
|
||||
|
||||
if added_representations:
|
||||
self._mark_original_repre_for_deletion(
|
||||
repre, profile, added_review
|
||||
)
|
||||
|
||||
tags = repre.get("tags") or []
|
||||
if "delete" in tags and "thumbnail" not in tags:
|
||||
instance.data["representations"].remove(repre)
|
||||
|
||||
instance.data["representations"].extend(new_representations)
|
||||
|
||||
def _rename_in_representation(self, new_repre, files_to_convert,
|
||||
output_name, output_extension):
|
||||
"""Replace old extension with new one everywhere in representation.
|
||||
|
||||
Args:
|
||||
new_repre (dict)
|
||||
files_to_convert (list): of filenames from repre["files"],
|
||||
standardized to always list
|
||||
output_name (str): key of output definition from Settings,
|
||||
if "<passthrough>" token used, keep original repre name
|
||||
output_extension (str): extension from output definition
|
||||
"""
|
||||
if output_name != "passthrough":
|
||||
new_repre["name"] = output_name
|
||||
if not output_extension:
|
||||
return
|
||||
|
||||
new_repre["ext"] = output_extension
|
||||
new_repre["outputName"] = output_name
|
||||
|
||||
renamed_files = []
|
||||
for file_name in files_to_convert:
|
||||
file_name, _ = os.path.splitext(file_name)
|
||||
file_name = '{}.{}'.format(file_name,
|
||||
output_extension)
|
||||
renamed_files.append(file_name)
|
||||
new_repre["files"] = renamed_files
|
||||
|
||||
    def _translate_to_sequence(self, files_to_convert):
        """Returns original list or a clique.Collection of a sequence.

        Uses clique to find frame sequence Collection.
        If sequence not found, it returns original list.

        Args:
            files_to_convert (list): list of file names
        Returns:
            list[str | clique.Collection]: List of filepaths or a list
                of Collections (usually one, unless there are holes)
        """
        pattern = [clique.PATTERNS["frames"]]
        collections, _ = clique.assemble(
            files_to_convert, patterns=pattern,
            assume_padded_when_ambiguous=True)
        if collections:
            if len(collections) > 1:
                raise ValueError(
                    "Too many collections {}".format(collections))

            collection = collections[0]
            # TODO: Technically oiiotool supports holes in the sequence as well
            # using the dedicated --frames argument to specify the frames.
            # We may want to use that too so conversions of sequences with
            # holes will perform faster as well.
            # Separate the collection so that we have no holes/gaps per
            # collection.
            return collection.separate()

        return files_to_convert
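As a hedged illustration of the clique behaviour relied on here (hypothetical file names, not from the commit): frames with the same head, tail and padding land in a single Collection, and separate() splits that Collection per contiguous run when the sequence has a gap.

import clique

files = ["render.1001.exr", "render.1002.exr", "render.1004.exr"]
collections, remainder = clique.assemble(
    files,
    patterns=[clique.PATTERNS["frames"]],
    assume_padded_when_ambiguous=True,
)
for part in collections[0].separate():
    # Prints something like "render.%04d.exr [1001-1002]"
    # followed by "render.%04d.exr [1004]".
    print(part.format())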
    def _get_output_file_path(self, input_path, output_dir,
                              output_extension):
        """Create output file name path."""
        file_name = os.path.basename(input_path)
        file_name, input_extension = os.path.splitext(file_name)
        if not output_extension:
            output_extension = input_extension.replace(".", "")
        new_file_name = '{}.{}'.format(file_name,
                                       output_extension)
        return os.path.join(output_dir, new_file_name)
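A quick, standard-library-only illustration of the naming rule (the paths are hypothetical): the directory changes, only the last extension is replaced, and the frame number embedded in the name is kept.

import os

input_path = "/tmp/render/beauty.1001.exr"
base, input_ext = os.path.splitext(os.path.basename(input_path))
print(os.path.join("/tmp/transcode", "{}.{}".format(base, "jpg")))
# /tmp/transcode/beauty.1001.jpg (with POSIX path separators)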
    def _get_profile(
        self,
        instance: pyblish.api.Instance,
        repre: dict
    ) -> Optional[dict[str, Any]]:
        """Returns profile if it should process this instance."""
        host_name = instance.context.data["hostName"]
        product_type = instance.data["productType"]
        product_name = instance.data["productName"]
        task_data = instance.data["anatomyData"].get("task", {})
        task_name = task_data.get("name")
        task_type = task_data.get("type")
        repre_name: str = repre["name"]
        repre_ext: str = repre["ext"]
        filtering_criteria = {
            "host_names": host_name,
            "product_types": product_type,
            "product_names": product_name,
            "task_names": task_name,
            "task_types": task_type,
            "representation_names": repre_name,
            "representation_exts": repre_ext,
        }
        profile = filter_profiles(self.profiles, filtering_criteria,
                                  logger=self.log)

        if not profile:
            self.log.debug(
                "Skipped instance. None of profiles in presets are for"
                f" Host: \"{host_name}\" |"
                f" Product types: \"{product_type}\" |"
                f" Product names: \"{product_name}\" |"
                f" Task name \"{task_name}\" |"
                f" Task type \"{task_type}\" |"
                f" Representation: \"{repre_name}\" (.{repre_ext})"
            )

        return profile
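For orientation, the profiles come from project Settings and their filtering keys mirror the filtering_criteria built above; an empty value is typically treated by filter_profiles as "match anything". The dict below is a purely hypothetical example of a profile that would match a Nuke exr representation; apart from delete_original (used by the deletion helper later in this file), any other Settings keys are omitted here.

# Hypothetical Settings profile, for illustration only.
example_profile = {
    "host_names": ["nuke"],
    "product_types": [],            # empty -> matches any product type
    "product_names": [],
    "task_names": [],
    "task_types": [],
    "representation_names": [],
    "representation_exts": ["exr"],
    "delete_original": True,
}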
    def _repre_is_valid(self, repre: dict) -> bool:
        """Validation if representation should be processed.

        Args:
            repre (dict): Representation which should be checked.

        Returns:
            bool: False if can't be processed else True.
        """
        if repre.get("ext") not in self.supported_exts:
            self.log.debug((
                "Representation '{}' has unsupported extension: '{}'. Skipped."
            ).format(repre["name"], repre.get("ext")))
            return False

        if not repre.get("files"):
            self.log.debug((
                "Representation '{}' has empty files. Skipped."
            ).format(repre["name"]))
            return False

        if "delete" in repre.get("tags", []):
            self.log.debug((
                "Representation '{}' has 'delete' tag. Skipped."
            ).format(repre["name"]))
            return False

        return True
    def _mark_original_repre_for_deletion(
        self,
        repre: dict,
        profile: dict,
        added_review: bool
    ):
        """If new transcoded representation created, delete old."""
        if not repre.get("tags"):
            repre["tags"] = []

        delete_original = profile["delete_original"]

        if delete_original:
            if "delete" not in repre["tags"]:
                repre["tags"].append("delete")

        if added_review and "review" in repre["tags"]:
            repre["tags"].remove("review")

@ -163,7 +163,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
        "flame",
        "unreal",
        "batchdelivery",
        "photoshop"
        "photoshop",
        "substancepainter",
    ]

    settings_category = "core"

@ -400,6 +401,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
                    new_staging_dir,
                    self.log
                )
                # The OIIO conversion will remap the RGBA channels just to
                # `R,G,B,A` so we will pass the intermediate file to FFMPEG
                # without layer name.
                layer_name = ""

        try:
            self._render_output_definitions(

@ -48,6 +48,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
        "unreal",
        "houdini",
        "batchdelivery",
        "webpublisher",
    ]
    settings_category = "core"
    enabled = False

@ -25,7 +25,8 @@ try:
        variant_nested_prim_path,
        setup_asset_layer,
        add_ordered_sublayer,
        set_layer_defaults
        set_layer_defaults,
        get_standard_default_prim_name
    )
except ImportError:
    pass

@ -176,7 +177,12 @@ def get_instance_uri_path(

    # If for whatever reason we were unable to retrieve from the context
    # then get the path from an existing database entry
    path = get_representation_path_by_names(**query)
    path = get_representation_path_by_names(
        anatomy=context.data["anatomy"],
        **names
    )
    if not path:
        raise RuntimeError(f"Unable to resolve publish path for: {names}")

    # Ensure `None` for now is also a string
    path = str(path)

@ -494,7 +500,7 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
            "asset"
            if profile.get("contribution_target_product") == "usdAsset"
            else "shot")
        init_as_visible = False
        init_as_visible = True

        # Attributes logic
        publish_attributes = instance["publish_attributes"].get(

@ -640,6 +646,7 @@ class ExtractUSDLayerContribution(publish.Extractor):
    settings_category = "core"

    use_ayon_entity_uri = False
    enforce_default_prim = False

    def process(self, instance):

@ -650,9 +657,18 @@ class ExtractUSDLayerContribution(publish.Extractor):
        path = get_last_publish(instance)
        if path and BUILD_INTO_LAST_VERSIONS:
            sdf_layer = Sdf.Layer.OpenAsAnonymous(path)

            # If enabled in settings, ignore any default prim specified on
            # older publish versions and always publish with the AYON
            # standard default prim
            if self.enforce_default_prim:
                sdf_layer.defaultPrim = get_standard_default_prim_name(
                    folder_path
                )

            default_prim = sdf_layer.defaultPrim
        else:
            default_prim = folder_path.rsplit("/", 1)[-1]  # use folder name
            default_prim = get_standard_default_prim_name(folder_path)
            sdf_layer = Sdf.Layer.CreateAnonymous()
            set_layer_defaults(sdf_layer, default_prim=default_prim)

@ -810,7 +826,7 @@ class ExtractUSDAssetContribution(publish.Extractor):
        folder_path = instance.data["folderPath"]
        product_name = instance.data["productName"]
        self.log.debug(f"Building asset: {folder_path} > {product_name}")
        folder_name = folder_path.rsplit("/", 1)[-1]
        asset_name = get_standard_default_prim_name(folder_path)

        # Contribute layers to asset
        # Use existing asset and add to it, or initialize a new asset layer

@ -828,8 +844,9 @@ class ExtractUSDAssetContribution(publish.Extractor):
            # If no existing publish of this product exists then we initialize
            # the layer as either a default asset or shot structure.
            init_type = instance.data["contribution_target_product_init"]
            self.log.debug("Initializing layer as type: %s", init_type)
            asset_layer, payload_layer = self.init_layer(
                asset_name=folder_name, init_type=init_type
                asset_name=asset_name, init_type=init_type
            )

            # Author timeCodesPerSecond and framesPerSecond if the asset layer

@ -909,7 +926,7 @@ class ExtractUSDAssetContribution(publish.Extractor):
        payload_layer.Export(payload_path, args={"format": "usda"})
        self.add_relative_file(instance, payload_path)

    def init_layer(self, asset_name, init_type):
    def init_layer(self, asset_name: str, init_type: str):
        """Initialize layer if no previous version exists"""

        if init_type == "asset":

@ -28,6 +28,7 @@ from ayon_core.pipeline.publish import (
    KnownPublishError,
    get_publish_template_name,
)
from ayon_core.pipeline import is_product_base_type_supported

log = logging.getLogger(__name__)

@ -367,6 +368,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        folder_entity = instance.data["folderEntity"]
        product_name = instance.data["productName"]
        product_type = instance.data["productType"]
        product_base_type = instance.data.get("productBaseType")

        self.log.debug("Product: {}".format(product_name))

        # Get existing product if it exists

@ -394,14 +397,33 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        product_id = None
        if existing_product_entity:
            product_id = existing_product_entity["id"]
        product_entity = new_product_entity(
            product_name,
            product_type,
            folder_entity["id"],
            data=data,
            attribs=attributes,
            entity_id=product_id
        )

        new_product_entity_kwargs = {
            "product_name": product_name,
            "product_type": product_type,
            "folder_id": folder_entity["id"],
            "data": data,
            "attribs": attributes,
            "entity_id": product_id,
            "product_base_type": product_base_type,
        }

        if not is_product_base_type_supported():
            new_product_entity_kwargs.pop("product_base_type")
            if (
                product_base_type is not None
                and product_base_type != product_type):
                self.log.warning((
                    "Product base type %s is not supported by the server, "
                    "but it's defined - and it differs from product type %s. "
                    "Using product base type as product type."
                ), product_base_type, product_type)

                new_product_entity_kwargs["product_type"] = (
                    product_base_type
                )

        product_entity = new_product_entity(**new_product_entity_kwargs)

        if existing_product_entity is None:
            # Create a new product
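The fallback in the hunk above can be read as a small decision rule on its own. The sketch below (plain Python, hypothetical values, not part of the commit) only restates that logic outside the plugin: when the server has no product base type support, drop the key and, if a differing base type was requested, use it as the plain product type instead.

# Hypothetical standalone restatement of the fallback above.
def resolve_product_kwargs(kwargs, server_supports_base_type):
    kwargs = dict(kwargs)
    if not server_supports_base_type:
        base_type = kwargs.pop("product_base_type")
        if base_type is not None and base_type != kwargs["product_type"]:
            kwargs["product_type"] = base_type
    return kwargs

print(resolve_product_kwargs(
    {"product_type": "render", "product_base_type": "image"},
    server_supports_base_type=False,
))
# {'product_type': 'image'}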
@ -927,6 +949,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        host_name = context.data["hostName"]
        anatomy_data = instance.data["anatomyData"]
        product_type = instance.data["productType"]
        product_base_type = instance.data.get("productBaseType")
        task_info = anatomy_data.get("task") or {}

        return get_publish_template_name(

@ -936,7 +959,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
            task_name=task_info.get("name"),
            task_type=task_info.get("type"),
            project_settings=context.data["project_settings"],
            logger=self.log
            logger=self.log,
            product_base_type=product_base_type
        )

    def get_rootless_path(self, anatomy, path):

@ -35,6 +35,7 @@ from ayon_core.pipeline.create import (
    ConvertorsOperationFailed,
    ConvertorItem,
)

from ayon_core.tools.publisher.abstract import (
    AbstractPublisherBackend,
    CardMessageTypes,

@ -1114,6 +1114,8 @@ class SceneInventoryView(QtWidgets.QTreeView):
        try:
            for item_id, item_version in zip(item_ids, versions):
                container = containers_by_id[item_id]
                if container.get("version_locked"):
                    continue
                try:
                    update_container(container, item_version)
                except Exception as exc:

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'core' version."""
__version__ = "1.6.11+dev"
__version__ = "1.6.13+dev"

@ -3,7 +3,6 @@ name="core"
description="AYON core addon."

[tool.poetry.dependencies]
python = ">=3.9.1,<3.10"
markdown = "^3.4.1"
clique = "1.6.*"
jsonschema = "^2.6.0"