Merge branch 'develop' of https://github.com/ynput/ayon-core into 989-ay-7315_extract-review-and-oiio-transcode-failing-to-transcode-media-blocking-publishes-2

# Conflicts:
#	client/ayon_core/lib/transcoding.py
#	client/ayon_core/plugins/publish/extract_color_transcode.py
This commit is contained in:
Roy Nieterau 2025-10-30 20:57:29 +01:00
commit ad5368eaa2
156 changed files with 10721 additions and 5817 deletions

View file

@ -35,6 +35,18 @@ body:
label: Version
description: What version are you running? Look to AYON Tray
options:
- 1.6.7
- 1.6.6
- 1.6.5
- 1.6.4
- 1.6.3
- 1.6.2
- 1.6.1
- 1.6.0
- 1.5.3
- 1.5.2
- 1.5.1
- 1.5.0
- 1.4.1
- 1.4.0
- 1.3.2

18
.github/workflows/deploy_mkdocs.yml vendored Normal file
View file

@ -0,0 +1,18 @@
name: Deploy MkDocs
on:
  push:
    tags:
      - "*"
  workflow_dispatch:
jobs:
  build-mk-docs:
    # FIXME: Update @develop to @main after `ops-repo-automation` release.
    uses: ynput/ops-repo-automation/.github/workflows/deploy_mkdocs.yml@develop
    with:
      repo: ${{ github.repository }}
    secrets:
      YNPUT_BOT_TOKEN: ${{ secrets.YNPUT_BOT_TOKEN }}
      CI_USER: ${{ secrets.CI_USER }}
      CI_EMAIL: ${{ secrets.CI_EMAIL }}

View file

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
"""Base class for AYON addons."""
import copy
from __future__ import annotations
import os
import sys
import time
@ -8,12 +9,15 @@ import inspect
import logging
import threading
import collections
import warnings
from uuid import uuid4
from abc import ABC, abstractmethod
from typing import Optional
from urllib.parse import urlencode
from types import ModuleType
import typing
from typing import Optional, Any, Union
import ayon_api
from semver import VersionInfo
from ayon_core import AYON_CORE_ROOT
from ayon_core.lib import (
@ -29,6 +33,11 @@ from .interfaces import (
IHostAddon,
)
if typing.TYPE_CHECKING:
import click
from ayon_core.host import HostBase
# Files that will be always ignored on addons import
IGNORED_FILENAMES = {
"__pycache__",
@ -38,33 +47,6 @@ IGNORED_DEFAULT_FILENAMES = {
"__init__.py",
}
# When addon was moved from ayon-core codebase
# - this is used to log the missing addon
MOVED_ADDON_MILESTONE_VERSIONS = {
"aftereffects": VersionInfo(0, 2, 0),
"applications": VersionInfo(0, 2, 0),
"blender": VersionInfo(0, 2, 0),
"celaction": VersionInfo(0, 2, 0),
"clockify": VersionInfo(0, 2, 0),
"deadline": VersionInfo(0, 2, 0),
"flame": VersionInfo(0, 2, 0),
"fusion": VersionInfo(0, 2, 0),
"harmony": VersionInfo(0, 2, 0),
"hiero": VersionInfo(0, 2, 0),
"max": VersionInfo(0, 2, 0),
"photoshop": VersionInfo(0, 2, 0),
"timers_manager": VersionInfo(0, 2, 0),
"traypublisher": VersionInfo(0, 2, 0),
"tvpaint": VersionInfo(0, 2, 0),
"maya": VersionInfo(0, 2, 0),
"nuke": VersionInfo(0, 2, 0),
"resolve": VersionInfo(0, 2, 0),
"royalrender": VersionInfo(0, 2, 0),
"substancepainter": VersionInfo(0, 2, 0),
"houdini": VersionInfo(0, 3, 0),
"unreal": VersionInfo(0, 2, 0),
}
class ProcessPreparationError(Exception):
"""Exception that can be used when process preparation failed.
@ -127,7 +109,7 @@ class _LoadCache:
addon_modules = []
def load_addons(force=False):
def load_addons(force: bool = False) -> None:
"""Load AYON addons as python modules.
Modules does not load only classes (like in Interfaces) because there must
@ -154,91 +136,79 @@ def load_addons(force=False):
time.sleep(0.1)
def _get_ayon_bundle_data():
def _get_ayon_bundle_data() -> tuple[
dict[str, Any], Optional[dict[str, Any]]
]:
studio_bundle_name = os.environ.get("AYON_STUDIO_BUNDLE_NAME")
project_bundle_name = os.getenv("AYON_BUNDLE_NAME")
# If AYON launcher <1.4.0 was used
if not studio_bundle_name:
studio_bundle_name = project_bundle_name
bundles = ayon_api.get_bundles()["bundles"]
bundle_name = os.getenv("AYON_BUNDLE_NAME")
return next(
studio_bundle = next(
(
bundle
for bundle in bundles
if bundle["name"] == bundle_name
if bundle["name"] == studio_bundle_name
),
None
)
if studio_bundle is None:
raise RuntimeError(f"Failed to find bundle '{studio_bundle_name}'.")
def _get_ayon_addons_information(bundle_info):
project_bundle = None
if project_bundle_name and project_bundle_name != studio_bundle_name:
project_bundle = next(
(
bundle
for bundle in bundles
if bundle["name"] == project_bundle_name
),
None
)
if project_bundle is None:
raise RuntimeError(
f"Failed to find project bundle '{project_bundle_name}'."
)
return studio_bundle, project_bundle
def _get_ayon_addons_information(
studio_bundle: dict[str, Any],
project_bundle: Optional[dict[str, Any]],
) -> dict[str, str]:
"""Receive information about addons to use from server.
Todos:
Actually ask server for the information.
Allow project name as optional argument to be able to query information
about used addons for specific project.
Wrap versions into an object.
Returns:
List[Dict[str, Any]]: List of addon information to use.
list[dict[str, Any]]: List of addon information to use.
"""
key_values = {
"summary": "true",
"bundle_name": studio_bundle["name"],
}
if project_bundle:
key_values["project_bundle_name"] = project_bundle["name"]
output = []
bundle_addons = bundle_info["addons"]
addons = ayon_api.get_addons_info()["addons"]
for addon in addons:
name = addon["name"]
versions = addon.get("versions")
addon_version = bundle_addons.get(name)
if addon_version is None or not versions:
continue
version = versions.get(addon_version)
if version:
version = copy.deepcopy(version)
version["name"] = name
version["version"] = addon_version
output.append(version)
return output
query = urlencode(key_values)
response = ayon_api.get(f"settings?{query}")
return {
addon["name"]: addon["version"]
for addon in response.data["addons"]
}
def _handle_moved_addons(addon_name, milestone_version, log):
"""Log message that addon version is not compatible with current core.
The function can return path to addon client code, but that can happen
only if ayon-core is used from code (for development), but still
logs a warning.
Args:
addon_name (str): Addon name.
milestone_version (str): Milestone addon version.
log (logging.Logger): Logger object.
Returns:
Union[str, None]: Addon dir or None.
"""
# Handle addons which were moved out of ayon-core
# - Try to fix it by loading it directly from server addons dir in
# ayon-core repository. But that will work only if ayon-core is
# used from code.
addon_dir = os.path.join(
os.path.dirname(os.path.dirname(AYON_CORE_ROOT)),
"server_addon",
addon_name,
"client",
)
if not os.path.exists(addon_dir):
log.error(
f"Addon '{addon_name}' is not available. Please update "
f"{addon_name} addon to '{milestone_version}' or higher."
)
return None
log.warning((
"Please update '{}' addon to '{}' or higher."
" Using client code from ayon-core repository."
).format(addon_name, milestone_version))
return addon_dir
def _load_ayon_addons(log):
def _load_ayon_addons(log: logging.Logger) -> list[ModuleType]:
"""Load AYON addons based on information from server.
This function should not trigger downloading of any addons but only use
@ -248,10 +218,13 @@ def _load_ayon_addons(log):
Args:
log (logging.Logger): Logger object.
Returns:
list[ModuleType]: Loaded addon modules.
"""
all_addon_modules = []
bundle_info = _get_ayon_bundle_data()
addons_info = _get_ayon_addons_information(bundle_info)
studio_bundle, project_bundle = _get_ayon_bundle_data()
addons_info = _get_ayon_addons_information(studio_bundle, project_bundle)
if not addons_info:
return all_addon_modules
@ -263,18 +236,16 @@ def _load_ayon_addons(log):
dev_addons_info = {}
if dev_mode_enabled:
# Get dev addons info only when dev mode is enabled
dev_addons_info = bundle_info.get("addonDevelopment", dev_addons_info)
dev_addons_info = studio_bundle.get(
"addonDevelopment", dev_addons_info
)
addons_dir_exists = os.path.exists(addons_dir)
if not addons_dir_exists:
log.warning("Addons directory does not exists. Path \"{}\"".format(
addons_dir
))
for addon_info in addons_info:
addon_name = addon_info["name"]
addon_version = addon_info["version"]
log.warning(
f"Addons directory does not exists. Path \"{addons_dir}\"")
for addon_name, addon_version in addons_info.items():
# core addon does not have any addon object
if addon_name == "core":
continue
@ -283,32 +254,28 @@ def _load_ayon_addons(log):
use_dev_path = dev_addon_info.get("enabled", False)
addon_dir = None
milestone_version = MOVED_ADDON_MILESTONE_VERSIONS.get(addon_name)
if use_dev_path:
addon_dir = dev_addon_info["path"]
if not addon_dir or not os.path.exists(addon_dir):
log.warning((
"Dev addon {} {} path does not exists. Path \"{}\""
).format(addon_name, addon_version, addon_dir))
continue
if addon_dir:
addon_dir = os.path.expandvars(
addon_dir.format_map(os.environ)
)
elif (
milestone_version is not None
and VersionInfo.parse(addon_version) < milestone_version
):
addon_dir = _handle_moved_addons(
addon_name, milestone_version, log
)
if not addon_dir:
if not addon_dir or not os.path.exists(addon_dir):
log.warning(
f"Dev addon {addon_name} {addon_version} path"
f" does not exists. Path \"{addon_dir}\""
)
continue
elif addons_dir_exists:
folder_name = "{}_{}".format(addon_name, addon_version)
folder_name = f"{addon_name}_{addon_version}"
addon_dir = os.path.join(addons_dir, folder_name)
if not os.path.exists(addon_dir):
log.debug((
"No localized client code found for addon {} {}."
).format(addon_name, addon_version))
log.debug(
"No localized client code found"
f" for addon {addon_name} {addon_version}."
)
continue
if not addon_dir:
@ -347,24 +314,22 @@ def _load_ayon_addons(log):
except BaseException:
log.warning(
"Failed to import \"{}\"".format(basename),
f"Failed to import \"{basename}\"",
exc_info=True
)
if not addon_modules:
log.warning("Addon {} {} has no content to import".format(
addon_name, addon_version
))
log.warning(
f"Addon {addon_name} {addon_version} has no content to import"
)
continue
if len(addon_modules) > 1:
log.warning((
"Multiple modules ({}) were found in addon '{}' in dir {}."
).format(
", ".join([m.__name__ for m in addon_modules]),
addon_name,
addon_dir,
))
joined_modules = ", ".join([m.__name__ for m in addon_modules])
log.warning(
f"Multiple modules ({joined_modules}) were found in"
f" addon '{addon_name}' in dir {addon_dir}."
)
all_addon_modules.extend(addon_modules)
return all_addon_modules
@ -382,20 +347,21 @@ class AYONAddon(ABC):
Attributes:
enabled (bool): Is addon enabled.
name (str): Addon name.
Args:
manager (AddonsManager): Manager object who discovered addon.
settings (dict[str, Any]): AYON settings.
"""
enabled = True
enabled: bool = True
_id = None
# Temporary variable for 'version' property
_missing_version_warned = False
def __init__(self, manager, settings):
def __init__(
self, manager: AddonsManager, settings: dict[str, Any]
) -> None:
self.manager = manager
self.log = Logger.get_logger(self.name)
@ -403,7 +369,7 @@ class AYONAddon(ABC):
self.initialize(settings)
@property
def id(self):
def id(self) -> str:
"""Random id of addon object.
Returns:
@ -416,7 +382,7 @@ class AYONAddon(ABC):
@property
@abstractmethod
def name(self):
def name(self) -> str:
"""Addon name.
Returns:
@ -426,7 +392,7 @@ class AYONAddon(ABC):
pass
@property
def version(self):
def version(self) -> str:
"""Addon version.
Todo:
@ -445,7 +411,7 @@ class AYONAddon(ABC):
)
return "0.0.0"
def initialize(self, settings):
def initialize(self, settings: dict[str, Any]) -> None:
"""Initialization of addon attributes.
It is not recommended to override __init__ that's why specific method
@ -457,7 +423,7 @@ class AYONAddon(ABC):
"""
pass
def connect_with_addons(self, enabled_addons):
def connect_with_addons(self, enabled_addons: list[AYONAddon]) -> None:
"""Connect with other enabled addons.
Args:
@ -468,7 +434,7 @@ class AYONAddon(ABC):
def ensure_is_process_ready(
self, process_context: ProcessContext
):
) -> None:
"""Make sure addon is prepared for a process.
This method is called when some action makes sure that addon has set
@ -489,7 +455,7 @@ class AYONAddon(ABC):
"""
pass
def get_global_environments(self):
def get_global_environments(self) -> dict[str, str]:
"""Get global environments values of addon.
Environment variables that can be get only from system settings.
@ -500,20 +466,12 @@ class AYONAddon(ABC):
"""
return {}
def modify_application_launch_arguments(self, application, env):
"""Give option to modify launch environments before application launch.
Implementation is optional. To change environments modify passed
dictionary of environments.
Args:
application (Application): Application that is launched.
env (dict[str, str]): Current environment variables.
"""
pass
def on_host_install(self, host, host_name, project_name):
def on_host_install(
self,
host: HostBase,
host_name: str,
project_name: str,
) -> None:
"""Host was installed which gives option to handle in-host logic.
It is a good option to register in-host event callbacks which are
@ -524,7 +482,7 @@ class AYONAddon(ABC):
to receive from 'host' object.
Args:
host (Union[ModuleType, HostBase]): Access to installed/registered
host (HostBase): Access to installed/registered
host object.
host_name (str): Name of host.
project_name (str): Project name which is main part of host
@ -533,7 +491,7 @@ class AYONAddon(ABC):
"""
pass
def cli(self, addon_click_group):
def cli(self, addon_click_group: click.Group) -> None:
"""Add commands to click group.
The best practise is to create click group for whole addon which is
@ -564,15 +522,21 @@ class AYONAddon(ABC):
class _AddonReportInfo:
def __init__(
self, class_name, name, version, report_value_by_label
):
self,
class_name: str,
name: str,
version: str,
report_value_by_label: dict[str, Optional[str]],
) -> None:
self.class_name = class_name
self.name = name
self.version = version
self.report_value_by_label = report_value_by_label
@classmethod
def from_addon(cls, addon, report):
def from_addon(
cls, addon: AYONAddon, report: dict[str, dict[str, int]]
) -> "_AddonReportInfo":
class_name = addon.__class__.__name__
report_value_by_label = {
label: reported.get(class_name)
@ -599,29 +563,35 @@ class AddonsManager:
_report_total_key = "Total"
_log = None
def __init__(self, settings=None, initialize=True):
def __init__(
self,
settings: Optional[dict[str, Any]] = None,
initialize: bool = True,
) -> None:
self._settings = settings
self._addons = []
self._addons_by_id = {}
self._addons_by_name = {}
self._addons: list[AYONAddon] = []
self._addons_by_id: dict[str, AYONAddon] = {}
self._addons_by_name: dict[str, AYONAddon] = {}
# For report of time consumption
self._report = {}
self._report: dict[str, dict[str, int]] = {}
if initialize:
self.initialize_addons()
self.connect_addons()
def __getitem__(self, addon_name):
def __getitem__(self, addon_name: str) -> AYONAddon:
return self._addons_by_name[addon_name]
@property
def log(self):
def log(self) -> logging.Logger:
if self._log is None:
self._log = logging.getLogger(self.__class__.__name__)
self._log = Logger.get_logger(self.__class__.__name__)
return self._log
def get(self, addon_name, default=None):
def get(
self, addon_name: str, default: Optional[Any] = None
) -> Union[AYONAddon, Any]:
"""Access addon by name.
Args:
@ -635,18 +605,20 @@ class AddonsManager:
return self._addons_by_name.get(addon_name, default)
@property
def addons(self):
def addons(self) -> list[AYONAddon]:
return list(self._addons)
@property
def addons_by_id(self):
def addons_by_id(self) -> dict[str, AYONAddon]:
return dict(self._addons_by_id)
@property
def addons_by_name(self):
def addons_by_name(self) -> dict[str, AYONAddon]:
return dict(self._addons_by_name)
def get_enabled_addon(self, addon_name, default=None):
def get_enabled_addon(
self, addon_name: str, default: Optional[Any] = None
) -> Union[AYONAddon, Any]:
"""Fast access to enabled addon.
If addon is available but is not enabled default value is returned.
@ -657,7 +629,7 @@ class AddonsManager:
not enabled.
Returns:
Union[AYONAddon, None]: Enabled addon found by name or None.
Union[AYONAddon, Any]: Enabled addon found by name or None.
"""
addon = self.get(addon_name)
@ -665,7 +637,7 @@ class AddonsManager:
return addon
return default
def get_enabled_addons(self):
def get_enabled_addons(self) -> list[AYONAddon]:
"""Enabled addons initialized by the manager.
Returns:
@ -678,7 +650,7 @@ class AddonsManager:
if addon.enabled
]
def initialize_addons(self):
def initialize_addons(self) -> None:
"""Import and initialize addons."""
# Make sure modules are loaded
load_addons()
@ -759,7 +731,7 @@ class AddonsManager:
report[self._report_total_key] = time.time() - time_start
self._report["Initialization"] = report
def connect_addons(self):
def connect_addons(self) -> None:
"""Trigger connection with other enabled addons.
Addons should handle their interfaces in `connect_with_addons`.
@ -768,7 +740,7 @@ class AddonsManager:
time_start = time.time()
prev_start_time = time_start
enabled_addons = self.get_enabled_addons()
self.log.debug("Has {} enabled addons.".format(len(enabled_addons)))
self.log.debug(f"Has {len(enabled_addons)} enabled addons.")
for addon in enabled_addons:
try:
addon.connect_with_addons(enabled_addons)
@ -787,7 +759,7 @@ class AddonsManager:
report[self._report_total_key] = time.time() - time_start
self._report["Connect modules"] = report
def collect_global_environments(self):
def collect_global_environments(self) -> dict[str, str]:
"""Helper to collect global environment variabled from modules.
Returns:
@ -810,15 +782,31 @@ class AddonsManager:
module_envs[key] = value
return module_envs
def collect_plugin_paths(self):
def collect_plugin_paths(self) -> dict[str, list[str]]:
"""Helper to collect all plugins from modules inherited IPluginPaths.
Unknown keys are logged out.
Deprecated:
Use targeted methods 'collect_launcher_action_paths',
'collect_create_plugin_paths', 'collect_load_plugin_paths',
'collect_publish_plugin_paths' and
'collect_inventory_action_paths' to collect plugin paths.
Returns:
dict: Output is dictionary with keys "publish", "create", "load",
"actions" and "inventory" each containing list of paths.
"""
warnings.warn(
"Used deprecated method 'collect_plugin_paths'. Please use"
" targeted methods 'collect_launcher_action_paths',"
" 'collect_create_plugin_paths', 'collect_load_plugin_paths'"
" 'collect_publish_plugin_paths' and"
" 'collect_inventory_action_paths'",
DeprecationWarning,
stacklevel=2
)
# Output structure
output = {
"publish": [],
@ -853,7 +841,7 @@ class AddonsManager:
# Report unknown keys (Developing purposes)
if unknown_keys_by_addon:
expected_keys = ", ".join([
"\"{}\"".format(key) for key in output.keys()
f'"{key}"' for key in output.keys()
])
msg_template = "Addon: \"{}\" - got key {}"
msg_items = []
@ -862,39 +850,45 @@ class AddonsManager:
"\"{}\"".format(key) for key in keys
])
msg_items.append(msg_template.format(addon_name, joined_keys))
self.log.warning((
"Expected keys from `get_plugin_paths` are {}. {}"
).format(expected_keys, " | ".join(msg_items)))
joined_items = " | ".join(msg_items)
self.log.warning(
f"Expected keys from `get_plugin_paths` are {expected_keys}."
f" {joined_items}"
)
return output
def _collect_plugin_paths(self, method_name, *args, **kwargs):
def _collect_plugin_paths(self, method_name: str, *args, **kwargs):
output = []
for addon in self.get_enabled_addons():
# Skip addon that do not inherit from `IPluginPaths`
if not isinstance(addon, IPluginPaths):
continue
paths = None
method = getattr(addon, method_name)
try:
paths = method(*args, **kwargs)
except Exception:
self.log.warning(
(
"Failed to get plugin paths from addon"
" '{}' using '{}'."
).format(addon.__class__.__name__, method_name),
"Failed to get plugin paths from addon"
f" '{addon.name}' using '{method_name}'.",
exc_info=True
)
if not paths:
continue
if paths:
# Convert to list if value is not list
if not isinstance(paths, (list, tuple, set)):
paths = [paths]
output.extend(paths)
if isinstance(paths, str):
paths = [paths]
self.log.warning(
f"Addon '{addon.name}' returned invalid output type"
f" from '{method_name}'."
f" Got 'str' expected 'list[str]'."
)
output.extend(paths)
return output
def collect_launcher_action_paths(self):
def collect_launcher_action_paths(self) -> list[str]:
"""Helper to collect launcher action paths from addons.
Returns:
@ -909,16 +903,16 @@ class AddonsManager:
output.insert(0, actions_dir)
return output
def collect_create_plugin_paths(self, host_name):
def collect_create_plugin_paths(self, host_name: str) -> list[str]:
"""Helper to collect creator plugin paths from addons.
Args:
host_name (str): For which host are creators meant.
Returns:
list: List of creator plugin paths.
"""
list[str]: List of creator plugin paths.
"""
return self._collect_plugin_paths(
"get_create_plugin_paths",
host_name
@ -926,37 +920,37 @@ class AddonsManager:
collect_creator_plugin_paths = collect_create_plugin_paths
def collect_load_plugin_paths(self, host_name):
def collect_load_plugin_paths(self, host_name: str) -> list[str]:
"""Helper to collect load plugin paths from addons.
Args:
host_name (str): For which host are load plugins meant.
Returns:
list: List of load plugin paths.
"""
list[str]: List of load plugin paths.
"""
return self._collect_plugin_paths(
"get_load_plugin_paths",
host_name
)
def collect_publish_plugin_paths(self, host_name):
def collect_publish_plugin_paths(self, host_name: str) -> list[str]:
"""Helper to collect load plugin paths from addons.
Args:
host_name (str): For which host are load plugins meant.
Returns:
list: List of pyblish plugin paths.
"""
list[str]: List of pyblish plugin paths.
"""
return self._collect_plugin_paths(
"get_publish_plugin_paths",
host_name
)
def collect_inventory_action_paths(self, host_name):
def collect_inventory_action_paths(self, host_name: str) -> list[str]:
"""Helper to collect load plugin paths from addons.
Args:
@ -964,21 +958,21 @@ class AddonsManager:
Returns:
list: List of pyblish plugin paths.
"""
"""
return self._collect_plugin_paths(
"get_inventory_action_paths",
host_name
)
def get_host_addon(self, host_name):
def get_host_addon(self, host_name: str) -> Optional[AYONAddon]:
"""Find host addon by host name.
Args:
host_name (str): Host name for which is found host addon.
Returns:
Union[AYONAddon, None]: Found host addon by name or `None`.
Optional[AYONAddon]: Found host addon by name or `None`.
"""
for addon in self.get_enabled_addons():
@ -989,21 +983,21 @@ class AddonsManager:
return addon
return None
def get_host_names(self):
def get_host_names(self) -> set[str]:
"""List of available host names based on host addons.
Returns:
Iterable[str]: All available host names based on enabled addons
set[str]: All available host names based on enabled addons
inheriting 'IHostAddon'.
"""
"""
return {
addon.host_name
for addon in self.get_enabled_addons()
if isinstance(addon, IHostAddon)
}
def print_report(self):
def print_report(self) -> None:
"""Print out report of time spent on addons initialization parts.
Reporting is not automated must be implemented for each initialization

View file

@ -1,6 +1,7 @@
"""Addon interfaces for AYON."""
from __future__ import annotations
import warnings
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING, Callable, Optional, Type
@ -39,26 +40,29 @@ class AYONInterface(metaclass=_AYONInterfaceMeta):
class IPluginPaths(AYONInterface):
"""Addon has plugin paths to return.
"""Addon wants to register plugin paths."""
Expected result is dictionary with keys "publish", "create", "load",
"actions" or "inventory" and values as list or string.
{
"publish": ["path/to/publish_plugins"]
}
"""
@abstractmethod
def get_plugin_paths(self) -> dict[str, list[str]]:
"""Return plugin paths for addon.
This method was abstract (required) in the past, so raise the required
'core' addon version when 'get_plugin_paths' is removed from
addon.
Deprecated:
Please implement specific methods 'get_create_plugin_paths',
'get_load_plugin_paths', 'get_inventory_action_paths' and
'get_publish_plugin_paths' to return plugin paths.
Returns:
dict[str, list[str]]: Plugin paths for addon.
"""
return {}
def _get_plugin_paths_by_type(
self, plugin_type: str) -> list[str]:
self, plugin_type: str
) -> list[str]:
"""Get plugin paths by type.
Args:
@ -78,6 +82,24 @@ class IPluginPaths(AYONInterface):
if not isinstance(paths, (list, tuple, set)):
paths = [paths]
new_function_name = "get_launcher_action_paths"
if plugin_type == "create":
new_function_name = "get_create_plugin_paths"
elif plugin_type == "load":
new_function_name = "get_load_plugin_paths"
elif plugin_type == "publish":
new_function_name = "get_publish_plugin_paths"
elif plugin_type == "inventory":
new_function_name = "get_inventory_action_paths"
warnings.warn(
f"Addon '{self.name}' returns '{plugin_type}' paths using"
" 'get_plugin_paths' method. Please implement"
f" '{new_function_name}' instead.",
DeprecationWarning,
stacklevel=2
)
return paths
def get_launcher_action_paths(self) -> list[str]:

View file

@ -27,25 +27,40 @@ from ayon_core.lib.env_tools import (
@click.group(invoke_without_command=True)
@click.pass_context
@click.option("--use-staging", is_flag=True,
expose_value=False, help="use staging variants")
@click.option("--debug", is_flag=True, expose_value=False,
help="Enable debug")
@click.option("--verbose", expose_value=False,
help=("Change AYON log level (debug - critical or 0-50)"))
@click.option("--force", is_flag=True, hidden=True)
def main_cli(ctx, force):
@click.option(
"--use-staging",
is_flag=True,
expose_value=False,
help="use staging variants")
@click.option(
"--debug",
is_flag=True,
expose_value=False,
help="Enable debug")
@click.option(
"--project",
help="Project name")
@click.option(
"--verbose",
expose_value=False,
help="Change AYON log level (debug - critical or 0-50)")
@click.option(
"--use-dev",
is_flag=True,
expose_value=False,
help="use dev bundle")
def main_cli(ctx, *_args, **_kwargs):
"""AYON is main command serving as entry point to pipeline system.
It wraps different commands together.
"""
if ctx.invoked_subcommand is None:
# Print help if headless mode is used
if os.getenv("AYON_HEADLESS_MODE") == "1":
print(ctx.get_help())
sys.exit(0)
else:
ctx.params.pop("project")
ctx.forward(tray)
@ -60,7 +75,6 @@ def tray(force):
Default action of AYON command is to launch tray widget to control basic
aspects of AYON. See documentation for more information.
"""
from ayon_core.tools.tray import main
main(force)
@ -306,6 +320,43 @@ def _add_addons(addons_manager):
)
def _cleanup_project_args():
    """Strip a global '--project <name>' pair from 'sys.argv' when unused.

    Walks the click command chain that 'sys.argv' resolves to and, when
    the finally resolved subcommand does not itself declare a 'project'
    parameter, removes the '--project' flag and its following value from
    'sys.argv' so the subcommand's own parsing does not fail on an
    unknown option.
    """
    rem_args = list(sys.argv[1:])
    # Nothing to do when the flag is not present at all.
    # NOTE(review): a combined "--project=<name>" form would not match this
    #   membership test and would be left in place — confirm that is intended.
    if "--project" not in rem_args:
        return
    cmd = None
    current_ctx = None
    parent_name = "ayon"
    parent_cmd = main_cli
    # Resolve subcommands one level at a time, mirroring how click itself
    # dispatches nested groups ('resolve_command' is only available on
    # group-like commands, which terminates the loop at a leaf command).
    while hasattr(parent_cmd, "resolve_command"):
        if current_ctx is None:
            current_ctx = main_cli.make_context(parent_name, rem_args)
        else:
            current_ctx = parent_cmd.make_context(
                parent_name,
                rem_args,
                parent=current_ctx
            )
        if not rem_args:
            break
        cmd_name, cmd, rem_args = parent_cmd.resolve_command(
            current_ctx, rem_args
        )
        parent_name = cmd_name
        parent_cmd = cmd
    # No subcommand was resolved -> leave arguments untouched.
    if cmd is None:
        return
    # The resolved command accepts a 'project' parameter itself,
    # so the flag must stay for it to consume.
    param_names = {param.name for param in cmd.params}
    if "project" in param_names:
        return
    # Remove '--project' and the value argument that follows it.
    idx = sys.argv.index("--project")
    sys.argv.pop(idx)
    sys.argv.pop(idx)
def main(*args, **kwargs):
logging.basicConfig()
@ -332,10 +383,14 @@ def main(*args, **kwargs):
addons_manager = AddonsManager()
_set_addons_environments(addons_manager)
_add_addons(addons_manager)
_cleanup_project_args()
try:
main_cli(
prog_name="ayon",
obj={"addons_manager": addons_manager},
args=(sys.argv[1:]),
)
except Exception: # noqa
exc_info = sys.exc_info()

View file

@ -33,22 +33,25 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
"cinema4d",
"silhouette",
"gaffer",
"loki",
}
launch_types = {LaunchTypes.local}
def execute(self):
if not self.data.get("start_last_workfile"):
self.log.info("It is set to not start last workfile on start.")
return
workfile_path = self.data.get("workfile_path")
if not workfile_path:
if not self.data.get("start_last_workfile"):
self.log.info("It is set to not start last workfile on start.")
return
last_workfile = self.data.get("last_workfile_path")
if not last_workfile:
self.log.warning("Last workfile was not collected.")
return
workfile_path = self.data.get("last_workfile_path")
if not workfile_path:
self.log.warning("Last workfile was not collected.")
return
if not os.path.exists(last_workfile):
if not os.path.exists(workfile_path):
self.log.info("Current context does not have any workfile yet.")
return
# Add path to workfile to arguments
self.launch_context.launch_args.append(last_workfile)
self.launch_context.launch_args.append(workfile_path)

View file

@ -14,7 +14,7 @@ class OCIOEnvHook(PreLaunchHook):
"fusion",
"blender",
"aftereffects",
"3dsmax",
"max",
"houdini",
"maya",
"nuke",
@ -24,6 +24,7 @@ class OCIOEnvHook(PreLaunchHook):
"cinema4d",
"silhouette",
"gaffer",
"loki",
}
launch_types = set()

View file

@ -1,9 +1,14 @@
from .constants import ContextChangeReason
from .abstract import AbstractHost, ApplicationInformation
from .host import (
HostBase,
ContextChangeData,
)
from .interfaces import (
IWorkfileHost,
WorkfileInfo,
PublishedWorkfileInfo,
ILoadHost,
IPublishHost,
INewPublisher,
@ -13,9 +18,17 @@ from .dirmap import HostDirmap
__all__ = (
"ContextChangeReason",
"AbstractHost",
"ApplicationInformation",
"HostBase",
"ContextChangeData",
"IWorkfileHost",
"WorkfileInfo",
"PublishedWorkfileInfo",
"ILoadHost",
"IPublishHost",
"INewPublisher",

View file

@ -0,0 +1,120 @@
from __future__ import annotations
import logging
from abc import ABC, abstractmethod
from dataclasses import dataclass
import typing
from typing import Optional, Any
from .constants import ContextChangeReason
if typing.TYPE_CHECKING:
from ayon_core.pipeline import Anatomy
from .typing import HostContextData
@dataclass
class ApplicationInformation:
    """Describes the application (DCC) a host integration runs in.

    Attributes:
        app_name (Optional[str]): Human readable application name,
            e.g. "Maya", "NukeX", "Nuke".
        app_version (Optional[str]): Application version string,
            e.g. "15.2.1".

    """

    # Both fields default to None when the information is not available.
    app_name: Optional[str] = None
    app_version: Optional[str] = None
class AbstractHost(ABC):
    """Abstract definition of host implementation.

    A host is the pipeline integration running inside a DCC application.
    This interface defines the minimum contract every host must provide:
    identity ('name', 'log'), application information and access to /
    changes of the current context (project, folder, task).
    """

    @property
    @abstractmethod
    def log(self) -> logging.Logger:
        """Logger of the host implementation."""
        pass

    @property
    @abstractmethod
    def name(self) -> str:
        """Host name."""
        pass

    @abstractmethod
    def get_app_information(self) -> ApplicationInformation:
        """Information about the application where host is running.

        Returns:
            ApplicationInformation: Application information.

        """
        pass

    @abstractmethod
    def get_current_context(self) -> HostContextData:
        """Get the current context of the host.

        Current context is defined by project name, folder path and task name.

        Returns:
            HostContextData: The current context of the host.

        """
        pass

    @abstractmethod
    def set_current_context(
        self,
        folder_entity: dict[str, Any],
        task_entity: dict[str, Any],
        *,
        reason: ContextChangeReason = ContextChangeReason.undefined,
        project_entity: Optional[dict[str, Any]] = None,
        anatomy: Optional[Anatomy] = None,
    ) -> HostContextData:
        """Change context of the host.

        Args:
            folder_entity (dict[str, Any]): Folder entity.
            task_entity (dict[str, Any]): Task entity.
            reason (ContextChangeReason): Reason for the context change.
            project_entity (Optional[dict[str, Any]]): Project entity.
            anatomy (Optional[Anatomy]): Anatomy entity.

        Returns:
            HostContextData: The context after the change.

        """
        pass

    @abstractmethod
    def get_current_project_name(self) -> str:
        """Get the current project name.

        Returns:
            str: The current project name.

        """
        pass

    @abstractmethod
    def get_current_folder_path(self) -> Optional[str]:
        """Get the current folder path.

        Returns:
            Optional[str]: The current folder path.

        """
        pass

    @abstractmethod
    def get_current_task_name(self) -> Optional[str]:
        """Get the current task name.

        Returns:
            Optional[str]: The current task name.

        """
        pass

    @abstractmethod
    def get_context_title(self) -> str:
        """Get the context title used in UIs."""
        pass

View file

@ -0,0 +1,15 @@
from enum import Enum
class StrEnum(str, Enum):
    """Enum whose members are also plain ``str`` values.

    Members compare equal to, and render as, their underlying string
    value, so they can be used anywhere a string is expected.
    """

    def __str__(self) -> str:
        # Default Enum.__str__ would render "ClassName.member"; expose
        # the raw string value instead.
        return self.value
class ContextChangeReason(StrEnum):
    """Reasons for context change in the host.

    Values are event-style string identifiers describing why the host's
    current context (project/folder/task) changed.
    """

    # Context changed for an unspecified reason.
    undefined = "undefined"
    # Context changed because a workfile was opened.
    workfile_open = "workfile.opened"
    # Context changed because a workfile was saved.
    workfile_save = "workfile.saved"

View file

@ -1,13 +1,35 @@
from __future__ import annotations
import os
import logging
import contextlib
from abc import ABC, abstractproperty
import typing
from typing import Optional, Any
from dataclasses import dataclass
# NOTE can't import 'typing' because of issues in Maya 2020
# - shiboken crashes on 'typing' module import
import ayon_api
from ayon_core.lib import emit_event
from .constants import ContextChangeReason
from .abstract import AbstractHost, ApplicationInformation
if typing.TYPE_CHECKING:
from ayon_core.pipeline import Anatomy
from .typing import HostContextData
class HostBase(ABC):
@dataclass
class ContextChangeData:
project_entity: dict[str, Any]
folder_entity: dict[str, Any]
task_entity: dict[str, Any]
reason: ContextChangeReason
anatomy: Anatomy
class HostBase(AbstractHost):
"""Base of host implementation class.
Host is pipeline implementation of DCC application. This class should help
@ -74,6 +96,18 @@ class HostBase(ABC):
pass
def get_app_information(self) -> ApplicationInformation:
"""Running application information.
Host integration should override this method and return correct
information.
Returns:
ApplicationInformation: Application information.
"""
return ApplicationInformation()
def install(self):
"""Install host specific functionality.
@ -82,47 +116,41 @@ class HostBase(ABC):
It is called automatically when 'ayon_core.pipeline.install_host' is
triggered.
"""
"""
pass
@property
def log(self):
def log(self) -> logging.Logger:
if self._log is None:
self._log = logging.getLogger(self.__class__.__name__)
return self._log
@abstractproperty
def name(self):
"""Host name."""
pass
def get_current_project_name(self):
def get_current_project_name(self) -> str:
"""
Returns:
Union[str, None]: Current project name.
str: Current project name.
"""
return os.environ["AYON_PROJECT_NAME"]
return os.environ.get("AYON_PROJECT_NAME")
def get_current_folder_path(self):
def get_current_folder_path(self) -> Optional[str]:
"""
Returns:
Union[str, None]: Current asset name.
"""
Optional[str]: Current asset name.
"""
return os.environ.get("AYON_FOLDER_PATH")
def get_current_task_name(self):
def get_current_task_name(self) -> Optional[str]:
"""
Returns:
Union[str, None]: Current task name.
"""
Optional[str]: Current task name.
"""
return os.environ.get("AYON_TASK_NAME")
def get_current_context(self):
def get_current_context(self) -> HostContextData:
"""Get current context information.
This method should be used to get current context of host. Usage of
@ -131,16 +159,85 @@ class HostBase(ABC):
can't be caught properly.
Returns:
Dict[str, Union[str, None]]: Context with 3 keys 'project_name',
'folder_path' and 'task_name'. All of them can be 'None'.
"""
HostContextData: Current context with 'project_name',
'folder_path' and 'task_name'.
"""
return {
"project_name": self.get_current_project_name(),
"folder_path": self.get_current_folder_path(),
"task_name": self.get_current_task_name()
}
def set_current_context(
self,
folder_entity: dict[str, Any],
task_entity: dict[str, Any],
*,
reason: ContextChangeReason = ContextChangeReason.undefined,
project_entity: Optional[dict[str, Any]] = None,
anatomy: Optional[Anatomy] = None,
) -> HostContextData:
"""Set current context information.
This method should be used to set current context of host. Usage of
this method can be crucial for host implementations in DCCs where
can be opened multiple workfiles at one moment and change of context
can't be caught properly.
Notes:
This method should not care about change of workdir and expect any
of the arguments.
Args:
folder_entity (Optional[dict[str, Any]]): Folder entity.
task_entity (Optional[dict[str, Any]]): Task entity.
reason (ContextChangeReason): Reason for context change.
project_entity (Optional[dict[str, Any]]): Project entity data.
anatomy (Optional[Anatomy]): Anatomy instance for the project.
Returns:
dict[str, Optional[str]]: Context information with project name,
folder path and task name.
"""
from ayon_core.pipeline import Anatomy
folder_path = folder_entity["path"]
task_name = task_entity["name"]
context = self.get_current_context()
# Don't do anything if context did not change
if (
context["folder_path"] == folder_path
and context["task_name"] == task_name
):
return context
project_name = self.get_current_project_name()
if project_entity is None:
project_entity = ayon_api.get_project(project_name)
if anatomy is None:
anatomy = Anatomy(project_name, project_entity=project_entity)
context_change_data = ContextChangeData(
project_entity,
folder_entity,
task_entity,
reason,
anatomy,
)
self._before_context_change(context_change_data)
self._set_current_context(context_change_data)
self._after_context_change(context_change_data)
return self._emit_context_change_event(
project_name,
folder_path,
task_name,
)
def get_context_title(self):
"""Context title shown for UI purposes.
@ -187,3 +284,91 @@ class HostBase(ABC):
yield
finally:
pass
def _emit_context_change_event(
self,
project_name: str,
folder_path: Optional[str],
task_name: Optional[str],
) -> HostContextData:
"""Emit context change event.
Args:
project_name (str): Name of the project.
folder_path (Optional[str]): Path of the folder.
task_name (Optional[str]): Name of the task.
Returns:
HostContextData: Data send to context change event.
"""
data: HostContextData = {
"project_name": project_name,
"folder_path": folder_path,
"task_name": task_name,
}
emit_event("taskChanged", data)
return data
def _set_current_context(
self, context_change_data: ContextChangeData
) -> None:
"""Method that changes the context in host.
Can be overriden for hosts that do need different handling of context
than using environment variables.
Args:
context_change_data (ContextChangeData): Context change related
data.
"""
project_name = self.get_current_project_name()
folder_path = None
task_name = None
if context_change_data.folder_entity:
folder_path = context_change_data.folder_entity["path"]
if context_change_data.task_entity:
task_name = context_change_data.task_entity["name"]
envs = {
"AYON_PROJECT_NAME": project_name,
"AYON_FOLDER_PATH": folder_path,
"AYON_TASK_NAME": task_name,
}
# Update the Session and environments. Pop from environments all
# keys with value set to None.
for key, value in envs.items():
if value is None:
os.environ.pop(key, None)
else:
os.environ[key] = value
def _before_context_change(self, context_change_data: ContextChangeData):
"""Before context is changed.
This method is called before the context is changed in the host.
Can be overridden to implement host specific logic.
Args:
context_change_data (ContextChangeData): Object with information
about context change.
"""
pass
def _after_context_change(self, context_change_data: ContextChangeData):
"""After context is changed.
This method is called after the context is changed in the host.
Can be overridden to implement host specific logic.
Args:
context_change_data (ContextChangeData): Object with information
about context change.
"""
pass

View file

@ -1,384 +0,0 @@
from abc import ABC, abstractmethod
class MissingMethodsError(ValueError):
"""Exception when host miss some required methods for specific workflow.
Args:
host (HostBase): Host implementation where are missing methods.
missing_methods (list[str]): List of missing methods.
"""
def __init__(self, host, missing_methods):
joined_missing = ", ".join(
['"{}"'.format(item) for item in missing_methods]
)
host_name = getattr(host, "name", None)
if not host_name:
try:
host_name = host.__file__.replace("\\", "/").split("/")[-3]
except Exception:
host_name = str(host)
message = (
"Host \"{}\" miss methods {}".format(host_name, joined_missing)
)
super(MissingMethodsError, self).__init__(message)
class ILoadHost:
"""Implementation requirements to be able use reference of representations.
The load plugins can do referencing even without implementation of methods
here, but switch and removement of containers would not be possible.
Questions:
- Is list container dependency of host or load plugins?
- Should this be directly in HostBase?
- how to find out if referencing is available?
- do we need to know that?
"""
@staticmethod
def get_missing_load_methods(host):
"""Look for missing methods on "old type" host implementation.
Method is used for validation of implemented functions related to
loading. Checks only existence of methods.
Args:
Union[ModuleType, HostBase]: Object of host where to look for
required methods.
Returns:
list[str]: Missing method implementations for loading workflow.
"""
if isinstance(host, ILoadHost):
return []
required = ["ls"]
missing = []
for name in required:
if not hasattr(host, name):
missing.append(name)
return missing
@staticmethod
def validate_load_methods(host):
"""Validate implemented methods of "old type" host for load workflow.
Args:
Union[ModuleType, HostBase]: Object of host to validate.
Raises:
MissingMethodsError: If there are missing methods on host
implementation.
"""
missing = ILoadHost.get_missing_load_methods(host)
if missing:
raise MissingMethodsError(host, missing)
@abstractmethod
def get_containers(self):
"""Retrieve referenced containers from scene.
This can be implemented in hosts where referencing can be used.
Todo:
Rename function to something more self explanatory.
Suggestion: 'get_containers'
Returns:
list[dict]: Information about loaded containers.
"""
pass
# --- Deprecated method names ---
def ls(self):
"""Deprecated variant of 'get_containers'.
Todo:
Remove when all usages are replaced.
"""
return self.get_containers()
class IWorkfileHost(ABC):
"""Implementation requirements to be able use workfile utils and tool."""
@staticmethod
def get_missing_workfile_methods(host):
"""Look for missing methods on "old type" host implementation.
Method is used for validation of implemented functions related to
workfiles. Checks only existence of methods.
Args:
Union[ModuleType, HostBase]: Object of host where to look for
required methods.
Returns:
list[str]: Missing method implementations for workfiles workflow.
"""
if isinstance(host, IWorkfileHost):
return []
required = [
"open_file",
"save_file",
"current_file",
"has_unsaved_changes",
"file_extensions",
"work_root",
]
missing = []
for name in required:
if not hasattr(host, name):
missing.append(name)
return missing
@staticmethod
def validate_workfile_methods(host):
"""Validate methods of "old type" host for workfiles workflow.
Args:
Union[ModuleType, HostBase]: Object of host to validate.
Raises:
MissingMethodsError: If there are missing methods on host
implementation.
"""
missing = IWorkfileHost.get_missing_workfile_methods(host)
if missing:
raise MissingMethodsError(host, missing)
@abstractmethod
def get_workfile_extensions(self):
"""Extensions that can be used as save.
Questions:
This could potentially use 'HostDefinition'.
"""
return []
@abstractmethod
def save_workfile(self, dst_path=None):
"""Save currently opened scene.
Args:
dst_path (str): Where the current scene should be saved. Or use
current path if 'None' is passed.
"""
pass
@abstractmethod
def open_workfile(self, filepath):
"""Open passed filepath in the host.
Args:
filepath (str): Path to workfile.
"""
pass
@abstractmethod
def get_current_workfile(self):
"""Retrieve path to current opened file.
Returns:
str: Path to file which is currently opened.
None: If nothing is opened.
"""
return None
def workfile_has_unsaved_changes(self):
"""Currently opened scene is saved.
Not all hosts can know if current scene is saved because the API of
DCC does not support it.
Returns:
bool: True if scene is saved and False if has unsaved
modifications.
None: Can't tell if workfiles has modifications.
"""
return None
def work_root(self, session):
"""Modify workdir per host.
Default implementation keeps workdir untouched.
Warnings:
We must handle this modification with more sophisticated way
because this can't be called out of DCC so opening of last workfile
(calculated before DCC is launched) is complicated. Also breaking
defined work template is not a good idea.
Only place where it's really used and can make sense is Maya. There
workspace.mel can modify subfolders where to look for maya files.
Args:
session (dict): Session context data.
Returns:
str: Path to new workdir.
"""
return session["AYON_WORKDIR"]
# --- Deprecated method names ---
def file_extensions(self):
"""Deprecated variant of 'get_workfile_extensions'.
Todo:
Remove when all usages are replaced.
"""
return self.get_workfile_extensions()
def save_file(self, dst_path=None):
"""Deprecated variant of 'save_workfile'.
Todo:
Remove when all usages are replaced.
"""
self.save_workfile(dst_path)
def open_file(self, filepath):
"""Deprecated variant of 'open_workfile'.
Todo:
Remove when all usages are replaced.
"""
return self.open_workfile(filepath)
def current_file(self):
"""Deprecated variant of 'get_current_workfile'.
Todo:
Remove when all usages are replaced.
"""
return self.get_current_workfile()
def has_unsaved_changes(self):
"""Deprecated variant of 'workfile_has_unsaved_changes'.
Todo:
Remove when all usages are replaced.
"""
return self.workfile_has_unsaved_changes()
class IPublishHost:
"""Functions related to new creation system in new publisher.
New publisher is not storing information only about each created instance
but also some global data. At this moment are data related only to context
publish plugins but that can extend in future.
"""
@staticmethod
def get_missing_publish_methods(host):
"""Look for missing methods on "old type" host implementation.
Method is used for validation of implemented functions related to
new publish creation. Checks only existence of methods.
Args:
Union[ModuleType, HostBase]: Host module where to look for
required methods.
Returns:
list[str]: Missing method implementations for new publisher
workflow.
"""
if isinstance(host, IPublishHost):
return []
required = [
"get_context_data",
"update_context_data",
"get_context_title",
"get_current_context",
]
missing = []
for name in required:
if not hasattr(host, name):
missing.append(name)
return missing
@staticmethod
def validate_publish_methods(host):
"""Validate implemented methods of "old type" host.
Args:
Union[ModuleType, HostBase]: Host module to validate.
Raises:
MissingMethodsError: If there are missing methods on host
implementation.
"""
missing = IPublishHost.get_missing_publish_methods(host)
if missing:
raise MissingMethodsError(host, missing)
@abstractmethod
def get_context_data(self):
"""Get global data related to creation-publishing from workfile.
These data are not related to any created instance but to whole
publishing context. Not saving/returning them will cause that each
reset of publishing resets all values to default ones.
Context data can contain information about enabled/disabled publish
plugins or other values that can be filled by artist.
Returns:
dict: Context data stored using 'update_context_data'.
"""
pass
@abstractmethod
def update_context_data(self, data, changes):
"""Store global context data to workfile.
Called when some values in context data has changed.
Without storing the values in a way that 'get_context_data' would
return them will each reset of publishing cause loose of filled values
by artist. Best practice is to store values into workfile, if possible.
Args:
data (dict): New data as are.
changes (dict): Only data that has been changed. Each value has
tuple with '(<old>, <new>)' value.
"""
pass
class INewPublisher(IPublishHost):
"""Legacy interface replaced by 'IPublishHost'.
Deprecated:
'INewPublisher' is replaced by 'IPublishHost' please change your
imports.
There is no "reasonable" way hot mark these classes as deprecated
to show warning of wrong import. Deprecated since 3.14.* will be
removed in 3.15.*
"""
pass

View file

@ -0,0 +1,66 @@
from .exceptions import MissingMethodsError
from .workfiles import (
IWorkfileHost,
WorkfileInfo,
PublishedWorkfileInfo,
OpenWorkfileOptionalData,
ListWorkfilesOptionalData,
ListPublishedWorkfilesOptionalData,
SaveWorkfileOptionalData,
CopyWorkfileOptionalData,
CopyPublishedWorkfileOptionalData,
get_open_workfile_context,
get_list_workfiles_context,
get_list_published_workfiles_context,
get_save_workfile_context,
get_copy_workfile_context,
get_copy_repre_workfile_context,
OpenWorkfileContext,
ListWorkfilesContext,
ListPublishedWorkfilesContext,
SaveWorkfileContext,
CopyWorkfileContext,
CopyPublishedWorkfileContext,
)
from .interfaces import (
IPublishHost,
INewPublisher,
ILoadHost,
)
__all__ = (
"MissingMethodsError",
"IWorkfileHost",
"WorkfileInfo",
"PublishedWorkfileInfo",
"OpenWorkfileOptionalData",
"ListWorkfilesOptionalData",
"ListPublishedWorkfilesOptionalData",
"SaveWorkfileOptionalData",
"CopyWorkfileOptionalData",
"CopyPublishedWorkfileOptionalData",
"get_open_workfile_context",
"get_list_workfiles_context",
"get_list_published_workfiles_context",
"get_save_workfile_context",
"get_copy_workfile_context",
"get_copy_repre_workfile_context",
"OpenWorkfileContext",
"ListWorkfilesContext",
"ListPublishedWorkfilesContext",
"SaveWorkfileContext",
"CopyWorkfileContext",
"CopyPublishedWorkfileContext",
"IPublishHost",
"INewPublisher",
"ILoadHost",
)

View file

@ -0,0 +1,15 @@
class MissingMethodsError(ValueError):
"""Exception when host miss some required methods for a specific workflow.
Args:
host (HostBase): Host implementation where are missing methods.
missing_methods (list[str]): List of missing methods.
"""
def __init__(self, host, missing_methods):
joined_missing = ", ".join(
['"{}"'.format(item) for item in missing_methods]
)
super().__init__(
f"Host \"{host.name}\" miss methods {joined_missing}"
)

View file

@ -0,0 +1,189 @@
from abc import abstractmethod
from ayon_core.host.abstract import AbstractHost
from .exceptions import MissingMethodsError
class ILoadHost(AbstractHost):
"""Implementation requirements to be able use reference of representations.
The load plugins can do referencing even without implementation of methods
here, but switch and removement of containers would not be possible.
Questions:
- Is list container dependency of host or load plugins?
- Should this be directly in HostBase?
- how to find out if referencing is available?
- do we need to know that?
"""
@staticmethod
def get_missing_load_methods(host):
"""Look for missing methods on "old type" host implementation.
Method is used for validation of implemented functions related to
loading. Checks only existence of methods.
Args:
Union[ModuleType, AbstractHost]: Object of host where to look for
required methods.
Returns:
list[str]: Missing method implementations for loading workflow.
"""
if isinstance(host, ILoadHost):
return []
required = ["ls"]
missing = []
for name in required:
if not hasattr(host, name):
missing.append(name)
return missing
@staticmethod
def validate_load_methods(host):
"""Validate implemented methods of "old type" host for load workflow.
Args:
Union[ModuleType, AbstractHost]: Object of host to validate.
Raises:
MissingMethodsError: If there are missing methods on host
implementation.
"""
missing = ILoadHost.get_missing_load_methods(host)
if missing:
raise MissingMethodsError(host, missing)
@abstractmethod
def get_containers(self):
"""Retrieve referenced containers from scene.
This can be implemented in hosts where referencing can be used.
Todo:
Rename function to something more self explanatory.
Suggestion: 'get_containers'
Returns:
list[dict]: Information about loaded containers.
"""
pass
# --- Deprecated method names ---
def ls(self):
"""Deprecated variant of 'get_containers'.
Todo:
Remove when all usages are replaced.
"""
return self.get_containers()
class IPublishHost(AbstractHost):
"""Functions related to new creation system in new publisher.
New publisher is not storing information only about each created instance
but also some global data. At this moment are data related only to context
publish plugins but that can extend in future.
"""
@staticmethod
def get_missing_publish_methods(host):
"""Look for missing methods on "old type" host implementation.
Method is used for validation of implemented functions related to
new publish creation. Checks only existence of methods.
Args:
Union[ModuleType, AbstractHost]: Host module where to look for
required methods.
Returns:
list[str]: Missing method implementations for new publisher
workflow.
"""
if isinstance(host, IPublishHost):
return []
required = [
"get_context_data",
"update_context_data",
"get_context_title",
"get_current_context",
]
missing = []
for name in required:
if not hasattr(host, name):
missing.append(name)
return missing
@staticmethod
def validate_publish_methods(host):
"""Validate implemented methods of "old type" host.
Args:
Union[ModuleType, AbstractHost]: Host module to validate.
Raises:
MissingMethodsError: If there are missing methods on host
implementation.
"""
missing = IPublishHost.get_missing_publish_methods(host)
if missing:
raise MissingMethodsError(host, missing)
@abstractmethod
def get_context_data(self):
"""Get global data related to creation-publishing from workfile.
These data are not related to any created instance but to whole
publishing context. Not saving/returning them will cause that each
reset of publishing resets all values to default ones.
Context data can contain information about enabled/disabled publish
plugins or other values that can be filled by artist.
Returns:
dict: Context data stored using 'update_context_data'.
"""
pass
@abstractmethod
def update_context_data(self, data, changes):
"""Store global context data to workfile.
Called when some values in context data has changed.
Without storing the values in a way that 'get_context_data' would
return them will each reset of publishing cause loose of filled values
by artist. Best practice is to store values into workfile, if possible.
Args:
data (dict): New data as are.
changes (dict): Only data that has been changed. Each value has
tuple with '(<old>, <new>)' value.
"""
pass
class INewPublisher(IPublishHost):
"""Legacy interface replaced by 'IPublishHost'.
Deprecated:
'INewPublisher' is replaced by 'IPublishHost' please change your
imports.
There is no "reasonable" way hot mark these classes as deprecated
to show warning of wrong import. Deprecated since 3.14.* will be
removed in 3.15.*
"""
pass

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,7 @@
from typing import Optional, TypedDict
class HostContextData(TypedDict):
project_name: str
folder_path: Optional[str]
task_name: Optional[str]

View file

@ -11,6 +11,7 @@ from .local_settings import (
get_launcher_storage_dir,
get_addons_resources_dir,
get_local_site_id,
get_ayon_user_entity,
get_ayon_username,
)
from .ayon_connection import initialize_ayon_connection
@ -73,6 +74,7 @@ from .log import (
)
from .path_templates import (
DefaultKeysDict,
TemplateUnsolved,
StringTemplate,
FormatObject,
@ -148,6 +150,7 @@ __all__ = [
"get_launcher_storage_dir",
"get_addons_resources_dir",
"get_local_site_id",
"get_ayon_user_entity",
"get_ayon_username",
"initialize_ayon_connection",
@ -228,6 +231,7 @@ __all__ = [
"get_version_from_path",
"get_last_version_from_path",
"DefaultKeysDict",
"TemplateUnsolved",
"StringTemplate",
"FormatObject",

View file

@ -5,32 +5,46 @@ import json
import platform
import configparser
import warnings
import copy
from datetime import datetime
from abc import ABC, abstractmethod
from functools import lru_cache
from typing import Optional, Any
import platformdirs
import ayon_api
from .cache import NestedCacheItem, CacheItem
_PLACEHOLDER = object()
def _get_ayon_appdirs(*args):
# TODO should use 'KeyError' or 'Exception' as base
class RegistryItemNotFound(ValueError):
"""Raised when the item is not found in the keyring."""
class _Cache:
username = None
user_entities_by_name = NestedCacheItem()
def _get_ayon_appdirs(*args: str) -> str:
return os.path.join(
platformdirs.user_data_dir("AYON", "Ynput"),
*args
)
def get_ayon_appdirs(*args):
def get_ayon_appdirs(*args: str) -> str:
"""Local app data directory of AYON client.
Deprecated:
Use 'get_launcher_local_dir' or 'get_launcher_storage_dir' based on
use-case. Deprecation added 24/08/09 (0.4.4-dev.1).
a use-case. Deprecation added 24/08/09 (0.4.4-dev.1).
Args:
*args (Iterable[str]): Subdirectories/files in local app data dir.
*args (Iterable[str]): Subdirectories/files in the local app data dir.
Returns:
str: Path to directory/file in local app data dir.
@ -48,7 +62,7 @@ def get_ayon_appdirs(*args):
def get_launcher_storage_dir(*subdirs: str) -> str:
"""Get storage directory for launcher.
"""Get a storage directory for launcher.
Storage directory is used for storing shims, addons, dependencies, etc.
@ -73,14 +87,14 @@ def get_launcher_storage_dir(*subdirs: str) -> str:
def get_launcher_local_dir(*subdirs: str) -> str:
"""Get local directory for launcher.
"""Get a local directory for launcher.
Local directory is used for storing machine or user specific data.
Local directory is used for storing machine or user-specific data.
The location is user specific.
The location is user-specific.
Note:
This function should be called at least once on bootstrap.
This function should be called at least once on the bootstrap.
Args:
*subdirs (str): Subdirectories relative to local dir.
@ -97,7 +111,7 @@ def get_launcher_local_dir(*subdirs: str) -> str:
def get_addons_resources_dir(addon_name: str, *args) -> str:
"""Get directory for storing resources for addons.
"""Get a directory for storing resources for addons.
Some addons might need to store ad-hoc resources that are not part of
addon client package (e.g. because of size). Studio might define
@ -107,7 +121,7 @@ def get_addons_resources_dir(addon_name: str, *args) -> str:
Args:
addon_name (str): Addon name.
*args (str): Subfolders in resources directory.
*args (str): Subfolders in the resources directory.
Returns:
str: Path to resources directory.
@ -120,6 +134,10 @@ def get_addons_resources_dir(addon_name: str, *args) -> str:
return os.path.join(addons_resources_dir, addon_name, *args)
class _FakeException(Exception):
"""Placeholder exception used if real exception is not available."""
class AYONSecureRegistry:
"""Store information using keyring.
@ -130,9 +148,10 @@ class AYONSecureRegistry:
identify which data were created by AYON.
Args:
name(str): Name of registry used as identifier for data.
name(str): Name of registry used as the identifier for data.
"""
def __init__(self, name):
def __init__(self, name: str) -> None:
try:
import keyring
@ -148,13 +167,12 @@ class AYONSecureRegistry:
keyring.set_keyring(Windows.WinVaultKeyring())
# Force "AYON" prefix
self._name = "/".join(("AYON", name))
self._name = f"AYON/{name}"
def set_item(self, name, value):
# type: (str, str) -> None
"""Set sensitive item into system's keyring.
def set_item(self, name: str, value: str) -> None:
"""Set sensitive item into the system's keyring.
This uses `Keyring module`_ to save sensitive stuff into system's
This uses `Keyring module`_ to save sensitive stuff into the system's
keyring.
Args:
@ -168,22 +186,26 @@ class AYONSecureRegistry:
import keyring
keyring.set_password(self._name, name, value)
self.get_item.cache_clear()
@lru_cache(maxsize=32)
def get_item(self, name, default=_PLACEHOLDER):
"""Get value of sensitive item from system's keyring.
def get_item(
self, name: str, default: Any = _PLACEHOLDER
) -> Optional[str]:
"""Get value of sensitive item from the system's keyring.
See also `Keyring module`_
Args:
name (str): Name of the item.
default (Any): Default value if item is not available.
default (Any): Default value if the item is not available.
Returns:
value (str): Value of the item.
Raises:
ValueError: If item doesn't exist and default is not defined.
RegistryItemNotFound: If the item doesn't exist and default
is not defined.
.. _Keyring module:
https://github.com/jaraco/keyring
@ -191,21 +213,29 @@ class AYONSecureRegistry:
"""
import keyring
value = keyring.get_password(self._name, name)
# Capture 'ItemNotFoundException' exception (on linux)
try:
from secretstorage.exceptions import ItemNotFoundException
except ImportError:
ItemNotFoundException = _FakeException
try:
value = keyring.get_password(self._name, name)
except ItemNotFoundException:
value = None
if value is not None:
return value
if default is not _PLACEHOLDER:
return default
# NOTE Should raise `KeyError`
raise ValueError(
"Item {}:{} does not exist in keyring.".format(self._name, name)
raise RegistryItemNotFound(
f"Item {self._name}:{name} not found in keyring."
)
def delete_item(self, name):
# type: (str) -> None
"""Delete value stored in system's keyring.
def delete_item(self, name: str) -> None:
"""Delete value stored in the system's keyring.
See also `Keyring module`_
@ -223,47 +253,38 @@ class AYONSecureRegistry:
class ASettingRegistry(ABC):
"""Abstract class defining structure of **SettingRegistry** class.
It is implementing methods to store secure items into keyring, otherwise
mechanism for storing common items must be implemented in abstract
methods.
Attributes:
_name (str): Registry names.
"""Abstract class to defining structure of registry class.
"""
def __init__(self, name):
# type: (str) -> ASettingRegistry
super(ASettingRegistry, self).__init__()
def __init__(self, name: str) -> None:
self._name = name
self._items = {}
def set_item(self, name, value):
# type: (str, str) -> None
"""Set item to settings registry.
Args:
name (str): Name of the item.
value (str): Value of the item.
"""
self._set_item(name, value)
@abstractmethod
def _set_item(self, name, value):
# type: (str, str) -> None
# Implement it
pass
def _get_item(self, name: str) -> Any:
"""Get item value from registry."""
def __setitem__(self, name, value):
self._items[name] = value
@abstractmethod
def _set_item(self, name: str, value: str) -> None:
"""Set item value to registry."""
@abstractmethod
def _delete_item(self, name: str) -> None:
"""Delete item from registry."""
def __getitem__(self, name: str) -> Any:
return self._get_item(name)
def __setitem__(self, name: str, value: str) -> None:
self._set_item(name, value)
def get_item(self, name):
# type: (str) -> str
def __delitem__(self, name: str) -> None:
self._delete_item(name)
@property
def name(self) -> str:
return self._name
def get_item(self, name: str) -> str:
"""Get item from settings registry.
Args:
@ -273,22 +294,22 @@ class ASettingRegistry(ABC):
value (str): Value of the item.
Raises:
ValueError: If item doesn't exist.
RegistryItemNotFound: If the item doesn't exist.
"""
return self._get_item(name)
@abstractmethod
def _get_item(self, name):
# type: (str) -> str
# Implement it
pass
def set_item(self, name: str, value: str) -> None:
"""Set item to settings registry.
def __getitem__(self, name):
return self._get_item(name)
Args:
name (str): Name of the item.
value (str): Value of the item.
def delete_item(self, name):
# type: (str) -> None
"""
self._set_item(name, value)
def delete_item(self, name: str) -> None:
"""Delete item from settings registry.
Args:
@ -297,16 +318,6 @@ class ASettingRegistry(ABC):
"""
self._delete_item(name)
@abstractmethod
def _delete_item(self, name):
# type: (str) -> None
"""Delete item from settings."""
pass
def __delitem__(self, name):
del self._items[name]
self._delete_item(name)
class IniSettingRegistry(ASettingRegistry):
"""Class using :mod:`configparser`.
@ -314,20 +325,17 @@ class IniSettingRegistry(ASettingRegistry):
This class is using :mod:`configparser` (ini) files to store items.
"""
def __init__(self, name, path):
# type: (str, str) -> IniSettingRegistry
super(IniSettingRegistry, self).__init__(name)
def __init__(self, name: str, path: str) -> None:
super().__init__(name)
# get registry file
self._registry_file = os.path.join(path, "{}.ini".format(name))
self._registry_file = os.path.join(path, f"{name}.ini")
if not os.path.exists(self._registry_file):
with open(self._registry_file, mode="w") as cfg:
print("# Settings registry", cfg)
now = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
print("# {}".format(now), cfg)
print(f"# {now}", cfg)
def set_item_section(self, section, name, value):
# type: (str, str, str) -> None
def set_item_section(self, section: str, name: str, value: str) -> None:
"""Set item to specific section of ini registry.
If section doesn't exists, it is created.
@ -350,12 +358,10 @@ class IniSettingRegistry(ASettingRegistry):
with open(self._registry_file, mode="w") as cfg:
config.write(cfg)
def _set_item(self, name, value):
# type: (str, str) -> None
def _set_item(self, name: str, value: str) -> None:
self.set_item_section("MAIN", name, value)
def set_item(self, name, value):
# type: (str, str) -> None
def set_item(self, name: str, value: str) -> None:
"""Set item to settings ini file.
This saves item to ``DEFAULT`` section of ini as each item there
@ -368,10 +374,9 @@ class IniSettingRegistry(ASettingRegistry):
"""
# this does the some, overridden just for different docstring.
# we cast value to str as ini options values must be strings.
super(IniSettingRegistry, self).set_item(name, str(value))
super().set_item(name, str(value))
def get_item(self, name):
# type: (str) -> str
def get_item(self, name: str) -> str:
"""Gets item from settings ini file.
This gets settings from ``DEFAULT`` section of ini file as each item
@ -384,19 +389,18 @@ class IniSettingRegistry(ASettingRegistry):
str: Value of item.
Raises:
ValueError: If value doesn't exist.
RegistryItemNotFound: If value doesn't exist.
"""
return super(IniSettingRegistry, self).get_item(name)
return super().get_item(name)
@lru_cache(maxsize=32)
def get_item_from_section(self, section, name):
# type: (str, str) -> str
def get_item_from_section(self, section: str, name: str) -> str:
"""Get item from section of ini file.
This will read ini file and try to get item value from specified
section. If that section or item doesn't exist, :exc:`ValueError`
is risen.
section. If that section or item doesn't exist,
:exc:`RegistryItemNotFound` is risen.
Args:
section (str): Name of ini section.
@ -406,7 +410,7 @@ class IniSettingRegistry(ASettingRegistry):
str: Item value.
Raises:
ValueError: If value doesn't exist.
RegistryItemNotFound: If value doesn't exist.
"""
config = configparser.ConfigParser()
@ -414,16 +418,15 @@ class IniSettingRegistry(ASettingRegistry):
try:
value = config[section][name]
except KeyError:
raise ValueError(
"Registry doesn't contain value {}:{}".format(section, name))
raise RegistryItemNotFound(
f"Registry doesn't contain value {section}:{name}"
)
return value
def _get_item(self, name):
# type: (str) -> str
def _get_item(self, name: str) -> str:
return self.get_item_from_section("MAIN", name)
def delete_item_from_section(self, section, name):
# type: (str, str) -> None
def delete_item_from_section(self, section: str, name: str) -> None:
"""Delete item from section in ini file.
Args:
@ -431,7 +434,7 @@ class IniSettingRegistry(ASettingRegistry):
name (str): Name of the item.
Raises:
ValueError: If item doesn't exist.
RegistryItemNotFound: If the item doesn't exist.
"""
self.get_item_from_section.cache_clear()
@ -440,8 +443,9 @@ class IniSettingRegistry(ASettingRegistry):
try:
_ = config[section][name]
except KeyError:
raise ValueError(
"Registry doesn't contain value {}:{}".format(section, name))
raise RegistryItemNotFound(
f"Registry doesn't contain value {section}:{name}"
)
config.remove_option(section, name)
# if section is empty, delete it
@ -457,29 +461,28 @@ class IniSettingRegistry(ASettingRegistry):
class JSONSettingRegistry(ASettingRegistry):
"""Class using json file as storage."""
"""Class using a json file as storage."""
def __init__(self, name, path):
# type: (str, str) -> JSONSettingRegistry
super(JSONSettingRegistry, self).__init__(name)
#: str: name of registry file
self._registry_file = os.path.join(path, "{}.json".format(name))
def __init__(self, name: str, path: str) -> None:
super().__init__(name)
self._registry_file = os.path.join(path, f"{name}.json")
now = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
header = {
"__metadata__": {"generated": now},
"registry": {}
}
if not os.path.exists(os.path.dirname(self._registry_file)):
os.makedirs(os.path.dirname(self._registry_file), exist_ok=True)
# Use 'os.path.dirname' in case someone uses slashes in 'name'
dirpath = os.path.dirname(self._registry_file)
if not os.path.exists(dirpath):
os.makedirs(dirpath, exist_ok=True)
if not os.path.exists(self._registry_file):
with open(self._registry_file, mode="w") as cfg:
json.dump(header, cfg, indent=4)
@lru_cache(maxsize=32)
def _get_item(self, name):
# type: (str) -> object
"""Get item value from registry json.
def _get_item(self, name: str) -> str:
"""Get item value from the registry.
Note:
See :meth:`ayon_core.lib.JSONSettingRegistry.get_item`
@ -490,29 +493,13 @@ class JSONSettingRegistry(ASettingRegistry):
try:
value = data["registry"][name]
except KeyError:
raise ValueError(
"Registry doesn't contain value {}".format(name))
raise RegistryItemNotFound(
f"Registry doesn't contain value {name}"
)
return value
def get_item(self, name):
# type: (str) -> object
"""Get item value from registry json.
Args:
name (str): Name of the item.
Returns:
value of the item
Raises:
ValueError: If item is not found in registry file.
"""
return self._get_item(name)
def _set_item(self, name, value):
# type: (str, object) -> None
"""Set item value to registry json.
def _set_item(self, name: str, value: str) -> None:
"""Set item value to the registry.
Note:
See :meth:`ayon_core.lib.JSONSettingRegistry.set_item`
@ -524,41 +511,39 @@ class JSONSettingRegistry(ASettingRegistry):
cfg.truncate(0)
cfg.seek(0)
json.dump(data, cfg, indent=4)
def set_item(self, name, value):
# type: (str, object) -> None
"""Set item and its value into json registry file.
Args:
name (str): name of the item.
value (Any): value of the item.
"""
self._set_item(name, value)
def _delete_item(self, name):
# type: (str) -> None
self._get_item.cache_clear()
def _delete_item(self, name: str) -> None:
with open(self._registry_file, "r+") as cfg:
data = json.load(cfg)
del data["registry"][name]
cfg.truncate(0)
cfg.seek(0)
json.dump(data, cfg, indent=4)
self._get_item.cache_clear()
class AYONSettingsRegistry(JSONSettingRegistry):
"""Class handling AYON general settings registry.
Args:
name (Optional[str]): Name of the registry.
"""
name (Optional[str]): Name of the registry. Using 'None' or not
passing name is deprecated.
def __init__(self, name=None):
"""
def __init__(self, name: Optional[str] = None) -> None:
if not name:
name = "AYON_settings"
warnings.warn(
(
"Used 'AYONSettingsRegistry' without 'name' argument."
" The argument will be required in future versions."
),
DeprecationWarning,
stacklevel=2,
)
path = get_launcher_storage_dir()
super(AYONSettingsRegistry, self).__init__(name, path)
super().__init__(name, path)
def get_local_site_id():
@ -588,13 +573,76 @@ def get_local_site_id():
return site_id
def _get_ayon_service_username() -> Optional[str]:
# TODO @iLLiCiTiT - do not use private attribute of 'ServerAPI', rather
# use public method to get username from connection stack.
con = ayon_api.get_server_api_connection()
user_stack = getattr(con, "_as_user_stack", None)
if user_stack is None:
return None
return user_stack.username
def get_ayon_user_entity(username: Optional[str] = None) -> dict[str, Any]:
"""AYON user entity used for templates and publishing.
Note:
Usually only service and admin users can receive the full user entity.
Args:
username (Optional[str]): Username of the user. If not passed, then
the current user in 'ayon_api' is used.
Returns:
dict[str, Any]: User entity.
"""
service_username = _get_ayon_service_username()
# Handle service user handling first
if service_username:
if username is None:
username = service_username
cache: CacheItem = _Cache.user_entities_by_name[username]
if not cache.is_valid:
if username == service_username:
user = ayon_api.get_user()
else:
user = ayon_api.get_user(username)
cache.update_data(user)
return copy.deepcopy(cache.get_data())
# Cache current user
current_user = None
if _Cache.username is None:
current_user = ayon_api.get_user()
_Cache.username = current_user["name"]
if username is None:
username = _Cache.username
cache: CacheItem = _Cache.user_entities_by_name[username]
if not cache.is_valid:
user = None
if username == _Cache.username:
if current_user is None:
current_user = ayon_api.get_user()
user = current_user
if user is None:
user = ayon_api.get_user(username)
cache.update_data(user)
return copy.deepcopy(cache.get_data())
def get_ayon_username():
"""AYON username used for templates and publishing.
Uses curet ayon api username.
Uses current ayon api username.
Returns:
str: Username.
"""
return ayon_api.get_user()["name"]
user = get_ayon_user_entity()
return user["name"]

View file

@ -1,17 +1,17 @@
from __future__ import annotations
import os
import re
import copy
import numbers
import warnings
import platform
from string import Formatter
import typing
from typing import List, Dict, Any, Set
if typing.TYPE_CHECKING:
from typing import Union
from typing import Any, Union, Iterable
SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)")
OPTIONAL_PATTERN = re.compile(r"(<.*?[^{0]*>)[^0-9]*?")
_IS_WINDOWS = platform.system().lower() == "windows"
class TemplateUnsolved(Exception):
@ -42,6 +42,54 @@ class TemplateUnsolved(Exception):
)
class DefaultKeysDict(dict):
"""Dictionary that supports the default key to use for str conversion.
Is helpful for changes of a key in a template from string to dictionary
for example '{folder}' -> '{folder[name]}'.
>>> data = DefaultKeysDict(
>>> "name",
>>> {"folder": {"name": "FolderName"}}
>>> )
>>> print("{folder[name]}".format_map(data))
FolderName
>>> print("{folder}".format_map(data))
FolderName
Args:
default_key (Union[str, Iterable[str]]): Default key to use for str
conversion. Can also expect multiple keys for more nested
dictionary.
"""
def __init__(
self, default_keys: Union[str, Iterable[str]], *args, **kwargs
) -> None:
if isinstance(default_keys, str):
default_keys = [default_keys]
else:
default_keys = list(default_keys)
if not default_keys:
raise ValueError(
"Default key must be set. Got empty default keys."
)
self._default_keys = default_keys
super().__init__(*args, **kwargs)
def __str__(self) -> str:
return str(self.get_default_value())
def get_default_keys(self) -> list[str]:
return list(self._default_keys)
def get_default_value(self) -> Any:
value = self
for key in self._default_keys:
value = value[key]
return value
class StringTemplate:
"""String that can be formatted."""
def __init__(self, template: str):
@ -82,7 +130,7 @@ class StringTemplate:
if substr:
new_parts.append(substr)
self._parts: List["Union[str, OptionalPart, FormattingPart]"] = (
self._parts: list[Union[str, OptionalPart, FormattingPart]] = (
self.find_optional_parts(new_parts)
)
@ -103,7 +151,7 @@ class StringTemplate:
def template(self) -> str:
return self._template
def format(self, data: Dict[str, Any]) -> "TemplateResult":
def format(self, data: dict[str, Any]) -> "TemplateResult":
""" Figure out with whole formatting.
Separate advanced keys (*Like '{project[name]}') from string which must
@ -143,29 +191,29 @@ class StringTemplate:
invalid_types
)
def format_strict(self, data: Dict[str, Any]) -> "TemplateResult":
def format_strict(self, data: dict[str, Any]) -> "TemplateResult":
result = self.format(data)
result.validate()
return result
@classmethod
def format_template(
cls, template: str, data: Dict[str, Any]
cls, template: str, data: dict[str, Any]
) -> "TemplateResult":
objected_template = cls(template)
return objected_template.format(data)
@classmethod
def format_strict_template(
cls, template: str, data: Dict[str, Any]
cls, template: str, data: dict[str, Any]
) -> "TemplateResult":
objected_template = cls(template)
return objected_template.format_strict(data)
@staticmethod
def find_optional_parts(
parts: List["Union[str, FormattingPart]"]
) -> List["Union[str, OptionalPart, FormattingPart]"]:
parts: list[Union[str, FormattingPart]]
) -> list[Union[str, OptionalPart, FormattingPart]]:
new_parts = []
tmp_parts = {}
counted_symb = -1
@ -190,7 +238,7 @@ class StringTemplate:
len(parts) == 1
and isinstance(parts[0], str)
):
value = "<{}>".format(parts[0])
value = f"<{parts[0]}>"
else:
value = OptionalPart(parts)
@ -221,7 +269,7 @@ class TemplateResult(str):
only used keys.
solved (bool): For check if all required keys were filled.
template (str): Original template.
missing_keys (Iterable[str]): Missing keys that were not in the data.
missing_keys (list[str]): Missing keys that were not in the data.
Include missing optional keys.
invalid_types (dict): When key was found in data, but value had not
allowed DataType. Allowed data types are `numbers`,
@ -230,11 +278,11 @@ class TemplateResult(str):
of number.
"""
used_values: Dict[str, Any] = None
used_values: dict[str, Any] = None
solved: bool = None
template: str = None
missing_keys: List[str] = None
invalid_types: Dict[str, Any] = None
missing_keys: list[str] = None
invalid_types: dict[str, Any] = None
def __new__(
cls, filled_template, template, solved,
@ -277,8 +325,11 @@ class TemplateResult(str):
"""Convert to normalized path."""
cls = self.__class__
path = str(self)
if _IS_WINDOWS:
path = path.replace("\\", "/")
return cls(
os.path.normpath(self.replace("\\", "/")),
os.path.normpath(path),
self.template,
self.solved,
self.used_values,
@ -291,21 +342,21 @@ class TemplatePartResult:
"""Result to store result of template parts."""
def __init__(self, optional: bool = False):
# Missing keys or invalid value types of required keys
self._missing_keys: Set[str] = set()
self._invalid_types: Dict[str, Any] = {}
self._missing_keys: set[str] = set()
self._invalid_types: dict[str, Any] = {}
# Missing keys or invalid value types of optional keys
self._missing_optional_keys: Set[str] = set()
self._invalid_optional_types: Dict[str, Any] = {}
self._missing_optional_keys: set[str] = set()
self._invalid_optional_types: dict[str, Any] = {}
# Used values stored by key with origin type
# - key without any padding or key modifiers
# - value from filling data
# Example: {"version": 1}
self._used_values: Dict[str, Any] = {}
self._used_values: dict[str, Any] = {}
# Used values stored by key with all modifirs
# - value is already formatted string
# Example: {"version:0>3": "001"}
self._really_used_values: Dict[str, Any] = {}
self._really_used_values: dict[str, Any] = {}
# Concatenated string output after formatting
self._output: str = ""
# Is this result from optional part
@ -331,8 +382,9 @@ class TemplatePartResult:
self._really_used_values.update(other.really_used_values)
else:
raise TypeError("Cannot add data from \"{}\" to \"{}\"".format(
str(type(other)), self.__class__.__name__)
raise TypeError(
f"Cannot add data from \"{type(other)}\""
f" to \"{self.__class__.__name__}\""
)
@property
@ -357,40 +409,41 @@ class TemplatePartResult:
return self._output
@property
def missing_keys(self) -> Set[str]:
def missing_keys(self) -> set[str]:
return self._missing_keys
@property
def missing_optional_keys(self) -> Set[str]:
def missing_optional_keys(self) -> set[str]:
return self._missing_optional_keys
@property
def invalid_types(self) -> Dict[str, Any]:
def invalid_types(self) -> dict[str, Any]:
return self._invalid_types
@property
def invalid_optional_types(self) -> Dict[str, Any]:
def invalid_optional_types(self) -> dict[str, Any]:
return self._invalid_optional_types
@property
def really_used_values(self) -> Dict[str, Any]:
def really_used_values(self) -> dict[str, Any]:
return self._really_used_values
@property
def realy_used_values(self) -> Dict[str, Any]:
def realy_used_values(self) -> dict[str, Any]:
warnings.warn(
"Property 'realy_used_values' is deprecated."
" Use 'really_used_values' instead.",
DeprecationWarning
DeprecationWarning,
stacklevel=2,
)
return self._really_used_values
@property
def used_values(self) -> Dict[str, Any]:
def used_values(self) -> dict[str, Any]:
return self._used_values
@staticmethod
def split_keys_to_subdicts(values: Dict[str, Any]) -> Dict[str, Any]:
def split_keys_to_subdicts(values: dict[str, Any]) -> dict[str, Any]:
output = {}
formatter = Formatter()
for key, value in values.items():
@ -405,7 +458,7 @@ class TemplatePartResult:
data[last_key] = value
return output
def get_clean_used_values(self) -> Dict[str, Any]:
def get_clean_used_values(self) -> dict[str, Any]:
new_used_values = {}
for key, value in self.used_values.items():
if isinstance(value, FormatObject):
@ -421,7 +474,8 @@ class TemplatePartResult:
warnings.warn(
"Method 'add_realy_used_value' is deprecated."
" Use 'add_really_used_value' instead.",
DeprecationWarning
DeprecationWarning,
stacklevel=2,
)
self.add_really_used_value(key, value)
@ -474,7 +528,7 @@ class FormattingPart:
self,
field_name: str,
format_spec: str,
conversion: "Union[str, None]",
conversion: Union[str, None],
):
format_spec_v = ""
if format_spec:
@ -541,7 +595,7 @@ class FormattingPart:
return not queue
@staticmethod
def keys_to_template_base(keys: List[str]):
def keys_to_template_base(keys: list[str]):
if not keys:
return None
# Create copy of keys
@ -551,7 +605,7 @@ class FormattingPart:
return f"{template_base}{joined_keys}"
def format(
self, data: Dict[str, Any], result: TemplatePartResult
self, data: dict[str, Any], result: TemplatePartResult
) -> TemplatePartResult:
"""Format the formattings string.
@ -630,6 +684,12 @@ class FormattingPart:
result.add_output(self.template)
return result
if isinstance(value, DefaultKeysDict):
try:
value = value.get_default_value()
except KeyError:
pass
if not self.validate_value_type(value):
result.add_invalid_type(key, value)
result.add_output(self.template)
@ -682,23 +742,25 @@ class OptionalPart:
def __init__(
self,
parts: List["Union[str, OptionalPart, FormattingPart]"]
parts: list[Union[str, OptionalPart, FormattingPart]]
):
self._parts: List["Union[str, OptionalPart, FormattingPart]"] = parts
self._parts: list[Union[str, OptionalPart, FormattingPart]] = parts
@property
def parts(self) -> List["Union[str, OptionalPart, FormattingPart]"]:
def parts(self) -> list[Union[str, OptionalPart, FormattingPart]]:
return self._parts
def __str__(self) -> str:
return "<{}>".format("".join([str(p) for p in self._parts]))
joined_parts = "".join([str(p) for p in self._parts])
return f"<{joined_parts}>"
def __repr__(self) -> str:
return "<Optional:{}>".format("".join([str(p) for p in self._parts]))
joined_parts = "".join([str(p) for p in self._parts])
return f"<Optional:{joined_parts}>"
def format(
self,
data: Dict[str, Any],
data: dict[str, Any],
result: TemplatePartResult,
) -> TemplatePartResult:
new_result = TemplatePartResult(True)

View file

@ -1,11 +1,9 @@
# -*- coding: utf-8 -*-
"""AYON plugin tools."""
import os
import logging
import re
import collections
log = logging.getLogger(__name__)
CAPITALIZE_REGEX = re.compile(r"[a-zA-Z0-9]")

View file

@ -6,6 +6,8 @@ import collections
import tempfile
import subprocess
import platform
import warnings
import functools
from typing import Optional
import xml.etree.ElementTree
@ -67,6 +69,47 @@ VIDEO_EXTENSIONS = {
}
def deprecated(new_destination):
"""Mark functions as deprecated.
It will result in a warning being emitted when the function is used.
"""
func = None
if callable(new_destination):
func = new_destination
new_destination = None
def _decorator(decorated_func):
if new_destination is None:
warning_message = (
" Please check content of deprecated function to figure out"
" possible replacement."
)
else:
warning_message = " Please replace your usage with '{}'.".format(
new_destination
)
@functools.wraps(decorated_func)
def wrapper(*args, **kwargs):
warnings.simplefilter("always", DeprecationWarning)
warnings.warn(
(
"Call to deprecated function '{}'"
"\nFunction was moved or removed.{}"
).format(decorated_func.__name__, warning_message),
category=DeprecationWarning,
stacklevel=4
)
return decorated_func(*args, **kwargs)
return wrapper
if func is None:
return _decorator
return _decorator(func)
class MissingRGBAChannelsError(ValueError):
"""Raised when we can't find channels to use as RGBA for conversion in
input media.
@ -393,12 +436,14 @@ def get_review_info_by_layer_name(channel_names):
channels_by_layer_name[layer_name][channel] = channel_name
# Put empty layer to the beginning of the list
# Put empty layer or 'rgba' to the beginning of the list
# - if input has R, G, B, A channels they should be used for review
def _sort(_layer_name: str) -> int:
# Prioritize "" layer name
# Prioritize layers with RGB channels
order = 0
if _layer_name == "rgba":
order -= 11
if _layer_name == "":
order -= 10
@ -1024,6 +1069,8 @@ def convert_ffprobe_fps_to_float(value):
return dividend / divisor
# --- Deprecated functions ---
@deprecated("oiio_color_convert")
def convert_colorspace(
input_path,
output_path,
@ -1035,7 +1082,62 @@ def convert_colorspace(
additional_command_args=None,
logger=None,
):
"""Convert source file from one color space to another.
"""DEPRECATED function use `oiio_color_convert` instead
Args:
input_path (str): Path to input file that should be converted.
output_path (str): Path to output file where result will be stored.
config_path (str): Path to OCIO config file.
source_colorspace (str): OCIO valid color space of source files.
target_colorspace (str, optional): OCIO valid target color space.
If filled, 'view' and 'display' must be empty.
view (str, optional): Name for target viewer space (OCIO valid).
Both 'view' and 'display' must be filled
(if not 'target_colorspace').
display (str, optional): Name for target display-referred
reference space. Both 'view' and 'display' must be filled
(if not 'target_colorspace').
additional_command_args (list, optional): Additional arguments
for oiiotool (like binary depth for .dpx).
logger (logging.Logger, optional): Logger used for logging.
Returns:
None: Function returns None.
Raises:
ValueError: If parameters are misconfigured.
"""
return oiio_color_convert(
input_path,
output_path,
config_path,
source_colorspace,
target_colorspace=target_colorspace,
target_display=display,
target_view=view,
additional_command_args=additional_command_args,
logger=logger,
)
def oiio_color_convert(
input_path,
output_path,
config_path,
source_colorspace,
source_display=None,
source_view=None,
target_colorspace=None,
target_display=None,
target_view=None,
additional_command_args=None,
logger=None,
):
"""Transcode source file to other with colormanagement.
Oiiotool also support additional arguments for transcoding.
For more information, see the official documentation:
https://openimageio.readthedocs.io/en/latest/oiiotool.html
Args:
input_path (str): Path that should be converted. It is expected that
@ -1047,17 +1149,26 @@ def convert_colorspace(
sequence in 'file.FRAMESTART-FRAMEEND#.ext', `output.1-3#.tif`)
config_path (str): path to OCIO config file
source_colorspace (str): ocio valid color space of source files
source_display (str, optional): name for source display-referred
reference space (ocio valid). If provided, source_view must also be
provided, and source_colorspace will be ignored
source_view (str, optional): name for source viewer space (ocio valid)
If provided, source_display must also be provided, and
source_colorspace will be ignored
target_colorspace (str): ocio valid target color space
if filled, 'view' and 'display' must be empty
view (str): name for viewer space (ocio valid)
both 'view' and 'display' must be filled (if 'target_colorspace')
display (str): name for display-referred reference space (ocio valid)
target_display (str): name for target display-referred reference space
(ocio valid) both 'view' and 'display' must be filled (if
'target_colorspace')
target_view (str): name for target viewer space (ocio valid)
both 'view' and 'display' must be filled (if 'target_colorspace')
additional_command_args (list): arguments for oiiotool (like binary
depth for .dpx)
logger (logging.Logger): Logger used for logging.
Raises:
ValueError: if misconfigured
"""
if logger is None:
logger = logging.getLogger(__name__)
@ -1082,23 +1193,82 @@ def convert_colorspace(
"--ch", channels_arg
])
if all([target_colorspace, view, display]):
raise ValueError("Colorspace and both screen and display"
" cannot be set together."
"Choose colorspace or screen and display")
if not target_colorspace and not all([view, display]):
raise ValueError("Both screen and display must be set.")
# Validate input parameters
if target_colorspace and target_view and target_display:
raise ValueError(
"Colorspace and both view and display cannot be set together."
"Choose colorspace or screen and display"
)
if not target_colorspace and not target_view and not target_display:
raise ValueError(
"Both view and display must be set if target_colorspace is not "
"provided."
)
if (
(source_view and not source_display)
or (source_display and not source_view)
):
raise ValueError(
"Both source_view and source_display must be provided if using "
"display/view inputs."
)
if source_view and source_display and source_colorspace:
logger.warning(
"Both source display/view and source_colorspace provided. "
"Using source display/view pair and ignoring source_colorspace."
)
if additional_command_args:
oiio_cmd.extend(additional_command_args)
if target_colorspace:
oiio_cmd.extend(["--colorconvert:subimages=0",
source_colorspace,
target_colorspace])
if view and display:
oiio_cmd.extend(["--iscolorspace", source_colorspace])
oiio_cmd.extend(["--ociodisplay:subimages=0", display, view])
# Handle the different conversion cases
# Source view and display are known
if source_view and source_display:
if target_colorspace:
# This is a two-step conversion process since there's no direct
# display/view to colorspace command
# This could be a config parameter or determined from OCIO config
# Use temporarty role space 'scene_linear'
color_convert_args = ("scene_linear", target_colorspace)
elif source_display != target_display or source_view != target_view:
# Complete display/view pair conversion
# - go through a reference space
color_convert_args = (target_display, target_view)
else:
color_convert_args = None
logger.debug(
"Source and target display/view pairs are identical."
" No color conversion needed."
)
if color_convert_args:
oiio_cmd.extend([
"--ociodisplay:inverse=1:subimages=0",
source_display,
source_view,
"--colorconvert:subimages=0",
*color_convert_args
])
elif target_colorspace:
# Standard color space to color space conversion
oiio_cmd.extend([
"--colorconvert:subimages=0",
source_colorspace,
target_colorspace,
])
else:
# Standard conversion from colorspace to display/view
oiio_cmd.extend([
"--iscolorspace",
source_colorspace,
"--ociodisplay:subimages=0",
target_display,
target_view,
])
oiio_cmd.extend(["-o", output_path])
@ -1410,12 +1580,27 @@ def get_media_mime_type(filepath: str) -> Optional[str]:
Optional[str]: Mime type or None if is unknown mime type.
"""
# The implementation is identical or better with ayon_api >=1.1.0,
# which is used in AYON launcher >=1.3.0.
# NOTE Remove safe import when AYON launcher >=1.2.0.
try:
from ayon_api.utils import (
get_media_mime_type_for_content as _ayon_api_func
)
except ImportError:
_ayon_api_func = None
if not filepath or not os.path.exists(filepath):
return None
with open(filepath, "rb") as stream:
content = stream.read()
if _ayon_api_func is not None:
mime_type = _ayon_api_func(content)
if mime_type is not None:
return mime_type
content_len = len(content)
# Pre-validation (largest definition check)
# - hopefully there cannot be media defined in less than 12 bytes
@ -1442,11 +1627,13 @@ def get_media_mime_type(filepath: str) -> Optional[str]:
if b'xmlns="http://www.w3.org/2000/svg"' in content:
return "image/svg+xml"
# JPEG, JFIF or Exif
if (
content[0:4] == b"\xff\xd8\xff\xdb"
or content[6:10] in (b"JFIF", b"Exif")
):
# JPEG
# - [0:2] is constant b"\xff\xd8"
# (ref. https://www.file-recovery.com/jpg-signature-format.htm)
# - [2:4] Marker identifier b"\xff{?}"
# (ref. https://www.disktuna.com/list-of-jpeg-markers/)
# NOTE: File ends with b"\xff\xd9"
if content[0:3] == b"\xff\xd8\xff":
return "image/jpeg"
# Webp

View file

@ -19,11 +19,7 @@ from .create import (
CreatedInstance,
CreatorError,
LegacyCreator,
legacy_create,
discover_creator_plugins,
discover_legacy_creator_plugins,
register_creator_plugin,
deregister_creator_plugin,
register_creator_plugin_path,
@ -141,12 +137,7 @@ __all__ = (
"CreatorError",
# - legacy creation
"LegacyCreator",
"legacy_create",
"discover_creator_plugins",
"discover_legacy_creator_plugins",
"register_creator_plugin",
"deregister_creator_plugin",
"register_creator_plugin_path",

View file

@ -37,16 +37,19 @@ class LauncherActionSelection:
project_name,
folder_id,
task_id,
workfile_id,
folder_path=None,
task_name=None,
project_entity=None,
folder_entity=None,
task_entity=None,
workfile_entity=None,
project_settings=None,
):
self._project_name = project_name
self._folder_id = folder_id
self._task_id = task_id
self._workfile_id = workfile_id
self._folder_path = folder_path
self._task_name = task_name
@ -54,6 +57,7 @@ class LauncherActionSelection:
self._project_entity = project_entity
self._folder_entity = folder_entity
self._task_entity = task_entity
self._workfile_entity = workfile_entity
self._project_settings = project_settings
@ -213,6 +217,15 @@ class LauncherActionSelection:
self._task_name = self.task_entity["name"]
return self._task_name
def get_workfile_id(self):
"""Selected workfile id.
Returns:
Union[str, None]: Selected workfile id.
"""
return self._workfile_id
def get_project_entity(self):
"""Project entity for the selection.
@ -259,6 +272,24 @@ class LauncherActionSelection:
)
return self._task_entity
def get_workfile_entity(self):
"""Workfile entity for the selection.
Returns:
Union[dict[str, Any], None]: Workfile entity.
"""
if (
self._project_name is None
or self._workfile_id is None
):
return None
if self._workfile_entity is None:
self._workfile_entity = ayon_api.get_workfile_info_by_id(
self._project_name, self._workfile_id
)
return self._workfile_entity
def get_project_settings(self):
"""Project settings for the selection.
@ -305,15 +336,27 @@ class LauncherActionSelection:
"""
return self._task_id is not None
@property
def is_workfile_selected(self):
"""Return whether a task is selected.
Returns:
bool: Whether a task is selected.
"""
return self._workfile_id is not None
project_name = property(get_project_name)
folder_id = property(get_folder_id)
task_id = property(get_task_id)
workfile_id = property(get_workfile_id)
folder_path = property(get_folder_path)
task_name = property(get_task_name)
project_entity = property(get_project_entity)
folder_entity = property(get_folder_entity)
task_entity = property(get_task_entity)
workfile_entity = property(get_workfile_entity)
class LauncherAction(object):

View file

@ -6,6 +6,7 @@ from .exceptions import (
AnatomyTemplateUnsolved,
)
from .anatomy import Anatomy
from .templates import AnatomyTemplateResult, AnatomyStringTemplate
__all__ = (
@ -16,4 +17,7 @@ __all__ = (
"AnatomyTemplateUnsolved",
"Anatomy",
"AnatomyTemplateResult",
"AnatomyStringTemplate",
)

View file

@ -1,6 +1,7 @@
import os
import re
import copy
import platform
import collections
import numbers
@ -15,6 +16,7 @@ from .exceptions import (
AnatomyTemplateUnsolved,
)
_IS_WINDOWS = platform.system().lower() == "windows"
_PLACEHOLDER = object()
@ -526,6 +528,14 @@ class AnatomyTemplates:
root_key = "{" + root_key + "}"
output = output.replace(str(used_value), root_key)
# Make sure rootless path is with forward slashes
if _IS_WINDOWS:
output.replace("\\", "/")
# Make sure there are no double slashes
while "//" in output:
output = output.replace("//", "/")
return output
def format(self, data, strict=True):

View file

@ -1403,7 +1403,12 @@ def _get_display_view_colorspace_name(config_path, display, view):
"""
config = _get_ocio_config(config_path)
return config.getDisplayViewColorSpaceName(display, view)
colorspace = config.getDisplayViewColorSpaceName(display, view)
# Special token. See https://opencolorio.readthedocs.io/en/latest/guides/authoring/authoring.html#shared-views # noqa
if colorspace == "<USE_DISPLAY_NAME>":
colorspace = display
return colorspace
def _get_ocio_config_colorspaces(config_path):

View file

@ -1,21 +1,22 @@
"""Core pipeline functionality"""
from __future__ import annotations
import os
import logging
import platform
import uuid
import warnings
from typing import Optional, Any
import ayon_api
import pyblish.api
from pyblish.lib import MessageHandler
from ayon_core import AYON_CORE_ROOT
from ayon_core.host import HostBase
from ayon_core.host import AbstractHost
from ayon_core.lib import (
is_in_tests,
initialize_ayon_connection,
emit_event,
version_up
)
from ayon_core.addon import load_addons, AddonsManager
from ayon_core.settings import get_project_settings
@ -23,13 +24,7 @@ from ayon_core.settings import get_project_settings
from .publish.lib import filter_pyblish_plugins
from .anatomy import Anatomy
from .template_data import get_template_data_with_names
from .workfile import (
get_workdir,
get_custom_workfile_template_by_string_context,
get_workfile_template_key_from_context,
get_last_workfile,
MissingWorkdirError,
)
from .workfile import get_custom_workfile_template_by_string_context
from . import (
register_loader_plugin_path,
register_inventory_action_path,
@ -75,7 +70,7 @@ def _get_addons_manager():
def register_root(path):
"""Register currently active root"""
"""DEPRECATED Register currently active root."""
log.info("Registering root: %s" % path)
_registered_root["_"] = path
@ -94,18 +89,29 @@ def registered_root():
Returns:
dict[str, str]: Root paths.
"""
"""
warnings.warn(
"Used deprecated function 'registered_root'. Please use 'Anatomy'"
" to get roots.",
DeprecationWarning,
stacklevel=2,
)
return _registered_root["_"]
def install_host(host):
def install_host(host: AbstractHost) -> None:
"""Install `host` into the running Python session.
Args:
host (HostBase): A host interface object.
host (AbstractHost): A host interface object.
"""
if not isinstance(host, AbstractHost):
log.error(
f"Host must be a subclass of 'AbstractHost', got '{type(host)}'."
)
global _is_installed
_is_installed = True
@ -183,7 +189,7 @@ def install_ayon_plugins(project_name=None, host_name=None):
register_inventory_action_path(INVENTORY_PATH)
if host_name is None:
host_name = os.environ.get("AYON_HOST_NAME")
host_name = get_current_host_name()
addons_manager = _get_addons_manager()
publish_plugin_dirs = addons_manager.collect_publish_plugin_paths(
@ -304,7 +310,7 @@ def get_current_host_name():
"""
host = registered_host()
if isinstance(host, HostBase):
if isinstance(host, AbstractHost):
return host.name
return os.environ.get("AYON_HOST_NAME")
@ -340,32 +346,50 @@ def get_global_context():
def get_current_context():
host = registered_host()
if isinstance(host, HostBase):
if isinstance(host, AbstractHost):
return host.get_current_context()
return get_global_context()
def get_current_project_name():
host = registered_host()
if isinstance(host, HostBase):
if isinstance(host, AbstractHost):
return host.get_current_project_name()
return get_global_context()["project_name"]
def get_current_folder_path():
host = registered_host()
if isinstance(host, HostBase):
if isinstance(host, AbstractHost):
return host.get_current_folder_path()
return get_global_context()["folder_path"]
def get_current_task_name():
host = registered_host()
if isinstance(host, HostBase):
if isinstance(host, AbstractHost):
return host.get_current_task_name()
return get_global_context()["task_name"]
def get_current_project_settings() -> dict[str, Any]:
"""Project settings for the current context project.
Returns:
dict[str, Any]: Project settings for the current context project.
Raises:
ValueError: If current project is not set.
"""
project_name = get_current_project_name()
if not project_name:
raise ValueError(
"Current project is not set. Can't get project settings."
)
return get_project_settings(project_name)
def get_current_project_entity(fields=None):
"""Helper function to get project document based on global Session.
@ -505,66 +529,64 @@ def get_current_context_custom_workfile_template(project_settings=None):
)
def change_current_context(folder_entity, task_entity, template_key=None):
_PLACEHOLDER = object()
def change_current_context(
folder_entity: dict[str, Any],
task_entity: dict[str, Any],
*,
template_key: Optional[str] = _PLACEHOLDER,
reason: Optional[str] = None,
project_entity: Optional[dict[str, Any]] = None,
anatomy: Optional[Anatomy] = None,
) -> dict[str, str]:
"""Update active Session to a new task work area.
This updates the live Session to a different task under folder.
This updates the live Session to a different task under a folder.
Notes:
* This function does a lot of things related to workfiles which
extends arguments options a lot.
* We might want to implement 'set_current_context' on host integration
instead. But `AYON_WORKDIR`, which is related to 'IWorkfileHost',
would not be available in that case which might break some
logic.
Args:
folder_entity (Dict[str, Any]): Folder entity to set.
task_entity (Dict[str, Any]): Task entity to set.
template_key (Union[str, None]): Prepared template key to be used for
workfile template in Anatomy.
template_key (Optional[str]): DEPRECATED: Prepared template key to
be used for workfile template in Anatomy.
reason (Optional[str]): Reason for changing context.
anatomy (Optional[Anatomy]): Anatomy object used for workdir
calculation.
project_entity (Optional[dict[str, Any]]): Project entity used for
workdir calculation.
Returns:
Dict[str, str]: The changed key, values in the current Session.
"""
dict[str, str]: New context data.
project_name = get_current_project_name()
workdir = None
folder_path = None
task_name = None
if folder_entity:
folder_path = folder_entity["path"]
if task_entity:
task_name = task_entity["name"]
project_entity = ayon_api.get_project(project_name)
host_name = get_current_host_name()
workdir = get_workdir(
project_entity,
folder_entity,
task_entity,
host_name,
template_key=template_key
"""
if template_key is not _PLACEHOLDER:
warnings.warn(
(
"Used deprecated argument 'template_key' in"
" 'change_current_context'."
" It is not necessary to pass it in anymore."
),
DeprecationWarning,
stacklevel=2,
)
envs = {
"AYON_PROJECT_NAME": project_name,
"AYON_FOLDER_PATH": folder_path,
"AYON_TASK_NAME": task_name,
"AYON_WORKDIR": workdir,
}
# Update the Session and environments. Pop from environments all keys with
# value set to None.
for key, value in envs.items():
if value is None:
os.environ.pop(key, None)
else:
os.environ[key] = value
data = envs.copy()
# Convert env keys to human readable keys
data["project_name"] = project_name
data["folder_path"] = folder_path
data["task_name"] = task_name
data["workdir_path"] = workdir
# Emit session change
emit_event("taskChanged", data)
return data
host = registered_host()
return host.set_current_context(
folder_entity,
task_entity,
reason=reason,
project_entity=project_entity,
anatomy=anatomy,
)
def get_process_id():
@ -583,53 +605,16 @@ def get_process_id():
def version_up_current_workfile():
"""Function to increment and save workfile
"""DEPRECATED Function to increment and save workfile.
Please use 'save_next_version' from 'ayon_core.pipeline.workfile' instead.
"""
host = registered_host()
project_name = get_current_project_name()
folder_path = get_current_folder_path()
task_name = get_current_task_name()
host_name = get_current_host_name()
template_key = get_workfile_template_key_from_context(
project_name,
folder_path,
task_name,
host_name,
warnings.warn(
"Used deprecated 'version_up_current_workfile' please use"
" 'save_next_version' from 'ayon_core.pipeline.workfile' instead.",
DeprecationWarning,
stacklevel=2,
)
anatomy = Anatomy(project_name)
data = get_template_data_with_names(
project_name, folder_path, task_name, host_name
)
data["root"] = anatomy.roots
work_template = anatomy.get_template_item("work", template_key)
# Define saving file extension
extensions = host.get_workfile_extensions()
current_file = host.get_current_workfile()
if current_file:
extensions = [os.path.splitext(current_file)[-1]]
work_root = work_template["directory"].format_strict(data)
file_template = work_template["file"].template
last_workfile_path = get_last_workfile(
work_root, file_template, data, extensions, True
)
# `get_last_workfile` will return the first expected file version
# if no files exist yet. In that case, if they do not exist we will
# want to save v001
new_workfile_path = last_workfile_path
if os.path.exists(new_workfile_path):
new_workfile_path = version_up(new_workfile_path)
# Raise an error if the parent folder doesn't exist as `host.save_workfile`
# is not supposed/able to create missing folders.
parent_folder = os.path.dirname(new_workfile_path)
if not os.path.exists(parent_folder):
raise MissingWorkdirError(
f"Work area directory '{parent_folder}' does not exist.")
host.save_workfile(new_workfile_path)
from ayon_core.pipeline.workfile import save_next_version
save_next_version()

View file

@ -21,12 +21,14 @@ from .exceptions import (
TemplateFillError,
)
from .structures import (
ParentFlags,
CreatedInstance,
ConvertorItem,
AttributeValues,
CreatorAttributeValues,
PublishAttributeValues,
PublishAttributes,
InstanceContextInfo,
)
from .utils import (
get_last_versions_for_instances,
@ -44,9 +46,6 @@ from .creator_plugins import (
AutoCreator,
HiddenCreator,
discover_legacy_creator_plugins,
get_legacy_creator_by_name,
discover_creator_plugins,
register_creator_plugin,
deregister_creator_plugin,
@ -58,11 +57,6 @@ from .creator_plugins import (
from .context import CreateContext
from .legacy_create import (
LegacyCreator,
legacy_create,
)
__all__ = (
"PRODUCT_NAME_ALLOWED_SYMBOLS",
@ -85,12 +79,14 @@ __all__ = (
"TaskNotSetError",
"TemplateFillError",
"ParentFlags",
"CreatedInstance",
"ConvertorItem",
"AttributeValues",
"CreatorAttributeValues",
"PublishAttributeValues",
"PublishAttributes",
"InstanceContextInfo",
"get_last_versions_for_instances",
"get_next_versions_for_instances",
@ -105,9 +101,6 @@ __all__ = (
"AutoCreator",
"HiddenCreator",
"discover_legacy_creator_plugins",
"get_legacy_creator_by_name",
"discover_creator_plugins",
"register_creator_plugin",
"deregister_creator_plugin",
@ -117,7 +110,4 @@ __all__ = (
"cache_and_get_instances",
"CreateContext",
"LegacyCreator",
"legacy_create",
)

View file

@ -41,7 +41,12 @@ from .exceptions import (
HostMissRequiredMethod,
)
from .changes import TrackChangesItem
from .structures import PublishAttributes, ConvertorItem, InstanceContextInfo
from .structures import (
PublishAttributes,
ConvertorItem,
InstanceContextInfo,
ParentFlags,
)
from .creator_plugins import (
Creator,
AutoCreator,
@ -49,15 +54,12 @@ from .creator_plugins import (
discover_convertor_plugins,
)
if typing.TYPE_CHECKING:
from ayon_core.host import HostBase
from ayon_core.lib import AbstractAttrDef
from ayon_core.lib.events import EventCallback, Event
from .structures import CreatedInstance
from .creator_plugins import BaseCreator
class PublishHost(HostBase, IPublishHost):
pass
# Import of functions and classes that were moved to different file
# TODO Should be removed in future release - Added 24/08/28, 0.4.3-dev.1
@ -80,6 +82,7 @@ INSTANCE_ADDED_TOPIC = "instances.added"
INSTANCE_REMOVED_TOPIC = "instances.removed"
VALUE_CHANGED_TOPIC = "values.changed"
INSTANCE_REQUIREMENT_CHANGED_TOPIC = "instance.requirement.changed"
INSTANCE_PARENT_CHANGED_TOPIC = "instance.parent.changed"
PRE_CREATE_ATTR_DEFS_CHANGED_TOPIC = "pre.create.attr.defs.changed"
CREATE_ATTR_DEFS_CHANGED_TOPIC = "create.attr.defs.changed"
PUBLISH_ATTR_DEFS_CHANGED_TOPIC = "publish.attr.defs.changed"
@ -163,7 +166,7 @@ class CreateContext:
context which should be handled by host.
Args:
host (PublishHost): Host implementation which handles implementation
host (IPublishHost): Host implementation which handles implementation
and global metadata.
headless (bool): Context is created out of UI (Current not used).
reset (bool): Reset context on initialization.
@ -173,7 +176,7 @@ class CreateContext:
def __init__(
self,
host: "PublishHost",
host: IPublishHost,
headless: bool = False,
reset: bool = True,
discover_publish_plugins: bool = True,
@ -262,6 +265,8 @@ class CreateContext:
# - right now used only for 'mandatory' but can be extended
# in future
"requirement_change": BulkInfo(),
# Instance parent changed
"parent_change": BulkInfo(),
}
self._bulk_order = []
@ -1083,6 +1088,35 @@ class CreateContext:
INSTANCE_REQUIREMENT_CHANGED_TOPIC, callback
)
def add_instance_parent_change_callback(
self, callback: Callable
) -> "EventCallback":
"""Register callback to listen to instance parent changes.
Instance changed parent or parent flags.
Data structure of event:
```python
{
"instances": [CreatedInstance, ...],
"create_context": CreateContext
}
```
Args:
callback (Callable): Callback function that will be called when
instance requirement changed.
Returns:
EventCallback: Created callback object which can be used to
stop listening.
"""
return self._event_hub.add_callback(
INSTANCE_PARENT_CHANGED_TOPIC, callback
)
def context_data_to_store(self) -> dict[str, Any]:
"""Data that should be stored by host function.
@ -1364,6 +1398,13 @@ class CreateContext:
) as bulk_info:
yield bulk_info
@contextmanager
def bulk_instance_parent_change(self, sender: Optional[str] = None):
with self._bulk_context(
"parent_change", sender
) as bulk_info:
yield bulk_info
@contextmanager
def bulk_publish_attr_defs_change(self, sender: Optional[str] = None):
with self._bulk_context("publish_attrs_change", sender) as bulk_info:
@ -1444,6 +1485,19 @@ class CreateContext:
with self.bulk_instance_requirement_change() as bulk_item:
bulk_item.append(instance_id)
def instance_parent_changed(self, instance_id: str) -> None:
"""Instance parent changed.
Triggered by `CreatedInstance`.
Args:
instance_id (Optional[str]): Instance id.
"""
if self._is_instance_events_ready(instance_id):
with self.bulk_instance_parent_change() as bulk_item:
bulk_item.append(instance_id)
# --- context change callbacks ---
def publish_attribute_value_changed(
self, plugin_name: str, value: dict[str, Any]
@ -2046,63 +2100,97 @@ class CreateContext:
sender (Optional[str]): Sender of the event.
"""
instance_ids_by_parent_id = collections.defaultdict(set)
for instance in self.instances:
instance_ids_by_parent_id[instance.parent_instance_id].add(
instance.id
)
instances_to_remove = list(instances)
ids_to_remove = {
instance.id
for instance in instances_to_remove
}
_queue = collections.deque()
_queue.extend(instances_to_remove)
# Add children with parent lifetime flag
while _queue:
instance = _queue.popleft()
ids_to_remove.add(instance.id)
children_ids = instance_ids_by_parent_id[instance.id]
for children_id in children_ids:
if children_id in ids_to_remove:
continue
instance = self._instances_by_id[children_id]
if instance.parent_flags & ParentFlags.parent_lifetime:
instances_to_remove.append(instance)
ids_to_remove.add(instance.id)
_queue.append(instance)
instances_by_identifier = collections.defaultdict(list)
for instance in instances:
for instance in instances_to_remove:
identifier = instance.creator_identifier
instances_by_identifier[identifier].append(instance)
# Just remove instances from context if creator is not available
missing_creators = set(instances_by_identifier) - set(self.creators)
instances = []
miss_creator_instances = []
for identifier in missing_creators:
instances.extend(
instance
for instance in instances_by_identifier[identifier]
)
miss_creator_instances.extend(instances_by_identifier[identifier])
self._remove_instances(instances, sender)
with self.bulk_remove_instances(sender):
self._remove_instances(miss_creator_instances, sender)
error_message = "Instances removement of creator \"{}\" failed. {}"
failed_info = []
# Remove instances by creator plugin order
for creator in self.get_sorted_creators(
instances_by_identifier.keys()
):
identifier = creator.identifier
creator_instances = instances_by_identifier[identifier]
error_message = "Instances removement of creator \"{}\" failed. {}"
failed_info = []
# Remove instances by creator plugin order
for creator in self.get_sorted_creators(
instances_by_identifier.keys()
):
identifier = creator.identifier
# Filter instances by current state of 'CreateContext'
# - in case instances were already removed as subroutine of
# previous create plugin.
creator_instances = [
instance
for instance in instances_by_identifier[identifier]
if instance.id in self._instances_by_id
]
if not creator_instances:
continue
label = creator.label
failed = False
add_traceback = False
exc_info = None
try:
creator.remove_instances(creator_instances)
label = creator.label
failed = False
add_traceback = False
exc_info = None
try:
creator.remove_instances(creator_instances)
except CreatorError:
failed = True
exc_info = sys.exc_info()
self.log.warning(
error_message.format(identifier, exc_info[1])
)
except (KeyboardInterrupt, SystemExit):
raise
except: # noqa: E722
failed = True
add_traceback = True
exc_info = sys.exc_info()
self.log.warning(
error_message.format(identifier, ""),
exc_info=True
)
if failed:
failed_info.append(
prepare_failed_creator_operation_info(
identifier, label, exc_info, add_traceback
except CreatorError:
failed = True
exc_info = sys.exc_info()
self.log.warning(
error_message.format(identifier, exc_info[1])
)
except (KeyboardInterrupt, SystemExit):
raise
except: # noqa: E722
failed = True
add_traceback = True
exc_info = sys.exc_info()
self.log.warning(
error_message.format(identifier, ""),
exc_info=True
)
if failed:
failed_info.append(
prepare_failed_creator_operation_info(
identifier, label, exc_info, add_traceback
)
)
)
if failed_info:
raise CreatorsRemoveFailed(failed_info)
@ -2305,6 +2393,8 @@ class CreateContext:
self._bulk_publish_attrs_change_finished(data, sender)
elif key == "requirement_change":
self._bulk_instance_requirement_change_finished(data, sender)
elif key == "parent_change":
self._bulk_instance_parent_change_finished(data, sender)
def _bulk_add_instances_finished(
self,
@ -2518,3 +2608,22 @@ class CreateContext:
{"instances": instances},
sender,
)
def _bulk_instance_parent_change_finished(
self,
instance_ids: list[str],
sender: Optional[str],
):
if not instance_ids:
return
instances = [
self.get_instance_by_id(instance_id)
for instance_id in set(instance_ids)
]
self._emit_event(
INSTANCE_PARENT_CHANGED_TOPIC,
{"instances": instances},
sender,
)

View file

@ -6,7 +6,6 @@ from typing import TYPE_CHECKING, Optional, Dict, Any
from abc import ABC, abstractmethod
from ayon_core.settings import get_project_settings
from ayon_core.lib import Logger, get_version_from_path
from ayon_core.pipeline.plugin_discover import (
discover,
@ -20,7 +19,6 @@ from ayon_core.pipeline.staging_dir import get_staging_dir_info, StagingDir
from .constants import DEFAULT_VARIANT_VALUE
from .product_name import get_product_name
from .utils import get_next_versions_for_instances
from .legacy_create import LegacyCreator
from .structures import CreatedInstance
if TYPE_CHECKING:
@ -975,62 +973,10 @@ def discover_convertor_plugins(*args, **kwargs):
return discover(ProductConvertorPlugin, *args, **kwargs)
def discover_legacy_creator_plugins():
from ayon_core.pipeline import get_current_project_name
log = Logger.get_logger("CreatorDiscover")
plugins = discover(LegacyCreator)
project_name = get_current_project_name()
project_settings = get_project_settings(project_name)
for plugin in plugins:
try:
plugin.apply_settings(project_settings)
except Exception:
log.warning(
"Failed to apply settings to creator {}".format(
plugin.__name__
),
exc_info=True
)
return plugins
def get_legacy_creator_by_name(creator_name, case_sensitive=False):
"""Find creator plugin by name.
Args:
creator_name (str): Name of creator class that should be returned.
case_sensitive (bool): Match of creator plugin name is case sensitive.
Set to `False` by default.
Returns:
Creator: Return first matching plugin or `None`.
"""
# Lower input creator name if is not case sensitive
if not case_sensitive:
creator_name = creator_name.lower()
for creator_plugin in discover_legacy_creator_plugins():
_creator_name = creator_plugin.__name__
# Lower creator plugin name if is not case sensitive
if not case_sensitive:
_creator_name = _creator_name.lower()
if _creator_name == creator_name:
return creator_plugin
return None
def register_creator_plugin(plugin):
if issubclass(plugin, BaseCreator):
register_plugin(BaseCreator, plugin)
elif issubclass(plugin, LegacyCreator):
register_plugin(LegacyCreator, plugin)
elif issubclass(plugin, ProductConvertorPlugin):
register_plugin(ProductConvertorPlugin, plugin)
@ -1039,22 +985,17 @@ def deregister_creator_plugin(plugin):
if issubclass(plugin, BaseCreator):
deregister_plugin(BaseCreator, plugin)
elif issubclass(plugin, LegacyCreator):
deregister_plugin(LegacyCreator, plugin)
elif issubclass(plugin, ProductConvertorPlugin):
deregister_plugin(ProductConvertorPlugin, plugin)
def register_creator_plugin_path(path):
register_plugin_path(BaseCreator, path)
register_plugin_path(LegacyCreator, path)
register_plugin_path(ProductConvertorPlugin, path)
def deregister_creator_plugin_path(path):
deregister_plugin_path(BaseCreator, path)
deregister_plugin_path(LegacyCreator, path)
deregister_plugin_path(ProductConvertorPlugin, path)

View file

@ -1,216 +0,0 @@
"""Create workflow moved from avalon-core repository.
Renamed classes and functions
- 'Creator' -> 'LegacyCreator'
- 'create' -> 'legacy_create'
"""
import os
import logging
import collections
from ayon_core.pipeline.constants import AYON_INSTANCE_ID
from .product_name import get_product_name
class LegacyCreator:
"""Determine how assets are created"""
label = None
product_type = None
defaults = None
maintain_selection = True
enabled = True
dynamic_product_name_keys = []
log = logging.getLogger("LegacyCreator")
log.propagate = True
def __init__(self, name, folder_path, options=None, data=None):
self.name = name # For backwards compatibility
self.options = options
# Default data
self.data = collections.OrderedDict()
# TODO use 'AYON_INSTANCE_ID' when all hosts support it
self.data["id"] = AYON_INSTANCE_ID
self.data["productType"] = self.product_type
self.data["folderPath"] = folder_path
self.data["productName"] = name
self.data["active"] = True
self.data.update(data or {})
@classmethod
def apply_settings(cls, project_settings):
"""Apply AYON settings to a plugin class."""
host_name = os.environ.get("AYON_HOST_NAME")
plugin_type = "create"
plugin_type_settings = (
project_settings
.get(host_name, {})
.get(plugin_type, {})
)
global_type_settings = (
project_settings
.get("core", {})
.get(plugin_type, {})
)
if not global_type_settings and not plugin_type_settings:
return
plugin_name = cls.__name__
plugin_settings = None
# Look for plugin settings in host specific settings
if plugin_name in plugin_type_settings:
plugin_settings = plugin_type_settings[plugin_name]
# Look for plugin settings in global settings
elif plugin_name in global_type_settings:
plugin_settings = global_type_settings[plugin_name]
if not plugin_settings:
return
cls.log.debug(">>> We have preset for {}".format(plugin_name))
for option, value in plugin_settings.items():
if option == "enabled" and value is False:
cls.log.debug(" - is disabled by preset")
else:
cls.log.debug(" - setting `{}`: `{}`".format(option, value))
setattr(cls, option, value)
def process(self):
pass
@classmethod
def get_dynamic_data(
cls, project_name, folder_entity, task_entity, variant, host_name
):
"""Return dynamic data for current Creator plugin.
By default return keys from `dynamic_product_name_keys` attribute
as mapping to keep formatted template unchanged.
```
dynamic_product_name_keys = ["my_key"]
---
output = {
"my_key": "{my_key}"
}
```
Dynamic keys may override default Creator keys (productType, task,
folderPath, ...) but do it wisely if you need.
All of keys will be converted into 3 variants unchanged, capitalized
and all upper letters. Because of that are all keys lowered.
This method can be modified to prefill some values just keep in mind it
is class method.
Args:
project_name (str): Context's project name.
folder_entity (dict[str, Any]): Folder entity.
task_entity (dict[str, Any]): Task entity.
variant (str): What is entered by user in creator tool.
host_name (str): Name of host.
Returns:
dict: Fill data for product name template.
"""
dynamic_data = {}
for key in cls.dynamic_product_name_keys:
key = key.lower()
dynamic_data[key] = "{" + key + "}"
return dynamic_data
@classmethod
def get_product_name(
cls, project_name, folder_entity, task_entity, variant, host_name=None
):
"""Return product name created with entered arguments.
Logic extracted from Creator tool. This method should give ability
to get product name without the tool.
TODO: Maybe change `variant` variable.
By default is output concatenated product type with variant.
Args:
project_name (str): Context's project name.
folder_entity (dict[str, Any]): Folder entity.
task_entity (dict[str, Any]): Task entity.
variant (str): What is entered by user in creator tool.
host_name (str): Name of host.
Returns:
str: Formatted product name with entered arguments. Should match
config's logic.
"""
dynamic_data = cls.get_dynamic_data(
project_name, folder_entity, task_entity, variant, host_name
)
task_name = task_type = None
if task_entity:
task_name = task_entity["name"]
task_type = task_entity["taskType"]
return get_product_name(
project_name,
task_name,
task_type,
host_name,
cls.product_type,
variant,
dynamic_data=dynamic_data
)
def legacy_create(
Creator, product_name, folder_path, options=None, data=None
):
"""Create a new instance
Associate nodes with a product name and type. These nodes are later
validated, according to their `product type`, and integrated into the
shared environment, relative their `productName`.
Data relative each product type, along with default data, are imprinted
into the resulting objectSet. This data is later used by extractors
and finally asset browsers to help identify the origin of the asset.
Arguments:
Creator (Creator): Class of creator.
product_name (str): Name of product.
folder_path (str): Folder path.
options (dict, optional): Additional options from GUI.
data (dict, optional): Additional data from GUI.
Raises:
NameError on `productName` already exists
KeyError on invalid dynamic property
RuntimeError on host error
Returns:
Name of instance
"""
from ayon_core.pipeline import registered_host
host = registered_host()
plugin = Creator(product_name, folder_path, options, data)
if plugin.maintain_selection is True:
with host.maintained_selection():
print("Running %s with maintained selection" % plugin)
instance = plugin.process()
return instance
print("Running %s" % plugin)
instance = plugin.process()
return instance

View file

@ -1,6 +1,7 @@
import copy
import collections
from uuid import uuid4
from enum import Enum
import typing
from typing import Optional, Dict, List, Any
@ -22,6 +23,23 @@ if typing.TYPE_CHECKING:
from .creator_plugins import BaseCreator
class IntEnum(int, Enum):
"""An int-based Enum class that allows for int comparison."""
def __int__(self) -> int:
return self.value
class ParentFlags(IntEnum):
# Delete instance if parent is deleted
parent_lifetime = 1
# Active state is propagated from parent to children
# - the active state is propagated in collection phase
# NOTE It might be helpful to have a function that would return "real"
# active state for instances
share_active = 1 << 1
class ConvertorItem:
"""Item representing convertor plugin.
@ -507,7 +525,9 @@ class CreatedInstance:
if transient_data is None:
transient_data = {}
self._transient_data = transient_data
self._is_mandatory = False
self._is_mandatory: bool = False
self._parent_instance_id: Optional[str] = None
self._parent_flags: int = 0
# Create a copy of passed data to avoid changing them on the fly
data = copy.deepcopy(data or {})
@ -752,6 +772,39 @@ class CreatedInstance:
self["active"] = True
self._create_context.instance_requirement_changed(self.id)
@property
def parent_instance_id(self) -> Optional[str]:
return self._parent_instance_id
@property
def parent_flags(self) -> int:
return self._parent_flags
def set_parent(
self, instance_id: Optional[str], flags: int
) -> None:
"""Set parent instance id and parenting flags.
Args:
instance_id (Optional[str]): Parent instance id.
flags (int): Parenting flags.
"""
changed = False
if instance_id != self._parent_instance_id:
changed = True
self._parent_instance_id = instance_id
if flags is None:
flags = 0
if self._parent_flags != flags:
self._parent_flags = flags
changed = True
if changed:
self._create_context.instance_parent_changed(self.id)
def changes(self):
"""Calculate and return changes."""

View file

@ -7,6 +7,10 @@ import opentimelineio as otio
from opentimelineio import opentime as _ot
# https://github.com/AcademySoftwareFoundation/OpenTimelineIO/issues/1822
OTIO_EPSILON = 1e-9
def otio_range_to_frame_range(otio_range):
start = _ot.to_frames(
otio_range.start_time, otio_range.start_time.rate)
@ -198,7 +202,8 @@ def is_clip_from_media_sequence(otio_clip):
def remap_range_on_file_sequence(otio_clip, otio_range):
"""
""" Remap the provided range on a file sequence clip.
Args:
otio_clip (otio.schema.Clip): The OTIO clip to check.
otio_range (otio.schema.TimeRange): The trim range to apply.
@ -245,7 +250,11 @@ def remap_range_on_file_sequence(otio_clip, otio_range):
if (
is_clip_from_media_sequence(otio_clip)
and available_range_start_frame == media_ref.start_frame
and conformed_src_in.to_frames() < media_ref.start_frame
# source range should be included in available range from media
# using round instead of conformed_src_in.to_frames() to avoid
# any precision issue with frame rate.
and round(conformed_src_in.value) < media_ref.start_frame
):
media_in = otio.opentime.RationalTime(
0, rate=available_range_rate

View file

@ -249,7 +249,8 @@ def create_skeleton_instance(
# map inputVersions `ObjectId` -> `str` so json supports it
"inputVersions": list(map(str, data.get("inputVersions", []))),
"colorspace": data.get("colorspace"),
"hasExplicitFrames": data.get("hasExplicitFrames")
"hasExplicitFrames": data.get("hasExplicitFrames", False),
"reuseLastVersion": data.get("reuseLastVersion", False),
}
if data.get("renderlayer"):

View file

@ -2,10 +2,10 @@
from __future__ import annotations
from abc import abstractmethod
import logging
import os
from typing import Any, Optional, Type
from ayon_core.lib import Logger
from ayon_core.pipeline.plugin_discover import (
deregister_plugin,
deregister_plugin_path,
@ -31,8 +31,7 @@ class LoaderPlugin(list):
options = []
log = logging.getLogger("ProductLoader")
log.propagate = True
log = Logger.get_logger("ProductLoader")
@classmethod
def apply_settings(cls, project_settings):
@ -373,7 +372,7 @@ def discover_loader_plugins(project_name=None):
if not project_name:
project_name = get_current_project_name()
project_settings = get_project_settings(project_name)
plugins = discover(LoaderPlugin)
plugins = discover(LoaderPlugin, allow_duplicates=False)
hooks = discover(LoaderHookPlugin)
sorted_hooks = sorted(hooks, key=lambda hook: hook.order)
for plugin in plugins:

View file

@ -9,7 +9,7 @@ from typing import Optional, Union, Any
import ayon_api
from ayon_core.host import ILoadHost
from ayon_core.host import ILoadHost, AbstractHost
from ayon_core.lib import (
StringTemplate,
TemplateUnsolved,
@ -720,11 +720,13 @@ def get_representation_path(representation, root=None):
str: fullpath of the representation
"""
if root is None:
from ayon_core.pipeline import registered_root
from ayon_core.pipeline import get_current_project_name, Anatomy
root = registered_root()
anatomy = Anatomy(get_current_project_name())
return get_representation_path_with_anatomy(
representation, anatomy
)
def path_from_representation():
try:
@ -772,7 +774,7 @@ def get_representation_path(representation, root=None):
dir_path, file_name = os.path.split(path)
if not os.path.exists(dir_path):
return
return None
base_name, ext = os.path.splitext(file_name)
file_name_items = None
@ -782,7 +784,7 @@ def get_representation_path(representation, root=None):
file_name_items = base_name.split("%")
if not file_name_items:
return
return None
filename_start = file_name_items[0]
@ -940,15 +942,21 @@ def any_outdated_containers(host=None, project_name=None):
return False
def get_outdated_containers(host=None, project_name=None):
def get_outdated_containers(
host: Optional[AbstractHost] = None,
project_name: Optional[str] = None,
ignore_locked_versions: bool = False,
):
"""Collect outdated containers from host scene.
Currently registered host and project in global session are used if
arguments are not passed.
Args:
host (ModuleType): Host implementation with 'ls' function available.
project_name (str): Name of project in which context we are.
host (Optional[AbstractHost]): Host implementation.
project_name (Optional[str]): Name of project in which context we are.
ignore_locked_versions (bool): Locked versions are ignored.
"""
from ayon_core.pipeline import registered_host, get_current_project_name
@ -962,7 +970,16 @@ def get_outdated_containers(host=None, project_name=None):
containers = host.get_containers()
else:
containers = host.ls()
return filter_containers(containers, project_name).outdated
outdated_containers = []
for container in filter_containers(containers, project_name).outdated:
if (
not ignore_locked_versions
and container.get("version_locked") is True
):
continue
outdated_containers.append(container)
return outdated_containers
def _is_valid_representation_id(repre_id: Any) -> bool:
@ -983,6 +1000,9 @@ def filter_containers(containers, project_name):
'invalid' are invalid containers (invalid content) and 'not_found' has
some missing entity in database.
Todos:
Respect 'project_name' on containers if is available.
Args:
containers (Iterable[dict]): List of containers referenced into scene.
project_name (str): Name of project in which context shoud look for
@ -991,8 +1011,8 @@ def filter_containers(containers, project_name):
Returns:
ContainersFilterResult: Named tuple with 'latest', 'outdated',
'invalid' and 'not_found' containers.
"""
"""
# Make sure containers is list that won't change
containers = list(containers)
@ -1040,13 +1060,13 @@ def filter_containers(containers, project_name):
hero=True,
fields={"id", "productId", "version"}
)
verisons_by_id = {}
versions_by_id = {}
versions_by_product_id = collections.defaultdict(list)
hero_version_ids = set()
for version_entity in version_entities:
version_id = version_entity["id"]
# Store versions by their ids
verisons_by_id[version_id] = version_entity
versions_by_id[version_id] = version_entity
# There's no need to query products for hero versions
# - they are considered as latest?
if version_entity["version"] < 0:
@ -1081,24 +1101,23 @@ def filter_containers(containers, project_name):
repre_entity = repre_entities_by_id.get(repre_id)
if not repre_entity:
log.debug((
"Container '{}' has an invalid representation."
log.debug(
f"Container '{container_name}' has an invalid representation."
" It is missing in the database."
).format(container_name))
)
not_found_containers.append(container)
continue
version_id = repre_entity["versionId"]
if version_id in outdated_version_ids:
outdated_containers.append(container)
elif version_id not in verisons_by_id:
log.debug((
"Representation on container '{}' has an invalid version."
" It is missing in the database."
).format(container_name))
if version_id not in versions_by_id:
log.debug(
f"Representation on container '{container_name}' has an"
" invalid version. It is missing in the database."
)
not_found_containers.append(container)
elif version_id in outdated_version_ids:
outdated_containers.append(container)
else:
uptodate_containers.append(container)

View file

@ -51,7 +51,7 @@ class DiscoverResult:
"*** Discovered {} plugins".format(len(self.plugins))
)
for cls in self.plugins:
lines.append("- {}".format(cls.__class__.__name__))
lines.append("- {}".format(cls.__name__))
# Plugin that were defined to be ignored
if self.ignored_plugins or full_report:

View file

@ -5,14 +5,22 @@ import sys
import inspect
import copy
import warnings
import hashlib
import xml.etree.ElementTree
from typing import TYPE_CHECKING, Optional, Union, List
from typing import TYPE_CHECKING, Optional, Union, List, Any
import clique
import speedcopy
import logging
import ayon_api
import pyblish.util
import pyblish.plugin
import pyblish.api
from ayon_api import (
get_server_api_connection,
get_representations,
get_last_version_by_product_name
)
from ayon_core.lib import (
import_filepath,
Logger,
@ -33,6 +41,8 @@ if TYPE_CHECKING:
TRAIT_INSTANCE_KEY: str = "representations_with_traits"
log = logging.getLogger(__name__)
def get_template_name_profiles(
project_name, project_settings=None, logger=None
@ -243,32 +253,38 @@ def publish_plugins_discover(
for path in paths:
path = os.path.normpath(path)
if not os.path.isdir(path):
continue
filenames = []
if os.path.isdir(path):
filenames.extend(
name
for name in os.listdir(path)
if (
os.path.isfile(os.path.join(path, name))
and not name.startswith("_")
)
)
else:
filenames.append(os.path.basename(path))
path = os.path.dirname(path)
for fname in os.listdir(path):
if fname.startswith("_"):
continue
abspath = os.path.join(path, fname)
if not os.path.isfile(abspath):
continue
mod_name, mod_ext = os.path.splitext(fname)
if mod_ext != ".py":
dirpath_hash = hashlib.md5(path.encode("utf-8")).hexdigest()
for filename in filenames:
basename, ext = os.path.splitext(filename)
if ext.lower() != ".py":
continue
filepath = os.path.join(path, filename)
module_name = f"{dirpath_hash}.{basename}"
try:
module = import_filepath(
abspath, mod_name, sys_module_name=mod_name)
filepath, module_name, sys_module_name=module_name
)
except Exception as err: # noqa: BLE001
# we need broad exception to catch all possible errors.
result.crashed_file_paths[abspath] = sys.exc_info()
result.crashed_file_paths[filepath] = sys.exc_info()
log.debug('Skipped: "%s" (%s)', mod_name, err)
log.debug('Skipped: "%s" (%s)', filepath, err)
continue
for plugin in pyblish.plugin.plugins_from_module(module):
@ -354,12 +370,18 @@ def get_plugin_settings(plugin, project_settings, log, category=None):
# Use project settings based on a category name
if category:
try:
return (
output = (
project_settings
[category]
["publish"]
[plugin.__name__]
)
warnings.warn(
"Please fill 'settings_category'"
f" for plugin '{plugin.__name__}'.",
DeprecationWarning
)
return output
except KeyError:
pass
@ -384,12 +406,18 @@ def get_plugin_settings(plugin, project_settings, log, category=None):
category_from_file = "core"
try:
return (
output = (
project_settings
[category_from_file]
[plugin_kind]
[plugin.__name__]
)
warnings.warn(
"Please fill 'settings_category'"
f" for plugin '{plugin.__name__}'.",
DeprecationWarning
)
return output
except KeyError:
pass
return {}
@ -955,7 +983,26 @@ def get_instance_expected_output_path(
"version": version
})
path_template_obj = anatomy.get_template_item("publish", "default")["path"]
# Get instance publish template name
task_name = task_type = None
task_entity = instance.data.get("taskEntity")
if task_entity:
task_name = task_entity["name"]
task_type = task_entity["taskType"]
template_name = get_publish_template_name(
project_name=instance.context.data["projectName"],
host_name=instance.context.data["hostName"],
product_type=instance.data["productType"],
task_name=task_name,
task_type=task_type,
project_settings=instance.context.data["project_settings"],
)
path_template_obj = anatomy.get_template_item(
"publish",
template_name
)["path"]
template_filled = path_template_obj.format_strict(template_data)
return os.path.normpath(template_filled)
@ -1011,7 +1058,7 @@ def main_cli_publish(
# NOTE: ayon-python-api does not have public api function to find
# out if is used service user. So we need to have try > except
# block.
con = ayon_api.get_server_api_connection()
con = get_server_api_connection()
try:
con.set_default_service_username(username)
except ValueError:
@ -1048,7 +1095,7 @@ def main_cli_publish(
discover_result = publish_plugins_discover()
publish_plugins = discover_result.plugins
print("\n".join(discover_result.get_report(only_errors=False)))
print(discover_result.get_report(only_errors=False))
# Error exit as soon as any error occurs.
error_format = ("Failed {plugin.__name__}: "
@ -1124,3 +1171,90 @@ def get_trait_representations(
"""
return instance.data.get(TRAIT_INSTANCE_KEY, [])
def fill_sequence_gaps_with_previous_version(
collection: str,
staging_dir: str,
instance: pyblish.plugin.Instance,
current_repre_name: str,
start_frame: int,
end_frame: int
) -> tuple[Optional[dict[str, Any]], Optional[dict[int, str]]]:
"""Tries to replace missing frames from ones from last version"""
used_version_entity, repre_file_paths = _get_last_version_files(
instance, current_repre_name
)
if repre_file_paths is None:
# issues in getting last version files
return (None, None)
prev_collection = clique.assemble(
repre_file_paths,
patterns=[clique.PATTERNS["frames"]],
minimum_items=1
)[0][0]
prev_col_format = prev_collection.format("{head}{padding}{tail}")
added_files = {}
anatomy = instance.context.data["anatomy"]
col_format = collection.format("{head}{padding}{tail}")
for frame in range(start_frame, end_frame + 1):
if frame in collection.indexes:
continue
hole_fpath = os.path.join(staging_dir, col_format % frame)
previous_version_path = prev_col_format % frame
previous_version_path = anatomy.fill_root(previous_version_path)
if not os.path.exists(previous_version_path):
log.warning(
"Missing frame should be replaced from "
f"'{previous_version_path}' but that doesn't exist. "
)
return (None, None)
log.warning(
f"Replacing missing '{hole_fpath}' with "
f"'{previous_version_path}'"
)
speedcopy.copyfile(previous_version_path, hole_fpath)
added_files[frame] = hole_fpath
return (used_version_entity, added_files)
def _get_last_version_files(
instance: pyblish.plugin.Instance,
current_repre_name: str,
) -> tuple[Optional[dict[str, Any]], Optional[list[str]]]:
product_name = instance.data["productName"]
project_name = instance.data["projectEntity"]["name"]
folder_entity = instance.data["folderEntity"]
version_entity = get_last_version_by_product_name(
project_name,
product_name,
folder_entity["id"],
fields={"id", "attrib"}
)
if not version_entity:
return None, None
matching_repres = get_representations(
project_name,
version_ids=[version_entity["id"]],
representation_names=[current_repre_name],
fields={"files"}
)
matching_repre = next(matching_repres, None)
if not matching_repre:
return None, None
repre_file_paths = [
file_info["path"]
for file_info in matching_repre["files"]
]
return (version_entity, repre_file_paths)

View file

@ -1,27 +1,50 @@
from __future__ import annotations
from typing import Optional, Any
import ayon_api
from ayon_core.settings import get_studio_settings
from ayon_core.lib.local_settings import get_ayon_username
from ayon_core.lib import DefaultKeysDict
from ayon_core.lib.local_settings import get_ayon_user_entity
def get_general_template_data(settings=None, username=None):
def get_general_template_data(
settings: Optional[dict[str, Any]] = None,
username: Optional[str] = None,
user_entity: Optional[dict[str, Any]] = None,
):
"""General template data based on system settings or machine.
Output contains formatting keys:
- 'studio[name]' - Studio name filled from system settings
- 'studio[code]' - Studio code filled from system settings
- 'user' - User's name using 'get_ayon_username'
- 'studio[name]' - Studio name filled from system settings
- 'studio[code]' - Studio code filled from system settings
- 'user[name]' - User's name
- 'user[attrib][...]' - User's attributes
- 'user[data][...]' - User's data
Args:
settings (Dict[str, Any]): Studio or project settings.
username (Optional[str]): AYON Username.
"""
user_entity (Optional[dict[str, Any]]): User entity.
"""
if not settings:
settings = get_studio_settings()
if username is None:
username = get_ayon_username()
if user_entity is None:
user_entity = get_ayon_user_entity(username)
# Use dictionary with default value for backwards compatibility
# - we did support '{user}' now it should be '{user[name]}'
user_data = DefaultKeysDict(
"name",
{
"name": user_entity["name"],
"attrib": user_entity["attrib"],
"data": user_entity["data"],
}
)
core_settings = settings["core"]
return {
@ -29,7 +52,7 @@ def get_general_template_data(settings=None, username=None):
"name": core_settings["studio_name"],
"code": core_settings["studio_code"]
},
"user": username
"user": user_data,
}
@ -150,7 +173,8 @@ def get_template_data(
task_entity=None,
host_name=None,
settings=None,
username=None
username=None,
user_entity=None,
):
"""Prepare data for templates filling from entered documents and info.
@ -173,13 +197,18 @@ def get_template_data(
host_name (Optional[str]): Used to fill '{app}' key.
settings (Union[Dict, None]): Prepared studio or project settings.
They're queried if not passed (may be slower).
username (Optional[str]): AYON Username.
username (Optional[str]): DEPRECATED AYON Username.
user_entity (Optional[dict[str, Any]): AYON user entity.
Returns:
Dict[str, Any]: Data prepared for filling workdir template.
"""
template_data = get_general_template_data(settings, username=username)
template_data = get_general_template_data(
settings,
username=username,
user_entity=user_entity,
)
template_data.update(get_project_template_data(project_entity))
if folder_entity:
template_data.update(get_folder_template_data(

View file

@ -4,6 +4,8 @@ from .path_resolving import (
get_workdir_with_workdir_data,
get_workdir,
get_last_workfile_with_version_from_paths,
get_last_workfile_from_paths,
get_last_workfile_with_version,
get_last_workfile,
@ -11,12 +13,21 @@ from .path_resolving import (
get_custom_workfile_template_by_string_context,
create_workdir_extra_folders,
get_comments_from_workfile_paths,
)
from .utils import (
should_use_last_workfile_on_launch,
should_open_workfiles_tool_on_launch,
MissingWorkdirError,
save_workfile_info,
save_current_workfile_to,
save_workfile_with_current_context,
save_next_version,
copy_workfile_to_context,
find_workfile_rootless_path,
)
from .build_workfile import BuildWorkfile
@ -37,18 +48,29 @@ __all__ = (
"get_workdir_with_workdir_data",
"get_workdir",
"get_last_workfile_with_version_from_paths",
"get_last_workfile_from_paths",
"get_last_workfile_with_version",
"get_last_workfile",
"find_workfile_rootless_path",
"get_custom_workfile_template",
"get_custom_workfile_template_by_string_context",
"create_workdir_extra_folders",
"get_comments_from_workfile_paths",
"should_use_last_workfile_on_launch",
"should_open_workfiles_tool_on_launch",
"MissingWorkdirError",
"save_workfile_info",
"save_current_workfile_to",
"save_workfile_with_current_context",
"save_next_version",
"copy_workfile_to_context",
"BuildWorkfile",
"discover_workfile_build_plugins",

View file

@ -1,8 +1,12 @@
from __future__ import annotations
import os
import re
import copy
import platform
import warnings
import typing
from typing import Optional, Dict, Any
from dataclasses import dataclass
import ayon_api
@ -15,6 +19,9 @@ from ayon_core.lib import (
from ayon_core.pipeline import version_start, Anatomy
from ayon_core.pipeline.template_data import get_template_data
if typing.TYPE_CHECKING:
from ayon_core.pipeline.anatomy import AnatomyTemplateResult
def get_workfile_template_key_from_context(
project_name: str,
@ -111,7 +118,7 @@ def get_workdir_with_workdir_data(
anatomy=None,
template_key=None,
project_settings=None
):
) -> "AnatomyTemplateResult":
"""Fill workdir path from entered data and project's anatomy.
It is possible to pass only project's name instead of project's anatomy but
@ -130,9 +137,9 @@ def get_workdir_with_workdir_data(
if 'template_key' is not passed.
Returns:
TemplateResult: Workdir path.
"""
AnatomyTemplateResult: Workdir path.
"""
if not anatomy:
anatomy = Anatomy(project_name)
@ -147,7 +154,7 @@ def get_workdir_with_workdir_data(
template_obj = anatomy.get_template_item(
"work", template_key, "directory"
)
# Output is TemplateResult object which contain useful data
# Output is AnatomyTemplateResult object which contain useful data
output = template_obj.format_strict(workdir_data)
if output:
return output.normalized()
@ -155,14 +162,14 @@ def get_workdir_with_workdir_data(
def get_workdir(
project_entity,
folder_entity,
task_entity,
host_name,
project_entity: dict[str, Any],
folder_entity: dict[str, Any],
task_entity: dict[str, Any],
host_name: str,
anatomy=None,
template_key=None,
project_settings=None
):
) -> "AnatomyTemplateResult":
"""Fill workdir path from entered data and project's anatomy.
Args:
@ -174,8 +181,8 @@ def get_workdir(
is stored under `AYON_HOST_NAME` key.
anatomy (Anatomy): Optional argument. Anatomy object is created using
project name from `project_entity`. It is preferred to pass this
argument as initialization of a new Anatomy object may be time
consuming.
argument as initialization of a new Anatomy object may be
time-consuming.
template_key (str): Key of work templates in anatomy templates. Default
value is defined in `get_workdir_with_workdir_data`.
project_settings(Dict[str, Any]): Prepared project settings for
@ -183,9 +190,9 @@ def get_workdir(
if 'template_key' is not passed.
Returns:
TemplateResult: Workdir path.
"""
AnatomyTemplateResult: Workdir path.
"""
if not anatomy:
anatomy = Anatomy(
project_entity["name"], project_entity=project_entity
@ -197,7 +204,7 @@ def get_workdir(
task_entity,
host_name,
)
# Output is TemplateResult object which contain useful data
# Output is AnatomyTemplateResult object which contain useful data
return get_workdir_with_workdir_data(
workdir_data,
anatomy.project_name,
@ -207,12 +214,141 @@ def get_workdir(
)
def get_last_workfile_with_version(
workdir, file_template, fill_data, extensions
):
@dataclass
class WorkfileParsedData:
version: Optional[int] = None
comment: Optional[str] = None
ext: Optional[str] = None
class WorkfileDataParser:
"""Parse dynamic data from existing filenames based on template.
Args:
file_template (str): Workfile file template.
data (dict[str, Any]): Data to fill the template with.
"""
def __init__(
self,
file_template: str,
data: dict[str, Any],
):
data = copy.deepcopy(data)
file_template = str(file_template)
# Use placeholders that will never be in the filename
ext_replacement = "CIextID"
version_replacement = "CIversionID"
comment_replacement = "CIcommentID"
data["version"] = version_replacement
data["comment"] = comment_replacement
for pattern, replacement in (
# Replace `.{ext}` with `{ext}` so we are sure dot is not
# at the end
(r"\.?{ext}", ext_replacement),
):
file_template = re.sub(pattern, replacement, file_template)
file_template = StringTemplate(file_template)
# Prepare template that does contain 'comment'
comment_template = re.escape(str(file_template.format_strict(data)))
# Prepare template that does not contain 'comment'
# - comment is usually marked as optional and in that case the regex
# to find the comment is different based on the filename
# - if filename contains comment then 'comment_template' will match
# - if filename does not contain comment then 'file_template' will
# match
data.pop("comment")
file_template = re.escape(str(file_template.format_strict(data)))
for src, replacement in (
(ext_replacement, r"(?P<ext>\..*)"),
(version_replacement, r"(?P<version>[0-9]+)"),
(comment_replacement, r"(?P<comment>.+?)"),
):
comment_template = comment_template.replace(src, replacement)
file_template = file_template.replace(src, replacement)
kwargs = {}
if platform.system().lower() == "windows":
kwargs["flags"] = re.IGNORECASE
# Match from beginning to end of string to be safe
self._comment_template = re.compile(f"^{comment_template}$", **kwargs)
self._file_template = re.compile(f"^{file_template}$", **kwargs)
def parse_data(self, filename: str) -> WorkfileParsedData:
"""Parse the dynamic data from a filename."""
match = self._comment_template.match(filename)
if not match:
match = self._file_template.match(filename)
if not match:
return WorkfileParsedData()
kwargs = match.groupdict()
version = kwargs.get("version")
if version is not None:
kwargs["version"] = int(version)
return WorkfileParsedData(**kwargs)
def parse_dynamic_data_from_workfile(
filename: str,
file_template: str,
template_data: dict[str, Any],
) -> WorkfileParsedData:
"""Parse dynamic data from a workfile filename.
Dynamic data are 'version', 'comment' and 'ext'.
Args:
filename (str): Workfile filename.
file_template (str): Workfile file template.
template_data (dict[str, Any]): Data to fill the template with.
Returns:
WorkfileParsedData: Dynamic data parsed from the filename.
"""
parser = WorkfileDataParser(file_template, template_data)
return parser.parse_data(filename)
def parse_dynamic_data_from_workfiles(
filenames: list[str],
file_template: str,
template_data: dict[str, Any],
) -> dict[str, WorkfileParsedData]:
"""Parse dynamic data from a workfiles filenames.
Dynamic data are 'version', 'comment' and 'ext'.
Args:
filenames (list[str]): Workfiles filenames.
file_template (str): Workfile file template.
template_data (dict[str, Any]): Data to fill the template with.
Returns:
dict[str, WorkfileParsedData]: Dynamic data parsed from the filenames
by filename.
"""
parser = WorkfileDataParser(file_template, template_data)
return {
filename: parser.parse_data(filename)
for filename in filenames
}
def get_last_workfile_with_version_from_paths(
filepaths: list[str],
file_template: str,
template_data: dict[str, Any],
extensions: set[str],
) -> tuple[Optional[str], Optional[int]]:
"""Return last workfile version.
Usign workfile template and it's filling data find most possible last
Using the workfile template and its template data find most possible last
version of workfile which was created for the context.
Functionality is fully based on knowing which keys are optional or what
@ -222,50 +358,43 @@ def get_last_workfile_with_version(
last workfile.
Args:
workdir (str): Path to dir where workfiles are stored.
filepaths (list[str]): Workfile paths.
file_template (str): Template of file name.
fill_data (Dict[str, Any]): Data for filling template.
extensions (Iterable[str]): All allowed file extensions of workfile.
template_data (Dict[str, Any]): Data for filling template.
extensions (set[str]): All allowed file extensions of workfile.
Returns:
Tuple[Union[str, None], Union[int, None]]: Last workfile with version
tuple[Optional[str], Optional[int]]: Last workfile with version
if there is any workfile otherwise None for both.
"""
if not os.path.exists(workdir):
"""
if not filepaths:
return None, None
dotted_extensions = set()
for ext in extensions:
if not ext.startswith("."):
ext = ".{}".format(ext)
dotted_extensions.add(ext)
# Fast match on extension
filenames = [
filename
for filename in os.listdir(workdir)
if os.path.splitext(filename)[-1] in dotted_extensions
]
ext = f".{ext}"
dotted_extensions.add(re.escape(ext))
# Build template without optionals, version to digits only regex
# and comment to any definable value.
# Escape extensions dot for regex
regex_exts = [
"\\" + ext
for ext in dotted_extensions
]
ext_expression = "(?:" + "|".join(regex_exts) + ")"
ext_expression = "(?:" + "|".join(dotted_extensions) + ")"
for pattern, replacement in (
# Replace `.{ext}` with `{ext}` so we are sure dot is not at the end
(r"\.?{ext}", ext_expression),
# Replace optional keys with optional content regex
(r"<.*?>", r".*?"),
# Replace `{version}` with group regex
(r"{version.*?}", r"([0-9]+)"),
(r"{comment.*?}", r".+?"),
):
file_template = re.sub(pattern, replacement, file_template)
# Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end
file_template = re.sub(r"\.?{ext}", ext_expression, file_template)
# Replace optional keys with optional content regex
file_template = re.sub(r"<.*?>", r".*?", file_template)
# Replace `{version}` with group regex
file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template)
file_template = re.sub(r"{comment.*?}", r".+?", file_template)
file_template = StringTemplate.format_strict_template(
file_template, fill_data
file_template, template_data
)
# Match with ignore case on Windows due to the Windows
@ -278,64 +407,189 @@ def get_last_workfile_with_version(
# Get highest version among existing matching files
version = None
output_filenames = []
for filename in sorted(filenames):
output_filepaths = []
for filepath in sorted(filepaths):
filename = os.path.basename(filepath)
match = re.match(file_template, filename, **kwargs)
if not match:
continue
if not match.groups():
output_filenames.append(filename)
output_filepaths.append(filename)
continue
file_version = int(match.group(1))
if version is None or file_version > version:
output_filenames[:] = []
output_filepaths.clear()
version = file_version
if file_version == version:
output_filenames.append(filename)
output_filepaths.append(filepath)
output_filename = None
if output_filenames:
if len(output_filenames) == 1:
output_filename = output_filenames[0]
else:
last_time = None
for _output_filename in output_filenames:
full_path = os.path.join(workdir, _output_filename)
mod_time = os.path.getmtime(full_path)
if last_time is None or last_time < mod_time:
output_filename = _output_filename
last_time = mod_time
# Use file modification time to use most recent file if there are
# multiple workfiles with the same version
output_filepath = None
last_time = None
for _output_filepath in output_filepaths:
mod_time = None
if os.path.exists(_output_filepath):
mod_time = os.path.getmtime(_output_filepath)
if (
last_time is None
or (mod_time is not None and last_time < mod_time)
):
output_filepath = _output_filepath
last_time = mod_time
return output_filename, version
return output_filepath, version
def get_last_workfile(
workdir, file_template, fill_data, extensions, full_path=False
):
"""Return last workfile filename.
def get_last_workfile_from_paths(
filepaths: list[str],
file_template: str,
template_data: dict[str, Any],
extensions: set[str],
) -> Optional[str]:
"""Return the last workfile filename.
Returns file with version 1 if there is not workfile yet.
Returns the file with version 1 if there is not workfile yet.
Args:
filepaths (list[str]): Paths to workfiles.
file_template (str): Template of file name.
template_data (dict[str, Any]): Data for filling template.
extensions (set[str]): All allowed file extensions of workfile.
Returns:
Optional[str]: Last workfile path.
"""
filepath, _version = get_last_workfile_with_version_from_paths(
filepaths, file_template, template_data, extensions
)
return filepath
def _filter_dir_files_by_ext(
dirpath: str,
extensions: set[str],
) -> tuple[list[str], set[str]]:
"""Filter files by extensions.
Args:
dirpath (str): List of file paths.
extensions (set[str]): Set of file extensions.
Returns:
tuple[list[str], set[str]]: Filtered list of file paths.
"""
dotted_extensions = set()
for ext in extensions:
if not ext.startswith("."):
ext = f".{ext}"
dotted_extensions.add(ext)
if not os.path.exists(dirpath):
return [], dotted_extensions
filtered_paths = [
os.path.join(dirpath, filename)
for filename in os.listdir(dirpath)
if os.path.splitext(filename)[-1] in dotted_extensions
]
return filtered_paths, dotted_extensions
def get_last_workfile_with_version(
workdir: str,
file_template: str,
template_data: dict[str, Any],
extensions: set[str],
) -> tuple[Optional[str], Optional[int]]:
"""Return last workfile version.
Using the workfile template and its filling data to find the most possible
last version of workfile which was created for the context.
Functionality is fully based on knowing which keys are optional or what
values are expected as value.
The last modified file is used if more files can be considered as
last workfile.
Args:
workdir (str): Path to dir where workfiles are stored.
file_template (str): Template of file name.
fill_data (Dict[str, Any]): Data for filling template.
extensions (Iterable[str]): All allowed file extensions of workfile.
full_path (Optional[bool]): Full path to file is returned if
set to True.
template_data (dict[str, Any]): Data for filling template.
extensions (set[str]): All allowed file extensions of workfile.
Returns:
str: Last or first workfile as filename of full path to filename.
tuple[Optional[str], Optional[int]]: Last workfile with version
if there is any workfile otherwise None for both.
"""
filename, _version = get_last_workfile_with_version(
workdir, file_template, fill_data, extensions
if not os.path.exists(workdir):
return None, None
filepaths, dotted_extensions = _filter_dir_files_by_ext(
workdir, extensions
)
if filename is None:
data = copy.deepcopy(fill_data)
return get_last_workfile_with_version_from_paths(
filepaths,
file_template,
template_data,
dotted_extensions,
)
def get_last_workfile(
workdir: str,
file_template: str,
template_data: dict[str, Any],
extensions: set[str],
full_path: bool = False,
) -> str:
"""Return last the workfile filename.
Returns first file name/path if there are not workfiles yet.
Args:
workdir (str): Path to dir where workfiles are stored.
file_template (str): Template of file name.
template_data (Dict[str, Any]): Data for filling template.
extensions (Iterable[str]): All allowed file extensions of workfile.
full_path (bool): Return full path to the file or only filename.
Returns:
str: Last or first workfile file name or path based on
'full_path' value.
"""
# TODO (iLLiCiTiT): Remove the argument 'full_path' and return only full
# path. As far as I can tell it is always called with 'full_path' set
# to 'True'.
# - it has to be 2 step operation, first warn about having it 'False', and
# then warn about having it filled.
if full_path is False:
warnings.warn(
"Argument 'full_path' will be removed and will return"
" only full path in future.",
DeprecationWarning,
)
filepaths, dotted_extensions = _filter_dir_files_by_ext(
workdir, extensions
)
filepath = get_last_workfile_from_paths(
filepaths,
file_template,
template_data,
dotted_extensions
)
if filepath is None:
data = copy.deepcopy(template_data)
data["version"] = version_start.get_versioning_start(
data["project"]["name"],
data["app"],
@ -344,15 +598,15 @@ def get_last_workfile(
product_type="workfile"
)
data.pop("comment", None)
if not data.get("ext"):
data["ext"] = extensions[0]
if data.get("ext") is None:
data["ext"] = next(iter(extensions), "")
data["ext"] = data["ext"].lstrip(".")
filename = StringTemplate.format_strict_template(file_template, data)
filepath = os.path.join(workdir, filename)
if full_path:
return os.path.normpath(os.path.join(workdir, filename))
return filename
return os.path.normpath(filepath)
return os.path.basename(filepath)
def get_custom_workfile_template(
@ -389,11 +643,10 @@ def get_custom_workfile_template(
project_settings(Dict[str, Any]): Preloaded project settings.
Returns:
str: Path to template or None if none of profiles match current
context. Existence of formatted path is not validated.
None: If no profile is matching context.
"""
Optional[str]: Path to template or None if none of profiles match
current context. Existence of formatted path is not validated.
"""
log = Logger.get_logger("CustomWorkfileResolve")
project_name = project_entity["name"]
@ -562,3 +815,112 @@ def create_workdir_extra_folders(
fullpath = os.path.join(workdir, subfolder)
if not os.path.exists(fullpath):
os.makedirs(fullpath)
class CommentMatcher:
    """Use anatomy and work file data to parse comments from filenames.

    Deprecated:
        Use 'parse_dynamic_data_from_workfiles' instead.

    Args:
        extensions (set[str]): Set of extensions.
        file_template (StringTemplate): Workfile file template.
        data (dict[str, Any]): Data to fill the template with.

    """
    def __init__(
        self,
        extensions: set[str],
        file_template: StringTemplate,
        data: dict[str, Any]
    ):
        warnings.warn(
            "Class 'CommentMatcher' is deprecated. Please"
            " use 'parse_dynamic_data_from_workfiles' instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        # Stays 'None' when the template cannot contain a comment
        #   -> 'parse_comment' then always returns 'None'.
        self._fname_regex = None
        if "{comment}" not in file_template:
            # Don't look for comment if template doesn't allow it
            return
        # Create a regex group for extensions
        any_extension = "(?:{})".format(
            "|".join(re.escape(ext.lstrip(".")) for ext in extensions)
        )
        # Use placeholders that will never be in the filename
        temp_data = copy.deepcopy(data)
        temp_data["comment"] = "<<comment>>"
        temp_data["version"] = "<<version>>"
        temp_data["ext"] = "<<ext>>"
        # Escape the fully formatted filename first, then swap the escaped
        #   placeholders for real regex fragments below.
        fname_pattern = re.escape(
            file_template.format_strict(temp_data)
        )
        # Replace comment and version with something we can match with regex
        replacements = (
            ("<<comment>>", r"(?P<comment>.+)"),
            ("<<version>>", r"[0-9]+"),
            ("<<ext>>", any_extension),
        )
        for src, dest in replacements:
            fname_pattern = fname_pattern.replace(re.escape(src), dest)
        # Match from beginning to end of string to be safe
        self._fname_regex = re.compile(f"^{fname_pattern}$")

    def parse_comment(self, filename: str) -> Optional[str]:
        """Parse the {comment} part from a filename.

        Args:
            filename (str): Workfile filename (basename, not a full path).

        Returns:
            Optional[str]: Parsed comment, or 'None' when the filename does
                not match the workfile template.

        """
        if self._fname_regex:
            match = self._fname_regex.match(filename)
            if match:
                return match.group("comment")
        return None
def get_comments_from_workfile_paths(
    filepaths: list[str],
    extensions: set[str],
    file_template: StringTemplate,
    template_data: dict[str, Any],
    current_filename: Optional[str] = None,
) -> tuple[list[str], str]:
    """DEPRECATED Collect comments from workfile filenames.

    Comments are parsed from filenames using the workfile template. When
    'current_filename' matches one of the paths, its parsed comment is
    returned as the "current comment".

    Args:
        filepaths (list[str]): List of filepaths to parse.
        extensions (set[str]): Set of file extensions.
        file_template (StringTemplate): Workfile file template.
        template_data (dict[str, Any]): Data to fill the template with.
        current_filename (Optional[str]): Filename to check for the current
            comment.

    Returns:
        tuple[list[str], str]: List of comments and the current comment.

    """
    warnings.warn(
        "Function 'get_comments_from_workfile_paths' is deprecated. Please"
        " use 'parse_dynamic_data_from_workfiles' instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    if not filepaths:
        return [], ""

    matcher = CommentMatcher(extensions, file_template, template_data)

    current_comment = ""
    found_comments = set()
    for path in filepaths:
        basename = os.path.basename(path)
        parsed = matcher.parse_comment(basename)
        if not parsed:
            continue
        found_comments.add(parsed)
        if basename == current_filename:
            current_comment = parsed
    return list(found_comments), current_comment

View file

@ -1,5 +1,30 @@
from ayon_core.lib import filter_profiles
from __future__ import annotations
import os
import platform
import uuid
import typing
from typing import Optional, Any
import ayon_api
from ayon_api.operations import OperationsSession
from ayon_core.lib import filter_profiles, get_ayon_username
from ayon_core.settings import get_project_settings
from ayon_core.host.interfaces import (
SaveWorkfileOptionalData,
ListWorkfilesOptionalData,
CopyWorkfileOptionalData,
)
from ayon_core.pipeline.version_start import get_versioning_start
from ayon_core.pipeline.template_data import get_template_data
from .path_resolving import (
get_workdir,
get_workfile_template_key,
)
if typing.TYPE_CHECKING:
from ayon_core.pipeline import Anatomy
class MissingWorkdirError(Exception):
@ -7,14 +32,61 @@ class MissingWorkdirError(Exception):
pass
def get_workfiles_info(
    workfile_path: str,
    project_name: str,
    task_id: str,
    *,
    anatomy: Optional["Anatomy"] = None,
    workfile_entities: Optional[list[dict[str, Any]]] = None,
) -> Optional[dict[str, Any]]:
    """Find workfile info entity for a workfile path.

    Entity paths are stored rootless on the server; each candidate path is
    filled with project roots before comparing. On Windows the comparison is
    case-insensitive with normalized forward slashes.

    Args:
        workfile_path (str): Workfile path.
        project_name (str): The name of the project.
        task_id (str): Task id under which is workfile created.
        anatomy (Optional[Anatomy]): Project anatomy used to get roots.
        workfile_entities (Optional[list[dict[str, Any]]]): Pre-fetched
            workfile entities related to the task.

    Returns:
        Optional[dict[str, Any]]: Workfile info entity if found, otherwise
            `None`.

    """
    if anatomy is None:
        # Local import is required: 'Anatomy' is only imported under
        #   'typing.TYPE_CHECKING' at module level, so referencing it
        #   directly would raise 'NameError' at runtime.
        from ayon_core.pipeline import Anatomy

        anatomy = Anatomy(project_name)

    if workfile_entities is None:
        workfile_entities = list(ayon_api.get_workfiles_info(
            project_name,
            task_ids=[task_id],
        ))

    # Hoist the platform check out of the loop and normalize the queried
    #   path once.
    is_windows = platform.system().lower() == "windows"
    if is_windows:
        workfile_path = workfile_path.replace("\\", "/").lower()

    for workfile_entity in workfile_entities:
        filled_path = anatomy.fill_root(workfile_entity["path"])
        if is_windows:
            filled_path = filled_path.replace("\\", "/").lower()

        if filled_path == workfile_path:
            return workfile_entity
    return None
def should_use_last_workfile_on_launch(
project_name,
host_name,
task_name,
task_type,
default_output=False,
project_settings=None,
):
project_name: str,
host_name: str,
task_name: str,
task_type: str,
default_output: bool = False,
project_settings: Optional[dict[str, Any]] = None,
) -> bool:
"""Define if host should start last version workfile if possible.
Default output is `False`. Can be overridden with environment variable
@ -124,3 +196,618 @@ def should_open_workfiles_tool_on_launch(
if output is None:
return default_output
return output
def save_workfile_info(
    project_name: str,
    task_id: str,
    rootless_path: str,
    host_name: str,
    version: Optional[int] = None,
    comment: Optional[str] = None,
    description: Optional[str] = None,
    username: Optional[str] = None,
    data: Optional[dict[str, Any]] = None,
    workfile_entities: Optional[list[dict[str, Any]]] = None,
) -> dict[str, Any]:
    """Save workfile info entity for a workfile path.

    Creates a new workfile info entity when none exists for the rootless
    path, otherwise only the changed attributes and data of the existing
    entity are sent to the server.

    Args:
        project_name (str): The name of the project.
        task_id (str): Task id under which is workfile created.
        rootless_path (str): Rootless path of the workfile.
        host_name (str): Name of host which is saving the workfile.
        version (Optional[int]): Workfile version.
        comment (Optional[str]): Workfile comment.
        description (Optional[str]): Workfile description.
        username (Optional[str]): Username of user who saves the workfile.
            If not provided, current user is used.
        data (Optional[dict[str, Any]]): Additional workfile entity data.
        workfile_entities (Optional[list[dict[str, Any]]]): Pre-fetched
            workfile entities related to task.

    Returns:
        dict[str, Any]: Workfile info entity.

    """
    if workfile_entities is None:
        workfile_entities = list(ayon_api.get_workfiles_info(
            project_name,
            task_ids=[task_id],
        ))
    # Find existing entity by exact rootless path match
    workfile_entity = next(
        (
            _ent
            for _ent in workfile_entities
            if _ent["path"] == rootless_path
        ),
        None
    )
    if username is None:
        username = get_ayon_username()
    # Collect attribute values that should be set (unset values are skipped)
    attrib = {}
    extension = os.path.splitext(rootless_path)[1]
    for key, value in (
        ("extension", extension),
        ("description", description),
    ):
        if value is not None:
            attrib[key] = value
    if data is None:
        data = {}
    if not workfile_entity:
        # No entity exists for the path yet -> create a new one
        return _create_workfile_info_entity(
            project_name,
            task_id,
            host_name,
            rootless_path,
            username,
            version,
            comment,
            attrib,
            data,
        )
    # Store host, version and comment in entity data (skip unset values)
    for key, value in (
        ("host_name", host_name),
        ("version", version),
        ("comment", comment),
    ):
        if value is not None:
            data[key] = value
    # Collect only changed 'data' values; the local entity is updated
    #   in-place so the returned entity reflects the final server state.
    changed_data = {}
    old_data = workfile_entity["data"]
    for key, value in data.items():
        if key not in old_data or old_data[key] != value:
            changed_data[key] = value
            workfile_entity["data"][key] = value
    # Same change-detection for 'attrib' values
    changed_attrib = {}
    old_attrib = workfile_entity["attrib"]
    for key, value in attrib.items():
        if key not in old_attrib or old_attrib[key] != value:
            changed_attrib[key] = value
            workfile_entity["attrib"][key] = value
    update_data = {}
    if changed_data:
        update_data["data"] = changed_data
    if changed_attrib:
        update_data["attrib"] = changed_attrib
    # Automatically fix 'createdBy' and 'updatedBy' fields
    # NOTE both fields were not automatically filled by server
    # until 1.1.3 release.
    if workfile_entity.get("createdBy") is None:
        update_data["createdBy"] = username
        workfile_entity["createdBy"] = username
    if workfile_entity.get("updatedBy") != username:
        update_data["updatedBy"] = username
        workfile_entity["updatedBy"] = username
    if not update_data:
        # Nothing changed -> avoid a needless server request
        return workfile_entity
    session = OperationsSession()
    session.update_entity(
        project_name,
        "workfile",
        workfile_entity["id"],
        update_data,
    )
    session.commit()
    return workfile_entity
def save_current_workfile_to(
    workfile_path: str,
    folder_path: str,
    task_name: str,
    *,
    version: Optional[int] = None,
    comment: Optional[str] = None,
    description: Optional[str] = None,
    prepared_data: Optional[SaveWorkfileOptionalData] = None,
) -> None:
    """Save current workfile to new location or context.

    Args:
        workfile_path (str): Destination workfile path.
        folder_path (str): Target folder path.
        task_name (str): Target task name.
        version (Optional[int]): Workfile version.
        comment (Optional[str]): Workfile comment.
        description (Optional[str]): Workfile description.
        prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data
            for speed enhancements.

    Raises:
        ValueError: When the target folder or task cannot be found.

    """
    from ayon_core.pipeline.context_tools import registered_host

    host = registered_host()
    context = host.get_current_context()
    project_name = context["project_name"]
    folder_entity = ayon_api.get_folder_by_path(
        project_name, folder_path
    )
    # Validate target entities to avoid a cryptic 'NoneType' subscription
    #   error deeper in the save logic.
    if not folder_entity:
        raise ValueError(
            f"Folder '{folder_path}' was not found"
            f" in project '{project_name}'."
        )
    task_entity = ayon_api.get_task_by_name(
        project_name, folder_entity["id"], task_name
    )
    if not task_entity:
        raise ValueError(
            f"Task '{task_name}' was not found"
            f" under folder '{folder_path}'."
        )
    host.save_workfile_with_context(
        workfile_path,
        folder_entity,
        task_entity,
        version=version,
        comment=comment,
        description=description,
        prepared_data=prepared_data,
    )
def save_workfile_with_current_context(
    workfile_path: str,
    *,
    version: Optional[int] = None,
    comment: Optional[str] = None,
    description: Optional[str] = None,
    prepared_data: Optional[SaveWorkfileOptionalData] = None,
) -> None:
    """Save current workfile to new location using current context.

    Helper function that resolves folder and task entities from the host's
    current context and passes them to 'save_workfile_with_context'.

    Args:
        workfile_path (str): Destination workfile path.
        version (Optional[int]): Workfile version.
        comment (Optional[str]): Workfile comment.
        description (Optional[str]): Workfile description.
        prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data
            for speed enhancements.

    """
    from ayon_core.pipeline.context_tools import registered_host

    host = registered_host()
    current_context = host.get_current_context()
    project_name = current_context["project_name"]

    # Resolve folder and task entities from the current context. Both may
    #   stay 'None' when the context is incomplete.
    folder_entity = None
    task_entity = None
    folder_path = current_context["folder_path"]
    task_name = current_context["task_name"]
    if folder_path:
        folder_entity = ayon_api.get_folder_by_path(
            project_name, folder_path
        )
    if folder_entity and task_name:
        task_entity = ayon_api.get_task_by_name(
            project_name, folder_entity["id"], task_name
        )

    host.save_workfile_with_context(
        workfile_path,
        folder_entity,
        task_entity,
        version=version,
        comment=comment,
        description=description,
        prepared_data=prepared_data,
    )
def save_next_version(
    version: Optional[int] = None,
    comment: Optional[str] = None,
    description: Optional[str] = None,
    *,
    prepared_data: Optional[SaveWorkfileOptionalData] = None,
) -> None:
    """Save workfile using current context, version and comment.

    Helper function to save a workfile using the current context. Last
    workfile version + 1 is used if version is not passed in.

    Args:
        version (Optional[int]): Workfile version that will be used. Last
            version + 1 is used if is not passed in.
        comment (Optional[str]): Workfile comment. Pass '""' to clear
            comment. The current workfile comment is used if it is not
            passed.
        description (Optional[str]): Workfile description.
        prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data
            for speed enhancements.

    """
    from ayon_core.pipeline import Anatomy
    from ayon_core.pipeline.context_tools import registered_host

    host = registered_host()
    current_path = host.get_current_workfile()
    if not current_path:
        current_path = None
    else:
        current_path = os.path.normpath(current_path)

    context = host.get_current_context()
    project_name = context["project_name"]
    folder_path = context["folder_path"]
    task_name = context["task_name"]
    if prepared_data is None:
        prepared_data = SaveWorkfileOptionalData()

    # Fill missing pieces of prepared data so later calls can re-use them
    project_entity = prepared_data.project_entity
    anatomy = prepared_data.anatomy
    project_settings = prepared_data.project_settings
    if project_entity is None:
        project_entity = ayon_api.get_project(project_name)
        prepared_data.project_entity = project_entity
    if project_settings is None:
        project_settings = get_project_settings(project_name)
        prepared_data.project_settings = project_settings
    if anatomy is None:
        anatomy = Anatomy(project_name, project_entity=project_entity)
        prepared_data.anatomy = anatomy

    folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
    task_entity = ayon_api.get_task_by_name(
        project_name, folder_entity["id"], task_name
    )
    template_key = get_workfile_template_key(
        project_name,
        task_entity["taskType"],
        host.name,
        project_settings=project_settings
    )
    file_template = anatomy.get_template_item("work", template_key, "file")
    template_data = get_template_data(
        project_entity,
        folder_entity,
        task_entity,
        host.name,
        project_settings,
    )
    workdir = get_workdir(
        project_entity,
        folder_entity,
        task_entity,
        host.name,
        anatomy=anatomy,
        template_key=template_key,
        project_settings=project_settings,
    )
    rootless_dir = workdir.rootless
    # Find the highest versioned workfile and the workfile matching the
    #   currently opened path - both are only needed when 'version' or
    #   'comment' should be derived.
    last_workfile = None
    current_workfile = None
    if version is None or comment is None:
        workfiles = host.list_workfiles(
            project_name, folder_entity, task_entity,
            prepared_data=ListWorkfilesOptionalData(
                project_entity=project_entity,
                anatomy=anatomy,
                project_settings=project_settings,
                template_key=template_key,
            )
        )
        for workfile in workfiles:
            if current_workfile is None and workfile.filepath == current_path:
                current_workfile = workfile
            if workfile.version is None:
                continue
            if (
                last_workfile is None
                or last_workfile.version < workfile.version
            ):
                last_workfile = workfile

    if version is None and last_workfile is not None:
        version = last_workfile.version + 1
    if version is None:
        version = get_versioning_start(
            project_name,
            host.name,
            task_name=task_entity["name"],
            task_type=task_entity["taskType"],
            product_type="workfile"
        )
    # Re-use comment from the current workfile if is not passed in
    if comment is None and current_workfile is not None:
        comment = current_workfile.comment

    template_data["version"] = version
    if comment:
        template_data["comment"] = comment

    # Resolve extension
    # - Don't fill any if the host does not have defined any -> e.g. if host
    #   uses directory instead of a file.
    # 1. Use the current file extension.
    # 2. Use the last known workfile extension.
    # 3. Use the first extensions from 'get_workfile_extensions'.
    ext = None
    workfile_extensions = host.get_workfile_extensions()
    if workfile_extensions:
        if current_path:
            ext = os.path.splitext(current_path)[1]
        elif last_workfile is not None:
            ext = os.path.splitext(last_workfile.filepath)[1]
        else:
            ext = next(iter(workfile_extensions))
        ext = ext.lstrip(".")
    if ext:
        template_data["ext"] = ext
    filename = file_template.format_strict(template_data)
    workfile_path = os.path.join(workdir, filename)
    # Build the rootless path from the same filename as the full path
    #   (previously a literal placeholder was appended instead).
    rootless_path = f"{rootless_dir}/{filename}"
    if platform.system().lower() == "windows":
        rootless_path = rootless_path.replace("\\", "/")
    prepared_data.rootless_path = rootless_path
    host.save_workfile_with_context(
        workfile_path,
        folder_entity,
        task_entity,
        version=version,
        comment=comment,
        description=description,
        prepared_data=prepared_data,
    )
def copy_workfile_to_context(
    src_workfile_path: str,
    folder_entity: dict[str, Any],
    task_entity: dict[str, Any],
    *,
    version: Optional[int] = None,
    comment: Optional[str] = None,
    description: Optional[str] = None,
    open_workfile: bool = True,
    prepared_data: Optional[CopyWorkfileOptionalData] = None,
) -> None:
    """Copy workfile to a context.

    Copy workfile to a specified folder and task. Destination path is
    calculated based on passed information.

    Args:
        src_workfile_path (str): Source workfile path.
        folder_entity (dict[str, Any]): Target folder entity.
        task_entity (dict[str, Any]): Target task entity.
        version (Optional[int]): Workfile version. Use next version if not
            passed.
        comment (Optional[str]): Workfile comment.
        description (Optional[str]): Workfile description.
        open_workfile (bool): Open the copied workfile in the host.
        prepared_data (Optional[CopyWorkfileOptionalData]): Prepared data
            for speed enhancements. Rootless path is calculated in this
            function.

    """
    from ayon_core.pipeline import Anatomy
    from ayon_core.pipeline.context_tools import registered_host

    host = registered_host()
    project_name = host.get_current_project_name()

    # 'prepared_data' attributes are accessed below -> make sure an
    #   instance exists even when the caller did not pass one.
    if prepared_data is None:
        prepared_data = CopyWorkfileOptionalData()

    # Project entity is needed for template data even when anatomy
    #   was pre-filled by the caller.
    if prepared_data.project_entity is None:
        prepared_data.project_entity = ayon_api.get_project(
            project_name
        )

    anatomy = prepared_data.anatomy
    if anatomy is None:
        anatomy = Anatomy(
            project_name, project_entity=prepared_data.project_entity
        )
        prepared_data.anatomy = anatomy

    project_settings = prepared_data.project_settings
    if project_settings is None:
        project_settings = get_project_settings(project_name)
        prepared_data.project_settings = project_settings

    if version is None:
        workfiles = host.list_workfiles(
            project_name,
            folder_entity,
            task_entity,
            prepared_data=ListWorkfilesOptionalData(
                project_entity=prepared_data.project_entity,
                anatomy=prepared_data.anatomy,
                project_settings=prepared_data.project_settings,
                workfile_entities=prepared_data.workfile_entities,
            )
        )
        # Ignore workfiles without a parsed version - they would break
        #   the 'max' comparison.
        versions = [
            workfile.version
            for workfile in workfiles
            if workfile.version is not None
        ]
        if versions:
            version = max(versions) + 1
        else:
            version = get_versioning_start(
                project_name,
                host.name,
                task_name=task_entity["name"],
                task_type=task_entity["taskType"],
                product_type="workfile"
            )

    task_type = task_entity["taskType"]
    template_key = get_workfile_template_key(
        project_name,
        task_type,
        host.name,
        project_settings=prepared_data.project_settings
    )
    template_data = get_template_data(
        prepared_data.project_entity,
        folder_entity,
        task_entity,
        host.name,
        prepared_data.project_settings,
    )
    template_data["version"] = version
    if comment:
        template_data["comment"] = comment
    # Keep source extension so the copied file type matches the original
    workfile_extensions = host.get_workfile_extensions()
    if workfile_extensions:
        ext = os.path.splitext(src_workfile_path)[1].lstrip(".")
        template_data["ext"] = ext
    workfile_template = anatomy.get_template_item(
        "work", template_key, "path"
    )
    workfile_path = workfile_template.format_strict(template_data)
    prepared_data.rootless_path = workfile_path.rootless
    host.copy_workfile(
        src_workfile_path,
        workfile_path,
        folder_entity,
        task_entity,
        version=version,
        comment=comment,
        description=description,
        open_workfile=open_workfile,
        prepared_data=prepared_data,
    )
def find_workfile_rootless_path(
    workfile_path: str,
    project_name: str,
    folder_entity: dict[str, Any],
    task_entity: dict[str, Any],
    host_name: str,
    *,
    project_entity: Optional[dict[str, Any]] = None,
    project_settings: Optional[dict[str, Any]] = None,
    anatomy: Optional["Anatomy"] = None,
) -> str:
    """Find rootless workfile path.

    The root used by the workdir template is stripped from the path and
    replaced by its '{root[<name>]}' placeholder. When that fails, a root
    is looked up from the path with anatomy. The path is returned as-is
    when no root matches.
    """
    if anatomy is None:
        from ayon_core.pipeline import Anatomy

        anatomy = Anatomy(project_name, project_entity=project_entity)

    template_key = get_workfile_template_key(
        project_name,
        task_entity["taskType"],
        host_name,
        project_settings=project_settings
    )
    dir_template = anatomy.get_template_item(
        "work", template_key, "directory"
    )
    # Format the directory template only to discover which root it used
    formatted = dir_template.format({"root": anatomy.roots})
    used_root = formatted.used_values.get("root")

    on_windows = platform.system().lower() == "windows"
    rootless_path = str(workfile_path)
    if on_windows:
        rootless_path = rootless_path.replace("\\", "/")

    root_key = root_value = None
    if used_root is not None:
        root_key, root_value = next(iter(used_root.items()))
        if on_windows:
            root_value = root_value.replace("\\", "/")

    if root_value and rootless_path.startswith(root_value):
        remainder = rootless_path[len(root_value):].lstrip("/")
        return f"{{root[{root_key}]}}/{remainder}"

    # Fallback: ask anatomy to find any matching root template
    matched, found = anatomy.find_root_template_from_path(rootless_path)
    if matched:
        return found
    return rootless_path
def _create_workfile_info_entity(
    project_name: str,
    task_id: str,
    host_name: str,
    rootless_path: str,
    username: str,
    version: Optional[int],
    comment: Optional[str],
    attrib: dict[str, Any],
    data: dict[str, Any],
) -> dict[str, Any]:
    """Create workfile entity data.

    Args:
        project_name (str): Project name.
        task_id (str): Task id.
        host_name (str): Host name.
        rootless_path (str): Rootless workfile path.
        username (str): Username.
        version (Optional[int]): Workfile version.
        comment (Optional[str]): Workfile comment.
        attrib (dict[str, Any]): Workfile entity attributes.
        data (dict[str, Any]): Workfile entity data.

    Returns:
        dict[str, Any]: Created workfile entity data.

    """
    # Store host, version and comment in entity data ('data' is mutated
    #   on purpose so the caller sees the final content)
    data["host_name"] = host_name
    data["version"] = version
    data["comment"] = comment

    workfile_info = {
        "id": uuid.uuid4().hex,
        "path": rootless_path,
        "taskId": task_id,
        "attrib": attrib,
        "data": data,
        # TODO remove 'createdBy' and 'updatedBy' fields once the server
        #   is on version 1.1.3 or above - the server fills them since then
        "createdBy": username,
        "updatedBy": username,
    }

    op_session = OperationsSession()
    op_session.create_entity(
        project_name, "workfile", workfile_info
    )
    op_session.commit()
    return workfile_info

View file

@ -16,6 +16,7 @@ import re
import collections
import copy
from abc import ABC, abstractmethod
from typing import Optional
import ayon_api
from ayon_api import (
@ -29,7 +30,7 @@ from ayon_api import (
)
from ayon_core.settings import get_project_settings
from ayon_core.host import IWorkfileHost, HostBase
from ayon_core.host import IWorkfileHost, AbstractHost
from ayon_core.lib import (
Logger,
StringTemplate,
@ -53,7 +54,6 @@ from ayon_core.pipeline.plugin_discover import (
)
from ayon_core.pipeline.create import (
discover_legacy_creator_plugins,
CreateContext,
HiddenCreator,
)
@ -126,15 +126,14 @@ class AbstractTemplateBuilder(ABC):
placeholder population.
Args:
host (Union[HostBase, ModuleType]): Implementation of host.
host (Union[AbstractHost, ModuleType]): Implementation of host.
"""
_log = None
use_legacy_creators = False
def __init__(self, host):
# Get host name
if isinstance(host, HostBase):
if isinstance(host, AbstractHost):
host_name = host.name
else:
host_name = os.environ.get("AYON_HOST_NAME")
@ -162,24 +161,24 @@ class AbstractTemplateBuilder(ABC):
@property
def project_name(self):
if isinstance(self._host, HostBase):
if isinstance(self._host, AbstractHost):
return self._host.get_current_project_name()
return os.getenv("AYON_PROJECT_NAME")
@property
def current_folder_path(self):
if isinstance(self._host, HostBase):
if isinstance(self._host, AbstractHost):
return self._host.get_current_folder_path()
return os.getenv("AYON_FOLDER_PATH")
@property
def current_task_name(self):
if isinstance(self._host, HostBase):
if isinstance(self._host, AbstractHost):
return self._host.get_current_task_name()
return os.getenv("AYON_TASK_NAME")
def get_current_context(self):
if isinstance(self._host, HostBase):
if isinstance(self._host, AbstractHost):
return self._host.get_current_context()
return {
"project_name": self.project_name,
@ -201,12 +200,6 @@ class AbstractTemplateBuilder(ABC):
)
return self._current_folder_entity
@property
def linked_folder_entities(self):
if self._linked_folder_entities is _NOT_SET:
self._linked_folder_entities = self._get_linked_folder_entities()
return self._linked_folder_entities
@property
def current_task_entity(self):
if self._current_task_entity is _NOT_SET:
@ -261,7 +254,7 @@ class AbstractTemplateBuilder(ABC):
"""Access to host implementation.
Returns:
Union[HostBase, ModuleType]: Implementation of host.
Union[AbstractHost, ModuleType]: Implementation of host.
"""
return self._host
@ -307,13 +300,16 @@ class AbstractTemplateBuilder(ABC):
self._loaders_by_name = get_loaders_by_name()
return self._loaders_by_name
def _get_linked_folder_entities(self):
def get_linked_folder_entities(self, link_type: Optional[str]):
if not link_type:
return []
project_name = self.project_name
folder_entity = self.current_folder_entity
if not folder_entity:
return []
links = get_folder_links(
project_name, folder_entity["id"], link_direction="in"
project_name,
folder_entity["id"], link_types=[link_type], link_direction="in"
)
linked_folder_ids = {
link["entityId"]
@ -323,19 +319,6 @@ class AbstractTemplateBuilder(ABC):
return list(get_folders(project_name, folder_ids=linked_folder_ids))
def _collect_legacy_creators(self):
creators_by_name = {}
for creator in discover_legacy_creator_plugins():
if not creator.enabled:
continue
creator_name = creator.__name__
if creator_name in creators_by_name:
raise KeyError(
"Duplicated creator name {} !".format(creator_name)
)
creators_by_name[creator_name] = creator
self._creators_by_name = creators_by_name
def _collect_creators(self):
self._creators_by_name = {
identifier: creator
@ -347,10 +330,7 @@ class AbstractTemplateBuilder(ABC):
def get_creators_by_name(self):
if self._creators_by_name is None:
if self.use_legacy_creators:
self._collect_legacy_creators()
else:
self._collect_creators()
self._collect_creators()
return self._creators_by_name
@ -631,7 +611,7 @@ class AbstractTemplateBuilder(ABC):
"""Open template file with registered host."""
template_preset = self.get_template_preset()
template_path = template_preset["path"]
self.host.open_file(template_path)
self.host.open_workfile(template_path)
@abstractmethod
def import_template(self, template_path):
@ -1429,10 +1409,27 @@ class PlaceholderLoadMixin(object):
builder_type_enum_items = [
{"label": "Current folder", "value": "context_folder"},
# TODO implement linked folders
# {"label": "Linked folders", "value": "linked_folders"},
{"label": "Linked folders", "value": "linked_folders"},
{"label": "All folders", "value": "all_folders"},
]
link_types = ayon_api.get_link_types(self.builder.project_name)
# Filter link types for folder to folder links
link_types_enum_items = [
{"label": link_type["name"], "value": link_type["linkType"]}
for link_type in link_types
if (
link_type["inputType"] == "folder"
and link_type["outputType"] == "folder"
)
]
if not link_types_enum_items:
link_types_enum_items.append(
{"label": "<No link types>", "value": None}
)
build_type_label = "Folder Builder Type"
build_type_help = (
"Folder Builder Type\n"
@ -1461,6 +1458,16 @@ class PlaceholderLoadMixin(object):
items=builder_type_enum_items,
tooltip=build_type_help
),
attribute_definitions.EnumDef(
"link_type",
label="Link Type",
items=link_types_enum_items,
tooltip=(
"Link Type\n"
"\nDefines what type of link will be used to"
" link the asset to the current folder."
)
),
attribute_definitions.EnumDef(
"product_type",
label="Product type",
@ -1607,10 +1614,7 @@ class PlaceholderLoadMixin(object):
builder_type = placeholder.data["builder_type"]
folder_ids = []
if builder_type == "context_folder":
folder_ids = [current_folder_entity["id"]]
elif builder_type == "all_folders":
if builder_type == "all_folders":
folder_ids = {
folder_entity["id"]
for folder_entity in get_folders(
@ -1620,6 +1624,23 @@ class PlaceholderLoadMixin(object):
)
}
elif builder_type == "context_folder":
folder_ids = [current_folder_entity["id"]]
elif builder_type == "linked_folders":
# link type from placeholder data or default to "template"
link_type = placeholder.data.get("link_type", "template")
# Get all linked folders for the current folder
if hasattr(self, "builder") and isinstance(
self.builder, AbstractTemplateBuilder):
# self.builder: AbstractTemplateBuilder
folder_ids = [
linked_folder_entity["id"]
for linked_folder_entity in (
self.builder.get_linked_folder_entities(
link_type=link_type))
]
if not folder_ids:
return []
@ -1899,8 +1920,6 @@ class PlaceholderCreateMixin(object):
pre_create_data (dict): dictionary of configuration from Creator
configuration in UI
"""
legacy_create = self.builder.use_legacy_creators
creator_name = placeholder.data["creator"]
create_variant = placeholder.data["create_variant"]
active = placeholder.data.get("active")
@ -1940,20 +1959,14 @@ class PlaceholderCreateMixin(object):
# compile product name from variant
try:
if legacy_create:
creator_instance = creator_plugin(
product_name,
folder_path
).process()
else:
creator_instance = self.builder.create_context.create(
creator_plugin.identifier,
create_variant,
folder_entity,
task_entity,
pre_create_data=pre_create_data,
active=active
)
creator_instance = self.builder.create_context.create(
creator_plugin.identifier,
create_variant,
folder_entity,
task_entity,
pre_create_data=pre_create_data,
active=active
)
except: # noqa: E722
failed = True

View file

@ -0,0 +1,630 @@
"""Plugin to create hero version from selected context."""
from __future__ import annotations
import os
import copy
import shutil
import errno
import itertools
from concurrent.futures import ThreadPoolExecutor
from typing import Any, Optional
from speedcopy import copyfile
import clique
import ayon_api
from ayon_api.operations import OperationsSession, new_version_entity
from ayon_api.utils import create_entity_id
from qtpy import QtWidgets, QtCore
from ayon_core import style
from ayon_core.pipeline import load, Anatomy
from ayon_core.lib import create_hard_link, source_hash, StringTemplate
from ayon_core.lib.file_transaction import wait_for_future_errors
from ayon_core.pipeline.publish import get_publish_template_name
from ayon_core.pipeline.template_data import get_template_data
def prepare_changes(old_entity: dict, new_entity: dict) -> dict:
    """Prepare changes dict for update entity operation.

    Top-level keys and the nested "attrib" mapping are compared
    separately; only values that differ from 'old_entity' are included.
    A missing "attrib" key on either entity is treated as empty.

    Args:
        old_entity (dict): Existing entity data from database.
        new_entity (dict): New entity data to compare against old.

    Returns:
        dict: Changes to apply to old entity to make it like new entity.

    """
    # Top-level values ("attrib" is handled separately below)
    changes = {
        key: value
        for key, value in new_entity.items()
        if key != "attrib" and value != old_entity.get(key)
    }

    # Compare "attrib" content key by key so unchanged attributes are
    #   not sent to the server. Use '.get' to survive entities without
    #   an "attrib" key.
    attrib_changes = {}
    old_attrib = old_entity.get("attrib") or {}
    for key, value in new_entity.get("attrib", {}).items():
        if value != old_attrib.get(key):
            attrib_changes[key] = value
    if attrib_changes:
        changes["attrib"] = attrib_changes
    return changes
class CreateHeroVersion(load.ProductLoaderPlugin):
"""Create hero version from selected context."""
is_multiple_contexts_compatible = False
representations = {"*"}
product_types = {"*"}
label = "Create Hero Version"
order = 36
icon = "star"
color = "#ffd700"
ignored_representation_names: list[str] = []
db_representation_context_keys = [
"project", "folder", "asset", "hierarchy", "task", "product",
"subset", "family", "representation", "username", "user", "output"
]
use_hardlinks = False
@staticmethod
def message(text: str) -> None:
"""Show message box with text."""
msgBox = QtWidgets.QMessageBox()
msgBox.setText(text)
msgBox.setStyleSheet(style.load_stylesheet())
msgBox.setWindowFlags(
msgBox.windowFlags() | QtCore.Qt.WindowType.FramelessWindowHint
)
msgBox.exec_()
    def load(self, context, name=None, namespace=None, options=None) -> None:
        """Load hero version from context (dict as in context.py).

        Collects representations, anatomy data and the publish directory
        for the selected version, runs 'create_hero_version' and reports
        the result in a blocking message box.

        Args:
            context (dict): Loader context containing "project", "product",
                "version" and "folder" entities.
            name (Optional[str]): Unused here; kept for the loader
                plugin interface.
            namespace (Optional[str]): Unused here; kept for the loader
                plugin interface.
            options (Optional[dict]): Unused here; kept for the loader
                plugin interface.

        """
        success = True
        errors = []
        # Extract project, product, version, folder from context
        project = context.get("project")
        product = context.get("product")
        version = context.get("version")
        folder = context.get("folder")
        task_entity = ayon_api.get_task_by_id(
            task_id=version.get("taskId"), project_name=project["name"]
        )
        anatomy = Anatomy(project["name"])
        version_id = version["id"]
        project_name = project["name"]
        # All representations published under the selected version
        repres = list(
            ayon_api.get_representations(
                project_name, version_ids={version_id}
            )
        )
        anatomy_data = get_template_data(
            project_entity=project,
            folder_entity=folder,
            task_entity=task_entity,
        )
        anatomy_data["product"] = {
            "name": product["name"],
            "type": product["productType"],
        }
        anatomy_data["version"] = version["version"]
        # Per-representation data mimicking publish plugin output so
        #   'create_hero_version' can process it
        published_representations = {}
        for repre in repres:
            repre_anatomy = copy.deepcopy(anatomy_data)
            if "ext" not in repre_anatomy:
                repre_anatomy["ext"] = repre.get("context", {}).get("ext", "")
            published_representations[repre["id"]] = {
                "representation": repre,
                "published_files": [f["path"] for f in repre.get("files", [])],
                "anatomy_data": repre_anatomy
            }
        # get the publish directory
        publish_template_key = get_publish_template_name(
            project_name,
            context.get("hostName"),
            product["productType"],
            task_name=anatomy_data.get("task", {}).get("name"),
            task_type=anatomy_data.get("task", {}).get("type"),
            project_settings=context.get("project_settings", {}),
            logger=self.log
        )
        published_template_obj = anatomy.get_template_item(
            "publish", publish_template_key, "directory"
        )
        published_dir = os.path.normpath(
            published_template_obj.format_strict(anatomy_data)
        )
        instance_data = {
            "productName": product["name"],
            "productType": product["productType"],
            "anatomyData": anatomy_data,
            "publishDir": published_dir,
            "published_representations": published_representations,
            "versionEntity": version,
        }
        try:
            self.create_hero_version(instance_data, anatomy, context)
        except Exception as exc:
            # Collect the error instead of raising so the user always gets
            #   a message box with the outcome
            success = False
            errors.append(str(exc))
        if success:
            self.message("Hero version created successfully.")
        else:
            self.message(
                f"Failed to create hero version:\n{chr(10).join(errors)}")
    def create_hero_version(
        self,
        instance_data: dict[str, Any],
        anatomy: Anatomy,
        context: dict[str, Any]) -> None:
        """Create hero version from instance data.

        Args:
            instance_data (dict): Instance data with keys:
                - productName (str): Name of the product.
                - productType (str): Type of the product.
                - anatomyData (dict): Anatomy data for templates.
                - publishDir (str): Directory where the product is published.
                - published_representations (dict): Published representations.
                - versionEntity (dict, optional): Source version entity.
            anatomy (Anatomy): Anatomy object for the project.
            context (dict): Context data with keys:
                - hostName (str): Name of the host application.
                - project_settings (dict): Project settings.

        Raises:
            RuntimeError: If any required data is missing or an error occurs
                during the hero version creation process.

        """
        published_repres = instance_data.get("published_representations")
        if not published_repres:
            raise RuntimeError("No published representations found.")

        project_name = anatomy.project_name
        # Resolve the hero template name from project settings profiles
        template_key = get_publish_template_name(
            project_name,
            context.get("hostName"),
            instance_data.get("productType"),
            instance_data.get("anatomyData", {}).get("task", {}).get("name"),
            instance_data.get("anatomyData", {}).get("task", {}).get("type"),
            project_settings=context.get("project_settings", {}),
            hero=True,
        )
        hero_template = anatomy.get_template_item(
            "hero", template_key, "path", default=None
        )
        if hero_template is None:
            raise RuntimeError("Project anatomy does not have hero "
                               f"template key: {template_key}")

        self.log.info(f"Hero template: {hero_template.template}")
        hero_publish_dir = self.get_publish_dir(
            instance_data, anatomy, template_key
        )
        self.log.info(f"Hero publish dir: {hero_publish_dir}")
        src_version_entity = instance_data.get("versionEntity")

        # Drop representations whose names are explicitly ignored by settings
        filtered_repre_ids = []
        for repre_id, repre_info in published_repres.items():
            repre = repre_info["representation"]
            if repre["name"].lower() in self.ignored_representation_names:
                filtered_repre_ids.append(repre_id)
        for repre_id in filtered_repre_ids:
            published_repres.pop(repre_id, None)
        if not published_repres:
            raise RuntimeError(
                "All published representations were filtered by name."
            )

        # Fall back to querying the version from representations when the
        # instance did not carry a version entity
        if src_version_entity is None:
            src_version_entity = self.version_from_representations(
                project_name, published_repres)
        if not src_version_entity:
            raise RuntimeError("Can't find origin version in database.")
        if src_version_entity["version"] == 0:
            raise RuntimeError("Version 0 cannot have hero version.")

        # Collect all destination files the publish produced (transfers and
        # hardlinks), without duplicates
        all_copied_files = []
        transfers = instance_data.get("transfers", [])
        for _src, dst in transfers:
            dst = os.path.normpath(dst)
            if dst not in all_copied_files:
                all_copied_files.append(dst)
        hardlinks = instance_data.get("hardlinks", [])
        for _src, dst in hardlinks:
            dst = os.path.normpath(dst)
            if dst not in all_copied_files:
                all_copied_files.append(dst)

        # Files that are registered on representation entities
        all_repre_file_paths = []
        for repre_info in published_repres.values():
            published_files = repre_info.get("published_files") or []
            for file_path in published_files:
                file_path = os.path.normpath(file_path)
                if file_path not in all_repre_file_paths:
                    all_repre_file_paths.append(file_path)

        publish_dir = instance_data.get("publishDir", "")
        if not publish_dir:
            raise RuntimeError(
                "publishDir is empty in instance_data, cannot continue."
            )
        instance_publish_dir = os.path.normpath(publish_dir)

        # Resource files: inside the publish dir but not part of any
        # representation are remapped into the hero publish dir
        other_file_paths_mapping = []
        for file_path in all_copied_files:
            if not file_path.startswith(instance_publish_dir):
                continue
            if file_path in all_repre_file_paths:
                continue
            dst_filepath = file_path.replace(
                instance_publish_dir, hero_publish_dir
            )
            other_file_paths_mapping.append((file_path, dst_filepath))

        old_version, old_repres = self.current_hero_ents(
            project_name, src_version_entity
        )
        # Split existing hero representations by their active state
        inactive_old_repres_by_name = {}
        old_repres_by_name = {}
        for repre in old_repres:
            low_name = repre["name"].lower()
            if repre["active"]:
                old_repres_by_name[low_name] = repre
            else:
                inactive_old_repres_by_name[low_name] = repre

        op_session = OperationsSession()
        # Reuse the existing hero version id when one already exists
        entity_id = old_version["id"] if old_version else None
        # NOTE: hero versions are stored with a negative version number
        new_hero_version = new_version_entity(
            -src_version_entity["version"],
            src_version_entity["productId"],
            task_id=src_version_entity.get("taskId"),
            data=copy.deepcopy(src_version_entity["data"]),
            attribs=copy.deepcopy(src_version_entity["attrib"]),
            entity_id=entity_id,
        )
        if old_version:
            update_data = prepare_changes(old_version, new_hero_version)
            op_session.update_entity(
                project_name, "version", old_version["id"], update_data
            )
        else:
            op_session.create_entity(project_name, "version", new_hero_version)

        # Store hero entity to instance_data
        instance_data["heroVersionEntity"] = new_hero_version

        # Active hero representations matching new ones by name are updated;
        # the remaining ones are deactivated below
        old_repres_to_replace = {}
        for repre_info in published_repres.values():
            repre = repre_info["representation"]
            repre_name_low = repre["name"].lower()
            if repre_name_low in old_repres_by_name:
                old_repres_to_replace[repre_name_low] = (
                    old_repres_by_name.pop(repre_name_low)
                )
        old_repres_to_delete = old_repres_by_name or {}

        # Move an existing hero directory aside so it can be restored when
        # integration fails
        backup_hero_publish_dir = None
        if os.path.exists(hero_publish_dir):
            base_backup_dir = f"{hero_publish_dir}.BACKUP"
            max_idx = 10
            # Find the first available backup directory name
            for idx in range(max_idx + 1):
                if idx == 0:
                    candidate_backup_dir = base_backup_dir
                else:
                    candidate_backup_dir = f"{base_backup_dir}{idx}"
                if not os.path.exists(candidate_backup_dir):
                    backup_hero_publish_dir = candidate_backup_dir
                    break
            else:
                raise AssertionError(
                    f"Backup folders are fully occupied to max index {max_idx}"
                )

            try:
                os.rename(hero_publish_dir, backup_hero_publish_dir)
            except PermissionError as e:
                raise AssertionError(
                    "Could not create hero version because it is "
                    "not possible to replace current hero files."
                ) from e

        try:
            src_to_dst_file_paths = []
            repre_integrate_data = []
            path_template_obj = anatomy.get_template_item(
                "hero", template_key, "path")
            anatomy_root = {"root": anatomy.roots}
            for repre_info in published_repres.values():
                published_files = repre_info["published_files"]
                if len(published_files) == 0:
                    continue

                anatomy_data = copy.deepcopy(repre_info["anatomy_data"])
                # Hero paths are versionless
                anatomy_data.pop("version", None)
                template_filled = path_template_obj.format_strict(anatomy_data)
                repre_context = template_filled.used_values
                for key in self.db_representation_context_keys:
                    value = anatomy_data.get(key)
                    if value is not None:
                        repre_context[key] = value

                # New representation entity derived from the source one but
                # pointing to the hero version and hero paths
                repre_entity = copy.deepcopy(repre_info["representation"])
                repre_entity.pop("id", None)
                repre_entity["versionId"] = new_hero_version["id"]
                repre_entity["context"] = repre_context
                repre_entity["attrib"] = {
                    "path": str(template_filled),
                    "template": hero_template.template
                }

                dst_paths = []
                if len(published_files) == 1:
                    dst_paths.append(str(template_filled))
                    # Resolve "{root}" formatting in the stored source path
                    mapped_published_file = StringTemplate(
                        published_files[0]).format_strict(
                        anatomy_root
                    )
                    src_to_dst_file_paths.append(
                        (mapped_published_file, template_filled)
                    )
                    self.log.info(
                        f"Single published file: {mapped_published_file} -> "
                        f"{template_filled}"
                    )
                else:
                    # Frame sequence: all published files must assemble into
                    # a single clique collection
                    collections, remainders = clique.assemble(published_files)
                    if remainders or not collections or len(collections) > 1:
                        raise RuntimeError(
                            (
                                "Integrity error. Files of published "
                                "representation is combination of frame "
                                "collections and single files."
                            )
                        )
                    src_col = collections[0]

                    # Fill a placeholder frame and split the formatted path on
                    # it to get head/tail of the destination collection
                    frame_splitter = "_-_FRAME_SPLIT_-_"
                    anatomy_data["frame"] = frame_splitter
                    _template_filled = path_template_obj.format_strict(
                        anatomy_data
                    )
                    head, tail = _template_filled.split(frame_splitter)
                    padding = anatomy.templates_obj.frame_padding

                    dst_col = clique.Collection(
                        head=head, padding=padding, tail=tail
                    )
                    dst_col.indexes.clear()
                    dst_col.indexes.update(src_col.indexes)
                    for src_file, dst_file in zip(src_col, dst_col):
                        src_file = StringTemplate(src_file).format_strict(
                            anatomy_root
                        )
                        src_to_dst_file_paths.append((src_file, dst_file))
                        dst_paths.append(dst_file)
                        self.log.info(
                            f"Collection published file: {src_file} "
                            f"-> {dst_file}"
                        )

                repre_integrate_data.append((repre_entity, dst_paths))

            # Copy files
            with ThreadPoolExecutor(max_workers=8) as executor:
                futures = [
                    executor.submit(self.copy_file, src_path, dst_path)
                    for src_path, dst_path in itertools.chain(
                        src_to_dst_file_paths, other_file_paths_mapping)
                ]
                wait_for_future_errors(executor, futures)

            # Update/create representations
            for repre_entity, dst_paths in repre_integrate_data:
                repre_files = self.get_files_info(dst_paths, anatomy)
                repre_entity["files"] = repre_files
                repre_name_low = repre_entity["name"].lower()
                if repre_name_low in old_repres_to_replace:
                    # Replace matching active hero representation
                    old_repre = old_repres_to_replace.pop(repre_name_low)
                    repre_entity["id"] = old_repre["id"]
                    update_data = prepare_changes(old_repre, repre_entity)
                    op_session.update_entity(
                        project_name,
                        "representation",
                        old_repre["id"],
                        update_data
                    )
                elif repre_name_low in inactive_old_repres_by_name:
                    # Reactivate and update an inactive representation
                    inactive_repre = inactive_old_repres_by_name.pop(
                        repre_name_low
                    )
                    repre_entity["id"] = inactive_repre["id"]
                    update_data = prepare_changes(inactive_repre, repre_entity)
                    op_session.update_entity(
                        project_name,
                        "representation",
                        inactive_repre["id"],
                        update_data
                    )
                else:
                    op_session.create_entity(
                        project_name,
                        "representation",
                        repre_entity
                    )

            # Deactivate old hero representations that were not replaced
            for repre in old_repres_to_delete.values():
                op_session.update_entity(
                    project_name,
                    "representation",
                    repre["id"],
                    {"active": False}
                )
            op_session.commit()

            # Integration succeeded, the backup is no longer needed
            if backup_hero_publish_dir is not None and os.path.exists(
                backup_hero_publish_dir
            ):
                shutil.rmtree(backup_hero_publish_dir)
        except Exception:
            # Restore the previous hero directory from backup on any failure
            if backup_hero_publish_dir is not None and os.path.exists(
                    backup_hero_publish_dir):
                if os.path.exists(hero_publish_dir):
                    shutil.rmtree(hero_publish_dir)
                os.rename(backup_hero_publish_dir, hero_publish_dir)
            raise
def get_files_info(
self, filepaths: list[str], anatomy: Anatomy) -> list[dict]:
"""Get list of file info dictionaries for given file paths.
Args:
filepaths (list[str]): List of absolute file paths.
anatomy (Anatomy): Anatomy object for the project.
Returns:
list[dict]: List of file info dictionaries.
"""
file_infos = []
for filepath in filepaths:
file_info = self.prepare_file_info(filepath, anatomy)
file_infos.append(file_info)
return file_infos
def prepare_file_info(self, path: str, anatomy: Anatomy) -> dict:
"""Prepare file info dictionary for given path.
Args:
path (str): Absolute file path.
anatomy (Anatomy): Anatomy object for the project.
Returns:
dict: File info dictionary with keys:
- id (str): Unique identifier for the file.
- name (str): Base name of the file.
- path (str): Rootless file path.
- size (int): Size of the file in bytes.
- hash (str): Hash of the file content.
- hash_type (str): Type of the hash used.
"""
return {
"id": create_entity_id(),
"name": os.path.basename(path),
"path": self.get_rootless_path(anatomy, path),
"size": os.path.getsize(path),
"hash": source_hash(path),
"hash_type": "op3",
}
@staticmethod
def get_publish_dir(
instance_data: dict,
anatomy: Anatomy,
template_key: str) -> str:
"""Get publish directory from instance data and anatomy.
Args:
instance_data (dict): Instance data with "anatomyData" key.
anatomy (Anatomy): Anatomy object for the project.
template_key (str): Template key for the hero template.
Returns:
str: Normalized publish directory path.
"""
template_data = copy.deepcopy(instance_data.get("anatomyData", {}))
if "originalBasename" in instance_data:
template_data["originalBasename"] = (
instance_data["originalBasename"]
)
template_obj = anatomy.get_template_item(
"hero", template_key, "directory"
)
return os.path.normpath(template_obj.format_strict(template_data))
@staticmethod
def get_rootless_path(anatomy: Anatomy, path: str) -> str:
"""Get rootless path from absolute path.
Args:
anatomy (Anatomy): Anatomy object for the project.
path (str): Absolute file path.
Returns:
str: Rootless file path if root found, else original path.
"""
success, rootless_path = anatomy.find_root_template_from_path(path)
if success:
path = rootless_path
return path
def copy_file(self, src_path: str, dst_path: str) -> None:
"""Copy file from src to dst with creating directories.
Args:
src_path (str): Source file path.
dst_path (str): Destination file path.
Raises:
OSError: If copying or linking fails.
"""
dirname = os.path.dirname(dst_path)
try:
os.makedirs(dirname)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
if self.use_hardlinks:
try:
create_hard_link(src_path, dst_path)
return
except OSError as exc:
if exc.errno not in [errno.EXDEV, errno.EINVAL]:
raise
copyfile(src_path, dst_path)
@staticmethod
def version_from_representations(
project_name: str, repres: dict) -> Optional[dict[str, Any]]:
"""Find version from representations.
Args:
project_name (str): Name of the project.
repres (dict): Dictionary of representations info.
Returns:
Optional[dict]: Version entity if found, else None.
"""
for repre_info in repres.values():
version = ayon_api.get_version_by_id(
project_name, repre_info["representation"]["versionId"]
)
if version:
return version
return None
    @staticmethod
    def current_hero_ents(
        project_name: str,
        version: dict[str, Any]) -> tuple[Any, list[dict[str, Any]]]:
        """Fetch existing hero version and its representations.

        Args:
            project_name (str): Name of the project.
            version (dict[str, Any]): Version entity whose product is
                checked for an existing hero version.

        Returns:
            tuple[Any, list[dict[str, Any]]]: Hero version entity (or None)
                and list of its representation entities (empty list when no
                hero version exists).

        """
        hero_version = ayon_api.get_hero_version_by_product_id(
            project_name, version["productId"]
        )
        if not hero_version:
            return None, []

        hero_repres = list(
            ayon_api.get_representations(
                project_name, version_ids={hero_version["id"]}
            )
        )
        return hero_version, hero_repres

View file

@ -6,15 +6,15 @@ from ayon_core.pipeline import load
from ayon_core.pipeline.load import LoadError
class PushToLibraryProject(load.ProductLoaderPlugin):
"""Export selected versions to folder structure from Template"""
class PushToProject(load.ProductLoaderPlugin):
"""Export selected versions to different project"""
is_multiple_contexts_compatible = True
representations = {"*"}
product_types = {"*"}
label = "Push to Library project"
label = "Push to project"
order = 35
icon = "send"
color = "#d8d8d8"
@ -28,10 +28,12 @@ class PushToLibraryProject(load.ProductLoaderPlugin):
if not filtered_contexts:
raise LoadError("Nothing to push for your selection")
if len(filtered_contexts) > 1:
raise LoadError("Please select only one item")
context = tuple(filtered_contexts)[0]
folder_ids = set(
context["folder"]["id"]
for context in filtered_contexts
)
if len(folder_ids) > 1:
raise LoadError("Please select products from single folder")
push_tool_script_path = os.path.join(
AYON_CORE_ROOT,
@ -39,14 +41,16 @@ class PushToLibraryProject(load.ProductLoaderPlugin):
"push_to_project",
"main.py"
)
project_name = filtered_contexts[0]["project"]["name"]
project_name = context["project"]["name"]
version_id = context["version"]["id"]
version_ids = {
context["version"]["id"]
for context in filtered_contexts
}
args = get_ayon_launcher_args(
"run",
push_tool_script_path,
"--project", project_name,
"--version", version_id
"--versions", ",".join(version_ids)
)
run_detached_process(args)

View file

@ -38,6 +38,8 @@ class CleanUp(pyblish.api.InstancePlugin):
"webpublisher",
"shell"
]
settings_category = "core"
exclude_families = ["clip"]
optional = True
active = True

View file

@ -13,6 +13,8 @@ class CleanUpFarm(pyblish.api.ContextPlugin):
order = pyblish.api.IntegratorOrder + 11
label = "Clean Up Farm"
settings_category = "core"
enabled = True
# Keep "filesequence" for backwards compatibility of older jobs

View file

@ -16,6 +16,7 @@ Provides:
import json
import pyblish.api
from ayon_core.lib import get_ayon_user_entity
from ayon_core.pipeline.template_data import get_template_data
@ -55,17 +56,18 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin):
if folder_entity:
task_entity = context.data["taskEntity"]
username = context.data["user"]
user_entity = get_ayon_user_entity(username)
anatomy_data = get_template_data(
project_entity,
folder_entity,
task_entity,
host_name,
project_settings
host_name=host_name,
settings=project_settings,
user_entity=user_entity,
)
anatomy_data.update(context.data.get("datetimeData") or {})
username = context.data["user"]
anatomy_data["user"] = username
# Backwards compatibility for 'username' key
anatomy_data["username"] = username

View file

@ -46,6 +46,8 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder + 0.49
label = "Collect Anatomy Instance data"
settings_category = "core"
follow_workfile_version = False
def process(self, context):

View file

@ -39,8 +39,9 @@ class CollectAudio(pyblish.api.ContextPlugin):
"blender",
"houdini",
"max",
"circuit",
"batchdelivery",
]
settings_category = "core"
audio_product_name = "audioMain"

View file

@ -32,6 +32,7 @@ class CollectCoreJobEnvVars(pyblish.api.ContextPlugin):
for key in [
"AYON_BUNDLE_NAME",
"AYON_STUDIO_BUNDLE_NAME",
"AYON_USE_STAGING",
"AYON_IN_TESTS",
# NOTE Not sure why workdir is needed?

View file

@ -23,6 +23,7 @@ class CollectFramesFixDef(
targets = ["local"]
hosts = ["nuke"]
families = ["render", "prerender"]
settings_category = "core"
rewrite_version_enable = False

View file

@ -2,11 +2,13 @@
"""
import os
import collections
import pyblish.api
from ayon_core.host import IPublishHost
from ayon_core.pipeline import registered_host
from ayon_core.pipeline.create import CreateContext
from ayon_core.pipeline.create import CreateContext, ParentFlags
class CollectFromCreateContext(pyblish.api.ContextPlugin):
@ -36,18 +38,51 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
if project_name:
context.data["projectName"] = project_name
# Separate root instances and parented instances
instances_by_parent_id = collections.defaultdict(list)
root_instances = []
for created_instance in create_context.instances:
parent_id = created_instance.parent_instance_id
if parent_id is None:
root_instances.append(created_instance)
else:
instances_by_parent_id[parent_id].append(created_instance)
# Traverse instances from top to bottom
# - All instances without an existing parent are automatically
# eliminated
filtered_instances = []
_queue = collections.deque()
_queue.append((root_instances, True))
while _queue:
created_instances, parent_is_active = _queue.popleft()
for created_instance in created_instances:
is_active = created_instance["active"]
# Use a parent's active state if parent flags defines that
if (
created_instance.parent_flags & ParentFlags.share_active
and is_active
):
is_active = parent_is_active
if is_active:
filtered_instances.append(created_instance)
children = instances_by_parent_id[created_instance.id]
if children:
_queue.append((children, is_active))
for created_instance in filtered_instances:
instance_data = created_instance.data_to_store()
if instance_data["active"]:
thumbnail_path = thumbnail_paths_by_instance_id.get(
created_instance.id
)
self.create_instance(
context,
instance_data,
created_instance.transient_data,
thumbnail_path
)
thumbnail_path = thumbnail_paths_by_instance_id.get(
created_instance.id
)
self.create_instance(
context,
instance_data,
created_instance.transient_data,
thumbnail_path
)
# Update global data to context
context.data.update(create_context.context_data_to_store())

View file

@ -8,13 +8,7 @@ This module contains a unified plugin that handles:
from pprint import pformat
import opentimelineio as otio
import pyblish.api
from ayon_core.pipeline.editorial import (
get_media_range_with_retimes,
otio_range_to_frame_range,
otio_range_with_handles,
)
def validate_otio_clip(instance, logger):
@ -74,7 +68,15 @@ class CollectOtioRanges(pyblish.api.InstancePlugin):
if not validate_otio_clip(instance, self.log):
return
import opentimelineio as otio
otio_clip = instance.data["otioClip"]
if isinstance(
otio_clip.media_reference,
otio.schema.MissingReference
):
self.log.info("Clip has no media reference")
return
# Collect timeline ranges if workfile start frame is available
if "workfileFrameStart" in instance.data:
@ -100,6 +102,11 @@ class CollectOtioRanges(pyblish.api.InstancePlugin):
def _collect_timeline_ranges(self, instance, otio_clip):
"""Collect basic timeline frame ranges."""
from ayon_core.pipeline.editorial import (
otio_range_to_frame_range,
otio_range_with_handles,
)
workfile_start = instance.data["workfileFrameStart"]
# Get timeline ranges
@ -129,6 +136,8 @@ class CollectOtioRanges(pyblish.api.InstancePlugin):
def _collect_source_ranges(self, instance, otio_clip):
"""Collect source media frame ranges."""
import opentimelineio as otio
# Get source ranges
otio_src_range = otio_clip.source_range
otio_available_range = otio_clip.available_range()
@ -178,6 +187,8 @@ class CollectOtioRanges(pyblish.api.InstancePlugin):
def _collect_retimed_ranges(self, instance, otio_clip):
"""Handle retimed clip frame ranges."""
from ayon_core.pipeline.editorial import get_media_range_with_retimes
retimed_attributes = get_media_range_with_retimes(otio_clip, 0, 0)
self.log.debug(f"Retimed attributes: {retimed_attributes}")

View file

@ -60,6 +60,13 @@ class CollectOtioSubsetResources(
# get basic variables
otio_clip = instance.data["otioClip"]
if isinstance(
otio_clip.media_reference,
otio.schema.MissingReference
):
self.log.info("Clip has no media reference")
return
otio_available_range = otio_clip.available_range()
media_fps = otio_available_range.start_time.rate
available_duration = otio_available_range.duration.value

View file

@ -13,6 +13,8 @@ import copy
import pyblish.api
from ayon_core.pipeline.publish import get_publish_template_name
class CollectResourcesPath(pyblish.api.InstancePlugin):
"""Generate directory path where the files and resources will be stored.
@ -77,16 +79,29 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
# This is for cases of Deprecated anatomy without `folder`
# TODO remove when all clients have solved this issue
template_data.update({
"frame": "FRAME_TEMP",
"representation": "TEMP"
})
template_data.update({"frame": "FRAME_TEMP", "representation": "TEMP"})
publish_templates = anatomy.get_template_item(
"publish", "default", "directory"
task_name = task_type = None
task_entity = instance.data.get("taskEntity")
if task_entity:
task_name = task_entity["name"]
task_type = task_entity["taskType"]
template_name = get_publish_template_name(
project_name=instance.context.data["projectName"],
host_name=instance.context.data["hostName"],
product_type=instance.data["productType"],
task_name=task_name,
task_type=task_type,
project_settings=instance.context.data["project_settings"],
logger=self.log,
)
publish_template = anatomy.get_template_item(
"publish", template_name, "directory")
publish_folder = os.path.normpath(
publish_templates.format_strict(template_data)
publish_template.format_strict(template_data)
)
resources_folder = os.path.join(publish_folder, "resources")

View file

@ -1,7 +1,9 @@
import ayon_api
import ayon_api.utils
from ayon_core.host import ILoadHost
from ayon_core.pipeline import registered_host
import pyblish.api
@ -27,16 +29,23 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
def process(self, context):
host = registered_host()
if host is None:
self.log.warn("No registered host.")
self.log.warning("No registered host.")
return
if not hasattr(host, "ls"):
host_name = host.__name__
self.log.warn("Host %r doesn't have ls() implemented." % host_name)
if not isinstance(host, ILoadHost):
host_name = host.name
self.log.warning(
f"Host {host_name} does not implement ILoadHost. "
"Skipping querying of loaded versions in scene."
)
return
containers = list(host.get_containers())
if not containers:
# Opt out early if there are no containers
self.log.debug("No loaded containers found in scene.")
return
loaded_versions = []
containers = list(host.ls())
repre_ids = {
container["representation"]
for container in containers
@ -61,6 +70,7 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
# QUESTION should we add same representation id when loaded multiple
# times?
loaded_versions = []
for con in containers:
repre_id = con["representation"]
repre_entity = repre_entities_by_id.get(repre_id)
@ -80,4 +90,5 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
}
loaded_versions.append(version)
self.log.debug(f"Collected {len(loaded_versions)} loaded versions.")
context.data["loadedVersions"] = loaded_versions

View file

@ -12,9 +12,10 @@ class CollectSceneVersion(pyblish.api.ContextPlugin):
"""
order = pyblish.api.CollectorOrder
label = 'Collect Scene Version'
label = "Collect Scene Version"
# configurable in Settings
hosts = ["*"]
settings_category = "core"
# in some cases of headless publishing (for example webpublisher using PS)
# you want to ignore version from name and let integrate use next version

View file

@ -55,8 +55,9 @@ class ExtractBurnin(publish.Extractor):
"max",
"blender",
"unreal",
"circuit",
"batchdelivery",
]
settings_category = "core"
optional = True

View file

@ -12,7 +12,7 @@ from ayon_core.lib import (
)
from ayon_core.lib.transcoding import (
MissingRGBAChannelsError,
convert_colorspace,
oiio_color_convert,
)
from ayon_core.lib.profiles_filtering import filter_profiles
@ -56,6 +56,8 @@ class ExtractOIIOTranscode(publish.Extractor):
label = "Transcode color spaces"
order = pyblish.api.ExtractorOrder + 0.019
settings_category = "core"
optional = True
# Supported extensions
@ -86,6 +88,14 @@ class ExtractOIIOTranscode(publish.Extractor):
new_representations = []
repres = instance.data["representations"]
for idx, repre in enumerate(list(repres)):
# target space, display and view might be defined upstream
# TODO: address https://github.com/ynput/ayon-core/pull/1268#discussion_r2156555474
# Implement upstream logic to handle target_colorspace,
# target_display, target_view in other DCCs
target_colorspace = False
target_display = instance.data.get("colorspaceDisplay")
target_view = instance.data.get("colorspaceView")
self.log.debug("repre ({}): `{}`".format(idx + 1, repre["name"]))
if not self._repre_is_valid(repre):
continue
@ -95,6 +105,8 @@ class ExtractOIIOTranscode(publish.Extractor):
colorspace_data = repre["colorspaceData"]
source_colorspace = colorspace_data["colorspace"]
source_display = colorspace_data.get("display")
source_view = colorspace_data.get("view")
config_path = colorspace_data.get("config", {}).get("path")
if not config_path or not os.path.exists(config_path):
self.log.warning("Config file doesn't exist, skipping")
@ -132,7 +144,6 @@ class ExtractOIIOTranscode(publish.Extractor):
transcoding_type = output_def["transcoding_type"]
target_colorspace = view = display = None
# NOTE: we use colorspace_data as the fallback values for
# the target colorspace.
if transcoding_type == "colorspace":
@ -144,18 +155,20 @@ class ExtractOIIOTranscode(publish.Extractor):
colorspace_data.get("colorspace"))
elif transcoding_type == "display_view":
display_view = output_def["display_view"]
view = display_view["view"] or colorspace_data.get("view")
display = (
target_view = (
display_view["view"]
or colorspace_data.get("view"))
target_display = (
display_view["display"]
or colorspace_data.get("display")
)
# both could be already collected by DCC,
# but could be overwritten when transcoding
if view:
new_repre["colorspaceData"]["view"] = view
if display:
new_repre["colorspaceData"]["display"] = display
if target_view:
new_repre["colorspaceData"]["view"] = target_view
if target_display:
new_repre["colorspaceData"]["display"] = target_display
if target_colorspace:
new_repre["colorspaceData"]["colorspace"] = \
target_colorspace
@ -175,16 +188,18 @@ class ExtractOIIOTranscode(publish.Extractor):
new_staging_dir,
output_extension)
try:
convert_colorspace(
input_path,
output_path,
config_path,
source_colorspace,
target_colorspace,
view,
display,
additional_command_args,
self.log
oiio_color_convert(
input_path=input_path,
output_path=output_path,
config_path=config_path,
source_colorspace=source_colorspace,
target_colorspace=target_colorspace,
target_display=target_display,
target_view=target_view,
source_display=source_display,
source_view=source_view,
additional_command_args=additional_command_args,
logger=self.log
)
except MissingRGBAChannelsError as exc:
missing_rgba_review_channels = True

View file

@ -158,6 +158,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
"""
# Not all hosts can import this module.
import opentimelineio as otio
from ayon_core.pipeline.editorial import OTIO_EPSILON
output = []
# go trough all audio tracks
@ -172,6 +173,14 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
clip_start = otio_clip.source_range.start_time
fps = clip_start.rate
conformed_av_start = media_av_start.rescaled_to(fps)
# Avoid rounding issue on media available range.
if clip_start.almost_equal(
conformed_av_start,
OTIO_EPSILON
):
conformed_av_start = clip_start
# ffmpeg ignores embedded tc
start = clip_start - conformed_av_start
duration = otio_clip.source_range.duration

View file

@ -23,7 +23,10 @@ from ayon_core.lib import (
get_ffmpeg_tool_args,
run_subprocess,
)
from ayon_core.pipeline import publish
from ayon_core.pipeline import (
KnownPublishError,
publish,
)
class ExtractOTIOReview(
@ -97,8 +100,11 @@ class ExtractOTIOReview(
# skip instance if no reviewable data available
if (
not isinstance(otio_review_clips[0], otio.schema.Clip)
and len(otio_review_clips) == 1
len(otio_review_clips) == 1
and (
not isinstance(otio_review_clips[0], otio.schema.Clip)
or otio_review_clips[0].media_reference.is_missing_reference
)
):
self.log.warning(
"Instance `{}` has nothing to process".format(instance))
@ -124,7 +130,7 @@ class ExtractOTIOReview(
# NOTE it looks like it is set only in hiero integration
res_data = {"width": self.to_width, "height": self.to_height}
for key in res_data:
for meta_prefix in ("ayon.source.", "openpype.source."):
for meta_prefix in ("ayon.source", "openpype.source"):
meta_key = f"{meta_prefix}.{key}"
value = media_metadata.get(meta_key)
if value is not None:
@ -248,7 +254,7 @@ class ExtractOTIOReview(
# Single video way.
# Extraction via FFmpeg.
else:
elif hasattr(media_ref, "target_url"):
path = media_ref.target_url
# Set extract range from 0 (FFmpeg ignores
# embedded timecode).
@ -352,6 +358,7 @@ class ExtractOTIOReview(
import opentimelineio as otio
from ayon_core.pipeline.editorial import (
trim_media_range,
OTIO_EPSILON,
)
def _round_to_frame(rational_time):
@ -370,6 +377,13 @@ class ExtractOTIOReview(
avl_start = avl_range.start_time
# Avoid rounding issue on media available range.
if start.almost_equal(
avl_start,
OTIO_EPSILON
):
avl_start = start
# An additional gap is required before the available
# range to conform source start point and head handles.
if start < avl_start:
@ -388,6 +402,14 @@ class ExtractOTIOReview(
# (media duration is shorter then clip requirement).
end_point = start + duration
avl_end_point = avl_range.end_time_exclusive()
# Avoid rounding issue on media available range.
if end_point.almost_equal(
avl_end_point,
OTIO_EPSILON
):
avl_end_point = end_point
if end_point > avl_end_point:
gap_duration = end_point - avl_end_point
duration -= gap_duration
@ -444,7 +466,7 @@ class ExtractOTIOReview(
command = get_ffmpeg_tool_args("ffmpeg")
input_extension = None
if sequence:
if sequence is not None:
input_dir, collection, sequence_fps = sequence
in_frame_start = min(collection.indexes)
@ -478,7 +500,7 @@ class ExtractOTIOReview(
"-i", input_path
])
elif video:
elif video is not None:
video_path, otio_range = video
frame_start = otio_range.start_time.value
input_fps = otio_range.start_time.rate
@ -496,7 +518,7 @@ class ExtractOTIOReview(
"-i", video_path
])
elif gap:
elif gap is not None:
sec_duration = frames_to_seconds(gap, self.actual_fps)
# form command for rendering gap files
@ -510,6 +532,9 @@ class ExtractOTIOReview(
"-tune", "stillimage"
])
else:
raise KnownPublishError("Sequence, video or gap is required.")
if video or sequence:
command.extend([
"-vf", f"scale={self.to_width}:{self.to_height}:flags=lanczos",

View file

@ -13,14 +13,15 @@ import clique
import speedcopy
import pyblish.api
from ayon_api import get_last_version_by_product_name, get_representations
from ayon_core.lib import (
get_ffmpeg_tool_args,
filter_profiles,
path_to_subprocess_arg,
run_subprocess,
)
from ayon_core.pipeline.publish.lib import (
fill_sequence_gaps_with_previous_version
)
from ayon_core.lib.transcoding import (
IMAGE_EXTENSIONS,
get_ffprobe_streams,
@ -130,7 +131,7 @@ def frame_to_timecode(frame: int, fps: float) -> str:
class ExtractReview(pyblish.api.InstancePlugin):
"""Extracting Review mov file for Ftrack
"""Extracting Reviewable medias
Compulsory attribute of representation is tags list with "review",
otherwise the representation is ignored.
@ -161,9 +162,11 @@ class ExtractReview(pyblish.api.InstancePlugin):
"aftereffects",
"flame",
"unreal",
"circuit",
"batchdelivery",
"photoshop"
]
settings_category = "core"
# Supported extensions
image_exts = {"exr", "jpg", "jpeg", "png", "dpx", "tga", "tiff", "tif"}
video_exts = {"mov", "mp4"}
@ -202,15 +205,21 @@ class ExtractReview(pyblish.api.InstancePlugin):
def _get_outputs_for_instance(self, instance):
host_name = instance.context.data["hostName"]
product_type = instance.data["productType"]
task_type = None
task_entity = instance.data.get("taskEntity")
if task_entity:
task_type = task_entity["taskType"]
self.log.debug("Host: \"{}\"".format(host_name))
self.log.debug("Product type: \"{}\"".format(product_type))
self.log.debug("Task type: \"{}\"".format(task_type))
profile = filter_profiles(
self.profiles,
{
"hosts": host_name,
"product_types": product_type,
"task_types": task_type
},
logger=self.log)
if not profile:
@ -500,10 +509,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
resolution_width=temp_data.resolution_width,
resolution_height=temp_data.resolution_height,
extension=temp_data.input_ext,
temp_data=temp_data
temp_data=temp_data,
)
elif fill_missing_frames == "previous_version":
new_frame_files = self.fill_sequence_gaps_with_previous(
fill_output = fill_sequence_gaps_with_previous_version(
collection=collection,
staging_dir=new_repre["stagingDir"],
instance=instance,
@ -511,8 +520,13 @@ class ExtractReview(pyblish.api.InstancePlugin):
start_frame=temp_data.frame_start,
end_frame=temp_data.frame_end,
)
_, new_frame_files = fill_output
# fallback to original workflow
if new_frame_files is None:
self.log.warning(
"Falling back to filling from currently "
"last rendered."
)
new_frame_files = (
self.fill_sequence_gaps_from_existing(
collection=collection,
@ -604,8 +618,6 @@ class ExtractReview(pyblish.api.InstancePlugin):
"name": "{}_{}".format(output_name, output_ext),
"outputName": output_name,
"outputDef": output_def,
"frameStartFtrack": temp_data.output_frame_start,
"frameEndFtrack": temp_data.output_frame_end,
"ffmpeg_cmd": subprcs_cmd
})
@ -1042,92 +1054,6 @@ class ExtractReview(pyblish.api.InstancePlugin):
return all_args
def fill_sequence_gaps_with_previous(
self,
collection: str,
staging_dir: str,
instance: pyblish.plugin.Instance,
current_repre_name: str,
start_frame: int,
end_frame: int
) -> Optional[dict[int, str]]:
"""Tries to replace missing frames from ones from last version"""
repre_file_paths = self._get_last_version_files(
instance, current_repre_name)
if repre_file_paths is None:
# issues in getting last version files, falling back
return None
prev_collection = clique.assemble(
repre_file_paths,
patterns=[clique.PATTERNS["frames"]],
minimum_items=1
)[0][0]
prev_col_format = prev_collection.format("{head}{padding}{tail}")
added_files = {}
anatomy = instance.context.data["anatomy"]
col_format = collection.format("{head}{padding}{tail}")
for frame in range(start_frame, end_frame + 1):
if frame in collection.indexes:
continue
hole_fpath = os.path.join(staging_dir, col_format % frame)
previous_version_path = prev_col_format % frame
previous_version_path = anatomy.fill_root(previous_version_path)
if not os.path.exists(previous_version_path):
self.log.warning(
"Missing frame should be replaced from "
f"'{previous_version_path}' but that doesn't exist. "
"Falling back to filling from currently last rendered."
)
return None
self.log.warning(
f"Replacing missing '{hole_fpath}' with "
f"'{previous_version_path}'"
)
speedcopy.copyfile(previous_version_path, hole_fpath)
added_files[frame] = hole_fpath
return added_files
def _get_last_version_files(
self,
instance: pyblish.plugin.Instance,
current_repre_name: str,
):
product_name = instance.data["productName"]
project_name = instance.data["projectEntity"]["name"]
folder_entity = instance.data["folderEntity"]
version_entity = get_last_version_by_product_name(
project_name,
product_name,
folder_entity["id"],
fields={"id"}
)
if not version_entity:
return None
matching_repres = get_representations(
project_name,
version_ids=[version_entity["id"]],
representation_names=[current_repre_name],
fields={"files"}
)
if not matching_repres:
return None
matching_repre = list(matching_repres)[0]
repre_file_paths = [
file_info["path"]
for file_info in matching_repre["files"]
]
return repre_file_paths
def fill_sequence_gaps_with_blanks(
self,
collection: str,
@ -1376,15 +1302,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
return audio_in_args, audio_filters, audio_out_args
for audio in audio_inputs:
# NOTE modified, always was expected "frameStartFtrack" which is
# STRANGE?!!! There should be different key, right?
# TODO use different frame start!
offset_seconds = 0
frame_start_ftrack = instance.data.get("frameStartFtrack")
if frame_start_ftrack is not None:
offset_frames = frame_start_ftrack - audio["offset"]
offset_seconds = offset_frames / temp_data.fps
if offset_seconds > 0:
audio_in_args.append(
"-ss {}".format(offset_seconds)

View file

@ -6,6 +6,7 @@ import re
import pyblish.api
from ayon_core.lib import (
get_oiio_tool_args,
get_ffmpeg_tool_args,
get_ffprobe_data,
@ -15,7 +16,11 @@ from ayon_core.lib import (
path_to_subprocess_arg,
run_subprocess,
)
from ayon_core.lib.transcoding import convert_colorspace
from ayon_core.lib.transcoding import (
oiio_color_convert,
get_oiio_input_and_channel_args,
get_oiio_info_for_input,
)
from ayon_core.lib.transcoding import VIDEO_EXTENSIONS, IMAGE_EXTENSIONS
@ -38,10 +43,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
"substancedesigner",
"nuke",
"aftereffects",
"photoshop",
"unreal",
"houdini",
"circuit",
"batchdelivery",
]
settings_category = "core"
enabled = False
integrate_thumbnail = False
@ -208,6 +215,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
full_output_path = os.path.join(dst_staging, jpeg_file)
colorspace_data = repre.get("colorspaceData")
# NOTE We should find out what is happening here. Why don't we
# use oiiotool all the time if it is available? Only possible
# reason might be that video files should be converted using
# ffmpeg, but other then that, we should use oiio all the time.
# - We should also probably get rid of the ffmpeg settings...
# only use OIIO if it is supported and representation has
# colorspace data
if oiio_supported and colorspace_data:
@ -217,7 +230,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
)
# If the input can read by OIIO then use OIIO method for
# conversion otherwise use ffmpeg
repre_thumb_created = self._create_thumbnail_oiio(
repre_thumb_created = self._create_colorspace_thumbnail(
full_input_path,
full_output_path,
colorspace_data
@ -227,17 +240,16 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
# oiiotool isn't available or representation is not having
# colorspace data
if not repre_thumb_created:
if oiio_supported:
self.log.debug(
"Converting with FFMPEG because input"
" can't be read by OIIO."
)
repre_thumb_created = self._create_thumbnail_ffmpeg(
full_input_path, full_output_path
)
# Skip representation and try next one if wasn't created
# Skip representation and try next one if wasn't created
if not repre_thumb_created and oiio_supported:
repre_thumb_created = self._create_thumbnail_oiio(
full_input_path, full_output_path
)
if not repre_thumb_created:
continue
@ -380,7 +392,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
return ext in IMAGE_EXTENSIONS or ext in VIDEO_EXTENSIONS
def _create_thumbnail_oiio(
def _create_colorspace_thumbnail(
self,
src_path,
dst_path,
@ -431,13 +443,15 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
oiio_default_view = display_and_view["view"]
try:
convert_colorspace(
oiio_color_convert(
src_path,
dst_path,
colorspace_data["config"]["path"],
colorspace_data["colorspace"],
display=repre_display or oiio_default_display,
view=repre_view or oiio_default_view,
source_display=colorspace_data.get("display"),
source_view=colorspace_data.get("view"),
target_display=repre_display or oiio_default_display,
target_view=repre_view or oiio_default_view,
target_colorspace=oiio_default_colorspace,
additional_command_args=resolution_arg,
logger=self.log,
@ -451,9 +465,50 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
return True
def _create_thumbnail_oiio(self, src_path, dst_path):
self.log.debug(f"Extracting thumbnail with OIIO: {dst_path}")
try:
resolution_arg = self._get_resolution_arg("oiiotool", src_path)
except RuntimeError:
self.log.warning(
"Failed to create thumbnail using oiio", exc_info=True
)
return False
input_info = get_oiio_info_for_input(src_path, logger=self.log)
input_arg, channels_arg = get_oiio_input_and_channel_args(input_info)
oiio_cmd = get_oiio_tool_args(
"oiiotool",
input_arg, src_path,
# Tell oiiotool which channels should be put to top stack
# (and output)
"--ch", channels_arg,
# Use first subimage
"--subimage", "0"
)
oiio_cmd.extend(resolution_arg)
oiio_cmd.extend(("-o", dst_path))
self.log.debug("Running: {}".format(" ".join(oiio_cmd)))
try:
run_subprocess(oiio_cmd, logger=self.log)
return True
except Exception:
self.log.warning(
"Failed to create thumbnail using oiiotool",
exc_info=True
)
return False
def _create_thumbnail_ffmpeg(self, src_path, dst_path):
self.log.debug("Extracting thumbnail with FFMPEG: {}".format(dst_path))
resolution_arg = self._get_resolution_arg("ffmpeg", src_path)
try:
resolution_arg = self._get_resolution_arg("ffmpeg", src_path)
except RuntimeError:
self.log.warning(
"Failed to create thumbnail using ffmpeg", exc_info=True
)
return False
ffmpeg_path_args = get_ffmpeg_tool_args("ffmpeg")
ffmpeg_args = self.ffmpeg_args or {}

View file

@ -256,6 +256,7 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
label = "Collect USD Layer Contributions (Asset/Shot)"
families = ["usd"]
enabled = True
settings_category = "core"
# A contribution defines a contribution into a (department) layer which
# will get layered into the target product, usually the asset or shot.
@ -633,6 +634,8 @@ class ExtractUSDLayerContribution(publish.Extractor):
label = "Extract USD Layer Contributions (Asset/Shot)"
order = pyblish.api.ExtractorOrder + 0.45
settings_category = "core"
use_ayon_entity_uri = False
def process(self, instance):
@ -795,6 +798,8 @@ class ExtractUSDAssetContribution(publish.Extractor):
label = "Extract USD Asset/Shot Contributions"
order = ExtractUSDLayerContribution.order + 0.01
settings_category = "core"
use_ayon_entity_uri = False
def process(self, instance):

View file

@ -121,7 +121,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"version",
"representation",
"username",
"user",
"output",
# OpenPype keys - should be removed
"asset", # folder[name]
@ -796,6 +795,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
if value is not None:
repre_context[key] = value
# Keep only username
# NOTE This is to avoid storing all user attributes and data
# to representation
if "user" not in repre_context:
repre_context["user"] = {
"name": template_data["user"]["name"]
}
# Use previous representation's id if there is a name match
existing = existing_repres_by_name.get(repre["name"].lower())
repre_id = None

View file

@ -61,6 +61,8 @@ class IntegrateHeroVersion(
# Must happen after IntegrateNew
order = pyblish.api.IntegratorOrder + 0.1
settings_category = "core"
optional = True
active = True
@ -87,7 +89,6 @@ class IntegrateHeroVersion(
"family",
"representation",
"username",
"user",
"output"
]
# QUESTION/TODO this process should happen on server if crashed due to
@ -362,6 +363,14 @@ class IntegrateHeroVersion(
if value is not None:
repre_context[key] = value
# Keep only username
# NOTE This is to avoid storing all user attributes and data
# to representation
if "user" not in repre_context:
repre_context["user"] = {
"name": anatomy_data["user"]["name"]
}
# Prepare new repre
repre_entity = copy.deepcopy(repre_info["representation"])
repre_entity.pop("id", None)

View file

@ -105,7 +105,7 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin):
created links by its type
"""
if workfile_instance is None:
self.log.warn("No workfile in this publish session.")
self.log.warning("No workfile in this publish session.")
return
workfile_version_id = workfile_instance.data["versionEntity"]["id"]

View file

@ -24,6 +24,8 @@ class IntegrateProductGroup(pyblish.api.InstancePlugin):
order = pyblish.api.IntegratorOrder - 0.1
label = "Product Group"
settings_category = "core"
# Attributes set by settings
product_grouping_profiles = None

View file

@ -22,6 +22,8 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin):
label = "Override Integrate Thumbnail Representations"
order = pyblish.api.IntegratorOrder - 0.1
settings_category = "core"
integrate_profiles = []
def process(self, instance):

View file

@ -31,6 +31,7 @@ class ValidateOutdatedContainers(
label = "Validate Outdated Containers"
order = pyblish.api.ValidatorOrder
settings_category = "core"
optional = True
actions = [ShowInventory]

View file

@ -37,7 +37,7 @@ class ValidateCurrentSaveFile(pyblish.api.ContextPlugin):
label = "Validate File Saved"
order = pyblish.api.ValidatorOrder - 0.1
hosts = ["fusion", "houdini", "max", "maya", "nuke", "substancepainter",
"cinema4d", "silhouette", "gaffer", "blender"]
"cinema4d", "silhouette", "gaffer", "blender", "loki"]
actions = [SaveByVersionUpAction, ShowWorkfilesAction]
def process(self, context):

View file

@ -14,6 +14,8 @@ class ValidateIntent(pyblish.api.ContextPlugin):
order = pyblish.api.ValidatorOrder
label = "Validate Intent"
settings_category = "core"
enabled = False
# Can be modified by settings

View file

@ -34,7 +34,11 @@ class ValidateProductUniqueness(pyblish.api.ContextPlugin):
for instance in context:
# Ignore disabled instances
if not instance.data.get('publish', True):
if not instance.data.get("publish", True):
continue
# Ignore instances not marked to integrate
if not instance.data.get("integrate", True):
continue
# Ignore instance without folder data

View file

@ -17,6 +17,7 @@ class ValidateVersion(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin):
order = pyblish.api.ValidatorOrder
label = "Validate Version"
settings_category = "core"
optional = False
active = True

View file

@ -6,7 +6,12 @@ import json
import tempfile
from string import Formatter
import opentimelineio_contrib.adapters.ffmpeg_burnins as ffmpeg_burnins
try:
from otio_burnins_adapter import ffmpeg_burnins
except ImportError:
import opentimelineio_contrib.adapters.ffmpeg_burnins as ffmpeg_burnins
from PIL import ImageFont
from ayon_core.lib import (
get_ffmpeg_tool_args,
get_ffmpeg_codec_args,
@ -36,6 +41,39 @@ TIMECODE_KEY = "{timecode}"
SOURCE_TIMECODE_KEY = "{source_timecode}"
def _drawtext(align, resolution, text, options):
"""
:rtype: {'x': int, 'y': int}
"""
x_pos = "0"
if align in (ffmpeg_burnins.TOP_CENTERED, ffmpeg_burnins.BOTTOM_CENTERED):
x_pos = "w/2-tw/2"
elif align in (ffmpeg_burnins.TOP_RIGHT, ffmpeg_burnins.BOTTOM_RIGHT):
ifont = ImageFont.truetype(options["font"], options["font_size"])
if hasattr(ifont, "getbbox"):
left, top, right, bottom = ifont.getbbox(text)
box_size = right - left, bottom - top
else:
box_size = ifont.getsize(text)
x_pos = resolution[0] - (box_size[0] + options["x_offset"])
elif align in (ffmpeg_burnins.TOP_LEFT, ffmpeg_burnins.BOTTOM_LEFT):
x_pos = options["x_offset"]
if align in (
ffmpeg_burnins.TOP_CENTERED,
ffmpeg_burnins.TOP_RIGHT,
ffmpeg_burnins.TOP_LEFT
):
y_pos = "%d" % options["y_offset"]
else:
y_pos = "h-text_h-%d" % (options["y_offset"])
return {"x": x_pos, "y": y_pos}
ffmpeg_burnins._drawtext = _drawtext
def _get_ffprobe_data(source):
"""Reimplemented from otio burnins to be able use full path to ffprobe
:param str source: source media file

View file

@ -4,6 +4,8 @@ import logging
import collections
import copy
import time
import warnings
from urllib.parse import urlencode
import ayon_api
@ -35,6 +37,37 @@ class CacheItem:
return time.time() > self._outdate_time
def _get_addons_settings(
studio_bundle_name,
project_bundle_name,
variant,
project_name=None,
):
"""Modified version of `ayon_api.get_addons_settings` function."""
query_values = {
key: value
for key, value in (
("bundle_name", studio_bundle_name),
("variant", variant),
("project_name", project_name),
)
if value
}
if project_bundle_name != studio_bundle_name:
query_values["project_bundle_name"] = project_bundle_name
site_id = ayon_api.get_site_id()
if site_id:
query_values["site_id"] = site_id
response = ayon_api.get(f"settings?{urlencode(query_values)}")
response.raise_for_status()
return {
addon["name"]: addon["settings"]
for addon in response.data["addons"]
}
class _AyonSettingsCache:
use_bundles = None
variant = None
@ -67,53 +100,70 @@ class _AyonSettingsCache:
return _AyonSettingsCache.variant
@classmethod
def _get_bundle_name(cls):
def _get_studio_bundle_name(cls):
bundle_name = os.environ.get("AYON_STUDIO_BUNDLE_NAME")
if bundle_name:
return bundle_name
return os.environ["AYON_BUNDLE_NAME"]
@classmethod
def _get_project_bundle_name(cls):
return os.environ["AYON_BUNDLE_NAME"]
@classmethod
def get_value_by_project(cls, project_name):
cache_item = _AyonSettingsCache.cache_by_project_name[project_name]
if cache_item.is_outdated:
if cls._use_bundles():
value = ayon_api.get_addons_settings(
bundle_name=cls._get_bundle_name(),
cache_item.update_value(
_get_addons_settings(
studio_bundle_name=cls._get_studio_bundle_name(),
project_bundle_name=cls._get_project_bundle_name(),
project_name=project_name,
variant=cls._get_variant()
variant=cls._get_variant(),
)
else:
value = ayon_api.get_addons_settings(project_name)
cache_item.update_value(value)
)
return cache_item.get_value()
@classmethod
def _get_addon_versions_from_bundle(cls):
expected_bundle = cls._get_bundle_name()
studio_bundle_name = cls._get_studio_bundle_name()
project_bundle_name = cls._get_project_bundle_name()
bundles = ayon_api.get_bundles()["bundles"]
bundle = next(
project_bundle = next(
(
bundle
for bundle in bundles
if bundle["name"] == expected_bundle
if bundle["name"] == project_bundle_name
),
None
)
if bundle is not None:
return bundle["addons"]
studio_bundle = None
if studio_bundle_name and project_bundle_name != studio_bundle_name:
studio_bundle = next(
(
bundle
for bundle in bundles
if bundle["name"] == studio_bundle_name
),
None
)
if studio_bundle and project_bundle:
addons = copy.deepcopy(studio_bundle["addons"])
addons.update(project_bundle["addons"])
project_bundle["addons"] = addons
if project_bundle is not None:
return project_bundle["addons"]
return {}
@classmethod
def get_addon_versions(cls):
cache_item = _AyonSettingsCache.addon_versions
if cache_item.is_outdated:
if cls._use_bundles():
addons = cls._get_addon_versions_from_bundle()
else:
settings_data = ayon_api.get_addons_settings(
only_values=False,
variant=cls._get_variant()
)
addons = settings_data["versions"]
cache_item.update_value(addons)
cache_item.update_value(
cls._get_addon_versions_from_bundle()
)
return cache_item.get_value()
@ -175,17 +225,22 @@ def get_project_environments(project_name, project_settings=None):
def get_current_project_settings():
"""Project settings for current context project.
"""DEPRECATE Project settings for current context project.
Function requires access to pipeline context which is in
'ayon_core.pipeline'.
Returns:
dict[str, Any]: Project settings for current context project.
Project name should be stored in environment variable `AYON_PROJECT_NAME`.
This function should be used only in host context where environment
variable must be set and should not happen that any part of process will
change the value of the environment variable.
"""
project_name = os.environ.get("AYON_PROJECT_NAME")
if not project_name:
raise ValueError(
"Missing context project in environment"
" variable `AYON_PROJECT_NAME`."
)
return get_project_settings(project_name)
warnings.warn(
"Used deprecated function 'get_current_project_settings' in"
" 'ayon_core.settings'. The function was moved to"
" 'ayon_core.pipeline.context_tools'.",
DeprecationWarning,
stacklevel=2
)
from ayon_core.pipeline.context_tools import get_current_project_settings
return get_current_project_settings()

View file

@ -97,6 +97,7 @@
},
"publisher": {
"error": "#AA5050",
"disabled": "#5b6779",
"crash": "#FF6432",
"success": "#458056",
"warning": "#ffc671",

View file

@ -1153,6 +1153,10 @@ PixmapButton:disabled {
color: {color:publisher:error};
}
#ListViewProductName[state="disabled"] {
color: {color:publisher:disabled};
}
#PublishInfoFrame {
background: {color:bg};
border-radius: 0.3em;

View file

@ -892,6 +892,29 @@ class FilesWidget(QtWidgets.QFrame):
self._add_filepaths(new_items)
self._remove_item_by_ids(item_ids)
def _on_merge_request(self):
if self._multivalue:
return
item_ids = self._files_view.get_selected_item_ids()
if not item_ids:
return
all_paths = set()
merged_item_ids = set()
for item_id in item_ids:
file_item = self._files_model.get_file_item_by_id(item_id)
if file_item is None:
continue
merged_item_ids.add(item_id)
all_paths |= {
os.path.join(file_item.directory, filename)
for filename in file_item.filenames
}
self._remove_item_by_ids(merged_item_ids)
new_items = FileDefItem.from_value(list(all_paths), True)
self._add_filepaths(new_items)
def _on_remove_requested(self):
if self._multivalue:
return
@ -911,6 +934,9 @@ class FilesWidget(QtWidgets.QFrame):
split_action.triggered.connect(self._on_split_request)
menu.addAction(split_action)
merge_action = QtWidgets.QAction("Merge sequence", menu)
merge_action.triggered.connect(self._on_merge_request)
menu.addAction(merge_action)
remove_action = QtWidgets.QAction("Remove", menu)
remove_action.triggered.connect(self._on_remove_requested)
menu.addAction(remove_action)

View file

@ -10,6 +10,7 @@ from .projects import (
PROJECTS_MODEL_SENDER,
FolderTypeItem,
TaskTypeItem,
ProductTypeIconMapping,
)
from .hierarchy import (
FolderItem,
@ -34,6 +35,7 @@ __all__ = (
"PROJECTS_MODEL_SENDER",
"FolderTypeItem",
"TaskTypeItem",
"ProductTypeIconMapping",
"FolderItem",
"TaskItem",

View file

@ -2,7 +2,7 @@ from __future__ import annotations
import contextlib
from abc import ABC, abstractmethod
from typing import Dict, Any
from typing import Any, Optional
from dataclasses import dataclass
import ayon_api
@ -51,7 +51,7 @@ class StatusItem:
self.icon: str = icon
self.state: str = state
def to_data(self) -> Dict[str, Any]:
def to_data(self) -> dict[str, Any]:
return {
"name": self.name,
"color": self.color,
@ -125,16 +125,24 @@ class TaskTypeItem:
icon (str): Icon name in MaterialIcons ("fiber_new").
"""
def __init__(self, name, short, icon):
def __init__(
self,
name: str,
short: str,
icon: str,
color: Optional[str],
):
self.name = name
self.short = short
self.icon = icon
self.color = color
def to_data(self):
return {
"name": self.name,
"short": self.short,
"icon": self.icon,
"color": self.color,
}
@classmethod
@ -147,6 +155,7 @@ class TaskTypeItem:
name=task_type_data["name"],
short=task_type_data["shortName"],
icon=task_type_data["icon"],
color=task_type_data.get("color"),
)
@ -218,6 +227,54 @@ class ProjectItem:
return cls(**data)
class ProductTypeIconMapping:
def __init__(
self,
default: Optional[dict[str, str]] = None,
definitions: Optional[list[dict[str, str]]] = None,
):
self._default = default or {}
self._definitions = definitions or []
self._default_def = None
self._definitions_by_name = None
def get_icon(
self,
product_base_type: Optional[str] = None,
product_type: Optional[str] = None,
) -> dict[str, str]:
defs = self._get_defs_by_name()
icon = defs.get(product_type)
if icon is None:
icon = defs.get(product_base_type)
if icon is None:
icon = self._get_default_def()
return icon.copy()
def _get_default_def(self) -> dict[str, str]:
if self._default_def is None:
self._default_def = {
"type": "material-symbols",
"name": self._default.get("icon", "deployed_code"),
"color": self._default.get("color", "#cccccc"),
}
return self._default_def
def _get_defs_by_name(self) -> dict[str, dict[str, str]]:
if self._definitions_by_name is None:
self._definitions_by_name = {
product_base_type_def["name"]: {
"type": "material-symbols",
"name": product_base_type_def.get("icon", "deployed_code"),
"color": product_base_type_def.get("color", "#cccccc"),
}
for product_base_type_def in self._definitions
}
return self._definitions_by_name
def _get_project_items_from_entitiy(
projects: list[dict[str, Any]]
) -> list[ProjectItem]:
@ -242,6 +299,9 @@ class ProjectsModel(object):
self._projects_by_name = NestedCacheItem(
levels=1, default_factory=list
)
self._product_type_icons_mapping = NestedCacheItem(
levels=1, default_factory=ProductTypeIconMapping
)
self._project_statuses_cache = {}
self._folder_types_cache = {}
self._task_types_cache = {}
@ -255,6 +315,7 @@ class ProjectsModel(object):
self._task_types_cache = {}
self._projects_cache.reset()
self._projects_by_name.reset()
self._product_type_icons_mapping.reset()
def refresh(self):
"""Refresh project items.
@ -390,6 +451,27 @@ class ProjectsModel(object):
self._task_type_items_getter,
)
def get_product_type_icons_mapping(
self, project_name: Optional[str]
) -> ProductTypeIconMapping:
cache = self._product_type_icons_mapping[project_name]
if cache.is_valid:
return cache.get_data()
project_entity = self.get_project_entity(project_name)
icons_mapping = ProductTypeIconMapping()
if project_entity:
product_base_types = (
project_entity["config"].get("productBaseTypes", {})
)
icons_mapping = ProductTypeIconMapping(
product_base_types.get("default"),
product_base_types.get("definitions")
)
cache.update_data(icons_mapping)
return icons_mapping
def _get_project_items(
self, project_name, sender, item_type, cache_obj, getter
):

View file

@ -1,9 +0,0 @@
from .window import (
show,
CreatorWindow
)
__all__ = (
"show",
"CreatorWindow"
)

View file

@ -1,8 +0,0 @@
from qtpy import QtCore
PRODUCT_TYPE_ROLE = QtCore.Qt.UserRole + 1
ITEM_ID_ROLE = QtCore.Qt.UserRole + 2
SEPARATOR = "---"
SEPARATORS = {"---", "---separator---"}

View file

@ -1,61 +0,0 @@
import uuid
from qtpy import QtGui, QtCore
from ayon_core.pipeline import discover_legacy_creator_plugins
from . constants import (
PRODUCT_TYPE_ROLE,
ITEM_ID_ROLE
)
class CreatorsModel(QtGui.QStandardItemModel):
def __init__(self, *args, **kwargs):
super(CreatorsModel, self).__init__(*args, **kwargs)
self._creators_by_id = {}
def reset(self):
# TODO change to refresh when clearing is not needed
self.clear()
self._creators_by_id = {}
items = []
creators = discover_legacy_creator_plugins()
for creator in creators:
if not creator.enabled:
continue
item_id = str(uuid.uuid4())
self._creators_by_id[item_id] = creator
label = creator.label or creator.product_type
item = QtGui.QStandardItem(label)
item.setEditable(False)
item.setData(item_id, ITEM_ID_ROLE)
item.setData(creator.product_type, PRODUCT_TYPE_ROLE)
items.append(item)
if not items:
item = QtGui.QStandardItem("No registered create plugins")
item.setEnabled(False)
item.setData(False, QtCore.Qt.ItemIsEnabled)
items.append(item)
items.sort(key=lambda item: item.text())
self.invisibleRootItem().appendRows(items)
def get_creator_by_id(self, item_id):
return self._creators_by_id.get(item_id)
def get_indexes_by_product_type(self, product_type):
indexes = []
for row in range(self.rowCount()):
index = self.index(row, 0)
item_id = index.data(ITEM_ID_ROLE)
creator_plugin = self._creators_by_id.get(item_id)
if creator_plugin and (
creator_plugin.label.lower() == product_type.lower()
or creator_plugin.product_type.lower() == product_type.lower()
):
indexes.append(index)
return indexes

View file

@ -1,275 +0,0 @@
import re
import inspect
from qtpy import QtWidgets, QtCore, QtGui
import qtawesome
from ayon_core.pipeline.create import PRODUCT_NAME_ALLOWED_SYMBOLS
from ayon_core.tools.utils import ErrorMessageBox
if hasattr(QtGui, "QRegularExpressionValidator"):
RegularExpressionValidatorClass = QtGui.QRegularExpressionValidator
RegularExpressionClass = QtCore.QRegularExpression
else:
RegularExpressionValidatorClass = QtGui.QRegExpValidator
RegularExpressionClass = QtCore.QRegExp
class CreateErrorMessageBox(ErrorMessageBox):
def __init__(
self,
product_type,
product_name,
folder_path,
exc_msg,
formatted_traceback,
parent
):
self._product_type = product_type
self._product_name = product_name
self._folder_path = folder_path
self._exc_msg = exc_msg
self._formatted_traceback = formatted_traceback
super(CreateErrorMessageBox, self).__init__("Creation failed", parent)
def _create_top_widget(self, parent_widget):
label_widget = QtWidgets.QLabel(parent_widget)
label_widget.setText(
"<span style='font-size:18pt;'>Failed to create</span>"
)
return label_widget
def _get_report_data(self):
report_message = (
"Failed to create Product: \"{product_name}\""
" Type: \"{product_type}\""
" in Folder: \"{folder_path}\""
"\n\nError: {message}"
).format(
product_name=self._product_name,
product_type=self._product_type,
folder_path=self._folder_path,
message=self._exc_msg
)
if self._formatted_traceback:
report_message += "\n\n{}".format(self._formatted_traceback)
return [report_message]
def _create_content(self, content_layout):
item_name_template = (
"<span style='font-weight:bold;'>{}:</span> {{}}<br>"
"<span style='font-weight:bold;'>{}:</span> {{}}<br>"
"<span style='font-weight:bold;'>{}:</span> {{}}<br>"
).format(
"Product type",
"Product name",
"Folder"
)
exc_msg_template = "<span style='font-weight:bold'>{}</span>"
line = self._create_line()
content_layout.addWidget(line)
item_name_widget = QtWidgets.QLabel(self)
item_name_widget.setText(
item_name_template.format(
self._product_type, self._product_name, self._folder_path
)
)
content_layout.addWidget(item_name_widget)
message_label_widget = QtWidgets.QLabel(self)
message_label_widget.setText(
exc_msg_template.format(self.convert_text_for_html(self._exc_msg))
)
content_layout.addWidget(message_label_widget)
if self._formatted_traceback:
line_widget = self._create_line()
tb_widget = self._create_traceback_widget(
self._formatted_traceback
)
content_layout.addWidget(line_widget)
content_layout.addWidget(tb_widget)
class ProductNameValidator(RegularExpressionValidatorClass):
invalid = QtCore.Signal(set)
pattern = "^[{}]*$".format(PRODUCT_NAME_ALLOWED_SYMBOLS)
def __init__(self):
reg = RegularExpressionClass(self.pattern)
super(ProductNameValidator, self).__init__(reg)
def validate(self, text, pos):
results = super(ProductNameValidator, self).validate(text, pos)
if results[0] == RegularExpressionValidatorClass.Invalid:
self.invalid.emit(self.invalid_chars(text))
return results
def invalid_chars(self, text):
invalid = set()
re_valid = re.compile(self.pattern)
for char in text:
if char == " ":
invalid.add("' '")
continue
if not re_valid.match(char):
invalid.add(char)
return invalid
class VariantLineEdit(QtWidgets.QLineEdit):
    """Line edit for the variant name with validation feedback.

    Border color reflects the state of the entered product name
    ("empty", "exists", "new") and flashes red on invalid input via a
    property animation on the ``status_color`` Qt property below.
    """

    # Emitted with a human-readable status/error message for the UI
    report = QtCore.Signal(str)
    # state -> (animation end color, stylesheet applied for the state)
    colors = {
        "empty": (QtGui.QColor("#78879b"), ""),
        "exists": (QtGui.QColor("#4E76BB"), "border-color: #4E76BB;"),
        "new": (QtGui.QColor("#7AAB8F"), "border-color: #7AAB8F;"),
    }

    def __init__(self, *args, **kwargs):
        super(VariantLineEdit, self).__init__(*args, **kwargs)

        validator = ProductNameValidator()
        self.setValidator(validator)
        self.setToolTip("Only alphanumeric characters (A-Z a-z 0-9), "
                        "'_' and '.' are allowed.")

        self._status_color = self.colors["empty"][0]

        # Animate the 'status_color' Qt property (declared at the bottom of
        # the class); the property name bytes must match that declaration.
        anim = QtCore.QPropertyAnimation()
        anim.setTargetObject(self)
        anim.setPropertyName(b"status_color")
        anim.setEasingCurve(QtCore.QEasingCurve.InCubic)
        anim.setDuration(300)
        anim.setStartValue(QtGui.QColor("#C84747"))  # `Invalid` status color
        self.animation = anim

        validator.invalid.connect(self.on_invalid)

    def on_invalid(self, invalid):
        """Report invalid characters and flash the border red."""
        message = "Invalid character: %s" % ", ".join(invalid)
        self.report.emit(message)
        # Restart so repeated invalid input re-triggers the flash
        self.animation.stop()
        self.animation.start()

    def as_empty(self):
        """Mark the field as having no product name entered."""
        self._set_border("empty")
        self.report.emit("Empty product name ..")

    def as_exists(self):
        """Mark the entered product name as already existing."""
        self._set_border("exists")
        self.report.emit("Existing product, appending next version.")

    def as_new(self):
        """Mark the entered product name as new."""
        self._set_border("new")
        self.report.emit("New product, creating first version.")

    def _set_border(self, status):
        """Apply border style for 'status' and set the animation target."""
        qcolor, style = self.colors[status]
        self.animation.setEndValue(qcolor)
        self.setStyleSheet(style)

    # Getter/setter pair backing the animated 'status_color' Qt property
    def _get_status_color(self):
        return self._status_color

    def _set_status_color(self, color):
        self._status_color = color
        self.setStyleSheet("border-color: %s;" % color.name())

    status_color = QtCore.Property(
        QtGui.QColor, _get_status_color, _set_status_color
    )
class ProductTypeDescriptionWidget(QtWidgets.QWidget):
    """A product type description widget.

    Shows a product type icon, name and a help description.
    Used in creator header.
     _______________________
    |  ____                 |
    | |icon| PRODUCT TYPE   |
    | |____| help           |
    |_______________________|
    """

    SIZE = 35

    def __init__(self, parent=None):
        super(ProductTypeDescriptionWidget, self).__init__(parent=parent)

        icon_label = QtWidgets.QLabel(self)
        icon_label.setSizePolicy(
            QtWidgets.QSizePolicy.Maximum,
            QtWidgets.QSizePolicy.Maximum
        )
        # Add 4 pixel padding to avoid icon being cut off
        icon_label.setFixedWidth(self.SIZE + 4)
        icon_label.setFixedHeight(self.SIZE + 4)

        label_layout = QtWidgets.QVBoxLayout()
        label_layout.setSpacing(0)

        product_type_label = QtWidgets.QLabel(self)
        product_type_label.setObjectName("CreatorProductTypeLabel")
        product_type_label.setAlignment(
            QtCore.Qt.AlignBottom | QtCore.Qt.AlignLeft
        )

        help_label = QtWidgets.QLabel(self)
        help_label.setAlignment(QtCore.Qt.AlignTop | QtCore.Qt.AlignLeft)

        label_layout.addWidget(product_type_label)
        label_layout.addWidget(help_label)

        layout = QtWidgets.QHBoxLayout(self)
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(5)
        layout.addWidget(icon_label)
        layout.addLayout(label_layout)

        self._help_label = help_label
        self._product_type_label = product_type_label
        self._icon_label = icon_label

    def set_item(self, creator_plugin):
        """Update elements to display information of a product type item.

        Args:
            creator_plugin (dict): A product type item as registered with
                name, help and icon.

        Returns:
            None

        """
        if not creator_plugin:
            # Clear with a null pixmap - 'setPixmap(None)' raises TypeError
            # with PyQt5/Qt6 bindings.
            self._icon_label.setPixmap(QtGui.QPixmap())
            self._product_type_label.setText("")
            self._help_label.setText("")
            return

        # Support a font-awesome icon
        icon_name = getattr(creator_plugin, "icon", None) or "info-circle"
        try:
            icon = qtawesome.icon("fa.{}".format(icon_name), color="white")
            pixmap = icon.pixmap(self.SIZE, self.SIZE)
        except Exception:
            print("BUG: Couldn't load icon \"fa.{}\"".format(str(icon_name)))
            # Create transparent pixmap as fallback. The pixmap must be
            # constructed with a size - a default-constructed QPixmap is
            # null, so 'fill' would be a no-op and 'scaled' would stay null.
            pixmap = QtGui.QPixmap(self.SIZE, self.SIZE)
            pixmap.fill(QtCore.Qt.transparent)

        # Parse a clean line from the Creator's docstring
        docstring = inspect.getdoc(creator_plugin)
        creator_help = docstring.splitlines()[0] if docstring else ""

        self._icon_label.setPixmap(pixmap)
        self._product_type_label.setText(creator_plugin.product_type)
        self._help_label.setText(creator_help)

View file

@ -1,508 +0,0 @@
import sys
import traceback
import re
import ayon_api
from qtpy import QtWidgets, QtCore
from ayon_core import style
from ayon_core.settings import get_current_project_settings
from ayon_core.tools.utils.lib import qt_app_context
from ayon_core.pipeline import (
get_current_project_name,
get_current_folder_path,
get_current_task_name,
)
from ayon_core.pipeline.create import (
PRODUCT_NAME_ALLOWED_SYMBOLS,
legacy_create,
CreatorError,
)
from .model import CreatorsModel
from .widgets import (
CreateErrorMessageBox,
VariantLineEdit,
ProductTypeDescriptionWidget
)
from .constants import (
ITEM_ID_ROLE,
SEPARATOR,
SEPARATORS
)
module = sys.modules[__name__]
module.window = None
class CreatorWindow(QtWidgets.QDialog):
    """Dialog that creates a new product instance via a legacy creator.

    Lists registered creator plugins, lets the user pick a folder path and
    a variant name, previews the resulting product name and triggers
    ``legacy_create`` on confirmation.
    """

    def __init__(self, parent=None):
        super(CreatorWindow, self).__init__(parent)
        self.setWindowTitle("Instance Creator")
        self.setFocusPolicy(QtCore.Qt.StrongFocus)
        # Keep the dialog on top when it has no parent window
        if not parent:
            self.setWindowFlags(
                self.windowFlags() | QtCore.Qt.WindowStaysOnTopHint
            )

        creator_info = ProductTypeDescriptionWidget(self)

        creators_model = CreatorsModel()

        creators_proxy = QtCore.QSortFilterProxyModel()
        creators_proxy.setSourceModel(creators_model)

        creators_view = QtWidgets.QListView(self)
        creators_view.setObjectName("CreatorsView")
        creators_view.setModel(creators_proxy)

        folder_path_input = QtWidgets.QLineEdit(self)
        variant_input = VariantLineEdit(self)
        product_name_input = QtWidgets.QLineEdit(self)
        product_name_input.setEnabled(False)

        variants_btn = QtWidgets.QPushButton()
        variants_btn.setFixedWidth(18)
        variants_menu = QtWidgets.QMenu(variants_btn)
        variants_btn.setMenu(variants_menu)

        name_layout = QtWidgets.QHBoxLayout()
        name_layout.addWidget(variant_input)
        name_layout.addWidget(variants_btn)
        name_layout.setSpacing(3)
        name_layout.setContentsMargins(0, 0, 0, 0)

        body_layout = QtWidgets.QVBoxLayout()
        body_layout.setContentsMargins(0, 0, 0, 0)
        body_layout.addWidget(creator_info, 0)
        body_layout.addWidget(QtWidgets.QLabel("Product type", self), 0)
        body_layout.addWidget(creators_view, 1)
        body_layout.addWidget(QtWidgets.QLabel("Folder path", self), 0)
        body_layout.addWidget(folder_path_input, 0)
        body_layout.addWidget(QtWidgets.QLabel("Product name", self), 0)
        body_layout.addLayout(name_layout, 0)
        body_layout.addWidget(product_name_input, 0)

        useselection_chk = QtWidgets.QCheckBox("Use selection", self)
        useselection_chk.setCheckState(QtCore.Qt.Checked)

        create_btn = QtWidgets.QPushButton("Create", self)

        # Need to store error_msg to prevent garbage collection
        msg_label = QtWidgets.QLabel(self)

        footer_layout = QtWidgets.QVBoxLayout()
        footer_layout.addWidget(create_btn, 0)
        footer_layout.addWidget(msg_label, 0)
        footer_layout.setContentsMargins(0, 0, 0, 0)

        layout = QtWidgets.QVBoxLayout(self)
        layout.addLayout(body_layout, 1)
        layout.addWidget(useselection_chk, 0, QtCore.Qt.AlignLeft)
        layout.addLayout(footer_layout, 0)

        # Timer clearing the status message after a few seconds
        msg_timer = QtCore.QTimer()
        msg_timer.setSingleShot(True)
        msg_timer.setInterval(5000)

        # Debounce timer so validation does not run on every keystroke
        validation_timer = QtCore.QTimer()
        validation_timer.setSingleShot(True)
        validation_timer.setInterval(300)

        msg_timer.timeout.connect(self._on_msg_timer)
        validation_timer.timeout.connect(self._on_validation_timer)

        create_btn.clicked.connect(self._on_create)
        variant_input.returnPressed.connect(self._on_create)
        variant_input.textChanged.connect(self._on_data_changed)
        variant_input.report.connect(self.echo)
        folder_path_input.textChanged.connect(self._on_data_changed)
        creators_view.selectionModel().currentChanged.connect(
            self._on_selection_changed
        )

        # Store valid states and
        self._is_valid = False
        create_btn.setEnabled(self._is_valid)

        self._first_show = True
        # Message dialog when something goes wrong during creation
        self._message_dialog = None

        self._creator_info = creator_info
        self._create_btn = create_btn
        self._useselection_chk = useselection_chk
        self._variant_input = variant_input
        self._product_name_input = product_name_input
        self._folder_path_input = folder_path_input

        self._creators_model = creators_model
        self._creators_proxy = creators_proxy
        self._creators_view = creators_view

        self._variants_btn = variants_btn
        self._variants_menu = variants_menu

        self._msg_label = msg_label

        self._validation_timer = validation_timer
        self._msg_timer = msg_timer

        # Defaults
        self.resize(300, 500)

        variant_input.setFocus()

    def _set_valid_state(self, valid):
        """Enable/disable the create button based on validity."""
        if self._is_valid == valid:
            return
        self._is_valid = valid
        self._create_btn.setEnabled(valid)

    def _build_menu(self, default_names=None):
        """Create optional predefined variants.

        Args:
            default_names(list): all predefined names

        Returns:
            None

        """
        if not default_names:
            default_names = []

        menu = self._variants_menu
        button = self._variants_btn

        # Get and destroy the action group
        group = button.findChild(QtWidgets.QActionGroup)
        if group:
            group.deleteLater()

        state = any(default_names)
        button.setEnabled(state)
        if state is False:
            return

        # Build new action group
        group = QtWidgets.QActionGroup(button)
        for name in default_names:
            if name in SEPARATORS:
                menu.addSeparator()
                continue
            action = group.addAction(name)
            menu.addAction(action)
        group.triggered.connect(self._on_action_clicked)

    def _on_action_clicked(self, action):
        self._variant_input.setText(action.text())

    def _on_data_changed(self, *args):
        # Set invalid state until it's reconfirmed to be valid by the
        # scheduled callback so any form of creation is held back until
        # valid again
        self._set_valid_state(False)

        self._validation_timer.start()

    def _on_validation_timer(self):
        """Validate current inputs and update product name preview."""
        index = self._creators_view.currentIndex()
        item_id = index.data(ITEM_ID_ROLE)
        creator_plugin = self._creators_model.get_creator_by_id(item_id)
        user_input_text = self._variant_input.text()
        folder_path = self._folder_path_input.text()

        # Early exit if no folder path
        if not folder_path:
            self._build_menu()
            self.echo("Folder is required ..")
            self._set_valid_state(False)
            return

        project_name = get_current_project_name()
        folder_entity = None
        if creator_plugin:
            # Get the folder from the database which match with the name
            folder_entity = ayon_api.get_folder_by_path(
                project_name, folder_path, fields={"id"}
            )

        # Get plugin
        if not folder_entity or not creator_plugin:
            self._build_menu()

            if not creator_plugin:
                self.echo("No registered product types ..")
            else:
                self.echo("Folder '{}' not found ..".format(folder_path))
            self._set_valid_state(False)
            return

        folder_id = folder_entity["id"]
        task_name = get_current_task_name()
        task_entity = ayon_api.get_task_by_name(
            project_name, folder_id, task_name
        )

        # Calculate product name with Creator plugin
        product_name = creator_plugin.get_product_name(
            project_name, folder_entity, task_entity, user_input_text
        )

        # Force replacement of prohibited symbols
        # QUESTION should Creator care about this and here should be only
        #   validated with schema regex?

        # Allow curly brackets in product name for dynamic keys
        curly_left = "__cbl__"
        curly_right = "__cbr__"
        tmp_product_name = (
            product_name
            .replace("{", curly_left)
            .replace("}", curly_right)
        )
        # Replace prohibited symbols
        tmp_product_name = re.sub(
            "[^{}]+".format(PRODUCT_NAME_ALLOWED_SYMBOLS),
            "",
            tmp_product_name
        )
        product_name = (
            tmp_product_name
            .replace(curly_left, "{")
            .replace(curly_right, "}")
        )
        self._product_name_input.setText(product_name)

        # Get all products of the current folder
        product_entities = ayon_api.get_products(
            project_name, folder_ids={folder_id}, fields={"name"}
        )
        existing_product_names = {
            product_entity["name"]
            for product_entity in product_entities
        }
        existing_product_names_low = set(
            _name.lower()
            for _name in existing_product_names
        )

        # Defaults to dropdown
        defaults = []
        # Check if Creator plugin has set defaults
        if (
            creator_plugin.defaults
            and isinstance(creator_plugin.defaults, (list, tuple, set))
        ):
            defaults = list(creator_plugin.defaults)

        # Escape the user text so regex metacharacters ('.', '+', ...) in
        # the variant are matched literally instead of breaking the pattern.
        # NOTE(review): 'product_name' itself may still contain regex
        #   specials (e.g. curly brackets for dynamic keys) - assumed rare.
        compare_regex = re.compile(re.sub(
            re.escape(user_input_text), "(.+)", product_name,
            flags=re.IGNORECASE
        ))
        variant_hints = set()
        if user_input_text:
            for _name in existing_product_names:
                _result = compare_regex.search(_name)
                if _result:
                    variant_hints |= set(_result.groups())

        if variant_hints:
            if defaults:
                defaults.append(SEPARATOR)
            defaults.extend(variant_hints)
        self._build_menu(defaults)

        # Indicate product existence
        if not user_input_text:
            self._variant_input.as_empty()
        elif product_name.lower() in existing_product_names_low:
            # validate existence of product name with lowered text
            #   - "renderMain" vs. "rensermain" mean same path item for
            #   windows
            self._variant_input.as_exists()
        else:
            self._variant_input.as_new()

        # Update the valid state
        valid = product_name.strip() != ""
        self._set_valid_state(valid)

    def _on_selection_changed(self, old_idx, new_idx):
        """Fill in the default variant for the newly selected creator."""
        index = self._creators_view.currentIndex()
        item_id = index.data(ITEM_ID_ROLE)
        creator_plugin = self._creators_model.get_creator_by_id(item_id)

        self._creator_info.set_item(creator_plugin)

        if creator_plugin is None:
            return

        default = None
        if hasattr(creator_plugin, "get_default_variant"):
            default = creator_plugin.get_default_variant()

        if not default:
            if (
                creator_plugin.defaults
                and isinstance(creator_plugin.defaults, list)
            ):
                default = creator_plugin.defaults[0]
            else:
                default = "Default"

        self._variant_input.setText(default)

        self._on_data_changed()

    def keyPressEvent(self, event):
        """Custom keyPressEvent.

        Override keyPressEvent to do nothing so that Maya's panels won't
        take focus when pressing "SHIFT" whilst mouse is over viewport or
        outliner. This way users don't accidentally perform Maya commands
        whilst trying to name an instance.

        """
        pass

    def showEvent(self, event):
        super(CreatorWindow, self).showEvent(event)
        if self._first_show:
            self._first_show = False
            self.setStyleSheet(style.load_stylesheet())

    def refresh(self):
        """Reload creators and preselect one by smart task-name matching."""
        self._folder_path_input.setText(get_current_folder_path())

        self._creators_model.reset()

        product_types_smart_select = (
            get_current_project_settings()
            ["core"]
            ["tools"]
            ["creator"]
            ["product_types_smart_select"]
        )
        current_index = None
        product_type = None
        task_name = get_current_task_name() or None
        if task_name:
            # Lower the task name inside the guard - calling '.lower()'
            # on a missing (None) task name would raise AttributeError.
            lowered_task_name = task_name.lower()
            for smart_item in product_types_smart_select:
                _low_task_names = {
                    name.lower() for name in smart_item["task_names"]
                }
                for _task_name in _low_task_names:
                    if _task_name in lowered_task_name:
                        product_type = smart_item["name"]
                        break
                if product_type:
                    break

        if product_type:
            indexes = self._creators_model.get_indexes_by_product_type(
                product_type
            )
            if indexes:
                index = indexes[0]
                current_index = self._creators_proxy.mapFromSource(index)

        if current_index is None or not current_index.isValid():
            current_index = self._creators_proxy.index(0, 0)

        self._creators_view.setCurrentIndex(current_index)

    def _on_create(self):
        """Run the selected creator plugin with the current inputs."""
        # Do not allow creation in an invalid state
        if not self._is_valid:
            return

        index = self._creators_view.currentIndex()
        item_id = index.data(ITEM_ID_ROLE)
        creator_plugin = self._creators_model.get_creator_by_id(item_id)
        if creator_plugin is None:
            return

        product_name = self._product_name_input.text()
        folder_path = self._folder_path_input.text()
        use_selection = self._useselection_chk.isChecked()
        variant = self._variant_input.text()

        error_info = None
        try:
            legacy_create(
                creator_plugin,
                product_name,
                folder_path,
                options={"useSelection": use_selection},
                data={"variant": variant}
            )
        except CreatorError as exc:
            self.echo("Creator error: {}".format(str(exc)))
            error_info = (str(exc), None)
        except Exception as exc:
            self.echo("Program error: %s" % str(exc))

            exc_type, exc_value, exc_traceback = sys.exc_info()
            formatted_traceback = "".join(traceback.format_exception(
                exc_type, exc_value, exc_traceback
            ))
            error_info = (str(exc), formatted_traceback)

        if error_info:
            box = CreateErrorMessageBox(
                creator_plugin.product_type,
                product_name,
                folder_path,
                *error_info,
                parent=self
            )
            box.show()
            # Store dialog so is not garbage collected before is shown
            self._message_dialog = box
        else:
            self.echo("Created %s .." % product_name)

    def _on_msg_timer(self):
        self._msg_label.setText("")

    def echo(self, message):
        """Show 'message' in the status label for a few seconds."""
        self._msg_label.setText(str(message))
        self._msg_timer.start()
def show(parent=None):
    """Display product creator GUI.

    Closes and replaces any window opened by a previous call, keeping a
    module-level reference so the window is not garbage collected.

    Arguments:
        parent (QtCore.QObject, optional): When provided parent the interface
            to this QObject.

    """
    # Close window from a previous call; the attribute may not exist yet
    # (AttributeError) or the Qt object may already be deleted (RuntimeError).
    try:
        module.window.close()
        del module.window
    except (AttributeError, RuntimeError):
        pass

    with qt_app_context():
        window = CreatorWindow(parent)
        window.refresh()
        window.show()
        module.window = window

        # Pull window to the front.
        module.window.raise_()
        module.window.activateWindow()

View file

@ -4,6 +4,7 @@ from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Optional, Any
from ayon_core.addon import AddonsManager
from ayon_core.tools.common_models import (
ProjectItem,
FolderItem,
@ -20,6 +21,7 @@ class WebactionContext:
project_name: str
folder_id: str
task_id: str
workfile_id: str
addon_name: str
addon_version: str
@ -33,7 +35,7 @@ class ActionItem:
identifier (str): Unique identifier of action item.
order (int): Action ordering.
label (str): Action label.
variant_label (Union[str, None]): Variant label, full label is
variant_label (Optional[str]): Variant label, full label is
concatenated with space. Actions are grouped under single
action if it has same 'label' and have set 'variant_label'.
full_label (str): Full label, if not set it is generated
@ -56,6 +58,15 @@ class ActionItem:
addon_version: Optional[str] = None
@dataclass
class WorkfileItem:
workfile_id: str
filename: str
exists: bool
icon: Optional[str]
version: Optional[int]
class AbstractLauncherCommon(ABC):
@abstractmethod
def register_event_callback(self, topic, callback):
@ -85,12 +96,16 @@ class AbstractLauncherBackend(AbstractLauncherCommon):
pass
@abstractmethod
def get_addons_manager(self) -> AddonsManager:
pass
@abstractmethod
def get_project_settings(self, project_name):
"""Project settings for current project.
Args:
project_name (Union[str, None]): Project name.
project_name (Optional[str]): Project name.
Returns:
dict[str, Any]: Project settings.
@ -254,7 +269,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
"""Selected project name.
Returns:
Union[str, None]: Selected project name.
Optional[str]: Selected project name.
"""
pass
@ -264,7 +279,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
"""Selected folder id.
Returns:
Union[str, None]: Selected folder id.
Optional[str]: Selected folder id.
"""
pass
@ -274,7 +289,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
"""Selected task id.
Returns:
Union[str, None]: Selected task id.
Optional[str]: Selected task id.
"""
pass
@ -284,7 +299,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
"""Selected task name.
Returns:
Union[str, None]: Selected task name.
Optional[str]: Selected task name.
"""
pass
@ -302,7 +317,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
}
Returns:
dict[str, Union[str, None]]: Selected context.
dict[str, Optional[str]]: Selected context.
"""
pass
@ -312,7 +327,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
"""Change selected folder.
Args:
project_name (Union[str, None]): Project nameor None if no project
project_name (Optional[str]): Project name or None if no project
is selected.
"""
@ -323,7 +338,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
"""Change selected folder.
Args:
folder_id (Union[str, None]): Folder id or None if no folder
folder_id (Optional[str]): Folder id or None if no folder
is selected.
"""
@ -336,14 +351,24 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
"""Change selected task.
Args:
task_id (Union[str, None]): Task id or None if no task
task_id (Optional[str]): Task id or None if no task
is selected.
task_name (Union[str, None]): Task name or None if no task
task_name (Optional[str]): Task name or None if no task
is selected.
"""
pass
@abstractmethod
def set_selected_workfile(self, workfile_id: Optional[str]):
"""Change selected workfile.
Args:
workfile_id (Optional[str]): Workfile id or None.
"""
pass
# Actions
@abstractmethod
def get_action_items(
@ -351,13 +376,15 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
project_name: Optional[str],
folder_id: Optional[str],
task_id: Optional[str],
workfile_id: Optional[str],
) -> list[ActionItem]:
"""Get action items for given context.
Args:
project_name (Union[str, None]): Project name.
folder_id (Union[str, None]): Folder id.
task_id (Union[str, None]): Task id.
project_name (Optional[str]): Project name.
folder_id (Optional[str]): Folder id.
task_id (Optional[str]): Task id.
workfile_id (Optional[str]): Workfile id.
Returns:
list[ActionItem]: List of action items that should be shown
@ -373,14 +400,16 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
project_name: Optional[str],
folder_id: Optional[str],
task_id: Optional[str],
workfile_id: Optional[str],
):
"""Trigger action on given context.
Args:
action_id (str): Action identifier.
project_name (Union[str, None]): Project name.
folder_id (Union[str, None]): Folder id.
task_id (Union[str, None]): Task id.
project_name (Optional[str]): Project name.
folder_id (Optional[str]): Folder id.
task_id (Optional[str]): Task id.
workfile_id (Optional[str]): Workfile id.
"""
pass
@ -465,3 +494,21 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
"""
pass
@abstractmethod
def get_workfile_items(
self,
project_name: Optional[str],
task_id: Optional[str],
) -> list[WorkfileItem]:
"""Get workfile items for a given context.
Args:
project_name (Optional[str]): Project name.
task_id (Optional[str]): Task id.
Returns:
list[WorkfileItem]: List of workfile items.
"""
pass

View file

@ -1,10 +1,21 @@
from typing import Optional
from ayon_core.lib import Logger, get_ayon_username
from ayon_core.lib.events import QueuedEventSystem
from ayon_core.addon import AddonsManager
from ayon_core.settings import get_project_settings, get_studio_settings
from ayon_core.tools.common_models import ProjectsModel, HierarchyModel
from .abstract import AbstractLauncherFrontEnd, AbstractLauncherBackend
from .models import LauncherSelectionModel, ActionsModel
from .abstract import (
AbstractLauncherFrontEnd,
AbstractLauncherBackend,
WorkfileItem,
)
from .models import (
LauncherSelectionModel,
ActionsModel,
WorkfilesModel,
)
NOT_SET = object()
@ -17,12 +28,15 @@ class BaseLauncherController(
self._event_system = None
self._log = None
self._addons_manager = None
self._username = NOT_SET
self._selection_model = LauncherSelectionModel(self)
self._projects_model = ProjectsModel(self)
self._hierarchy_model = HierarchyModel(self)
self._actions_model = ActionsModel(self)
self._workfiles_model = WorkfilesModel(self)
@property
def log(self):
@ -59,6 +73,11 @@ class BaseLauncherController(
def register_event_callback(self, topic, callback):
self.event_system.add_callback(topic, callback)
def get_addons_manager(self) -> AddonsManager:
if self._addons_manager is None:
self._addons_manager = AddonsManager()
return self._addons_manager
# Entity items for UI
def get_project_items(self, sender=None):
return self._projects_model.get_project_items(sender)
@ -125,6 +144,9 @@ class BaseLauncherController(
def set_selected_task(self, task_id, task_name):
self._selection_model.set_selected_task(task_id, task_name)
def set_selected_workfile(self, workfile_id):
self._selection_model.set_selected_workfile(workfile_id)
def get_selected_context(self):
return {
"project_name": self.get_selected_project_name(),
@ -133,10 +155,24 @@ class BaseLauncherController(
"task_name": self.get_selected_task_name(),
}
# Workfiles
def get_workfile_items(
self,
project_name: Optional[str],
task_id: Optional[str],
) -> list[WorkfileItem]:
return self._workfiles_model.get_workfile_items(
project_name,
task_id,
)
# Actions
def get_action_items(self, project_name, folder_id, task_id):
def get_action_items(
self, project_name, folder_id, task_id, workfile_id
):
return self._actions_model.get_action_items(
project_name, folder_id, task_id)
project_name, folder_id, task_id, workfile_id
)
def trigger_action(
self,
@ -144,12 +180,14 @@ class BaseLauncherController(
project_name,
folder_id,
task_id,
workfile_id,
):
self._actions_model.trigger_action(
identifier,
project_name,
folder_id,
task_id,
workfile_id,
)
def trigger_webaction(self, context, action_label, form_data=None):
@ -186,6 +224,8 @@ class BaseLauncherController(
self._projects_model.reset()
# Refresh actions
self._actions_model.refresh()
# Reset workfiles model
self._workfiles_model.reset()
self._emit_event("controller.refresh.actions.finished")

View file

@ -1,8 +1,10 @@
from .actions import ActionsModel
from .selection import LauncherSelectionModel
from .workfiles import WorkfilesModel
__all__ = (
"ActionsModel",
"LauncherSelectionModel",
"WorkfilesModel",
)

View file

@ -15,7 +15,6 @@ from ayon_core.lib import (
get_settings_variant,
run_detached_ayon_launcher_process,
)
from ayon_core.addon import AddonsManager
from ayon_core.pipeline.actions import (
discover_launcher_actions,
LauncherActionSelection,
@ -104,8 +103,6 @@ class ActionsModel:
levels=2, default_factory=list, lifetime=20,
)
self._addons_manager = None
self._variant = get_settings_variant()
@staticmethod
@ -131,19 +128,28 @@ class ActionsModel:
self._get_action_objects()
self._controller.emit_event("actions.refresh.finished")
def get_action_items(self, project_name, folder_id, task_id):
def get_action_items(
self,
project_name: Optional[str],
folder_id: Optional[str],
task_id: Optional[str],
workfile_id: Optional[str],
) -> list[ActionItem]:
"""Get actions for project.
Args:
project_name (Union[str, None]): Project name.
folder_id (Union[str, None]): Folder id.
task_id (Union[str, None]): Task id.
project_name (Optional[str]): Project name.
folder_id (Optional[str]): Folder id.
task_id (Optional[str]): Task id.
workfile_id (Optional[str]): Workfile id.
Returns:
list[ActionItem]: List of actions.
"""
selection = self._prepare_selection(project_name, folder_id, task_id)
selection = self._prepare_selection(
project_name, folder_id, task_id, workfile_id
)
output = []
action_items = self._get_action_items(project_name)
for identifier, action in self._get_action_objects().items():
@ -159,8 +165,11 @@ class ActionsModel:
project_name,
folder_id,
task_id,
workfile_id,
):
selection = self._prepare_selection(project_name, folder_id, task_id)
selection = self._prepare_selection(
project_name, folder_id, task_id, workfile_id
)
failed = False
error_message = None
action_label = identifier
@ -202,11 +211,15 @@ class ActionsModel:
identifier = context.identifier
folder_id = context.folder_id
task_id = context.task_id
workfile_id = context.workfile_id
project_name = context.project_name
addon_name = context.addon_name
addon_version = context.addon_version
if task_id:
if workfile_id:
entity_type = "workfile"
entity_ids.append(workfile_id)
elif task_id:
entity_type = "task"
entity_ids.append(task_id)
elif folder_id:
@ -272,6 +285,7 @@ class ActionsModel:
"project_name": project_name,
"folder_id": folder_id,
"task_id": task_id,
"workfile_id": workfile_id,
"addon_name": addon_name,
"addon_version": addon_version,
})
@ -282,7 +296,10 @@ class ActionsModel:
def get_action_config_values(self, context: WebactionContext):
selection = self._prepare_selection(
context.project_name, context.folder_id, context.task_id
context.project_name,
context.folder_id,
context.task_id,
context.workfile_id,
)
if not selection.is_project_selected:
return {}
@ -309,7 +326,10 @@ class ActionsModel:
def set_action_config_values(self, context, values):
selection = self._prepare_selection(
context.project_name, context.folder_id, context.task_id
context.project_name,
context.folder_id,
context.task_id,
context.workfile_id,
)
if not selection.is_project_selected:
return {}
@ -333,12 +353,9 @@ class ActionsModel:
exc_info=True
)
def _get_addons_manager(self):
if self._addons_manager is None:
self._addons_manager = AddonsManager()
return self._addons_manager
def _prepare_selection(self, project_name, folder_id, task_id):
def _prepare_selection(
self, project_name, folder_id, task_id, workfile_id
):
project_entity = None
if project_name:
project_entity = self._controller.get_project_entity(project_name)
@ -347,6 +364,7 @@ class ActionsModel:
project_name,
folder_id,
task_id,
workfile_id,
project_entity=project_entity,
project_settings=project_settings,
)
@ -355,7 +373,12 @@ class ActionsModel:
entity_type = None
entity_id = None
entity_subtypes = []
if selection.is_task_selected:
if selection.is_workfile_selected:
entity_type = "workfile"
entity_id = selection.workfile_id
entity_subtypes = []
elif selection.is_task_selected:
entity_type = "task"
entity_id = selection.task_entity["id"]
entity_subtypes = [selection.task_entity["taskType"]]
@ -399,7 +422,11 @@ class ActionsModel:
return cache.get_data()
try:
response = ayon_api.post("actions/list", **request_data)
# 'variant' query is supported since AYON backend 1.10.4
query = urlencode({"variant": self._variant, "mode": "all"})
response = ayon_api.post(
f"actions/list?{query}", **request_data
)
response.raise_for_status()
except Exception:
self.log.warning("Failed to collect webactions.", exc_info=True)
@ -513,7 +540,12 @@ class ActionsModel:
uri = payload["uri"]
else:
uri = data["uri"]
run_detached_ayon_launcher_process(uri)
# Remove bundles from environment variables
env = os.environ.copy()
env.pop("AYON_BUNDLE_NAME", None)
env.pop("AYON_STUDIO_BUNDLE_NAME", None)
run_detached_ayon_launcher_process(uri, env=env)
elif response_type in ("query", "navigate"):
response.error_message = (
@ -533,7 +565,7 @@ class ActionsModel:
# NOTE We don't need to register the paths, but that would
# require to change discovery logic and deprecate all functions
# related to registering and discovering launcher actions.
addons_manager = self._get_addons_manager()
addons_manager = self._controller.get_addons_manager()
actions_paths = addons_manager.collect_launcher_action_paths()
for path in actions_paths:
if path and os.path.exists(path):

View file

@ -1,26 +1,37 @@
class LauncherSelectionModel(object):
from __future__ import annotations
import typing
from typing import Optional
if typing.TYPE_CHECKING:
from ayon_core.tools.launcher.abstract import AbstractLauncherBackend
class LauncherSelectionModel:
"""Model handling selection changes.
Triggering events:
- "selection.project.changed"
- "selection.folder.changed"
- "selection.task.changed"
- "selection.workfile.changed"
"""
event_source = "launcher.selection.model"
def __init__(self, controller):
def __init__(self, controller: AbstractLauncherBackend) -> None:
self._controller = controller
self._project_name = None
self._folder_id = None
self._task_name = None
self._task_id = None
self._workfile_id = None
def get_selected_project_name(self):
def get_selected_project_name(self) -> Optional[str]:
return self._project_name
def set_selected_project(self, project_name):
def set_selected_project(self, project_name: Optional[str]) -> None:
if project_name == self._project_name:
return
@ -31,10 +42,10 @@ class LauncherSelectionModel(object):
self.event_source
)
def get_selected_folder_id(self):
def get_selected_folder_id(self) -> Optional[str]:
return self._folder_id
def set_selected_folder(self, folder_id):
def set_selected_folder(self, folder_id: Optional[str]) -> None:
if folder_id == self._folder_id:
return
@ -48,13 +59,15 @@ class LauncherSelectionModel(object):
self.event_source
)
def get_selected_task_name(self):
def get_selected_task_name(self) -> Optional[str]:
return self._task_name
def get_selected_task_id(self):
def get_selected_task_id(self) -> Optional[str]:
return self._task_id
def set_selected_task(self, task_id, task_name):
def set_selected_task(
self, task_id: Optional[str], task_name: Optional[str]
) -> None:
if task_id == self._task_id:
return
@ -70,3 +83,23 @@ class LauncherSelectionModel(object):
},
self.event_source
)
def get_selected_workfile(self) -> Optional[str]:
return self._workfile_id
def set_selected_workfile(self, workfile_id: Optional[str]) -> None:
if workfile_id == self._workfile_id:
return
self._workfile_id = workfile_id
self._controller.emit_event(
"selection.workfile.changed",
{
"project_name": self._project_name,
"folder_id": self._folder_id,
"task_name": self._task_name,
"task_id": self._task_id,
"workfile_id": workfile_id,
},
self.event_source
)

View file

@ -0,0 +1,102 @@
import os
from typing import Optional, Any
import ayon_api
from ayon_core.lib import (
Logger,
NestedCacheItem,
)
from ayon_core.pipeline import Anatomy
from ayon_core.tools.launcher.abstract import (
WorkfileItem,
AbstractLauncherBackend,
)
class WorkfilesModel:
    """Cache-backed access to workfile entities for the launcher."""

    def __init__(self, controller: AbstractLauncherBackend):
        self._controller = controller
        self._log = Logger.get_logger(self.__class__.__name__)
        # Lazily-built mapping of host name -> icon url ('None' until first use)
        self._host_icons = None
        # Two-level cache keyed by [project_name][task_id]
        self._workfile_items = NestedCacheItem(
            levels=2, default_factory=list, lifetime=60,
        )

    def reset(self) -> None:
        """Invalidate all cached workfile items."""
        self._workfile_items.reset()

    def get_workfile_items(
        self,
        project_name: Optional[str],
        task_id: Optional[str],
    ) -> list[WorkfileItem]:
        """Workfile items for a task, served from cache while valid."""
        if not project_name or not task_id:
            return []

        cache = self._workfile_items[project_name][task_id]
        if cache.is_valid:
            return cache.get_data()

        project_entity = self._controller.get_project_entity(project_name)
        anatomy = Anatomy(project_name, project_entity=project_entity)
        items = [
            self._create_workfile_item(anatomy, workfile_entity)
            for workfile_entity in ayon_api.get_workfiles_info(
                project_name, task_ids={task_id}, fields={"id", "path", "data"}
            )
        ]
        cache.update_data(items)
        return items

    def _create_workfile_item(self, anatomy, workfile_entity) -> WorkfileItem:
        """Convert a workfile entity into a 'WorkfileItem'."""
        rootless_path = workfile_entity["path"]
        exists = False
        try:
            exists = os.path.exists(anatomy.fill_root(rootless_path))
        except Exception:
            self._log.warning(
                "Failed to fill root for workfile path",
                exc_info=True,
            )

        entity_data = workfile_entity["data"]
        return WorkfileItem(
            workfile_id=workfile_entity["id"],
            filename=os.path.basename(rootless_path),
            exists=exists,
            icon=self._get_host_icon(entity_data.get("host_name")),
            version=entity_data.get("version"),
        )

    def _get_host_icon(
        self, host_name: Optional[str]
    ) -> Optional[dict[str, Any]]:
        # Build the icon mapping once; a failure leaves an empty mapping so
        # the collection is not retried on every call.
        if self._host_icons is None:
            try:
                self._host_icons = self._collect_host_icons()
            except Exception:
                self._log.warning(
                    "Failed to get host icons",
                    exc_info=True,
                )
                self._host_icons = {}
        return self._host_icons.get(host_name)

    def _collect_host_icons(self) -> dict[str, Any]:
        """Collect icon urls by host name from the applications addon."""
        addons_manager = self._controller.get_addons_manager()
        applications_addon = addons_manager["applications"]
        apps_manager = applications_addon.get_applications_manager()
        icons_by_host = {}
        for app_group in apps_manager.app_groups.values():
            host_name = app_group.host_name
            icon_filename = app_group.icon
            if not host_name or not icon_filename:
                continue
            icons_by_host[host_name] = applications_addon.get_app_icon_url(
                icon_filename, server=True
            )
        return icons_by_host

View file

@ -136,6 +136,10 @@ class ActionsQtModel(QtGui.QStandardItemModel):
"selection.task.changed",
self._on_selection_task_changed,
)
controller.register_event_callback(
"selection.workfile.changed",
self._on_selection_workfile_changed,
)
self._controller = controller
@ -146,6 +150,7 @@ class ActionsQtModel(QtGui.QStandardItemModel):
self._selected_project_name = None
self._selected_folder_id = None
self._selected_task_id = None
self._selected_workfile_id = None
def get_selected_project_name(self):
return self._selected_project_name
@ -156,6 +161,9 @@ class ActionsQtModel(QtGui.QStandardItemModel):
def get_selected_task_id(self):
return self._selected_task_id
def get_selected_workfile_id(self):
    """Workfile id captured from the last selection event (may be None)."""
    return self._selected_workfile_id
def get_group_items(self, action_id):
return self._groups_by_id[action_id]
@ -194,6 +202,7 @@ class ActionsQtModel(QtGui.QStandardItemModel):
self._selected_project_name,
self._selected_folder_id,
self._selected_task_id,
self._selected_workfile_id,
)
if not items:
self._clear_items()
@ -286,18 +295,28 @@ class ActionsQtModel(QtGui.QStandardItemModel):
self._selected_project_name = event["project_name"]
self._selected_folder_id = None
self._selected_task_id = None
self._selected_workfile_id = None
self.refresh()
def _on_selection_folder_changed(self, event):
self._selected_project_name = event["project_name"]
self._selected_folder_id = event["folder_id"]
self._selected_task_id = None
self._selected_workfile_id = None
self.refresh()
def _on_selection_task_changed(self, event):
self._selected_project_name = event["project_name"]
self._selected_folder_id = event["folder_id"]
self._selected_task_id = event["task_id"]
self._selected_workfile_id = None
self.refresh()
def _on_selection_workfile_changed(self, event):
    """Store selection context from workfile change event and refresh."""
    self._selected_project_name = event["project_name"]
    self._selected_folder_id = event["folder_id"]
    self._selected_task_id = event["task_id"]
    self._selected_workfile_id = event["workfile_id"]
    self.refresh()
@ -578,9 +597,6 @@ class ActionMenuPopup(QtWidgets.QWidget):
if not index or not index.isValid():
return
if not index.data(ACTION_HAS_CONFIGS_ROLE):
return
action_id = index.data(ACTION_ID_ROLE)
self.action_triggered.emit(action_id)
@ -970,10 +986,11 @@ class ActionsWidget(QtWidgets.QWidget):
event["project_name"],
event["folder_id"],
event["task_id"],
event["workfile_id"],
event["addon_name"],
event["addon_version"],
),
event["action_label"],
event["full_label"],
form_data,
)
@ -1050,24 +1067,26 @@ class ActionsWidget(QtWidgets.QWidget):
project_name = self._model.get_selected_project_name()
folder_id = self._model.get_selected_folder_id()
task_id = self._model.get_selected_task_id()
workfile_id = self._model.get_selected_workfile_id()
action_item = self._model.get_action_item_by_id(action_id)
if action_item.action_type == "webaction":
action_item = self._model.get_action_item_by_id(action_id)
context = WebactionContext(
action_id,
project_name,
folder_id,
task_id,
action_item.addon_name,
action_item.addon_version
identifier=action_id,
project_name=project_name,
folder_id=folder_id,
task_id=task_id,
workfile_id=workfile_id,
addon_name=action_item.addon_name,
addon_version=action_item.addon_version,
)
self._controller.trigger_webaction(
context, action_item.full_label
)
else:
self._controller.trigger_action(
action_id, project_name, folder_id, task_id
action_id, project_name, folder_id, task_id, workfile_id
)
if index is None:
@ -1087,11 +1106,13 @@ class ActionsWidget(QtWidgets.QWidget):
project_name = self._model.get_selected_project_name()
folder_id = self._model.get_selected_folder_id()
task_id = self._model.get_selected_task_id()
workfile_id = self._model.get_selected_workfile_id()
context = WebactionContext(
action_id,
identifier=action_id,
project_name=project_name,
folder_id=folder_id,
task_id=task_id,
workfile_id=workfile_id,
addon_name=action_item.addon_name,
addon_version=action_item.addon_version,
)

View file

@ -12,6 +12,8 @@ from ayon_core.tools.utils import (
)
from ayon_core.tools.utils.lib import checkstate_int_to_enum
from .workfiles_page import WorkfilesPage
class HierarchyPage(QtWidgets.QWidget):
def __init__(self, controller, parent):
@ -73,10 +75,15 @@ class HierarchyPage(QtWidgets.QWidget):
# - Tasks widget
tasks_widget = TasksWidget(controller, content_body)
# - Third page - Workfiles
workfiles_page = WorkfilesPage(controller, content_body)
content_body.addWidget(folders_widget)
content_body.addWidget(tasks_widget)
content_body.setStretchFactor(0, 100)
content_body.setStretchFactor(1, 65)
content_body.addWidget(workfiles_page)
content_body.setStretchFactor(0, 120)
content_body.setStretchFactor(1, 85)
content_body.setStretchFactor(2, 220)
main_layout = QtWidgets.QVBoxLayout(self)
main_layout.setContentsMargins(0, 0, 0, 0)
@ -99,6 +106,7 @@ class HierarchyPage(QtWidgets.QWidget):
self._my_tasks_checkbox = my_tasks_checkbox
self._folders_widget = folders_widget
self._tasks_widget = tasks_widget
self._workfiles_page = workfiles_page
self._project_name = None
@ -117,6 +125,7 @@ class HierarchyPage(QtWidgets.QWidget):
def refresh(self):
self._folders_widget.refresh()
self._tasks_widget.refresh()
self._workfiles_page.refresh()
self._on_my_tasks_checkbox_state_changed(
self._my_tasks_checkbox.checkState()
)

View file

@ -177,7 +177,7 @@ class LauncherWindow(QtWidgets.QWidget):
self._page_slide_anim = page_slide_anim
hierarchy_page.setVisible(not self._is_on_projects_page)
self.resize(520, 740)
self.resize(920, 740)
def showEvent(self, event):
super().showEvent(event)

View file

@ -0,0 +1,175 @@
from typing import Optional
import ayon_api
from qtpy import QtCore, QtWidgets, QtGui
from ayon_core.tools.utils import get_qt_icon
from ayon_core.tools.launcher.abstract import AbstractLauncherFrontEnd
VERSION_ROLE = QtCore.Qt.UserRole + 1
WORKFILE_ID_ROLE = QtCore.Qt.UserRole + 2
class WorkfilesModel(QtGui.QStandardItemModel):
    """Qt model listing workfiles for the currently selected context."""

    refreshed = QtCore.Signal()

    def __init__(self, controller: AbstractLauncherFrontEnd) -> None:
        super().__init__()
        self.setColumnCount(1)
        self.setHeaderData(0, QtCore.Qt.Horizontal, "Workfiles")

        for topic, callback in (
            ("selection.project.changed", self._on_selection_project_changed),
            ("selection.folder.changed", self._on_selection_folder_changed),
            ("selection.task.changed", self._on_selection_task_changed),
        ):
            controller.register_event_callback(topic, callback)

        self._controller = controller

        # Selection context tracked via controller events
        self._selected_project_name = None
        self._selected_folder_id = None
        self._selected_task_id = None

        # Icon caches (lazy fallback icon + per-url resolved icons)
        self._transparent_icon = None
        self._cached_icons = {}

    def refresh(self) -> None:
        """Rebuild model content from the controller's workfile items."""
        root_item = self.invisibleRootItem()
        root_item.removeRows(0, root_item.rowCount())

        workfile_items = self._controller.get_workfile_items(
            self._selected_project_name, self._selected_task_id
        )
        items = [
            self._create_item(workfile_item)
            for workfile_item in workfile_items
        ]
        if not items:
            items.append(self._create_placeholder_item())
        root_item.appendRows(items)
        self.refreshed.emit()

    def _create_item(self, workfile_item) -> QtGui.QStandardItem:
        """Create a model item for a single workfile."""
        item = QtGui.QStandardItem(workfile_item.filename)
        item.setData(
            self._get_icon(workfile_item.icon), QtCore.Qt.DecorationRole
        )
        item.setData(workfile_item.version, VERSION_ROLE)
        item.setData(workfile_item.workfile_id, WORKFILE_ID_ROLE)
        # Only workfiles that exist on disk can be selected
        if workfile_item.exists:
            item.setFlags(
                QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
            )
        else:
            item.setFlags(QtCore.Qt.NoItemFlags)
        return item

    def _create_placeholder_item(self) -> QtGui.QStandardItem:
        """Create a disabled item explaining why no workfiles are shown."""
        if not self._selected_project_name:
            title = "< Select a project >"
        elif not self._selected_folder_id:
            title = "< Select a folder >"
        elif not self._selected_task_id:
            title = "< Select a task >"
        else:
            title = "< No workfiles >"
        item = QtGui.QStandardItem(title)
        item.setFlags(QtCore.Qt.NoItemFlags)
        return item

    def _on_selection_project_changed(self, event) -> None:
        self._selected_project_name = event["project_name"]
        self._selected_folder_id = None
        self._selected_task_id = None
        self.refresh()

    def _on_selection_folder_changed(self, event) -> None:
        self._selected_project_name = event["project_name"]
        self._selected_folder_id = event["folder_id"]
        self._selected_task_id = None
        self.refresh()

    def _on_selection_task_changed(self, event) -> None:
        self._selected_project_name = event["project_name"]
        self._selected_folder_id = event["folder_id"]
        self._selected_task_id = event["task_id"]
        self.refresh()

    def _get_transparent_icon(self) -> QtGui.QIcon:
        """Lazily created fallback icon used when no icon is available."""
        if self._transparent_icon is None:
            self._transparent_icon = get_qt_icon({
                "type": "transparent", "size": 256
            })
        return self._transparent_icon

    def _get_icon(self, icon_url: Optional[str]) -> QtGui.QIcon:
        """Resolve (and cache) an icon from its url."""
        if icon_url is None:
            return self._get_transparent_icon()

        cached_icon = self._cached_icons.get(icon_url)
        if cached_icon is not None:
            return cached_icon

        base_url = ayon_api.get_base_url()
        if icon_url.startswith(base_url):
            # Icons hosted on the AYON server go through the api wrapper
            icon_def = {
                "type": "ayon_url",
                "url": icon_url[len(base_url) + 1:],
            }
        else:
            icon_def = {
                "type": "url",
                "url": icon_url,
            }
        icon = get_qt_icon(icon_def)
        if icon is None:
            icon = self._get_transparent_icon()
        self._cached_icons[icon_url] = icon
        return icon
class WorkfilesView(QtWidgets.QTreeView):
    """Tree view without branch indicators (renders as a flat list)."""

    def drawBranches(self, painter, rect, index):
        # Intentionally paint nothing to hide expand/collapse branches
        pass
class WorkfilesPage(QtWidgets.QWidget):
    """Launcher page showing workfiles of the selected task."""

    def __init__(
        self,
        controller: AbstractLauncherFrontEnd,
        parent: QtWidgets.QWidget,
    ) -> None:
        super().__init__(parent)

        view = WorkfilesView(self)
        view.setIndentation(0)

        model = WorkfilesModel(controller)
        proxy = QtCore.QSortFilterProxyModel()
        proxy.setSourceModel(model)
        view.setModel(proxy)

        main_layout = QtWidgets.QVBoxLayout(self)
        main_layout.setContentsMargins(0, 0, 0, 0)
        main_layout.addWidget(view, 1)

        view.selectionModel().selectionChanged.connect(
            self._on_selection_changed
        )
        model.refreshed.connect(self._on_refresh)

        self._controller = controller
        self._workfiles_view = view
        self._workfiles_model = model
        self._workfiles_proxy = proxy

    def refresh(self) -> None:
        """Refresh the underlying workfiles model."""
        self._workfiles_model.refresh()

    def _on_refresh(self) -> None:
        # Keep the list sorted with newest workfiles on top
        self._workfiles_proxy.sort(0, QtCore.Qt.DescendingOrder)

    def _on_selection_changed(self, selected, _deselected) -> None:
        # Use the last selected index (single selection in practice)
        workfile_id = None
        for index in selected.indexes():
            workfile_id = index.data(WORKFILE_ID_ROLE)
        self._controller.set_selected_workfile(workfile_id)

View file

@ -9,7 +9,11 @@ from ayon_core.lib.attribute_definitions import (
deserialize_attr_defs,
serialize_attr_defs,
)
from ayon_core.tools.common_models import TaskItem, TagItem
from ayon_core.tools.common_models import (
TaskItem,
TagItem,
ProductTypeIconMapping,
)
class ProductTypeItem:
@ -78,7 +82,6 @@ class ProductItem:
product_type (str): Product type.
product_name (str): Product name.
product_icon (dict[str, Any]): Product icon definition.
product_type_icon (dict[str, Any]): Product type icon definition.
product_in_scene (bool): Is product in scene (only when used in DCC).
group_name (str): Group name.
folder_id (str): Folder id.
@ -93,8 +96,6 @@ class ProductItem:
product_base_type: str,
product_name: str,
product_icon: dict[str, Any],
product_type_icon: dict[str, Any],
product_base_type_icon: dict[str, Any],
group_name: str,
folder_id: str,
folder_label: str,
@ -106,8 +107,6 @@ class ProductItem:
self.product_base_type = product_base_type
self.product_name = product_name
self.product_icon = product_icon
self.product_type_icon = product_type_icon
self.product_base_type_icon = product_base_type_icon
self.product_in_scene = product_in_scene
self.group_name = group_name
self.folder_id = folder_id
@ -121,8 +120,6 @@ class ProductItem:
"product_base_type": self.product_base_type,
"product_name": self.product_name,
"product_icon": self.product_icon,
"product_type_icon": self.product_type_icon,
"product_base_type_icon": self.product_base_type_icon,
"product_in_scene": self.product_in_scene,
"group_name": self.group_name,
"folder_id": self.folder_id,
@ -499,8 +496,8 @@ class BackendLoaderController(_BaseLoaderController):
topic (str): Event topic name.
data (Optional[dict[str, Any]]): Event data.
source (Optional[str]): Event source.
"""
"""
pass
@abstractmethod
@ -509,8 +506,20 @@ class BackendLoaderController(_BaseLoaderController):
Returns:
set[str]: Set of loaded product ids.
"""
"""
pass
@abstractmethod
def get_product_type_icons_mapping(
    self, project_name: Optional[str]
) -> ProductTypeIconMapping:
    """Product type icons mapping.

    Args:
        project_name (Optional[str]): Project to get the mapping for.

    Returns:
        ProductTypeIconMapping: Product type icons mapping.

    """
    pass

View file

@ -2,6 +2,7 @@ from __future__ import annotations
import logging
import uuid
from typing import Optional
import ayon_api
@ -16,6 +17,7 @@ from ayon_core.tools.common_models import (
HierarchyModel,
ThumbnailsModel,
TagItem,
ProductTypeIconMapping,
)
from .abstract import (
@ -198,6 +200,13 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
project_name, sender
)
def get_product_type_icons_mapping(
    self, project_name: Optional[str]
) -> ProductTypeIconMapping:
    """Product type icons mapping fetched from the projects model."""
    return self._projects_model.get_product_type_icons_mapping(
        project_name
    )
def get_folder_items(self, project_name, sender=None):
return self._hierarchy_model.get_folder_items(project_name, sender)

Some files were not shown because too many files have changed in this diff Show more