diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 9fb6ee645d..c79ca69fca 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -35,6 +35,18 @@ body: label: Version description: What version are you running? Look to AYON Tray options: + - 1.6.7 + - 1.6.6 + - 1.6.5 + - 1.6.4 + - 1.6.3 + - 1.6.2 + - 1.6.1 + - 1.6.0 + - 1.5.3 + - 1.5.2 + - 1.5.1 + - 1.5.0 - 1.4.1 - 1.4.0 - 1.3.2 diff --git a/.github/workflows/deploy_mkdocs.yml b/.github/workflows/deploy_mkdocs.yml new file mode 100644 index 0000000000..deafc7b850 --- /dev/null +++ b/.github/workflows/deploy_mkdocs.yml @@ -0,0 +1,18 @@ +name: Deploy MkDocs + +on: + push: + tags: + - "*" + workflow_dispatch: + +jobs: + build-mk-docs: + # FIXME: Update @develop to @main after `ops-repo-automation` release. + uses: ynput/ops-repo-automation/.github/workflows/deploy_mkdocs.yml@develop + with: + repo: ${{ github.repository }} + secrets: + YNPUT_BOT_TOKEN: ${{ secrets.YNPUT_BOT_TOKEN }} + CI_USER: ${{ secrets.CI_USER }} + CI_EMAIL: ${{ secrets.CI_EMAIL }} diff --git a/client/ayon_core/addon/base.py b/client/ayon_core/addon/base.py index 72270fa585..a04aedb8cc 100644 --- a/client/ayon_core/addon/base.py +++ b/client/ayon_core/addon/base.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- """Base class for AYON addons.""" -import copy +from __future__ import annotations + import os import sys import time @@ -8,12 +9,15 @@ import inspect import logging import threading import collections +import warnings from uuid import uuid4 from abc import ABC, abstractmethod -from typing import Optional +from urllib.parse import urlencode +from types import ModuleType +import typing +from typing import Optional, Any, Union import ayon_api -from semver import VersionInfo from ayon_core import AYON_CORE_ROOT from ayon_core.lib import ( @@ -29,6 +33,11 @@ from .interfaces import ( IHostAddon, ) +if typing.TYPE_CHECKING: + import click + + from ayon_core.host import HostBase + # Files that will be always ignored on addons import IGNORED_FILENAMES = { "__pycache__", @@ -38,33 +47,6 @@ IGNORED_DEFAULT_FILENAMES = { "__init__.py", } -# When addon was moved from ayon-core codebase -# - this is used to log the missing addon -MOVED_ADDON_MILESTONE_VERSIONS = { - "aftereffects": VersionInfo(0, 2, 0), - "applications": VersionInfo(0, 2, 0), - "blender": VersionInfo(0, 2, 0), - "celaction": VersionInfo(0, 2, 0), - "clockify": VersionInfo(0, 2, 0), - "deadline": VersionInfo(0, 2, 0), - "flame": VersionInfo(0, 2, 0), - "fusion": VersionInfo(0, 2, 0), - "harmony": VersionInfo(0, 2, 0), - "hiero": VersionInfo(0, 2, 0), - "max": VersionInfo(0, 2, 0), - "photoshop": VersionInfo(0, 2, 0), - "timers_manager": VersionInfo(0, 2, 0), - "traypublisher": VersionInfo(0, 2, 0), - "tvpaint": VersionInfo(0, 2, 0), - "maya": VersionInfo(0, 2, 0), - "nuke": VersionInfo(0, 2, 0), - "resolve": VersionInfo(0, 2, 0), - "royalrender": VersionInfo(0, 2, 0), - "substancepainter": VersionInfo(0, 2, 0), - "houdini": VersionInfo(0, 3, 0), - "unreal": VersionInfo(0, 2, 0), -} - class ProcessPreparationError(Exception): """Exception that can be used when process preparation failed. @@ -127,7 +109,7 @@ class _LoadCache: addon_modules = [] -def load_addons(force=False): +def load_addons(force: bool = False) -> None: """Load AYON addons as python modules. 
Modules does not load only classes (like in Interfaces) because there must @@ -154,91 +136,79 @@ def load_addons(force=False): time.sleep(0.1) -def _get_ayon_bundle_data(): +def _get_ayon_bundle_data() -> tuple[ + dict[str, Any], Optional[dict[str, Any]] +]: + studio_bundle_name = os.environ.get("AYON_STUDIO_BUNDLE_NAME") + project_bundle_name = os.getenv("AYON_BUNDLE_NAME") + # If AYON launcher <1.4.0 was used + if not studio_bundle_name: + studio_bundle_name = project_bundle_name bundles = ayon_api.get_bundles()["bundles"] - - bundle_name = os.getenv("AYON_BUNDLE_NAME") - - return next( + studio_bundle = next( ( bundle for bundle in bundles - if bundle["name"] == bundle_name + if bundle["name"] == studio_bundle_name ), None ) + if studio_bundle is None: + raise RuntimeError(f"Failed to find bundle '{studio_bundle_name}'.") -def _get_ayon_addons_information(bundle_info): + project_bundle = None + if project_bundle_name and project_bundle_name != studio_bundle_name: + project_bundle = next( + ( + bundle + for bundle in bundles + if bundle["name"] == project_bundle_name + ), + None + ) + + if project_bundle is None: + raise RuntimeError( + f"Failed to find project bundle '{project_bundle_name}'." + ) + + return studio_bundle, project_bundle + + +def _get_ayon_addons_information( + studio_bundle: dict[str, Any], + project_bundle: Optional[dict[str, Any]], +) -> dict[str, str]: """Receive information about addons to use from server. Todos: Actually ask server for the information. Allow project name as optional argument to be able to query information about used addons for specific project. + Wrap versions into an object. Returns: - List[Dict[str, Any]]: List of addon information to use. + list[dict[str, Any]]: List of addon information to use. + """ + key_values = { + "summary": "true", + "bundle_name": studio_bundle["name"], + } + if project_bundle: + key_values["project_bundle_name"] = project_bundle["name"] - output = [] - bundle_addons = bundle_info["addons"] - addons = ayon_api.get_addons_info()["addons"] - for addon in addons: - name = addon["name"] - versions = addon.get("versions") - addon_version = bundle_addons.get(name) - if addon_version is None or not versions: - continue - version = versions.get(addon_version) - if version: - version = copy.deepcopy(version) - version["name"] = name - version["version"] = addon_version - output.append(version) - return output + query = urlencode(key_values) + + response = ayon_api.get(f"settings?{query}") + return { + addon["name"]: addon["version"] + for addon in response.data["addons"] + } -def _handle_moved_addons(addon_name, milestone_version, log): - """Log message that addon version is not compatible with current core. - - The function can return path to addon client code, but that can happen - only if ayon-core is used from code (for development), but still - logs a warning. - - Args: - addon_name (str): Addon name. - milestone_version (str): Milestone addon version. - log (logging.Logger): Logger object. - - Returns: - Union[str, None]: Addon dir or None. - """ - # Handle addons which were moved out of ayon-core - # - Try to fix it by loading it directly from server addons dir in - # ayon-core repository. But that will work only if ayon-core is - # used from code. - addon_dir = os.path.join( - os.path.dirname(os.path.dirname(AYON_CORE_ROOT)), - "server_addon", - addon_name, - "client", - ) - if not os.path.exists(addon_dir): - log.error( - f"Addon '{addon_name}' is not available. 
Please update " - f"{addon_name} addon to '{milestone_version}' or higher." - ) - return None - - log.warning(( - "Please update '{}' addon to '{}' or higher." - " Using client code from ayon-core repository." - ).format(addon_name, milestone_version)) - return addon_dir - - -def _load_ayon_addons(log): +def _load_ayon_addons(log: logging.Logger) -> list[ModuleType]: """Load AYON addons based on information from server. This function should not trigger downloading of any addons but only use @@ -248,10 +218,13 @@ def _load_ayon_addons(log): Args: log (logging.Logger): Logger object. + Returns: + list[ModuleType]: Loaded addon modules. + """ all_addon_modules = [] - bundle_info = _get_ayon_bundle_data() - addons_info = _get_ayon_addons_information(bundle_info) + studio_bundle, project_bundle = _get_ayon_bundle_data() + addons_info = _get_ayon_addons_information(studio_bundle, project_bundle) if not addons_info: return all_addon_modules @@ -263,18 +236,16 @@ def _load_ayon_addons(log): dev_addons_info = {} if dev_mode_enabled: # Get dev addons info only when dev mode is enabled - dev_addons_info = bundle_info.get("addonDevelopment", dev_addons_info) + dev_addons_info = studio_bundle.get( + "addonDevelopment", dev_addons_info + ) addons_dir_exists = os.path.exists(addons_dir) if not addons_dir_exists: - log.warning("Addons directory does not exists. Path \"{}\"".format( - addons_dir - )) - - for addon_info in addons_info: - addon_name = addon_info["name"] - addon_version = addon_info["version"] + log.warning( + f"Addons directory does not exists. Path \"{addons_dir}\"") + for addon_name, addon_version in addons_info.items(): # core addon does not have any addon object if addon_name == "core": continue @@ -283,32 +254,28 @@ def _load_ayon_addons(log): use_dev_path = dev_addon_info.get("enabled", False) addon_dir = None - milestone_version = MOVED_ADDON_MILESTONE_VERSIONS.get(addon_name) if use_dev_path: addon_dir = dev_addon_info["path"] - if not addon_dir or not os.path.exists(addon_dir): - log.warning(( - "Dev addon {} {} path does not exists. Path \"{}\"" - ).format(addon_name, addon_version, addon_dir)) - continue + if addon_dir: + addon_dir = os.path.expandvars( + addon_dir.format_map(os.environ) + ) - elif ( - milestone_version is not None - and VersionInfo.parse(addon_version) < milestone_version - ): - addon_dir = _handle_moved_addons( - addon_name, milestone_version, log - ) - if not addon_dir: + if not addon_dir or not os.path.exists(addon_dir): + log.warning( + f"Dev addon {addon_name} {addon_version} path" + f" does not exists. Path \"{addon_dir}\"" + ) continue elif addons_dir_exists: - folder_name = "{}_{}".format(addon_name, addon_version) + folder_name = f"{addon_name}_{addon_version}" addon_dir = os.path.join(addons_dir, folder_name) if not os.path.exists(addon_dir): - log.debug(( - "No localized client code found for addon {} {}." - ).format(addon_name, addon_version)) + log.debug( + "No localized client code found" + f" for addon {addon_name} {addon_version}." 
+ ) continue if not addon_dir: @@ -347,24 +314,22 @@ def _load_ayon_addons(log): except BaseException: log.warning( - "Failed to import \"{}\"".format(basename), + f"Failed to import \"{basename}\"", exc_info=True ) if not addon_modules: - log.warning("Addon {} {} has no content to import".format( - addon_name, addon_version - )) + log.warning( + f"Addon {addon_name} {addon_version} has no content to import" + ) continue if len(addon_modules) > 1: - log.warning(( - "Multiple modules ({}) were found in addon '{}' in dir {}." - ).format( - ", ".join([m.__name__ for m in addon_modules]), - addon_name, - addon_dir, - )) + joined_modules = ", ".join([m.__name__ for m in addon_modules]) + log.warning( + f"Multiple modules ({joined_modules}) were found in" + f" addon '{addon_name}' in dir {addon_dir}." + ) all_addon_modules.extend(addon_modules) return all_addon_modules @@ -382,20 +347,21 @@ class AYONAddon(ABC): Attributes: enabled (bool): Is addon enabled. - name (str): Addon name. Args: manager (AddonsManager): Manager object who discovered addon. settings (dict[str, Any]): AYON settings. """ - enabled = True + enabled: bool = True _id = None # Temporary variable for 'version' property _missing_version_warned = False - def __init__(self, manager, settings): + def __init__( + self, manager: AddonsManager, settings: dict[str, Any] + ) -> None: self.manager = manager self.log = Logger.get_logger(self.name) @@ -403,7 +369,7 @@ class AYONAddon(ABC): self.initialize(settings) @property - def id(self): + def id(self) -> str: """Random id of addon object. Returns: @@ -416,7 +382,7 @@ class AYONAddon(ABC): @property @abstractmethod - def name(self): + def name(self) -> str: """Addon name. Returns: @@ -426,7 +392,7 @@ class AYONAddon(ABC): pass @property - def version(self): + def version(self) -> str: """Addon version. Todo: @@ -445,7 +411,7 @@ class AYONAddon(ABC): ) return "0.0.0" - def initialize(self, settings): + def initialize(self, settings: dict[str, Any]) -> None: """Initialization of addon attributes. It is not recommended to override __init__ that's why specific method @@ -457,7 +423,7 @@ class AYONAddon(ABC): """ pass - def connect_with_addons(self, enabled_addons): + def connect_with_addons(self, enabled_addons: list[AYONAddon]) -> None: """Connect with other enabled addons. Args: @@ -468,7 +434,7 @@ class AYONAddon(ABC): def ensure_is_process_ready( self, process_context: ProcessContext - ): + ) -> None: """Make sure addon is prepared for a process. This method is called when some action makes sure that addon has set @@ -489,7 +455,7 @@ class AYONAddon(ABC): """ pass - def get_global_environments(self): + def get_global_environments(self) -> dict[str, str]: """Get global environments values of addon. Environment variables that can be get only from system settings. @@ -500,20 +466,12 @@ class AYONAddon(ABC): """ return {} - def modify_application_launch_arguments(self, application, env): - """Give option to modify launch environments before application launch. - - Implementation is optional. To change environments modify passed - dictionary of environments. - - Args: - application (Application): Application that is launched. - env (dict[str, str]): Current environment variables. - - """ - pass - - def on_host_install(self, host, host_name, project_name): + def on_host_install( + self, + host: HostBase, + host_name: str, + project_name: str, + ) -> None: """Host was installed which gives option to handle in-host logic. 
It is a good option to register in-host event callbacks which are @@ -524,7 +482,7 @@ class AYONAddon(ABC): to receive from 'host' object. Args: - host (Union[ModuleType, HostBase]): Access to installed/registered + host (HostBase): Access to installed/registered host object. host_name (str): Name of host. project_name (str): Project name which is main part of host @@ -533,7 +491,7 @@ class AYONAddon(ABC): """ pass - def cli(self, addon_click_group): + def cli(self, addon_click_group: click.Group) -> None: """Add commands to click group. The best practise is to create click group for whole addon which is @@ -564,15 +522,21 @@ class AYONAddon(ABC): class _AddonReportInfo: def __init__( - self, class_name, name, version, report_value_by_label - ): + self, + class_name: str, + name: str, + version: str, + report_value_by_label: dict[str, Optional[str]], + ) -> None: self.class_name = class_name self.name = name self.version = version self.report_value_by_label = report_value_by_label @classmethod - def from_addon(cls, addon, report): + def from_addon( + cls, addon: AYONAddon, report: dict[str, dict[str, int]] + ) -> "_AddonReportInfo": class_name = addon.__class__.__name__ report_value_by_label = { label: reported.get(class_name) @@ -599,29 +563,35 @@ class AddonsManager: _report_total_key = "Total" _log = None - def __init__(self, settings=None, initialize=True): + def __init__( + self, + settings: Optional[dict[str, Any]] = None, + initialize: bool = True, + ) -> None: self._settings = settings - self._addons = [] - self._addons_by_id = {} - self._addons_by_name = {} + self._addons: list[AYONAddon] = [] + self._addons_by_id: dict[str, AYONAddon] = {} + self._addons_by_name: dict[str, AYONAddon] = {} # For report of time consumption - self._report = {} + self._report: dict[str, dict[str, int]] = {} if initialize: self.initialize_addons() self.connect_addons() - def __getitem__(self, addon_name): + def __getitem__(self, addon_name: str) -> AYONAddon: return self._addons_by_name[addon_name] @property - def log(self): + def log(self) -> logging.Logger: if self._log is None: - self._log = logging.getLogger(self.__class__.__name__) + self._log = Logger.get_logger(self.__class__.__name__) return self._log - def get(self, addon_name, default=None): + def get( + self, addon_name: str, default: Optional[Any] = None + ) -> Union[AYONAddon, Any]: """Access addon by name. Args: @@ -635,18 +605,20 @@ class AddonsManager: return self._addons_by_name.get(addon_name, default) @property - def addons(self): + def addons(self) -> list[AYONAddon]: return list(self._addons) @property - def addons_by_id(self): + def addons_by_id(self) -> dict[str, AYONAddon]: return dict(self._addons_by_id) @property - def addons_by_name(self): + def addons_by_name(self) -> dict[str, AYONAddon]: return dict(self._addons_by_name) - def get_enabled_addon(self, addon_name, default=None): + def get_enabled_addon( + self, addon_name: str, default: Optional[Any] = None + ) -> Union[AYONAddon, Any]: """Fast access to enabled addon. If addon is available but is not enabled default value is returned. @@ -657,7 +629,7 @@ class AddonsManager: not enabled. Returns: - Union[AYONAddon, None]: Enabled addon found by name or None. + Union[AYONAddon, Any]: Enabled addon found by name or None. """ addon = self.get(addon_name) @@ -665,7 +637,7 @@ class AddonsManager: return addon return default - def get_enabled_addons(self): + def get_enabled_addons(self) -> list[AYONAddon]: """Enabled addons initialized by the manager. 
Returns: @@ -678,7 +650,7 @@ class AddonsManager: if addon.enabled ] - def initialize_addons(self): + def initialize_addons(self) -> None: """Import and initialize addons.""" # Make sure modules are loaded load_addons() @@ -759,7 +731,7 @@ class AddonsManager: report[self._report_total_key] = time.time() - time_start self._report["Initialization"] = report - def connect_addons(self): + def connect_addons(self) -> None: """Trigger connection with other enabled addons. Addons should handle their interfaces in `connect_with_addons`. @@ -768,7 +740,7 @@ class AddonsManager: time_start = time.time() prev_start_time = time_start enabled_addons = self.get_enabled_addons() - self.log.debug("Has {} enabled addons.".format(len(enabled_addons))) + self.log.debug(f"Has {len(enabled_addons)} enabled addons.") for addon in enabled_addons: try: addon.connect_with_addons(enabled_addons) @@ -787,7 +759,7 @@ class AddonsManager: report[self._report_total_key] = time.time() - time_start self._report["Connect modules"] = report - def collect_global_environments(self): + def collect_global_environments(self) -> dict[str, str]: """Helper to collect global environment variabled from modules. Returns: @@ -810,15 +782,31 @@ class AddonsManager: module_envs[key] = value return module_envs - def collect_plugin_paths(self): + def collect_plugin_paths(self) -> dict[str, list[str]]: """Helper to collect all plugins from modules inherited IPluginPaths. Unknown keys are logged out. + Deprecated: + Use targeted methods 'collect_launcher_action_paths', + 'collect_create_plugin_paths', 'collect_load_plugin_paths', + 'collect_publish_plugin_paths' and + 'collect_inventory_action_paths' to collect plugin paths. + Returns: dict: Output is dictionary with keys "publish", "create", "load", "actions" and "inventory" each containing list of paths. + """ + warnings.warn( + "Used deprecated method 'collect_plugin_paths'. Please use" + " targeted methods 'collect_launcher_action_paths'," + " 'collect_create_plugin_paths', 'collect_load_plugin_paths'" + " 'collect_publish_plugin_paths' and" + " 'collect_inventory_action_paths'", + DeprecationWarning, + stacklevel=2 + ) # Output structure output = { "publish": [], @@ -853,7 +841,7 @@ class AddonsManager: # Report unknown keys (Developing purposes) if unknown_keys_by_addon: expected_keys = ", ".join([ - "\"{}\"".format(key) for key in output.keys() + f'"{key}"' for key in output.keys() ]) msg_template = "Addon: \"{}\" - got key {}" msg_items = [] @@ -862,39 +850,45 @@ class AddonsManager: "\"{}\"".format(key) for key in keys ]) msg_items.append(msg_template.format(addon_name, joined_keys)) - self.log.warning(( - "Expected keys from `get_plugin_paths` are {}. {}" - ).format(expected_keys, " | ".join(msg_items))) + joined_items = " | ".join(msg_items) + self.log.warning( + f"Expected keys from `get_plugin_paths` are {expected_keys}." + f" {joined_items}" + ) return output - def _collect_plugin_paths(self, method_name, *args, **kwargs): + def _collect_plugin_paths(self, method_name: str, *args, **kwargs): output = [] for addon in self.get_enabled_addons(): # Skip addon that do not inherit from `IPluginPaths` if not isinstance(addon, IPluginPaths): continue + paths = None method = getattr(addon, method_name) try: paths = method(*args, **kwargs) except Exception: self.log.warning( - ( - "Failed to get plugin paths from addon" - " '{}' using '{}'." 
- ).format(addon.__class__.__name__, method_name), + "Failed to get plugin paths from addon" + f" '{addon.name}' using '{method_name}'.", exc_info=True ) + + if not paths: continue - if paths: - # Convert to list if value is not list - if not isinstance(paths, (list, tuple, set)): - paths = [paths] - output.extend(paths) + if isinstance(paths, str): + paths = [paths] + self.log.warning( + f"Addon '{addon.name}' returned invalid output type" + f" from '{method_name}'." + f" Got 'str' expected 'list[str]'." + ) + output.extend(paths) return output - def collect_launcher_action_paths(self): + def collect_launcher_action_paths(self) -> list[str]: """Helper to collect launcher action paths from addons. Returns: @@ -909,16 +903,16 @@ class AddonsManager: output.insert(0, actions_dir) return output - def collect_create_plugin_paths(self, host_name): + def collect_create_plugin_paths(self, host_name: str) -> list[str]: """Helper to collect creator plugin paths from addons. Args: host_name (str): For which host are creators meant. Returns: - list: List of creator plugin paths. - """ + list[str]: List of creator plugin paths. + """ return self._collect_plugin_paths( "get_create_plugin_paths", host_name @@ -926,37 +920,37 @@ class AddonsManager: collect_creator_plugin_paths = collect_create_plugin_paths - def collect_load_plugin_paths(self, host_name): + def collect_load_plugin_paths(self, host_name: str) -> list[str]: """Helper to collect load plugin paths from addons. Args: host_name (str): For which host are load plugins meant. Returns: - list: List of load plugin paths. - """ + list[str]: List of load plugin paths. + """ return self._collect_plugin_paths( "get_load_plugin_paths", host_name ) - def collect_publish_plugin_paths(self, host_name): + def collect_publish_plugin_paths(self, host_name: str) -> list[str]: """Helper to collect load plugin paths from addons. Args: host_name (str): For which host are load plugins meant. Returns: - list: List of pyblish plugin paths. - """ + list[str]: List of pyblish plugin paths. + """ return self._collect_plugin_paths( "get_publish_plugin_paths", host_name ) - def collect_inventory_action_paths(self, host_name): + def collect_inventory_action_paths(self, host_name: str) -> list[str]: """Helper to collect load plugin paths from addons. Args: @@ -964,21 +958,21 @@ class AddonsManager: Returns: list: List of pyblish plugin paths. - """ + """ return self._collect_plugin_paths( "get_inventory_action_paths", host_name ) - def get_host_addon(self, host_name): + def get_host_addon(self, host_name: str) -> Optional[AYONAddon]: """Find host addon by host name. Args: host_name (str): Host name for which is found host addon. Returns: - Union[AYONAddon, None]: Found host addon by name or `None`. + Optional[AYONAddon]: Found host addon by name or `None`. """ for addon in self.get_enabled_addons(): @@ -989,21 +983,21 @@ class AddonsManager: return addon return None - def get_host_names(self): + def get_host_names(self) -> set[str]: """List of available host names based on host addons. Returns: - Iterable[str]: All available host names based on enabled addons + set[str]: All available host names based on enabled addons inheriting 'IHostAddon'. - """ + """ return { addon.host_name for addon in self.get_enabled_addons() if isinstance(addon, IHostAddon) } - def print_report(self): + def print_report(self) -> None: """Print out report of time spent on addons initialization parts. 
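The manager-side helpers above ('collect_create_plugin_paths', 'collect_publish_plugin_paths', and friends) each forward a host name to the matching addon method, so an addon should now implement the targeted methods rather than the deprecated blanket 'get_plugin_paths'. A minimal sketch of what that looks like on the addon side; the addon name, directory layout and returned paths are illustrative only, and the 'host_name' argument mirrors what the collect helpers above pass through:

    import os

    from ayon_core.addon import AYONAddon, IPluginPaths

    ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))


    class ExampleAddon(AYONAddon, IPluginPaths):
        # Hypothetical addon used only to illustrate the targeted methods.
        name = "example"

        def get_create_plugin_paths(self, host_name):
            # Preferred over returning {"create": [...]} from 'get_plugin_paths'.
            return [os.path.join(ADDON_ROOT, "plugins", "create", host_name)]

        def get_publish_plugin_paths(self, host_name):
            return [os.path.join(ADDON_ROOT, "plugins", "publish")]
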
Reporting is not automated must be implemented for each initialization diff --git a/client/ayon_core/addon/interfaces.py b/client/ayon_core/addon/interfaces.py index 232c056fb4..bf08ccd48c 100644 --- a/client/ayon_core/addon/interfaces.py +++ b/client/ayon_core/addon/interfaces.py @@ -1,6 +1,7 @@ """Addon interfaces for AYON.""" from __future__ import annotations +import warnings from abc import ABCMeta, abstractmethod from typing import TYPE_CHECKING, Callable, Optional, Type @@ -39,26 +40,29 @@ class AYONInterface(metaclass=_AYONInterfaceMeta): class IPluginPaths(AYONInterface): - """Addon has plugin paths to return. + """Addon wants to register plugin paths.""" - Expected result is dictionary with keys "publish", "create", "load", - "actions" or "inventory" and values as list or string. - { - "publish": ["path/to/publish_plugins"] - } - """ - - @abstractmethod def get_plugin_paths(self) -> dict[str, list[str]]: """Return plugin paths for addon. + This method was abstract (required) in the past, so raise the required + 'core' addon version when 'get_plugin_paths' is removed from + addon. + + Deprecated: + Please implement specific methods 'get_create_plugin_paths', + 'get_load_plugin_paths', 'get_inventory_action_paths' and + 'get_publish_plugin_paths' to return plugin paths. + Returns: dict[str, list[str]]: Plugin paths for addon. """ + return {} def _get_plugin_paths_by_type( - self, plugin_type: str) -> list[str]: + self, plugin_type: str + ) -> list[str]: """Get plugin paths by type. Args: @@ -78,6 +82,24 @@ class IPluginPaths(AYONInterface): if not isinstance(paths, (list, tuple, set)): paths = [paths] + + new_function_name = "get_launcher_action_paths" + if plugin_type == "create": + new_function_name = "get_create_plugin_paths" + elif plugin_type == "load": + new_function_name = "get_load_plugin_paths" + elif plugin_type == "publish": + new_function_name = "get_publish_plugin_paths" + elif plugin_type == "inventory": + new_function_name = "get_inventory_action_paths" + + warnings.warn( + f"Addon '{self.name}' returns '{plugin_type}' paths using" + " 'get_plugin_paths' method. Please implement" + f" '{new_function_name}' instead.", + DeprecationWarning, + stacklevel=2 + ) return paths def get_launcher_action_paths(self) -> list[str]: diff --git a/client/ayon_core/cli.py b/client/ayon_core/cli.py index ca3dcc86ee..85c254e7eb 100644 --- a/client/ayon_core/cli.py +++ b/client/ayon_core/cli.py @@ -27,25 +27,40 @@ from ayon_core.lib.env_tools import ( @click.group(invoke_without_command=True) @click.pass_context -@click.option("--use-staging", is_flag=True, - expose_value=False, help="use staging variants") -@click.option("--debug", is_flag=True, expose_value=False, - help="Enable debug") -@click.option("--verbose", expose_value=False, - help=("Change AYON log level (debug - critical or 0-50)")) -@click.option("--force", is_flag=True, hidden=True) -def main_cli(ctx, force): +@click.option( + "--use-staging", + is_flag=True, + expose_value=False, + help="use staging variants") +@click.option( + "--debug", + is_flag=True, + expose_value=False, + help="Enable debug") +@click.option( + "--project", + help="Project name") +@click.option( + "--verbose", + expose_value=False, + help="Change AYON log level (debug - critical or 0-50)") +@click.option( + "--use-dev", + is_flag=True, + expose_value=False, + help="use dev bundle") +def main_cli(ctx, *_args, **_kwargs): """AYON is main command serving as entry point to pipeline system. It wraps different commands together. 
""" - if ctx.invoked_subcommand is None: # Print help if headless mode is used if os.getenv("AYON_HEADLESS_MODE") == "1": print(ctx.get_help()) sys.exit(0) else: + ctx.params.pop("project") ctx.forward(tray) @@ -60,7 +75,6 @@ def tray(force): Default action of AYON command is to launch tray widget to control basic aspects of AYON. See documentation for more information. """ - from ayon_core.tools.tray import main main(force) @@ -306,6 +320,43 @@ def _add_addons(addons_manager): ) +def _cleanup_project_args(): + rem_args = list(sys.argv[1:]) + if "--project" not in rem_args: + return + + cmd = None + current_ctx = None + parent_name = "ayon" + parent_cmd = main_cli + while hasattr(parent_cmd, "resolve_command"): + if current_ctx is None: + current_ctx = main_cli.make_context(parent_name, rem_args) + else: + current_ctx = parent_cmd.make_context( + parent_name, + rem_args, + parent=current_ctx + ) + if not rem_args: + break + cmd_name, cmd, rem_args = parent_cmd.resolve_command( + current_ctx, rem_args + ) + parent_name = cmd_name + parent_cmd = cmd + + if cmd is None: + return + + param_names = {param.name for param in cmd.params} + if "project" in param_names: + return + idx = sys.argv.index("--project") + sys.argv.pop(idx) + sys.argv.pop(idx) + + def main(*args, **kwargs): logging.basicConfig() @@ -332,10 +383,14 @@ def main(*args, **kwargs): addons_manager = AddonsManager() _set_addons_environments(addons_manager) _add_addons(addons_manager) + + _cleanup_project_args() + try: main_cli( prog_name="ayon", obj={"addons_manager": addons_manager}, + args=(sys.argv[1:]), ) except Exception: # noqa exc_info = sys.exc_info() diff --git a/client/ayon_core/hooks/pre_add_last_workfile_arg.py b/client/ayon_core/hooks/pre_add_last_workfile_arg.py index d6110ea367..752302bb20 100644 --- a/client/ayon_core/hooks/pre_add_last_workfile_arg.py +++ b/client/ayon_core/hooks/pre_add_last_workfile_arg.py @@ -33,22 +33,25 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook): "cinema4d", "silhouette", "gaffer", + "loki", } launch_types = {LaunchTypes.local} def execute(self): - if not self.data.get("start_last_workfile"): - self.log.info("It is set to not start last workfile on start.") - return + workfile_path = self.data.get("workfile_path") + if not workfile_path: + if not self.data.get("start_last_workfile"): + self.log.info("It is set to not start last workfile on start.") + return - last_workfile = self.data.get("last_workfile_path") - if not last_workfile: - self.log.warning("Last workfile was not collected.") - return + workfile_path = self.data.get("last_workfile_path") + if not workfile_path: + self.log.warning("Last workfile was not collected.") + return - if not os.path.exists(last_workfile): + if not os.path.exists(workfile_path): self.log.info("Current context does not have any workfile yet.") return # Add path to workfile to arguments - self.launch_context.launch_args.append(last_workfile) + self.launch_context.launch_args.append(workfile_path) diff --git a/client/ayon_core/hooks/pre_ocio_hook.py b/client/ayon_core/hooks/pre_ocio_hook.py index 12f9454a88..be086dae65 100644 --- a/client/ayon_core/hooks/pre_ocio_hook.py +++ b/client/ayon_core/hooks/pre_ocio_hook.py @@ -14,7 +14,7 @@ class OCIOEnvHook(PreLaunchHook): "fusion", "blender", "aftereffects", - "3dsmax", + "max", "houdini", "maya", "nuke", @@ -24,6 +24,7 @@ class OCIOEnvHook(PreLaunchHook): "cinema4d", "silhouette", "gaffer", + "loki", } launch_types = set() diff --git a/client/ayon_core/host/__init__.py b/client/ayon_core/host/__init__.py 
index da1237c739..7d5918b0ac 100644 --- a/client/ayon_core/host/__init__.py +++ b/client/ayon_core/host/__init__.py @@ -1,9 +1,14 @@ +from .constants import ContextChangeReason +from .abstract import AbstractHost, ApplicationInformation from .host import ( HostBase, + ContextChangeData, ) from .interfaces import ( IWorkfileHost, + WorkfileInfo, + PublishedWorkfileInfo, ILoadHost, IPublishHost, INewPublisher, @@ -13,9 +18,17 @@ from .dirmap import HostDirmap __all__ = ( + "ContextChangeReason", + + "AbstractHost", + "ApplicationInformation", + "HostBase", + "ContextChangeData", "IWorkfileHost", + "WorkfileInfo", + "PublishedWorkfileInfo", "ILoadHost", "IPublishHost", "INewPublisher", diff --git a/client/ayon_core/host/abstract.py b/client/ayon_core/host/abstract.py new file mode 100644 index 0000000000..7b4bb5b791 --- /dev/null +++ b/client/ayon_core/host/abstract.py @@ -0,0 +1,120 @@ +from __future__ import annotations + +import logging +from abc import ABC, abstractmethod +from dataclasses import dataclass +import typing +from typing import Optional, Any + +from .constants import ContextChangeReason + +if typing.TYPE_CHECKING: + from ayon_core.pipeline import Anatomy + + from .typing import HostContextData + + +@dataclass +class ApplicationInformation: + """Application information. + + Attributes: + app_name (Optional[str]): Application name. e.g. Maya, NukeX, Nuke + app_version (Optional[str]): Application version. e.g. 15.2.1 + + """ + app_name: Optional[str] = None + app_version: Optional[str] = None + + +class AbstractHost(ABC): + """Abstract definition of host implementation.""" + @property + @abstractmethod + def log(self) -> logging.Logger: + pass + + @property + @abstractmethod + def name(self) -> str: + """Host name.""" + pass + + @abstractmethod + def get_app_information(self) -> ApplicationInformation: + """Information about the application where host is running. + + Returns: + ApplicationInformation: Application information. + + """ + pass + + @abstractmethod + def get_current_context(self) -> HostContextData: + """Get the current context of the host. + + Current context is defined by project name, folder path and task name. + + Returns: + HostContextData: The current context of the host. + + """ + pass + + @abstractmethod + def set_current_context( + self, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + reason: ContextChangeReason = ContextChangeReason.undefined, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional[Anatomy] = None, + ) -> HostContextData: + """Change context of the host. + + Args: + folder_entity (dict[str, Any]): Folder entity. + task_entity (dict[str, Any]): Task entity. + reason (ContextChangeReason): Reason for change. + project_entity (dict[str, Any]): Project entity. + anatomy (Anatomy): Anatomy entity. + + """ + pass + + @abstractmethod + def get_current_project_name(self) -> str: + """Get the current project name. + + Returns: + Optional[str]: The current project name. + + """ + pass + + @abstractmethod + def get_current_folder_path(self) -> Optional[str]: + """Get the current folder path. + + Returns: + Optional[str]: The current folder path. + + """ + pass + + @abstractmethod + def get_current_task_name(self) -> Optional[str]: + """Get the current task name. + + Returns: + Optional[str]: The current task name. 
+ + """ + pass + + @abstractmethod + def get_context_title(self) -> str: + """Get the context title used in UIs.""" + pass diff --git a/client/ayon_core/host/constants.py b/client/ayon_core/host/constants.py new file mode 100644 index 0000000000..2564c5d54d --- /dev/null +++ b/client/ayon_core/host/constants.py @@ -0,0 +1,15 @@ +from enum import Enum + + +class StrEnum(str, Enum): + """A string-based Enum class that allows for string comparison.""" + + def __str__(self) -> str: + return self.value + + +class ContextChangeReason(StrEnum): + """Reasons for context change in the host.""" + undefined = "undefined" + workfile_open = "workfile.opened" + workfile_save = "workfile.saved" diff --git a/client/ayon_core/host/host.py b/client/ayon_core/host/host.py index 5a29de6cd7..7d6d3ddbe4 100644 --- a/client/ayon_core/host/host.py +++ b/client/ayon_core/host/host.py @@ -1,13 +1,35 @@ +from __future__ import annotations + import os import logging import contextlib -from abc import ABC, abstractproperty +import typing +from typing import Optional, Any +from dataclasses import dataclass -# NOTE can't import 'typing' because of issues in Maya 2020 -# - shiboken crashes on 'typing' module import +import ayon_api + +from ayon_core.lib import emit_event + +from .constants import ContextChangeReason +from .abstract import AbstractHost, ApplicationInformation + +if typing.TYPE_CHECKING: + from ayon_core.pipeline import Anatomy + + from .typing import HostContextData -class HostBase(ABC): +@dataclass +class ContextChangeData: + project_entity: dict[str, Any] + folder_entity: dict[str, Any] + task_entity: dict[str, Any] + reason: ContextChangeReason + anatomy: Anatomy + + +class HostBase(AbstractHost): """Base of host implementation class. Host is pipeline implementation of DCC application. This class should help @@ -74,6 +96,18 @@ class HostBase(ABC): pass + def get_app_information(self) -> ApplicationInformation: + """Running application information. + + Host integration should override this method and return correct + information. + + Returns: + ApplicationInformation: Application information. + + """ + return ApplicationInformation() + def install(self): """Install host specific functionality. @@ -82,47 +116,41 @@ class HostBase(ABC): It is called automatically when 'ayon_core.pipeline.install_host' is triggered. - """ + """ pass @property - def log(self): + def log(self) -> logging.Logger: if self._log is None: self._log = logging.getLogger(self.__class__.__name__) return self._log - @abstractproperty - def name(self): - """Host name.""" - - pass - - def get_current_project_name(self): + def get_current_project_name(self) -> str: """ Returns: - Union[str, None]: Current project name. + str: Current project name. + """ + return os.environ["AYON_PROJECT_NAME"] - return os.environ.get("AYON_PROJECT_NAME") - - def get_current_folder_path(self): + def get_current_folder_path(self) -> Optional[str]: """ Returns: - Union[str, None]: Current asset name. - """ + Optional[str]: Current asset name. + """ return os.environ.get("AYON_FOLDER_PATH") - def get_current_task_name(self): + def get_current_task_name(self) -> Optional[str]: """ Returns: - Union[str, None]: Current task name. - """ + Optional[str]: Current task name. + """ return os.environ.get("AYON_TASK_NAME") - def get_current_context(self): + def get_current_context(self) -> HostContextData: """Get current context information. This method should be used to get current context of host. 
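The 'ContextChangeReason' enum added in constants.py above is built on a small 'StrEnum' helper so its members compare equal to plain strings, which keeps existing string-based checks working while still giving typed members. A quick usage sketch, assuming the export from 'ayon_core.host' shown in the __init__.py hunk above; the member values come straight from constants.py:

    from ayon_core.host import ContextChangeReason

    reason = ContextChangeReason.workfile_save
    assert reason == "workfile.saved"           # str subclass, compares to plain str
    assert str(reason) == "workfile.saved"      # custom __str__ returns the raw value
    assert ContextChangeReason("undefined") is ContextChangeReason.undefined
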
Usage of @@ -131,16 +159,85 @@ class HostBase(ABC): can't be caught properly. Returns: - Dict[str, Union[str, None]]: Context with 3 keys 'project_name', - 'folder_path' and 'task_name'. All of them can be 'None'. - """ + HostContextData: Current context with 'project_name', + 'folder_path' and 'task_name'. + """ return { "project_name": self.get_current_project_name(), "folder_path": self.get_current_folder_path(), "task_name": self.get_current_task_name() } + def set_current_context( + self, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + reason: ContextChangeReason = ContextChangeReason.undefined, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional[Anatomy] = None, + ) -> HostContextData: + """Set current context information. + + This method should be used to set current context of host. Usage of + this method can be crucial for host implementations in DCCs where + can be opened multiple workfiles at one moment and change of context + can't be caught properly. + + Notes: + This method should not care about change of workdir and expect any + of the arguments. + + Args: + folder_entity (Optional[dict[str, Any]]): Folder entity. + task_entity (Optional[dict[str, Any]]): Task entity. + reason (ContextChangeReason): Reason for context change. + project_entity (Optional[dict[str, Any]]): Project entity data. + anatomy (Optional[Anatomy]): Anatomy instance for the project. + + Returns: + dict[str, Optional[str]]: Context information with project name, + folder path and task name. + + """ + from ayon_core.pipeline import Anatomy + + folder_path = folder_entity["path"] + task_name = task_entity["name"] + + context = self.get_current_context() + # Don't do anything if context did not change + if ( + context["folder_path"] == folder_path + and context["task_name"] == task_name + ): + return context + + project_name = self.get_current_project_name() + if project_entity is None: + project_entity = ayon_api.get_project(project_name) + + if anatomy is None: + anatomy = Anatomy(project_name, project_entity=project_entity) + + context_change_data = ContextChangeData( + project_entity, + folder_entity, + task_entity, + reason, + anatomy, + ) + self._before_context_change(context_change_data) + self._set_current_context(context_change_data) + self._after_context_change(context_change_data) + + return self._emit_context_change_event( + project_name, + folder_path, + task_name, + ) + def get_context_title(self): """Context title shown for UI purposes. @@ -187,3 +284,91 @@ class HostBase(ABC): yield finally: pass + + def _emit_context_change_event( + self, + project_name: str, + folder_path: Optional[str], + task_name: Optional[str], + ) -> HostContextData: + """Emit context change event. + + Args: + project_name (str): Name of the project. + folder_path (Optional[str]): Path of the folder. + task_name (Optional[str]): Name of the task. + + Returns: + HostContextData: Data send to context change event. + + """ + data: HostContextData = { + "project_name": project_name, + "folder_path": folder_path, + "task_name": task_name, + } + emit_event("taskChanged", data) + return data + + def _set_current_context( + self, context_change_data: ContextChangeData + ) -> None: + """Method that changes the context in host. + + Can be overriden for hosts that do need different handling of context + than using environment variables. + + Args: + context_change_data (ContextChangeData): Context change related + data. 
+ + """ + project_name = self.get_current_project_name() + folder_path = None + task_name = None + if context_change_data.folder_entity: + folder_path = context_change_data.folder_entity["path"] + if context_change_data.task_entity: + task_name = context_change_data.task_entity["name"] + + envs = { + "AYON_PROJECT_NAME": project_name, + "AYON_FOLDER_PATH": folder_path, + "AYON_TASK_NAME": task_name, + } + + # Update the Session and environments. Pop from environments all + # keys with value set to None. + for key, value in envs.items(): + if value is None: + os.environ.pop(key, None) + else: + os.environ[key] = value + + def _before_context_change(self, context_change_data: ContextChangeData): + """Before context is changed. + + This method is called before the context is changed in the host. + + Can be overridden to implement host specific logic. + + Args: + context_change_data (ContextChangeData): Object with information + about context change. + + """ + pass + + def _after_context_change(self, context_change_data: ContextChangeData): + """After context is changed. + + This method is called after the context is changed in the host. + + Can be overridden to implement host specific logic. + + Args: + context_change_data (ContextChangeData): Object with information + about context change. + + """ + pass diff --git a/client/ayon_core/host/interfaces.py b/client/ayon_core/host/interfaces.py deleted file mode 100644 index c077dfeae9..0000000000 --- a/client/ayon_core/host/interfaces.py +++ /dev/null @@ -1,384 +0,0 @@ -from abc import ABC, abstractmethod - - -class MissingMethodsError(ValueError): - """Exception when host miss some required methods for specific workflow. - - Args: - host (HostBase): Host implementation where are missing methods. - missing_methods (list[str]): List of missing methods. - """ - - def __init__(self, host, missing_methods): - joined_missing = ", ".join( - ['"{}"'.format(item) for item in missing_methods] - ) - host_name = getattr(host, "name", None) - if not host_name: - try: - host_name = host.__file__.replace("\\", "/").split("/")[-3] - except Exception: - host_name = str(host) - message = ( - "Host \"{}\" miss methods {}".format(host_name, joined_missing) - ) - super(MissingMethodsError, self).__init__(message) - - -class ILoadHost: - """Implementation requirements to be able use reference of representations. - - The load plugins can do referencing even without implementation of methods - here, but switch and removement of containers would not be possible. - - Questions: - - Is list container dependency of host or load plugins? - - Should this be directly in HostBase? - - how to find out if referencing is available? - - do we need to know that? - """ - - @staticmethod - def get_missing_load_methods(host): - """Look for missing methods on "old type" host implementation. - - Method is used for validation of implemented functions related to - loading. Checks only existence of methods. - - Args: - Union[ModuleType, HostBase]: Object of host where to look for - required methods. - - Returns: - list[str]: Missing method implementations for loading workflow. - """ - - if isinstance(host, ILoadHost): - return [] - - required = ["ls"] - missing = [] - for name in required: - if not hasattr(host, name): - missing.append(name) - return missing - - @staticmethod - def validate_load_methods(host): - """Validate implemented methods of "old type" host for load workflow. - - Args: - Union[ModuleType, HostBase]: Object of host to validate. 
- - Raises: - MissingMethodsError: If there are missing methods on host - implementation. - """ - missing = ILoadHost.get_missing_load_methods(host) - if missing: - raise MissingMethodsError(host, missing) - - @abstractmethod - def get_containers(self): - """Retrieve referenced containers from scene. - - This can be implemented in hosts where referencing can be used. - - Todo: - Rename function to something more self explanatory. - Suggestion: 'get_containers' - - Returns: - list[dict]: Information about loaded containers. - """ - - pass - - # --- Deprecated method names --- - def ls(self): - """Deprecated variant of 'get_containers'. - - Todo: - Remove when all usages are replaced. - """ - - return self.get_containers() - - -class IWorkfileHost(ABC): - """Implementation requirements to be able use workfile utils and tool.""" - - @staticmethod - def get_missing_workfile_methods(host): - """Look for missing methods on "old type" host implementation. - - Method is used for validation of implemented functions related to - workfiles. Checks only existence of methods. - - Args: - Union[ModuleType, HostBase]: Object of host where to look for - required methods. - - Returns: - list[str]: Missing method implementations for workfiles workflow. - """ - - if isinstance(host, IWorkfileHost): - return [] - - required = [ - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root", - ] - missing = [] - for name in required: - if not hasattr(host, name): - missing.append(name) - return missing - - @staticmethod - def validate_workfile_methods(host): - """Validate methods of "old type" host for workfiles workflow. - - Args: - Union[ModuleType, HostBase]: Object of host to validate. - - Raises: - MissingMethodsError: If there are missing methods on host - implementation. - """ - - missing = IWorkfileHost.get_missing_workfile_methods(host) - if missing: - raise MissingMethodsError(host, missing) - - @abstractmethod - def get_workfile_extensions(self): - """Extensions that can be used as save. - - Questions: - This could potentially use 'HostDefinition'. - """ - - return [] - - @abstractmethod - def save_workfile(self, dst_path=None): - """Save currently opened scene. - - Args: - dst_path (str): Where the current scene should be saved. Or use - current path if 'None' is passed. - """ - - pass - - @abstractmethod - def open_workfile(self, filepath): - """Open passed filepath in the host. - - Args: - filepath (str): Path to workfile. - """ - - pass - - @abstractmethod - def get_current_workfile(self): - """Retrieve path to current opened file. - - Returns: - str: Path to file which is currently opened. - None: If nothing is opened. - """ - - return None - - def workfile_has_unsaved_changes(self): - """Currently opened scene is saved. - - Not all hosts can know if current scene is saved because the API of - DCC does not support it. - - Returns: - bool: True if scene is saved and False if has unsaved - modifications. - None: Can't tell if workfiles has modifications. - """ - - return None - - def work_root(self, session): - """Modify workdir per host. - - Default implementation keeps workdir untouched. - - Warnings: - We must handle this modification with more sophisticated way - because this can't be called out of DCC so opening of last workfile - (calculated before DCC is launched) is complicated. Also breaking - defined work template is not a good idea. - Only place where it's really used and can make sense is Maya. 
There - workspace.mel can modify subfolders where to look for maya files. - - Args: - session (dict): Session context data. - - Returns: - str: Path to new workdir. - """ - - return session["AYON_WORKDIR"] - - # --- Deprecated method names --- - def file_extensions(self): - """Deprecated variant of 'get_workfile_extensions'. - - Todo: - Remove when all usages are replaced. - """ - return self.get_workfile_extensions() - - def save_file(self, dst_path=None): - """Deprecated variant of 'save_workfile'. - - Todo: - Remove when all usages are replaced. - """ - - self.save_workfile(dst_path) - - def open_file(self, filepath): - """Deprecated variant of 'open_workfile'. - - Todo: - Remove when all usages are replaced. - """ - - return self.open_workfile(filepath) - - def current_file(self): - """Deprecated variant of 'get_current_workfile'. - - Todo: - Remove when all usages are replaced. - """ - - return self.get_current_workfile() - - def has_unsaved_changes(self): - """Deprecated variant of 'workfile_has_unsaved_changes'. - - Todo: - Remove when all usages are replaced. - """ - - return self.workfile_has_unsaved_changes() - - -class IPublishHost: - """Functions related to new creation system in new publisher. - - New publisher is not storing information only about each created instance - but also some global data. At this moment are data related only to context - publish plugins but that can extend in future. - """ - - @staticmethod - def get_missing_publish_methods(host): - """Look for missing methods on "old type" host implementation. - - Method is used for validation of implemented functions related to - new publish creation. Checks only existence of methods. - - Args: - Union[ModuleType, HostBase]: Host module where to look for - required methods. - - Returns: - list[str]: Missing method implementations for new publisher - workflow. - """ - - if isinstance(host, IPublishHost): - return [] - - required = [ - "get_context_data", - "update_context_data", - "get_context_title", - "get_current_context", - ] - missing = [] - for name in required: - if not hasattr(host, name): - missing.append(name) - return missing - - @staticmethod - def validate_publish_methods(host): - """Validate implemented methods of "old type" host. - - Args: - Union[ModuleType, HostBase]: Host module to validate. - - Raises: - MissingMethodsError: If there are missing methods on host - implementation. - """ - missing = IPublishHost.get_missing_publish_methods(host) - if missing: - raise MissingMethodsError(host, missing) - - @abstractmethod - def get_context_data(self): - """Get global data related to creation-publishing from workfile. - - These data are not related to any created instance but to whole - publishing context. Not saving/returning them will cause that each - reset of publishing resets all values to default ones. - - Context data can contain information about enabled/disabled publish - plugins or other values that can be filled by artist. - - Returns: - dict: Context data stored using 'update_context_data'. - """ - - pass - - @abstractmethod - def update_context_data(self, data, changes): - """Store global context data to workfile. - - Called when some values in context data has changed. - - Without storing the values in a way that 'get_context_data' would - return them will each reset of publishing cause loose of filled values - by artist. Best practice is to store values into workfile, if possible. - - Args: - data (dict): New data as are. - changes (dict): Only data that has been changed. 
Each value has - tuple with '(, )' value. - """ - - pass - - -class INewPublisher(IPublishHost): - """Legacy interface replaced by 'IPublishHost'. - - Deprecated: - 'INewPublisher' is replaced by 'IPublishHost' please change your - imports. - There is no "reasonable" way hot mark these classes as deprecated - to show warning of wrong import. Deprecated since 3.14.* will be - removed in 3.15.* - """ - - pass diff --git a/client/ayon_core/host/interfaces/__init__.py b/client/ayon_core/host/interfaces/__init__.py new file mode 100644 index 0000000000..8f11ad4e2f --- /dev/null +++ b/client/ayon_core/host/interfaces/__init__.py @@ -0,0 +1,66 @@ +from .exceptions import MissingMethodsError +from .workfiles import ( + IWorkfileHost, + WorkfileInfo, + PublishedWorkfileInfo, + + OpenWorkfileOptionalData, + ListWorkfilesOptionalData, + ListPublishedWorkfilesOptionalData, + SaveWorkfileOptionalData, + CopyWorkfileOptionalData, + CopyPublishedWorkfileOptionalData, + + get_open_workfile_context, + get_list_workfiles_context, + get_list_published_workfiles_context, + get_save_workfile_context, + get_copy_workfile_context, + get_copy_repre_workfile_context, + + OpenWorkfileContext, + ListWorkfilesContext, + ListPublishedWorkfilesContext, + SaveWorkfileContext, + CopyWorkfileContext, + CopyPublishedWorkfileContext, +) +from .interfaces import ( + IPublishHost, + INewPublisher, + ILoadHost, +) + + +__all__ = ( + "MissingMethodsError", + + "IWorkfileHost", + "WorkfileInfo", + "PublishedWorkfileInfo", + + "OpenWorkfileOptionalData", + "ListWorkfilesOptionalData", + "ListPublishedWorkfilesOptionalData", + "SaveWorkfileOptionalData", + "CopyWorkfileOptionalData", + "CopyPublishedWorkfileOptionalData", + + "get_open_workfile_context", + "get_list_workfiles_context", + "get_list_published_workfiles_context", + "get_save_workfile_context", + "get_copy_workfile_context", + "get_copy_repre_workfile_context", + + "OpenWorkfileContext", + "ListWorkfilesContext", + "ListPublishedWorkfilesContext", + "SaveWorkfileContext", + "CopyWorkfileContext", + "CopyPublishedWorkfileContext", + + "IPublishHost", + "INewPublisher", + "ILoadHost", +) diff --git a/client/ayon_core/host/interfaces/exceptions.py b/client/ayon_core/host/interfaces/exceptions.py new file mode 100644 index 0000000000..eec4564142 --- /dev/null +++ b/client/ayon_core/host/interfaces/exceptions.py @@ -0,0 +1,15 @@ +class MissingMethodsError(ValueError): + """Exception when host miss some required methods for a specific workflow. + + Args: + host (HostBase): Host implementation where are missing methods. + missing_methods (list[str]): List of missing methods. + """ + + def __init__(self, host, missing_methods): + joined_missing = ", ".join( + ['"{}"'.format(item) for item in missing_methods] + ) + super().__init__( + f"Host \"{host.name}\" miss methods {joined_missing}" + ) diff --git a/client/ayon_core/host/interfaces/interfaces.py b/client/ayon_core/host/interfaces/interfaces.py new file mode 100644 index 0000000000..6f9a3d8c87 --- /dev/null +++ b/client/ayon_core/host/interfaces/interfaces.py @@ -0,0 +1,189 @@ +from abc import abstractmethod + +from ayon_core.host.abstract import AbstractHost + +from .exceptions import MissingMethodsError + + +class ILoadHost(AbstractHost): + """Implementation requirements to be able use reference of representations. + + The load plugins can do referencing even without implementation of methods + here, but switch and removement of containers would not be possible. 
+ + Questions: + - Is list container dependency of host or load plugins? + - Should this be directly in HostBase? + - how to find out if referencing is available? + - do we need to know that? + """ + + @staticmethod + def get_missing_load_methods(host): + """Look for missing methods on "old type" host implementation. + + Method is used for validation of implemented functions related to + loading. Checks only existence of methods. + + Args: + Union[ModuleType, AbstractHost]: Object of host where to look for + required methods. + + Returns: + list[str]: Missing method implementations for loading workflow. + """ + + if isinstance(host, ILoadHost): + return [] + + required = ["ls"] + missing = [] + for name in required: + if not hasattr(host, name): + missing.append(name) + return missing + + @staticmethod + def validate_load_methods(host): + """Validate implemented methods of "old type" host for load workflow. + + Args: + Union[ModuleType, AbstractHost]: Object of host to validate. + + Raises: + MissingMethodsError: If there are missing methods on host + implementation. + """ + missing = ILoadHost.get_missing_load_methods(host) + if missing: + raise MissingMethodsError(host, missing) + + @abstractmethod + def get_containers(self): + """Retrieve referenced containers from scene. + + This can be implemented in hosts where referencing can be used. + + Todo: + Rename function to something more self explanatory. + Suggestion: 'get_containers' + + Returns: + list[dict]: Information about loaded containers. + """ + + pass + + # --- Deprecated method names --- + def ls(self): + """Deprecated variant of 'get_containers'. + + Todo: + Remove when all usages are replaced. + """ + + return self.get_containers() + + +class IPublishHost(AbstractHost): + """Functions related to new creation system in new publisher. + + New publisher is not storing information only about each created instance + but also some global data. At this moment are data related only to context + publish plugins but that can extend in future. + """ + + @staticmethod + def get_missing_publish_methods(host): + """Look for missing methods on "old type" host implementation. + + Method is used for validation of implemented functions related to + new publish creation. Checks only existence of methods. + + Args: + Union[ModuleType, AbstractHost]: Host module where to look for + required methods. + + Returns: + list[str]: Missing method implementations for new publisher + workflow. + """ + + if isinstance(host, IPublishHost): + return [] + + required = [ + "get_context_data", + "update_context_data", + "get_context_title", + "get_current_context", + ] + missing = [] + for name in required: + if not hasattr(host, name): + missing.append(name) + return missing + + @staticmethod + def validate_publish_methods(host): + """Validate implemented methods of "old type" host. + + Args: + Union[ModuleType, AbstractHost]: Host module to validate. + + Raises: + MissingMethodsError: If there are missing methods on host + implementation. + """ + missing = IPublishHost.get_missing_publish_methods(host) + if missing: + raise MissingMethodsError(host, missing) + + @abstractmethod + def get_context_data(self): + """Get global data related to creation-publishing from workfile. + + These data are not related to any created instance but to whole + publishing context. Not saving/returning them will cause that each + reset of publishing resets all values to default ones. 
+ + Context data can contain information about enabled/disabled publish + plugins or other values that can be filled by artist. + + Returns: + dict: Context data stored using 'update_context_data'. + """ + + pass + + @abstractmethod + def update_context_data(self, data, changes): + """Store global context data to workfile. + + Called when some values in context data has changed. + + Without storing the values in a way that 'get_context_data' would + return them will each reset of publishing cause loose of filled values + by artist. Best practice is to store values into workfile, if possible. + + Args: + data (dict): New data as are. + changes (dict): Only data that has been changed. Each value has + tuple with '(, )' value. + """ + + pass + + +class INewPublisher(IPublishHost): + """Legacy interface replaced by 'IPublishHost'. + + Deprecated: + 'INewPublisher' is replaced by 'IPublishHost' please change your + imports. + There is no "reasonable" way hot mark these classes as deprecated + to show warning of wrong import. Deprecated since 3.14.* will be + removed in 3.15.* + """ + + pass diff --git a/client/ayon_core/host/interfaces/workfiles.py b/client/ayon_core/host/interfaces/workfiles.py new file mode 100644 index 0000000000..5dbf29bd7b --- /dev/null +++ b/client/ayon_core/host/interfaces/workfiles.py @@ -0,0 +1,1820 @@ +from __future__ import annotations + +import os +import platform +import shutil +import typing +import warnings +import functools +from abc import abstractmethod +from dataclasses import dataclass, asdict +from typing import Optional, Any + +import ayon_api +import arrow + +from ayon_core.lib import emit_event +from ayon_core.settings import get_project_settings +from ayon_core.host.abstract import AbstractHost +from ayon_core.host.constants import ContextChangeReason + +if typing.TYPE_CHECKING: + from ayon_core.pipeline import Anatomy + + +def deprecated(reason): + def decorator(func): + message = f"Call to deprecated function {func.__name__} ({reason})." 
+ + @functools.wraps(func) + def new_func(*args, **kwargs): + warnings.simplefilter("always", DeprecationWarning) + warnings.warn( + message, + category=DeprecationWarning, + stacklevel=2 + ) + warnings.simplefilter("default", DeprecationWarning) + return func(*args, **kwargs) + + return new_func + + return decorator + + +# Wrappers for optional arguments that might change in future +class _WorkfileOptionalData: + """Base class for optional data used in workfile operations.""" + def __init__( + self, + *, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional["Anatomy"] = None, + project_settings: Optional[dict[str, Any]] = None, + **kwargs + ): + if kwargs: + cls_name = self.__class__.__name__ + keys = ", ".join([f'"{k}"' for k in kwargs.keys()]) + warnings.warn( + f"Unknown keywords passed to {cls_name}: {keys}", + ) + + self.project_entity = project_entity + self.anatomy = anatomy + self.project_settings = project_settings + + def get_project_data( + self, project_name: str + ) -> tuple[dict[str, Any], "Anatomy", dict[str, Any]]: + from ayon_core.pipeline import Anatomy + + project_entity = self.project_entity + anatomy = self.anatomy + project_settings = self.project_settings + + if project_entity is None: + project_entity = ayon_api.get_project(project_name) + + if anatomy is None: + anatomy = Anatomy( + project_name, + project_entity=project_entity + ) + + if project_settings is None: + project_settings = get_project_settings(project_name) + return ( + project_entity, + anatomy, + project_settings, + ) + + +class OpenWorkfileOptionalData(_WorkfileOptionalData): + """Optional data for opening workfile.""" + data_version = 1 + + +class ListWorkfilesOptionalData(_WorkfileOptionalData): + """Optional data to list workfiles.""" + data_version = 1 + + def __init__( + self, + *, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional["Anatomy"] = None, + project_settings: Optional[dict[str, Any]] = None, + template_key: Optional[str] = None, + workfile_entities: Optional[list[dict[str, Any]]] = None, + **kwargs + ): + super().__init__( + project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + **kwargs + ) + self.template_key = template_key + self.workfile_entities = workfile_entities + + def get_template_key( + self, + project_name: str, + task_type: str, + host_name: str, + project_settings: dict[str, Any], + ) -> str: + from ayon_core.pipeline.workfile import get_workfile_template_key + + if self.template_key is not None: + return self.template_key + + return get_workfile_template_key( + project_name=project_name, + task_type=task_type, + host_name=host_name, + project_settings=project_settings, + ) + + def get_workfile_entities( + self, project_name: str, task_id: str + ) -> list[dict[str, Any]]: + """Fill workfile entities if not provided.""" + if self.workfile_entities is not None: + return self.workfile_entities + return list(ayon_api.get_workfiles_info( + project_name, task_ids=[task_id] + )) + + +class ListPublishedWorkfilesOptionalData(_WorkfileOptionalData): + """Optional data to list published workfiles.""" + data_version = 1 + + def __init__( + self, + *, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional["Anatomy"] = None, + project_settings: Optional[dict[str, Any]] = None, + product_entities: Optional[list[dict[str, Any]]] = None, + version_entities: Optional[list[dict[str, Any]]] = None, + repre_entities: Optional[list[dict[str, Any]]] = None, + **kwargs + ): + super().__init__( + 
project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + **kwargs + ) + + self.product_entities = product_entities + self.version_entities = version_entities + self.repre_entities = repre_entities + + def get_entities( + self, + project_name: str, + folder_id: str, + ) -> tuple[ + list[dict[str, Any]], + list[dict[str, Any]], + list[dict[str, Any]] + ]: + product_entities = self.product_entities + if product_entities is None: + product_entities = list(ayon_api.get_products( + project_name, + folder_ids={folder_id}, + product_types={"workfile"}, + fields={"id", "name"}, + )) + + version_entities = self.version_entities + if version_entities is None: + product_ids = {p["id"] for p in product_entities} + version_entities = list(ayon_api.get_versions( + project_name, + product_ids=product_ids, + fields={"id", "author", "taskId"}, + )) + + repre_entities = self.repre_entities + if repre_entities is None: + version_ids = {v["id"] for v in version_entities} + repre_entities = list(ayon_api.get_representations( + project_name, + version_ids=version_ids, + )) + return product_entities, version_entities, repre_entities + + +class SaveWorkfileOptionalData(_WorkfileOptionalData): + """Optional data to save workfile.""" + data_version = 1 + + def __init__( + self, + *, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional["Anatomy"] = None, + project_settings: Optional[dict[str, Any]] = None, + rootless_path: Optional[str] = None, + workfile_entities: Optional[list[dict[str, Any]]] = None, + **kwargs + ): + super().__init__( + project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + **kwargs + ) + + self.rootless_path = rootless_path + self.workfile_entities = workfile_entities + + def get_workfile_entities(self, project_name: str, task_id: str): + """Fill workfile entities if not provided.""" + if self.workfile_entities is not None: + return self.workfile_entities + return list(ayon_api.get_workfiles_info( + project_name, task_ids=[task_id] + )) + + def get_rootless_path( + self, + workfile_path: str, + project_name: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + host_name: str, + project_entity: dict[str, Any], + project_settings: dict[str, Any], + anatomy: "Anatomy", + ): + from ayon_core.pipeline.workfile.utils import ( + find_workfile_rootless_path + ) + + if self.rootless_path is not None: + return self.rootless_path + + return find_workfile_rootless_path( + workfile_path, + project_name, + folder_entity, + task_entity, + host_name, + project_entity=project_entity, + project_settings=project_settings, + anatomy=anatomy, + ) + + +class CopyWorkfileOptionalData(SaveWorkfileOptionalData): + """Optional data to copy workfile.""" + data_version = 1 + + +class CopyPublishedWorkfileOptionalData(SaveWorkfileOptionalData): + """Optional data to copy published workfile.""" + data_version = 1 + + def __init__( + self, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional["Anatomy"] = None, + project_settings: Optional[dict[str, Any]] = None, + rootless_path: Optional[str] = None, + workfile_entities: Optional[list[dict[str, Any]]] = None, + src_anatomy: Optional["Anatomy"] = None, + src_representation_path: Optional[str] = None, + **kwargs + ): + super().__init__( + rootless_path=rootless_path, + workfile_entities=workfile_entities, + project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + **kwargs + ) + self.src_anatomy = src_anatomy + 
self.src_representation_path = src_representation_path + + def get_source_data( + self, + current_anatomy: Optional["Anatomy"], + project_name: str, + representation_entity: dict[str, Any], + ) -> tuple["Anatomy", str]: + from ayon_core.pipeline import Anatomy + from ayon_core.pipeline.load import ( + get_representation_path_with_anatomy + ) + + src_anatomy = self.src_anatomy + + if ( + src_anatomy is None + and current_anatomy is not None + and current_anatomy.project_name == project_name + ): + src_anatomy = current_anatomy + else: + src_anatomy = Anatomy(project_name) + + repre_path = self.src_representation_path + if repre_path is None: + repre_path = get_representation_path_with_anatomy( + representation_entity, + src_anatomy, + ) + return src_anatomy, repre_path + + +# Dataclasses used during workfile operations +@dataclass +class OpenWorkfileContext: + data_version: int + project_name: str + filepath: str + project_entity: dict[str, Any] + folder_entity: dict[str, Any] + task_entity: dict[str, Any] + anatomy: "Anatomy" + project_settings: dict[str, Any] + + +@dataclass +class ListWorkfilesContext: + data_version: int + project_name: str + project_entity: dict[str, Any] + folder_entity: dict[str, Any] + task_entity: dict[str, Any] + anatomy: "Anatomy" + project_settings: dict[str, Any] + template_key: str + workfile_entities: list[dict[str, Any]] + + +@dataclass +class ListPublishedWorkfilesContext: + data_version: int + project_name: str + project_entity: dict[str, Any] + folder_id: str + anatomy: "Anatomy" + project_settings: dict[str, Any] + product_entities: list[dict[str, Any]] + version_entities: list[dict[str, Any]] + repre_entities: list[dict[str, Any]] + + +@dataclass +class SaveWorkfileContext: + data_version: int + project_name: str + project_entity: dict[str, Any] + folder_entity: dict[str, Any] + task_entity: dict[str, Any] + anatomy: "Anatomy" + project_settings: dict[str, Any] + dst_path: str + rootless_path: str + workfile_entities: list[dict[str, Any]] + + +@dataclass +class CopyWorkfileContext(SaveWorkfileContext): + src_path: str + version: Optional[int] + comment: Optional[str] + description: Optional[str] + open_workfile: bool + + +@dataclass +class CopyPublishedWorkfileContext(CopyWorkfileContext): + src_project_name: str + src_representation_entity: dict[str, Any] + src_anatomy: "Anatomy" + + +def get_open_workfile_context( + project_name: str, + filepath: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + prepared_data: Optional[OpenWorkfileOptionalData], +) -> OpenWorkfileContext: + if prepared_data is None: + prepared_data = OpenWorkfileOptionalData() + ( + project_entity, anatomy, project_settings + ) = prepared_data.get_project_data(project_name) + return OpenWorkfileContext( + data_version=prepared_data.data_version, + filepath=filepath, + folder_entity=folder_entity, + task_entity=task_entity, + project_name=project_name, + project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + ) + + +def get_list_workfiles_context( + project_name: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + host_name: str, + prepared_data: Optional[ListWorkfilesOptionalData], +) -> ListWorkfilesContext: + if prepared_data is None: + prepared_data = ListWorkfilesOptionalData() + ( + project_entity, anatomy, project_settings + ) = prepared_data.get_project_data(project_name) + + template_key = prepared_data.get_template_key( + project_name, + task_entity["taskType"], + host_name, + project_settings, + ) + 
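To show the intent of these 'prepared data' wrappers: a caller that already holds the project entity, Anatomy and settings can hand them over so the context builders skip repeated server queries. A hedged usage sketch (the project name is illustrative; the helpers are the ones imported in this file):

import ayon_api
from ayon_core.pipeline import Anatomy
from ayon_core.settings import get_project_settings
from ayon_core.host.interfaces import ListWorkfilesOptionalData

project_name = "demo_project"
project_entity = ayon_api.get_project(project_name)
prepared_data = ListWorkfilesOptionalData(
    project_entity=project_entity,
    anatomy=Anatomy(project_name, project_entity=project_entity),
    project_settings=get_project_settings(project_name),
)
# Later passed as
# 'host.list_workfiles(project_name, folder_entity, task_entity,
#                      prepared_data=prepared_data)'
# so the prefetched values are reused instead of queried again.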
workfile_entities = prepared_data.get_workfile_entities( + project_name, task_entity["id"] + ) + return ListWorkfilesContext( + data_version=prepared_data.data_version, + project_entity=project_entity, + folder_entity=folder_entity, + task_entity=task_entity, + project_name=project_name, + anatomy=anatomy, + project_settings=project_settings, + template_key=template_key, + workfile_entities=workfile_entities, + ) + + +def get_list_published_workfiles_context( + project_name: str, + folder_id: str, + prepared_data: Optional[ListPublishedWorkfilesOptionalData], +) -> ListPublishedWorkfilesContext: + if prepared_data is None: + prepared_data = ListPublishedWorkfilesOptionalData() + ( + project_entity, anatomy, project_settings + ) = prepared_data.get_project_data(project_name) + ( + product_entities, + version_entities, + repre_entities, + ) = prepared_data.get_entities(project_name, folder_id) + + return ListPublishedWorkfilesContext( + data_version=prepared_data.data_version, + project_name=project_name, + project_entity=project_entity, + folder_id=folder_id, + anatomy=anatomy, + project_settings=project_settings, + product_entities=product_entities, + version_entities=version_entities, + repre_entities=repre_entities, + ) + + +def get_save_workfile_context( + project_name: str, + filepath: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + host_name: str, + prepared_data: Optional[SaveWorkfileOptionalData], +) -> SaveWorkfileContext: + if prepared_data is None: + prepared_data = SaveWorkfileOptionalData() + + ( + project_entity, anatomy, project_settings + ) = prepared_data.get_project_data(project_name) + + rootless_path = prepared_data.get_rootless_path( + filepath, + project_name, + folder_entity, + task_entity, + host_name, + project_entity, + project_settings, + anatomy, + ) + workfile_entities = prepared_data.get_workfile_entities( + project_name, task_entity["id"] + ) + return SaveWorkfileContext( + data_version=prepared_data.data_version, + project_name=project_name, + project_entity=project_entity, + folder_entity=folder_entity, + task_entity=task_entity, + anatomy=anatomy, + project_settings=project_settings, + dst_path=filepath, + rootless_path=rootless_path, + workfile_entities=workfile_entities, + ) + + +def get_copy_workfile_context( + project_name: str, + src_path: str, + dst_path: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + version: Optional[int], + comment: Optional[str], + description: Optional[str], + open_workfile: bool, + host_name: str, + prepared_data: Optional[CopyWorkfileOptionalData], +) -> CopyWorkfileContext: + if prepared_data is None: + prepared_data = CopyWorkfileOptionalData() + context: SaveWorkfileContext = get_save_workfile_context( + project_name, + dst_path, + folder_entity, + task_entity, + host_name, + prepared_data, + ) + return CopyWorkfileContext( + data_version=prepared_data.data_version, + src_path=src_path, + project_name=context.project_name, + project_entity=context.project_entity, + folder_entity=context.folder_entity, + task_entity=context.task_entity, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + anatomy=context.anatomy, + project_settings=context.project_settings, + dst_path=context.dst_path, + rootless_path=context.rootless_path, + workfile_entities=context.workfile_entities, + ) + + +def get_copy_repre_workfile_context( + project_name: str, + src_project_name: str, + src_representation_entity: dict[str, Any], + dst_path: str, + 
folder_entity: dict[str, Any], + task_entity: dict[str, Any], + version: Optional[int], + comment: Optional[str], + description: Optional[str], + open_workfile: bool, + host_name: str, + prepared_data: Optional[CopyPublishedWorkfileOptionalData], +) -> CopyPublishedWorkfileContext: + if prepared_data is None: + prepared_data = CopyPublishedWorkfileOptionalData() + + context: SaveWorkfileContext = get_save_workfile_context( + project_name, + dst_path, + folder_entity, + task_entity, + host_name, + prepared_data, + ) + src_anatomy, repre_path = prepared_data.get_source_data( + context.anatomy, + src_project_name, + src_representation_entity, + ) + return CopyPublishedWorkfileContext( + data_version=prepared_data.data_version, + src_project_name=src_project_name, + src_representation_entity=src_representation_entity, + src_path=repre_path, + dst_path=context.dst_path, + project_name=context.project_name, + project_entity=context.project_entity, + folder_entity=context.folder_entity, + task_entity=context.task_entity, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + anatomy=context.anatomy, + project_settings=context.project_settings, + rootless_path=context.rootless_path, + workfile_entities=context.workfile_entities, + src_anatomy=src_anatomy, + ) + + +@dataclass +class WorkfileInfo: + """Information about workfile. + + Host can open, copy and use the workfile using this information object. + + Attributes: + filepath (str): Path to the workfile. + rootless_path (str): Path to the workfile without the root. And without + backslashes on Windows. + version (Optional[int]): Version of the workfile. + comment (Optional[str]): Comment of the workfile. + file_size (Optional[float]): Size of the workfile in bytes. + file_created (Optional[float]): Timestamp when the workfile was + created on the filesystem. + file_modified (Optional[float]): Timestamp when the workfile was + modified on the filesystem. + workfile_entity_id (Optional[str]): Workfile entity id. If None then + the workfile is not in the database. + description (str): Description of the workfile. + created_by (Optional[str]): User id of the user who created the + workfile entity. + updated_by (Optional[str]): User id of the user who updated the + workfile entity. + available (bool): True if workfile is available on the machine. 
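The workfile info dataclasses in this file expose a plain-dict round trip, which is useful when items have to cross a process or UI boundary. A small sketch (paths and values are illustrative):

from ayon_core.host.interfaces import WorkfileInfo

item = WorkfileInfo.new(
    "/proj/work/sh010/animation/sh010_animation_v003.ma",
    "{root[work]}/proj/work/sh010/animation/sh010_animation_v003.ma",
    version=3,
    comment=None,
    available=True,
    workfile_entity={},
)
data = item.to_data()             # plain dict, safe to serialize
same_item = WorkfileInfo.from_data(data)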
+ + """ + filepath: str + rootless_path: str + version: Optional[int] + comment: Optional[str] + file_size: Optional[float] + file_created: Optional[float] + file_modified: Optional[float] + workfile_entity_id: Optional[str] + description: str + created_by: Optional[str] + updated_by: Optional[str] + available: bool + + @classmethod + def new( + cls, + filepath: str, + rootless_path: str, + *, + version: Optional[int], + comment: Optional[str], + available: bool, + workfile_entity: dict[str, Any], + ): + file_size = file_modified = file_created = None + if filepath and os.path.exists(filepath): + filestat = os.stat(filepath) + file_size = filestat.st_size + file_created = filestat.st_ctime + file_modified = filestat.st_mtime + + if workfile_entity is None: + workfile_entity = {} + + attrib = {} + if workfile_entity: + attrib = workfile_entity["attrib"] + + return cls( + filepath=filepath, + rootless_path=rootless_path, + version=version, + comment=comment, + file_size=file_size, + file_created=file_created, + file_modified=file_modified, + workfile_entity_id=workfile_entity.get("id"), + description=attrib.get("description") or "", + created_by=workfile_entity.get("createdBy"), + updated_by=workfile_entity.get("updatedBy"), + available=available, + ) + + def to_data(self) -> dict[str, Any]: + """Converts file item to data. + + Returns: + dict[str, Any]: Workfile item data. + + """ + return asdict(self) + + @classmethod + def from_data(cls, data: dict[str, Any]) -> WorkfileInfo: + """Converts data to workfile item. + + Args: + data (dict[str, Any]): Workfile item data. + + Returns: + WorkfileInfo: File item. + + """ + return WorkfileInfo(**data) + + +@dataclass +class PublishedWorkfileInfo: + """Information about published workfile. + + Host can copy and use the workfile using this information object. + + Attributes: + project_name (str): Name of the project where workfile lives. + folder_id (str): Folder id under which is workfile stored. + task_id (Optional[str]): Task id under which is workfile stored. + representation_id (str): Representation id of the workfile. + filepath (str): Path to the workfile. + created_at (float): Timestamp when the workfile representation + was created. + author (str): Author of the workfile representation. + available (bool): True if workfile is available on the machine. + file_size (Optional[float]): Size of the workfile in bytes. + file_created (Optional[float]): Timestamp when the workfile was + created on the filesystem. + file_modified (Optional[float]): Timestamp when the workfile was + modified on the filesystem. 
+ + """ + project_name: str + folder_id: str + task_id: Optional[str] + representation_id: str + filepath: str + created_at: float + author: str + available: bool + file_size: Optional[float] + file_created: Optional[float] + file_modified: Optional[float] + + @classmethod + def new( + cls, + project_name: str, + folder_id: str, + task_id: Optional[str], + repre_entity: dict[str, Any], + *, + filepath: str, + author: str, + available: bool, + file_size: Optional[float], + file_modified: Optional[float], + file_created: Optional[float], + ) -> "PublishedWorkfileInfo": + created_at = arrow.get(repre_entity["createdAt"]).to("local") + + return cls( + project_name=project_name, + folder_id=folder_id, + task_id=task_id, + representation_id=repre_entity["id"], + filepath=filepath, + created_at=created_at.float_timestamp, + author=author, + available=available, + file_size=file_size, + file_created=file_created, + file_modified=file_modified, + ) + + def to_data(self) -> dict[str, Any]: + """Converts file item to data. + + Returns: + dict[str, Any]: Workfile item data. + + """ + return asdict(self) + + @classmethod + def from_data(cls, data: dict[str, Any]) -> "PublishedWorkfileInfo": + """Converts data to workfile item. + + Args: + data (dict[str, Any]): Workfile item data. + + Returns: + PublishedWorkfileInfo: File item. + + """ + return PublishedWorkfileInfo(**data) + + +class IWorkfileHost(AbstractHost): + """Implementation requirements to be able to use workfiles utils and tool. + + Some of the methods are pre-implemented as they generally do the same in + all host integrations. + + """ + @abstractmethod + def save_workfile(self, dst_path: Optional[str] = None) -> None: + """Save the currently opened scene. + + Args: + dst_path (str): Where the current scene should be saved. Or use + the current path if 'None' is passed. + + """ + pass + + @abstractmethod + def open_workfile(self, filepath: str) -> None: + """Open passed filepath in the host. + + Args: + filepath (str): Path to workfile. + + """ + pass + + @abstractmethod + def get_current_workfile(self) -> Optional[str]: + """Retrieve a path to current opened file. + + Returns: + Optional[str]: Path to the file which is currently opened. None if + nothing is opened or the current workfile is unsaved. + + """ + return None + + def workfile_has_unsaved_changes(self) -> Optional[bool]: + """Currently opened scene is saved. + + Not all hosts can know if the current scene is saved because the API + of DCC does not support it. + + Returns: + Optional[bool]: True if scene is saved and False if has unsaved + modifications. None if can't tell if workfiles has + modifications. + + """ + return None + + def get_workfile_extensions(self) -> list[str]: + """Extensions that can be used to save the workfile to. + + Notes: + Method may not be used if 'list_workfiles' and + 'list_published_workfiles' are re-implemented with different + logic. + + Returns: + list[str]: List of extensions that can be used for saving. + + """ + return [] + + def save_workfile_with_context( + self, + filepath: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + prepared_data: Optional[SaveWorkfileOptionalData] = None, + ) -> None: + """Save the current workfile with context. + + Arguments 'rootless_path', 'workfile_entities', 'project_entity' + and 'anatomy' can be filled to enhance efficiency if you already + have access to the values. 
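A hedged call-site sketch for the method documented here, roughly as a workfiles tool would invoke it on the current host (the entity lookups use public ayon_api helpers and the values are illustrative):

import ayon_api

project_name = host.get_current_project_name()
folder_entity = ayon_api.get_folder_by_path(project_name, "/assets/hero")
task_entity = ayon_api.get_task_by_name(
    project_name, folder_entity["id"], "modeling"
)
host.save_workfile_with_context(
    "/proj/work/hero/modeling/hero_modeling_v012.ma",
    folder_entity,
    task_entity,
    version=12,
    comment="cleanup",
    description="Removed unused groups",
)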
+ + Argument 'project_settings' is used to calculate 'rootless_path' + if it is not provided. + + Notes: + Should this method care about context change? + + Args: + filepath (str): Where the current scene should be saved. + folder_entity (dict[str, Any]): Folder entity. + task_entity (dict[str, Any]): Task entity. + version (Optional[int]): Version of the workfile. Information + for workfile entity. Recommended to fill. + comment (Optional[str]): Comment for the workfile. + Usually used in the filename template. + description (Optional[str]): Artist note for the workfile entity. + prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data + for speed enhancements. + + """ + project_name = self.get_current_project_name() + save_workfile_context = get_save_workfile_context( + project_name, + filepath, + folder_entity, + task_entity, + host_name=self.name, + prepared_data=prepared_data, + ) + + self._before_workfile_save(save_workfile_context) + event_data = self._get_workfile_event_data( + project_name, + folder_entity, + task_entity, + filepath, + ) + self._emit_workfile_save_event(event_data, after_save=False) + + workdir = os.path.dirname(filepath) + if not os.path.exists(workdir): + os.makedirs(workdir, exist_ok=True) + + # Set 'AYON_WORKDIR' environment variable + os.environ["AYON_WORKDIR"] = workdir + + self.set_current_context( + folder_entity, + task_entity, + reason=ContextChangeReason.workfile_save, + project_entity=save_workfile_context.project_entity, + anatomy=save_workfile_context.anatomy, + ) + + self.save_workfile(filepath) + + self._save_workfile_entity( + save_workfile_context, + version, + comment, + description, + ) + self._after_workfile_save(save_workfile_context) + self._emit_workfile_save_event(event_data) + + def open_workfile_with_context( + self, + filepath: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + prepared_data: Optional[OpenWorkfileOptionalData] = None, + ) -> None: + """Open passed filepath in the host with context. + + This function should be used to open workfile in different context. + + Notes: + Should this method care about context change? + + Args: + filepath (str): Path to workfile. + folder_entity (dict[str, Any]): Folder id. + task_entity (dict[str, Any]): Task id. + prepared_data (Optional[WorkfileOptionalData]): Prepared data + for speed enhancements. + + """ + context = self.get_current_context() + project_name = context["project_name"] + + open_workfile_context = get_open_workfile_context( + project_name, + filepath, + folder_entity, + task_entity, + prepared_data=prepared_data, + ) + + workdir = os.path.dirname(filepath) + # Set 'AYON_WORKDIR' environment variable + os.environ["AYON_WORKDIR"] = workdir + + event_data = self._get_workfile_event_data( + project_name, folder_entity, task_entity, filepath + ) + self._before_workfile_open(open_workfile_context) + self._emit_workfile_open_event(event_data, after_open=False) + + self.set_current_context( + folder_entity, + task_entity, + reason=ContextChangeReason.workfile_open, + project_entity=open_workfile_context.project_entity, + anatomy=open_workfile_context.anatomy, + ) + + self.open_workfile(filepath) + + self._after_workfile_open(open_workfile_context) + self._emit_workfile_open_event(event_data) + + def list_workfiles( + self, + project_name: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + prepared_data: Optional[ListWorkfilesOptionalData] = None, + ) -> list[WorkfileInfo]: + """List workfiles in the given task. 
+ + The method should also return workfiles that are not available on + disk, but are in the AYON database. + + Notes: + - Better method name? + - This method is pre-implemented as the logic can be shared across + 95% of host integrations. Ad-hoc implementation to give host + integration workfile api functionality. + + Args: + project_name (str): Project name. + folder_entity (dict[str, Any]): Folder entity. + task_entity (dict[str, Any]): Task entity. + prepared_data (Optional[ListWorkfilesOptionalData]): Prepared + data for speed enhancements. + + Returns: + list[WorkfileInfo]: List of workfiles. + + """ + from ayon_core.pipeline.template_data import get_template_data + from ayon_core.pipeline.workfile.path_resolving import ( + get_workdir_with_workdir_data, + WorkfileDataParser, + ) + + extensions = self.get_workfile_extensions() + if not extensions: + return [] + + list_workfiles_context = get_list_workfiles_context( + project_name, + folder_entity, + task_entity, + host_name=self.name, + prepared_data=prepared_data, + ) + + workfile_entities_by_path = {} + for workfile_entity in list_workfiles_context.workfile_entities: + rootless_path = workfile_entity["path"] + path = os.path.normpath( + list_workfiles_context.anatomy.fill_root(rootless_path) + ) + workfile_entities_by_path[path] = workfile_entity + + workdir_data = get_template_data( + list_workfiles_context.project_entity, + folder_entity, + task_entity, + host_name=self.name, + ) + workdir = get_workdir_with_workdir_data( + workdir_data, + project_name, + anatomy=list_workfiles_context.anatomy, + template_key=list_workfiles_context.template_key, + project_settings=list_workfiles_context.project_settings, + ) + + file_template = list_workfiles_context.anatomy.get_template_item( + "work", list_workfiles_context.template_key, "file" + ) + rootless_workdir = workdir.rootless + if platform.system().lower() == "windows": + rootless_workdir = rootless_workdir.replace("\\", "/") + + filenames = [] + if os.path.exists(workdir): + filenames = list(os.listdir(workdir)) + + data_parser = WorkfileDataParser(file_template, workdir_data) + items = [] + for filename in filenames: + # TODO add 'default' support for folders + ext = os.path.splitext(filename)[1].lower() + if ext not in extensions: + continue + + filepath = os.path.join(workdir, filename) + + rootless_path = f"{rootless_workdir}/{filename}" + workfile_entity = workfile_entities_by_path.pop( + filepath, None + ) + version = comment = None + if workfile_entity is not None: + _data = workfile_entity["data"] + version = _data.get("version") + comment = _data.get("comment") + + if version is None: + parsed_data = data_parser.parse_data(filename) + version = parsed_data.version + comment = parsed_data.comment + + item = WorkfileInfo.new( + filepath, + rootless_path, + version=version, + comment=comment, + available=True, + workfile_entity=workfile_entity, + ) + items.append(item) + + for filepath, workfile_entity in workfile_entities_by_path.items(): + # Workfile entity is not in the filesystem + # but it is in the database + rootless_path = workfile_entity["path"] + ext = os.path.splitext(rootless_path)[1].lower() + if ext not in extensions: + continue + + _data = workfile_entity["data"] + version = _data.get("version") + comment = _data.get("comment") + if version is None: + filename = os.path.basename(rootless_path) + parsed_data = data_parser.parse_data(filename) + version = parsed_data.version + comment = parsed_data.comment + + available = os.path.exists(filepath) + 
items.append(WorkfileInfo.new( + filepath, + rootless_path, + version=version, + comment=comment, + available=available, + workfile_entity=workfile_entity, + )) + + return items + + def list_published_workfiles( + self, + project_name: str, + folder_id: str, + *, + prepared_data: Optional[ListPublishedWorkfilesOptionalData] = None, + ) -> list[PublishedWorkfileInfo]: + """List published workfiles for the given folder. + + The default implementation looks for products with the 'workfile' + product type. + + Pre-fetched entities have mandatory fields to be fetched: + - Version: 'id', 'author', 'taskId' + - Representation: 'id', 'versionId', 'files' + + Args: + project_name (str): Project name. + folder_id (str): Folder id. + prepared_data (Optional[ListPublishedWorkfilesOptionalData]): + Prepared data for speed enhancements. + + Returns: + list[PublishedWorkfileInfo]: Published workfile information for + the given context. + + """ + list_workfiles_context = get_list_published_workfiles_context( + project_name, + folder_id, + prepared_data=prepared_data, + ) + if not list_workfiles_context.repre_entities: + return [] + + versions_by_id = { + version_entity["id"]: version_entity + for version_entity in list_workfiles_context.version_entities + } + extensions = { + ext.lstrip(".") + for ext in self.get_workfile_extensions() + } + items = [] + for repre_entity in list_workfiles_context.repre_entities: + version_id = repre_entity["versionId"] + version_entity = versions_by_id[version_id] + task_id = version_entity["taskId"] + + # Filter by extension + workfile_path = None + for repre_file in repre_entity["files"]: + ext = ( + os.path.splitext(repre_file["name"])[1] + .lower() + .lstrip(".") + ) + if ext in extensions: + workfile_path = repre_file["path"] + break + + if not workfile_path: + continue + + try: + workfile_path = workfile_path.format( + root=list_workfiles_context.anatomy.roots + ) + except Exception: + self.log.warning( + "Failed to format workfile path.", exc_info=True + ) + + is_available = False + file_size = file_modified = file_created = None + if workfile_path and os.path.exists(workfile_path): + filestat = os.stat(workfile_path) + is_available = True + file_size = filestat.st_size + file_created = filestat.st_ctime + file_modified = filestat.st_mtime + + workfile_item = PublishedWorkfileInfo.new( + project_name, + folder_id, + task_id, + repre_entity, + filepath=workfile_path, + author=version_entity["author"], + available=is_available, + file_size=file_size, + file_created=file_created, + file_modified=file_modified, + ) + items.append(workfile_item) + + return items + + def copy_workfile( + self, + src_path: str, + dst_path: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + open_workfile: bool = True, + prepared_data: Optional[CopyWorkfileOptionalData] = None, + ) -> None: + """Save workfile path with target folder and task context. + + It is expected that workfile is saved to the current project, but + can be copied from the other project. + + Arguments 'rootless_path', 'workfile_entities', 'project_entity' + and 'anatomy' can be filled to enhance efficiency if you already + have access to the values. + + Argument 'project_settings' is used to calculate 'rootless_path' + if it is not provided. + + Args: + src_path (str): Path to the source scene. + dst_path (str): Where the scene should be saved. + folder_entity (dict[str, Any]): Folder entity. 
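For context, this is roughly how the published-workfile listing above gets consumed, e.g. to offer a "copy from published" action; 'host' and 'folder_entity' are assumed to come from the surrounding tool:

project_name = host.get_current_project_name()
for item in host.list_published_workfiles(project_name, folder_entity["id"]):
    if not item.available:
        # Known to the server, but the file is not on this machine.
        continue
    print(item.filepath, item.author, item.created_at)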
+ task_entity (dict[str, Any]): Task entity. + version (Optional[int]): Version of the workfile. Information + for workfile entity. Recommended to fill. + comment (Optional[str]): Comment for the workfile. + description (Optional[str]): Artist note for the workfile entity. + open_workfile (bool): Open workfile when copied. + prepared_data (Optional[CopyWorkfileOptionalData]): Prepared data + for speed enhancements. + + """ + project_name = self.get_current_project_name() + copy_workfile_context: CopyWorkfileContext = get_copy_workfile_context( + project_name, + src_path, + dst_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + host_name=self.name, + prepared_data=prepared_data, + ) + self._copy_workfile( + copy_workfile_context, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + ) + + def copy_workfile_representation( + self, + src_project_name: str, + src_representation_entity: dict[str, Any], + dst_path: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + open_workfile: bool = True, + prepared_data: Optional[CopyPublishedWorkfileOptionalData] = None, + ) -> None: + """Copy workfile representation. + + Use representation as a source for the workfile. + + Arguments 'rootless_path', 'workfile_entities', 'project_entity' + and 'anatomy' can be filled to enhance efficiency if you already + have access to the values. + + Argument 'project_settings' is used to calculate 'rootless_path' + if it is not provided. + + Args: + src_project_name (str): Project name. + src_representation_entity (dict[str, Any]): Representation + entity. + dst_path (str): Where the scene should be saved. + folder_entity (dict[str, Any): Folder entity. + task_entity (dict[str, Any]): Task entity. + version (Optional[int]): Version of the workfile. Information + for workfile entity. Recommended to fill. + comment (Optional[str]): Comment for the workfile. + description (Optional[str]): Artist note for the workfile entity. + open_workfile (bool): Open workfile when copied. + prepared_data (Optional[CopyPublishedWorkfileOptionalData]): + Prepared data for speed enhancements. + + """ + project_name = self.get_current_project_name() + copy_repre_workfile_context: CopyPublishedWorkfileContext = ( + get_copy_repre_workfile_context( + project_name, + src_project_name, + src_representation_entity, + dst_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + host_name=self.name, + prepared_data=prepared_data, + ) + ) + self._copy_workfile( + copy_repre_workfile_context, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + ) + + # --- Deprecated method names --- + @deprecated("Use 'get_workfile_extensions' instead") + def file_extensions(self): + """Deprecated variant of 'get_workfile_extensions'. + + Todo: + Remove when all usages are replaced. + + """ + return self.get_workfile_extensions() + + @deprecated("Use 'save_workfile' instead") + def save_file(self, dst_path=None): + """Deprecated variant of 'save_workfile'. + + Todo: + Remove when all usages are replaced + + """ + self.save_workfile(dst_path) + + @deprecated("Use 'open_workfile' instead") + def open_file(self, filepath): + """Deprecated variant of 'open_workfile'. 
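These aliases rely on the module-level 'deprecated' decorator defined earlier in this file: calling the old name emits a DeprecationWarning and forwards to the new method. A brief illustration, assuming 'host' is an IWorkfileHost instance:

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Forwards to 'get_workfile_extensions' and warns about the old name.
    extensions = host.file_extensions()

assert any(issubclass(w.category, DeprecationWarning) for w in caught)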
+ + Todo: + Remove when all usages are replaced. + + """ + return self.open_workfile(filepath) + + @deprecated("Use 'get_current_workfile' instead") + def current_file(self): + """Deprecated variant of 'get_current_workfile'. + + Todo: + Remove when all usages are replaced. + + """ + return self.get_current_workfile() + + @deprecated("Use 'workfile_has_unsaved_changes' instead") + def has_unsaved_changes(self): + """Deprecated variant of 'workfile_has_unsaved_changes'. + + Todo: + Remove when all usages are replaced. + + """ + return self.workfile_has_unsaved_changes() + + def _copy_workfile( + self, + copy_workfile_context: CopyWorkfileContext, + *, + version: Optional[int], + comment: Optional[str], + description: Optional[str], + open_workfile: bool, + ) -> None: + """Save workfile path with target folder and task context. + + It is expected that workfile is saved to the current project, but + can be copied from the other project. + + Arguments 'rootless_path', 'workfile_entities', 'project_entity' + and 'anatomy' can be filled to enhance efficiency if you already + have access to the values. + + Argument 'project_settings' is used to calculate 'rootless_path' + if it is not provided. + + Args: + copy_workfile_context (CopyWorkfileContext): Prepared data + for speed enhancements. + version (Optional[int]): Version of the workfile. Information + for workfile entity. Recommended to fill. + comment (Optional[str]): Comment for the workfile. + description (Optional[str]): Artist note for the workfile entity. + open_workfile (bool): Open workfile when copied. + + """ + self._before_workfile_copy(copy_workfile_context) + event_data = self._get_workfile_event_data( + copy_workfile_context.project_name, + copy_workfile_context.folder_entity, + copy_workfile_context.task_entity, + copy_workfile_context.dst_path, + ) + self._emit_workfile_save_event(event_data, after_save=False) + + dst_dir = os.path.dirname(copy_workfile_context.dst_path) + if not os.path.exists(dst_dir): + os.makedirs(dst_dir, exist_ok=True) + shutil.copy( + copy_workfile_context.src_path, + copy_workfile_context.dst_path + ) + + self._save_workfile_entity( + copy_workfile_context, + version, + comment, + description, + ) + self._after_workfile_copy(copy_workfile_context) + self._emit_workfile_save_event(event_data) + + if not open_workfile: + return + + self.open_workfile_with_context( + copy_workfile_context.dst_path, + copy_workfile_context.folder_entity, + copy_workfile_context.task_entity, + ) + + def _save_workfile_entity( + self, + save_workfile_context: SaveWorkfileContext, + version: Optional[int], + comment: Optional[str], + description: Optional[str], + ) -> Optional[dict[str, Any]]: + """Create of update workfile entity to AYON based on provided data. + + Args: + save_workfile_context (SaveWorkfileContext): Save workfile + context with all prepared data. + version (Optional[int]): Version of the workfile. + comment (Optional[str]): Comment for the workfile. + description (Optional[str]): Artist note for the workfile entity. + + Returns: + Optional[dict[str, Any]]: Workfile entity. 
+ + """ + from ayon_core.pipeline.workfile.utils import ( + save_workfile_info + ) + + project_name = self.get_current_project_name() + if not description: + description = None + + if not comment: + comment = None + + rootless_path = save_workfile_context.rootless_path + # It is not possible to create workfile infor without rootless path + workfile_info = None + if not rootless_path: + return workfile_info + + if platform.system().lower() == "windows": + rootless_path = rootless_path.replace("\\", "/") + + # Get application information + app_info = self.get_app_information() + data = {} + if app_info.app_name: + data["app_name"] = app_info.app_name + if app_info.app_version: + data["app_version"] = app_info.app_version + + # Use app group and app variant from applications addon (if available) + app_addon_name = os.environ.get("AYON_APP_NAME") + if not app_addon_name: + app_addon_name = None + + app_addon_tools_s = os.environ.get("AYON_APP_TOOLS") + app_addon_tools = [] + if app_addon_tools_s: + app_addon_tools = app_addon_tools_s.split(";") + + data["ayon_app_name"] = app_addon_name + data["ayon_app_tools"] = app_addon_tools + + workfile_info = save_workfile_info( + project_name, + save_workfile_context.task_entity["id"], + rootless_path, + self.name, + version, + comment, + description, + data=data, + workfile_entities=save_workfile_context.workfile_entities, + ) + return workfile_info + + def _create_extra_folders( + self, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + workdir: str, + ) -> None: + """Create extra folders in the workdir. + + This method should be called when workfile is saved or copied. + + Args: + folder_entity (dict[str, Any]): Folder entity. + task_entity (dict[str, Any]): Task entity. + workdir (str): Workdir where workfile/s will be stored. + + """ + from ayon_core.pipeline.workfile.path_resolving import ( + create_workdir_extra_folders + ) + + project_name = self.get_current_project_name() + + # Create extra folders + create_workdir_extra_folders( + workdir, + self.name, + task_entity["taskType"], + task_entity["name"], + project_name + ) + + def _get_workfile_event_data( + self, + project_name: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + filepath: str, + ) -> dict[str, Optional[str]]: + """Prepare workfile event data. + + Args: + project_name (str): Name of the project where workfile lives. + folder_entity (dict[str, Any]): Folder entity. + task_entity (dict[str, Any]): Task entity. + filepath (str): Path to the workfile. + + Returns: + dict[str, Optional[str]]: Data for workfile event. + + """ + workdir, filename = os.path.split(filepath) + return { + "project_name": project_name, + "folder_id": folder_entity["id"], + "folder_path": folder_entity["path"], + "task_id": task_entity["id"], + "task_name": task_entity["name"], + "host_name": self.name, + "filepath": filepath, + "filename": filename, + "workdir_path": workdir, + } + + def _before_workfile_open( + self, open_workfile_context: OpenWorkfileContext + ) -> None: + """Before workfile is opened. + + This method is called before the workfile is opened in the host. + + Can be overridden to implement host specific logic. + + Args: + open_workfile_context (OpenWorkfileContext): Context and path of + workfile to open. + + """ + pass + + def _after_workfile_open( + self, open_workfile_context: OpenWorkfileContext + ) -> None: + """After workfile is opened. + + This method is called after the workfile is opened in the host. 
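Other addons can react to these lifecycle events; the payload is the dict assembled by '_get_workfile_event_data' above. A hedged listener sketch, assuming 'register_event_callback' is exposed from ayon_core.lib as in existing host integrations:

from ayon_core.lib import register_event_callback


def _on_workfile_saved(event):
    data = event.data
    print(f"Saved {data['filename']} for task {data['task_name']}")


register_event_callback("workfile.saved", _on_workfile_saved)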
+ + Can be overridden to implement host specific logic. + + Args: + open_workfile_context (OpenWorkfileContext): Context and path of + opened workfile. + + """ + pass + + def _before_workfile_save( + self, save_workfile_context: SaveWorkfileContext + ) -> None: + """Before workfile is saved. + + This method is called before the workfile is saved in the host. + + Can be overridden to implement host specific logic. + + Args: + save_workfile_context (SaveWorkfileContext): Workfile path with + target folder and task context. + + """ + pass + + def _after_workfile_save( + self, save_workfile_context: SaveWorkfileContext + ) -> None: + """After workfile is saved. + + This method is called after the workfile is saved in the host. + + Can be overridden to implement host specific logic. + + Args: + save_workfile_context (SaveWorkfileContext): Workfile path with + target folder and task context. + + """ + workdir = os.path.dirname(save_workfile_context.dst_path) + self._create_extra_folders( + save_workfile_context.folder_entity, + save_workfile_context.task_entity, + workdir + ) + + def _before_workfile_copy( + self, copy_workfile_context: CopyWorkfileContext + ) -> None: + """Before workfile is copied. + + This method is called before the workfile is copied by host + integration. + + Can be overridden to implement host specific logic. + + Args: + copy_workfile_context (CopyWorkfileContext): Source and destination + path with context before workfile is copied. + + """ + pass + + def _after_workfile_copy( + self, copy_workfile_context: CopyWorkfileContext + ) -> None: + """After workfile is copied. + + This method is called after the workfile is copied by host + integration. + + Can be overridden to implement host specific logic. + + Args: + copy_workfile_context (CopyWorkfileContext): Source and destination + path with context after workfile is copied. + + """ + workdir = os.path.dirname(copy_workfile_context.dst_path) + self._create_extra_folders( + copy_workfile_context.folder_entity, + copy_workfile_context.task_entity, + workdir, + ) + + def _emit_workfile_open_event( + self, + event_data: dict[str, Optional[str]], + after_open: bool = True, + ) -> None: + """Emit workfile save event. + + Emit event before and after workfile is opened. + + This method is not meant to be overridden. + + Other addons can listen to this event and do additional steps. + + Args: + event_data (dict[str, Optional[str]]): Prepare event data. + after_open (bool): Emit event after workfile is opened. + + """ + topics = [] + topic_end = "before" + if after_open: + topics.append("workfile.opened") + topic_end = "after" + + # Keep backwards compatible event topic + topics.append(f"workfile.open.{topic_end}") + + for topic in topics: + emit_event(topic, event_data) + + def _emit_workfile_save_event( + self, + event_data: dict[str, Optional[str]], + after_save: bool = True, + ) -> None: + """Emit workfile save event. + + Emit event before and after workfile is saved or copied. + + This method is not meant to be overridden. + + Other addons can listen to this event and do additional steps. + + Args: + event_data (dict[str, Optional[str]]): Prepare event data. + after_save (bool): Emit event after workfile is saved. 
+ + """ + topics = [] + topic_end = "before" + if after_save: + topics.append("workfile.saved") + topic_end = "after" + + # Keep backwards compatible event topic + topics.append(f"workfile.save.{topic_end}") + + for topic in topics: + emit_event(topic, event_data) diff --git a/client/ayon_core/host/typing.py b/client/ayon_core/host/typing.py new file mode 100644 index 0000000000..a51460713b --- /dev/null +++ b/client/ayon_core/host/typing.py @@ -0,0 +1,7 @@ +from typing import Optional, TypedDict + + +class HostContextData(TypedDict): + project_name: str + folder_path: Optional[str] + task_name: Optional[str] diff --git a/client/ayon_core/lib/__init__.py b/client/ayon_core/lib/__init__.py index 5ccc8d03e5..d5629cbf3d 100644 --- a/client/ayon_core/lib/__init__.py +++ b/client/ayon_core/lib/__init__.py @@ -11,6 +11,7 @@ from .local_settings import ( get_launcher_storage_dir, get_addons_resources_dir, get_local_site_id, + get_ayon_user_entity, get_ayon_username, ) from .ayon_connection import initialize_ayon_connection @@ -73,6 +74,7 @@ from .log import ( ) from .path_templates import ( + DefaultKeysDict, TemplateUnsolved, StringTemplate, FormatObject, @@ -148,6 +150,7 @@ __all__ = [ "get_launcher_storage_dir", "get_addons_resources_dir", "get_local_site_id", + "get_ayon_user_entity", "get_ayon_username", "initialize_ayon_connection", @@ -228,6 +231,7 @@ __all__ = [ "get_version_from_path", "get_last_version_from_path", + "DefaultKeysDict", "TemplateUnsolved", "StringTemplate", "FormatObject", diff --git a/client/ayon_core/lib/local_settings.py b/client/ayon_core/lib/local_settings.py index d994145d4b..8a17b7af38 100644 --- a/client/ayon_core/lib/local_settings.py +++ b/client/ayon_core/lib/local_settings.py @@ -5,32 +5,46 @@ import json import platform import configparser import warnings +import copy from datetime import datetime from abc import ABC, abstractmethod from functools import lru_cache +from typing import Optional, Any import platformdirs import ayon_api +from .cache import NestedCacheItem, CacheItem + _PLACEHOLDER = object() -def _get_ayon_appdirs(*args): +# TODO should use 'KeyError' or 'Exception' as base +class RegistryItemNotFound(ValueError): + """Raised when the item is not found in the keyring.""" + + +class _Cache: + username = None + user_entities_by_name = NestedCacheItem() + + +def _get_ayon_appdirs(*args: str) -> str: return os.path.join( platformdirs.user_data_dir("AYON", "Ynput"), *args ) -def get_ayon_appdirs(*args): +def get_ayon_appdirs(*args: str) -> str: """Local app data directory of AYON client. Deprecated: Use 'get_launcher_local_dir' or 'get_launcher_storage_dir' based on - use-case. Deprecation added 24/08/09 (0.4.4-dev.1). + a use-case. Deprecation added 24/08/09 (0.4.4-dev.1). Args: - *args (Iterable[str]): Subdirectories/files in local app data dir. + *args (Iterable[str]): Subdirectories/files in the local app data dir. Returns: str: Path to directory/file in local app data dir. @@ -48,7 +62,7 @@ def get_ayon_appdirs(*args): def get_launcher_storage_dir(*subdirs: str) -> str: - """Get storage directory for launcher. + """Get a storage directory for launcher. Storage directory is used for storing shims, addons, dependencies, etc. @@ -73,14 +87,14 @@ def get_launcher_storage_dir(*subdirs: str) -> str: def get_launcher_local_dir(*subdirs: str) -> str: - """Get local directory for launcher. + """Get a local directory for launcher. - Local directory is used for storing machine or user specific data. 
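A quick illustration of the two launcher directory helpers touched in this module (actual locations depend on the platform and environment overrides):

from ayon_core.lib.local_settings import (
    get_launcher_storage_dir,
    get_launcher_local_dir,
)

# Shared storage: addons, dependency packages, shims, ...
addons_dir = get_launcher_storage_dir("addons")
# Machine/user specific data, e.g. the local site id file
local_dir = get_launcher_local_dir()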
+ Local directory is used for storing machine or user-specific data. - The location is user specific. + The location is user-specific. Note: - This function should be called at least once on bootstrap. + This function should be called at least once on the bootstrap. Args: *subdirs (str): Subdirectories relative to local dir. @@ -97,7 +111,7 @@ def get_launcher_local_dir(*subdirs: str) -> str: def get_addons_resources_dir(addon_name: str, *args) -> str: - """Get directory for storing resources for addons. + """Get a directory for storing resources for addons. Some addons might need to store ad-hoc resources that are not part of addon client package (e.g. because of size). Studio might define @@ -107,7 +121,7 @@ def get_addons_resources_dir(addon_name: str, *args) -> str: Args: addon_name (str): Addon name. - *args (str): Subfolders in resources directory. + *args (str): Subfolders in the resources directory. Returns: str: Path to resources directory. @@ -120,6 +134,10 @@ def get_addons_resources_dir(addon_name: str, *args) -> str: return os.path.join(addons_resources_dir, addon_name, *args) +class _FakeException(Exception): + """Placeholder exception used if real exception is not available.""" + + class AYONSecureRegistry: """Store information using keyring. @@ -130,9 +148,10 @@ class AYONSecureRegistry: identify which data were created by AYON. Args: - name(str): Name of registry used as identifier for data. + name(str): Name of registry used as the identifier for data. + """ - def __init__(self, name): + def __init__(self, name: str) -> None: try: import keyring @@ -148,13 +167,12 @@ class AYONSecureRegistry: keyring.set_keyring(Windows.WinVaultKeyring()) # Force "AYON" prefix - self._name = "/".join(("AYON", name)) + self._name = f"AYON/{name}" - def set_item(self, name, value): - # type: (str, str) -> None - """Set sensitive item into system's keyring. + def set_item(self, name: str, value: str) -> None: + """Set sensitive item into the system's keyring. - This uses `Keyring module`_ to save sensitive stuff into system's + This uses `Keyring module`_ to save sensitive stuff into the system's keyring. Args: @@ -168,22 +186,26 @@ class AYONSecureRegistry: import keyring keyring.set_password(self._name, name, value) + self.get_item.cache_clear() @lru_cache(maxsize=32) - def get_item(self, name, default=_PLACEHOLDER): - """Get value of sensitive item from system's keyring. + def get_item( + self, name: str, default: Any = _PLACEHOLDER + ) -> Optional[str]: + """Get value of sensitive item from the system's keyring. See also `Keyring module`_ Args: name (str): Name of the item. - default (Any): Default value if item is not available. + default (Any): Default value if the item is not available. Returns: value (str): Value of the item. Raises: - ValueError: If item doesn't exist and default is not defined. + RegistryItemNotFound: If the item doesn't exist and default + is not defined. .. 
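Typical use of the secure registry together with the exception introduced in this change; a short hedged sketch:

from ayon_core.lib.local_settings import (
    AYONSecureRegistry,
    RegistryItemNotFound,
)

registry = AYONSecureRegistry("my_addon")
registry.set_item("api_token", "secret-value")

try:
    token = registry.get_item("api_token")
except RegistryItemNotFound:
    token = None  # Nothing stored yet and no default was given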
_Keyring module: https://github.com/jaraco/keyring @@ -191,21 +213,29 @@ class AYONSecureRegistry: """ import keyring - value = keyring.get_password(self._name, name) + # Capture 'ItemNotFoundException' exception (on linux) + try: + from secretstorage.exceptions import ItemNotFoundException + except ImportError: + ItemNotFoundException = _FakeException + + try: + value = keyring.get_password(self._name, name) + except ItemNotFoundException: + value = None + if value is not None: return value if default is not _PLACEHOLDER: return default - # NOTE Should raise `KeyError` - raise ValueError( - "Item {}:{} does not exist in keyring.".format(self._name, name) + raise RegistryItemNotFound( + f"Item {self._name}:{name} not found in keyring." ) - def delete_item(self, name): - # type: (str) -> None - """Delete value stored in system's keyring. + def delete_item(self, name: str) -> None: + """Delete value stored in the system's keyring. See also `Keyring module`_ @@ -223,47 +253,38 @@ class AYONSecureRegistry: class ASettingRegistry(ABC): - """Abstract class defining structure of **SettingRegistry** class. - - It is implementing methods to store secure items into keyring, otherwise - mechanism for storing common items must be implemented in abstract - methods. - - Attributes: - _name (str): Registry names. + """Abstract class to defining structure of registry class. """ - - def __init__(self, name): - # type: (str) -> ASettingRegistry - super(ASettingRegistry, self).__init__() - + def __init__(self, name: str) -> None: self._name = name - self._items = {} - - def set_item(self, name, value): - # type: (str, str) -> None - """Set item to settings registry. - - Args: - name (str): Name of the item. - value (str): Value of the item. - - """ - self._set_item(name, value) @abstractmethod - def _set_item(self, name, value): - # type: (str, str) -> None - # Implement it - pass + def _get_item(self, name: str) -> Any: + """Get item value from registry.""" - def __setitem__(self, name, value): - self._items[name] = value + @abstractmethod + def _set_item(self, name: str, value: str) -> None: + """Set item value to registry.""" + + @abstractmethod + def _delete_item(self, name: str) -> None: + """Delete item from registry.""" + + def __getitem__(self, name: str) -> Any: + return self._get_item(name) + + def __setitem__(self, name: str, value: str) -> None: self._set_item(name, value) - def get_item(self, name): - # type: (str) -> str + def __delitem__(self, name: str) -> None: + self._delete_item(name) + + @property + def name(self) -> str: + return self._name + + def get_item(self, name: str) -> str: """Get item from settings registry. Args: @@ -273,22 +294,22 @@ class ASettingRegistry(ABC): value (str): Value of the item. Raises: - ValueError: If item doesn't exist. + RegistryItemNotFound: If the item doesn't exist. """ return self._get_item(name) - @abstractmethod - def _get_item(self, name): - # type: (str) -> str - # Implement it - pass + def set_item(self, name: str, value: str) -> None: + """Set item to settings registry. - def __getitem__(self, name): - return self._get_item(name) + Args: + name (str): Name of the item. + value (str): Value of the item. - def delete_item(self, name): - # type: (str) -> None + """ + self._set_item(name, value) + + def delete_item(self, name: str) -> None: """Delete item from settings registry. 
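After this refactor a concrete registry only implements the three private hooks; 'get_item', 'set_item', 'delete_item' and the mapping dunders are inherited. A minimal in-memory sketch, purely illustrative:

from ayon_core.lib.local_settings import (
    ASettingRegistry,
    RegistryItemNotFound,
)


class MemorySettingRegistry(ASettingRegistry):
    def __init__(self, name):
        super().__init__(name)
        self._items = {}

    def _get_item(self, name):
        try:
            return self._items[name]
        except KeyError:
            raise RegistryItemNotFound(
                f"Registry doesn't contain value {name}"
            )

    def _set_item(self, name, value):
        self._items[name] = value

    def _delete_item(self, name):
        self._items.pop(name, None)


registry = MemorySettingRegistry("scratch")
registry["last_project"] = "demo_project"
assert registry["last_project"] == "demo_project"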
Args: @@ -297,16 +318,6 @@ class ASettingRegistry(ABC): """ self._delete_item(name) - @abstractmethod - def _delete_item(self, name): - # type: (str) -> None - """Delete item from settings.""" - pass - - def __delitem__(self, name): - del self._items[name] - self._delete_item(name) - class IniSettingRegistry(ASettingRegistry): """Class using :mod:`configparser`. @@ -314,20 +325,17 @@ class IniSettingRegistry(ASettingRegistry): This class is using :mod:`configparser` (ini) files to store items. """ - - def __init__(self, name, path): - # type: (str, str) -> IniSettingRegistry - super(IniSettingRegistry, self).__init__(name) + def __init__(self, name: str, path: str) -> None: + super().__init__(name) # get registry file - self._registry_file = os.path.join(path, "{}.ini".format(name)) + self._registry_file = os.path.join(path, f"{name}.ini") if not os.path.exists(self._registry_file): with open(self._registry_file, mode="w") as cfg: print("# Settings registry", cfg) now = datetime.now().strftime("%d/%m/%Y %H:%M:%S") - print("# {}".format(now), cfg) + print(f"# {now}", cfg) - def set_item_section(self, section, name, value): - # type: (str, str, str) -> None + def set_item_section(self, section: str, name: str, value: str) -> None: """Set item to specific section of ini registry. If section doesn't exists, it is created. @@ -350,12 +358,10 @@ class IniSettingRegistry(ASettingRegistry): with open(self._registry_file, mode="w") as cfg: config.write(cfg) - def _set_item(self, name, value): - # type: (str, str) -> None + def _set_item(self, name: str, value: str) -> None: self.set_item_section("MAIN", name, value) - def set_item(self, name, value): - # type: (str, str) -> None + def set_item(self, name: str, value: str) -> None: """Set item to settings ini file. This saves item to ``DEFAULT`` section of ini as each item there @@ -368,10 +374,9 @@ class IniSettingRegistry(ASettingRegistry): """ # this does the some, overridden just for different docstring. # we cast value to str as ini options values must be strings. - super(IniSettingRegistry, self).set_item(name, str(value)) + super().set_item(name, str(value)) - def get_item(self, name): - # type: (str) -> str + def get_item(self, name: str) -> str: """Gets item from settings ini file. This gets settings from ``DEFAULT`` section of ini file as each item @@ -384,19 +389,18 @@ class IniSettingRegistry(ASettingRegistry): str: Value of item. Raises: - ValueError: If value doesn't exist. + RegistryItemNotFound: If value doesn't exist. """ - return super(IniSettingRegistry, self).get_item(name) + return super().get_item(name) @lru_cache(maxsize=32) - def get_item_from_section(self, section, name): - # type: (str, str) -> str + def get_item_from_section(self, section: str, name: str) -> str: """Get item from section of ini file. This will read ini file and try to get item value from specified - section. If that section or item doesn't exist, :exc:`ValueError` - is risen. + section. If that section or item doesn't exist, + :exc:`RegistryItemNotFound` is risen. Args: section (str): Name of ini section. @@ -406,7 +410,7 @@ class IniSettingRegistry(ASettingRegistry): str: Item value. Raises: - ValueError: If value doesn't exist. + RegistryItemNotFound: If value doesn't exist. 
""" config = configparser.ConfigParser() @@ -414,16 +418,15 @@ class IniSettingRegistry(ASettingRegistry): try: value = config[section][name] except KeyError: - raise ValueError( - "Registry doesn't contain value {}:{}".format(section, name)) + raise RegistryItemNotFound( + f"Registry doesn't contain value {section}:{name}" + ) return value - def _get_item(self, name): - # type: (str) -> str + def _get_item(self, name: str) -> str: return self.get_item_from_section("MAIN", name) - def delete_item_from_section(self, section, name): - # type: (str, str) -> None + def delete_item_from_section(self, section: str, name: str) -> None: """Delete item from section in ini file. Args: @@ -431,7 +434,7 @@ class IniSettingRegistry(ASettingRegistry): name (str): Name of the item. Raises: - ValueError: If item doesn't exist. + RegistryItemNotFound: If the item doesn't exist. """ self.get_item_from_section.cache_clear() @@ -440,8 +443,9 @@ class IniSettingRegistry(ASettingRegistry): try: _ = config[section][name] except KeyError: - raise ValueError( - "Registry doesn't contain value {}:{}".format(section, name)) + raise RegistryItemNotFound( + f"Registry doesn't contain value {section}:{name}" + ) config.remove_option(section, name) # if section is empty, delete it @@ -457,29 +461,28 @@ class IniSettingRegistry(ASettingRegistry): class JSONSettingRegistry(ASettingRegistry): - """Class using json file as storage.""" + """Class using a json file as storage.""" - def __init__(self, name, path): - # type: (str, str) -> JSONSettingRegistry - super(JSONSettingRegistry, self).__init__(name) - #: str: name of registry file - self._registry_file = os.path.join(path, "{}.json".format(name)) + def __init__(self, name: str, path: str) -> None: + super().__init__(name) + self._registry_file = os.path.join(path, f"{name}.json") now = datetime.now().strftime("%d/%m/%Y %H:%M:%S") header = { "__metadata__": {"generated": now}, "registry": {} } - if not os.path.exists(os.path.dirname(self._registry_file)): - os.makedirs(os.path.dirname(self._registry_file), exist_ok=True) + # Use 'os.path.dirname' in case someone uses slashes in 'name' + dirpath = os.path.dirname(self._registry_file) + if not os.path.exists(dirpath): + os.makedirs(dirpath, exist_ok=True) if not os.path.exists(self._registry_file): with open(self._registry_file, mode="w") as cfg: json.dump(header, cfg, indent=4) @lru_cache(maxsize=32) - def _get_item(self, name): - # type: (str) -> object - """Get item value from registry json. + def _get_item(self, name: str) -> str: + """Get item value from the registry. Note: See :meth:`ayon_core.lib.JSONSettingRegistry.get_item` @@ -490,29 +493,13 @@ class JSONSettingRegistry(ASettingRegistry): try: value = data["registry"][name] except KeyError: - raise ValueError( - "Registry doesn't contain value {}".format(name)) + raise RegistryItemNotFound( + f"Registry doesn't contain value {name}" + ) return value - def get_item(self, name): - # type: (str) -> object - """Get item value from registry json. - - Args: - name (str): Name of the item. - - Returns: - value of the item - - Raises: - ValueError: If item is not found in registry file. - - """ - return self._get_item(name) - - def _set_item(self, name, value): - # type: (str, object) -> None - """Set item value to registry json. + def _set_item(self, name: str, value: str) -> None: + """Set item value to the registry. 
Note: See :meth:`ayon_core.lib.JSONSettingRegistry.set_item` @@ -524,41 +511,39 @@ class JSONSettingRegistry(ASettingRegistry): cfg.truncate(0) cfg.seek(0) json.dump(data, cfg, indent=4) - - def set_item(self, name, value): - # type: (str, object) -> None - """Set item and its value into json registry file. - - Args: - name (str): name of the item. - value (Any): value of the item. - - """ - self._set_item(name, value) - - def _delete_item(self, name): - # type: (str) -> None self._get_item.cache_clear() + + def _delete_item(self, name: str) -> None: with open(self._registry_file, "r+") as cfg: data = json.load(cfg) del data["registry"][name] cfg.truncate(0) cfg.seek(0) json.dump(data, cfg, indent=4) + self._get_item.cache_clear() class AYONSettingsRegistry(JSONSettingRegistry): """Class handling AYON general settings registry. Args: - name (Optional[str]): Name of the registry. - """ + name (Optional[str]): Name of the registry. Using 'None' or not + passing name is deprecated. - def __init__(self, name=None): + """ + def __init__(self, name: Optional[str] = None) -> None: if not name: name = "AYON_settings" + warnings.warn( + ( + "Used 'AYONSettingsRegistry' without 'name' argument." + " The argument will be required in future versions." + ), + DeprecationWarning, + stacklevel=2, + ) path = get_launcher_storage_dir() - super(AYONSettingsRegistry, self).__init__(name, path) + super().__init__(name, path) def get_local_site_id(): @@ -588,13 +573,76 @@ def get_local_site_id(): return site_id +def _get_ayon_service_username() -> Optional[str]: + # TODO @iLLiCiTiT - do not use private attribute of 'ServerAPI', rather + # use public method to get username from connection stack. + con = ayon_api.get_server_api_connection() + user_stack = getattr(con, "_as_user_stack", None) + if user_stack is None: + return None + return user_stack.username + + +def get_ayon_user_entity(username: Optional[str] = None) -> dict[str, Any]: + """AYON user entity used for templates and publishing. + + Note: + Usually only service and admin users can receive the full user entity. + + Args: + username (Optional[str]): Username of the user. If not passed, then + the current user in 'ayon_api' is used. + + Returns: + dict[str, Any]: User entity. + + """ + service_username = _get_ayon_service_username() + # Handle service user handling first + if service_username: + if username is None: + username = service_username + cache: CacheItem = _Cache.user_entities_by_name[username] + if not cache.is_valid: + if username == service_username: + user = ayon_api.get_user() + else: + user = ayon_api.get_user(username) + cache.update_data(user) + return copy.deepcopy(cache.get_data()) + + # Cache current user + current_user = None + if _Cache.username is None: + current_user = ayon_api.get_user() + _Cache.username = current_user["name"] + + if username is None: + username = _Cache.username + + cache: CacheItem = _Cache.user_entities_by_name[username] + if not cache.is_valid: + user = None + if username == _Cache.username: + if current_user is None: + current_user = ayon_api.get_user() + user = current_user + + if user is None: + user = ayon_api.get_user(username) + cache.update_data(user) + + return copy.deepcopy(cache.get_data()) + + def get_ayon_username(): """AYON username used for templates and publishing. - Uses curet ayon api username. + Uses current ayon api username. Returns: str: Username. 
""" - return ayon_api.get_user()["name"] + user = get_ayon_user_entity() + return user["name"] diff --git a/client/ayon_core/lib/path_templates.py b/client/ayon_core/lib/path_templates.py index 9e3e455a6c..aba2f296e3 100644 --- a/client/ayon_core/lib/path_templates.py +++ b/client/ayon_core/lib/path_templates.py @@ -1,17 +1,17 @@ +from __future__ import annotations + import os import re import copy import numbers import warnings +import platform from string import Formatter -import typing -from typing import List, Dict, Any, Set - -if typing.TYPE_CHECKING: - from typing import Union +from typing import Any, Union, Iterable SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)") OPTIONAL_PATTERN = re.compile(r"(<.*?[^{0]*>)[^0-9]*?") +_IS_WINDOWS = platform.system().lower() == "windows" class TemplateUnsolved(Exception): @@ -42,6 +42,54 @@ class TemplateUnsolved(Exception): ) +class DefaultKeysDict(dict): + """Dictionary that supports the default key to use for str conversion. + + Is helpful for changes of a key in a template from string to dictionary + for example '{folder}' -> '{folder[name]}'. + >>> data = DefaultKeysDict( + >>> "name", + >>> {"folder": {"name": "FolderName"}} + >>> ) + >>> print("{folder[name]}".format_map(data)) + FolderName + >>> print("{folder}".format_map(data)) + FolderName + + Args: + default_key (Union[str, Iterable[str]]): Default key to use for str + conversion. Can also expect multiple keys for more nested + dictionary. + + """ + def __init__( + self, default_keys: Union[str, Iterable[str]], *args, **kwargs + ) -> None: + if isinstance(default_keys, str): + default_keys = [default_keys] + else: + default_keys = list(default_keys) + if not default_keys: + raise ValueError( + "Default key must be set. Got empty default keys." + ) + + self._default_keys = default_keys + super().__init__(*args, **kwargs) + + def __str__(self) -> str: + return str(self.get_default_value()) + + def get_default_keys(self) -> list[str]: + return list(self._default_keys) + + def get_default_value(self) -> Any: + value = self + for key in self._default_keys: + value = value[key] + return value + + class StringTemplate: """String that can be formatted.""" def __init__(self, template: str): @@ -82,7 +130,7 @@ class StringTemplate: if substr: new_parts.append(substr) - self._parts: List["Union[str, OptionalPart, FormattingPart]"] = ( + self._parts: list[Union[str, OptionalPart, FormattingPart]] = ( self.find_optional_parts(new_parts) ) @@ -103,7 +151,7 @@ class StringTemplate: def template(self) -> str: return self._template - def format(self, data: Dict[str, Any]) -> "TemplateResult": + def format(self, data: dict[str, Any]) -> "TemplateResult": """ Figure out with whole formatting. 
Separate advanced keys (*Like '{project[name]}') from string which must @@ -143,29 +191,29 @@ class StringTemplate: invalid_types ) - def format_strict(self, data: Dict[str, Any]) -> "TemplateResult": + def format_strict(self, data: dict[str, Any]) -> "TemplateResult": result = self.format(data) result.validate() return result @classmethod def format_template( - cls, template: str, data: Dict[str, Any] + cls, template: str, data: dict[str, Any] ) -> "TemplateResult": objected_template = cls(template) return objected_template.format(data) @classmethod def format_strict_template( - cls, template: str, data: Dict[str, Any] + cls, template: str, data: dict[str, Any] ) -> "TemplateResult": objected_template = cls(template) return objected_template.format_strict(data) @staticmethod def find_optional_parts( - parts: List["Union[str, FormattingPart]"] - ) -> List["Union[str, OptionalPart, FormattingPart]"]: + parts: list[Union[str, FormattingPart]] + ) -> list[Union[str, OptionalPart, FormattingPart]]: new_parts = [] tmp_parts = {} counted_symb = -1 @@ -190,7 +238,7 @@ class StringTemplate: len(parts) == 1 and isinstance(parts[0], str) ): - value = "<{}>".format(parts[0]) + value = f"<{parts[0]}>" else: value = OptionalPart(parts) @@ -221,7 +269,7 @@ class TemplateResult(str): only used keys. solved (bool): For check if all required keys were filled. template (str): Original template. - missing_keys (Iterable[str]): Missing keys that were not in the data. + missing_keys (list[str]): Missing keys that were not in the data. Include missing optional keys. invalid_types (dict): When key was found in data, but value had not allowed DataType. Allowed data types are `numbers`, @@ -230,11 +278,11 @@ class TemplateResult(str): of number. """ - used_values: Dict[str, Any] = None + used_values: dict[str, Any] = None solved: bool = None template: str = None - missing_keys: List[str] = None - invalid_types: Dict[str, Any] = None + missing_keys: list[str] = None + invalid_types: dict[str, Any] = None def __new__( cls, filled_template, template, solved, @@ -277,8 +325,11 @@ class TemplateResult(str): """Convert to normalized path.""" cls = self.__class__ + path = str(self) + if _IS_WINDOWS: + path = path.replace("\\", "/") return cls( - os.path.normpath(self.replace("\\", "/")), + os.path.normpath(path), self.template, self.solved, self.used_values, @@ -291,21 +342,21 @@ class TemplatePartResult: """Result to store result of template parts.""" def __init__(self, optional: bool = False): # Missing keys or invalid value types of required keys - self._missing_keys: Set[str] = set() - self._invalid_types: Dict[str, Any] = {} + self._missing_keys: set[str] = set() + self._invalid_types: dict[str, Any] = {} # Missing keys or invalid value types of optional keys - self._missing_optional_keys: Set[str] = set() - self._invalid_optional_types: Dict[str, Any] = {} + self._missing_optional_keys: set[str] = set() + self._invalid_optional_types: dict[str, Any] = {} # Used values stored by key with origin type # - key without any padding or key modifiers # - value from filling data # Example: {"version": 1} - self._used_values: Dict[str, Any] = {} + self._used_values: dict[str, Any] = {} # Used values stored by key with all modifirs # - value is already formatted string # Example: {"version:0>3": "001"} - self._really_used_values: Dict[str, Any] = {} + self._really_used_values: dict[str, Any] = {} # Concatenated string output after formatting self._output: str = "" # Is this result from optional part @@ -331,8 +382,9 @@ class 
TemplatePartResult: self._really_used_values.update(other.really_used_values) else: - raise TypeError("Cannot add data from \"{}\" to \"{}\"".format( - str(type(other)), self.__class__.__name__) + raise TypeError( + f"Cannot add data from \"{type(other)}\"" + f" to \"{self.__class__.__name__}\"" ) @property @@ -357,40 +409,41 @@ class TemplatePartResult: return self._output @property - def missing_keys(self) -> Set[str]: + def missing_keys(self) -> set[str]: return self._missing_keys @property - def missing_optional_keys(self) -> Set[str]: + def missing_optional_keys(self) -> set[str]: return self._missing_optional_keys @property - def invalid_types(self) -> Dict[str, Any]: + def invalid_types(self) -> dict[str, Any]: return self._invalid_types @property - def invalid_optional_types(self) -> Dict[str, Any]: + def invalid_optional_types(self) -> dict[str, Any]: return self._invalid_optional_types @property - def really_used_values(self) -> Dict[str, Any]: + def really_used_values(self) -> dict[str, Any]: return self._really_used_values @property - def realy_used_values(self) -> Dict[str, Any]: + def realy_used_values(self) -> dict[str, Any]: warnings.warn( "Property 'realy_used_values' is deprecated." " Use 'really_used_values' instead.", - DeprecationWarning + DeprecationWarning, + stacklevel=2, ) return self._really_used_values @property - def used_values(self) -> Dict[str, Any]: + def used_values(self) -> dict[str, Any]: return self._used_values @staticmethod - def split_keys_to_subdicts(values: Dict[str, Any]) -> Dict[str, Any]: + def split_keys_to_subdicts(values: dict[str, Any]) -> dict[str, Any]: output = {} formatter = Formatter() for key, value in values.items(): @@ -405,7 +458,7 @@ class TemplatePartResult: data[last_key] = value return output - def get_clean_used_values(self) -> Dict[str, Any]: + def get_clean_used_values(self) -> dict[str, Any]: new_used_values = {} for key, value in self.used_values.items(): if isinstance(value, FormatObject): @@ -421,7 +474,8 @@ class TemplatePartResult: warnings.warn( "Method 'add_realy_used_value' is deprecated." " Use 'add_really_used_value' instead.", - DeprecationWarning + DeprecationWarning, + stacklevel=2, ) self.add_really_used_value(key, value) @@ -474,7 +528,7 @@ class FormattingPart: self, field_name: str, format_spec: str, - conversion: "Union[str, None]", + conversion: Union[str, None], ): format_spec_v = "" if format_spec: @@ -541,7 +595,7 @@ class FormattingPart: return not queue @staticmethod - def keys_to_template_base(keys: List[str]): + def keys_to_template_base(keys: list[str]): if not keys: return None # Create copy of keys @@ -551,7 +605,7 @@ class FormattingPart: return f"{template_base}{joined_keys}" def format( - self, data: Dict[str, Any], result: TemplatePartResult + self, data: dict[str, Any], result: TemplatePartResult ) -> TemplatePartResult: """Format the formattings string. 
@@ -630,6 +684,12 @@ class FormattingPart: result.add_output(self.template) return result + if isinstance(value, DefaultKeysDict): + try: + value = value.get_default_value() + except KeyError: + pass + if not self.validate_value_type(value): result.add_invalid_type(key, value) result.add_output(self.template) @@ -682,23 +742,25 @@ class OptionalPart: def __init__( self, - parts: List["Union[str, OptionalPart, FormattingPart]"] + parts: list[Union[str, OptionalPart, FormattingPart]] ): - self._parts: List["Union[str, OptionalPart, FormattingPart]"] = parts + self._parts: list[Union[str, OptionalPart, FormattingPart]] = parts @property - def parts(self) -> List["Union[str, OptionalPart, FormattingPart]"]: + def parts(self) -> list[Union[str, OptionalPart, FormattingPart]]: return self._parts def __str__(self) -> str: - return "<{}>".format("".join([str(p) for p in self._parts])) + joined_parts = "".join([str(p) for p in self._parts]) + return f"<{joined_parts}>" def __repr__(self) -> str: - return "".format("".join([str(p) for p in self._parts])) + joined_parts = "".join([str(p) for p in self._parts]) + return f"" def format( self, - data: Dict[str, Any], + data: dict[str, Any], result: TemplatePartResult, ) -> TemplatePartResult: new_result = TemplatePartResult(True) diff --git a/client/ayon_core/lib/plugin_tools.py b/client/ayon_core/lib/plugin_tools.py index 654bc7ac4a..b19fe1e200 100644 --- a/client/ayon_core/lib/plugin_tools.py +++ b/client/ayon_core/lib/plugin_tools.py @@ -1,11 +1,9 @@ # -*- coding: utf-8 -*- """AYON plugin tools.""" import os -import logging import re import collections -log = logging.getLogger(__name__) CAPITALIZE_REGEX = re.compile(r"[a-zA-Z0-9]") diff --git a/client/ayon_core/lib/transcoding.py b/client/ayon_core/lib/transcoding.py index 643a056563..1762881846 100644 --- a/client/ayon_core/lib/transcoding.py +++ b/client/ayon_core/lib/transcoding.py @@ -6,6 +6,8 @@ import collections import tempfile import subprocess import platform +import warnings +import functools from typing import Optional import xml.etree.ElementTree @@ -67,6 +69,47 @@ VIDEO_EXTENSIONS = { } +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." + ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", DeprecationWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=DeprecationWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) + + class MissingRGBAChannelsError(ValueError): """Raised when we can't find channels to use as RGBA for conversion in input media. 
@@ -393,12 +436,14 @@ def get_review_info_by_layer_name(channel_names): channels_by_layer_name[layer_name][channel] = channel_name - # Put empty layer to the beginning of the list + # Put empty layer or 'rgba' to the beginning of the list # - if input has R, G, B, A channels they should be used for review def _sort(_layer_name: str) -> int: # Prioritize "" layer name # Prioritize layers with RGB channels order = 0 + if _layer_name == "rgba": + order -= 11 if _layer_name == "": order -= 10 @@ -1024,6 +1069,8 @@ def convert_ffprobe_fps_to_float(value): return dividend / divisor +# --- Deprecated functions --- +@deprecated("oiio_color_convert") def convert_colorspace( input_path, output_path, @@ -1035,7 +1082,62 @@ def convert_colorspace( additional_command_args=None, logger=None, ): - """Convert source file from one color space to another. + """DEPRECATED function use `oiio_color_convert` instead + + Args: + input_path (str): Path to input file that should be converted. + output_path (str): Path to output file where result will be stored. + config_path (str): Path to OCIO config file. + source_colorspace (str): OCIO valid color space of source files. + target_colorspace (str, optional): OCIO valid target color space. + If filled, 'view' and 'display' must be empty. + view (str, optional): Name for target viewer space (OCIO valid). + Both 'view' and 'display' must be filled + (if not 'target_colorspace'). + display (str, optional): Name for target display-referred + reference space. Both 'view' and 'display' must be filled + (if not 'target_colorspace'). + additional_command_args (list, optional): Additional arguments + for oiiotool (like binary depth for .dpx). + logger (logging.Logger, optional): Logger used for logging. + + Returns: + None: Function returns None. + + Raises: + ValueError: If parameters are misconfigured. + """ + return oiio_color_convert( + input_path, + output_path, + config_path, + source_colorspace, + target_colorspace=target_colorspace, + target_display=display, + target_view=view, + additional_command_args=additional_command_args, + logger=logger, + ) + + +def oiio_color_convert( + input_path, + output_path, + config_path, + source_colorspace, + source_display=None, + source_view=None, + target_colorspace=None, + target_display=None, + target_view=None, + additional_command_args=None, + logger=None, +): + """Transcode source file to other with colormanagement. + + Oiiotool also support additional arguments for transcoding. + For more information, see the official documentation: + https://openimageio.readthedocs.io/en/latest/oiiotool.html Args: input_path (str): Path that should be converted. It is expected that @@ -1047,17 +1149,26 @@ def convert_colorspace( sequence in 'file.FRAMESTART-FRAMEEND#.ext', `output.1-3#.tif`) config_path (str): path to OCIO config file source_colorspace (str): ocio valid color space of source files + source_display (str, optional): name for source display-referred + reference space (ocio valid). 
If provided, source_view must also be
+            provided, and source_colorspace will be ignored
+        source_view (str, optional): name for source viewer space (ocio valid)
+            If provided, source_display must also be provided, and
+            source_colorspace will be ignored
         target_colorspace (str): ocio valid target color space
             if filled, 'view' and 'display' must be empty
-        view (str): name for viewer space (ocio valid)
-            both 'view' and 'display' must be filled (if 'target_colorspace')
-        display (str): name for display-referred reference space (ocio valid)
+        target_display (str): name for target display-referred reference space
+            (ocio valid) both 'view' and 'display' must be filled (if
+            'target_colorspace')
+        target_view (str): name for target viewer space (ocio valid)
             both 'view' and 'display' must be filled (if 'target_colorspace')
         additional_command_args (list): arguments for oiiotool (like binary
             depth for .dpx)
         logger (logging.Logger): Logger used for logging.
+
     Raises:
         ValueError: if misconfigured
+
     """
     if logger is None:
         logger = logging.getLogger(__name__)
@@ -1082,23 +1193,82 @@ def convert_colorspace(
             "--ch", channels_arg
         ])
 
-    if all([target_colorspace, view, display]):
-        raise ValueError("Colorspace and both screen and display"
-                         " cannot be set together."
-                         "Choose colorspace or screen and display")
-    if not target_colorspace and not all([view, display]):
-        raise ValueError("Both screen and display must be set.")
+    # Validate input parameters
+    if target_colorspace and target_view and target_display:
+        raise ValueError(
+            "Colorspace and both view and display cannot be set together."
+            " Choose either a colorspace, or a view and display pair."
+        )
+
+    if not target_colorspace and not (target_view and target_display):
+        raise ValueError(
+            "Both view and display must be set if target_colorspace is not "
+            "provided."
+        )
+
+    if (
+        (source_view and not source_display)
+        or (source_display and not source_view)
+    ):
+        raise ValueError(
+            "Both source_view and source_display must be provided if using "
+            "display/view inputs."
+        )
+
+    if source_view and source_display and source_colorspace:
+        logger.warning(
+            "Both source display/view and source_colorspace provided. "
+            "Using source display/view pair and ignoring source_colorspace."
+        )
 
     if additional_command_args:
         oiio_cmd.extend(additional_command_args)
 
-    if target_colorspace:
-        oiio_cmd.extend(["--colorconvert:subimages=0",
-                         source_colorspace,
-                         target_colorspace])
-    if view and display:
-        oiio_cmd.extend(["--iscolorspace", source_colorspace])
-        oiio_cmd.extend(["--ociodisplay:subimages=0", display, view])
+    # Handle the different conversion cases
+    # Source view and display are known
+    if source_view and source_display:
+        if target_colorspace:
+            # This is a two-step conversion process since there's no direct
+            # display/view to colorspace command
+            # This could be a config parameter or determined from OCIO config
+            # Use temporary role space 'scene_linear'
+            color_convert_args = ("scene_linear", target_colorspace)
+        elif source_display != target_display or source_view != target_view:
+            # Complete display/view pair conversion
+            # - go through a reference space
+            color_convert_args = (target_display, target_view)
+        else:
+            color_convert_args = None
+            logger.debug(
+                "Source and target display/view pairs are identical."
+                " No color conversion needed."
+ ) + + if color_convert_args: + oiio_cmd.extend([ + "--ociodisplay:inverse=1:subimages=0", + source_display, + source_view, + "--colorconvert:subimages=0", + *color_convert_args + ]) + + elif target_colorspace: + # Standard color space to color space conversion + oiio_cmd.extend([ + "--colorconvert:subimages=0", + source_colorspace, + target_colorspace, + ]) + else: + # Standard conversion from colorspace to display/view + oiio_cmd.extend([ + "--iscolorspace", + source_colorspace, + "--ociodisplay:subimages=0", + target_display, + target_view, + ]) oiio_cmd.extend(["-o", output_path]) @@ -1410,12 +1580,27 @@ def get_media_mime_type(filepath: str) -> Optional[str]: Optional[str]: Mime type or None if is unknown mime type. """ + # The implementation is identical or better with ayon_api >=1.1.0, + # which is used in AYON launcher >=1.3.0. + # NOTE Remove safe import when AYON launcher >=1.2.0. + try: + from ayon_api.utils import ( + get_media_mime_type_for_content as _ayon_api_func + ) + except ImportError: + _ayon_api_func = None + if not filepath or not os.path.exists(filepath): return None with open(filepath, "rb") as stream: content = stream.read() + if _ayon_api_func is not None: + mime_type = _ayon_api_func(content) + if mime_type is not None: + return mime_type + content_len = len(content) # Pre-validation (largest definition check) # - hopefully there cannot be media defined in less than 12 bytes @@ -1442,11 +1627,13 @@ def get_media_mime_type(filepath: str) -> Optional[str]: if b'xmlns="http://www.w3.org/2000/svg"' in content: return "image/svg+xml" - # JPEG, JFIF or Exif - if ( - content[0:4] == b"\xff\xd8\xff\xdb" - or content[6:10] in (b"JFIF", b"Exif") - ): + # JPEG + # - [0:2] is constant b"\xff\xd8" + # (ref. https://www.file-recovery.com/jpg-signature-format.htm) + # - [2:4] Marker identifier b"\xff{?}" + # (ref. 
https://www.disktuna.com/list-of-jpeg-markers/)
+    # NOTE: File ends with b"\xff\xd9"
+    if content[0:3] == b"\xff\xd8\xff":
         return "image/jpeg"
 
     # Webp
diff --git a/client/ayon_core/pipeline/__init__.py b/client/ayon_core/pipeline/__init__.py
index 137736c302..f2ec952cd6 100644
--- a/client/ayon_core/pipeline/__init__.py
+++ b/client/ayon_core/pipeline/__init__.py
@@ -19,11 +19,7 @@ from .create import (
     CreatedInstance,
     CreatorError,
 
-    LegacyCreator,
-    legacy_create,
 
-    discover_creator_plugins,
-    discover_legacy_creator_plugins,
     register_creator_plugin,
     deregister_creator_plugin,
     register_creator_plugin_path,
@@ -141,12 +137,7 @@ __all__ = (
     "CreatorError",
 
-    # - legacy creation
-    "LegacyCreator",
-    "legacy_create",
 
-    "discover_creator_plugins",
-    "discover_legacy_creator_plugins",
     "register_creator_plugin",
     "deregister_creator_plugin",
     "register_creator_plugin_path",
diff --git a/client/ayon_core/pipeline/actions.py b/client/ayon_core/pipeline/actions.py
index 860fed5e8b..6892af4252 100644
--- a/client/ayon_core/pipeline/actions.py
+++ b/client/ayon_core/pipeline/actions.py
@@ -37,16 +37,19 @@ class LauncherActionSelection:
         project_name,
         folder_id,
         task_id,
+        workfile_id,
         folder_path=None,
         task_name=None,
         project_entity=None,
         folder_entity=None,
         task_entity=None,
+        workfile_entity=None,
         project_settings=None,
     ):
         self._project_name = project_name
         self._folder_id = folder_id
         self._task_id = task_id
+        self._workfile_id = workfile_id
         self._folder_path = folder_path
         self._task_name = task_name
 
@@ -54,6 +57,7 @@ class LauncherActionSelection:
         self._project_entity = project_entity
         self._folder_entity = folder_entity
         self._task_entity = task_entity
+        self._workfile_entity = workfile_entity
 
         self._project_settings = project_settings
 
@@ -213,6 +217,15 @@ class LauncherActionSelection:
             self._task_name = self.task_entity["name"]
         return self._task_name
 
+    def get_workfile_id(self):
+        """Selected workfile id.
+
+        Returns:
+            Union[str, None]: Selected workfile id.
+
+        """
+        return self._workfile_id
+
     def get_project_entity(self):
         """Project entity for the selection.
 
@@ -259,6 +272,24 @@ class LauncherActionSelection:
             )
         return self._task_entity
 
+    def get_workfile_entity(self):
+        """Workfile entity for the selection.
+
+        Returns:
+            Union[dict[str, Any], None]: Workfile entity.
+
+        """
+        if (
+            self._project_name is None
+            or self._workfile_id is None
+        ):
+            return None
+        if self._workfile_entity is None:
+            self._workfile_entity = ayon_api.get_workfile_info_by_id(
+                self._project_name, self._workfile_id
+            )
+        return self._workfile_entity
+
     def get_project_settings(self):
         """Project settings for the selection.
 
@@ -305,15 +336,27 @@ class LauncherActionSelection:
         """
         return self._task_id is not None
 
+    @property
+    def is_workfile_selected(self):
+        """Return whether a workfile is selected.
+
+        Returns:
+            bool: Whether a workfile is selected.
+
+        """
+        return self._workfile_id is not None
+
     project_name = property(get_project_name)
     folder_id = property(get_folder_id)
     task_id = property(get_task_id)
+    workfile_id = property(get_workfile_id)
     folder_path = property(get_folder_path)
     task_name = property(get_task_name)
 
     project_entity = property(get_project_entity)
     folder_entity = property(get_folder_entity)
     task_entity = property(get_task_entity)
+    workfile_entity = property(get_workfile_entity)
 
 
 class LauncherAction(object):
diff --git a/client/ayon_core/pipeline/anatomy/__init__.py b/client/ayon_core/pipeline/anatomy/__init__.py
index 7000f51495..36bc2a138d 100644
--- a/client/ayon_core/pipeline/anatomy/__init__.py
+++ b/client/ayon_core/pipeline/anatomy/__init__.py
@@ -6,6 +6,7 @@ from .exceptions import (
     AnatomyTemplateUnsolved,
 )
 from .anatomy import Anatomy
+from .templates import AnatomyTemplateResult, AnatomyStringTemplate
 
 
 __all__ = (
@@ -16,4 +17,7 @@ __all__ = (
     "AnatomyTemplateUnsolved",
 
     "Anatomy",
+
+    "AnatomyTemplateResult",
+    "AnatomyStringTemplate",
 )
diff --git a/client/ayon_core/pipeline/anatomy/templates.py b/client/ayon_core/pipeline/anatomy/templates.py
index d89b70719e..e3ec005089 100644
--- a/client/ayon_core/pipeline/anatomy/templates.py
+++ b/client/ayon_core/pipeline/anatomy/templates.py
@@ -1,6 +1,7 @@
 import os
 import re
 import copy
+import platform
 import collections
 import numbers
 
@@ -15,6 +16,7 @@ from .exceptions import (
     AnatomyTemplateUnsolved,
 )
 
+_IS_WINDOWS = platform.system().lower() == "windows"
 _PLACEHOLDER = object()
 
 
@@ -526,6 +528,14 @@ class AnatomyTemplates:
             root_key = "{" + root_key + "}"
             output = output.replace(str(used_value), root_key)
 
+        # Make sure rootless path uses forward slashes
+        if _IS_WINDOWS:
+            output = output.replace("\\", "/")
+
+        # Make sure there are no double slashes
+        while "//" in output:
+            output = output.replace("//", "/")
+
         return output
 
     def format(self, data, strict=True):
diff --git a/client/ayon_core/pipeline/colorspace.py b/client/ayon_core/pipeline/colorspace.py
index 4b1d14d570..41241e17ca 100644
--- a/client/ayon_core/pipeline/colorspace.py
+++ b/client/ayon_core/pipeline/colorspace.py
@@ -1403,7 +1403,12 @@ def _get_display_view_colorspace_name(config_path, display, view):
     """
     config = _get_ocio_config(config_path)
-    return config.getDisplayViewColorSpaceName(display, view)
+    colorspace = config.getDisplayViewColorSpaceName(display, view)
+    # Special token.
See https://opencolorio.readthedocs.io/en/latest/guides/authoring/authoring.html#shared-views # noqa + if colorspace == "": + colorspace = display + + return colorspace def _get_ocio_config_colorspaces(config_path): diff --git a/client/ayon_core/pipeline/context_tools.py b/client/ayon_core/pipeline/context_tools.py index 66556bbb35..0589eeb49f 100644 --- a/client/ayon_core/pipeline/context_tools.py +++ b/client/ayon_core/pipeline/context_tools.py @@ -1,21 +1,22 @@ """Core pipeline functionality""" +from __future__ import annotations import os import logging import platform import uuid +import warnings +from typing import Optional, Any import ayon_api import pyblish.api from pyblish.lib import MessageHandler from ayon_core import AYON_CORE_ROOT -from ayon_core.host import HostBase +from ayon_core.host import AbstractHost from ayon_core.lib import ( is_in_tests, initialize_ayon_connection, - emit_event, - version_up ) from ayon_core.addon import load_addons, AddonsManager from ayon_core.settings import get_project_settings @@ -23,13 +24,7 @@ from ayon_core.settings import get_project_settings from .publish.lib import filter_pyblish_plugins from .anatomy import Anatomy from .template_data import get_template_data_with_names -from .workfile import ( - get_workdir, - get_custom_workfile_template_by_string_context, - get_workfile_template_key_from_context, - get_last_workfile, - MissingWorkdirError, -) +from .workfile import get_custom_workfile_template_by_string_context from . import ( register_loader_plugin_path, register_inventory_action_path, @@ -75,7 +70,7 @@ def _get_addons_manager(): def register_root(path): - """Register currently active root""" + """DEPRECATED Register currently active root.""" log.info("Registering root: %s" % path) _registered_root["_"] = path @@ -94,18 +89,29 @@ def registered_root(): Returns: dict[str, str]: Root paths. - """ + """ + warnings.warn( + "Used deprecated function 'registered_root'. Please use 'Anatomy'" + " to get roots.", + DeprecationWarning, + stacklevel=2, + ) return _registered_root["_"] -def install_host(host): +def install_host(host: AbstractHost) -> None: """Install `host` into the running Python session. Args: - host (HostBase): A host interface object. + host (AbstractHost): A host interface object. """ + if not isinstance(host, AbstractHost): + log.error( + f"Host must be a subclass of 'AbstractHost', got '{type(host)}'." 
+ ) + global _is_installed _is_installed = True @@ -183,7 +189,7 @@ def install_ayon_plugins(project_name=None, host_name=None): register_inventory_action_path(INVENTORY_PATH) if host_name is None: - host_name = os.environ.get("AYON_HOST_NAME") + host_name = get_current_host_name() addons_manager = _get_addons_manager() publish_plugin_dirs = addons_manager.collect_publish_plugin_paths( @@ -304,7 +310,7 @@ def get_current_host_name(): """ host = registered_host() - if isinstance(host, HostBase): + if isinstance(host, AbstractHost): return host.name return os.environ.get("AYON_HOST_NAME") @@ -340,32 +346,50 @@ def get_global_context(): def get_current_context(): host = registered_host() - if isinstance(host, HostBase): + if isinstance(host, AbstractHost): return host.get_current_context() return get_global_context() def get_current_project_name(): host = registered_host() - if isinstance(host, HostBase): + if isinstance(host, AbstractHost): return host.get_current_project_name() return get_global_context()["project_name"] def get_current_folder_path(): host = registered_host() - if isinstance(host, HostBase): + if isinstance(host, AbstractHost): return host.get_current_folder_path() return get_global_context()["folder_path"] def get_current_task_name(): host = registered_host() - if isinstance(host, HostBase): + if isinstance(host, AbstractHost): return host.get_current_task_name() return get_global_context()["task_name"] +def get_current_project_settings() -> dict[str, Any]: + """Project settings for the current context project. + + Returns: + dict[str, Any]: Project settings for the current context project. + + Raises: + ValueError: If current project is not set. + + """ + project_name = get_current_project_name() + if not project_name: + raise ValueError( + "Current project is not set. Can't get project settings." + ) + return get_project_settings(project_name) + + def get_current_project_entity(fields=None): """Helper function to get project document based on global Session. @@ -505,66 +529,64 @@ def get_current_context_custom_workfile_template(project_settings=None): ) -def change_current_context(folder_entity, task_entity, template_key=None): +_PLACEHOLDER = object() + + +def change_current_context( + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + template_key: Optional[str] = _PLACEHOLDER, + reason: Optional[str] = None, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional[Anatomy] = None, +) -> dict[str, str]: """Update active Session to a new task work area. - This updates the live Session to a different task under folder. + This updates the live Session to a different task under a folder. + + Notes: + * This function does a lot of things related to workfiles which + extends arguments options a lot. + * We might want to implement 'set_current_context' on host integration + instead. But `AYON_WORKDIR`, which is related to 'IWorkfileHost', + would not be available in that case which might break some + logic. Args: folder_entity (Dict[str, Any]): Folder entity to set. task_entity (Dict[str, Any]): Task entity to set. - template_key (Union[str, None]): Prepared template key to be used for - workfile template in Anatomy. + template_key (Optional[str]): DEPRECATED: Prepared template key to + be used for workfile template in Anatomy. + reason (Optional[str]): Reason for changing context. + anatomy (Optional[Anatomy]): Anatomy object used for workdir + calculation. 
+ project_entity (Optional[dict[str, Any]]): Project entity used for + workdir calculation. Returns: - Dict[str, str]: The changed key, values in the current Session. - """ + dict[str, str]: New context data. - project_name = get_current_project_name() - workdir = None - folder_path = None - task_name = None - if folder_entity: - folder_path = folder_entity["path"] - if task_entity: - task_name = task_entity["name"] - project_entity = ayon_api.get_project(project_name) - host_name = get_current_host_name() - workdir = get_workdir( - project_entity, - folder_entity, - task_entity, - host_name, - template_key=template_key + """ + if template_key is not _PLACEHOLDER: + warnings.warn( + ( + "Used deprecated argument 'template_key' in" + " 'change_current_context'." + " It is not necessary to pass it in anymore." + ), + DeprecationWarning, + stacklevel=2, ) - envs = { - "AYON_PROJECT_NAME": project_name, - "AYON_FOLDER_PATH": folder_path, - "AYON_TASK_NAME": task_name, - "AYON_WORKDIR": workdir, - } - - # Update the Session and environments. Pop from environments all keys with - # value set to None. - for key, value in envs.items(): - if value is None: - os.environ.pop(key, None) - else: - os.environ[key] = value - - data = envs.copy() - - # Convert env keys to human readable keys - data["project_name"] = project_name - data["folder_path"] = folder_path - data["task_name"] = task_name - data["workdir_path"] = workdir - - # Emit session change - emit_event("taskChanged", data) - - return data + host = registered_host() + return host.set_current_context( + folder_entity, + task_entity, + reason=reason, + project_entity=project_entity, + anatomy=anatomy, + ) def get_process_id(): @@ -583,53 +605,16 @@ def get_process_id(): def version_up_current_workfile(): - """Function to increment and save workfile + """DEPRECATED Function to increment and save workfile. + + Please use 'save_next_version' from 'ayon_core.pipeline.workfile' instead. + """ - host = registered_host() - - project_name = get_current_project_name() - folder_path = get_current_folder_path() - task_name = get_current_task_name() - host_name = get_current_host_name() - - template_key = get_workfile_template_key_from_context( - project_name, - folder_path, - task_name, - host_name, + warnings.warn( + "Used deprecated 'version_up_current_workfile' please use" + " 'save_next_version' from 'ayon_core.pipeline.workfile' instead.", + DeprecationWarning, + stacklevel=2, ) - anatomy = Anatomy(project_name) - - data = get_template_data_with_names( - project_name, folder_path, task_name, host_name - ) - data["root"] = anatomy.roots - - work_template = anatomy.get_template_item("work", template_key) - - # Define saving file extension - extensions = host.get_workfile_extensions() - current_file = host.get_current_workfile() - if current_file: - extensions = [os.path.splitext(current_file)[-1]] - - work_root = work_template["directory"].format_strict(data) - file_template = work_template["file"].template - last_workfile_path = get_last_workfile( - work_root, file_template, data, extensions, True - ) - # `get_last_workfile` will return the first expected file version - # if no files exist yet. In that case, if they do not exist we will - # want to save v001 - new_workfile_path = last_workfile_path - if os.path.exists(new_workfile_path): - new_workfile_path = version_up(new_workfile_path) - - # Raise an error if the parent folder doesn't exist as `host.save_workfile` - # is not supposed/able to create missing folders. 
- parent_folder = os.path.dirname(new_workfile_path) - if not os.path.exists(parent_folder): - raise MissingWorkdirError( - f"Work area directory '{parent_folder}' does not exist.") - - host.save_workfile(new_workfile_path) + from ayon_core.pipeline.workfile import save_next_version + save_next_version() diff --git a/client/ayon_core/pipeline/create/__init__.py b/client/ayon_core/pipeline/create/__init__.py index ced43528eb..edb1b12cd4 100644 --- a/client/ayon_core/pipeline/create/__init__.py +++ b/client/ayon_core/pipeline/create/__init__.py @@ -21,12 +21,14 @@ from .exceptions import ( TemplateFillError, ) from .structures import ( + ParentFlags, CreatedInstance, ConvertorItem, AttributeValues, CreatorAttributeValues, PublishAttributeValues, PublishAttributes, + InstanceContextInfo, ) from .utils import ( get_last_versions_for_instances, @@ -44,9 +46,6 @@ from .creator_plugins import ( AutoCreator, HiddenCreator, - discover_legacy_creator_plugins, - get_legacy_creator_by_name, - discover_creator_plugins, register_creator_plugin, deregister_creator_plugin, @@ -58,11 +57,6 @@ from .creator_plugins import ( from .context import CreateContext -from .legacy_create import ( - LegacyCreator, - legacy_create, -) - __all__ = ( "PRODUCT_NAME_ALLOWED_SYMBOLS", @@ -85,12 +79,14 @@ __all__ = ( "TaskNotSetError", "TemplateFillError", + "ParentFlags", "CreatedInstance", "ConvertorItem", "AttributeValues", "CreatorAttributeValues", "PublishAttributeValues", "PublishAttributes", + "InstanceContextInfo", "get_last_versions_for_instances", "get_next_versions_for_instances", @@ -105,9 +101,6 @@ __all__ = ( "AutoCreator", "HiddenCreator", - "discover_legacy_creator_plugins", - "get_legacy_creator_by_name", - "discover_creator_plugins", "register_creator_plugin", "deregister_creator_plugin", @@ -117,7 +110,4 @@ __all__ = ( "cache_and_get_instances", "CreateContext", - - "LegacyCreator", - "legacy_create", ) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 929cc59d2a..c9b3178fe4 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -41,7 +41,12 @@ from .exceptions import ( HostMissRequiredMethod, ) from .changes import TrackChangesItem -from .structures import PublishAttributes, ConvertorItem, InstanceContextInfo +from .structures import ( + PublishAttributes, + ConvertorItem, + InstanceContextInfo, + ParentFlags, +) from .creator_plugins import ( Creator, AutoCreator, @@ -49,15 +54,12 @@ from .creator_plugins import ( discover_convertor_plugins, ) if typing.TYPE_CHECKING: - from ayon_core.host import HostBase from ayon_core.lib import AbstractAttrDef from ayon_core.lib.events import EventCallback, Event from .structures import CreatedInstance from .creator_plugins import BaseCreator - class PublishHost(HostBase, IPublishHost): - pass # Import of functions and classes that were moved to different file # TODO Should be removed in future release - Added 24/08/28, 0.4.3-dev.1 @@ -80,6 +82,7 @@ INSTANCE_ADDED_TOPIC = "instances.added" INSTANCE_REMOVED_TOPIC = "instances.removed" VALUE_CHANGED_TOPIC = "values.changed" INSTANCE_REQUIREMENT_CHANGED_TOPIC = "instance.requirement.changed" +INSTANCE_PARENT_CHANGED_TOPIC = "instance.parent.changed" PRE_CREATE_ATTR_DEFS_CHANGED_TOPIC = "pre.create.attr.defs.changed" CREATE_ATTR_DEFS_CHANGED_TOPIC = "create.attr.defs.changed" PUBLISH_ATTR_DEFS_CHANGED_TOPIC = "publish.attr.defs.changed" @@ -163,7 +166,7 @@ class CreateContext: context which should be handled 
by host. Args: - host (PublishHost): Host implementation which handles implementation + host (IPublishHost): Host implementation which handles implementation and global metadata. headless (bool): Context is created out of UI (Current not used). reset (bool): Reset context on initialization. @@ -173,7 +176,7 @@ class CreateContext: def __init__( self, - host: "PublishHost", + host: IPublishHost, headless: bool = False, reset: bool = True, discover_publish_plugins: bool = True, @@ -262,6 +265,8 @@ class CreateContext: # - right now used only for 'mandatory' but can be extended # in future "requirement_change": BulkInfo(), + # Instance parent changed + "parent_change": BulkInfo(), } self._bulk_order = [] @@ -1083,6 +1088,35 @@ class CreateContext: INSTANCE_REQUIREMENT_CHANGED_TOPIC, callback ) + def add_instance_parent_change_callback( + self, callback: Callable + ) -> "EventCallback": + """Register callback to listen to instance parent changes. + + Instance changed parent or parent flags. + + Data structure of event: + + ```python + { + "instances": [CreatedInstance, ...], + "create_context": CreateContext + } + ``` + + Args: + callback (Callable): Callback function that will be called when + instance requirement changed. + + Returns: + EventCallback: Created callback object which can be used to + stop listening. + + """ + return self._event_hub.add_callback( + INSTANCE_PARENT_CHANGED_TOPIC, callback + ) + def context_data_to_store(self) -> dict[str, Any]: """Data that should be stored by host function. @@ -1364,6 +1398,13 @@ class CreateContext: ) as bulk_info: yield bulk_info + @contextmanager + def bulk_instance_parent_change(self, sender: Optional[str] = None): + with self._bulk_context( + "parent_change", sender + ) as bulk_info: + yield bulk_info + @contextmanager def bulk_publish_attr_defs_change(self, sender: Optional[str] = None): with self._bulk_context("publish_attrs_change", sender) as bulk_info: @@ -1444,6 +1485,19 @@ class CreateContext: with self.bulk_instance_requirement_change() as bulk_item: bulk_item.append(instance_id) + def instance_parent_changed(self, instance_id: str) -> None: + """Instance parent changed. + + Triggered by `CreatedInstance`. + + Args: + instance_id (Optional[str]): Instance id. + + """ + if self._is_instance_events_ready(instance_id): + with self.bulk_instance_parent_change() as bulk_item: + bulk_item.append(instance_id) + # --- context change callbacks --- def publish_attribute_value_changed( self, plugin_name: str, value: dict[str, Any] @@ -2046,63 +2100,97 @@ class CreateContext: sender (Optional[str]): Sender of the event. 
""" + instance_ids_by_parent_id = collections.defaultdict(set) + for instance in self.instances: + instance_ids_by_parent_id[instance.parent_instance_id].add( + instance.id + ) + + instances_to_remove = list(instances) + ids_to_remove = { + instance.id + for instance in instances_to_remove + } + _queue = collections.deque() + _queue.extend(instances_to_remove) + # Add children with parent lifetime flag + while _queue: + instance = _queue.popleft() + ids_to_remove.add(instance.id) + children_ids = instance_ids_by_parent_id[instance.id] + for children_id in children_ids: + if children_id in ids_to_remove: + continue + instance = self._instances_by_id[children_id] + if instance.parent_flags & ParentFlags.parent_lifetime: + instances_to_remove.append(instance) + ids_to_remove.add(instance.id) + _queue.append(instance) + instances_by_identifier = collections.defaultdict(list) - for instance in instances: + for instance in instances_to_remove: identifier = instance.creator_identifier instances_by_identifier[identifier].append(instance) # Just remove instances from context if creator is not available missing_creators = set(instances_by_identifier) - set(self.creators) - instances = [] + miss_creator_instances = [] for identifier in missing_creators: - instances.extend( - instance - for instance in instances_by_identifier[identifier] - ) + miss_creator_instances.extend(instances_by_identifier[identifier]) - self._remove_instances(instances, sender) + with self.bulk_remove_instances(sender): + self._remove_instances(miss_creator_instances, sender) - error_message = "Instances removement of creator \"{}\" failed. {}" - failed_info = [] - # Remove instances by creator plugin order - for creator in self.get_sorted_creators( - instances_by_identifier.keys() - ): - identifier = creator.identifier - creator_instances = instances_by_identifier[identifier] + error_message = "Instances removement of creator \"{}\" failed. {}" + failed_info = [] + # Remove instances by creator plugin order + for creator in self.get_sorted_creators( + instances_by_identifier.keys() + ): + identifier = creator.identifier + # Filter instances by current state of 'CreateContext' + # - in case instances were already removed as subroutine of + # previous create plugin. 
+ creator_instances = [ + instance + for instance in instances_by_identifier[identifier] + if instance.id in self._instances_by_id + ] + if not creator_instances: + continue - label = creator.label - failed = False - add_traceback = False - exc_info = None - try: - creator.remove_instances(creator_instances) + label = creator.label + failed = False + add_traceback = False + exc_info = None + try: + creator.remove_instances(creator_instances) - except CreatorError: - failed = True - exc_info = sys.exc_info() - self.log.warning( - error_message.format(identifier, exc_info[1]) - ) - - except (KeyboardInterrupt, SystemExit): - raise - - except: # noqa: E722 - failed = True - add_traceback = True - exc_info = sys.exc_info() - self.log.warning( - error_message.format(identifier, ""), - exc_info=True - ) - - if failed: - failed_info.append( - prepare_failed_creator_operation_info( - identifier, label, exc_info, add_traceback + except CreatorError: + failed = True + exc_info = sys.exc_info() + self.log.warning( + error_message.format(identifier, exc_info[1]) + ) + + except (KeyboardInterrupt, SystemExit): + raise + + except: # noqa: E722 + failed = True + add_traceback = True + exc_info = sys.exc_info() + self.log.warning( + error_message.format(identifier, ""), + exc_info=True + ) + + if failed: + failed_info.append( + prepare_failed_creator_operation_info( + identifier, label, exc_info, add_traceback + ) ) - ) if failed_info: raise CreatorsRemoveFailed(failed_info) @@ -2305,6 +2393,8 @@ class CreateContext: self._bulk_publish_attrs_change_finished(data, sender) elif key == "requirement_change": self._bulk_instance_requirement_change_finished(data, sender) + elif key == "parent_change": + self._bulk_instance_parent_change_finished(data, sender) def _bulk_add_instances_finished( self, @@ -2518,3 +2608,22 @@ class CreateContext: {"instances": instances}, sender, ) + + def _bulk_instance_parent_change_finished( + self, + instance_ids: list[str], + sender: Optional[str], + ): + if not instance_ids: + return + + instances = [ + self.get_instance_by_id(instance_id) + for instance_id in set(instance_ids) + ] + + self._emit_event( + INSTANCE_PARENT_CHANGED_TOPIC, + {"instances": instances}, + sender, + ) diff --git a/client/ayon_core/pipeline/create/creator_plugins.py b/client/ayon_core/pipeline/create/creator_plugins.py index cbc06145fb..7573589b82 100644 --- a/client/ayon_core/pipeline/create/creator_plugins.py +++ b/client/ayon_core/pipeline/create/creator_plugins.py @@ -6,7 +6,6 @@ from typing import TYPE_CHECKING, Optional, Dict, Any from abc import ABC, abstractmethod -from ayon_core.settings import get_project_settings from ayon_core.lib import Logger, get_version_from_path from ayon_core.pipeline.plugin_discover import ( discover, @@ -20,7 +19,6 @@ from ayon_core.pipeline.staging_dir import get_staging_dir_info, StagingDir from .constants import DEFAULT_VARIANT_VALUE from .product_name import get_product_name from .utils import get_next_versions_for_instances -from .legacy_create import LegacyCreator from .structures import CreatedInstance if TYPE_CHECKING: @@ -975,62 +973,10 @@ def discover_convertor_plugins(*args, **kwargs): return discover(ProductConvertorPlugin, *args, **kwargs) -def discover_legacy_creator_plugins(): - from ayon_core.pipeline import get_current_project_name - - log = Logger.get_logger("CreatorDiscover") - - plugins = discover(LegacyCreator) - project_name = get_current_project_name() - project_settings = get_project_settings(project_name) - for plugin in plugins: - try: - 
plugin.apply_settings(project_settings) - except Exception: - log.warning( - "Failed to apply settings to creator {}".format( - plugin.__name__ - ), - exc_info=True - ) - return plugins - - -def get_legacy_creator_by_name(creator_name, case_sensitive=False): - """Find creator plugin by name. - - Args: - creator_name (str): Name of creator class that should be returned. - case_sensitive (bool): Match of creator plugin name is case sensitive. - Set to `False` by default. - - Returns: - Creator: Return first matching plugin or `None`. - """ - - # Lower input creator name if is not case sensitive - if not case_sensitive: - creator_name = creator_name.lower() - - for creator_plugin in discover_legacy_creator_plugins(): - _creator_name = creator_plugin.__name__ - - # Lower creator plugin name if is not case sensitive - if not case_sensitive: - _creator_name = _creator_name.lower() - - if _creator_name == creator_name: - return creator_plugin - return None - - def register_creator_plugin(plugin): if issubclass(plugin, BaseCreator): register_plugin(BaseCreator, plugin) - elif issubclass(plugin, LegacyCreator): - register_plugin(LegacyCreator, plugin) - elif issubclass(plugin, ProductConvertorPlugin): register_plugin(ProductConvertorPlugin, plugin) @@ -1039,22 +985,17 @@ def deregister_creator_plugin(plugin): if issubclass(plugin, BaseCreator): deregister_plugin(BaseCreator, plugin) - elif issubclass(plugin, LegacyCreator): - deregister_plugin(LegacyCreator, plugin) - elif issubclass(plugin, ProductConvertorPlugin): deregister_plugin(ProductConvertorPlugin, plugin) def register_creator_plugin_path(path): register_plugin_path(BaseCreator, path) - register_plugin_path(LegacyCreator, path) register_plugin_path(ProductConvertorPlugin, path) def deregister_creator_plugin_path(path): deregister_plugin_path(BaseCreator, path) - deregister_plugin_path(LegacyCreator, path) deregister_plugin_path(ProductConvertorPlugin, path) diff --git a/client/ayon_core/pipeline/create/legacy_create.py b/client/ayon_core/pipeline/create/legacy_create.py deleted file mode 100644 index f6427d9bd1..0000000000 --- a/client/ayon_core/pipeline/create/legacy_create.py +++ /dev/null @@ -1,216 +0,0 @@ -"""Create workflow moved from avalon-core repository. 
- -Renamed classes and functions -- 'Creator' -> 'LegacyCreator' -- 'create' -> 'legacy_create' -""" - -import os -import logging -import collections - -from ayon_core.pipeline.constants import AYON_INSTANCE_ID - -from .product_name import get_product_name - - -class LegacyCreator: - """Determine how assets are created""" - label = None - product_type = None - defaults = None - maintain_selection = True - enabled = True - - dynamic_product_name_keys = [] - - log = logging.getLogger("LegacyCreator") - log.propagate = True - - def __init__(self, name, folder_path, options=None, data=None): - self.name = name # For backwards compatibility - self.options = options - - # Default data - self.data = collections.OrderedDict() - # TODO use 'AYON_INSTANCE_ID' when all hosts support it - self.data["id"] = AYON_INSTANCE_ID - self.data["productType"] = self.product_type - self.data["folderPath"] = folder_path - self.data["productName"] = name - self.data["active"] = True - - self.data.update(data or {}) - - @classmethod - def apply_settings(cls, project_settings): - """Apply AYON settings to a plugin class.""" - - host_name = os.environ.get("AYON_HOST_NAME") - plugin_type = "create" - plugin_type_settings = ( - project_settings - .get(host_name, {}) - .get(plugin_type, {}) - ) - global_type_settings = ( - project_settings - .get("core", {}) - .get(plugin_type, {}) - ) - if not global_type_settings and not plugin_type_settings: - return - - plugin_name = cls.__name__ - - plugin_settings = None - # Look for plugin settings in host specific settings - if plugin_name in plugin_type_settings: - plugin_settings = plugin_type_settings[plugin_name] - - # Look for plugin settings in global settings - elif plugin_name in global_type_settings: - plugin_settings = global_type_settings[plugin_name] - - if not plugin_settings: - return - - cls.log.debug(">>> We have preset for {}".format(plugin_name)) - for option, value in plugin_settings.items(): - if option == "enabled" and value is False: - cls.log.debug(" - is disabled by preset") - else: - cls.log.debug(" - setting `{}`: `{}`".format(option, value)) - setattr(cls, option, value) - - def process(self): - pass - - @classmethod - def get_dynamic_data( - cls, project_name, folder_entity, task_entity, variant, host_name - ): - """Return dynamic data for current Creator plugin. - - By default return keys from `dynamic_product_name_keys` attribute - as mapping to keep formatted template unchanged. - - ``` - dynamic_product_name_keys = ["my_key"] - --- - output = { - "my_key": "{my_key}" - } - ``` - - Dynamic keys may override default Creator keys (productType, task, - folderPath, ...) but do it wisely if you need. - - All of keys will be converted into 3 variants unchanged, capitalized - and all upper letters. Because of that are all keys lowered. - - This method can be modified to prefill some values just keep in mind it - is class method. - - Args: - project_name (str): Context's project name. - folder_entity (dict[str, Any]): Folder entity. - task_entity (dict[str, Any]): Task entity. - variant (str): What is entered by user in creator tool. - host_name (str): Name of host. - - Returns: - dict: Fill data for product name template. - """ - dynamic_data = {} - for key in cls.dynamic_product_name_keys: - key = key.lower() - dynamic_data[key] = "{" + key + "}" - return dynamic_data - - @classmethod - def get_product_name( - cls, project_name, folder_entity, task_entity, variant, host_name=None - ): - """Return product name created with entered arguments. 
- - Logic extracted from Creator tool. This method should give ability - to get product name without the tool. - - TODO: Maybe change `variant` variable. - - By default is output concatenated product type with variant. - - Args: - project_name (str): Context's project name. - folder_entity (dict[str, Any]): Folder entity. - task_entity (dict[str, Any]): Task entity. - variant (str): What is entered by user in creator tool. - host_name (str): Name of host. - - Returns: - str: Formatted product name with entered arguments. Should match - config's logic. - """ - - dynamic_data = cls.get_dynamic_data( - project_name, folder_entity, task_entity, variant, host_name - ) - task_name = task_type = None - if task_entity: - task_name = task_entity["name"] - task_type = task_entity["taskType"] - return get_product_name( - project_name, - task_name, - task_type, - host_name, - cls.product_type, - variant, - dynamic_data=dynamic_data - ) - - -def legacy_create( - Creator, product_name, folder_path, options=None, data=None -): - """Create a new instance - - Associate nodes with a product name and type. These nodes are later - validated, according to their `product type`, and integrated into the - shared environment, relative their `productName`. - - Data relative each product type, along with default data, are imprinted - into the resulting objectSet. This data is later used by extractors - and finally asset browsers to help identify the origin of the asset. - - Arguments: - Creator (Creator): Class of creator. - product_name (str): Name of product. - folder_path (str): Folder path. - options (dict, optional): Additional options from GUI. - data (dict, optional): Additional data from GUI. - - Raises: - NameError on `productName` already exists - KeyError on invalid dynamic property - RuntimeError on host error - - Returns: - Name of instance - - """ - from ayon_core.pipeline import registered_host - - host = registered_host() - plugin = Creator(product_name, folder_path, options, data) - - if plugin.maintain_selection is True: - with host.maintained_selection(): - print("Running %s with maintained selection" % plugin) - instance = plugin.process() - return instance - - print("Running %s" % plugin) - instance = plugin.process() - return instance diff --git a/client/ayon_core/pipeline/create/structures.py b/client/ayon_core/pipeline/create/structures.py index a4c68d2502..b2be377b42 100644 --- a/client/ayon_core/pipeline/create/structures.py +++ b/client/ayon_core/pipeline/create/structures.py @@ -1,6 +1,7 @@ import copy import collections from uuid import uuid4 +from enum import Enum import typing from typing import Optional, Dict, List, Any @@ -22,6 +23,23 @@ if typing.TYPE_CHECKING: from .creator_plugins import BaseCreator +class IntEnum(int, Enum): + """An int-based Enum class that allows for int comparison.""" + + def __int__(self) -> int: + return self.value + + +class ParentFlags(IntEnum): + # Delete instance if parent is deleted + parent_lifetime = 1 + # Active state is propagated from parent to children + # - the active state is propagated in collection phase + # NOTE It might be helpful to have a function that would return "real" + # active state for instances + share_active = 1 << 1 + + class ConvertorItem: """Item representing convertor plugin. 
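# A minimal illustration of the int-based flag enum defined above (evaluated
# in the context of this module): members combine and test with plain
# bitwise operators because they compare as integers.
flags = ParentFlags.parent_lifetime | ParentFlags.share_active  # == 3
if flags & ParentFlags.parent_lifetime:
    print("child follows the parent's lifetime")
if flags & ParentFlags.share_active:
    print("active state is propagated from the parent")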
@@ -507,7 +525,9 @@ class CreatedInstance: if transient_data is None: transient_data = {} self._transient_data = transient_data - self._is_mandatory = False + self._is_mandatory: bool = False + self._parent_instance_id: Optional[str] = None + self._parent_flags: int = 0 # Create a copy of passed data to avoid changing them on the fly data = copy.deepcopy(data or {}) @@ -752,6 +772,39 @@ class CreatedInstance: self["active"] = True self._create_context.instance_requirement_changed(self.id) + @property + def parent_instance_id(self) -> Optional[str]: + return self._parent_instance_id + + @property + def parent_flags(self) -> int: + return self._parent_flags + + def set_parent( + self, instance_id: Optional[str], flags: int + ) -> None: + """Set parent instance id and parenting flags. + + Args: + instance_id (Optional[str]): Parent instance id. + flags (int): Parenting flags. + + """ + changed = False + if instance_id != self._parent_instance_id: + changed = True + self._parent_instance_id = instance_id + + if flags is None: + flags = 0 + + if self._parent_flags != flags: + self._parent_flags = flags + changed = True + + if changed: + self._create_context.instance_parent_changed(self.id) + def changes(self): """Calculate and return changes.""" diff --git a/client/ayon_core/pipeline/editorial.py b/client/ayon_core/pipeline/editorial.py index 8b6cfc52f1..21468e6ddd 100644 --- a/client/ayon_core/pipeline/editorial.py +++ b/client/ayon_core/pipeline/editorial.py @@ -7,6 +7,10 @@ import opentimelineio as otio from opentimelineio import opentime as _ot +# https://github.com/AcademySoftwareFoundation/OpenTimelineIO/issues/1822 +OTIO_EPSILON = 1e-9 + + def otio_range_to_frame_range(otio_range): start = _ot.to_frames( otio_range.start_time, otio_range.start_time.rate) @@ -198,7 +202,8 @@ def is_clip_from_media_sequence(otio_clip): def remap_range_on_file_sequence(otio_clip, otio_range): - """ + """ Remap the provided range on a file sequence clip. + Args: otio_clip (otio.schema.Clip): The OTIO clip to check. otio_range (otio.schema.TimeRange): The trim range to apply. @@ -245,7 +250,11 @@ def remap_range_on_file_sequence(otio_clip, otio_range): if ( is_clip_from_media_sequence(otio_clip) and available_range_start_frame == media_ref.start_frame - and conformed_src_in.to_frames() < media_ref.start_frame + + # source range should be included in available range from media + # using round instead of conformed_src_in.to_frames() to avoid + # any precision issue with frame rate. 
+ and round(conformed_src_in.value) < media_ref.start_frame ): media_in = otio.opentime.RationalTime( 0, rate=available_range_rate diff --git a/client/ayon_core/pipeline/farm/pyblish_functions.py b/client/ayon_core/pipeline/farm/pyblish_functions.py index 0d8e70f9d2..a5053844b9 100644 --- a/client/ayon_core/pipeline/farm/pyblish_functions.py +++ b/client/ayon_core/pipeline/farm/pyblish_functions.py @@ -249,7 +249,8 @@ def create_skeleton_instance( # map inputVersions `ObjectId` -> `str` so json supports it "inputVersions": list(map(str, data.get("inputVersions", []))), "colorspace": data.get("colorspace"), - "hasExplicitFrames": data.get("hasExplicitFrames") + "hasExplicitFrames": data.get("hasExplicitFrames", False), + "reuseLastVersion": data.get("reuseLastVersion", False), } if data.get("renderlayer"): diff --git a/client/ayon_core/pipeline/load/plugins.py b/client/ayon_core/pipeline/load/plugins.py index dc5bb0f66f..ed963110c6 100644 --- a/client/ayon_core/pipeline/load/plugins.py +++ b/client/ayon_core/pipeline/load/plugins.py @@ -2,10 +2,10 @@ from __future__ import annotations from abc import abstractmethod -import logging import os from typing import Any, Optional, Type +from ayon_core.lib import Logger from ayon_core.pipeline.plugin_discover import ( deregister_plugin, deregister_plugin_path, @@ -31,8 +31,7 @@ class LoaderPlugin(list): options = [] - log = logging.getLogger("ProductLoader") - log.propagate = True + log = Logger.get_logger("ProductLoader") @classmethod def apply_settings(cls, project_settings): @@ -373,7 +372,7 @@ def discover_loader_plugins(project_name=None): if not project_name: project_name = get_current_project_name() project_settings = get_project_settings(project_name) - plugins = discover(LoaderPlugin) + plugins = discover(LoaderPlugin, allow_duplicates=False) hooks = discover(LoaderHookPlugin) sorted_hooks = sorted(hooks, key=lambda hook: hook.order) for plugin in plugins: diff --git a/client/ayon_core/pipeline/load/utils.py b/client/ayon_core/pipeline/load/utils.py index 3c50d76fb5..d1731d4cf9 100644 --- a/client/ayon_core/pipeline/load/utils.py +++ b/client/ayon_core/pipeline/load/utils.py @@ -9,7 +9,7 @@ from typing import Optional, Union, Any import ayon_api -from ayon_core.host import ILoadHost +from ayon_core.host import ILoadHost, AbstractHost from ayon_core.lib import ( StringTemplate, TemplateUnsolved, @@ -720,11 +720,13 @@ def get_representation_path(representation, root=None): str: fullpath of the representation """ - if root is None: - from ayon_core.pipeline import registered_root + from ayon_core.pipeline import get_current_project_name, Anatomy - root = registered_root() + anatomy = Anatomy(get_current_project_name()) + return get_representation_path_with_anatomy( + representation, anatomy + ) def path_from_representation(): try: @@ -772,7 +774,7 @@ def get_representation_path(representation, root=None): dir_path, file_name = os.path.split(path) if not os.path.exists(dir_path): - return + return None base_name, ext = os.path.splitext(file_name) file_name_items = None @@ -782,7 +784,7 @@ def get_representation_path(representation, root=None): file_name_items = base_name.split("%") if not file_name_items: - return + return None filename_start = file_name_items[0] @@ -940,15 +942,21 @@ def any_outdated_containers(host=None, project_name=None): return False -def get_outdated_containers(host=None, project_name=None): +def get_outdated_containers( + host: Optional[AbstractHost] = None, + project_name: Optional[str] = None, + 
ignore_locked_versions: bool = False, +): """Collect outdated containers from host scene. Currently registered host and project in global session are used if arguments are not passed. Args: - host (ModuleType): Host implementation with 'ls' function available. - project_name (str): Name of project in which context we are. + host (Optional[AbstractHost]): Host implementation. + project_name (Optional[str]): Name of project in which context we are. + ignore_locked_versions (bool): Locked versions are ignored. + """ from ayon_core.pipeline import registered_host, get_current_project_name @@ -962,7 +970,16 @@ def get_outdated_containers(host=None, project_name=None): containers = host.get_containers() else: containers = host.ls() - return filter_containers(containers, project_name).outdated + + outdated_containers = [] + for container in filter_containers(containers, project_name).outdated: + if ( + not ignore_locked_versions + and container.get("version_locked") is True + ): + continue + outdated_containers.append(container) + return outdated_containers def _is_valid_representation_id(repre_id: Any) -> bool: @@ -983,6 +1000,9 @@ def filter_containers(containers, project_name): 'invalid' are invalid containers (invalid content) and 'not_found' has some missing entity in database. + Todos: + Respect 'project_name' on containers if is available. + Args: containers (Iterable[dict]): List of containers referenced into scene. project_name (str): Name of project in which context shoud look for @@ -991,8 +1011,8 @@ def filter_containers(containers, project_name): Returns: ContainersFilterResult: Named tuple with 'latest', 'outdated', 'invalid' and 'not_found' containers. - """ + """ # Make sure containers is list that won't change containers = list(containers) @@ -1040,13 +1060,13 @@ def filter_containers(containers, project_name): hero=True, fields={"id", "productId", "version"} ) - verisons_by_id = {} + versions_by_id = {} versions_by_product_id = collections.defaultdict(list) hero_version_ids = set() for version_entity in version_entities: version_id = version_entity["id"] # Store versions by their ids - verisons_by_id[version_id] = version_entity + versions_by_id[version_id] = version_entity # There's no need to query products for hero versions # - they are considered as latest? if version_entity["version"] < 0: @@ -1081,24 +1101,23 @@ def filter_containers(containers, project_name): repre_entity = repre_entities_by_id.get(repre_id) if not repre_entity: - log.debug(( - "Container '{}' has an invalid representation." + log.debug( + f"Container '{container_name}' has an invalid representation." " It is missing in the database." - ).format(container_name)) + ) not_found_containers.append(container) continue version_id = repre_entity["versionId"] - if version_id in outdated_version_ids: - outdated_containers.append(container) - - elif version_id not in verisons_by_id: - log.debug(( - "Representation on container '{}' has an invalid version." - " It is missing in the database." - ).format(container_name)) + if version_id not in versions_by_id: + log.debug( + f"Representation on container '{container_name}' has an" + " invalid version. It is missing in the database." 
+ ) not_found_containers.append(container) + elif version_id in outdated_version_ids: + outdated_containers.append(container) else: uptodate_containers.append(container) diff --git a/client/ayon_core/pipeline/plugin_discover.py b/client/ayon_core/pipeline/plugin_discover.py index f531600276..03da7fce79 100644 --- a/client/ayon_core/pipeline/plugin_discover.py +++ b/client/ayon_core/pipeline/plugin_discover.py @@ -51,7 +51,7 @@ class DiscoverResult: "*** Discovered {} plugins".format(len(self.plugins)) ) for cls in self.plugins: - lines.append("- {}".format(cls.__class__.__name__)) + lines.append("- {}".format(cls.__name__)) # Plugin that were defined to be ignored if self.ignored_plugins or full_report: diff --git a/client/ayon_core/pipeline/publish/lib.py b/client/ayon_core/pipeline/publish/lib.py index 49143c4426..1f983808b0 100644 --- a/client/ayon_core/pipeline/publish/lib.py +++ b/client/ayon_core/pipeline/publish/lib.py @@ -5,14 +5,22 @@ import sys import inspect import copy import warnings +import hashlib import xml.etree.ElementTree -from typing import TYPE_CHECKING, Optional, Union, List +from typing import TYPE_CHECKING, Optional, Union, List, Any +import clique +import speedcopy +import logging -import ayon_api import pyblish.util import pyblish.plugin import pyblish.api +from ayon_api import ( + get_server_api_connection, + get_representations, + get_last_version_by_product_name +) from ayon_core.lib import ( import_filepath, Logger, @@ -33,6 +41,8 @@ if TYPE_CHECKING: TRAIT_INSTANCE_KEY: str = "representations_with_traits" +log = logging.getLogger(__name__) + def get_template_name_profiles( project_name, project_settings=None, logger=None @@ -243,32 +253,38 @@ def publish_plugins_discover( for path in paths: path = os.path.normpath(path) - if not os.path.isdir(path): - continue + filenames = [] + if os.path.isdir(path): + filenames.extend( + name + for name in os.listdir(path) + if ( + os.path.isfile(os.path.join(path, name)) + and not name.startswith("_") + ) + ) + else: + filenames.append(os.path.basename(path)) + path = os.path.dirname(path) - for fname in os.listdir(path): - if fname.startswith("_"): - continue - - abspath = os.path.join(path, fname) - - if not os.path.isfile(abspath): - continue - - mod_name, mod_ext = os.path.splitext(fname) - - if mod_ext != ".py": + dirpath_hash = hashlib.md5(path.encode("utf-8")).hexdigest() + for filename in filenames: + basename, ext = os.path.splitext(filename) + if ext.lower() != ".py": continue + filepath = os.path.join(path, filename) + module_name = f"{dirpath_hash}.{basename}" try: module = import_filepath( - abspath, mod_name, sys_module_name=mod_name) + filepath, module_name, sys_module_name=module_name + ) except Exception as err: # noqa: BLE001 # we need broad exception to catch all possible errors. 
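# Why the directory hash is mixed into the module name above (illustrative,
# made-up folder paths): two plugin folders may both contain a file such as
# 'collect_scene.py', and importing both under the bare basename would
# collide in 'sys.modules'.
import hashlib

for dirpath in ("/studio/publish_plugins", "/project/publish_plugins"):
    dirpath_hash = hashlib.md5(dirpath.encode("utf-8")).hexdigest()
    print(f"{dirpath_hash}.collect_scene")  # unique module name per folder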
- result.crashed_file_paths[abspath] = sys.exc_info() + result.crashed_file_paths[filepath] = sys.exc_info() - log.debug('Skipped: "%s" (%s)', mod_name, err) + log.debug('Skipped: "%s" (%s)', filepath, err) continue for plugin in pyblish.plugin.plugins_from_module(module): @@ -354,12 +370,18 @@ def get_plugin_settings(plugin, project_settings, log, category=None): # Use project settings based on a category name if category: try: - return ( + output = ( project_settings [category] ["publish"] [plugin.__name__] ) + warnings.warn( + "Please fill 'settings_category'" + f" for plugin '{plugin.__name__}'.", + DeprecationWarning + ) + return output except KeyError: pass @@ -384,12 +406,18 @@ def get_plugin_settings(plugin, project_settings, log, category=None): category_from_file = "core" try: - return ( + output = ( project_settings [category_from_file] [plugin_kind] [plugin.__name__] ) + warnings.warn( + "Please fill 'settings_category'" + f" for plugin '{plugin.__name__}'.", + DeprecationWarning + ) + return output except KeyError: pass return {} @@ -955,7 +983,26 @@ def get_instance_expected_output_path( "version": version }) - path_template_obj = anatomy.get_template_item("publish", "default")["path"] + # Get instance publish template name + task_name = task_type = None + task_entity = instance.data.get("taskEntity") + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] + + template_name = get_publish_template_name( + project_name=instance.context.data["projectName"], + host_name=instance.context.data["hostName"], + product_type=instance.data["productType"], + task_name=task_name, + task_type=task_type, + project_settings=instance.context.data["project_settings"], + ) + + path_template_obj = anatomy.get_template_item( + "publish", + template_name + )["path"] template_filled = path_template_obj.format_strict(template_data) return os.path.normpath(template_filled) @@ -1011,7 +1058,7 @@ def main_cli_publish( # NOTE: ayon-python-api does not have public api function to find # out if is used service user. So we need to have try > except # block. - con = ayon_api.get_server_api_connection() + con = get_server_api_connection() try: con.set_default_service_username(username) except ValueError: @@ -1048,7 +1095,7 @@ def main_cli_publish( discover_result = publish_plugins_discover() publish_plugins = discover_result.plugins - print("\n".join(discover_result.get_report(only_errors=False))) + print(discover_result.get_report(only_errors=False)) # Error exit as soon as any error occurs. 
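# The DeprecationWarning added to 'get_plugin_settings' above nudges plugin
# authors to declare 'settings_category' instead of relying on the
# host/category fallback. A hedged example of a plugin that opts in (class
# name and category value are illustrative):
import pyblish.api


class CollectExampleData(pyblish.api.InstancePlugin):
    label = "Collect Example Data"
    order = pyblish.api.CollectorOrder
    # With this filled, settings are applied automatically from the declared
    # category and the fallback lookup above is never reached.
    settings_category = "example_addon"

    def process(self, instance):
        pass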
error_format = ("Failed {plugin.__name__}: " @@ -1124,3 +1171,90 @@ def get_trait_representations( """ return instance.data.get(TRAIT_INSTANCE_KEY, []) + + +def fill_sequence_gaps_with_previous_version( + collection: str, + staging_dir: str, + instance: pyblish.plugin.Instance, + current_repre_name: str, + start_frame: int, + end_frame: int +) -> tuple[Optional[dict[str, Any]], Optional[dict[int, str]]]: + """Tries to replace missing frames from ones from last version""" + used_version_entity, repre_file_paths = _get_last_version_files( + instance, current_repre_name + ) + if repre_file_paths is None: + # issues in getting last version files + return (None, None) + + prev_collection = clique.assemble( + repre_file_paths, + patterns=[clique.PATTERNS["frames"]], + minimum_items=1 + )[0][0] + prev_col_format = prev_collection.format("{head}{padding}{tail}") + + added_files = {} + anatomy = instance.context.data["anatomy"] + col_format = collection.format("{head}{padding}{tail}") + for frame in range(start_frame, end_frame + 1): + if frame in collection.indexes: + continue + hole_fpath = os.path.join(staging_dir, col_format % frame) + + previous_version_path = prev_col_format % frame + previous_version_path = anatomy.fill_root(previous_version_path) + if not os.path.exists(previous_version_path): + log.warning( + "Missing frame should be replaced from " + f"'{previous_version_path}' but that doesn't exist. " + ) + return (None, None) + + log.warning( + f"Replacing missing '{hole_fpath}' with " + f"'{previous_version_path}'" + ) + speedcopy.copyfile(previous_version_path, hole_fpath) + added_files[frame] = hole_fpath + + return (used_version_entity, added_files) + + +def _get_last_version_files( + instance: pyblish.plugin.Instance, + current_repre_name: str, +) -> tuple[Optional[dict[str, Any]], Optional[list[str]]]: + product_name = instance.data["productName"] + project_name = instance.data["projectEntity"]["name"] + folder_entity = instance.data["folderEntity"] + + version_entity = get_last_version_by_product_name( + project_name, + product_name, + folder_entity["id"], + fields={"id", "attrib"} + ) + + if not version_entity: + return None, None + + matching_repres = get_representations( + project_name, + version_ids=[version_entity["id"]], + representation_names=[current_repre_name], + fields={"files"} + ) + + matching_repre = next(matching_repres, None) + if not matching_repre: + return None, None + + repre_file_paths = [ + file_info["path"] + for file_info in matching_repre["files"] + ] + + return (version_entity, repre_file_paths) diff --git a/client/ayon_core/pipeline/template_data.py b/client/ayon_core/pipeline/template_data.py index 0a95a98be8..dc7e95c788 100644 --- a/client/ayon_core/pipeline/template_data.py +++ b/client/ayon_core/pipeline/template_data.py @@ -1,27 +1,50 @@ +from __future__ import annotations + +from typing import Optional, Any + import ayon_api from ayon_core.settings import get_studio_settings -from ayon_core.lib.local_settings import get_ayon_username +from ayon_core.lib import DefaultKeysDict +from ayon_core.lib.local_settings import get_ayon_user_entity -def get_general_template_data(settings=None, username=None): +def get_general_template_data( + settings: Optional[dict[str, Any]] = None, + username: Optional[str] = None, + user_entity: Optional[dict[str, Any]] = None, +): """General template data based on system settings or machine. 
Output contains formatting keys: - - 'studio[name]' - Studio name filled from system settings - - 'studio[code]' - Studio code filled from system settings - - 'user' - User's name using 'get_ayon_username' + - 'studio[name]' - Studio name filled from system settings + - 'studio[code]' - Studio code filled from system settings + - 'user[name]' - User's name + - 'user[attrib][...]' - User's attributes + - 'user[data][...]' - User's data Args: settings (Dict[str, Any]): Studio or project settings. username (Optional[str]): AYON Username. - """ + user_entity (Optional[dict[str, Any]]): User entity. + """ if not settings: settings = get_studio_settings() - if username is None: - username = get_ayon_username() + if user_entity is None: + user_entity = get_ayon_user_entity(username) + + # Use dictionary with default value for backwards compatibility + # - we did support '{user}' now it should be '{user[name]}' + user_data = DefaultKeysDict( + "name", + { + "name": user_entity["name"], + "attrib": user_entity["attrib"], + "data": user_entity["data"], + } + ) core_settings = settings["core"] return { @@ -29,7 +52,7 @@ def get_general_template_data(settings=None, username=None): "name": core_settings["studio_name"], "code": core_settings["studio_code"] }, - "user": username + "user": user_data, } @@ -150,7 +173,8 @@ def get_template_data( task_entity=None, host_name=None, settings=None, - username=None + username=None, + user_entity=None, ): """Prepare data for templates filling from entered documents and info. @@ -173,13 +197,18 @@ def get_template_data( host_name (Optional[str]): Used to fill '{app}' key. settings (Union[Dict, None]): Prepared studio or project settings. They're queried if not passed (may be slower). - username (Optional[str]): AYON Username. + username (Optional[str]): DEPRECATED AYON Username. + user_entity (Optional[dict[str, Any]): AYON user entity. Returns: Dict[str, Any]: Data prepared for filling workdir template. 
""" - template_data = get_general_template_data(settings, username=username) + template_data = get_general_template_data( + settings, + username=username, + user_entity=user_entity, + ) template_data.update(get_project_template_data(project_entity)) if folder_entity: template_data.update(get_folder_template_data( diff --git a/client/ayon_core/pipeline/workfile/__init__.py b/client/ayon_core/pipeline/workfile/__init__.py index aa7e150bca..7acaf69a7c 100644 --- a/client/ayon_core/pipeline/workfile/__init__.py +++ b/client/ayon_core/pipeline/workfile/__init__.py @@ -4,6 +4,8 @@ from .path_resolving import ( get_workdir_with_workdir_data, get_workdir, + get_last_workfile_with_version_from_paths, + get_last_workfile_from_paths, get_last_workfile_with_version, get_last_workfile, @@ -11,12 +13,21 @@ from .path_resolving import ( get_custom_workfile_template_by_string_context, create_workdir_extra_folders, + + get_comments_from_workfile_paths, ) from .utils import ( should_use_last_workfile_on_launch, should_open_workfiles_tool_on_launch, MissingWorkdirError, + + save_workfile_info, + save_current_workfile_to, + save_workfile_with_current_context, + save_next_version, + copy_workfile_to_context, + find_workfile_rootless_path, ) from .build_workfile import BuildWorkfile @@ -37,18 +48,29 @@ __all__ = ( "get_workdir_with_workdir_data", "get_workdir", + "get_last_workfile_with_version_from_paths", + "get_last_workfile_from_paths", "get_last_workfile_with_version", "get_last_workfile", + "find_workfile_rootless_path", "get_custom_workfile_template", "get_custom_workfile_template_by_string_context", "create_workdir_extra_folders", + "get_comments_from_workfile_paths", + "should_use_last_workfile_on_launch", "should_open_workfiles_tool_on_launch", "MissingWorkdirError", + "save_workfile_info", + "save_current_workfile_to", + "save_workfile_with_current_context", + "save_next_version", + "copy_workfile_to_context", + "BuildWorkfile", "discover_workfile_build_plugins", diff --git a/client/ayon_core/pipeline/workfile/path_resolving.py b/client/ayon_core/pipeline/workfile/path_resolving.py index 9b2fe25199..b806f1ebf0 100644 --- a/client/ayon_core/pipeline/workfile/path_resolving.py +++ b/client/ayon_core/pipeline/workfile/path_resolving.py @@ -1,8 +1,12 @@ +from __future__ import annotations import os import re import copy import platform +import warnings +import typing from typing import Optional, Dict, Any +from dataclasses import dataclass import ayon_api @@ -15,6 +19,9 @@ from ayon_core.lib import ( from ayon_core.pipeline import version_start, Anatomy from ayon_core.pipeline.template_data import get_template_data +if typing.TYPE_CHECKING: + from ayon_core.pipeline.anatomy import AnatomyTemplateResult + def get_workfile_template_key_from_context( project_name: str, @@ -111,7 +118,7 @@ def get_workdir_with_workdir_data( anatomy=None, template_key=None, project_settings=None -): +) -> "AnatomyTemplateResult": """Fill workdir path from entered data and project's anatomy. It is possible to pass only project's name instead of project's anatomy but @@ -130,9 +137,9 @@ def get_workdir_with_workdir_data( if 'template_key' is not passed. Returns: - TemplateResult: Workdir path. - """ + AnatomyTemplateResult: Workdir path. 
+ """ if not anatomy: anatomy = Anatomy(project_name) @@ -147,7 +154,7 @@ def get_workdir_with_workdir_data( template_obj = anatomy.get_template_item( "work", template_key, "directory" ) - # Output is TemplateResult object which contain useful data + # Output is AnatomyTemplateResult object which contain useful data output = template_obj.format_strict(workdir_data) if output: return output.normalized() @@ -155,14 +162,14 @@ def get_workdir_with_workdir_data( def get_workdir( - project_entity, - folder_entity, - task_entity, - host_name, + project_entity: dict[str, Any], + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + host_name: str, anatomy=None, template_key=None, project_settings=None -): +) -> "AnatomyTemplateResult": """Fill workdir path from entered data and project's anatomy. Args: @@ -174,8 +181,8 @@ def get_workdir( is stored under `AYON_HOST_NAME` key. anatomy (Anatomy): Optional argument. Anatomy object is created using project name from `project_entity`. It is preferred to pass this - argument as initialization of a new Anatomy object may be time - consuming. + argument as initialization of a new Anatomy object may be + time-consuming. template_key (str): Key of work templates in anatomy templates. Default value is defined in `get_workdir_with_workdir_data`. project_settings(Dict[str, Any]): Prepared project settings for @@ -183,9 +190,9 @@ def get_workdir( if 'template_key' is not passed. Returns: - TemplateResult: Workdir path. - """ + AnatomyTemplateResult: Workdir path. + """ if not anatomy: anatomy = Anatomy( project_entity["name"], project_entity=project_entity @@ -197,7 +204,7 @@ def get_workdir( task_entity, host_name, ) - # Output is TemplateResult object which contain useful data + # Output is AnatomyTemplateResult object which contain useful data return get_workdir_with_workdir_data( workdir_data, anatomy.project_name, @@ -207,12 +214,141 @@ def get_workdir( ) -def get_last_workfile_with_version( - workdir, file_template, fill_data, extensions -): +@dataclass +class WorkfileParsedData: + version: Optional[int] = None + comment: Optional[str] = None + ext: Optional[str] = None + + +class WorkfileDataParser: + """Parse dynamic data from existing filenames based on template. + + Args: + file_template (str): Workfile file template. + data (dict[str, Any]): Data to fill the template with. 
+ + """ + def __init__( + self, + file_template: str, + data: dict[str, Any], + ): + data = copy.deepcopy(data) + file_template = str(file_template) + # Use placeholders that will never be in the filename + ext_replacement = "CIextID" + version_replacement = "CIversionID" + comment_replacement = "CIcommentID" + data["version"] = version_replacement + data["comment"] = comment_replacement + for pattern, replacement in ( + # Replace `.{ext}` with `{ext}` so we are sure dot is not + # at the end + (r"\.?{ext}", ext_replacement), + ): + file_template = re.sub(pattern, replacement, file_template) + + file_template = StringTemplate(file_template) + # Prepare template that does contain 'comment' + comment_template = re.escape(str(file_template.format_strict(data))) + # Prepare template that does not contain 'comment' + # - comment is usually marked as optional and in that case the regex + # to find the comment is different based on the filename + # - if filename contains comment then 'comment_template' will match + # - if filename does not contain comment then 'file_template' will + # match + data.pop("comment") + file_template = re.escape(str(file_template.format_strict(data))) + for src, replacement in ( + (ext_replacement, r"(?P\..*)"), + (version_replacement, r"(?P[0-9]+)"), + (comment_replacement, r"(?P.+?)"), + ): + comment_template = comment_template.replace(src, replacement) + file_template = file_template.replace(src, replacement) + + kwargs = {} + if platform.system().lower() == "windows": + kwargs["flags"] = re.IGNORECASE + + # Match from beginning to end of string to be safe + self._comment_template = re.compile(f"^{comment_template}$", **kwargs) + self._file_template = re.compile(f"^{file_template}$", **kwargs) + + def parse_data(self, filename: str) -> WorkfileParsedData: + """Parse the dynamic data from a filename.""" + match = self._comment_template.match(filename) + if not match: + match = self._file_template.match(filename) + + if not match: + return WorkfileParsedData() + + kwargs = match.groupdict() + version = kwargs.get("version") + if version is not None: + kwargs["version"] = int(version) + return WorkfileParsedData(**kwargs) + + +def parse_dynamic_data_from_workfile( + filename: str, + file_template: str, + template_data: dict[str, Any], +) -> WorkfileParsedData: + """Parse dynamic data from a workfile filename. + + Dynamic data are 'version', 'comment' and 'ext'. + + Args: + filename (str): Workfile filename. + file_template (str): Workfile file template. + template_data (dict[str, Any]): Data to fill the template with. + + Returns: + WorkfileParsedData: Dynamic data parsed from the filename. + + """ + parser = WorkfileDataParser(file_template, template_data) + return parser.parse_data(filename) + + +def parse_dynamic_data_from_workfiles( + filenames: list[str], + file_template: str, + template_data: dict[str, Any], +) -> dict[str, WorkfileParsedData]: + """Parse dynamic data from a workfiles filenames. + + Dynamic data are 'version', 'comment' and 'ext'. + + Args: + filenames (list[str]): Workfiles filenames. + file_template (str): Workfile file template. + template_data (dict[str, Any]): Data to fill the template with. + + Returns: + dict[str, WorkfileParsedData]: Dynamic data parsed from the filenames + by filename. 
+ + """ + parser = WorkfileDataParser(file_template, template_data) + return { + filename: parser.parse_data(filename) + for filename in filenames + } + + +def get_last_workfile_with_version_from_paths( + filepaths: list[str], + file_template: str, + template_data: dict[str, Any], + extensions: set[str], +) -> tuple[Optional[str], Optional[int]]: """Return last workfile version. - Usign workfile template and it's filling data find most possible last + Using the workfile template and its template data find most possible last version of workfile which was created for the context. Functionality is fully based on knowing which keys are optional or what @@ -222,50 +358,43 @@ def get_last_workfile_with_version( last workfile. Args: - workdir (str): Path to dir where workfiles are stored. + filepaths (list[str]): Workfile paths. file_template (str): Template of file name. - fill_data (Dict[str, Any]): Data for filling template. - extensions (Iterable[str]): All allowed file extensions of workfile. + template_data (Dict[str, Any]): Data for filling template. + extensions (set[str]): All allowed file extensions of workfile. Returns: - Tuple[Union[str, None], Union[int, None]]: Last workfile with version + tuple[Optional[str], Optional[int]]: Last workfile with version if there is any workfile otherwise None for both. - """ - if not os.path.exists(workdir): + """ + if not filepaths: return None, None dotted_extensions = set() for ext in extensions: if not ext.startswith("."): - ext = ".{}".format(ext) - dotted_extensions.add(ext) - - # Fast match on extension - filenames = [ - filename - for filename in os.listdir(workdir) - if os.path.splitext(filename)[-1] in dotted_extensions - ] + ext = f".{ext}" + dotted_extensions.add(re.escape(ext)) # Build template without optionals, version to digits only regex # and comment to any definable value. 
# Escape extensions dot for regex - regex_exts = [ - "\\" + ext - for ext in dotted_extensions - ] - ext_expression = "(?:" + "|".join(regex_exts) + ")" + ext_expression = "(?:" + "|".join(dotted_extensions) + ")" + + for pattern, replacement in ( + # Replace `.{ext}` with `{ext}` so we are sure dot is not at the end + (r"\.?{ext}", ext_expression), + # Replace optional keys with optional content regex + (r"<.*?>", r".*?"), + # Replace `{version}` with group regex + (r"{version.*?}", r"([0-9]+)"), + (r"{comment.*?}", r".+?"), + ): + file_template = re.sub(pattern, replacement, file_template) - # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end - file_template = re.sub(r"\.?{ext}", ext_expression, file_template) - # Replace optional keys with optional content regex - file_template = re.sub(r"<.*?>", r".*?", file_template) - # Replace `{version}` with group regex - file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) - file_template = re.sub(r"{comment.*?}", r".+?", file_template) file_template = StringTemplate.format_strict_template( - file_template, fill_data + file_template, template_data ) # Match with ignore case on Windows due to the Windows @@ -278,64 +407,189 @@ def get_last_workfile_with_version( # Get highest version among existing matching files version = None - output_filenames = [] - for filename in sorted(filenames): + output_filepaths = [] + for filepath in sorted(filepaths): + filename = os.path.basename(filepath) match = re.match(file_template, filename, **kwargs) if not match: continue if not match.groups(): - output_filenames.append(filename) + output_filepaths.append(filename) continue file_version = int(match.group(1)) if version is None or file_version > version: - output_filenames[:] = [] + output_filepaths.clear() version = file_version if file_version == version: - output_filenames.append(filename) + output_filepaths.append(filepath) - output_filename = None - if output_filenames: - if len(output_filenames) == 1: - output_filename = output_filenames[0] - else: - last_time = None - for _output_filename in output_filenames: - full_path = os.path.join(workdir, _output_filename) - mod_time = os.path.getmtime(full_path) - if last_time is None or last_time < mod_time: - output_filename = _output_filename - last_time = mod_time + # Use file modification time to use most recent file if there are + # multiple workfiles with the same version + output_filepath = None + last_time = None + for _output_filepath in output_filepaths: + mod_time = None + if os.path.exists(_output_filepath): + mod_time = os.path.getmtime(_output_filepath) + if ( + last_time is None + or (mod_time is not None and last_time < mod_time) + ): + output_filepath = _output_filepath + last_time = mod_time - return output_filename, version + return output_filepath, version -def get_last_workfile( - workdir, file_template, fill_data, extensions, full_path=False -): - """Return last workfile filename. +def get_last_workfile_from_paths( + filepaths: list[str], + file_template: str, + template_data: dict[str, Any], + extensions: set[str], +) -> Optional[str]: + """Return the last workfile filename. - Returns file with version 1 if there is not workfile yet. + Returns the file with version 1 if there is not workfile yet. + + Args: + filepaths (list[str]): Paths to workfiles. + file_template (str): Template of file name. + template_data (dict[str, Any]): Data for filling template. + extensions (set[str]): All allowed file extensions of workfile. 
+ + Returns: + Optional[str]: Last workfile path. + + """ + filepath, _version = get_last_workfile_with_version_from_paths( + filepaths, file_template, template_data, extensions + ) + return filepath + + +def _filter_dir_files_by_ext( + dirpath: str, + extensions: set[str], +) -> tuple[list[str], set[str]]: + """Filter files by extensions. + + Args: + dirpath (str): List of file paths. + extensions (set[str]): Set of file extensions. + + Returns: + tuple[list[str], set[str]]: Filtered list of file paths. + + """ + dotted_extensions = set() + for ext in extensions: + if not ext.startswith("."): + ext = f".{ext}" + dotted_extensions.add(ext) + + if not os.path.exists(dirpath): + return [], dotted_extensions + + filtered_paths = [ + os.path.join(dirpath, filename) + for filename in os.listdir(dirpath) + if os.path.splitext(filename)[-1] in dotted_extensions + ] + return filtered_paths, dotted_extensions + + +def get_last_workfile_with_version( + workdir: str, + file_template: str, + template_data: dict[str, Any], + extensions: set[str], +) -> tuple[Optional[str], Optional[int]]: + """Return last workfile version. + + Using the workfile template and its filling data to find the most possible + last version of workfile which was created for the context. + + Functionality is fully based on knowing which keys are optional or what + values are expected as value. + + The last modified file is used if more files can be considered as + last workfile. Args: workdir (str): Path to dir where workfiles are stored. file_template (str): Template of file name. - fill_data (Dict[str, Any]): Data for filling template. - extensions (Iterable[str]): All allowed file extensions of workfile. - full_path (Optional[bool]): Full path to file is returned if - set to True. + template_data (dict[str, Any]): Data for filling template. + extensions (set[str]): All allowed file extensions of workfile. Returns: - str: Last or first workfile as filename of full path to filename. + tuple[Optional[str], Optional[int]]: Last workfile with version + if there is any workfile otherwise None for both. """ - filename, _version = get_last_workfile_with_version( - workdir, file_template, fill_data, extensions + if not os.path.exists(workdir): + return None, None + + filepaths, dotted_extensions = _filter_dir_files_by_ext( + workdir, extensions ) - if filename is None: - data = copy.deepcopy(fill_data) + + return get_last_workfile_with_version_from_paths( + filepaths, + file_template, + template_data, + dotted_extensions, + ) + + +def get_last_workfile( + workdir: str, + file_template: str, + template_data: dict[str, Any], + extensions: set[str], + full_path: bool = False, +) -> str: + """Return last the workfile filename. + + Returns first file name/path if there are not workfiles yet. + + Args: + workdir (str): Path to dir where workfiles are stored. + file_template (str): Template of file name. + template_data (Dict[str, Any]): Data for filling template. + extensions (Iterable[str]): All allowed file extensions of workfile. + full_path (bool): Return full path to the file or only filename. + + Returns: + str: Last or first workfile file name or path based on + 'full_path' value. + + """ + # TODO (iLLiCiTiT): Remove the argument 'full_path' and return only full + # path. As far as I can tell it is always called with 'full_path' set + # to 'True'. + # - it has to be 2 step operation, first warn about having it 'False', and + # then warn about having it filled. 
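# Hedged usage of the new path-based helper above; the template and file
# paths are made up. Only 'task' data is needed here because 'version',
# 'comment' and 'ext' are converted into regex groups by the helper.
file_template = "{task[name]}_v{version:0>3}<_{comment}>.{ext}"
template_data = {"task": {"name": "modeling"}}
filepaths = [
    "/work/modeling_v001.ma",
    "/work/modeling_v002_cleanup.ma",
]
last_path, last_version = get_last_workfile_with_version_from_paths(
    filepaths, file_template, template_data, {"ma"}
)
print(last_path, last_version)  # "/work/modeling_v002_cleanup.ma", 2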
+ if full_path is False: + warnings.warn( + "Argument 'full_path' will be removed and will return" + " only full path in future.", + DeprecationWarning, + ) + + filepaths, dotted_extensions = _filter_dir_files_by_ext( + workdir, extensions + ) + filepath = get_last_workfile_from_paths( + filepaths, + file_template, + template_data, + dotted_extensions + ) + if filepath is None: + data = copy.deepcopy(template_data) data["version"] = version_start.get_versioning_start( data["project"]["name"], data["app"], @@ -344,15 +598,15 @@ def get_last_workfile( product_type="workfile" ) data.pop("comment", None) - if not data.get("ext"): - data["ext"] = extensions[0] + if data.get("ext") is None: + data["ext"] = next(iter(extensions), "") data["ext"] = data["ext"].lstrip(".") filename = StringTemplate.format_strict_template(file_template, data) + filepath = os.path.join(workdir, filename) if full_path: - return os.path.normpath(os.path.join(workdir, filename)) - - return filename + return os.path.normpath(filepath) + return os.path.basename(filepath) def get_custom_workfile_template( @@ -389,11 +643,10 @@ def get_custom_workfile_template( project_settings(Dict[str, Any]): Preloaded project settings. Returns: - str: Path to template or None if none of profiles match current - context. Existence of formatted path is not validated. - None: If no profile is matching context. - """ + Optional[str]: Path to template or None if none of profiles match + current context. Existence of formatted path is not validated. + """ log = Logger.get_logger("CustomWorkfileResolve") project_name = project_entity["name"] @@ -562,3 +815,112 @@ def create_workdir_extra_folders( fullpath = os.path.join(workdir, subfolder) if not os.path.exists(fullpath): os.makedirs(fullpath) + + +class CommentMatcher: + """Use anatomy and work file data to parse comments from filenames. + + Args: + extensions (set[str]): Set of extensions. + file_template (StringTemplate): Workfile file template. + data (dict[str, Any]): Data to fill the template with. + + """ + def __init__( + self, + extensions: set[str], + file_template: StringTemplate, + data: dict[str, Any] + ): + warnings.warn( + "Class 'CommentMatcher' is deprecated. 
Please" + " use 'parse_dynamic_data_from_workfiles' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._fname_regex = None + + if "{comment}" not in file_template: + # Don't look for comment if template doesn't allow it + return + + # Create a regex group for extensions + any_extension = "(?:{})".format( + "|".join(re.escape(ext.lstrip(".")) for ext in extensions) + ) + + # Use placeholders that will never be in the filename + temp_data = copy.deepcopy(data) + temp_data["comment"] = "<>" + temp_data["version"] = "<>" + temp_data["ext"] = "<>" + + fname_pattern = re.escape( + file_template.format_strict(temp_data) + ) + + # Replace comment and version with something we can match with regex + replacements = ( + ("<>", r"(?P.+)"), + ("<>", r"[0-9]+"), + ("<>", any_extension), + ) + for src, dest in replacements: + fname_pattern = fname_pattern.replace(re.escape(src), dest) + + # Match from beginning to end of string to be safe + self._fname_regex = re.compile(f"^{fname_pattern}$") + + def parse_comment(self, filename: str) -> Optional[str]: + """Parse the {comment} part from a filename.""" + if self._fname_regex: + match = self._fname_regex.match(filename) + if match: + return match.group("comment") + return None + + +def get_comments_from_workfile_paths( + filepaths: list[str], + extensions: set[str], + file_template: StringTemplate, + template_data: dict[str, Any], + current_filename: Optional[str] = None, +) -> tuple[list[str], str]: + """DEPRECATED Collect comments from workfile filenames. + + Based on 'current_filename' is also returned "current comment". + + Args: + filepaths (list[str]): List of filepaths to parse. + extensions (set[str]): Set of file extensions. + file_template (StringTemplate): Workfile file template. + template_data (dict[str, Any]): Data to fill the template with. + current_filename (str): Filename to check for the current comment. + + Returns: + tuple[list[str], str]: List of comments and the current comment. + + """ + warnings.warn( + "Function 'get_comments_from_workfile_paths' is deprecated. 
Please" + " use 'parse_dynamic_data_from_workfiles' instead.", + DeprecationWarning, + stacklevel=2, + ) + current_comment = "" + if not filepaths: + return [], current_comment + + matcher = CommentMatcher(extensions, file_template, template_data) + + comment_hints = set() + for filepath in filepaths: + filename = os.path.basename(filepath) + comment = matcher.parse_comment(filename) + if comment: + comment_hints.add(comment) + if filename == current_filename: + current_comment = comment + + return list(comment_hints), current_comment diff --git a/client/ayon_core/pipeline/workfile/utils.py b/client/ayon_core/pipeline/workfile/utils.py index 25be061dec..c2b6fad660 100644 --- a/client/ayon_core/pipeline/workfile/utils.py +++ b/client/ayon_core/pipeline/workfile/utils.py @@ -1,5 +1,30 @@ -from ayon_core.lib import filter_profiles +from __future__ import annotations +import os +import platform +import uuid +import typing +from typing import Optional, Any + +import ayon_api +from ayon_api.operations import OperationsSession + +from ayon_core.lib import filter_profiles, get_ayon_username from ayon_core.settings import get_project_settings +from ayon_core.host.interfaces import ( + SaveWorkfileOptionalData, + ListWorkfilesOptionalData, + CopyWorkfileOptionalData, +) +from ayon_core.pipeline.version_start import get_versioning_start +from ayon_core.pipeline.template_data import get_template_data + +from .path_resolving import ( + get_workdir, + get_workfile_template_key, +) + +if typing.TYPE_CHECKING: + from ayon_core.pipeline import Anatomy class MissingWorkdirError(Exception): @@ -7,14 +32,61 @@ class MissingWorkdirError(Exception): pass +def get_workfiles_info( + workfile_path: str, + project_name: str, + task_id: str, + *, + anatomy: Optional["Anatomy"] = None, + workfile_entities: Optional[list[dict[str, Any]]] = None, +) -> Optional[dict[str, Any]]: + """Find workfile info entity for a workfile path. + + Args: + workfile_path (str): Workfile path. + project_name (str): The name of the project. + task_id (str): Task id under which is workfile created. + anatomy (Optional[Anatomy]): Project anatomy used to get roots. + workfile_entities (Optional[list[dict[str, Any]]]): Pre-fetched + workfile entities related to the task. + + Returns: + Optional[dict[str, Any]]: Workfile info entity if found, otherwise + `None`. + + """ + if anatomy is None: + anatomy = Anatomy(project_name) + + if workfile_entities is None: + workfile_entities = list(ayon_api.get_workfiles_info( + project_name, + task_ids=[task_id], + )) + + if platform.system().lower() == "windows": + workfile_path = workfile_path.replace("\\", "/") + workfile_path = workfile_path.lower() + + for workfile_entity in workfile_entities: + path = workfile_entity["path"] + filled_path = anatomy.fill_root(path) + if platform.system().lower() == "windows": + filled_path = filled_path.replace("\\", "/") + filled_path = filled_path.lower() + if filled_path == workfile_path: + return workfile_entity + return None + + def should_use_last_workfile_on_launch( - project_name, - host_name, - task_name, - task_type, - default_output=False, - project_settings=None, -): + project_name: str, + host_name: str, + task_name: str, + task_type: str, + default_output: bool = False, + project_settings: Optional[dict[str, Any]] = None, +) -> bool: """Define if host should start last version workfile if possible. Default output is `False`. 
Can be overridden with environment variable @@ -124,3 +196,618 @@ def should_open_workfiles_tool_on_launch( if output is None: return default_output return output + + +def save_workfile_info( + project_name: str, + task_id: str, + rootless_path: str, + host_name: str, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + username: Optional[str] = None, + data: Optional[dict[str, Any]] = None, + workfile_entities: Optional[list[dict[str, Any]]] = None, +) -> dict[str, Any]: + """Save workfile info entity for a workfile path. + + Args: + project_name (str): The name of the project. + task_id (str): Task id under which is workfile created. + rootless_path (str): Rootless path of the workfile. + host_name (str): Name of host which is saving the workfile. + version (Optional[int]): Workfile version. + comment (Optional[str]): Workfile comment. + description (Optional[str]): Workfile description. + username (Optional[str]): Username of user who saves the workfile. + If not provided, current user is used. + data (Optional[dict[str, Any]]): Additional workfile entity data. + workfile_entities (Optional[list[dict[str, Any]]]): Pre-fetched + workfile entities related to task. + + Returns: + dict[str, Any]: Workfile info entity. + + """ + if workfile_entities is None: + workfile_entities = list(ayon_api.get_workfiles_info( + project_name, + task_ids=[task_id], + )) + + workfile_entity = next( + ( + _ent + for _ent in workfile_entities + if _ent["path"] == rootless_path + ), + None + ) + + if username is None: + username = get_ayon_username() + + attrib = {} + extension = os.path.splitext(rootless_path)[1] + for key, value in ( + ("extension", extension), + ("description", description), + ): + if value is not None: + attrib[key] = value + + if data is None: + data = {} + + if not workfile_entity: + return _create_workfile_info_entity( + project_name, + task_id, + host_name, + rootless_path, + username, + version, + comment, + attrib, + data, + ) + + for key, value in ( + ("host_name", host_name), + ("version", version), + ("comment", comment), + ): + if value is not None: + data[key] = value + + changed_data = {} + old_data = workfile_entity["data"] + for key, value in data.items(): + if key not in old_data or old_data[key] != value: + changed_data[key] = value + workfile_entity["data"][key] = value + + changed_attrib = {} + old_attrib = workfile_entity["attrib"] + for key, value in attrib.items(): + if key not in old_attrib or old_attrib[key] != value: + changed_attrib[key] = value + workfile_entity["attrib"][key] = value + + update_data = {} + if changed_data: + update_data["data"] = changed_data + + if changed_attrib: + update_data["attrib"] = changed_attrib + + # Automatically fix 'createdBy' and 'updatedBy' fields + # NOTE both fields were not automatically filled by server + # until 1.1.3 release. 
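# Hedged usage sketch for the 'save_workfile_info' helper in this hunk
# (project name, task id and rootless path are made-up values):
workfile_entity = save_workfile_info(
    project_name="demo_project",
    task_id="a1b2c3d4e5f6",
    rootless_path="{root[work]}/sh010/work/modeling/sh010_modeling_v002.ma",
    host_name="maya",
    version=2,
    comment="cleanup",
    description="Removed unused groups",
)
# Calling it again with the same rootless path updates the existing entity
# (attrib, data, createdBy/updatedBy) instead of creating a duplicate.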
+ if workfile_entity.get("createdBy") is None: + update_data["createdBy"] = username + workfile_entity["createdBy"] = username + + if workfile_entity.get("updatedBy") != username: + update_data["updatedBy"] = username + workfile_entity["updatedBy"] = username + + if not update_data: + return workfile_entity + + session = OperationsSession() + session.update_entity( + project_name, + "workfile", + workfile_entity["id"], + update_data, + ) + session.commit() + return workfile_entity + + +def save_current_workfile_to( + workfile_path: str, + folder_path: str, + task_name: str, + *, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + prepared_data: Optional[SaveWorkfileOptionalData] = None, +) -> None: + """Save current workfile to new location or context. + + Args: + workfile_path (str): Destination workfile path. + folder_path (str): Target folder path. + task_name (str): Target task name. + version (Optional[int]): Workfile version. + comment (optional[str]): Workfile comment. + description (Optional[str]): Workfile description. + prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data + for speed enhancements. + + """ + from ayon_core.pipeline.context_tools import registered_host + + host = registered_host() + context = host.get_current_context() + project_name = context["project_name"] + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + host.save_workfile_with_context( + workfile_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + prepared_data=prepared_data, + ) + + +def save_workfile_with_current_context( + workfile_path: str, + *, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + prepared_data: Optional[SaveWorkfileOptionalData] = None, +) -> None: + """Save current workfile to new location using current context. + + Helper function to save workfile using current context. Calls + 'save_current_workfile_to' at the end. + + Args: + workfile_path (str): Destination workfile path. + version (Optional[int]): Workfile version. + comment (optional[str]): Workfile comment. + description (Optional[str]): Workfile description. + prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data + for speed enhancements. + + """ + from ayon_core.pipeline.context_tools import registered_host + + host = registered_host() + context = host.get_current_context() + project_name = context["project_name"] + folder_path = context["folder_path"] + task_name = context["task_name"] + folder_entity = task_entity = None + if folder_path: + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + if folder_entity and task_name: + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + + host.save_workfile_with_context( + workfile_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + prepared_data=prepared_data, + ) + + +def save_next_version( + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + *, + prepared_data: Optional[SaveWorkfileOptionalData] = None, +) -> None: + """Save workfile using current context, version and comment. + + Helper function to save a workfile using the current context. Last + workfile version + 1 is used if is not passed in. 
+ + Args: + version (Optional[int]): Workfile version that will be used. Last + version + 1 is used if is not passed in. + comment (optional[str]): Workfile comment. Pass '""' to clear comment. + The current workfile comment is used if it is not passed. + description (Optional[str]): Workfile description. + prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data + for speed enhancements. + + """ + from ayon_core.pipeline import Anatomy + from ayon_core.pipeline.context_tools import registered_host + + host = registered_host() + current_path = host.get_current_workfile() + if not current_path: + current_path = None + else: + current_path = os.path.normpath(current_path) + + context = host.get_current_context() + project_name = context["project_name"] + folder_path = context["folder_path"] + task_name = context["task_name"] + if prepared_data is None: + prepared_data = SaveWorkfileOptionalData() + + project_entity = prepared_data.project_entity + anatomy = prepared_data.anatomy + project_settings = prepared_data.project_settings + + if project_entity is None: + project_entity = ayon_api.get_project(project_name) + prepared_data.project_entity = project_entity + + if project_settings is None: + project_settings = get_project_settings(project_name) + prepared_data.project_settings = project_settings + + if anatomy is None: + anatomy = Anatomy(project_name, project_entity=project_entity) + prepared_data.anatomy = anatomy + + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + + template_key = get_workfile_template_key( + project_name, + task_entity["taskType"], + host.name, + project_settings=project_settings + ) + file_template = anatomy.get_template_item("work", template_key, "file") + template_data = get_template_data( + project_entity, + folder_entity, + task_entity, + host.name, + project_settings, + ) + workdir = get_workdir( + project_entity, + folder_entity, + task_entity, + host.name, + anatomy=anatomy, + template_key=template_key, + project_settings=project_settings, + ) + rootless_dir = workdir.rootless + last_workfile = None + current_workfile = None + if version is None or comment is None: + workfiles = host.list_workfiles( + project_name, folder_entity, task_entity, + prepared_data=ListWorkfilesOptionalData( + project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + template_key=template_key, + ) + ) + for workfile in workfiles: + if current_workfile is None and workfile.filepath == current_path: + current_workfile = workfile + + if workfile.version is None: + continue + + if ( + last_workfile is None + or last_workfile.version < workfile.version + ): + last_workfile = workfile + + if version is None and last_workfile is not None: + version = last_workfile.version + 1 + + if version is None: + version = get_versioning_start( + project_name, + host.name, + task_name=task_entity["name"], + task_type=task_entity["taskType"], + product_type="workfile" + ) + + # Re-use comment from the current workfile if is not passed in + if comment is None and current_workfile is not None: + comment = current_workfile.comment + + template_data["version"] = version + if comment: + template_data["comment"] = comment + + # Resolve extension + # - Don't fill any if the host does not have defined any -> e.g. if host + # uses directory instead of a file. + # 1. Use the current file extension. + # 2. Use the last known workfile extension. + # 3. 
Use the first extensions from 'get_workfile_extensions'. + ext = None + workfile_extensions = host.get_workfile_extensions() + if workfile_extensions: + if current_path: + ext = os.path.splitext(current_path)[1] + elif last_workfile is not None: + ext = os.path.splitext(last_workfile.filepath)[1] + else: + ext = next(iter(workfile_extensions)) + ext = ext.lstrip(".") + + if ext: + template_data["ext"] = ext + + filename = file_template.format_strict(template_data) + workfile_path = os.path.join(workdir, filename) + rootless_path = f"{rootless_dir}/{filename}" + if platform.system().lower() == "windows": + rootless_path = rootless_path.replace("\\", "/") + prepared_data.rootless_path = rootless_path + + host.save_workfile_with_context( + workfile_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + prepared_data=prepared_data, + ) + + +def copy_workfile_to_context( + src_workfile_path: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + open_workfile: bool = True, + prepared_data: Optional[CopyWorkfileOptionalData] = None, +) -> None: + """Copy workfile to a context. + + Copy workfile to a specified folder and task. Destination path is + calculated based on passed information. + + Args: + src_workfile_path (str): Source workfile path. + folder_entity (dict[str, Any]): Target folder entity. + task_entity (dict[str, Any]): Target task entity. + version (Optional[int]): Workfile version. Use next version if not + passed. + comment (optional[str]): Workfile comment. + description (Optional[str]): Workfile description. + prepared_data (Optional[CopyWorkfileOptionalData]): Prepared data + for speed enhancements. Rootless path is calculated in this + function. 
+ + """ + from ayon_core.pipeline import Anatomy + from ayon_core.pipeline.context_tools import registered_host + + host = registered_host() + project_name = host.get_current_project_name() + + anatomy = prepared_data.anatomy + if anatomy is None: + if prepared_data.project_entity is None: + prepared_data.project_entity = ayon_api.get_project( + project_name + ) + anatomy = Anatomy( + project_name, project_entity=prepared_data.project_entity + ) + prepared_data.anatomy = anatomy + + project_settings = prepared_data.project_settings + if project_settings is None: + project_settings = get_project_settings(project_name) + prepared_data.project_settings = project_settings + + if version is None: + list_prepared_data = None + if prepared_data is not None: + list_prepared_data = ListWorkfilesOptionalData( + project_entity=prepared_data.project_entity, + anatomy=prepared_data.anatomy, + project_settings=prepared_data.project_settings, + workfile_entities=prepared_data.workfile_entities, + ) + + workfiles = host.list_workfiles( + project_name, + folder_entity, + task_entity, + prepared_data=list_prepared_data + ) + if workfiles: + version = max( + workfile.version + for workfile in workfiles + ) + 1 + else: + version = get_versioning_start( + project_name, + host.name, + task_name=task_entity["name"], + task_type=task_entity["taskType"], + product_type="workfile" + ) + + task_type = task_entity["taskType"] + template_key = get_workfile_template_key( + project_name, + task_type, + host.name, + project_settings=prepared_data.project_settings + ) + + template_data = get_template_data( + prepared_data.project_entity, + folder_entity, + task_entity, + host.name, + prepared_data.project_settings, + ) + template_data["version"] = version + if comment: + template_data["comment"] = comment + + workfile_extensions = host.get_workfile_extensions() + if workfile_extensions: + ext = os.path.splitext(src_workfile_path)[1].lstrip(".") + template_data["ext"] = ext + + workfile_template = anatomy.get_template_item( + "work", template_key, "path" + ) + workfile_path = workfile_template.format_strict(template_data) + prepared_data.rootless_path = workfile_path.rootless + host.copy_workfile( + src_workfile_path, + workfile_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + prepared_data=prepared_data, + ) + + +def find_workfile_rootless_path( + workfile_path: str, + project_name: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + host_name: str, + *, + project_entity: Optional[dict[str, Any]] = None, + project_settings: Optional[dict[str, Any]] = None, + anatomy: Optional["Anatomy"] = None, +) -> str: + """Find rootless workfile path.""" + if anatomy is None: + from ayon_core.pipeline import Anatomy + + anatomy = Anatomy(project_name, project_entity=project_entity) + + task_type = task_entity["taskType"] + template_key = get_workfile_template_key( + project_name, + task_type, + host_name, + project_settings=project_settings + ) + dir_template = anatomy.get_template_item( + "work", template_key, "directory" + ) + result = dir_template.format({"root": anatomy.roots}) + used_root = result.used_values.get("root") + rootless_path = str(workfile_path) + if platform.system().lower() == "windows": + rootless_path = rootless_path.replace("\\", "/") + + root_key = root_value = None + if used_root is not None: + root_key, root_value = next(iter(used_root.items())) + if platform.system().lower() == "windows": + root_value = 
root_value.replace("\\", "/") + + if root_value and rootless_path.startswith(root_value): + rootless_path = rootless_path[len(root_value):].lstrip("/") + rootless_path = f"{{root[{root_key}]}}/{rootless_path}" + else: + success, result = anatomy.find_root_template_from_path(rootless_path) + if success: + rootless_path = result + return rootless_path + + +def _create_workfile_info_entity( + project_name: str, + task_id: str, + host_name: str, + rootless_path: str, + username: str, + version: Optional[int], + comment: Optional[str], + attrib: dict[str, Any], + data: dict[str, Any], +) -> dict[str, Any]: + """Create workfile entity data. + + Args: + project_name (str): Project name. + task_id (str): Task id. + host_name (str): Host name. + rootless_path (str): Rootless workfile path. + username (str): Username. + version (Optional[int]): Workfile version. + comment (Optional[str]): Workfile comment. + attrib (dict[str, Any]): Workfile entity attributes. + data (dict[str, Any]): Workfile entity data. + + Returns: + dict[str, Any]: Created workfile entity data. + + """ + data.update({ + "host_name": host_name, + "version": version, + "comment": comment, + }) + + workfile_info = { + "id": uuid.uuid4().hex, + "path": rootless_path, + "taskId": task_id, + "attrib": attrib, + "data": data, + # TODO remove 'createdBy' and 'updatedBy' fields when server is + # or above 1.1.3 . + "createdBy": username, + "updatedBy": username, + } + + session = OperationsSession() + session.create_entity( + project_name, "workfile", workfile_info + ) + session.commit() + return workfile_info diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 8cea7de86b..52e27baa80 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -16,6 +16,7 @@ import re import collections import copy from abc import ABC, abstractmethod +from typing import Optional import ayon_api from ayon_api import ( @@ -29,7 +30,7 @@ from ayon_api import ( ) from ayon_core.settings import get_project_settings -from ayon_core.host import IWorkfileHost, HostBase +from ayon_core.host import IWorkfileHost, AbstractHost from ayon_core.lib import ( Logger, StringTemplate, @@ -53,7 +54,6 @@ from ayon_core.pipeline.plugin_discover import ( ) from ayon_core.pipeline.create import ( - discover_legacy_creator_plugins, CreateContext, HiddenCreator, ) @@ -126,15 +126,14 @@ class AbstractTemplateBuilder(ABC): placeholder population. Args: - host (Union[HostBase, ModuleType]): Implementation of host. + host (Union[AbstractHost, ModuleType]): Implementation of host. 
""" _log = None - use_legacy_creators = False def __init__(self, host): # Get host name - if isinstance(host, HostBase): + if isinstance(host, AbstractHost): host_name = host.name else: host_name = os.environ.get("AYON_HOST_NAME") @@ -162,24 +161,24 @@ class AbstractTemplateBuilder(ABC): @property def project_name(self): - if isinstance(self._host, HostBase): + if isinstance(self._host, AbstractHost): return self._host.get_current_project_name() return os.getenv("AYON_PROJECT_NAME") @property def current_folder_path(self): - if isinstance(self._host, HostBase): + if isinstance(self._host, AbstractHost): return self._host.get_current_folder_path() return os.getenv("AYON_FOLDER_PATH") @property def current_task_name(self): - if isinstance(self._host, HostBase): + if isinstance(self._host, AbstractHost): return self._host.get_current_task_name() return os.getenv("AYON_TASK_NAME") def get_current_context(self): - if isinstance(self._host, HostBase): + if isinstance(self._host, AbstractHost): return self._host.get_current_context() return { "project_name": self.project_name, @@ -201,12 +200,6 @@ class AbstractTemplateBuilder(ABC): ) return self._current_folder_entity - @property - def linked_folder_entities(self): - if self._linked_folder_entities is _NOT_SET: - self._linked_folder_entities = self._get_linked_folder_entities() - return self._linked_folder_entities - @property def current_task_entity(self): if self._current_task_entity is _NOT_SET: @@ -261,7 +254,7 @@ class AbstractTemplateBuilder(ABC): """Access to host implementation. Returns: - Union[HostBase, ModuleType]: Implementation of host. + Union[AbstractHost, ModuleType]: Implementation of host. """ return self._host @@ -307,13 +300,16 @@ class AbstractTemplateBuilder(ABC): self._loaders_by_name = get_loaders_by_name() return self._loaders_by_name - def _get_linked_folder_entities(self): + def get_linked_folder_entities(self, link_type: Optional[str]): + if not link_type: + return [] project_name = self.project_name folder_entity = self.current_folder_entity if not folder_entity: return [] links = get_folder_links( - project_name, folder_entity["id"], link_direction="in" + project_name, + folder_entity["id"], link_types=[link_type], link_direction="in" ) linked_folder_ids = { link["entityId"] @@ -323,19 +319,6 @@ class AbstractTemplateBuilder(ABC): return list(get_folders(project_name, folder_ids=linked_folder_ids)) - def _collect_legacy_creators(self): - creators_by_name = {} - for creator in discover_legacy_creator_plugins(): - if not creator.enabled: - continue - creator_name = creator.__name__ - if creator_name in creators_by_name: - raise KeyError( - "Duplicated creator name {} !".format(creator_name) - ) - creators_by_name[creator_name] = creator - self._creators_by_name = creators_by_name - def _collect_creators(self): self._creators_by_name = { identifier: creator @@ -347,10 +330,7 @@ class AbstractTemplateBuilder(ABC): def get_creators_by_name(self): if self._creators_by_name is None: - if self.use_legacy_creators: - self._collect_legacy_creators() - else: - self._collect_creators() + self._collect_creators() return self._creators_by_name @@ -631,7 +611,7 @@ class AbstractTemplateBuilder(ABC): """Open template file with registered host.""" template_preset = self.get_template_preset() template_path = template_preset["path"] - self.host.open_file(template_path) + self.host.open_workfile(template_path) @abstractmethod def import_template(self, template_path): @@ -1429,10 +1409,27 @@ class PlaceholderLoadMixin(object): 
builder_type_enum_items = [ {"label": "Current folder", "value": "context_folder"}, - # TODO implement linked folders - # {"label": "Linked folders", "value": "linked_folders"}, + {"label": "Linked folders", "value": "linked_folders"}, {"label": "All folders", "value": "all_folders"}, ] + + link_types = ayon_api.get_link_types(self.builder.project_name) + + # Filter link types for folder to folder links + link_types_enum_items = [ + {"label": link_type["name"], "value": link_type["linkType"]} + for link_type in link_types + if ( + link_type["inputType"] == "folder" + and link_type["outputType"] == "folder" + ) + ] + + if not link_types_enum_items: + link_types_enum_items.append( + {"label": "", "value": None} + ) + build_type_label = "Folder Builder Type" build_type_help = ( "Folder Builder Type\n" @@ -1461,6 +1458,16 @@ class PlaceholderLoadMixin(object): items=builder_type_enum_items, tooltip=build_type_help ), + attribute_definitions.EnumDef( + "link_type", + label="Link Type", + items=link_types_enum_items, + tooltip=( + "Link Type\n" + "\nDefines what type of link will be used to" + " link the asset to the current folder." + ) + ), attribute_definitions.EnumDef( "product_type", label="Product type", @@ -1607,10 +1614,7 @@ class PlaceholderLoadMixin(object): builder_type = placeholder.data["builder_type"] folder_ids = [] - if builder_type == "context_folder": - folder_ids = [current_folder_entity["id"]] - - elif builder_type == "all_folders": + if builder_type == "all_folders": folder_ids = { folder_entity["id"] for folder_entity in get_folders( @@ -1620,6 +1624,23 @@ class PlaceholderLoadMixin(object): ) } + elif builder_type == "context_folder": + folder_ids = [current_folder_entity["id"]] + + elif builder_type == "linked_folders": + # link type from placeholder data or default to "template" + link_type = placeholder.data.get("link_type", "template") + # Get all linked folders for the current folder + if hasattr(self, "builder") and isinstance( + self.builder, AbstractTemplateBuilder): + # self.builder: AbstractTemplateBuilder + folder_ids = [ + linked_folder_entity["id"] + for linked_folder_entity in ( + self.builder.get_linked_folder_entities( + link_type=link_type)) + ] + if not folder_ids: return [] @@ -1899,8 +1920,6 @@ class PlaceholderCreateMixin(object): pre_create_data (dict): dictionary of configuration from Creator configuration in UI """ - - legacy_create = self.builder.use_legacy_creators creator_name = placeholder.data["creator"] create_variant = placeholder.data["create_variant"] active = placeholder.data.get("active") @@ -1940,20 +1959,14 @@ class PlaceholderCreateMixin(object): # compile product name from variant try: - if legacy_create: - creator_instance = creator_plugin( - product_name, - folder_path - ).process() - else: - creator_instance = self.builder.create_context.create( - creator_plugin.identifier, - create_variant, - folder_entity, - task_entity, - pre_create_data=pre_create_data, - active=active - ) + creator_instance = self.builder.create_context.create( + creator_plugin.identifier, + create_variant, + folder_entity, + task_entity, + pre_create_data=pre_create_data, + active=active + ) except: # noqa: E722 failed = True diff --git a/client/ayon_core/plugins/load/create_hero_version.py b/client/ayon_core/plugins/load/create_hero_version.py new file mode 100644 index 0000000000..aef0cf8863 --- /dev/null +++ b/client/ayon_core/plugins/load/create_hero_version.py @@ -0,0 +1,630 @@ +"""Plugin to create hero version from selected context.""" +from __future__ 
import annotations +import os +import copy +import shutil +import errno +import itertools +from concurrent.futures import ThreadPoolExecutor +from typing import Any, Optional + +from speedcopy import copyfile +import clique +import ayon_api +from ayon_api.operations import OperationsSession, new_version_entity +from ayon_api.utils import create_entity_id +from qtpy import QtWidgets, QtCore +from ayon_core import style +from ayon_core.pipeline import load, Anatomy +from ayon_core.lib import create_hard_link, source_hash, StringTemplate +from ayon_core.lib.file_transaction import wait_for_future_errors +from ayon_core.pipeline.publish import get_publish_template_name +from ayon_core.pipeline.template_data import get_template_data + + +def prepare_changes(old_entity: dict, new_entity: dict) -> dict: + """Prepare changes dict for update entity operation. + + Args: + old_entity (dict): Existing entity data from database. + new_entity (dict): New entity data to compare against old. + + Returns: + dict: Changes to apply to old entity to make it like new entity. + + """ + changes = {} + for key in set(new_entity.keys()): + if key == "attrib": + continue + if key in new_entity and new_entity[key] != old_entity.get(key): + changes[key] = new_entity[key] + attrib_changes = {} + if "attrib" in new_entity: + for key, value in new_entity["attrib"].items(): + if value != old_entity["attrib"].get(key): + attrib_changes[key] = value + if attrib_changes: + changes["attrib"] = attrib_changes + return changes + + +class CreateHeroVersion(load.ProductLoaderPlugin): + """Create hero version from selected context.""" + + is_multiple_contexts_compatible = False + representations = {"*"} + product_types = {"*"} + label = "Create Hero Version" + order = 36 + icon = "star" + color = "#ffd700" + + ignored_representation_names: list[str] = [] + db_representation_context_keys = [ + "project", "folder", "asset", "hierarchy", "task", "product", + "subset", "family", "representation", "username", "user", "output" + ] + use_hardlinks = False + + @staticmethod + def message(text: str) -> None: + """Show message box with text.""" + msgBox = QtWidgets.QMessageBox() + msgBox.setText(text) + msgBox.setStyleSheet(style.load_stylesheet()) + msgBox.setWindowFlags( + msgBox.windowFlags() | QtCore.Qt.WindowType.FramelessWindowHint + ) + msgBox.exec_() + + def load(self, context, name=None, namespace=None, options=None) -> None: + """Load hero version from context (dict as in context.py).""" + success = True + errors = [] + + # Extract project, product, version, folder from context + project = context.get("project") + product = context.get("product") + version = context.get("version") + folder = context.get("folder") + task_entity = ayon_api.get_task_by_id( + task_id=version.get("taskId"), project_name=project["name"] + ) + + anatomy = Anatomy(project["name"]) + + version_id = version["id"] + project_name = project["name"] + repres = list( + ayon_api.get_representations( + project_name, version_ids={version_id} + ) + ) + anatomy_data = get_template_data( + project_entity=project, + folder_entity=folder, + task_entity=task_entity, + ) + anatomy_data["product"] = { + "name": product["name"], + "type": product["productType"], + } + anatomy_data["version"] = version["version"] + published_representations = {} + for repre in repres: + repre_anatomy = copy.deepcopy(anatomy_data) + if "ext" not in repre_anatomy: + repre_anatomy["ext"] = repre.get("context", {}).get("ext", "") + published_representations[repre["id"]] = { + "representation": 
repre, + "published_files": [f["path"] for f in repre.get("files", [])], + "anatomy_data": repre_anatomy + } + # get the publish directory + publish_template_key = get_publish_template_name( + project_name, + context.get("hostName"), + product["productType"], + task_name=anatomy_data.get("task", {}).get("name"), + task_type=anatomy_data.get("task", {}).get("type"), + project_settings=context.get("project_settings", {}), + logger=self.log + ) + published_template_obj = anatomy.get_template_item( + "publish", publish_template_key, "directory" + ) + published_dir = os.path.normpath( + published_template_obj.format_strict(anatomy_data) + ) + instance_data = { + "productName": product["name"], + "productType": product["productType"], + "anatomyData": anatomy_data, + "publishDir": published_dir, + "published_representations": published_representations, + "versionEntity": version, + } + + try: + self.create_hero_version(instance_data, anatomy, context) + except Exception as exc: + success = False + errors.append(str(exc)) + if success: + self.message("Hero version created successfully.") + else: + self.message( + f"Failed to create hero version:\n{chr(10).join(errors)}") + + def create_hero_version( + self, + instance_data: dict[str, Any], + anatomy: Anatomy, + context: dict[str, Any]) -> None: + """Create hero version from instance data. + + Args: + instance_data (dict): Instance data with keys: + - productName (str): Name of the product. + - productType (str): Type of the product. + - anatomyData (dict): Anatomy data for templates. + - publishDir (str): Directory where the product is published. + - published_representations (dict): Published representations. + - versionEntity (dict, optional): Source version entity. + anatomy (Anatomy): Anatomy object for the project. + context (dict): Context data with keys: + - hostName (str): Name of the host application. + - project_settings (dict): Project settings. + + Raises: + RuntimeError: If any required data is missing or an error occurs + during the hero version creation process. + + """ + published_repres = instance_data.get("published_representations") + if not published_repres: + raise RuntimeError("No published representations found.") + + project_name = anatomy.project_name + template_key = get_publish_template_name( + project_name, + context.get("hostName"), + instance_data.get("productType"), + instance_data.get("anatomyData", {}).get("task", {}).get("name"), + instance_data.get("anatomyData", {}).get("task", {}).get("type"), + project_settings=context.get("project_settings", {}), + hero=True, + ) + hero_template = anatomy.get_template_item( + "hero", template_key, "path", default=None + ) + if hero_template is None: + raise RuntimeError("Project anatomy does not have hero " + f"template key: {template_key}") + + self.log.info(f"Hero template: {hero_template.template}") + + hero_publish_dir = self.get_publish_dir( + instance_data, anatomy, template_key + ) + + self.log.info(f"Hero publish dir: {hero_publish_dir}") + + src_version_entity = instance_data.get("versionEntity") + filtered_repre_ids = [] + for repre_id, repre_info in published_repres.items(): + repre = repre_info["representation"] + if repre["name"].lower() in self.ignored_representation_names: + filtered_repre_ids.append(repre_id) + for repre_id in filtered_repre_ids: + published_repres.pop(repre_id, None) + if not published_repres: + raise RuntimeError( + "All published representations were filtered by name." 
+ ) + + if src_version_entity is None: + src_version_entity = self.version_from_representations( + project_name, published_repres) + if not src_version_entity: + raise RuntimeError("Can't find origin version in database.") + if src_version_entity["version"] == 0: + raise RuntimeError("Version 0 cannot have hero version.") + + all_copied_files = [] + transfers = instance_data.get("transfers", []) + for _src, dst in transfers: + dst = os.path.normpath(dst) + if dst not in all_copied_files: + all_copied_files.append(dst) + hardlinks = instance_data.get("hardlinks", []) + for _src, dst in hardlinks: + dst = os.path.normpath(dst) + if dst not in all_copied_files: + all_copied_files.append(dst) + + all_repre_file_paths = [] + for repre_info in published_repres.values(): + published_files = repre_info.get("published_files") or [] + for file_path in published_files: + file_path = os.path.normpath(file_path) + if file_path not in all_repre_file_paths: + all_repre_file_paths.append(file_path) + + publish_dir = instance_data.get("publishDir", "") + if not publish_dir: + raise RuntimeError( + "publishDir is empty in instance_data, cannot continue." + ) + instance_publish_dir = os.path.normpath(publish_dir) + other_file_paths_mapping = [] + for file_path in all_copied_files: + if not file_path.startswith(instance_publish_dir): + continue + if file_path in all_repre_file_paths: + continue + dst_filepath = file_path.replace( + instance_publish_dir, hero_publish_dir + ) + other_file_paths_mapping.append((file_path, dst_filepath)) + + old_version, old_repres = self.current_hero_ents( + project_name, src_version_entity + ) + inactive_old_repres_by_name = {} + old_repres_by_name = {} + for repre in old_repres: + low_name = repre["name"].lower() + if repre["active"]: + old_repres_by_name[low_name] = repre + else: + inactive_old_repres_by_name[low_name] = repre + + op_session = OperationsSession() + entity_id = old_version["id"] if old_version else None + new_hero_version = new_version_entity( + -src_version_entity["version"], + src_version_entity["productId"], + task_id=src_version_entity.get("taskId"), + data=copy.deepcopy(src_version_entity["data"]), + attribs=copy.deepcopy(src_version_entity["attrib"]), + entity_id=entity_id, + ) + if old_version: + update_data = prepare_changes(old_version, new_hero_version) + op_session.update_entity( + project_name, "version", old_version["id"], update_data + ) + else: + op_session.create_entity(project_name, "version", new_hero_version) + + # Store hero entity to instance_data + instance_data["heroVersionEntity"] = new_hero_version + + old_repres_to_replace = {} + for repre_info in published_repres.values(): + repre = repre_info["representation"] + repre_name_low = repre["name"].lower() + if repre_name_low in old_repres_by_name: + old_repres_to_replace[repre_name_low] = ( + old_repres_by_name.pop(repre_name_low) + ) + old_repres_to_delete = old_repres_by_name or {} + backup_hero_publish_dir = None + if os.path.exists(hero_publish_dir): + base_backup_dir = f"{hero_publish_dir}.BACKUP" + max_idx = 10 + # Find the first available backup directory name + for idx in range(max_idx + 1): + if idx == 0: + candidate_backup_dir = base_backup_dir + else: + candidate_backup_dir = f"{base_backup_dir}{idx}" + if not os.path.exists(candidate_backup_dir): + backup_hero_publish_dir = candidate_backup_dir + break + else: + raise AssertionError( + f"Backup folders are fully occupied to max index {max_idx}" + ) + + try: + os.rename(hero_publish_dir, backup_hero_publish_dir) + except 
PermissionError as e: + raise AssertionError( + "Could not create hero version because it is " + "not possible to replace current hero files." + ) from e + + try: + src_to_dst_file_paths = [] + repre_integrate_data = [] + path_template_obj = anatomy.get_template_item( + "hero", template_key, "path") + anatomy_root = {"root": anatomy.roots} + for repre_info in published_repres.values(): + published_files = repre_info["published_files"] + if len(published_files) == 0: + continue + anatomy_data = copy.deepcopy(repre_info["anatomy_data"]) + anatomy_data.pop("version", None) + template_filled = path_template_obj.format_strict(anatomy_data) + repre_context = template_filled.used_values + for key in self.db_representation_context_keys: + value = anatomy_data.get(key) + if value is not None: + repre_context[key] = value + repre_entity = copy.deepcopy(repre_info["representation"]) + repre_entity.pop("id", None) + repre_entity["versionId"] = new_hero_version["id"] + repre_entity["context"] = repre_context + repre_entity["attrib"] = { + "path": str(template_filled), + "template": hero_template.template + } + dst_paths = [] + + if len(published_files) == 1: + dst_paths.append(str(template_filled)) + mapped_published_file = StringTemplate( + published_files[0]).format_strict( + anatomy_root + ) + src_to_dst_file_paths.append( + (mapped_published_file, template_filled) + ) + self.log.info( + f"Single published file: {mapped_published_file} -> " + f"{template_filled}" + ) + else: + collections, remainders = clique.assemble(published_files) + if remainders or not collections or len(collections) > 1: + raise RuntimeError( + ( + "Integrity error. Files of published " + "representation is combination of frame " + "collections and single files." + ) + ) + src_col = collections[0] + frame_splitter = "_-_FRAME_SPLIT_-_" + anatomy_data["frame"] = frame_splitter + _template_filled = path_template_obj.format_strict( + anatomy_data + ) + head, tail = _template_filled.split(frame_splitter) + padding = anatomy.templates_obj.frame_padding + dst_col = clique.Collection( + head=head, padding=padding, tail=tail + ) + dst_col.indexes.clear() + dst_col.indexes.update(src_col.indexes) + for src_file, dst_file in zip(src_col, dst_col): + src_file = StringTemplate(src_file).format_strict( + anatomy_root + ) + src_to_dst_file_paths.append((src_file, dst_file)) + dst_paths.append(dst_file) + self.log.info( + f"Collection published file: {src_file} " + f"-> {dst_file}" + ) + repre_integrate_data.append((repre_entity, dst_paths)) + + # Copy files + with ThreadPoolExecutor(max_workers=8) as executor: + futures = [ + executor.submit(self.copy_file, src_path, dst_path) + for src_path, dst_path in itertools.chain( + src_to_dst_file_paths, other_file_paths_mapping) + ] + wait_for_future_errors(executor, futures) + + # Update/create representations + for repre_entity, dst_paths in repre_integrate_data: + repre_files = self.get_files_info(dst_paths, anatomy) + repre_entity["files"] = repre_files + repre_name_low = repre_entity["name"].lower() + if repre_name_low in old_repres_to_replace: + old_repre = old_repres_to_replace.pop(repre_name_low) + repre_entity["id"] = old_repre["id"] + update_data = prepare_changes(old_repre, repre_entity) + op_session.update_entity( + project_name, + "representation", + old_repre["id"], + update_data + ) + elif repre_name_low in inactive_old_repres_by_name: + inactive_repre = inactive_old_repres_by_name.pop( + repre_name_low + ) + repre_entity["id"] = inactive_repre["id"] + update_data = 
prepare_changes(inactive_repre, repre_entity) + op_session.update_entity( + project_name, + "representation", + inactive_repre["id"], + update_data + ) + else: + op_session.create_entity( + project_name, + "representation", + repre_entity + ) + + for repre in old_repres_to_delete.values(): + op_session.update_entity( + project_name, + "representation", + repre["id"], + {"active": False} + ) + + op_session.commit() + + if backup_hero_publish_dir is not None and os.path.exists( + backup_hero_publish_dir + ): + shutil.rmtree(backup_hero_publish_dir) + + except Exception: + if backup_hero_publish_dir is not None and os.path.exists( + backup_hero_publish_dir): + if os.path.exists(hero_publish_dir): + shutil.rmtree(hero_publish_dir) + os.rename(backup_hero_publish_dir, hero_publish_dir) + raise + + def get_files_info( + self, filepaths: list[str], anatomy: Anatomy) -> list[dict]: + """Get list of file info dictionaries for given file paths. + + Args: + filepaths (list[str]): List of absolute file paths. + anatomy (Anatomy): Anatomy object for the project. + + Returns: + list[dict]: List of file info dictionaries. + + """ + file_infos = [] + for filepath in filepaths: + file_info = self.prepare_file_info(filepath, anatomy) + file_infos.append(file_info) + return file_infos + + def prepare_file_info(self, path: str, anatomy: Anatomy) -> dict: + """Prepare file info dictionary for given path. + + Args: + path (str): Absolute file path. + anatomy (Anatomy): Anatomy object for the project. + + Returns: + dict: File info dictionary with keys: + - id (str): Unique identifier for the file. + - name (str): Base name of the file. + - path (str): Rootless file path. + - size (int): Size of the file in bytes. + - hash (str): Hash of the file content. + - hash_type (str): Type of the hash used. + + """ + return { + "id": create_entity_id(), + "name": os.path.basename(path), + "path": self.get_rootless_path(anatomy, path), + "size": os.path.getsize(path), + "hash": source_hash(path), + "hash_type": "op3", + } + + @staticmethod + def get_publish_dir( + instance_data: dict, + anatomy: Anatomy, + template_key: str) -> str: + """Get publish directory from instance data and anatomy. + + Args: + instance_data (dict): Instance data with "anatomyData" key. + anatomy (Anatomy): Anatomy object for the project. + template_key (str): Template key for the hero template. + + Returns: + str: Normalized publish directory path. + + """ + template_data = copy.deepcopy(instance_data.get("anatomyData", {})) + if "originalBasename" in instance_data: + template_data["originalBasename"] = ( + instance_data["originalBasename"] + ) + template_obj = anatomy.get_template_item( + "hero", template_key, "directory" + ) + return os.path.normpath(template_obj.format_strict(template_data)) + + @staticmethod + def get_rootless_path(anatomy: Anatomy, path: str) -> str: + """Get rootless path from absolute path. + + Args: + anatomy (Anatomy): Anatomy object for the project. + path (str): Absolute file path. + + Returns: + str: Rootless file path if root found, else original path. + + """ + success, rootless_path = anatomy.find_root_template_from_path(path) + if success: + path = rootless_path + return path + + def copy_file(self, src_path: str, dst_path: str) -> None: + """Copy file from src to dst with creating directories. + + Args: + src_path (str): Source file path. + dst_path (str): Destination file path. + + Raises: + OSError: If copying or linking fails. 
+ + """ + dirname = os.path.dirname(dst_path) + try: + os.makedirs(dirname) + except OSError as exc: + if exc.errno != errno.EEXIST: + raise + if self.use_hardlinks: + try: + create_hard_link(src_path, dst_path) + return + except OSError as exc: + if exc.errno not in [errno.EXDEV, errno.EINVAL]: + raise + copyfile(src_path, dst_path) + + @staticmethod + def version_from_representations( + project_name: str, repres: dict) -> Optional[dict[str, Any]]: + """Find version from representations. + + Args: + project_name (str): Name of the project. + repres (dict): Dictionary of representations info. + + Returns: + Optional[dict]: Version entity if found, else None. + + """ + for repre_info in repres.values(): + version = ayon_api.get_version_by_id( + project_name, repre_info["representation"]["versionId"] + ) + if version: + return version + return None + + @staticmethod + def current_hero_ents( + project_name: str, + version: dict[str, Any]) -> tuple[Any, list[dict[str, Any]]]: + hero_version = ayon_api.get_hero_version_by_product_id( + project_name, version["productId"] + ) + if not hero_version: + return None, [] + hero_repres = list( + ayon_api.get_representations( + project_name, version_ids={hero_version["id"]} + ) + ) + return hero_version, hero_repres diff --git a/client/ayon_core/plugins/load/push_to_library.py b/client/ayon_core/plugins/load/push_to_project.py similarity index 63% rename from client/ayon_core/plugins/load/push_to_library.py rename to client/ayon_core/plugins/load/push_to_project.py index 981028d734..0b218d6ea1 100644 --- a/client/ayon_core/plugins/load/push_to_library.py +++ b/client/ayon_core/plugins/load/push_to_project.py @@ -6,15 +6,15 @@ from ayon_core.pipeline import load from ayon_core.pipeline.load import LoadError -class PushToLibraryProject(load.ProductLoaderPlugin): - """Export selected versions to folder structure from Template""" +class PushToProject(load.ProductLoaderPlugin): + """Export selected versions to different project""" is_multiple_contexts_compatible = True representations = {"*"} product_types = {"*"} - label = "Push to Library project" + label = "Push to project" order = 35 icon = "send" color = "#d8d8d8" @@ -28,10 +28,12 @@ class PushToLibraryProject(load.ProductLoaderPlugin): if not filtered_contexts: raise LoadError("Nothing to push for your selection") - if len(filtered_contexts) > 1: - raise LoadError("Please select only one item") - - context = tuple(filtered_contexts)[0] + folder_ids = set( + context["folder"]["id"] + for context in filtered_contexts + ) + if len(folder_ids) > 1: + raise LoadError("Please select products from single folder") push_tool_script_path = os.path.join( AYON_CORE_ROOT, @@ -39,14 +41,16 @@ class PushToLibraryProject(load.ProductLoaderPlugin): "push_to_project", "main.py" ) + project_name = filtered_contexts[0]["project"]["name"] - project_name = context["project"]["name"] - version_id = context["version"]["id"] + version_ids = { + context["version"]["id"] + for context in filtered_contexts + } args = get_ayon_launcher_args( - "run", push_tool_script_path, "--project", project_name, - "--version", version_id + "--versions", ",".join(version_ids) ) run_detached_process(args) diff --git a/client/ayon_core/plugins/publish/cleanup.py b/client/ayon_core/plugins/publish/cleanup.py index 681fe700a3..03eaaf9c6e 100644 --- a/client/ayon_core/plugins/publish/cleanup.py +++ b/client/ayon_core/plugins/publish/cleanup.py @@ -38,6 +38,8 @@ class CleanUp(pyblish.api.InstancePlugin): "webpublisher", "shell" ] + settings_category 
= "core" + exclude_families = ["clip"] optional = True active = True diff --git a/client/ayon_core/plugins/publish/cleanup_farm.py b/client/ayon_core/plugins/publish/cleanup_farm.py index e655437ced..8d1c8de425 100644 --- a/client/ayon_core/plugins/publish/cleanup_farm.py +++ b/client/ayon_core/plugins/publish/cleanup_farm.py @@ -13,6 +13,8 @@ class CleanUpFarm(pyblish.api.ContextPlugin): order = pyblish.api.IntegratorOrder + 11 label = "Clean Up Farm" + + settings_category = "core" enabled = True # Keep "filesequence" for backwards compatibility of older jobs diff --git a/client/ayon_core/plugins/publish/collect_anatomy_context_data.py b/client/ayon_core/plugins/publish/collect_anatomy_context_data.py index cccf392e40..5d2ecec433 100644 --- a/client/ayon_core/plugins/publish/collect_anatomy_context_data.py +++ b/client/ayon_core/plugins/publish/collect_anatomy_context_data.py @@ -16,6 +16,7 @@ Provides: import json import pyblish.api +from ayon_core.lib import get_ayon_user_entity from ayon_core.pipeline.template_data import get_template_data @@ -55,17 +56,18 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): if folder_entity: task_entity = context.data["taskEntity"] + username = context.data["user"] + user_entity = get_ayon_user_entity(username) anatomy_data = get_template_data( project_entity, folder_entity, task_entity, - host_name, - project_settings + host_name=host_name, + settings=project_settings, + user_entity=user_entity, ) anatomy_data.update(context.data.get("datetimeData") or {}) - username = context.data["user"] - anatomy_data["user"] = username # Backwards compatibility for 'username' key anatomy_data["username"] = username diff --git a/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py b/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py index 2fcf562dd0..2cb2297bf7 100644 --- a/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py +++ b/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py @@ -46,6 +46,8 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.49 label = "Collect Anatomy Instance data" + settings_category = "core" + follow_workfile_version = False def process(self, context): diff --git a/client/ayon_core/plugins/publish/collect_audio.py b/client/ayon_core/plugins/publish/collect_audio.py index 57c69ef2b2..2949ff1196 100644 --- a/client/ayon_core/plugins/publish/collect_audio.py +++ b/client/ayon_core/plugins/publish/collect_audio.py @@ -39,8 +39,9 @@ class CollectAudio(pyblish.api.ContextPlugin): "blender", "houdini", "max", - "circuit", + "batchdelivery", ] + settings_category = "core" audio_product_name = "audioMain" diff --git a/client/ayon_core/plugins/publish/collect_farm_env_variables.py b/client/ayon_core/plugins/publish/collect_farm_env_variables.py index 39c421381d..d35f02b9df 100644 --- a/client/ayon_core/plugins/publish/collect_farm_env_variables.py +++ b/client/ayon_core/plugins/publish/collect_farm_env_variables.py @@ -32,6 +32,7 @@ class CollectCoreJobEnvVars(pyblish.api.ContextPlugin): for key in [ "AYON_BUNDLE_NAME", + "AYON_STUDIO_BUNDLE_NAME", "AYON_USE_STAGING", "AYON_IN_TESTS", # NOTE Not sure why workdir is needed? 
diff --git a/client/ayon_core/plugins/publish/collect_frames_fix.py b/client/ayon_core/plugins/publish/collect_frames_fix.py index 0f7d5b692a..4270af5541 100644 --- a/client/ayon_core/plugins/publish/collect_frames_fix.py +++ b/client/ayon_core/plugins/publish/collect_frames_fix.py @@ -23,6 +23,7 @@ class CollectFramesFixDef( targets = ["local"] hosts = ["nuke"] families = ["render", "prerender"] + settings_category = "core" rewrite_version_enable = False diff --git a/client/ayon_core/plugins/publish/collect_from_create_context.py b/client/ayon_core/plugins/publish/collect_from_create_context.py index b99866fed9..5e0ecbdff4 100644 --- a/client/ayon_core/plugins/publish/collect_from_create_context.py +++ b/client/ayon_core/plugins/publish/collect_from_create_context.py @@ -2,11 +2,13 @@ """ import os +import collections + import pyblish.api from ayon_core.host import IPublishHost from ayon_core.pipeline import registered_host -from ayon_core.pipeline.create import CreateContext +from ayon_core.pipeline.create import CreateContext, ParentFlags class CollectFromCreateContext(pyblish.api.ContextPlugin): @@ -36,18 +38,51 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): if project_name: context.data["projectName"] = project_name + # Separate root instances and parented instances + instances_by_parent_id = collections.defaultdict(list) + root_instances = [] for created_instance in create_context.instances: + parent_id = created_instance.parent_instance_id + if parent_id is None: + root_instances.append(created_instance) + else: + instances_by_parent_id[parent_id].append(created_instance) + + # Traverse instances from top to bottom + # - All instances without an existing parent are automatically + # eliminated + filtered_instances = [] + _queue = collections.deque() + _queue.append((root_instances, True)) + while _queue: + created_instances, parent_is_active = _queue.popleft() + for created_instance in created_instances: + is_active = created_instance["active"] + # Use a parent's active state if parent flags defines that + if ( + created_instance.parent_flags & ParentFlags.share_active + and is_active + ): + is_active = parent_is_active + + if is_active: + filtered_instances.append(created_instance) + + children = instances_by_parent_id[created_instance.id] + if children: + _queue.append((children, is_active)) + + for created_instance in filtered_instances: instance_data = created_instance.data_to_store() - if instance_data["active"]: - thumbnail_path = thumbnail_paths_by_instance_id.get( - created_instance.id - ) - self.create_instance( - context, - instance_data, - created_instance.transient_data, - thumbnail_path - ) + thumbnail_path = thumbnail_paths_by_instance_id.get( + created_instance.id + ) + self.create_instance( + context, + instance_data, + created_instance.transient_data, + thumbnail_path + ) # Update global data to context context.data.update(create_context.context_data_to_store()) diff --git a/client/ayon_core/plugins/publish/collect_otio_frame_ranges.py b/client/ayon_core/plugins/publish/collect_otio_frame_ranges.py index 0a4efc2172..543277f37e 100644 --- a/client/ayon_core/plugins/publish/collect_otio_frame_ranges.py +++ b/client/ayon_core/plugins/publish/collect_otio_frame_ranges.py @@ -8,13 +8,7 @@ This module contains a unified plugin that handles: from pprint import pformat -import opentimelineio as otio import pyblish.api -from ayon_core.pipeline.editorial import ( - get_media_range_with_retimes, - otio_range_to_frame_range, - otio_range_with_handles, -) def 
validate_otio_clip(instance, logger): @@ -74,7 +68,15 @@ class CollectOtioRanges(pyblish.api.InstancePlugin): if not validate_otio_clip(instance, self.log): return + import opentimelineio as otio + otio_clip = instance.data["otioClip"] + if isinstance( + otio_clip.media_reference, + otio.schema.MissingReference + ): + self.log.info("Clip has no media reference") + return # Collect timeline ranges if workfile start frame is available if "workfileFrameStart" in instance.data: @@ -100,6 +102,11 @@ class CollectOtioRanges(pyblish.api.InstancePlugin): def _collect_timeline_ranges(self, instance, otio_clip): """Collect basic timeline frame ranges.""" + from ayon_core.pipeline.editorial import ( + otio_range_to_frame_range, + otio_range_with_handles, + ) + workfile_start = instance.data["workfileFrameStart"] # Get timeline ranges @@ -129,6 +136,8 @@ class CollectOtioRanges(pyblish.api.InstancePlugin): def _collect_source_ranges(self, instance, otio_clip): """Collect source media frame ranges.""" + import opentimelineio as otio + # Get source ranges otio_src_range = otio_clip.source_range otio_available_range = otio_clip.available_range() @@ -178,6 +187,8 @@ class CollectOtioRanges(pyblish.api.InstancePlugin): def _collect_retimed_ranges(self, instance, otio_clip): """Handle retimed clip frame ranges.""" + from ayon_core.pipeline.editorial import get_media_range_with_retimes + retimed_attributes = get_media_range_with_retimes(otio_clip, 0, 0) self.log.debug(f"Retimed attributes: {retimed_attributes}") diff --git a/client/ayon_core/plugins/publish/collect_otio_subset_resources.py b/client/ayon_core/plugins/publish/collect_otio_subset_resources.py index 275b8a7f55..4d3c1cfb13 100644 --- a/client/ayon_core/plugins/publish/collect_otio_subset_resources.py +++ b/client/ayon_core/plugins/publish/collect_otio_subset_resources.py @@ -60,6 +60,13 @@ class CollectOtioSubsetResources( # get basic variables otio_clip = instance.data["otioClip"] + if isinstance( + otio_clip.media_reference, + otio.schema.MissingReference + ): + self.log.info("Clip has no media reference") + return + otio_available_range = otio_clip.available_range() media_fps = otio_available_range.start_time.rate available_duration = otio_available_range.duration.value diff --git a/client/ayon_core/plugins/publish/collect_resources_path.py b/client/ayon_core/plugins/publish/collect_resources_path.py index 2e5b296228..704c69a6ab 100644 --- a/client/ayon_core/plugins/publish/collect_resources_path.py +++ b/client/ayon_core/plugins/publish/collect_resources_path.py @@ -13,6 +13,8 @@ import copy import pyblish.api +from ayon_core.pipeline.publish import get_publish_template_name + class CollectResourcesPath(pyblish.api.InstancePlugin): """Generate directory path where the files and resources will be stored. 
@@ -77,16 +79,29 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): # This is for cases of Deprecated anatomy without `folder` # TODO remove when all clients have solved this issue - template_data.update({ - "frame": "FRAME_TEMP", - "representation": "TEMP" - }) + template_data.update({"frame": "FRAME_TEMP", "representation": "TEMP"}) - publish_templates = anatomy.get_template_item( - "publish", "default", "directory" + task_name = task_type = None + task_entity = instance.data.get("taskEntity") + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] + + template_name = get_publish_template_name( + project_name=instance.context.data["projectName"], + host_name=instance.context.data["hostName"], + product_type=instance.data["productType"], + task_name=task_name, + task_type=task_type, + project_settings=instance.context.data["project_settings"], + logger=self.log, ) + + publish_template = anatomy.get_template_item( + "publish", template_name, "directory") + publish_folder = os.path.normpath( - publish_templates.format_strict(template_data) + publish_template.format_strict(template_data) ) resources_folder = os.path.join(publish_folder, "resources") diff --git a/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py b/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py index 1abb8e29d2..524381f656 100644 --- a/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py +++ b/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py @@ -1,7 +1,9 @@ import ayon_api import ayon_api.utils +from ayon_core.host import ILoadHost from ayon_core.pipeline import registered_host + import pyblish.api @@ -27,16 +29,23 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): def process(self, context): host = registered_host() if host is None: - self.log.warn("No registered host.") + self.log.warning("No registered host.") return - if not hasattr(host, "ls"): - host_name = host.__name__ - self.log.warn("Host %r doesn't have ls() implemented." % host_name) + if not isinstance(host, ILoadHost): + host_name = host.name + self.log.warning( + f"Host {host_name} does not implement ILoadHost. " + "Skipping querying of loaded versions in scene." + ) + return + + containers = list(host.get_containers()) + if not containers: + # Opt out early if there are no containers + self.log.debug("No loaded containers found in scene.") return - loaded_versions = [] - containers = list(host.ls()) repre_ids = { container["representation"] for container in containers @@ -61,6 +70,7 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): # QUESTION should we add same representation id when loaded multiple # times? 
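
The `collect_scene_loaded_versions` change above replaces the old `hasattr(host, "ls")` duck-typing with an interface check; a small sketch of the pattern, assuming it runs inside a session with a registered host:

```python
# Sketch of the ILoadHost-based check used above (assumes a host is registered).
from ayon_core.host import ILoadHost
from ayon_core.pipeline import registered_host

host = registered_host()
if isinstance(host, ILoadHost):
    containers = list(host.get_containers())
    # Representation ids referenced by containers loaded in the scene.
    repre_ids = {container["representation"] for container in containers}
```
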
+ loaded_versions = [] for con in containers: repre_id = con["representation"] repre_entity = repre_entities_by_id.get(repre_id) @@ -80,4 +90,5 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): } loaded_versions.append(version) + self.log.debug(f"Collected {len(loaded_versions)} loaded versions.") context.data["loadedVersions"] = loaded_versions diff --git a/client/ayon_core/plugins/publish/collect_scene_version.py b/client/ayon_core/plugins/publish/collect_scene_version.py index 7979b66abe..e6e81ea074 100644 --- a/client/ayon_core/plugins/publish/collect_scene_version.py +++ b/client/ayon_core/plugins/publish/collect_scene_version.py @@ -12,9 +12,10 @@ class CollectSceneVersion(pyblish.api.ContextPlugin): """ order = pyblish.api.CollectorOrder - label = 'Collect Scene Version' + label = "Collect Scene Version" # configurable in Settings hosts = ["*"] + settings_category = "core" # in some cases of headless publishing (for example webpublisher using PS) # you want to ignore version from name and let integrate use next version diff --git a/client/ayon_core/plugins/publish/extract_burnin.py b/client/ayon_core/plugins/publish/extract_burnin.py index fa7fd4e504..351d85a97f 100644 --- a/client/ayon_core/plugins/publish/extract_burnin.py +++ b/client/ayon_core/plugins/publish/extract_burnin.py @@ -55,8 +55,9 @@ class ExtractBurnin(publish.Extractor): "max", "blender", "unreal", - "circuit", + "batchdelivery", ] + settings_category = "core" optional = True diff --git a/client/ayon_core/plugins/publish/extract_color_transcode.py b/client/ayon_core/plugins/publish/extract_color_transcode.py index 7a96db76ad..f80ca7e150 100644 --- a/client/ayon_core/plugins/publish/extract_color_transcode.py +++ b/client/ayon_core/plugins/publish/extract_color_transcode.py @@ -12,7 +12,7 @@ from ayon_core.lib import ( ) from ayon_core.lib.transcoding import ( MissingRGBAChannelsError, - convert_colorspace, + oiio_color_convert, ) from ayon_core.lib.profiles_filtering import filter_profiles @@ -56,6 +56,8 @@ class ExtractOIIOTranscode(publish.Extractor): label = "Transcode color spaces" order = pyblish.api.ExtractorOrder + 0.019 + settings_category = "core" + optional = True # Supported extensions @@ -86,6 +88,14 @@ class ExtractOIIOTranscode(publish.Extractor): new_representations = [] repres = instance.data["representations"] for idx, repre in enumerate(list(repres)): + # target space, display and view might be defined upstream + # TODO: address https://github.com/ynput/ayon-core/pull/1268#discussion_r2156555474 + # Implement upstream logic to handle target_colorspace, + # target_display, target_view in other DCCs + target_colorspace = False + target_display = instance.data.get("colorspaceDisplay") + target_view = instance.data.get("colorspaceView") + self.log.debug("repre ({}): `{}`".format(idx + 1, repre["name"])) if not self._repre_is_valid(repre): continue @@ -95,6 +105,8 @@ class ExtractOIIOTranscode(publish.Extractor): colorspace_data = repre["colorspaceData"] source_colorspace = colorspace_data["colorspace"] + source_display = colorspace_data.get("display") + source_view = colorspace_data.get("view") config_path = colorspace_data.get("config", {}).get("path") if not config_path or not os.path.exists(config_path): self.log.warning("Config file doesn't exist, skipping") @@ -132,7 +144,6 @@ class ExtractOIIOTranscode(publish.Extractor): transcoding_type = output_def["transcoding_type"] - target_colorspace = view = display = None # NOTE: we use colorspace_data as the fallback values for # the 
target colorspace. if transcoding_type == "colorspace": @@ -144,18 +155,20 @@ class ExtractOIIOTranscode(publish.Extractor): colorspace_data.get("colorspace")) elif transcoding_type == "display_view": display_view = output_def["display_view"] - view = display_view["view"] or colorspace_data.get("view") - display = ( + target_view = ( + display_view["view"] + or colorspace_data.get("view")) + target_display = ( display_view["display"] or colorspace_data.get("display") ) # both could be already collected by DCC, # but could be overwritten when transcoding - if view: - new_repre["colorspaceData"]["view"] = view - if display: - new_repre["colorspaceData"]["display"] = display + if target_view: + new_repre["colorspaceData"]["view"] = target_view + if target_display: + new_repre["colorspaceData"]["display"] = target_display if target_colorspace: new_repre["colorspaceData"]["colorspace"] = \ target_colorspace @@ -175,16 +188,18 @@ class ExtractOIIOTranscode(publish.Extractor): new_staging_dir, output_extension) try: - convert_colorspace( - input_path, - output_path, - config_path, - source_colorspace, - target_colorspace, - view, - display, - additional_command_args, - self.log + oiio_color_convert( + input_path=input_path, + output_path=output_path, + config_path=config_path, + source_colorspace=source_colorspace, + target_colorspace=target_colorspace, + target_display=target_display, + target_view=target_view, + source_display=source_display, + source_view=source_view, + additional_command_args=additional_command_args, + logger=self.log ) except MissingRGBAChannelsError as exc: missing_rgba_review_channels = True diff --git a/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py b/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py index 472694d334..3a450a4f33 100644 --- a/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py +++ b/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py @@ -158,6 +158,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): """ # Not all hosts can import this module. import opentimelineio as otio + from ayon_core.pipeline.editorial import OTIO_EPSILON output = [] # go trough all audio tracks @@ -172,6 +173,14 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): clip_start = otio_clip.source_range.start_time fps = clip_start.rate conformed_av_start = media_av_start.rescaled_to(fps) + + # Avoid rounding issue on media available range. 
+ if clip_start.almost_equal( + conformed_av_start, + OTIO_EPSILON + ): + conformed_av_start = clip_start + # ffmpeg ignores embedded tc start = clip_start - conformed_av_start duration = otio_clip.source_range.duration diff --git a/client/ayon_core/plugins/publish/extract_otio_review.py b/client/ayon_core/plugins/publish/extract_otio_review.py index f217be551c..f338fba746 100644 --- a/client/ayon_core/plugins/publish/extract_otio_review.py +++ b/client/ayon_core/plugins/publish/extract_otio_review.py @@ -23,7 +23,10 @@ from ayon_core.lib import ( get_ffmpeg_tool_args, run_subprocess, ) -from ayon_core.pipeline import publish +from ayon_core.pipeline import ( + KnownPublishError, + publish, +) class ExtractOTIOReview( @@ -97,8 +100,11 @@ class ExtractOTIOReview( # skip instance if no reviewable data available if ( - not isinstance(otio_review_clips[0], otio.schema.Clip) - and len(otio_review_clips) == 1 + len(otio_review_clips) == 1 + and ( + not isinstance(otio_review_clips[0], otio.schema.Clip) + or otio_review_clips[0].media_reference.is_missing_reference + ) ): self.log.warning( "Instance `{}` has nothing to process".format(instance)) @@ -124,7 +130,7 @@ class ExtractOTIOReview( # NOTE it looks like it is set only in hiero integration res_data = {"width": self.to_width, "height": self.to_height} for key in res_data: - for meta_prefix in ("ayon.source.", "openpype.source."): + for meta_prefix in ("ayon.source", "openpype.source"): meta_key = f"{meta_prefix}.{key}" value = media_metadata.get(meta_key) if value is not None: @@ -248,7 +254,7 @@ class ExtractOTIOReview( # Single video way. # Extraction via FFmpeg. - else: + elif hasattr(media_ref, "target_url"): path = media_ref.target_url # Set extract range from 0 (FFmpeg ignores # embedded timecode). @@ -352,6 +358,7 @@ class ExtractOTIOReview( import opentimelineio as otio from ayon_core.pipeline.editorial import ( trim_media_range, + OTIO_EPSILON, ) def _round_to_frame(rational_time): @@ -370,6 +377,13 @@ class ExtractOTIOReview( avl_start = avl_range.start_time + # Avoid rounding issue on media available range. + if start.almost_equal( + avl_start, + OTIO_EPSILON + ): + avl_start = start + # An additional gap is required before the available # range to conform source start point and head handles. if start < avl_start: @@ -388,6 +402,14 @@ class ExtractOTIOReview( # (media duration is shorter then clip requirement). end_point = start + duration avl_end_point = avl_range.end_time_exclusive() + + # Avoid rounding issue on media available range. 
+ if end_point.almost_equal( + avl_end_point, + OTIO_EPSILON + ): + avl_end_point = end_point + if end_point > avl_end_point: gap_duration = end_point - avl_end_point duration -= gap_duration @@ -444,7 +466,7 @@ class ExtractOTIOReview( command = get_ffmpeg_tool_args("ffmpeg") input_extension = None - if sequence: + if sequence is not None: input_dir, collection, sequence_fps = sequence in_frame_start = min(collection.indexes) @@ -478,7 +500,7 @@ class ExtractOTIOReview( "-i", input_path ]) - elif video: + elif video is not None: video_path, otio_range = video frame_start = otio_range.start_time.value input_fps = otio_range.start_time.rate @@ -496,7 +518,7 @@ class ExtractOTIOReview( "-i", video_path ]) - elif gap: + elif gap is not None: sec_duration = frames_to_seconds(gap, self.actual_fps) # form command for rendering gap files @@ -510,6 +532,9 @@ class ExtractOTIOReview( "-tune", "stillimage" ]) + else: + raise KnownPublishError("Sequence, video or gap is required.") + if video or sequence: command.extend([ "-vf", f"scale={self.to_width}:{self.to_height}:flags=lanczos", diff --git a/client/ayon_core/plugins/publish/extract_review.py b/client/ayon_core/plugins/publish/extract_review.py index a5f541225c..580aa27eef 100644 --- a/client/ayon_core/plugins/publish/extract_review.py +++ b/client/ayon_core/plugins/publish/extract_review.py @@ -13,14 +13,15 @@ import clique import speedcopy import pyblish.api -from ayon_api import get_last_version_by_product_name, get_representations - from ayon_core.lib import ( get_ffmpeg_tool_args, filter_profiles, path_to_subprocess_arg, run_subprocess, ) +from ayon_core.pipeline.publish.lib import ( + fill_sequence_gaps_with_previous_version +) from ayon_core.lib.transcoding import ( IMAGE_EXTENSIONS, get_ffprobe_streams, @@ -130,7 +131,7 @@ def frame_to_timecode(frame: int, fps: float) -> str: class ExtractReview(pyblish.api.InstancePlugin): - """Extracting Review mov file for Ftrack + """Extracting Reviewable medias Compulsory attribute of representation is tags list with "review", otherwise the representation is ignored. 
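# Illustration only (not part of this patch): ExtractReview picks up a
# representation only when its "tags" list contains "review". An upstream
# extractor would typically add something along these lines; the keys, paths
# and frame range below are hypothetical and only hint at the expected shape.
instance.data.setdefault("representations", []).append({
    "name": "exr",
    "ext": "exr",
    "files": ["renderMain.1001.exr", "renderMain.1002.exr"],
    "stagingDir": "/tmp/render_staging",
    "frameStart": 1001,
    "frameEnd": 1002,
    "tags": ["review"],  # without this tag the representation is skipped
})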
@@ -161,9 +162,11 @@ class ExtractReview(pyblish.api.InstancePlugin): "aftereffects", "flame", "unreal", - "circuit", + "batchdelivery", + "photoshop" ] + settings_category = "core" # Supported extensions image_exts = {"exr", "jpg", "jpeg", "png", "dpx", "tga", "tiff", "tif"} video_exts = {"mov", "mp4"} @@ -202,15 +205,21 @@ class ExtractReview(pyblish.api.InstancePlugin): def _get_outputs_for_instance(self, instance): host_name = instance.context.data["hostName"] product_type = instance.data["productType"] + task_type = None + task_entity = instance.data.get("taskEntity") + if task_entity: + task_type = task_entity["taskType"] self.log.debug("Host: \"{}\"".format(host_name)) self.log.debug("Product type: \"{}\"".format(product_type)) + self.log.debug("Task type: \"{}\"".format(task_type)) profile = filter_profiles( self.profiles, { "hosts": host_name, "product_types": product_type, + "task_types": task_type }, logger=self.log) if not profile: @@ -500,10 +509,10 @@ class ExtractReview(pyblish.api.InstancePlugin): resolution_width=temp_data.resolution_width, resolution_height=temp_data.resolution_height, extension=temp_data.input_ext, - temp_data=temp_data + temp_data=temp_data, ) elif fill_missing_frames == "previous_version": - new_frame_files = self.fill_sequence_gaps_with_previous( + fill_output = fill_sequence_gaps_with_previous_version( collection=collection, staging_dir=new_repre["stagingDir"], instance=instance, @@ -511,8 +520,13 @@ class ExtractReview(pyblish.api.InstancePlugin): start_frame=temp_data.frame_start, end_frame=temp_data.frame_end, ) + _, new_frame_files = fill_output # fallback to original workflow if new_frame_files is None: + self.log.warning( + "Falling back to filling from currently " + "last rendered." + ) new_frame_files = ( self.fill_sequence_gaps_from_existing( collection=collection, @@ -604,8 +618,6 @@ class ExtractReview(pyblish.api.InstancePlugin): "name": "{}_{}".format(output_name, output_ext), "outputName": output_name, "outputDef": output_def, - "frameStartFtrack": temp_data.output_frame_start, - "frameEndFtrack": temp_data.output_frame_end, "ffmpeg_cmd": subprcs_cmd }) @@ -1042,92 +1054,6 @@ class ExtractReview(pyblish.api.InstancePlugin): return all_args - def fill_sequence_gaps_with_previous( - self, - collection: str, - staging_dir: str, - instance: pyblish.plugin.Instance, - current_repre_name: str, - start_frame: int, - end_frame: int - ) -> Optional[dict[int, str]]: - """Tries to replace missing frames from ones from last version""" - repre_file_paths = self._get_last_version_files( - instance, current_repre_name) - if repre_file_paths is None: - # issues in getting last version files, falling back - return None - - prev_collection = clique.assemble( - repre_file_paths, - patterns=[clique.PATTERNS["frames"]], - minimum_items=1 - )[0][0] - prev_col_format = prev_collection.format("{head}{padding}{tail}") - - added_files = {} - anatomy = instance.context.data["anatomy"] - col_format = collection.format("{head}{padding}{tail}") - for frame in range(start_frame, end_frame + 1): - if frame in collection.indexes: - continue - hole_fpath = os.path.join(staging_dir, col_format % frame) - - previous_version_path = prev_col_format % frame - previous_version_path = anatomy.fill_root(previous_version_path) - if not os.path.exists(previous_version_path): - self.log.warning( - "Missing frame should be replaced from " - f"'{previous_version_path}' but that doesn't exist. " - "Falling back to filling from currently last rendered." 
- ) - return None - - self.log.warning( - f"Replacing missing '{hole_fpath}' with " - f"'{previous_version_path}'" - ) - speedcopy.copyfile(previous_version_path, hole_fpath) - added_files[frame] = hole_fpath - - return added_files - - def _get_last_version_files( - self, - instance: pyblish.plugin.Instance, - current_repre_name: str, - ): - product_name = instance.data["productName"] - project_name = instance.data["projectEntity"]["name"] - folder_entity = instance.data["folderEntity"] - - version_entity = get_last_version_by_product_name( - project_name, - product_name, - folder_entity["id"], - fields={"id"} - ) - if not version_entity: - return None - - matching_repres = get_representations( - project_name, - version_ids=[version_entity["id"]], - representation_names=[current_repre_name], - fields={"files"} - ) - - if not matching_repres: - return None - matching_repre = list(matching_repres)[0] - - repre_file_paths = [ - file_info["path"] - for file_info in matching_repre["files"] - ] - - return repre_file_paths - def fill_sequence_gaps_with_blanks( self, collection: str, @@ -1376,15 +1302,7 @@ class ExtractReview(pyblish.api.InstancePlugin): return audio_in_args, audio_filters, audio_out_args for audio in audio_inputs: - # NOTE modified, always was expected "frameStartFtrack" which is - # STRANGE?!!! There should be different key, right? - # TODO use different frame start! offset_seconds = 0 - frame_start_ftrack = instance.data.get("frameStartFtrack") - if frame_start_ftrack is not None: - offset_frames = frame_start_ftrack - audio["offset"] - offset_seconds = offset_frames / temp_data.fps - if offset_seconds > 0: audio_in_args.append( "-ss {}".format(offset_seconds) diff --git a/client/ayon_core/plugins/publish/extract_thumbnail.py b/client/ayon_core/plugins/publish/extract_thumbnail.py index 69bb9007f9..b5885178d0 100644 --- a/client/ayon_core/plugins/publish/extract_thumbnail.py +++ b/client/ayon_core/plugins/publish/extract_thumbnail.py @@ -6,6 +6,7 @@ import re import pyblish.api from ayon_core.lib import ( + get_oiio_tool_args, get_ffmpeg_tool_args, get_ffprobe_data, @@ -15,7 +16,11 @@ from ayon_core.lib import ( path_to_subprocess_arg, run_subprocess, ) -from ayon_core.lib.transcoding import convert_colorspace +from ayon_core.lib.transcoding import ( + oiio_color_convert, + get_oiio_input_and_channel_args, + get_oiio_info_for_input, +) from ayon_core.lib.transcoding import VIDEO_EXTENSIONS, IMAGE_EXTENSIONS @@ -38,10 +43,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): "substancedesigner", "nuke", "aftereffects", + "photoshop", "unreal", "houdini", - "circuit", + "batchdelivery", ] + settings_category = "core" enabled = False integrate_thumbnail = False @@ -208,6 +215,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): full_output_path = os.path.join(dst_staging, jpeg_file) colorspace_data = repre.get("colorspaceData") + # NOTE We should find out what is happening here. Why don't we + # use oiiotool all the time if it is available? Only possible + # reason might be that video files should be converted using + # ffmpeg, but other then that, we should use oiio all the time. + # - We should also probably get rid of the ffmpeg settings... 
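# Editorial sketch (not part of this patch): the thumbnail fallback chain that
# the hunks below end up implementing. The method names are taken from this
# diff; the wrapper itself is hypothetical and only condenses the control flow.
def _make_thumbnail_sketch(self, full_input_path, full_output_path,
                           colorspace_data, oiio_supported):
    created = False
    # 1. colorspace-aware conversion through oiiotool
    if oiio_supported and colorspace_data:
        created = self._create_colorspace_thumbnail(
            full_input_path, full_output_path, colorspace_data
        )
    # 2. plain ffmpeg conversion
    if not created:
        created = self._create_thumbnail_ffmpeg(
            full_input_path, full_output_path
        )
    # 3. last resort: plain oiiotool conversion
    if not created and oiio_supported:
        created = self._create_thumbnail_oiio(
            full_input_path, full_output_path
        )
    return created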
+ # only use OIIO if it is supported and representation has # colorspace data if oiio_supported and colorspace_data: @@ -217,7 +230,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): ) # If the input can read by OIIO then use OIIO method for # conversion otherwise use ffmpeg - repre_thumb_created = self._create_thumbnail_oiio( + repre_thumb_created = self._create_colorspace_thumbnail( full_input_path, full_output_path, colorspace_data @@ -227,17 +240,16 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): # oiiotool isn't available or representation is not having # colorspace data if not repre_thumb_created: - if oiio_supported: - self.log.debug( - "Converting with FFMPEG because input" - " can't be read by OIIO." - ) - repre_thumb_created = self._create_thumbnail_ffmpeg( full_input_path, full_output_path ) - # Skip representation and try next one if wasn't created + # Skip representation and try next one if wasn't created + if not repre_thumb_created and oiio_supported: + repre_thumb_created = self._create_thumbnail_oiio( + full_input_path, full_output_path + ) + if not repre_thumb_created: continue @@ -380,7 +392,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): return ext in IMAGE_EXTENSIONS or ext in VIDEO_EXTENSIONS - def _create_thumbnail_oiio( + def _create_colorspace_thumbnail( self, src_path, dst_path, @@ -431,13 +443,15 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): oiio_default_view = display_and_view["view"] try: - convert_colorspace( + oiio_color_convert( src_path, dst_path, colorspace_data["config"]["path"], colorspace_data["colorspace"], - display=repre_display or oiio_default_display, - view=repre_view or oiio_default_view, + source_display=colorspace_data.get("display"), + source_view=colorspace_data.get("view"), + target_display=repre_display or oiio_default_display, + target_view=repre_view or oiio_default_view, target_colorspace=oiio_default_colorspace, additional_command_args=resolution_arg, logger=self.log, @@ -451,9 +465,50 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): return True + def _create_thumbnail_oiio(self, src_path, dst_path): + self.log.debug(f"Extracting thumbnail with OIIO: {dst_path}") + + try: + resolution_arg = self._get_resolution_arg("oiiotool", src_path) + except RuntimeError: + self.log.warning( + "Failed to create thumbnail using oiio", exc_info=True + ) + return False + + input_info = get_oiio_info_for_input(src_path, logger=self.log) + input_arg, channels_arg = get_oiio_input_and_channel_args(input_info) + oiio_cmd = get_oiio_tool_args( + "oiiotool", + input_arg, src_path, + # Tell oiiotool which channels should be put to top stack + # (and output) + "--ch", channels_arg, + # Use first subimage + "--subimage", "0" + ) + oiio_cmd.extend(resolution_arg) + oiio_cmd.extend(("-o", dst_path)) + self.log.debug("Running: {}".format(" ".join(oiio_cmd))) + try: + run_subprocess(oiio_cmd, logger=self.log) + return True + except Exception: + self.log.warning( + "Failed to create thumbnail using oiiotool", + exc_info=True + ) + return False + def _create_thumbnail_ffmpeg(self, src_path, dst_path): - self.log.debug("Extracting thumbnail with FFMPEG: {}".format(dst_path)) - resolution_arg = self._get_resolution_arg("ffmpeg", src_path) + try: + resolution_arg = self._get_resolution_arg("ffmpeg", src_path) + except RuntimeError: + self.log.warning( + "Failed to create thumbnail using ffmpeg", exc_info=True + ) + return False + ffmpeg_path_args = get_ffmpeg_tool_args("ffmpeg") ffmpeg_args = self.ffmpeg_args or {} diff --git 
a/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py b/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py index ec1fddc6b1..0dc9a5e34d 100644 --- a/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py +++ b/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py @@ -256,6 +256,7 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin, label = "Collect USD Layer Contributions (Asset/Shot)" families = ["usd"] enabled = True + settings_category = "core" # A contribution defines a contribution into a (department) layer which # will get layered into the target product, usually the asset or shot. @@ -633,6 +634,8 @@ class ExtractUSDLayerContribution(publish.Extractor): label = "Extract USD Layer Contributions (Asset/Shot)" order = pyblish.api.ExtractorOrder + 0.45 + settings_category = "core" + use_ayon_entity_uri = False def process(self, instance): @@ -795,6 +798,8 @@ class ExtractUSDAssetContribution(publish.Extractor): label = "Extract USD Asset/Shot Contributions" order = ExtractUSDLayerContribution.order + 0.01 + settings_category = "core" + use_ayon_entity_uri = False def process(self, instance): diff --git a/client/ayon_core/plugins/publish/integrate.py b/client/ayon_core/plugins/publish/integrate.py index f1e066018c..d18e546392 100644 --- a/client/ayon_core/plugins/publish/integrate.py +++ b/client/ayon_core/plugins/publish/integrate.py @@ -121,7 +121,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "version", "representation", "username", - "user", "output", # OpenPype keys - should be removed "asset", # folder[name] @@ -796,6 +795,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin): if value is not None: repre_context[key] = value + # Keep only username + # NOTE This is to avoid storing all user attributes and data + # to representation + if "user" not in repre_context: + repre_context["user"] = { + "name": template_data["user"]["name"] + } + # Use previous representation's id if there is a name match existing = existing_repres_by_name.get(repre["name"].lower()) repre_id = None diff --git a/client/ayon_core/plugins/publish/integrate_hero_version.py b/client/ayon_core/plugins/publish/integrate_hero_version.py index 43f93da293..a591cfe880 100644 --- a/client/ayon_core/plugins/publish/integrate_hero_version.py +++ b/client/ayon_core/plugins/publish/integrate_hero_version.py @@ -61,6 +61,8 @@ class IntegrateHeroVersion( # Must happen after IntegrateNew order = pyblish.api.IntegratorOrder + 0.1 + settings_category = "core" + optional = True active = True @@ -87,7 +89,6 @@ class IntegrateHeroVersion( "family", "representation", "username", - "user", "output" ] # QUESTION/TODO this process should happen on server if crashed due to @@ -362,6 +363,14 @@ class IntegrateHeroVersion( if value is not None: repre_context[key] = value + # Keep only username + # NOTE This is to avoid storing all user attributes and data + # to representation + if "user" not in repre_context: + repre_context["user"] = { + "name": anatomy_data["user"]["name"] + } + # Prepare new repre repre_entity = copy.deepcopy(repre_info["representation"]) repre_entity.pop("id", None) diff --git a/client/ayon_core/plugins/publish/integrate_inputlinks.py b/client/ayon_core/plugins/publish/integrate_inputlinks.py index a3b6a228d6..be399a95fc 100644 --- a/client/ayon_core/plugins/publish/integrate_inputlinks.py +++ b/client/ayon_core/plugins/publish/integrate_inputlinks.py @@ -105,7 +105,7 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin): created 
links by its type """ if workfile_instance is None: - self.log.warn("No workfile in this publish session.") + self.log.warning("No workfile in this publish session.") return workfile_version_id = workfile_instance.data["versionEntity"]["id"] diff --git a/client/ayon_core/plugins/publish/integrate_product_group.py b/client/ayon_core/plugins/publish/integrate_product_group.py index 90887a359d..8904d21d69 100644 --- a/client/ayon_core/plugins/publish/integrate_product_group.py +++ b/client/ayon_core/plugins/publish/integrate_product_group.py @@ -24,6 +24,8 @@ class IntegrateProductGroup(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder - 0.1 label = "Product Group" + settings_category = "core" + # Attributes set by settings product_grouping_profiles = None diff --git a/client/ayon_core/plugins/publish/preintegrate_thumbnail_representation.py b/client/ayon_core/plugins/publish/preintegrate_thumbnail_representation.py index 8bd67c0183..900febc236 100644 --- a/client/ayon_core/plugins/publish/preintegrate_thumbnail_representation.py +++ b/client/ayon_core/plugins/publish/preintegrate_thumbnail_representation.py @@ -22,6 +22,8 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin): label = "Override Integrate Thumbnail Representations" order = pyblish.api.IntegratorOrder - 0.1 + settings_category = "core" + integrate_profiles = [] def process(self, instance): diff --git a/client/ayon_core/plugins/publish/validate_containers.py b/client/ayon_core/plugins/publish/validate_containers.py index 520e7a7ce9..fda3d93627 100644 --- a/client/ayon_core/plugins/publish/validate_containers.py +++ b/client/ayon_core/plugins/publish/validate_containers.py @@ -31,6 +31,7 @@ class ValidateOutdatedContainers( label = "Validate Outdated Containers" order = pyblish.api.ValidatorOrder + settings_category = "core" optional = True actions = [ShowInventory] diff --git a/client/ayon_core/plugins/publish/validate_file_saved.py b/client/ayon_core/plugins/publish/validate_file_saved.py index f8fdd27342..28734ba714 100644 --- a/client/ayon_core/plugins/publish/validate_file_saved.py +++ b/client/ayon_core/plugins/publish/validate_file_saved.py @@ -37,7 +37,7 @@ class ValidateCurrentSaveFile(pyblish.api.ContextPlugin): label = "Validate File Saved" order = pyblish.api.ValidatorOrder - 0.1 hosts = ["fusion", "houdini", "max", "maya", "nuke", "substancepainter", - "cinema4d", "silhouette", "gaffer", "blender"] + "cinema4d", "silhouette", "gaffer", "blender", "loki"] actions = [SaveByVersionUpAction, ShowWorkfilesAction] def process(self, context): diff --git a/client/ayon_core/plugins/publish/validate_intent.py b/client/ayon_core/plugins/publish/validate_intent.py index 71df652e92..fa5e5af093 100644 --- a/client/ayon_core/plugins/publish/validate_intent.py +++ b/client/ayon_core/plugins/publish/validate_intent.py @@ -14,6 +14,8 @@ class ValidateIntent(pyblish.api.ContextPlugin): order = pyblish.api.ValidatorOrder label = "Validate Intent" + settings_category = "core" + enabled = False # Can be modified by settings diff --git a/client/ayon_core/plugins/publish/validate_unique_subsets.py b/client/ayon_core/plugins/publish/validate_unique_subsets.py index 4067dd75a5..26c9ada116 100644 --- a/client/ayon_core/plugins/publish/validate_unique_subsets.py +++ b/client/ayon_core/plugins/publish/validate_unique_subsets.py @@ -34,7 +34,11 @@ class ValidateProductUniqueness(pyblish.api.ContextPlugin): for instance in context: # Ignore disabled instances - if not instance.data.get('publish', True): + if not 
instance.data.get("publish", True): + continue + + # Ignore instances not marked to integrate + if not instance.data.get("integrate", True): continue # Ignore instance without folder data diff --git a/client/ayon_core/plugins/publish/validate_version.py b/client/ayon_core/plugins/publish/validate_version.py index 0359f8fb53..d63c4e1f03 100644 --- a/client/ayon_core/plugins/publish/validate_version.py +++ b/client/ayon_core/plugins/publish/validate_version.py @@ -17,6 +17,7 @@ class ValidateVersion(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): order = pyblish.api.ValidatorOrder label = "Validate Version" + settings_category = "core" optional = False active = True diff --git a/client/ayon_core/scripts/otio_burnin.py b/client/ayon_core/scripts/otio_burnin.py index 77eeecaff6..bd94225979 100644 --- a/client/ayon_core/scripts/otio_burnin.py +++ b/client/ayon_core/scripts/otio_burnin.py @@ -6,7 +6,12 @@ import json import tempfile from string import Formatter -import opentimelineio_contrib.adapters.ffmpeg_burnins as ffmpeg_burnins +try: + from otio_burnins_adapter import ffmpeg_burnins +except ImportError: + import opentimelineio_contrib.adapters.ffmpeg_burnins as ffmpeg_burnins +from PIL import ImageFont + from ayon_core.lib import ( get_ffmpeg_tool_args, get_ffmpeg_codec_args, @@ -36,6 +41,39 @@ TIMECODE_KEY = "{timecode}" SOURCE_TIMECODE_KEY = "{source_timecode}" +def _drawtext(align, resolution, text, options): + """ + :rtype: {'x': int, 'y': int} + """ + x_pos = "0" + if align in (ffmpeg_burnins.TOP_CENTERED, ffmpeg_burnins.BOTTOM_CENTERED): + x_pos = "w/2-tw/2" + + elif align in (ffmpeg_burnins.TOP_RIGHT, ffmpeg_burnins.BOTTOM_RIGHT): + ifont = ImageFont.truetype(options["font"], options["font_size"]) + if hasattr(ifont, "getbbox"): + left, top, right, bottom = ifont.getbbox(text) + box_size = right - left, bottom - top + else: + box_size = ifont.getsize(text) + x_pos = resolution[0] - (box_size[0] + options["x_offset"]) + elif align in (ffmpeg_burnins.TOP_LEFT, ffmpeg_burnins.BOTTOM_LEFT): + x_pos = options["x_offset"] + + if align in ( + ffmpeg_burnins.TOP_CENTERED, + ffmpeg_burnins.TOP_RIGHT, + ffmpeg_burnins.TOP_LEFT + ): + y_pos = "%d" % options["y_offset"] + else: + y_pos = "h-text_h-%d" % (options["y_offset"]) + return {"x": x_pos, "y": y_pos} + + +ffmpeg_burnins._drawtext = _drawtext + + def _get_ffprobe_data(source): """Reimplemented from otio burnins to be able use full path to ffprobe :param str source: source media file diff --git a/client/ayon_core/settings/lib.py b/client/ayon_core/settings/lib.py index 72af07799f..a875e0116a 100644 --- a/client/ayon_core/settings/lib.py +++ b/client/ayon_core/settings/lib.py @@ -4,6 +4,8 @@ import logging import collections import copy import time +import warnings +from urllib.parse import urlencode import ayon_api @@ -35,6 +37,37 @@ class CacheItem: return time.time() > self._outdate_time +def _get_addons_settings( + studio_bundle_name, + project_bundle_name, + variant, + project_name=None, +): + """Modified version of `ayon_api.get_addons_settings` function.""" + query_values = { + key: value + for key, value in ( + ("bundle_name", studio_bundle_name), + ("variant", variant), + ("project_name", project_name), + ) + if value + } + if project_bundle_name != studio_bundle_name: + query_values["project_bundle_name"] = project_bundle_name + + site_id = ayon_api.get_site_id() + if site_id: + query_values["site_id"] = site_id + + response = ayon_api.get(f"settings?{urlencode(query_values)}") + response.raise_for_status() + return { + 
addon["name"]: addon["settings"] + for addon in response.data["addons"] + } + + class _AyonSettingsCache: use_bundles = None variant = None @@ -67,53 +100,70 @@ class _AyonSettingsCache: return _AyonSettingsCache.variant @classmethod - def _get_bundle_name(cls): + def _get_studio_bundle_name(cls): + bundle_name = os.environ.get("AYON_STUDIO_BUNDLE_NAME") + if bundle_name: + return bundle_name + return os.environ["AYON_BUNDLE_NAME"] + + @classmethod + def _get_project_bundle_name(cls): return os.environ["AYON_BUNDLE_NAME"] @classmethod def get_value_by_project(cls, project_name): cache_item = _AyonSettingsCache.cache_by_project_name[project_name] if cache_item.is_outdated: - if cls._use_bundles(): - value = ayon_api.get_addons_settings( - bundle_name=cls._get_bundle_name(), + cache_item.update_value( + _get_addons_settings( + studio_bundle_name=cls._get_studio_bundle_name(), + project_bundle_name=cls._get_project_bundle_name(), project_name=project_name, - variant=cls._get_variant() + variant=cls._get_variant(), ) - else: - value = ayon_api.get_addons_settings(project_name) - cache_item.update_value(value) + ) return cache_item.get_value() @classmethod def _get_addon_versions_from_bundle(cls): - expected_bundle = cls._get_bundle_name() + studio_bundle_name = cls._get_studio_bundle_name() + project_bundle_name = cls._get_project_bundle_name() bundles = ayon_api.get_bundles()["bundles"] - bundle = next( + project_bundle = next( ( bundle for bundle in bundles - if bundle["name"] == expected_bundle + if bundle["name"] == project_bundle_name ), None ) - if bundle is not None: - return bundle["addons"] + studio_bundle = None + if studio_bundle_name and project_bundle_name != studio_bundle_name: + studio_bundle = next( + ( + bundle + for bundle in bundles + if bundle["name"] == studio_bundle_name + ), + None + ) + + if studio_bundle and project_bundle: + addons = copy.deepcopy(studio_bundle["addons"]) + addons.update(project_bundle["addons"]) + project_bundle["addons"] = addons + + if project_bundle is not None: + return project_bundle["addons"] return {} @classmethod def get_addon_versions(cls): cache_item = _AyonSettingsCache.addon_versions if cache_item.is_outdated: - if cls._use_bundles(): - addons = cls._get_addon_versions_from_bundle() - else: - settings_data = ayon_api.get_addons_settings( - only_values=False, - variant=cls._get_variant() - ) - addons = settings_data["versions"] - cache_item.update_value(addons) + cache_item.update_value( + cls._get_addon_versions_from_bundle() + ) return cache_item.get_value() @@ -175,17 +225,22 @@ def get_project_environments(project_name, project_settings=None): def get_current_project_settings(): - """Project settings for current context project. + """DEPRECATE Project settings for current context project. + + Function requires access to pipeline context which is in + 'ayon_core.pipeline'. + + Returns: + dict[str, Any]: Project settings for current context project. - Project name should be stored in environment variable `AYON_PROJECT_NAME`. - This function should be used only in host context where environment - variable must be set and should not happen that any part of process will - change the value of the environment variable. """ - project_name = os.environ.get("AYON_PROJECT_NAME") - if not project_name: - raise ValueError( - "Missing context project in environment" - " variable `AYON_PROJECT_NAME`." 
- ) - return get_project_settings(project_name) + warnings.warn( + "Used deprecated function 'get_current_project_settings' in" + " 'ayon_core.settings'. The function was moved to" + " 'ayon_core.pipeline.context_tools'.", + DeprecationWarning, + stacklevel=2 + ) + from ayon_core.pipeline.context_tools import get_current_project_settings + + return get_current_project_settings() diff --git a/client/ayon_core/style/data.json b/client/ayon_core/style/data.json index 24629ec085..56d2190e09 100644 --- a/client/ayon_core/style/data.json +++ b/client/ayon_core/style/data.json @@ -97,6 +97,7 @@ }, "publisher": { "error": "#AA5050", + "disabled": "#5b6779", "crash": "#FF6432", "success": "#458056", "warning": "#ffc671", diff --git a/client/ayon_core/style/style.css b/client/ayon_core/style/style.css index b26d36fb7e..0d057beb7b 100644 --- a/client/ayon_core/style/style.css +++ b/client/ayon_core/style/style.css @@ -1153,6 +1153,10 @@ PixmapButton:disabled { color: {color:publisher:error}; } +#ListViewProductName[state="disabled"] { + color: {color:publisher:disabled}; +} + #PublishInfoFrame { background: {color:bg}; border-radius: 0.3em; diff --git a/client/ayon_core/tools/attribute_defs/files_widget.py b/client/ayon_core/tools/attribute_defs/files_widget.py index 8a40b3ff38..4c55ae5620 100644 --- a/client/ayon_core/tools/attribute_defs/files_widget.py +++ b/client/ayon_core/tools/attribute_defs/files_widget.py @@ -892,6 +892,29 @@ class FilesWidget(QtWidgets.QFrame): self._add_filepaths(new_items) self._remove_item_by_ids(item_ids) + def _on_merge_request(self): + if self._multivalue: + return + + item_ids = self._files_view.get_selected_item_ids() + if not item_ids: + return + + all_paths = set() + merged_item_ids = set() + for item_id in item_ids: + file_item = self._files_model.get_file_item_by_id(item_id) + if file_item is None: + continue + merged_item_ids.add(item_id) + all_paths |= { + os.path.join(file_item.directory, filename) + for filename in file_item.filenames + } + self._remove_item_by_ids(merged_item_ids) + new_items = FileDefItem.from_value(list(all_paths), True) + self._add_filepaths(new_items) + def _on_remove_requested(self): if self._multivalue: return @@ -911,6 +934,9 @@ class FilesWidget(QtWidgets.QFrame): split_action.triggered.connect(self._on_split_request) menu.addAction(split_action) + merge_action = QtWidgets.QAction("Merge sequence", menu) + merge_action.triggered.connect(self._on_merge_request) + menu.addAction(merge_action) remove_action = QtWidgets.QAction("Remove", menu) remove_action.triggered.connect(self._on_remove_requested) menu.addAction(remove_action) diff --git a/client/ayon_core/tools/common_models/__init__.py b/client/ayon_core/tools/common_models/__init__.py index ec69e20b64..77cc2dfb0f 100644 --- a/client/ayon_core/tools/common_models/__init__.py +++ b/client/ayon_core/tools/common_models/__init__.py @@ -10,6 +10,7 @@ from .projects import ( PROJECTS_MODEL_SENDER, FolderTypeItem, TaskTypeItem, + ProductTypeIconMapping, ) from .hierarchy import ( FolderItem, @@ -34,6 +35,7 @@ __all__ = ( "PROJECTS_MODEL_SENDER", "FolderTypeItem", "TaskTypeItem", + "ProductTypeIconMapping", "FolderItem", "TaskItem", diff --git a/client/ayon_core/tools/common_models/projects.py b/client/ayon_core/tools/common_models/projects.py index 034947de3a..250c3b020d 100644 --- a/client/ayon_core/tools/common_models/projects.py +++ b/client/ayon_core/tools/common_models/projects.py @@ -2,7 +2,7 @@ from __future__ import annotations import contextlib from abc import ABC, 
abstractmethod -from typing import Dict, Any +from typing import Any, Optional from dataclasses import dataclass import ayon_api @@ -51,7 +51,7 @@ class StatusItem: self.icon: str = icon self.state: str = state - def to_data(self) -> Dict[str, Any]: + def to_data(self) -> dict[str, Any]: return { "name": self.name, "color": self.color, @@ -125,16 +125,24 @@ class TaskTypeItem: icon (str): Icon name in MaterialIcons ("fiber_new"). """ - def __init__(self, name, short, icon): + def __init__( + self, + name: str, + short: str, + icon: str, + color: Optional[str], + ): self.name = name self.short = short self.icon = icon + self.color = color def to_data(self): return { "name": self.name, "short": self.short, "icon": self.icon, + "color": self.color, } @classmethod @@ -147,6 +155,7 @@ class TaskTypeItem: name=task_type_data["name"], short=task_type_data["shortName"], icon=task_type_data["icon"], + color=task_type_data.get("color"), ) @@ -218,6 +227,54 @@ class ProjectItem: return cls(**data) +class ProductTypeIconMapping: + def __init__( + self, + default: Optional[dict[str, str]] = None, + definitions: Optional[list[dict[str, str]]] = None, + ): + self._default = default or {} + self._definitions = definitions or [] + + self._default_def = None + self._definitions_by_name = None + + def get_icon( + self, + product_base_type: Optional[str] = None, + product_type: Optional[str] = None, + ) -> dict[str, str]: + defs = self._get_defs_by_name() + icon = defs.get(product_type) + if icon is None: + icon = defs.get(product_base_type) + if icon is None: + icon = self._get_default_def() + return icon.copy() + + def _get_default_def(self) -> dict[str, str]: + if self._default_def is None: + self._default_def = { + "type": "material-symbols", + "name": self._default.get("icon", "deployed_code"), + "color": self._default.get("color", "#cccccc"), + } + + return self._default_def + + def _get_defs_by_name(self) -> dict[str, dict[str, str]]: + if self._definitions_by_name is None: + self._definitions_by_name = { + product_base_type_def["name"]: { + "type": "material-symbols", + "name": product_base_type_def.get("icon", "deployed_code"), + "color": product_base_type_def.get("color", "#cccccc"), + } + for product_base_type_def in self._definitions + } + return self._definitions_by_name + + def _get_project_items_from_entitiy( projects: list[dict[str, Any]] ) -> list[ProjectItem]: @@ -242,6 +299,9 @@ class ProjectsModel(object): self._projects_by_name = NestedCacheItem( levels=1, default_factory=list ) + self._product_type_icons_mapping = NestedCacheItem( + levels=1, default_factory=ProductTypeIconMapping + ) self._project_statuses_cache = {} self._folder_types_cache = {} self._task_types_cache = {} @@ -255,6 +315,7 @@ class ProjectsModel(object): self._task_types_cache = {} self._projects_cache.reset() self._projects_by_name.reset() + self._product_type_icons_mapping.reset() def refresh(self): """Refresh project items. 
@@ -390,6 +451,27 @@ class ProjectsModel(object): self._task_type_items_getter, ) + def get_product_type_icons_mapping( + self, project_name: Optional[str] + ) -> ProductTypeIconMapping: + cache = self._product_type_icons_mapping[project_name] + if cache.is_valid: + return cache.get_data() + + project_entity = self.get_project_entity(project_name) + icons_mapping = ProductTypeIconMapping() + if project_entity: + product_base_types = ( + project_entity["config"].get("productBaseTypes", {}) + ) + icons_mapping = ProductTypeIconMapping( + product_base_types.get("default"), + product_base_types.get("definitions") + ) + + cache.update_data(icons_mapping) + return icons_mapping + def _get_project_items( self, project_name, sender, item_type, cache_obj, getter ): diff --git a/client/ayon_core/tools/creator/__init__.py b/client/ayon_core/tools/creator/__init__.py deleted file mode 100644 index 585b8bdf80..0000000000 --- a/client/ayon_core/tools/creator/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from .window import ( - show, - CreatorWindow -) - -__all__ = ( - "show", - "CreatorWindow" -) diff --git a/client/ayon_core/tools/creator/constants.py b/client/ayon_core/tools/creator/constants.py deleted file mode 100644 index ec555fbe9c..0000000000 --- a/client/ayon_core/tools/creator/constants.py +++ /dev/null @@ -1,8 +0,0 @@ -from qtpy import QtCore - - -PRODUCT_TYPE_ROLE = QtCore.Qt.UserRole + 1 -ITEM_ID_ROLE = QtCore.Qt.UserRole + 2 - -SEPARATOR = "---" -SEPARATORS = {"---", "---separator---"} diff --git a/client/ayon_core/tools/creator/model.py b/client/ayon_core/tools/creator/model.py deleted file mode 100644 index bf6c7380a1..0000000000 --- a/client/ayon_core/tools/creator/model.py +++ /dev/null @@ -1,61 +0,0 @@ -import uuid -from qtpy import QtGui, QtCore - -from ayon_core.pipeline import discover_legacy_creator_plugins - -from . 
constants import ( - PRODUCT_TYPE_ROLE, - ITEM_ID_ROLE -) - - -class CreatorsModel(QtGui.QStandardItemModel): - def __init__(self, *args, **kwargs): - super(CreatorsModel, self).__init__(*args, **kwargs) - - self._creators_by_id = {} - - def reset(self): - # TODO change to refresh when clearing is not needed - self.clear() - self._creators_by_id = {} - - items = [] - creators = discover_legacy_creator_plugins() - for creator in creators: - if not creator.enabled: - continue - item_id = str(uuid.uuid4()) - self._creators_by_id[item_id] = creator - - label = creator.label or creator.product_type - item = QtGui.QStandardItem(label) - item.setEditable(False) - item.setData(item_id, ITEM_ID_ROLE) - item.setData(creator.product_type, PRODUCT_TYPE_ROLE) - items.append(item) - - if not items: - item = QtGui.QStandardItem("No registered create plugins") - item.setEnabled(False) - item.setData(False, QtCore.Qt.ItemIsEnabled) - items.append(item) - - items.sort(key=lambda item: item.text()) - self.invisibleRootItem().appendRows(items) - - def get_creator_by_id(self, item_id): - return self._creators_by_id.get(item_id) - - def get_indexes_by_product_type(self, product_type): - indexes = [] - for row in range(self.rowCount()): - index = self.index(row, 0) - item_id = index.data(ITEM_ID_ROLE) - creator_plugin = self._creators_by_id.get(item_id) - if creator_plugin and ( - creator_plugin.label.lower() == product_type.lower() - or creator_plugin.product_type.lower() == product_type.lower() - ): - indexes.append(index) - return indexes diff --git a/client/ayon_core/tools/creator/widgets.py b/client/ayon_core/tools/creator/widgets.py deleted file mode 100644 index bbc6848e6c..0000000000 --- a/client/ayon_core/tools/creator/widgets.py +++ /dev/null @@ -1,275 +0,0 @@ -import re -import inspect - -from qtpy import QtWidgets, QtCore, QtGui - -import qtawesome - -from ayon_core.pipeline.create import PRODUCT_NAME_ALLOWED_SYMBOLS -from ayon_core.tools.utils import ErrorMessageBox - -if hasattr(QtGui, "QRegularExpressionValidator"): - RegularExpressionValidatorClass = QtGui.QRegularExpressionValidator - RegularExpressionClass = QtCore.QRegularExpression -else: - RegularExpressionValidatorClass = QtGui.QRegExpValidator - RegularExpressionClass = QtCore.QRegExp - - -class CreateErrorMessageBox(ErrorMessageBox): - def __init__( - self, - product_type, - product_name, - folder_path, - exc_msg, - formatted_traceback, - parent - ): - self._product_type = product_type - self._product_name = product_name - self._folder_path = folder_path - self._exc_msg = exc_msg - self._formatted_traceback = formatted_traceback - super(CreateErrorMessageBox, self).__init__("Creation failed", parent) - - def _create_top_widget(self, parent_widget): - label_widget = QtWidgets.QLabel(parent_widget) - label_widget.setText( - "Failed to create" - ) - return label_widget - - def _get_report_data(self): - report_message = ( - "Failed to create Product: \"{product_name}\"" - " Type: \"{product_type}\"" - " in Folder: \"{folder_path}\"" - "\n\nError: {message}" - ).format( - product_name=self._product_name, - product_type=self._product_type, - folder_path=self._folder_path, - message=self._exc_msg - ) - if self._formatted_traceback: - report_message += "\n\n{}".format(self._formatted_traceback) - return [report_message] - - def _create_content(self, content_layout): - item_name_template = ( - "{}: {{}}
" - "{}: {{}}
" - "{}: {{}}
" - ).format( - "Product type", - "Product name", - "Folder" - ) - exc_msg_template = "{}" - - line = self._create_line() - content_layout.addWidget(line) - - item_name_widget = QtWidgets.QLabel(self) - item_name_widget.setText( - item_name_template.format( - self._product_type, self._product_name, self._folder_path - ) - ) - content_layout.addWidget(item_name_widget) - - message_label_widget = QtWidgets.QLabel(self) - message_label_widget.setText( - exc_msg_template.format(self.convert_text_for_html(self._exc_msg)) - ) - content_layout.addWidget(message_label_widget) - - if self._formatted_traceback: - line_widget = self._create_line() - tb_widget = self._create_traceback_widget( - self._formatted_traceback - ) - content_layout.addWidget(line_widget) - content_layout.addWidget(tb_widget) - - -class ProductNameValidator(RegularExpressionValidatorClass): - invalid = QtCore.Signal(set) - pattern = "^[{}]*$".format(PRODUCT_NAME_ALLOWED_SYMBOLS) - - def __init__(self): - reg = RegularExpressionClass(self.pattern) - super(ProductNameValidator, self).__init__(reg) - - def validate(self, text, pos): - results = super(ProductNameValidator, self).validate(text, pos) - if results[0] == RegularExpressionValidatorClass.Invalid: - self.invalid.emit(self.invalid_chars(text)) - return results - - def invalid_chars(self, text): - invalid = set() - re_valid = re.compile(self.pattern) - for char in text: - if char == " ": - invalid.add("' '") - continue - if not re_valid.match(char): - invalid.add(char) - return invalid - - -class VariantLineEdit(QtWidgets.QLineEdit): - report = QtCore.Signal(str) - colors = { - "empty": (QtGui.QColor("#78879b"), ""), - "exists": (QtGui.QColor("#4E76BB"), "border-color: #4E76BB;"), - "new": (QtGui.QColor("#7AAB8F"), "border-color: #7AAB8F;"), - } - - def __init__(self, *args, **kwargs): - super(VariantLineEdit, self).__init__(*args, **kwargs) - - validator = ProductNameValidator() - self.setValidator(validator) - self.setToolTip("Only alphanumeric characters (A-Z a-z 0-9), " - "'_' and '.' are allowed.") - - self._status_color = self.colors["empty"][0] - - anim = QtCore.QPropertyAnimation() - anim.setTargetObject(self) - anim.setPropertyName(b"status_color") - anim.setEasingCurve(QtCore.QEasingCurve.InCubic) - anim.setDuration(300) - anim.setStartValue(QtGui.QColor("#C84747")) # `Invalid` status color - self.animation = anim - - validator.invalid.connect(self.on_invalid) - - def on_invalid(self, invalid): - message = "Invalid character: %s" % ", ".join(invalid) - self.report.emit(message) - self.animation.stop() - self.animation.start() - - def as_empty(self): - self._set_border("empty") - self.report.emit("Empty product name ..") - - def as_exists(self): - self._set_border("exists") - self.report.emit("Existing product, appending next version.") - - def as_new(self): - self._set_border("new") - self.report.emit("New product, creating first version.") - - def _set_border(self, status): - qcolor, style = self.colors[status] - self.animation.setEndValue(qcolor) - self.setStyleSheet(style) - - def _get_status_color(self): - return self._status_color - - def _set_status_color(self, color): - self._status_color = color - self.setStyleSheet("border-color: %s;" % color.name()) - - status_color = QtCore.Property( - QtGui.QColor, _get_status_color, _set_status_color - ) - - -class ProductTypeDescriptionWidget(QtWidgets.QWidget): - """A product type description widget. - - Shows a product type icon, name and a help description. - Used in creator header. 
- - _______________________ - | ____ | - | |icon| PRODUCT TYPE | - | |____| help | - |_______________________| - - """ - - SIZE = 35 - - def __init__(self, parent=None): - super(ProductTypeDescriptionWidget, self).__init__(parent=parent) - - icon_label = QtWidgets.QLabel(self) - icon_label.setSizePolicy( - QtWidgets.QSizePolicy.Maximum, - QtWidgets.QSizePolicy.Maximum - ) - - # Add 4 pixel padding to avoid icon being cut off - icon_label.setFixedWidth(self.SIZE + 4) - icon_label.setFixedHeight(self.SIZE + 4) - - label_layout = QtWidgets.QVBoxLayout() - label_layout.setSpacing(0) - - product_type_label = QtWidgets.QLabel(self) - product_type_label.setObjectName("CreatorProductTypeLabel") - product_type_label.setAlignment( - QtCore.Qt.AlignBottom | QtCore.Qt.AlignLeft - ) - - help_label = QtWidgets.QLabel(self) - help_label.setAlignment(QtCore.Qt.AlignTop | QtCore.Qt.AlignLeft) - - label_layout.addWidget(product_type_label) - label_layout.addWidget(help_label) - - layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.setSpacing(5) - layout.addWidget(icon_label) - layout.addLayout(label_layout) - - self._help_label = help_label - self._product_type_label = product_type_label - self._icon_label = icon_label - - def set_item(self, creator_plugin): - """Update elements to display information of a product type item. - - Args: - creator_plugin (dict): A product type item as registered with - name, help and icon. - - Returns: - None - - """ - if not creator_plugin: - self._icon_label.setPixmap(None) - self._product_type_label.setText("") - self._help_label.setText("") - return - - # Support a font-awesome icon - icon_name = getattr(creator_plugin, "icon", None) or "info-circle" - try: - icon = qtawesome.icon("fa.{}".format(icon_name), color="white") - pixmap = icon.pixmap(self.SIZE, self.SIZE) - except Exception: - print("BUG: Couldn't load icon \"fa.{}\"".format(str(icon_name))) - # Create transparent pixmap - pixmap = QtGui.QPixmap() - pixmap.fill(QtCore.Qt.transparent) - pixmap = pixmap.scaled(self.SIZE, self.SIZE) - - # Parse a clean line from the Creator's docstring - docstring = inspect.getdoc(creator_plugin) - creator_help = docstring.splitlines()[0] if docstring else "" - - self._icon_label.setPixmap(pixmap) - self._product_type_label.setText(creator_plugin.product_type) - self._help_label.setText(creator_help) diff --git a/client/ayon_core/tools/creator/window.py b/client/ayon_core/tools/creator/window.py deleted file mode 100644 index 5d1c0a272a..0000000000 --- a/client/ayon_core/tools/creator/window.py +++ /dev/null @@ -1,508 +0,0 @@ -import sys -import traceback -import re - -import ayon_api -from qtpy import QtWidgets, QtCore - -from ayon_core import style -from ayon_core.settings import get_current_project_settings -from ayon_core.tools.utils.lib import qt_app_context -from ayon_core.pipeline import ( - get_current_project_name, - get_current_folder_path, - get_current_task_name, -) -from ayon_core.pipeline.create import ( - PRODUCT_NAME_ALLOWED_SYMBOLS, - legacy_create, - CreatorError, -) - -from .model import CreatorsModel -from .widgets import ( - CreateErrorMessageBox, - VariantLineEdit, - ProductTypeDescriptionWidget -) -from .constants import ( - ITEM_ID_ROLE, - SEPARATOR, - SEPARATORS -) - -module = sys.modules[__name__] -module.window = None - - -class CreatorWindow(QtWidgets.QDialog): - def __init__(self, parent=None): - super(CreatorWindow, self).__init__(parent) - self.setWindowTitle("Instance Creator") - 
self.setFocusPolicy(QtCore.Qt.StrongFocus) - if not parent: - self.setWindowFlags( - self.windowFlags() | QtCore.Qt.WindowStaysOnTopHint - ) - - creator_info = ProductTypeDescriptionWidget(self) - - creators_model = CreatorsModel() - - creators_proxy = QtCore.QSortFilterProxyModel() - creators_proxy.setSourceModel(creators_model) - - creators_view = QtWidgets.QListView(self) - creators_view.setObjectName("CreatorsView") - creators_view.setModel(creators_proxy) - - folder_path_input = QtWidgets.QLineEdit(self) - variant_input = VariantLineEdit(self) - product_name_input = QtWidgets.QLineEdit(self) - product_name_input.setEnabled(False) - - variants_btn = QtWidgets.QPushButton() - variants_btn.setFixedWidth(18) - variants_menu = QtWidgets.QMenu(variants_btn) - variants_btn.setMenu(variants_menu) - - name_layout = QtWidgets.QHBoxLayout() - name_layout.addWidget(variant_input) - name_layout.addWidget(variants_btn) - name_layout.setSpacing(3) - name_layout.setContentsMargins(0, 0, 0, 0) - - body_layout = QtWidgets.QVBoxLayout() - body_layout.setContentsMargins(0, 0, 0, 0) - - body_layout.addWidget(creator_info, 0) - body_layout.addWidget(QtWidgets.QLabel("Product type", self), 0) - body_layout.addWidget(creators_view, 1) - body_layout.addWidget(QtWidgets.QLabel("Folder path", self), 0) - body_layout.addWidget(folder_path_input, 0) - body_layout.addWidget(QtWidgets.QLabel("Product name", self), 0) - body_layout.addLayout(name_layout, 0) - body_layout.addWidget(product_name_input, 0) - - useselection_chk = QtWidgets.QCheckBox("Use selection", self) - useselection_chk.setCheckState(QtCore.Qt.Checked) - - create_btn = QtWidgets.QPushButton("Create", self) - # Need to store error_msg to prevent garbage collection - msg_label = QtWidgets.QLabel(self) - - footer_layout = QtWidgets.QVBoxLayout() - footer_layout.addWidget(create_btn, 0) - footer_layout.addWidget(msg_label, 0) - footer_layout.setContentsMargins(0, 0, 0, 0) - - layout = QtWidgets.QVBoxLayout(self) - layout.addLayout(body_layout, 1) - layout.addWidget(useselection_chk, 0, QtCore.Qt.AlignLeft) - layout.addLayout(footer_layout, 0) - - msg_timer = QtCore.QTimer() - msg_timer.setSingleShot(True) - msg_timer.setInterval(5000) - - validation_timer = QtCore.QTimer() - validation_timer.setSingleShot(True) - validation_timer.setInterval(300) - - msg_timer.timeout.connect(self._on_msg_timer) - validation_timer.timeout.connect(self._on_validation_timer) - - create_btn.clicked.connect(self._on_create) - variant_input.returnPressed.connect(self._on_create) - variant_input.textChanged.connect(self._on_data_changed) - variant_input.report.connect(self.echo) - folder_path_input.textChanged.connect(self._on_data_changed) - creators_view.selectionModel().currentChanged.connect( - self._on_selection_changed - ) - - # Store valid states and - self._is_valid = False - create_btn.setEnabled(self._is_valid) - - self._first_show = True - - # Message dialog when something goes wrong during creation - self._message_dialog = None - - self._creator_info = creator_info - self._create_btn = create_btn - self._useselection_chk = useselection_chk - self._variant_input = variant_input - self._product_name_input = product_name_input - self._folder_path_input = folder_path_input - - self._creators_model = creators_model - self._creators_proxy = creators_proxy - self._creators_view = creators_view - - self._variants_btn = variants_btn - self._variants_menu = variants_menu - - self._msg_label = msg_label - - self._validation_timer = validation_timer - self._msg_timer = 
msg_timer - - # Defaults - self.resize(300, 500) - variant_input.setFocus() - - def _set_valid_state(self, valid): - if self._is_valid == valid: - return - self._is_valid = valid - self._create_btn.setEnabled(valid) - - def _build_menu(self, default_names=None): - """Create optional predefined variants. - - Args: - default_names(list): all predefined names - - Returns: - None - """ - if not default_names: - default_names = [] - - menu = self._variants_menu - button = self._variants_btn - - # Get and destroy the action group - group = button.findChild(QtWidgets.QActionGroup) - if group: - group.deleteLater() - - state = any(default_names) - button.setEnabled(state) - if state is False: - return - - # Build new action group - group = QtWidgets.QActionGroup(button) - for name in default_names: - if name in SEPARATORS: - menu.addSeparator() - continue - action = group.addAction(name) - menu.addAction(action) - - group.triggered.connect(self._on_action_clicked) - - def _on_action_clicked(self, action): - self._variant_input.setText(action.text()) - - def _on_data_changed(self, *args): - # Set invalid state until it's reconfirmed to be valid by the - # scheduled callback so any form of creation is held back until - # valid again - self._set_valid_state(False) - - self._validation_timer.start() - - def _on_validation_timer(self): - index = self._creators_view.currentIndex() - item_id = index.data(ITEM_ID_ROLE) - creator_plugin = self._creators_model.get_creator_by_id(item_id) - user_input_text = self._variant_input.text() - folder_path = self._folder_path_input.text() - - # Early exit if no folder path - if not folder_path: - self._build_menu() - self.echo("Folder is required ..") - self._set_valid_state(False) - return - - project_name = get_current_project_name() - folder_entity = None - if creator_plugin: - # Get the folder from the database which match with the name - folder_entity = ayon_api.get_folder_by_path( - project_name, folder_path, fields={"id"} - ) - - # Get plugin - if not folder_entity or not creator_plugin: - self._build_menu() - - if not creator_plugin: - self.echo("No registered product types ..") - else: - self.echo("Folder '{}' not found ..".format(folder_path)) - self._set_valid_state(False) - return - - folder_id = folder_entity["id"] - - task_name = get_current_task_name() - task_entity = ayon_api.get_task_by_name( - project_name, folder_id, task_name - ) - - # Calculate product name with Creator plugin - product_name = creator_plugin.get_product_name( - project_name, folder_entity, task_entity, user_input_text - ) - # Force replacement of prohibited symbols - # QUESTION should Creator care about this and here should be only - # validated with schema regex? 
- - # Allow curly brackets in product name for dynamic keys - curly_left = "__cbl__" - curly_right = "__cbr__" - tmp_product_name = ( - product_name - .replace("{", curly_left) - .replace("}", curly_right) - ) - # Replace prohibited symbols - tmp_product_name = re.sub( - "[^{}]+".format(PRODUCT_NAME_ALLOWED_SYMBOLS), - "", - tmp_product_name - ) - product_name = ( - tmp_product_name - .replace(curly_left, "{") - .replace(curly_right, "}") - ) - self._product_name_input.setText(product_name) - - # Get all products of the current folder - product_entities = ayon_api.get_products( - project_name, folder_ids={folder_id}, fields={"name"} - ) - existing_product_names = { - product_entity["name"] - for product_entity in product_entities - } - existing_product_names_low = set( - _name.lower() - for _name in existing_product_names - ) - - # Defaults to dropdown - defaults = [] - # Check if Creator plugin has set defaults - if ( - creator_plugin.defaults - and isinstance(creator_plugin.defaults, (list, tuple, set)) - ): - defaults = list(creator_plugin.defaults) - - # Replace - compare_regex = re.compile(re.sub( - user_input_text, "(.+)", product_name, flags=re.IGNORECASE - )) - variant_hints = set() - if user_input_text: - for _name in existing_product_names: - _result = compare_regex.search(_name) - if _result: - variant_hints |= set(_result.groups()) - - if variant_hints: - if defaults: - defaults.append(SEPARATOR) - defaults.extend(variant_hints) - self._build_menu(defaults) - - # Indicate product existence - if not user_input_text: - self._variant_input.as_empty() - elif product_name.lower() in existing_product_names_low: - # validate existence of product name with lowered text - # - "renderMain" vs. "rensermain" mean same path item for - # windows - self._variant_input.as_exists() - else: - self._variant_input.as_new() - - # Update the valid state - valid = product_name.strip() != "" - - self._set_valid_state(valid) - - def _on_selection_changed(self, old_idx, new_idx): - index = self._creators_view.currentIndex() - item_id = index.data(ITEM_ID_ROLE) - - creator_plugin = self._creators_model.get_creator_by_id(item_id) - - self._creator_info.set_item(creator_plugin) - - if creator_plugin is None: - return - - default = None - if hasattr(creator_plugin, "get_default_variant"): - default = creator_plugin.get_default_variant() - - if not default: - if ( - creator_plugin.defaults - and isinstance(creator_plugin.defaults, list) - ): - default = creator_plugin.defaults[0] - else: - default = "Default" - - self._variant_input.setText(default) - - self._on_data_changed() - - def keyPressEvent(self, event): - """Custom keyPressEvent. - - Override keyPressEvent to do nothing so that Maya's panels won't - take focus when pressing "SHIFT" whilst mouse is over viewport or - outliner. This way users don't accidentally perform Maya commands - whilst trying to name an instance. 
- - """ - pass - - def showEvent(self, event): - super(CreatorWindow, self).showEvent(event) - if self._first_show: - self._first_show = False - self.setStyleSheet(style.load_stylesheet()) - - def refresh(self): - self._folder_path_input.setText(get_current_folder_path()) - - self._creators_model.reset() - - product_types_smart_select = ( - get_current_project_settings() - ["core"] - ["tools"] - ["creator"] - ["product_types_smart_select"] - ) - current_index = None - product_type = None - task_name = get_current_task_name() or None - lowered_task_name = task_name.lower() - if task_name: - for smart_item in product_types_smart_select: - _low_task_names = { - name.lower() for name in smart_item["task_names"] - } - for _task_name in _low_task_names: - if _task_name in lowered_task_name: - product_type = smart_item["name"] - break - if product_type: - break - - if product_type: - indexes = self._creators_model.get_indexes_by_product_type( - product_type - ) - if indexes: - index = indexes[0] - current_index = self._creators_proxy.mapFromSource(index) - - if current_index is None or not current_index.isValid(): - current_index = self._creators_proxy.index(0, 0) - - self._creators_view.setCurrentIndex(current_index) - - def _on_create(self): - # Do not allow creation in an invalid state - if not self._is_valid: - return - - index = self._creators_view.currentIndex() - item_id = index.data(ITEM_ID_ROLE) - creator_plugin = self._creators_model.get_creator_by_id(item_id) - if creator_plugin is None: - return - - product_name = self._product_name_input.text() - folder_path = self._folder_path_input.text() - use_selection = self._useselection_chk.isChecked() - - variant = self._variant_input.text() - - error_info = None - try: - legacy_create( - creator_plugin, - product_name, - folder_path, - options={"useSelection": use_selection}, - data={"variant": variant} - ) - - except CreatorError as exc: - self.echo("Creator error: {}".format(str(exc))) - error_info = (str(exc), None) - - except Exception as exc: - self.echo("Program error: %s" % str(exc)) - - exc_type, exc_value, exc_traceback = sys.exc_info() - formatted_traceback = "".join(traceback.format_exception( - exc_type, exc_value, exc_traceback - )) - error_info = (str(exc), formatted_traceback) - - if error_info: - box = CreateErrorMessageBox( - creator_plugin.product_type, - product_name, - folder_path, - *error_info, - parent=self - ) - box.show() - # Store dialog so is not garbage collected before is shown - self._message_dialog = box - - else: - self.echo("Created %s .." % product_name) - - def _on_msg_timer(self): - self._msg_label.setText("") - - def echo(self, message): - self._msg_label.setText(str(message)) - self._msg_timer.start() - - -def show(parent=None): - """Display product creator GUI - - Arguments: - debug (bool, optional): Run loader in debug-mode, - defaults to False - parent (QtCore.QObject, optional): When provided parent the interface - to this QObject. - - """ - - try: - module.window.close() - del module.window - except (AttributeError, RuntimeError): - pass - - with qt_app_context(): - window = CreatorWindow(parent) - window.refresh() - window.show() - - module.window = window - - # Pull window to the front. 
- module.window.raise_() - module.window.activateWindow() diff --git a/client/ayon_core/tools/launcher/abstract.py b/client/ayon_core/tools/launcher/abstract.py index 1d7dafd62f..a94500116b 100644 --- a/client/ayon_core/tools/launcher/abstract.py +++ b/client/ayon_core/tools/launcher/abstract.py @@ -4,6 +4,7 @@ from abc import ABC, abstractmethod from dataclasses import dataclass from typing import Optional, Any +from ayon_core.addon import AddonsManager from ayon_core.tools.common_models import ( ProjectItem, FolderItem, @@ -20,6 +21,7 @@ class WebactionContext: project_name: str folder_id: str task_id: str + workfile_id: str addon_name: str addon_version: str @@ -33,7 +35,7 @@ class ActionItem: identifier (str): Unique identifier of action item. order (int): Action ordering. label (str): Action label. - variant_label (Union[str, None]): Variant label, full label is + variant_label (Optional[str]): Variant label, full label is concatenated with space. Actions are grouped under single action if it has same 'label' and have set 'variant_label'. full_label (str): Full label, if not set it is generated @@ -56,6 +58,15 @@ class ActionItem: addon_version: Optional[str] = None +@dataclass +class WorkfileItem: + workfile_id: str + filename: str + exists: bool + icon: Optional[str] + version: Optional[int] + + class AbstractLauncherCommon(ABC): @abstractmethod def register_event_callback(self, topic, callback): @@ -85,12 +96,16 @@ class AbstractLauncherBackend(AbstractLauncherCommon): pass + @abstractmethod + def get_addons_manager(self) -> AddonsManager: + pass + @abstractmethod def get_project_settings(self, project_name): """Project settings for current project. Args: - project_name (Union[str, None]): Project name. + project_name (Optional[str]): Project name. Returns: dict[str, Any]: Project settings. @@ -254,7 +269,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon): """Selected project name. Returns: - Union[str, None]: Selected project name. + Optional[str]: Selected project name. """ pass @@ -264,7 +279,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon): """Selected folder id. Returns: - Union[str, None]: Selected folder id. + Optional[str]: Selected folder id. """ pass @@ -274,7 +289,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon): """Selected task id. Returns: - Union[str, None]: Selected task id. + Optional[str]: Selected task id. """ pass @@ -284,7 +299,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon): """Selected task name. Returns: - Union[str, None]: Selected task name. + Optional[str]: Selected task name. """ pass @@ -302,7 +317,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon): } Returns: - dict[str, Union[str, None]]: Selected context. + dict[str, Optional[str]]: Selected context. """ pass @@ -312,7 +327,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon): """Change selected folder. Args: - project_name (Union[str, None]): Project nameor None if no project + project_name (Optional[str]): Project nameor None if no project is selected. """ @@ -323,7 +338,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon): """Change selected folder. Args: - folder_id (Union[str, None]): Folder id or None if no folder + folder_id (Optional[str]): Folder id or None if no folder is selected. """ @@ -336,14 +351,24 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon): """Change selected task. Args: - task_id (Union[str, None]): Task id or None if no task + task_id (Optional[str]): Task id or None if no task is selected. 
- task_name (Union[str, None]): Task name or None if no task + task_name (Optional[str]): Task name or None if no task is selected. """ pass + @abstractmethod + def set_selected_workfile(self, workfile_id: Optional[str]): + """Change selected workfile. + + Args: + workfile_id (Optional[str]): Workfile id or None. + + """ + pass + # Actions @abstractmethod def get_action_items( @@ -351,13 +376,15 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon): project_name: Optional[str], folder_id: Optional[str], task_id: Optional[str], + workfile_id: Optional[str], ) -> list[ActionItem]: """Get action items for given context. Args: - project_name (Union[str, None]): Project name. - folder_id (Union[str, None]): Folder id. - task_id (Union[str, None]): Task id. + project_name (Optional[str]): Project name. + folder_id (Optional[str]): Folder id. + task_id (Optional[str]): Task id. + workfile_id (Optional[str]): Workfile id. Returns: list[ActionItem]: List of action items that should be shown @@ -373,14 +400,16 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon): project_name: Optional[str], folder_id: Optional[str], task_id: Optional[str], + workfile_id: Optional[str], ): """Trigger action on given context. Args: action_id (str): Action identifier. - project_name (Union[str, None]): Project name. - folder_id (Union[str, None]): Folder id. - task_id (Union[str, None]): Task id. + project_name (Optional[str]): Project name. + folder_id (Optional[str]): Folder id. + task_id (Optional[str]): Task id. + workfile_id (Optional[str]): Task id. """ pass @@ -465,3 +494,21 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon): """ pass + + @abstractmethod + def get_workfile_items( + self, + project_name: Optional[str], + task_id: Optional[str], + ) -> list[WorkfileItem]: + """Get workfile items for a given context. + + Args: + project_name (Optional[str]): Project name. + task_id (Optional[str]): Task id. + + Returns: + list[WorkfileItem]: List of workfile items. 
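+
+        Example:
+            Minimal sketch, assuming 'controller' is a frontend controller
+            instance with a selected project and task:
+
+            >>> items = controller.get_workfile_items(project_name, task_id)
+            >>> filenames = [item.filename for item in items]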
+ + """ + pass diff --git a/client/ayon_core/tools/launcher/control.py b/client/ayon_core/tools/launcher/control.py index 58d22453be..85b362f9d7 100644 --- a/client/ayon_core/tools/launcher/control.py +++ b/client/ayon_core/tools/launcher/control.py @@ -1,10 +1,21 @@ +from typing import Optional + from ayon_core.lib import Logger, get_ayon_username from ayon_core.lib.events import QueuedEventSystem +from ayon_core.addon import AddonsManager from ayon_core.settings import get_project_settings, get_studio_settings from ayon_core.tools.common_models import ProjectsModel, HierarchyModel -from .abstract import AbstractLauncherFrontEnd, AbstractLauncherBackend -from .models import LauncherSelectionModel, ActionsModel +from .abstract import ( + AbstractLauncherFrontEnd, + AbstractLauncherBackend, + WorkfileItem, +) +from .models import ( + LauncherSelectionModel, + ActionsModel, + WorkfilesModel, +) NOT_SET = object() @@ -17,12 +28,15 @@ class BaseLauncherController( self._event_system = None self._log = None + self._addons_manager = None + self._username = NOT_SET self._selection_model = LauncherSelectionModel(self) self._projects_model = ProjectsModel(self) self._hierarchy_model = HierarchyModel(self) self._actions_model = ActionsModel(self) + self._workfiles_model = WorkfilesModel(self) @property def log(self): @@ -59,6 +73,11 @@ class BaseLauncherController( def register_event_callback(self, topic, callback): self.event_system.add_callback(topic, callback) + def get_addons_manager(self) -> AddonsManager: + if self._addons_manager is None: + self._addons_manager = AddonsManager() + return self._addons_manager + # Entity items for UI def get_project_items(self, sender=None): return self._projects_model.get_project_items(sender) @@ -125,6 +144,9 @@ class BaseLauncherController( def set_selected_task(self, task_id, task_name): self._selection_model.set_selected_task(task_id, task_name) + def set_selected_workfile(self, workfile_id): + self._selection_model.set_selected_workfile(workfile_id) + def get_selected_context(self): return { "project_name": self.get_selected_project_name(), @@ -133,10 +155,24 @@ class BaseLauncherController( "task_name": self.get_selected_task_name(), } + # Workfiles + def get_workfile_items( + self, + project_name: Optional[str], + task_id: Optional[str], + ) -> list[WorkfileItem]: + return self._workfiles_model.get_workfile_items( + project_name, + task_id, + ) + # Actions - def get_action_items(self, project_name, folder_id, task_id): + def get_action_items( + self, project_name, folder_id, task_id, workfile_id + ): return self._actions_model.get_action_items( - project_name, folder_id, task_id) + project_name, folder_id, task_id, workfile_id + ) def trigger_action( self, @@ -144,12 +180,14 @@ class BaseLauncherController( project_name, folder_id, task_id, + workfile_id, ): self._actions_model.trigger_action( identifier, project_name, folder_id, task_id, + workfile_id, ) def trigger_webaction(self, context, action_label, form_data=None): @@ -186,6 +224,8 @@ class BaseLauncherController( self._projects_model.reset() # Refresh actions self._actions_model.refresh() + # Reset workfiles model + self._workfiles_model.reset() self._emit_event("controller.refresh.actions.finished") diff --git a/client/ayon_core/tools/launcher/models/__init__.py b/client/ayon_core/tools/launcher/models/__init__.py index 1bc60c85f0..efc0de96ca 100644 --- a/client/ayon_core/tools/launcher/models/__init__.py +++ b/client/ayon_core/tools/launcher/models/__init__.py @@ -1,8 +1,10 @@ from .actions 
import ActionsModel from .selection import LauncherSelectionModel +from .workfiles import WorkfilesModel __all__ = ( "ActionsModel", "LauncherSelectionModel", + "WorkfilesModel", ) diff --git a/client/ayon_core/tools/launcher/models/actions.py b/client/ayon_core/tools/launcher/models/actions.py index adb8d371ed..709ae2e9a8 100644 --- a/client/ayon_core/tools/launcher/models/actions.py +++ b/client/ayon_core/tools/launcher/models/actions.py @@ -15,7 +15,6 @@ from ayon_core.lib import ( get_settings_variant, run_detached_ayon_launcher_process, ) -from ayon_core.addon import AddonsManager from ayon_core.pipeline.actions import ( discover_launcher_actions, LauncherActionSelection, @@ -104,8 +103,6 @@ class ActionsModel: levels=2, default_factory=list, lifetime=20, ) - self._addons_manager = None - self._variant = get_settings_variant() @staticmethod @@ -131,19 +128,28 @@ class ActionsModel: self._get_action_objects() self._controller.emit_event("actions.refresh.finished") - def get_action_items(self, project_name, folder_id, task_id): + def get_action_items( + self, + project_name: Optional[str], + folder_id: Optional[str], + task_id: Optional[str], + workfile_id: Optional[str], + ) -> list[ActionItem]: """Get actions for project. Args: - project_name (Union[str, None]): Project name. - folder_id (Union[str, None]): Folder id. - task_id (Union[str, None]): Task id. + project_name (Optional[str]): Project name. + folder_id (Optional[str]): Folder id. + task_id (Optional[str]): Task id. + workfile_id (Optional[str]): Workfile id. Returns: list[ActionItem]: List of actions. """ - selection = self._prepare_selection(project_name, folder_id, task_id) + selection = self._prepare_selection( + project_name, folder_id, task_id, workfile_id + ) output = [] action_items = self._get_action_items(project_name) for identifier, action in self._get_action_objects().items(): @@ -159,8 +165,11 @@ class ActionsModel: project_name, folder_id, task_id, + workfile_id, ): - selection = self._prepare_selection(project_name, folder_id, task_id) + selection = self._prepare_selection( + project_name, folder_id, task_id, workfile_id + ) failed = False error_message = None action_label = identifier @@ -202,11 +211,15 @@ class ActionsModel: identifier = context.identifier folder_id = context.folder_id task_id = context.task_id + workfile_id = context.workfile_id project_name = context.project_name addon_name = context.addon_name addon_version = context.addon_version - if task_id: + if workfile_id: + entity_type = "workfile" + entity_ids.append(workfile_id) + elif task_id: entity_type = "task" entity_ids.append(task_id) elif folder_id: @@ -272,6 +285,7 @@ class ActionsModel: "project_name": project_name, "folder_id": folder_id, "task_id": task_id, + "workfile_id": workfile_id, "addon_name": addon_name, "addon_version": addon_version, }) @@ -282,7 +296,10 @@ class ActionsModel: def get_action_config_values(self, context: WebactionContext): selection = self._prepare_selection( - context.project_name, context.folder_id, context.task_id + context.project_name, + context.folder_id, + context.task_id, + context.workfile_id, ) if not selection.is_project_selected: return {} @@ -309,7 +326,10 @@ class ActionsModel: def set_action_config_values(self, context, values): selection = self._prepare_selection( - context.project_name, context.folder_id, context.task_id + context.project_name, + context.folder_id, + context.task_id, + context.workfile_id, ) if not selection.is_project_selected: return {} @@ -333,12 +353,9 @@ class 
ActionsModel: exc_info=True ) - def _get_addons_manager(self): - if self._addons_manager is None: - self._addons_manager = AddonsManager() - return self._addons_manager - - def _prepare_selection(self, project_name, folder_id, task_id): + def _prepare_selection( + self, project_name, folder_id, task_id, workfile_id + ): project_entity = None if project_name: project_entity = self._controller.get_project_entity(project_name) @@ -347,6 +364,7 @@ class ActionsModel: project_name, folder_id, task_id, + workfile_id, project_entity=project_entity, project_settings=project_settings, ) @@ -355,7 +373,12 @@ class ActionsModel: entity_type = None entity_id = None entity_subtypes = [] - if selection.is_task_selected: + if selection.is_workfile_selected: + entity_type = "workfile" + entity_id = selection.workfile_id + entity_subtypes = [] + + elif selection.is_task_selected: entity_type = "task" entity_id = selection.task_entity["id"] entity_subtypes = [selection.task_entity["taskType"]] @@ -399,7 +422,11 @@ class ActionsModel: return cache.get_data() try: - response = ayon_api.post("actions/list", **request_data) + # 'variant' query is supported since AYON backend 1.10.4 + query = urlencode({"variant": self._variant, "mode": "all"}) + response = ayon_api.post( + f"actions/list?{query}", **request_data + ) response.raise_for_status() except Exception: self.log.warning("Failed to collect webactions.", exc_info=True) @@ -513,7 +540,12 @@ class ActionsModel: uri = payload["uri"] else: uri = data["uri"] - run_detached_ayon_launcher_process(uri) + + # Remove bundles from environment variables + env = os.environ.copy() + env.pop("AYON_BUNDLE_NAME", None) + env.pop("AYON_STUDIO_BUNDLE_NAME", None) + run_detached_ayon_launcher_process(uri, env=env) elif response_type in ("query", "navigate"): response.error_message = ( @@ -533,7 +565,7 @@ class ActionsModel: # NOTE We don't need to register the paths, but that would # require to change discovery logic and deprecate all functions # related to registering and discovering launcher actions. - addons_manager = self._get_addons_manager() + addons_manager = self._controller.get_addons_manager() actions_paths = addons_manager.collect_launcher_action_paths() for path in actions_paths: if path and os.path.exists(path): diff --git a/client/ayon_core/tools/launcher/models/selection.py b/client/ayon_core/tools/launcher/models/selection.py index b156d2084c..9d5ad47d89 100644 --- a/client/ayon_core/tools/launcher/models/selection.py +++ b/client/ayon_core/tools/launcher/models/selection.py @@ -1,26 +1,37 @@ -class LauncherSelectionModel(object): +from __future__ import annotations + +import typing +from typing import Optional + +if typing.TYPE_CHECKING: + from ayon_core.tools.launcher.abstract import AbstractLauncherBackend + + +class LauncherSelectionModel: """Model handling selection changes. 
Triggering events: - "selection.project.changed" - "selection.folder.changed" - "selection.task.changed" + - "selection.workfile.changed" """ event_source = "launcher.selection.model" - def __init__(self, controller): + def __init__(self, controller: AbstractLauncherBackend) -> None: self._controller = controller self._project_name = None self._folder_id = None self._task_name = None self._task_id = None + self._workfile_id = None - def get_selected_project_name(self): + def get_selected_project_name(self) -> Optional[str]: return self._project_name - def set_selected_project(self, project_name): + def set_selected_project(self, project_name: Optional[str]) -> None: if project_name == self._project_name: return @@ -31,10 +42,10 @@ class LauncherSelectionModel(object): self.event_source ) - def get_selected_folder_id(self): + def get_selected_folder_id(self) -> Optional[str]: return self._folder_id - def set_selected_folder(self, folder_id): + def set_selected_folder(self, folder_id: Optional[str]) -> None: if folder_id == self._folder_id: return @@ -48,13 +59,15 @@ class LauncherSelectionModel(object): self.event_source ) - def get_selected_task_name(self): + def get_selected_task_name(self) -> Optional[str]: return self._task_name - def get_selected_task_id(self): + def get_selected_task_id(self) -> Optional[str]: return self._task_id - def set_selected_task(self, task_id, task_name): + def set_selected_task( + self, task_id: Optional[str], task_name: Optional[str] + ) -> None: if task_id == self._task_id: return @@ -70,3 +83,23 @@ class LauncherSelectionModel(object): }, self.event_source ) + + def get_selected_workfile(self) -> Optional[str]: + return self._workfile_id + + def set_selected_workfile(self, workfile_id: Optional[str]) -> None: + if workfile_id == self._workfile_id: + return + + self._workfile_id = workfile_id + self._controller.emit_event( + "selection.workfile.changed", + { + "project_name": self._project_name, + "folder_id": self._folder_id, + "task_name": self._task_name, + "task_id": self._task_id, + "workfile_id": workfile_id, + }, + self.event_source + ) diff --git a/client/ayon_core/tools/launcher/models/workfiles.py b/client/ayon_core/tools/launcher/models/workfiles.py new file mode 100644 index 0000000000..649a87353c --- /dev/null +++ b/client/ayon_core/tools/launcher/models/workfiles.py @@ -0,0 +1,102 @@ +import os +from typing import Optional, Any + +import ayon_api + +from ayon_core.lib import ( + Logger, + NestedCacheItem, +) +from ayon_core.pipeline import Anatomy +from ayon_core.tools.launcher.abstract import ( + WorkfileItem, + AbstractLauncherBackend, +) + + +class WorkfilesModel: + def __init__(self, controller: AbstractLauncherBackend): + self._controller = controller + + self._log = Logger.get_logger(self.__class__.__name__) + + self._host_icons = None + self._workfile_items = NestedCacheItem( + levels=2, default_factory=list, lifetime=60, + ) + + def reset(self) -> None: + self._workfile_items.reset() + + def get_workfile_items( + self, + project_name: Optional[str], + task_id: Optional[str], + ) -> list[WorkfileItem]: + if not project_name or not task_id: + return [] + + cache = self._workfile_items[project_name][task_id] + if cache.is_valid: + return cache.get_data() + + project_entity = self._controller.get_project_entity(project_name) + anatomy = Anatomy(project_name, project_entity=project_entity) + items = [] + for workfile_entity in ayon_api.get_workfiles_info( + project_name, task_ids={task_id}, fields={"id", "path", "data"} + ): + 
rootless_path = workfile_entity["path"] + exists = False + try: + path = anatomy.fill_root(rootless_path) + exists = os.path.exists(path) + except Exception: + self._log.warning( + "Failed to fill root for workfile path", + exc_info=True, + ) + workfile_data = workfile_entity["data"] + host_name = workfile_data.get("host_name") + version = workfile_data.get("version") + + items.append(WorkfileItem( + workfile_id=workfile_entity["id"], + filename=os.path.basename(rootless_path), + exists=exists, + icon=self._get_host_icon(host_name), + version=version, + )) + cache.update_data(items) + return items + + def _get_host_icon( + self, host_name: Optional[str] + ) -> Optional[dict[str, Any]]: + if self._host_icons is None: + host_icons = {} + try: + host_icons = self._get_host_icons() + except Exception: + self._log.warning( + "Failed to get host icons", + exc_info=True, + ) + self._host_icons = host_icons + return self._host_icons.get(host_name) + + def _get_host_icons(self) -> dict[str, Any]: + addons_manager = self._controller.get_addons_manager() + applications_addon = addons_manager["applications"] + apps_manager = applications_addon.get_applications_manager() + output = {} + for app_group in apps_manager.app_groups.values(): + host_name = app_group.host_name + icon_filename = app_group.icon + if not host_name or not icon_filename: + continue + icon_url = applications_addon.get_app_icon_url( + icon_filename, server=True + ) + output[host_name] = icon_url + return output diff --git a/client/ayon_core/tools/launcher/ui/actions_widget.py b/client/ayon_core/tools/launcher/ui/actions_widget.py index 51cb8e73bc..31b303ca2b 100644 --- a/client/ayon_core/tools/launcher/ui/actions_widget.py +++ b/client/ayon_core/tools/launcher/ui/actions_widget.py @@ -136,6 +136,10 @@ class ActionsQtModel(QtGui.QStandardItemModel): "selection.task.changed", self._on_selection_task_changed, ) + controller.register_event_callback( + "selection.workfile.changed", + self._on_selection_workfile_changed, + ) self._controller = controller @@ -146,6 +150,7 @@ class ActionsQtModel(QtGui.QStandardItemModel): self._selected_project_name = None self._selected_folder_id = None self._selected_task_id = None + self._selected_workfile_id = None def get_selected_project_name(self): return self._selected_project_name @@ -156,6 +161,9 @@ class ActionsQtModel(QtGui.QStandardItemModel): def get_selected_task_id(self): return self._selected_task_id + def get_selected_workfile_id(self): + return self._selected_workfile_id + def get_group_items(self, action_id): return self._groups_by_id[action_id] @@ -194,6 +202,7 @@ class ActionsQtModel(QtGui.QStandardItemModel): self._selected_project_name, self._selected_folder_id, self._selected_task_id, + self._selected_workfile_id, ) if not items: self._clear_items() @@ -286,18 +295,28 @@ class ActionsQtModel(QtGui.QStandardItemModel): self._selected_project_name = event["project_name"] self._selected_folder_id = None self._selected_task_id = None + self._selected_workfile_id = None self.refresh() def _on_selection_folder_changed(self, event): self._selected_project_name = event["project_name"] self._selected_folder_id = event["folder_id"] self._selected_task_id = None + self._selected_workfile_id = None self.refresh() def _on_selection_task_changed(self, event): self._selected_project_name = event["project_name"] self._selected_folder_id = event["folder_id"] self._selected_task_id = event["task_id"] + self._selected_workfile_id = None + self.refresh() + + def _on_selection_workfile_changed(self, 
event): + self._selected_project_name = event["project_name"] + self._selected_folder_id = event["folder_id"] + self._selected_task_id = event["task_id"] + self._selected_workfile_id = event["workfile_id"] self.refresh() @@ -578,9 +597,6 @@ class ActionMenuPopup(QtWidgets.QWidget): if not index or not index.isValid(): return - if not index.data(ACTION_HAS_CONFIGS_ROLE): - return - action_id = index.data(ACTION_ID_ROLE) self.action_triggered.emit(action_id) @@ -970,10 +986,11 @@ class ActionsWidget(QtWidgets.QWidget): event["project_name"], event["folder_id"], event["task_id"], + event["workfile_id"], event["addon_name"], event["addon_version"], ), - event["action_label"], + event["full_label"], form_data, ) @@ -1050,24 +1067,26 @@ class ActionsWidget(QtWidgets.QWidget): project_name = self._model.get_selected_project_name() folder_id = self._model.get_selected_folder_id() task_id = self._model.get_selected_task_id() + workfile_id = self._model.get_selected_workfile_id() action_item = self._model.get_action_item_by_id(action_id) if action_item.action_type == "webaction": action_item = self._model.get_action_item_by_id(action_id) context = WebactionContext( - action_id, - project_name, - folder_id, - task_id, - action_item.addon_name, - action_item.addon_version + identifier=action_id, + project_name=project_name, + folder_id=folder_id, + task_id=task_id, + workfile_id=workfile_id, + addon_name=action_item.addon_name, + addon_version=action_item.addon_version, ) self._controller.trigger_webaction( context, action_item.full_label ) else: self._controller.trigger_action( - action_id, project_name, folder_id, task_id + action_id, project_name, folder_id, task_id, workfile_id ) if index is None: @@ -1087,11 +1106,13 @@ class ActionsWidget(QtWidgets.QWidget): project_name = self._model.get_selected_project_name() folder_id = self._model.get_selected_folder_id() task_id = self._model.get_selected_task_id() + workfile_id = self._model.get_selected_workfile_id() context = WebactionContext( - action_id, + identifier=action_id, project_name=project_name, folder_id=folder_id, task_id=task_id, + workfile_id=workfile_id, addon_name=action_item.addon_name, addon_version=action_item.addon_version, ) diff --git a/client/ayon_core/tools/launcher/ui/hierarchy_page.py b/client/ayon_core/tools/launcher/ui/hierarchy_page.py index 65efdc27ac..47388d9685 100644 --- a/client/ayon_core/tools/launcher/ui/hierarchy_page.py +++ b/client/ayon_core/tools/launcher/ui/hierarchy_page.py @@ -12,6 +12,8 @@ from ayon_core.tools.utils import ( ) from ayon_core.tools.utils.lib import checkstate_int_to_enum +from .workfiles_page import WorkfilesPage + class HierarchyPage(QtWidgets.QWidget): def __init__(self, controller, parent): @@ -73,10 +75,15 @@ class HierarchyPage(QtWidgets.QWidget): # - Tasks widget tasks_widget = TasksWidget(controller, content_body) + # - Third page - Workfiles + workfiles_page = WorkfilesPage(controller, content_body) + content_body.addWidget(folders_widget) content_body.addWidget(tasks_widget) - content_body.setStretchFactor(0, 100) - content_body.setStretchFactor(1, 65) + content_body.addWidget(workfiles_page) + content_body.setStretchFactor(0, 120) + content_body.setStretchFactor(1, 85) + content_body.setStretchFactor(2, 220) main_layout = QtWidgets.QVBoxLayout(self) main_layout.setContentsMargins(0, 0, 0, 0) @@ -99,6 +106,7 @@ class HierarchyPage(QtWidgets.QWidget): self._my_tasks_checkbox = my_tasks_checkbox self._folders_widget = folders_widget self._tasks_widget = tasks_widget + 
self._workfiles_page = workfiles_page self._project_name = None @@ -117,6 +125,7 @@ class HierarchyPage(QtWidgets.QWidget): def refresh(self): self._folders_widget.refresh() self._tasks_widget.refresh() + self._workfiles_page.refresh() self._on_my_tasks_checkbox_state_changed( self._my_tasks_checkbox.checkState() ) diff --git a/client/ayon_core/tools/launcher/ui/window.py b/client/ayon_core/tools/launcher/ui/window.py index 819e141d59..ad2fd2d3c2 100644 --- a/client/ayon_core/tools/launcher/ui/window.py +++ b/client/ayon_core/tools/launcher/ui/window.py @@ -177,7 +177,7 @@ class LauncherWindow(QtWidgets.QWidget): self._page_slide_anim = page_slide_anim hierarchy_page.setVisible(not self._is_on_projects_page) - self.resize(520, 740) + self.resize(920, 740) def showEvent(self, event): super().showEvent(event) diff --git a/client/ayon_core/tools/launcher/ui/workfiles_page.py b/client/ayon_core/tools/launcher/ui/workfiles_page.py new file mode 100644 index 0000000000..1ea223031e --- /dev/null +++ b/client/ayon_core/tools/launcher/ui/workfiles_page.py @@ -0,0 +1,175 @@ +from typing import Optional + +import ayon_api +from qtpy import QtCore, QtWidgets, QtGui + +from ayon_core.tools.utils import get_qt_icon +from ayon_core.tools.launcher.abstract import AbstractLauncherFrontEnd + +VERSION_ROLE = QtCore.Qt.UserRole + 1 +WORKFILE_ID_ROLE = QtCore.Qt.UserRole + 2 + + +class WorkfilesModel(QtGui.QStandardItemModel): + refreshed = QtCore.Signal() + + def __init__(self, controller: AbstractLauncherFrontEnd) -> None: + super().__init__() + + self.setColumnCount(1) + self.setHeaderData(0, QtCore.Qt.Horizontal, "Workfiles") + + controller.register_event_callback( + "selection.project.changed", + self._on_selection_project_changed, + ) + controller.register_event_callback( + "selection.folder.changed", + self._on_selection_folder_changed, + ) + controller.register_event_callback( + "selection.task.changed", + self._on_selection_task_changed, + ) + + self._controller = controller + self._selected_project_name = None + self._selected_folder_id = None + self._selected_task_id = None + + self._transparent_icon = None + + self._cached_icons = {} + + def refresh(self) -> None: + root_item = self.invisibleRootItem() + root_item.removeRows(0, root_item.rowCount()) + + workfile_items = self._controller.get_workfile_items( + self._selected_project_name, self._selected_task_id + ) + new_items = [] + for workfile_item in workfile_items: + icon = self._get_icon(workfile_item.icon) + item = QtGui.QStandardItem(workfile_item.filename) + item.setData(icon, QtCore.Qt.DecorationRole) + item.setData(workfile_item.version, VERSION_ROLE) + item.setData(workfile_item.workfile_id, WORKFILE_ID_ROLE) + flags = QtCore.Qt.NoItemFlags + if workfile_item.exists: + flags = QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable + item.setFlags(flags) + new_items.append(item) + + if not new_items: + title = "< No workfiles >" + if not self._selected_project_name: + title = "< Select a project >" + elif not self._selected_folder_id: + title = "< Select a folder >" + elif not self._selected_task_id: + title = "< Select a task >" + item = QtGui.QStandardItem(title) + item.setFlags(QtCore.Qt.NoItemFlags) + new_items.append(item) + root_item.appendRows(new_items) + + self.refreshed.emit() + + def _on_selection_project_changed(self, event) -> None: + self._selected_project_name = event["project_name"] + self._selected_folder_id = None + self._selected_task_id = None + self.refresh() + + def _on_selection_folder_changed(self, event) -> None: + 
self._selected_project_name = event["project_name"] + self._selected_folder_id = event["folder_id"] + self._selected_task_id = None + self.refresh() + + def _on_selection_task_changed(self, event) -> None: + self._selected_project_name = event["project_name"] + self._selected_folder_id = event["folder_id"] + self._selected_task_id = event["task_id"] + self.refresh() + + def _get_transparent_icon(self) -> QtGui.QIcon: + if self._transparent_icon is None: + self._transparent_icon = get_qt_icon({ + "type": "transparent", "size": 256 + }) + return self._transparent_icon + + def _get_icon(self, icon_url: Optional[str]) -> QtGui.QIcon: + if icon_url is None: + return self._get_transparent_icon() + icon = self._cached_icons.get(icon_url) + if icon is not None: + return icon + + base_url = ayon_api.get_base_url() + if icon_url.startswith(base_url): + icon_def = { + "type": "ayon_url", + "url": icon_url[len(base_url) + 1:], + } + else: + icon_def = { + "type": "url", + "url": icon_url, + } + + icon = get_qt_icon(icon_def) + if icon is None: + icon = self._get_transparent_icon() + self._cached_icons[icon_url] = icon + return icon + + +class WorkfilesView(QtWidgets.QTreeView): + def drawBranches(self, painter, rect, index): + return + + +class WorkfilesPage(QtWidgets.QWidget): + def __init__( + self, + controller: AbstractLauncherFrontEnd, + parent: QtWidgets.QWidget, + ) -> None: + super().__init__(parent) + + workfiles_view = WorkfilesView(self) + workfiles_view.setIndentation(0) + workfiles_model = WorkfilesModel(controller) + workfiles_proxy = QtCore.QSortFilterProxyModel() + workfiles_proxy.setSourceModel(workfiles_model) + + workfiles_view.setModel(workfiles_proxy) + + layout = QtWidgets.QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addWidget(workfiles_view, 1) + + workfiles_view.selectionModel().selectionChanged.connect( + self._on_selection_changed + ) + workfiles_model.refreshed.connect(self._on_refresh) + + self._controller = controller + self._workfiles_view = workfiles_view + self._workfiles_model = workfiles_model + self._workfiles_proxy = workfiles_proxy + + def refresh(self) -> None: + self._workfiles_model.refresh() + + def _on_refresh(self) -> None: + self._workfiles_proxy.sort(0, QtCore.Qt.DescendingOrder) + + def _on_selection_changed(self, selected, _deselected) -> None: + workfile_id = None + for index in selected.indexes(): + workfile_id = index.data(WORKFILE_ID_ROLE) + self._controller.set_selected_workfile(workfile_id) diff --git a/client/ayon_core/tools/loader/abstract.py b/client/ayon_core/tools/loader/abstract.py index 5ab7e78212..9c7934d2db 100644 --- a/client/ayon_core/tools/loader/abstract.py +++ b/client/ayon_core/tools/loader/abstract.py @@ -9,7 +9,11 @@ from ayon_core.lib.attribute_definitions import ( deserialize_attr_defs, serialize_attr_defs, ) -from ayon_core.tools.common_models import TaskItem, TagItem +from ayon_core.tools.common_models import ( + TaskItem, + TagItem, + ProductTypeIconMapping, +) class ProductTypeItem: @@ -78,7 +82,6 @@ class ProductItem: product_type (str): Product type. product_name (str): Product name. product_icon (dict[str, Any]): Product icon definition. - product_type_icon (dict[str, Any]): Product type icon definition. product_in_scene (bool): Is product in scene (only when used in DCC). group_name (str): Group name. folder_id (str): Folder id. 
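A minimal sketch of the icon lookup that replaces the removed per-item
"product_type_icon" fields, assuming the ProductTypeIconMapping controller
API introduced later in this diff (get_product_type_icons_mapping and the
get_icon(product_base_type, product_type) call used in products.py below):

    def resolve_product_icon(controller, project_name, product_entity):
        # Shared mapping replaces the removed per-item icon attributes.
        icons_mapping = controller.get_product_type_icons_mapping(project_name)
        # Same call pattern as 'product_item_from_entity()' further below;
        # returns the icon definition stored as 'ProductItem.product_icon'.
        return icons_mapping.get_icon(
            product_entity.get("productBaseType"),
            product_entity["productType"],
        )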
@@ -93,8 +96,6 @@ class ProductItem: product_base_type: str, product_name: str, product_icon: dict[str, Any], - product_type_icon: dict[str, Any], - product_base_type_icon: dict[str, Any], group_name: str, folder_id: str, folder_label: str, @@ -106,8 +107,6 @@ class ProductItem: self.product_base_type = product_base_type self.product_name = product_name self.product_icon = product_icon - self.product_type_icon = product_type_icon - self.product_base_type_icon = product_base_type_icon self.product_in_scene = product_in_scene self.group_name = group_name self.folder_id = folder_id @@ -121,8 +120,6 @@ class ProductItem: "product_base_type": self.product_base_type, "product_name": self.product_name, "product_icon": self.product_icon, - "product_type_icon": self.product_type_icon, - "product_base_type_icon": self.product_base_type_icon, "product_in_scene": self.product_in_scene, "group_name": self.group_name, "folder_id": self.folder_id, @@ -499,8 +496,8 @@ class BackendLoaderController(_BaseLoaderController): topic (str): Event topic name. data (Optional[dict[str, Any]]): Event data. source (Optional[str]): Event source. - """ + """ pass @abstractmethod @@ -509,8 +506,20 @@ class BackendLoaderController(_BaseLoaderController): Returns: set[str]: Set of loaded product ids. - """ + """ + pass + + @abstractmethod + def get_product_type_icons_mapping( + self, project_name: Optional[str] + ) -> ProductTypeIconMapping: + """Product type icons mapping. + + Returns: + ProductTypeIconMapping: Product type icons mapping. + + """ pass diff --git a/client/ayon_core/tools/loader/control.py b/client/ayon_core/tools/loader/control.py index 7ba42a0981..9f159bfb21 100644 --- a/client/ayon_core/tools/loader/control.py +++ b/client/ayon_core/tools/loader/control.py @@ -2,6 +2,7 @@ from __future__ import annotations import logging import uuid +from typing import Optional import ayon_api @@ -16,6 +17,7 @@ from ayon_core.tools.common_models import ( HierarchyModel, ThumbnailsModel, TagItem, + ProductTypeIconMapping, ) from .abstract import ( @@ -198,6 +200,13 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): project_name, sender ) + def get_product_type_icons_mapping( + self, project_name: Optional[str] + ) -> ProductTypeIconMapping: + return self._projects_model.get_product_type_icons_mapping( + project_name + ) + def get_folder_items(self, project_name, sender=None): return self._hierarchy_model.get_folder_items(project_name, sender) diff --git a/client/ayon_core/tools/loader/models/products.py b/client/ayon_core/tools/loader/models/products.py index 87e2406c81..7915a75bcf 100644 --- a/client/ayon_core/tools/loader/models/products.py +++ b/client/ayon_core/tools/loader/models/products.py @@ -9,9 +9,9 @@ import arrow import ayon_api from ayon_api.operations import OperationsSession - from ayon_core.lib import NestedCacheItem from ayon_core.style import get_default_entity_icon_color +from ayon_core.tools.common_models import ProductTypeIconMapping from ayon_core.tools.loader.abstract import ( ProductTypeItem, ProductBaseTypeItem, @@ -21,8 +21,11 @@ from ayon_core.tools.loader.abstract import ( ) if TYPE_CHECKING: - from ayon_api.typing import ProductBaseTypeDict, ProductDict, VersionDict - + from ayon_api.typing import ( + ProductBaseTypeDict, + ProductDict, + VersionDict, + ) PRODUCTS_MODEL_SENDER = "products.model" @@ -84,42 +87,18 @@ def version_item_from_entity(version): def product_item_from_entity( product_entity: ProductDict, version_entities, - product_type_items_by_name: 
dict[str, ProductTypeItem], - product_base_type_items_by_name: dict[str, ProductBaseTypeItem], folder_label, + icons_mapping, product_in_scene, ): product_attribs = product_entity["attrib"] group = product_attribs.get("productGroup") product_type = product_entity["productType"] - product_type_item = product_type_items_by_name.get(product_type) - # NOTE This is needed for cases when products were not created on server - # using api functions. In that case product type item may not be - # available and we need to create a default. - if product_type_item is None: - product_type_item = create_default_product_type_item(product_type) - # Cache the item for future use - product_type_items_by_name[product_type] = product_type_item - product_base_type = product_entity.get("productBaseType") - product_base_type_item = product_base_type_items_by_name.get( - product_base_type) - # Same as for product type item above. Not sure if this is still needed - # though. - if product_base_type_item is None: - product_base_type_item = create_default_product_base_type_item( - product_base_type) - # Cache the item for future use - product_base_type_items_by_name[product_base_type] = ( - product_base_type_item) - product_type_icon = product_type_item.icon - product_base_type_icon = product_base_type_item.icon - product_icon = { - "type": "awesome-font", - "name": "fa.file-o", - "color": get_default_entity_icon_color(), - } + product_icon = icons_mapping.get_icon( + product_base_type, product_type + ) version_items = { version_entity["id"]: version_item_from_entity(version_entity) for version_entity in version_entities @@ -131,8 +110,6 @@ def product_item_from_entity( product_base_type=product_base_type, product_name=product_entity["name"], product_icon=product_icon, - product_type_icon=product_type_icon, - product_base_type_icon=product_base_type_icon, product_in_scene=product_in_scene, group_name=group, folder_id=product_entity["folderId"], @@ -141,22 +118,8 @@ def product_item_from_entity( ) -def product_type_item_from_data( - product_type_data: ProductDict) -> ProductTypeItem: - # TODO implement icon implementation - # icon = product_type_data["icon"] - # color = product_type_data["color"] - icon = { - "type": "awesome-font", - "name": "fa.folder", - "color": "#0091B2", - } - # TODO implement checked logic - return ProductTypeItem(product_type_data["name"], icon) - - def product_base_type_item_from_data( - product_base_type_data: ProductBaseTypeDict + product_base_type_data: ProductBaseTypeDict ) -> ProductBaseTypeItem: """Create product base type item from data. @@ -174,34 +137,8 @@ def product_base_type_item_from_data( } return ProductBaseTypeItem( name=product_base_type_data["name"], - icon=icon) - - -def create_default_product_type_item(product_type: str) -> ProductTypeItem: - icon = { - "type": "awesome-font", - "name": "fa.folder", - "color": "#0091B2", - } - return ProductTypeItem(product_type, icon) - - -def create_default_product_base_type_item( - product_base_type: str) -> ProductBaseTypeItem: - """Create default product base type item. - - Args: - product_base_type (str): Product base type name. - - Returns: - ProductBaseTypeItem: Default product base type item. 
- """ - icon = { - "type": "awesome-font", - "name": "fa.folder", - "color": "#0091B2", - } - return ProductBaseTypeItem(product_base_type, icon) + icon=icon + ) class ProductsModel: @@ -247,7 +184,9 @@ class ProductsModel: self._product_items_cache.reset() self._repre_items_cache.reset() - def get_product_type_items(self, project_name): + def get_product_type_items( + self, project_name: Optional[str] + ) -> list[ProductTypeItem]: """Product type items for project. Args: @@ -255,25 +194,33 @@ class ProductsModel: Returns: list[ProductTypeItem]: Product type items. - """ + """ if not project_name: return [] cache = self._product_type_items_cache[project_name] if not cache.is_valid: + icons_mapping = self._get_product_type_icons(project_name) product_types = ayon_api.get_project_product_types(project_name) cache.update_data([ - product_type_item_from_data(product_type) + ProductTypeItem( + product_type["name"], + icons_mapping.get_icon(product_type=product_type["name"]), + ) for product_type in product_types ]) return cache.get_data() def get_product_base_type_items( - self, - project_name: Optional[str]) -> list[ProductBaseTypeItem]: + self, project_name: Optional[str] + ) -> list[ProductBaseTypeItem]: """Product base type items for the project. + Notes: + This will be used for filtering product types in UI when + product base types are fully implemented. + Args: project_name (optional, str): Project name. @@ -286,6 +233,7 @@ class ProductsModel: cache = self._product_base_type_items_cache[project_name] if not cache.is_valid: + icons_mapping = self._get_product_type_icons(project_name) product_base_types = [] # TODO add temp implementation here when it is actually # implemented and available on server. @@ -294,7 +242,10 @@ class ProductsModel: project_name ) cache.update_data([ - product_base_type_item_from_data(product_base_type) + ProductBaseTypeItem( + product_base_type["name"], + icons_mapping.get_icon(product_base_type["name"]), + ) for product_base_type in product_base_types ]) return cache.get_data() @@ -511,6 +462,11 @@ class ProductsModel: PRODUCTS_MODEL_SENDER ) + def _get_product_type_icons( + self, project_name: Optional[str] + ) -> ProductTypeIconMapping: + return self._controller.get_product_type_icons_mapping(project_name) + def _get_product_items_by_id(self, project_name, product_ids): product_item_by_id = self._product_item_by_id[project_name] missing_product_ids = set() @@ -524,7 +480,7 @@ class ProductsModel: output.update( self._query_product_items_by_ids( - project_name, missing_product_ids + project_name, product_ids=missing_product_ids ) ) return output @@ -553,36 +509,18 @@ class ProductsModel: products: Iterable[ProductDict], versions: Iterable[VersionDict], folder_items=None, - product_type_items=None, - product_base_type_items: Optional[Iterable[ProductBaseTypeItem]] = None ): if folder_items is None: folder_items = self._controller.get_folder_items(project_name) - if product_type_items is None: - product_type_items = self.get_product_type_items(project_name) - - if product_base_type_items is None: - product_base_type_items = self.get_product_base_type_items( - project_name - ) - loaded_product_ids = self._controller.get_loaded_product_ids() versions_by_product_id = collections.defaultdict(list) for version in versions: versions_by_product_id[version["productId"]].append(version) - product_type_items_by_name = { - product_type_item.name: product_type_item - for product_type_item in product_type_items - } - - product_base_type_items_by_name: dict[str, 
ProductBaseTypeItem] = { - product_base_type_item.name: product_base_type_item - for product_base_type_item in product_base_type_items - } output: dict[str, ProductItem] = {} + icons_mapping = self._get_product_type_icons(project_name) for product in products: product_id = product["id"] folder_id = product["folderId"] @@ -595,9 +533,8 @@ class ProductsModel: product_item = product_item_from_entity( product, versions, - product_type_items_by_name, - product_base_type_items_by_name, folder_item.label, + icons_mapping, product_id in loaded_product_ids, ) output[product_id] = product_item diff --git a/client/ayon_core/tools/loader/ui/products_model.py b/client/ayon_core/tools/loader/ui/products_model.py index f3e5271f51..79ed197d83 100644 --- a/client/ayon_core/tools/loader/ui/products_model.py +++ b/client/ayon_core/tools/loader/ui/products_model.py @@ -17,7 +17,6 @@ PRODUCT_ID_ROLE = QtCore.Qt.UserRole + 6 PRODUCT_NAME_ROLE = QtCore.Qt.UserRole + 7 PRODUCT_TYPE_ROLE = QtCore.Qt.UserRole + 8 PRODUCT_BASE_TYPE_ROLE = QtCore.Qt.UserRole + 9 -PRODUCT_TYPE_ICON_ROLE = QtCore.Qt.UserRole + 10 PRODUCT_IN_SCENE_ROLE = QtCore.Qt.UserRole + 11 VERSION_ID_ROLE = QtCore.Qt.UserRole + 12 VERSION_HERO_ROLE = QtCore.Qt.UserRole + 13 @@ -228,10 +227,7 @@ class ProductsModel(QtGui.QStandardItemModel): return super().data(index, role) if role == QtCore.Qt.DecorationRole: - if col == 1: - role = PRODUCT_TYPE_ICON_ROLE - else: - return None + return None if ( role == VERSION_NAME_EDIT_ROLE @@ -455,7 +451,6 @@ class ProductsModel(QtGui.QStandardItemModel): model_item = QtGui.QStandardItem(product_item.product_name) model_item.setEditable(False) icon = get_qt_icon(product_item.product_icon) - product_type_icon = get_qt_icon(product_item.product_type_icon) model_item.setColumnCount(self.columnCount()) model_item.setData(icon, QtCore.Qt.DecorationRole) model_item.setData(product_id, PRODUCT_ID_ROLE) @@ -464,7 +459,6 @@ class ProductsModel(QtGui.QStandardItemModel): product_item.product_base_type, PRODUCT_BASE_TYPE_ROLE ) model_item.setData(product_item.product_type, PRODUCT_TYPE_ROLE) - model_item.setData(product_type_icon, PRODUCT_TYPE_ICON_ROLE) model_item.setData(product_item.folder_id, FOLDER_ID_ROLE) self._product_items_by_id[product_id] = product_item diff --git a/client/ayon_core/tools/publisher/abstract.py b/client/ayon_core/tools/publisher/abstract.py index 6d0027d35d..14da15793d 100644 --- a/client/ayon_core/tools/publisher/abstract.py +++ b/client/ayon_core/tools/publisher/abstract.py @@ -13,7 +13,7 @@ from typing import ( ) from ayon_core.lib import AbstractAttrDef -from ayon_core.host import HostBase +from ayon_core.host import AbstractHost from ayon_core.pipeline.create import ( CreateContext, ConvertorItem, @@ -176,7 +176,7 @@ class AbstractPublisherBackend(AbstractPublisherCommon): pass @abstractmethod - def get_host(self) -> HostBase: + def get_host(self) -> AbstractHost: pass @abstractmethod diff --git a/client/ayon_core/tools/publisher/models/create.py b/client/ayon_core/tools/publisher/models/create.py index 75ed2c73fe..5098826b8b 100644 --- a/client/ayon_core/tools/publisher/models/create.py +++ b/client/ayon_core/tools/publisher/models/create.py @@ -219,6 +219,8 @@ class InstanceItem: is_active: bool, is_mandatory: bool, has_promised_context: bool, + parent_instance_id: Optional[str], + parent_flags: int, ): self._instance_id: str = instance_id self._creator_identifier: str = creator_identifier @@ -232,6 +234,8 @@ class InstanceItem: self._is_active: bool = is_active self._is_mandatory: bool = 
is_mandatory self._has_promised_context: bool = has_promised_context + self._parent_instance_id: Optional[str] = parent_instance_id + self._parent_flags: int = parent_flags @property def id(self): @@ -261,6 +265,14 @@ class InstanceItem: def has_promised_context(self): return self._has_promised_context + @property + def parent_instance_id(self): + return self._parent_instance_id + + @property + def parent_flags(self) -> int: + return self._parent_flags + def get_variant(self): return self._variant @@ -312,6 +324,8 @@ class InstanceItem: instance["active"], instance.is_mandatory, instance.has_promised_context, + instance.parent_instance_id, + instance.parent_flags, ) @@ -486,6 +500,9 @@ class CreateModel: self._create_context.add_instance_requirement_change_callback( self._cc_instance_requirement_changed ) + self._create_context.add_instance_parent_change_callback( + self._cc_instance_parent_changed + ) self._create_context.reset_finalization() @@ -566,15 +583,21 @@ class CreateModel: def set_instances_active_state( self, active_state_by_id: Dict[str, bool] ): + changed_ids = set() with self._create_context.bulk_value_changes(CREATE_EVENT_SOURCE): for instance_id, active in active_state_by_id.items(): instance = self._create_context.get_instance_by_id(instance_id) - instance["active"] = active + if instance["active"] is not active: + instance["active"] = active + changed_ids.add(instance_id) + + if not changed_ids: + return self._emit_event( "create.model.instances.context.changed", { - "instance_ids": set(active_state_by_id.keys()) + "instance_ids": changed_ids } ) @@ -1191,6 +1214,16 @@ class CreateModel: {"instance_ids": instance_ids}, ) + def _cc_instance_parent_changed(self, event): + instance_ids = { + instance.id + for instance in event.data["instances"] + } + self._emit_event( + "create.model.instance.parent.changed", + {"instance_ids": instance_ids}, + ) + def _get_allowed_creators_pattern(self) -> Union[Pattern, None]: """Provide regex pattern for configured creator labels in this context diff --git a/client/ayon_core/tools/publisher/widgets/card_view_widgets.py b/client/ayon_core/tools/publisher/widgets/card_view_widgets.py index 8a4eddf058..84786a671e 100644 --- a/client/ayon_core/tools/publisher/widgets/card_view_widgets.py +++ b/client/ayon_core/tools/publisher/widgets/card_view_widgets.py @@ -19,18 +19,21 @@ Only one item can be selected at a time. 
└──────────────────────┘ ``` """ +from __future__ import annotations import re import collections -from typing import Dict +from typing import Optional from qtpy import QtWidgets, QtCore -from ayon_core.tools.utils import NiceCheckbox +from ayon_core.pipeline.create import ( + InstanceContextInfo, + ParentFlags, +) -from ayon_core.tools.utils import BaseClickableFrame +from ayon_core.tools.utils import BaseClickableFrame, NiceCheckbox from ayon_core.tools.utils.lib import html_escape - from ayon_core.tools.publisher.abstract import AbstractPublisherFrontend from ayon_core.tools.publisher.constants import ( CONTEXT_ID, @@ -38,7 +41,9 @@ from ayon_core.tools.publisher.constants import ( CONTEXT_GROUP, CONVERTOR_ITEM_GROUP, ) - +from ayon_core.tools.publisher.models.create import ( + InstanceItem, +) from .widgets import ( AbstractInstanceView, ContextWarningLabel, @@ -82,7 +87,6 @@ class BaseGroupWidget(QtWidgets.QWidget): self._group = group_name self._widgets_by_id = {} - self._ordered_item_ids = [] self._label_widget = label_widget self._content_layout = layout @@ -97,48 +101,25 @@ class BaseGroupWidget(QtWidgets.QWidget): return self._group - def get_widget_by_item_id(self, item_id): - """Get instance widget by its id.""" + def set_widgets( + self, + widgets_by_id: dict[str, QtWidgets.QWidget], + ordered_ids: list[str], + ) -> None: + self._remove_all_except(set(self._widgets_by_id)) + idx = 1 + for item_id in ordered_ids: + widget = widgets_by_id[item_id] + self._content_layout.insertWidget(idx, widget) + self._widgets_by_id[item_id] = widget + idx += 1 - return self._widgets_by_id.get(item_id) - - def get_selected_item_ids(self): - """Selected instance ids. - - Returns: - Set[str]: Instance ids that are selected. - """ - - return { - instance_id - for instance_id, widget in self._widgets_by_id.items() - if widget.is_selected - } - - def get_selected_widgets(self): - """Access to widgets marked as selected. - - Returns: - List[InstanceCardWidget]: Instance widgets that are selected. - """ - - return [ - widget - for instance_id, widget in self._widgets_by_id.items() - if widget.is_selected - ] - - def get_ordered_widgets(self): - """Get instance ids in order as are shown in ui. - - Returns: - List[str]: Instance ids. 
- """ - - return [ - self._widgets_by_id[instance_id] - for instance_id in self._ordered_item_ids - ] + def take_widgets(self, widget_ids: set[str]): + for widget_id in widget_ids: + widget = self._widgets_by_id.pop(widget_id) + index = self._content_layout.indexOf(widget) + if index >= 0: + self._content_layout.takeAt(index) def _remove_all_except(self, item_ids): item_ids = set(item_ids) @@ -155,131 +136,6 @@ class BaseGroupWidget(QtWidgets.QWidget): self._content_layout.removeWidget(widget) widget.deleteLater() - def _update_ordered_item_ids(self): - ordered_item_ids = [] - for idx in range(self._content_layout.count()): - if idx > 0: - item = self._content_layout.itemAt(idx) - widget = item.widget() - if widget is not None: - ordered_item_ids.append(widget.id) - - self._ordered_item_ids = ordered_item_ids - - def _on_widget_selection(self, instance_id, group_id, selection_type): - self.selected.emit(instance_id, group_id, selection_type) - - def set_active_toggle_enabled(self, enabled): - for widget in self._widgets_by_id.values(): - if isinstance(widget, InstanceCardWidget): - widget.set_active_toggle_enabled(enabled) - - -class ConvertorItemsGroupWidget(BaseGroupWidget): - def update_items(self, items_by_id): - items_by_label = collections.defaultdict(list) - for item in items_by_id.values(): - items_by_label[item.label].append(item) - - # Remove instance widgets that are not in passed instances - self._remove_all_except(items_by_id.keys()) - - # Sort instances by product name - sorted_labels = list(sorted(items_by_label.keys())) - - # Add new instances to widget - widget_idx = 1 - for label in sorted_labels: - for item in items_by_label[label]: - if item.id in self._widgets_by_id: - widget = self._widgets_by_id[item.id] - widget.update_item(item) - else: - widget = ConvertorItemCardWidget(item, self) - widget.selected.connect(self._on_widget_selection) - widget.double_clicked.connect(self.double_clicked) - self._widgets_by_id[item.id] = widget - self._content_layout.insertWidget(widget_idx, widget) - widget_idx += 1 - - self._update_ordered_item_ids() - - -class InstanceGroupWidget(BaseGroupWidget): - """Widget wrapping instances under group.""" - - active_changed = QtCore.Signal(str, str, bool) - - def __init__(self, group_icons, *args, **kwargs): - super().__init__(*args, **kwargs) - - self._group_icons = group_icons - - def update_icons(self, group_icons): - self._group_icons = group_icons - - def update_instance_values( - self, context_info_by_id, instance_items_by_id, instance_ids - ): - """Trigger update on instance widgets.""" - - for instance_id, widget in self._widgets_by_id.items(): - if instance_ids is not None and instance_id not in instance_ids: - continue - widget.update_instance( - instance_items_by_id[instance_id], - context_info_by_id[instance_id] - ) - - def update_instances(self, instances, context_info_by_id): - """Update instances for the group. - - Args: - instances (list[InstanceItem]): List of instances in - CreateContext. - context_info_by_id (Dict[str, InstanceContextInfo]): Instance - context info by instance id. 
- - """ - # Store instances by id and by product name - instances_by_id = {} - instances_by_product_name = collections.defaultdict(list) - for instance in instances: - instances_by_id[instance.id] = instance - product_name = instance.product_name - instances_by_product_name[product_name].append(instance) - - # Remove instance widgets that are not in passed instances - self._remove_all_except(instances_by_id.keys()) - - # Sort instances by product name - sorted_product_names = list(sorted(instances_by_product_name.keys())) - - # Add new instances to widget - widget_idx = 1 - for product_names in sorted_product_names: - for instance in instances_by_product_name[product_names]: - context_info = context_info_by_id[instance.id] - if instance.id in self._widgets_by_id: - widget = self._widgets_by_id[instance.id] - widget.update_instance(instance, context_info) - else: - group_icon = self._group_icons[instance.creator_identifier] - widget = InstanceCardWidget( - instance, context_info, group_icon, self - ) - widget.selected.connect(self._on_widget_selection) - widget.active_changed.connect(self._on_active_changed) - widget.double_clicked.connect(self.double_clicked) - self._widgets_by_id[instance.id] = widget - self._content_layout.insertWidget(widget_idx, widget) - widget_idx += 1 - - self._update_ordered_item_ids() - - def _on_active_changed(self, instance_id, value): - self.active_changed.emit(self.group_name, instance_id, value) - class CardWidget(BaseClickableFrame): """Clickable card used as bigger button.""" @@ -400,20 +256,34 @@ class ConvertorItemCardWidget(CardWidget): self._icon_widget = icon_widget self._label_widget = label_widget + def update_item(self, item): + self._id = item.id + self.identifier = item.identifier + class InstanceCardWidget(CardWidget): """Card widget representing instance.""" active_changed = QtCore.Signal(str, bool) - def __init__(self, instance, context_info, group_icon, parent): + def __init__( + self, + instance, + context_info, + is_parent_active: bool, + group_icon, + parent: BaseGroupWidget, + ): super().__init__(parent) + self.instance = instance + self._is_active = instance.is_active + self._id = instance.id self._group_identifier = instance.group_label self._group_icon = group_icon - - self.instance = instance + self._is_parent_active = is_parent_active + self._toggle_is_enabled = True self._last_product_name = None self._last_variant = None @@ -439,10 +309,6 @@ class InstanceCardWidget(CardWidget): expand_btn.setMaximumWidth(14) expand_btn.setEnabled(False) - detail_widget = QtWidgets.QWidget(self) - detail_widget.setVisible(False) - self.detail_widget = detail_widget - top_layout = QtWidgets.QHBoxLayout() top_layout.addLayout(icon_layout, 0) top_layout.addWidget(label_widget, 1) @@ -450,6 +316,9 @@ class InstanceCardWidget(CardWidget): top_layout.addWidget(active_checkbox, 0) top_layout.addWidget(expand_btn, 0) + detail_widget = QtWidgets.QWidget(self) + detail_widget.setVisible(False) + layout = QtWidgets.QHBoxLayout(self) layout.setContentsMargins(0, 2, 10, 2) layout.addLayout(top_layout) @@ -467,28 +336,47 @@ class InstanceCardWidget(CardWidget): self._active_checkbox = active_checkbox self._expand_btn = expand_btn - self._update_instance_values(context_info) + self._detail_widget = detail_widget - def set_active_toggle_enabled(self, enabled): - self._active_checkbox.setEnabled(enabled) + self._update_instance_values(context_info, is_parent_active) - @property - def is_active(self): + def set_active_toggle_enabled(self, enabled: bool) -> None: + if 
self._toggle_is_enabled is enabled: + return + self._toggle_is_enabled = enabled + self._update_checkbox_state() + + def is_active(self) -> bool: return self._active_checkbox.isChecked() - def _set_active(self, new_value): - """Set instance as active.""" - checkbox_value = self._active_checkbox.isChecked() - if checkbox_value != new_value: - self._active_checkbox.setChecked(new_value) + def set_active(self, active: Optional[bool]) -> None: + if not self.is_checkbox_enabled(): + return + if active is None: + active = not self._is_active + self._set_checked(active) - def _set_is_mandatory(self, is_mandatory: bool) -> None: - self._active_checkbox.setVisible(not is_mandatory) + def is_parent_active(self) -> bool: + return self._is_parent_active - def update_instance(self, instance, context_info): + def set_parent_active(self, is_active: bool) -> None: + if self._is_parent_active is is_active: + return + self._is_parent_active = is_active + self._update_checkbox_state() + + def is_checkbox_enabled(self) -> bool: + """Checkbox can be changed by user.""" + return ( + self._used_parent_active() + and not self.instance.is_mandatory + ) + + def update_instance(self, instance, context_info, is_parent_active): """Update instance object and update UI.""" self.instance = instance - self._update_instance_values(context_info) + self._is_active = instance.is_active + self._update_instance_values(context_info, is_parent_active) def _validate_context(self, context_info): valid = context_info.is_valid @@ -499,6 +387,7 @@ class InstanceCardWidget(CardWidget): variant = self.instance.variant product_name = self.instance.product_name label = self.instance.label + if ( variant == self._last_variant and product_name == self._last_product_name @@ -524,24 +413,53 @@ class InstanceCardWidget(CardWidget): QtCore.Qt.NoTextInteraction ) - def _update_instance_values(self, context_info): + def _update_instance_values(self, context_info, is_parent_active): """Update instance data""" + self._is_parent_active = is_parent_active self._update_product_name() - self._set_active(self.instance.is_active) - self._set_is_mandatory(self.instance.is_mandatory) + self._update_checkbox_state() self._validate_context(context_info) + def _update_checkbox_state(self): + parent_is_enabled = self._used_parent_active() + self._label_widget.setEnabled(parent_is_enabled) + self._active_checkbox.setEnabled( + self._toggle_is_enabled + and not self.instance.is_mandatory + and parent_is_enabled + ) + # Hide checkbox for mandatory instances + self._active_checkbox.setVisible(not self.instance.is_mandatory) + + # Visually disable instance if parent is disabled + checked = parent_is_enabled and self._is_active + self._set_checked(checked) + + def _set_checked(self, checked: bool) -> None: + if checked is not self._active_checkbox.isChecked(): + self._active_checkbox.blockSignals(True) + self._active_checkbox.setChecked(checked) + self._active_checkbox.blockSignals(False) + + def _used_parent_active(self) -> bool: + parent_enabled = True + if self.instance.parent_flags & ParentFlags.share_active: + parent_enabled = self._is_parent_active + return parent_enabled + def _set_expanded(self, expanded=None): if expanded is None: - expanded = not self.detail_widget.isVisible() - self.detail_widget.setVisible(expanded) + expanded = not self._detail_widget.isVisible() + self._detail_widget.setVisible(expanded) def _on_active_change(self): - new_value = self._active_checkbox.isChecked() - old_value = self.instance.is_active - if new_value == old_value: + if 
not self.is_checkbox_enabled(): return - + new_value = self._active_checkbox.isChecked() + old_value = self._is_active + if new_value is old_value: + return + self._is_active = new_value self.active_changed.emit(self._id, new_value) def _on_expend_clicked(self): @@ -595,11 +513,22 @@ class InstanceCardView(AbstractInstanceView): self._content_layout = content_layout self._content_widget = content_widget - self._context_widget = None - self._convertor_items_group = None - self._active_toggle_enabled = True - self._widgets_by_group: Dict[str, InstanceGroupWidget] = {} + self._active_toggle_enabled: bool = True + self._convertors_group: Optional[BaseGroupWidget] = None + self._convertor_widgets_by_id: dict[str, ConvertorItemCardWidget] = {} + self._convertor_ids: list[str] = [] + + self._group_name_by_instance_id: dict[str, str] = {} + self._instance_ids_by_group_name: dict[str, list[str]] = ( + collections.defaultdict(list) + ) self._ordered_groups = [] + self._context_widget: Optional[ContextCardWidget] = None + self._widgets_by_id: dict[str, InstanceCardWidget] = {} + self._widgets_by_group: dict[str, BaseGroupWidget] = {} + + self._parent_id_by_id = {} + self._instance_ids_by_parent_id = collections.defaultdict(set) self._explicitly_selected_instance_ids = [] self._explicitly_selected_groups = [] @@ -622,42 +551,104 @@ class InstanceCardView(AbstractInstanceView): result.setWidth(width) return result - def _toggle_instances(self, value): - if not self._active_toggle_enabled: - return + def get_current_instance_count(self) -> int: + """How many instances are currently in the view.""" + return len(self._widgets_by_id) - widgets = self._get_selected_widgets() - active_state_by_id = {} - for widget in widgets: - if not isinstance(widget, InstanceCardWidget): + def _get_affected_ids(self, instance_ids: set[str]) -> set[str]: + affected_ids = set() + affected_queue = collections.deque() + affected_queue.extend(instance_ids) + while affected_queue: + instance_id = affected_queue.popleft() + if instance_id in affected_ids: continue + affected_ids.add(instance_id) + parent_id = instance_id + while True: + parent_id = self._parent_id_by_id[parent_id] + if parent_id is None: + break + affected_ids.add(parent_id) - instance_id = widget.id - is_active = widget.is_active - if value == -1: - active_state_by_id[instance_id] = not is_active - continue + child_ids = set(self._instance_ids_by_parent_id[instance_id]) + affected_queue.extend(child_ids - affected_ids) + return affected_ids - _value = bool(value) - if is_active is not _value: - active_state_by_id[instance_id] = _value + def _toggle_instances( + self, + new_value: Optional[bool], + active_id: Optional[str] = None, + ) -> None: + instance_ids = { + widget.id + for widget in self._get_selected_instance_widgets() + if widget.is_selected + } + active_by_id = {} + if active_id and active_id not in instance_ids: + instance_ids = {active_id} - if not active_state_by_id: - return + ids_to_toggle = set(instance_ids) - self._controller.set_instances_active_state(active_state_by_id) + affected_ids = self._get_affected_ids(instance_ids) + + _queue = collections.deque() + _queue.append((set(self._instance_ids_by_parent_id[None]), True)) + discarted_ids = set() + while _queue: + if not instance_ids: + break + + chilren_ids, is_parent_active = _queue.pop() + for instance_id in chilren_ids: + if instance_id not in affected_ids: + continue + + widget = self._widgets_by_id[instance_id] + if is_parent_active is not widget.is_parent_active(): + 
widget.set_parent_active(is_parent_active) + + instance_ids.discard(instance_id) + if instance_id in ids_to_toggle: + discarted_ids.add(instance_id) + old_value = widget.is_active() + value = new_value + if value is None: + value = not old_value + + widget.set_active(value) + if widget.is_parent_active(): + active_by_id[instance_id] = widget.is_active() + + children_ids = self._instance_ids_by_parent_id[instance_id] + children = { + child_id + for child_id in children_ids + if child_id not in discarted_ids + } + + if children: + instance_ids |= children + _queue.append((children, widget.is_active())) + + if not instance_ids: + break + + if active_by_id: + self._controller.set_instances_active_state(active_by_id) def keyPressEvent(self, event): if event.key() == QtCore.Qt.Key_Space: - self._toggle_instances(-1) + self._toggle_instances(None) return True elif event.key() == QtCore.Qt.Key_Backspace: - self._toggle_instances(0) + self._toggle_instances(False) return True elif event.key() == QtCore.Qt.Key_Return: - self._toggle_instances(1) + self._toggle_instances(True) return True return super().keyPressEvent(event) @@ -670,15 +661,25 @@ class InstanceCardView(AbstractInstanceView): ): output.append(self._context_widget) - if self._convertor_items_group is not None: - output.extend(self._convertor_items_group.get_selected_widgets()) - - for group_widget in self._widgets_by_group.values(): - for widget in group_widget.get_selected_widgets(): - output.append(widget) + output.extend(self._get_selected_convertor_widgets()) + output.extend(self._get_selected_instance_widgets()) return output - def _get_selected_instance_ids(self): + def _get_selected_instance_widgets(self) -> list[InstanceCardWidget]: + return [ + widget + for widget in self._widgets_by_id.values() + if widget.is_selected + ] + + def _get_selected_convertor_widgets(self) -> list[ConvertorItemCardWidget]: + return [ + widget + for widget in self._convertor_widgets_by_id.values() + if widget.is_selected + ] + + def _get_selected_item_ids(self): output = [] if ( self._context_widget is not None @@ -686,11 +687,17 @@ class InstanceCardView(AbstractInstanceView): ): output.append(CONTEXT_ID) - if self._convertor_items_group is not None: - output.extend(self._convertor_items_group.get_selected_item_ids()) + output.extend( + conv_id + for conv_id, widget in self._widgets_by_id.items() + if widget.is_selected + ) - for group_widget in self._widgets_by_group.values(): - output.extend(group_widget.get_selected_item_ids()) + output.extend( + widget.id + for instance_id, widget in self._widgets_by_id.items() + if widget.is_selected + ) return output def refresh(self): @@ -698,25 +705,102 @@ class InstanceCardView(AbstractInstanceView): self._make_sure_context_widget_exists() - self._update_convertor_items_group() + self._update_convertors_group() context_info_by_id = self._controller.get_instances_context_info() # Prepare instances by group and identifiers by group instances_by_group = collections.defaultdict(list) identifiers_by_group = collections.defaultdict(set) - for instance in self._controller.get_instance_items(): + identifiers: set[str] = set() + instances_by_id = {} + parent_id_by_id = {} + instance_ids_by_parent_id = collections.defaultdict(set) + instance_items = self._controller.get_instance_items() + for instance in instance_items: group_name = instance.group_label instances_by_group[group_name].append(instance) identifiers_by_group[group_name].add( instance.creator_identifier ) + identifiers.add(instance.creator_identifier) + 
instances_by_id[instance.id] = instance + instance_ids_by_parent_id[instance.parent_instance_id].add( + instance.id + ) + parent_id_by_id[instance.id] = instance.parent_instance_id - # Remove groups that were not found in apassed instances - for group_name in tuple(self._widgets_by_group.keys()): - if group_name in instances_by_group: - continue + parent_active_by_id = { + instance_id: False + for instance_id in instances_by_id + } + _queue = collections.deque() + _queue.append((None, True)) + while _queue: + parent_id, is_parent_active = _queue.popleft() + for instance_id in instance_ids_by_parent_id[parent_id]: + instance_item = instances_by_id[instance_id] + is_active = instance_item.is_active + if ( + not is_parent_active + and instance_item.parent_flags & ParentFlags.share_active + ): + is_active = False + parent_active_by_id[instance_id] = is_parent_active + _queue.append( + (instance_id, is_active) + ) + + # Remove groups that were not found in passed instances + groups_to_remove = ( + set(self._widgets_by_group) - set(instances_by_group) + ) + ids_to_remove = ( + set(self._widgets_by_id) - set(instances_by_id) + ) + + # Sort groups + sorted_group_names = list(sorted(instances_by_group.keys())) + + # Keep track of widget indexes + # - we start with 1 because Context item as at the top + widget_idx = 1 + if self._convertors_group is not None: + widget_idx += 1 + + group_by_instance_id = {} + instance_ids_by_group_name = collections.defaultdict(list) + group_icons = { + identifier: self._controller.get_creator_icon(identifier) + for identifier in identifiers + } + for group_name in sorted_group_names: + if group_name not in self._widgets_by_group: + group_widget = BaseGroupWidget( + group_name, self._content_widget + ) + group_widget.double_clicked.connect(self.double_clicked) + self._content_layout.insertWidget(widget_idx, group_widget) + self._widgets_by_group[group_name] = group_widget + + widget_idx += 1 + + instances = instances_by_group[group_name] + for instance in instances: + group_by_instance_id[instance.id] = group_name + instance_ids_by_group_name[group_name].append(instance.id) + + self._update_instance_widgets( + group_name, + instances, + context_info_by_id, + parent_active_by_id, + group_icons, + ) + + # Remove empty groups + for group_name in groups_to_remove: widget = self._widgets_by_group.pop(group_name) widget.setVisible(False) self._content_layout.removeWidget(widget) @@ -725,61 +809,89 @@ class InstanceCardView(AbstractInstanceView): if group_name in self._explicitly_selected_groups: self._explicitly_selected_groups.remove(group_name) - # Sort groups - sorted_group_names = list(sorted(instances_by_group.keys())) + for instance_id in ids_to_remove: + widget = self._widgets_by_id.pop(instance_id) + widget.setVisible(False) + widget.deleteLater() - # Keep track of widget indexes - # - we start with 1 because Context item as at the top - widget_idx = 1 - if self._convertor_items_group is not None: - widget_idx += 1 + self._parent_id_by_id = parent_id_by_id + self._instance_ids_by_parent_id = instance_ids_by_parent_id + self._group_name_by_instance_id = group_by_instance_id + self._instance_ids_by_group_name = instance_ids_by_group_name + self._ordered_groups = sorted_group_names - for group_name in sorted_group_names: - group_icons = { - identifier: self._controller.get_creator_icon(identifier) - for identifier in identifiers_by_group[group_name] - } - if group_name in self._widgets_by_group: - group_widget = self._widgets_by_group[group_name] - 
group_widget.update_icons(group_icons) - - else: - group_widget = InstanceGroupWidget( - group_icons, group_name, self._content_widget - ) - group_widget.active_changed.connect(self._on_active_changed) - group_widget.selected.connect(self._on_widget_selection) - group_widget.double_clicked.connect(self.double_clicked) - self._content_layout.insertWidget(widget_idx, group_widget) - self._widgets_by_group[group_name] = group_widget - - widget_idx += 1 - group_widget.update_instances( - instances_by_group[group_name], context_info_by_id - ) - group_widget.set_active_toggle_enabled( - self._active_toggle_enabled - ) - - self._update_ordered_group_names() - - def has_items(self): - if self._convertor_items_group is not None: + def has_items(self) -> bool: + if self._convertors_group is not None: return True - if self._widgets_by_group: + if self._widgets_by_id: return True return False - def _update_ordered_group_names(self): - ordered_group_names = [CONTEXT_GROUP] - for idx in range(self._content_layout.count()): - if idx > 0: - item = self._content_layout.itemAt(idx) - group_widget = item.widget() - if group_widget is not None: - ordered_group_names.append(group_widget.group_name) + def _update_instance_widgets( + self, + group_name: str, + instances: list[InstanceItem], + context_info_by_id: dict[str, InstanceContextInfo], + parent_active_by_id: dict[str, bool], + group_icons: dict[str, str], + ) -> None: + """Update instances for the group. - self._ordered_groups = ordered_group_names + Args: + instances (list[InstanceItem]): List of instances in + CreateContext. + context_info_by_id (dict[str, InstanceContextInfo]): Instance + context info by instance id. + parent_active_by_id (dict[str, bool]): Instance has active parent. + + """ + # Store instances by id and by product name + group_widget: BaseGroupWidget = self._widgets_by_group[group_name] + instances_by_id = {} + instances_by_product_name = collections.defaultdict(list) + for instance in instances: + instances_by_id[instance.id] = instance + product_name = instance.product_name + instances_by_product_name[product_name].append(instance) + + to_remove_ids = set( + self._instance_ids_by_group_name[group_name] + ) - set(instances_by_id) + group_widget.take_widgets(to_remove_ids) + + # Sort instances by product name + sorted_product_names = list(sorted(instances_by_product_name.keys())) + + # Add new instances to widget + ordered_ids = [] + widgets_by_id = {} + for product_names in sorted_product_names: + for instance in instances_by_product_name[product_names]: + context_info = context_info_by_id[instance.id] + is_parent_active = parent_active_by_id[instance.id] + if instance.id in self._widgets_by_id: + widget = self._widgets_by_id[instance.id] + widget.update_instance( + instance, context_info, is_parent_active + ) + else: + group_icon = group_icons[instance.creator_identifier] + widget = InstanceCardWidget( + instance, + context_info, + is_parent_active, + group_icon, + group_widget + ) + widget.selected.connect(self._on_widget_selection) + widget.active_changed.connect(self._on_active_changed) + widget.double_clicked.connect(self.double_clicked) + self._widgets_by_id[instance.id] = widget + + ordered_ids.append(instance.id) + widgets_by_id[instance.id] = widget + + group_widget.set_widgets(widgets_by_id, ordered_ids) def _make_sure_context_widget_exists(self): # Create context item if is not already existing @@ -797,28 +909,65 @@ class InstanceCardView(AbstractInstanceView): self.selection_changed.emit() 
self._content_layout.insertWidget(0, widget) - def _update_convertor_items_group(self): + def _update_convertors_group(self): convertor_items = self._controller.get_convertor_items() - if not convertor_items and self._convertor_items_group is None: + if not convertor_items and self._convertors_group is None: return + ids_to_remove = set(self._convertor_widgets_by_id) - set( + convertor_items + ) + if ids_to_remove: + self._convertors_group.take_widgets(ids_to_remove) + + for conv_id in ids_to_remove: + widget = self._convertor_widgets_by_id.pop(conv_id) + widget.setVisible(False) + widget.deleteLater() + if not convertor_items: - self._convertor_items_group.setVisible(False) - self._content_layout.removeWidget(self._convertor_items_group) - self._convertor_items_group.deleteLater() - self._convertor_items_group = None + self._convertors_group.setVisible(False) + self._content_layout.removeWidget(self._convertors_group) + self._convertors_group.deleteLater() + self._convertors_group = None + self._convertor_ids = [] + self._convertor_widgets_by_id = {} return - if self._convertor_items_group is None: - group_widget = ConvertorItemsGroupWidget( + if self._convertors_group is None: + group_widget = BaseGroupWidget( CONVERTOR_ITEM_GROUP, self._content_widget ) - group_widget.selected.connect(self._on_widget_selection) - group_widget.double_clicked.connect(self.double_clicked) self._content_layout.insertWidget(1, group_widget) - self._convertor_items_group = group_widget + self._convertors_group = group_widget - self._convertor_items_group.update_items(convertor_items) + # TODO create convertor widgets + items_by_label = collections.defaultdict(list) + for item in convertor_items.values(): + items_by_label[item.label].append(item) + + # Sort instances by product name + sorted_labels = list(sorted(items_by_label.keys())) + + # Add new instances to widget + convertor_ids: list[str] = [] + widgets_by_id: dict[str, ConvertorItemCardWidget] = {} + for label in sorted_labels: + for item in items_by_label[label]: + convertor_ids.append(item.id) + if item.id in self._convertor_widgets_by_id: + widget = self._convertor_widgets_by_id[item.id] + widget.update_item(item) + else: + widget = ConvertorItemCardWidget(item, self) + widget.selected.connect(self._on_widget_selection) + widget.double_clicked.connect(self.double_clicked) + self._convertor_widgets_by_id[item.id] = widget + widgets_by_id[item.id] = widget + + self._convertors_group.set_widgets(widgets_by_id, convertor_ids) + self._convertor_ids = convertor_ids + self._convertor_widgets_by_id = widgets_by_id def refresh_instance_states(self, instance_ids=None): """Trigger update of instances on group widgets.""" @@ -828,23 +977,45 @@ class InstanceCardView(AbstractInstanceView): instance_items_by_id = self._controller.get_instance_items_by_id( instance_ids ) - for widget in self._widgets_by_group.values(): - widget.update_instance_values( - context_info_by_id, instance_items_by_id, instance_ids - ) + instance_ids: set[str] = set(instance_items_by_id) + available_ids: set[str] = set(instance_items_by_id) - def _on_active_changed(self, group_name, instance_id, value): - group_widget = self._widgets_by_group[group_name] - instance_widget = group_widget.get_widget_by_item_id(instance_id) - active_state_by_id = {} - if not instance_widget.is_selected: - active_state_by_id[instance_id] = value - else: - for widget in self._get_selected_widgets(): - if isinstance(widget, InstanceCardWidget): - active_state_by_id[widget.id] = value + affected_ids = 
self._get_affected_ids(instance_ids) - self._controller.set_instances_active_state(active_state_by_id) + _queue = collections.deque() + _queue.append((set(self._instance_ids_by_parent_id[None]), True)) + while _queue: + if not affected_ids: + break + + chilren_ids, is_parent_active = _queue.pop() + for instance_id in chilren_ids: + if instance_id not in affected_ids: + continue + affected_ids.discard(instance_id) + widget = self._widgets_by_id[instance_id] + if instance_id in instance_ids: + instance_ids.discard(instance_id) + if instance_id in available_ids: + available_ids.discard(instance_id) + widget.update_instance( + instance_items_by_id[instance_id], + context_info_by_id[instance_id], + is_parent_active, + ) + else: + widget.set_parent_active(is_parent_active) + + if not affected_ids: + break + + children = set(self._instance_ids_by_parent_id[instance_id]) + if children: + instance_ids |= children + _queue.append((children, widget.is_active())) + + def _on_active_changed(self, instance_id: str, value: bool) -> None: + self._toggle_instances(value, instance_id) def _on_widget_selection(self, instance_id, group_name, selection_type): """Select specific item by instance id. @@ -857,10 +1028,9 @@ class InstanceCardView(AbstractInstanceView): else: if group_name == CONVERTOR_ITEM_GROUP: - group_widget = self._convertor_items_group + new_widget = self._convertor_widgets_by_id[instance_id] else: - group_widget = self._widgets_by_group[group_name] - new_widget = group_widget.get_widget_by_item_id(instance_id) + new_widget = self._widgets_by_id[instance_id] if selection_type == SelectionTypes.clear: self._select_item_clear(instance_id, group_name, new_widget) @@ -896,7 +1066,7 @@ class InstanceCardView(AbstractInstanceView): """ self._explicitly_selected_instance_ids = ( - self._get_selected_instance_ids() + self._get_selected_item_ids() ) if new_widget.is_selected: self._explicitly_selected_instance_ids.remove(instance_id) @@ -905,11 +1075,21 @@ class InstanceCardView(AbstractInstanceView): if instance_id == CONTEXT_ID: remove_group = True else: + has_selected_items = False if group_name == CONVERTOR_ITEM_GROUP: - group_widget = self._convertor_items_group + for widget in self._convertor_widgets_by_id.values(): + if widget.is_selected: + has_selected_items = True + break else: - group_widget = self._widgets_by_group[group_name] - if not group_widget.get_selected_widgets(): + group_ids = self._instance_ids_by_group_name[group_name] + for instance_id in group_ids: + widget = self._widgets_by_id[instance_id] + if widget.is_selected: + has_selected_items = True + break + + if not has_selected_items: remove_group = True if remove_group: @@ -1021,10 +1201,16 @@ class InstanceCardView(AbstractInstanceView): sorted_widgets = [self._context_widget] else: if name == CONVERTOR_ITEM_GROUP: - group_widget = self._convertor_items_group + sorted_widgets = [ + self._convertor_widgets_by_id[conv_id] + for conv_id in self._convertor_ids + ] else: - group_widget = self._widgets_by_group[name] - sorted_widgets = group_widget.get_ordered_widgets() + instance_ids = self._instance_ids_by_group_name[name] + sorted_widgets = [ + self._widgets_by_id[instance_id] + for instance_id in instance_ids + ] # Change selection based on explicit selection if start group # was not passed yet @@ -1136,21 +1322,18 @@ class InstanceCardView(AbstractInstanceView): def get_selected_items(self): """Get selected instance ids and context.""" - convertor_identifiers = [] - instances = [] - selected_widgets = 
self._get_selected_widgets() - - context_selected = False - for widget in selected_widgets: - if widget is self._context_widget: - context_selected = True - - elif isinstance(widget, InstanceCardWidget): - instances.append(widget.id) - - elif isinstance(widget, ConvertorItemCardWidget): - convertor_identifiers.append(widget.identifier) - + context_selected = ( + self._context_widget is not None + and self._context_widget.is_selected + ) + instances = [ + widget.id + for widget in self._get_selected_instance_widgets() + ] + convertor_identifiers = [ + widget.identifier + for widget in self._get_selected_convertor_widgets() + ] return instances, context_selected, convertor_identifiers def set_selected_items( @@ -1182,12 +1365,19 @@ class InstanceCardView(AbstractInstanceView): is_convertor_group = group_name == CONVERTOR_ITEM_GROUP if is_convertor_group: - group_widget = self._convertor_items_group + sorted_widgets = [ + self._convertor_widgets_by_id[conv_id] + for conv_id in self._convertor_ids + ] else: - group_widget = self._widgets_by_group[group_name] + instance_ids = self._instance_ids_by_group_name[group_name] + sorted_widgets = [ + self._widgets_by_id[instance_id] + for instance_id in instance_ids + ] group_selected = False - for widget in group_widget.get_ordered_widgets(): + for widget in sorted_widgets: select = False if is_convertor_group: is_in = widget.identifier in s_convertor_identifiers @@ -1209,5 +1399,5 @@ class InstanceCardView(AbstractInstanceView): if self._active_toggle_enabled is enabled: return self._active_toggle_enabled = enabled - for group_widget in self._widgets_by_group.values(): - group_widget.set_active_toggle_enabled(enabled) + for widget in self._widgets_by_id.values(): + widget.set_active_toggle_enabled(enabled) diff --git a/client/ayon_core/tools/publisher/widgets/list_view_widgets.py b/client/ayon_core/tools/publisher/widgets/list_view_widgets.py index 969bec11e5..c524b96d5f 100644 --- a/client/ayon_core/tools/publisher/widgets/list_view_widgets.py +++ b/client/ayon_core/tools/publisher/widgets/list_view_widgets.py @@ -22,15 +22,26 @@ selection can be enabled disabled using checkbox or keyboard key presses: ... 
``` """ +from __future__ import annotations + import collections +from typing import Optional from qtpy import QtWidgets, QtCore, QtGui from ayon_core.style import get_objected_colors -from ayon_core.tools.utils import NiceCheckbox -from ayon_core.tools.utils.lib import html_escape, checkstate_int_to_enum +from ayon_core.pipeline.create import ( + InstanceContextInfo, + ParentFlags, +) + +from ayon_core.tools.utils import NiceCheckbox, BaseClickableFrame +from ayon_core.tools.utils.lib import html_escape, checkstate_int_to_enum from ayon_core.tools.publisher.abstract import AbstractPublisherFrontend +from ayon_core.tools.publisher.models.create import ( + InstanceItem, +) from ayon_core.tools.publisher.constants import ( INSTANCE_ID_ROLE, SORT_VALUE_ROLE, @@ -115,7 +126,13 @@ class InstanceListItemWidget(QtWidgets.QWidget): active_changed = QtCore.Signal(str, bool) double_clicked = QtCore.Signal() - def __init__(self, instance, context_info, parent): + def __init__( + self, + instance: InstanceItem, + context_info: InstanceContextInfo, + parent_is_active: bool, + parent: QtWidgets.QWidget, + ): super().__init__(parent) self._instance_id = instance.id @@ -131,30 +148,40 @@ class InstanceListItemWidget(QtWidgets.QWidget): product_name_label.setObjectName("ListViewProductName") active_checkbox = NiceCheckbox(parent=self) - active_checkbox.setChecked(instance.is_active) - active_checkbox.setVisible(not instance.is_mandatory) layout = QtWidgets.QHBoxLayout(self) - content_margins = layout.contentsMargins() - layout.setContentsMargins(content_margins.left() + 2, 0, 2, 0) + layout.setContentsMargins(2, 0, 2, 0) layout.addWidget(product_name_label) layout.addStretch(1) layout.addWidget(active_checkbox) - self.setAttribute(QtCore.Qt.WA_TranslucentBackground) - product_name_label.setAttribute(QtCore.Qt.WA_TranslucentBackground) - active_checkbox.setAttribute(QtCore.Qt.WA_TranslucentBackground) + for widget in ( + self, + product_name_label, + active_checkbox, + ): + widget.setAttribute(QtCore.Qt.WA_TranslucentBackground) active_checkbox.stateChanged.connect(self._on_active_change) self._instance_label_widget = product_name_label self._active_checkbox = active_checkbox - self._has_valid_context = None + # Instance info + self._has_valid_context = context_info.is_valid + self._is_mandatory = instance.is_mandatory + self._instance_is_active = instance.is_active + self._parent_flags = instance.parent_flags - self._checkbox_enabled = not instance.is_mandatory + # Parent active state is fluent and can change + self._parent_is_active = parent_is_active - self._set_valid_property(context_info.is_valid) + # Widget logic info + self._state = None + self._toggle_is_enabled = True + + self._update_style_state() + self._update_checkbox_state() def mouseDoubleClickEvent(self, event): widget = self.childAt(event.pos()) @@ -162,59 +189,119 @@ class InstanceListItemWidget(QtWidgets.QWidget): if widget is not self._active_checkbox: self.double_clicked.emit() - def _set_valid_property(self, valid): - if self._has_valid_context == valid: - return - self._has_valid_context = valid - state = "" - if not valid: - state = "invalid" - self._instance_label_widget.setProperty("state", state) - self._instance_label_widget.style().polish(self._instance_label_widget) - - def is_active(self): + def is_active(self) -> bool: """Instance is activated.""" return self._active_checkbox.isChecked() - def set_active(self, new_value): - """Change active state of instance and checkbox.""" - old_value = self.is_active() - if new_value is 
None: - new_value = not old_value - - if new_value != old_value: - self._active_checkbox.blockSignals(True) - self._active_checkbox.setChecked(new_value) - self._active_checkbox.blockSignals(False) - def is_checkbox_enabled(self) -> bool: """Checkbox can be changed by user.""" - return self._checkbox_enabled + return ( + self._used_parent_active() + and not self._is_mandatory + ) - def update_instance(self, instance, context_info): + def set_active_toggle_enabled(self, enabled: bool) -> None: + """Toggle can be available for user.""" + self._toggle_is_enabled = enabled + self._update_checkbox_state() + + def set_active(self, new_value: Optional[bool]) -> None: + """Change active state of instance and checkbox by user interaction. + + Args: + new_value (Optional[bool]): New active state of instance. Toggle + if is 'None'. + + """ + # Do not allow to change state if is mandatory or parent is not active + if not self.is_checkbox_enabled(): + return + + if new_value is None: + new_value = not self._active_checkbox.isChecked() + # Update instance active state + self._instance_is_active = new_value + self._set_checked(new_value) + + def update_instance( + self, + instance: InstanceItem, + context_info: InstanceContextInfo, + parent_is_active: bool, + ) -> None: """Update instance object.""" # Check product name + self._instance_id = instance.id label = instance.label if label != self._instance_label_widget.text(): self._instance_label_widget.setText(html_escape(label)) - # Check active state - self.set_active(instance.is_active) - self._set_is_mandatory(instance.is_mandatory) - # Check valid states - self._set_valid_property(context_info.is_valid) + + self._is_mandatory = instance.is_mandatory + self._instance_is_active = instance.is_active + self._has_valid_context = context_info.is_valid + self._parent_is_active = parent_is_active + self._parent_flags = instance.parent_flags + + self._update_checkbox_state() + self._update_style_state() + + def is_parent_active(self) -> bool: + return self._parent_is_active + + def _used_parent_active(self) -> bool: + parent_enabled = True + if self._parent_flags & ParentFlags.share_active: + parent_enabled = self._parent_is_active + return parent_enabled + + def set_parent_is_active(self, active: bool) -> None: + if self._parent_is_active is active: + return + self._parent_is_active = active + self._update_style_state() + self._update_checkbox_state() + + def _set_checked(self, checked: bool) -> None: + """Change checked state in UI without triggering checkstate change.""" + old_value = self._active_checkbox.isChecked() + if checked is not old_value: + self._active_checkbox.blockSignals(True) + self._active_checkbox.setChecked(checked) + self._active_checkbox.blockSignals(False) + + def _update_style_state(self) -> None: + state = "" + if not self._used_parent_active(): + state = "disabled" + elif not self._has_valid_context: + state = "invalid" + + if state == self._state: + return + self._state = state + self._instance_label_widget.setProperty("state", state) + self._instance_label_widget.style().polish(self._instance_label_widget) + + def _update_checkbox_state(self) -> None: + parent_enabled = self._used_parent_active() + + self._active_checkbox.setEnabled( + self._toggle_is_enabled + and not self._is_mandatory + and parent_enabled + ) + # Hide checkbox for mandatory instances + self._active_checkbox.setVisible(not self._is_mandatory) + + # Visually disable instance if parent is disabled + checked = parent_enabled and self._instance_is_active + 
self._set_checked(checked) def _on_active_change(self): self.active_changed.emit( self._instance_id, self._active_checkbox.isChecked() ) - def set_active_toggle_enabled(self, enabled): - self._active_checkbox.setEnabled(enabled) - - def _set_is_mandatory(self, is_mandatory: bool) -> None: - self._checkbox_enabled = not is_mandatory - self._active_checkbox.setVisible(not is_mandatory) - class ListContextWidget(QtWidgets.QFrame): """Context (or global attributes) widget.""" @@ -241,43 +328,33 @@ class ListContextWidget(QtWidgets.QFrame): self.double_clicked.emit() -class InstanceListGroupWidget(QtWidgets.QFrame): +class InstanceListGroupWidget(BaseClickableFrame): """Widget representing group of instances. - Has collapse/expand indicator, label of group and checkbox modifying all - of its children. + Has label of group and checkbox modifying all of its children. """ - expand_changed = QtCore.Signal(str, bool) toggle_requested = QtCore.Signal(str, int) + expand_change_requested = QtCore.Signal(str) def __init__(self, group_name, parent): super().__init__(parent) self.setObjectName("InstanceListGroupWidget") self.group_name = group_name - self._expanded = False - - expand_btn = QtWidgets.QToolButton(self) - expand_btn.setObjectName("ArrowBtn") - expand_btn.setArrowType(QtCore.Qt.RightArrow) - expand_btn.setMaximumWidth(14) name_label = QtWidgets.QLabel(group_name, self) toggle_checkbox = NiceCheckbox(parent=self) layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(5, 0, 2, 0) - layout.addWidget(expand_btn) + layout.setContentsMargins(2, 0, 2, 0) layout.addWidget( name_label, 1, QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter ) layout.addWidget(toggle_checkbox, 0) name_label.setAttribute(QtCore.Qt.WA_TranslucentBackground) - expand_btn.setAttribute(QtCore.Qt.WA_TranslucentBackground) - expand_btn.clicked.connect(self._on_expand_clicked) toggle_checkbox.stateChanged.connect(self._on_checkbox_change) self._ignore_state_change = False @@ -285,7 +362,6 @@ class InstanceListGroupWidget(QtWidgets.QFrame): self._expected_checkstate = None self.name_label = name_label - self.expand_btn = expand_btn self.toggle_checkbox = toggle_checkbox def set_checkstate(self, state): @@ -307,26 +383,15 @@ class InstanceListGroupWidget(QtWidgets.QFrame): return self.toggle_checkbox.checkState() + def set_active_toggle_enabled(self, enabled): + self.toggle_checkbox.setEnabled(enabled) + def _on_checkbox_change(self, state): if not self._ignore_state_change: self.toggle_requested.emit(self.group_name, state) - def _on_expand_clicked(self): - self.expand_changed.emit(self.group_name, not self._expanded) - - def set_expanded(self, expanded): - """Change icon of collapse/expand identifier.""" - if self._expanded == expanded: - return - - self._expanded = expanded - if expanded: - self.expand_btn.setArrowType(QtCore.Qt.DownArrow) - else: - self.expand_btn.setArrowType(QtCore.Qt.RightArrow) - - def set_active_toggle_enabled(self, enabled): - self.toggle_checkbox.setEnabled(enabled) + def _mouse_release_callback(self): + self.expand_change_requested.emit(self.group_name) class InstanceTreeView(QtWidgets.QTreeView): @@ -339,24 +404,11 @@ class InstanceTreeView(QtWidgets.QTreeView): self.setObjectName("InstanceListView") self.setHeaderHidden(True) - self.setIndentation(0) self.setExpandsOnDoubleClick(False) self.setSelectionMode( QtWidgets.QAbstractItemView.ExtendedSelection ) self.viewport().setMouseTracking(True) - self._pressed_group_index = None - - def _expand_item(self, index, expand=None): - is_expanded = 
self.isExpanded(index) - if expand is None: - expand = not is_expanded - - if expand != is_expanded: - if expand: - self.expand(index) - else: - self.collapse(index) def get_selected_instance_ids(self): """Ids of selected instances.""" @@ -388,53 +440,6 @@ class InstanceTreeView(QtWidgets.QTreeView): return super().event(event) - def _mouse_press(self, event): - """Store index of pressed group. - - This is to be able to change state of group and process mouse - "double click" as 2x "single click". - """ - if event.button() != QtCore.Qt.LeftButton: - return - - pressed_group_index = None - pos_index = self.indexAt(event.pos()) - if pos_index.data(IS_GROUP_ROLE): - pressed_group_index = pos_index - - self._pressed_group_index = pressed_group_index - - def mousePressEvent(self, event): - self._mouse_press(event) - super().mousePressEvent(event) - - def mouseDoubleClickEvent(self, event): - self._mouse_press(event) - super().mouseDoubleClickEvent(event) - - def _mouse_release(self, event, pressed_index): - if event.button() != QtCore.Qt.LeftButton: - return False - - pos_index = self.indexAt(event.pos()) - if not pos_index.data(IS_GROUP_ROLE) or pressed_index != pos_index: - return False - - if self.state() == QtWidgets.QTreeView.State.DragSelectingState: - indexes = self.selectionModel().selectedIndexes() - if len(indexes) != 1 or indexes[0] != pos_index: - return False - - self._expand_item(pos_index) - return True - - def mouseReleaseEvent(self, event): - pressed_index = self._pressed_group_index - self._pressed_group_index = None - result = self._mouse_release(event, pressed_index) - if not result: - super().mouseReleaseEvent(event) - class InstanceListView(AbstractInstanceView): """Widget providing abstract methods of AbstractInstanceView for list view. 
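
Both the card view above and the list view whose hunks follow propagate an "effective active" state from parent instances to their children: when an instance's `parent_flags` include `ParentFlags.share_active`, the child is only shown as active while its whole parent chain is active, and the views walk `_instance_ids_by_parent_id` breadth-first to refresh the affected widgets. The sketch below illustrates that traversal on its own, outside of Qt and the widget bookkeeping; the `Item` dataclass and the flag values are illustrative stand-ins, not the actual `InstanceItem` or `ParentFlags` definitions from `ayon_core.pipeline.create`.

```python
from __future__ import annotations

import collections
from dataclasses import dataclass
from enum import IntFlag
from typing import Optional


class ParentFlags(IntFlag):
    # Illustrative stand-in for the real flag; only 'share_active' matters here
    none = 0
    share_active = 1


@dataclass
class Item:
    # Minimal stand-in for an instance item as the views see it
    item_id: str
    is_active: bool
    parent_id: Optional[str] = None
    parent_flags: ParentFlags = ParentFlags.share_active


def effective_active_by_id(items: list[Item]) -> dict[str, bool]:
    """Resolve the active state each widget should display.

    A child that shares active state with its parent is shown as inactive
    whenever any ancestor in its parent chain is inactive.
    """
    by_parent = collections.defaultdict(list)
    for item in items:
        by_parent[item.parent_id].append(item)

    output = {}
    queue = collections.deque()
    # Top-level items have no parent and behave as if the parent is active
    queue.append((None, True))
    while queue:
        parent_id, parent_active = queue.popleft()
        for item in by_parent[parent_id]:
            is_active = item.is_active
            if not parent_active and item.parent_flags & ParentFlags.share_active:
                is_active = False
            output[item.item_id] = is_active
            # Children see this item's resolved state as their parent state
            queue.append((item.item_id, is_active))
    return output


if __name__ == "__main__":
    items = [
        Item("group", True),
        Item("child-a", True, parent_id="group"),
        Item("child-b", False, parent_id="group"),
        Item("grandchild", True, parent_id="child-b"),
    ]
    # 'grandchild' resolves to False because 'child-b' is inactive
    print(effective_active_by_id(items))
```

This mirrors, under the assumptions stated above, the `parent_active_by_id` computation in the card view's `refresh` and the deque-based updates in both views' `refresh_instance_states`, just without the widget and selection bookkeeping.
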
@@ -472,18 +477,21 @@ class InstanceListView(AbstractInstanceView): instance_view.selectionModel().selectionChanged.connect( self._on_selection_change ) - instance_view.collapsed.connect(self._on_collapse) - instance_view.expanded.connect(self._on_expand) instance_view.toggle_requested.connect(self._on_toggle_request) instance_view.double_clicked.connect(self.double_clicked) self._group_items = {} self._group_widgets = {} - self._widgets_by_id = {} + self._widgets_by_id: dict[str, InstanceListItemWidget] = {} + self._items_by_id = {} + self._parent_id_by_id = {} + self._instance_ids_by_parent_id = collections.defaultdict(set) # Group by instance id for handling of active state self._group_by_instance_id = {} self._context_item = None self._context_widget = None + self._missing_parent_item = None + self._parent_grouping = True self._convertor_group_item = None self._convertor_group_widget = None @@ -496,47 +504,17 @@ class InstanceListView(AbstractInstanceView): self._active_toggle_enabled = True - def _on_expand(self, index): - self._update_widget_expand_state(index, True) - - def _on_collapse(self, index): - self._update_widget_expand_state(index, False) - - def _update_widget_expand_state(self, index, expanded): - group_name = index.data(GROUP_ROLE) - if group_name == CONVERTOR_ITEM_GROUP: - group_widget = self._convertor_group_widget - else: - group_widget = self._group_widgets.get(group_name) - - if group_widget: - group_widget.set_expanded(expanded) - - def _on_toggle_request(self, toggle): + def _on_toggle_request(self, toggle: int) -> None: if not self._active_toggle_enabled: return - selected_instance_ids = self._instance_view.get_selected_instance_ids() if toggle == -1: active = None elif toggle == 1: active = True else: active = False - - group_names = set() - for instance_id in selected_instance_ids: - widget = self._widgets_by_id.get(instance_id) - if widget is None: - continue - - widget.set_active(active) - group_name = self._group_by_instance_id.get(instance_id) - if group_name is not None: - group_names.add(group_name) - - for group_name in group_names: - self._update_group_checkstate(group_name) + self._toggle_active_state(active) def _update_group_checkstate(self, group_name): """Update checkstate of one group.""" @@ -545,8 +523,10 @@ class InstanceListView(AbstractInstanceView): return activity = None - for instance_id, _group_name in self._group_by_instance_id.items(): - if _group_name != group_name: + for ( + instance_id, instance_group_name + ) in self._group_by_instance_id.items(): + if instance_group_name != group_name: continue instance_widget = self._widgets_by_id.get(instance_id) @@ -583,14 +563,29 @@ class InstanceListView(AbstractInstanceView): self._update_convertor_items_group() context_info_by_id = self._controller.get_instances_context_info() - + instance_items = self._controller.get_instance_items() # Prepare instances by their groups instances_by_group_name = collections.defaultdict(list) + instances_by_parent_id = collections.defaultdict(list) + instance_ids_by_parent_id = collections.defaultdict(set) group_names = set() - for instance in self._controller.get_instance_items(): + instance_ids = set() + for instance in instance_items: + instance_ids.add(instance.id) + instance_ids_by_parent_id[instance.parent_instance_id].add( + instance.id + ) + if instance.parent_instance_id: + instances_by_parent_id[instance.parent_instance_id].append( + instance + ) + if self._parent_grouping: + continue + group_label = instance.group_label group_names.add(group_label) 
instances_by_group_name[group_label].append(instance) + self._group_by_instance_id[instance.id] = group_label # Create new groups based on prepared `instances_by_group_name` if self._make_sure_groups_exists(group_names): @@ -598,95 +593,88 @@ class InstanceListView(AbstractInstanceView): # Remove groups that are not available anymore self._remove_groups_except(group_names) + self._remove_instances_except(instance_items) - # Store which groups should be expanded at the end - expand_groups = set() + expand_to_items = [] + widgets_by_id = {} + group_items = [ + ( + self._group_widgets[group_name], + instances_by_group_name[group_name], + group_item, + ) + for group_name, group_item in self._group_items.items() + ] + + # Handle orphaned instances + missing_parent_ids = set(instances_by_parent_id) - instance_ids + if not missing_parent_ids: + # Make sure the item is not in view if there are no orhpaned items + self._remove_missing_parent_item() + else: + # Add orphaned group item and append them to 'group_items' + orphans_item = self._add_missing_parent_item() + for instance_id in missing_parent_ids: + group_items.append(( + None, + instances_by_parent_id[instance_id], + orphans_item, + )) + + items_with_instance = {} # Process changes in each group item # - create new instance, update existing and remove not existing - for group_name, group_item in self._group_items.items(): - # Instance items to remove - # - will contain all existing instance ids at the start - # - instance ids may be removed when existing instances are checked - to_remove = set() - # Mapping of existing instances under group item - existing_mapping = {} + for group_widget, group_instances, group_item in group_items: + # Group widget is not set if is orphaned + # - This might need to be changed in future if widget could + # be 'None' + is_orpaned_item = group_widget is None - # Get group index to be able to get children indexes - group_index = self._instance_model.index( - group_item.row(), group_item.column() - ) + # Collect all new instances by parent id + # - 'None' is used if parent is group item + new_items = collections.defaultdict(list) + # Tuples of model item and instance itself + for instance in group_instances: + _queue = collections.deque() + _queue.append((instance, group_item, None)) + while _queue: + instance, parent_item, parent_id = _queue.popleft() + instance_id = instance.id + # Remove group name from groups mapping + if parent_id is not None: + self._group_by_instance_id.pop(instance_id, None) - # Iterate over children indexes of group item - for idx in range(group_item.rowCount()): - index = self._instance_model.index(idx, 0, group_index) - instance_id = index.data(INSTANCE_ID_ROLE) - # Add all instance into `to_remove` set - to_remove.add(instance_id) - existing_mapping[instance_id] = idx + # Create new item and store it as new + item = self._items_by_id.get(instance_id) + if item is None: + item = QtGui.QStandardItem() + item.setData(instance_id, INSTANCE_ID_ROLE) + self._items_by_id[instance_id] = item + new_items[parent_id].append(item) - # Collect all new instances that are not existing under group - # New items - new_items = [] - # Tuples of new instance and instance itself - new_items_with_instance = [] - # Group activity (should be {-1;0;1} at the end) - # - 0 when all instances are disabled - # - 1 when all instances are enabled - # - -1 when it's mixed - activity = None - for instance in instances_by_group_name[group_name]: - instance_id = instance.id - # Handle group activity - if activity is 
None: - activity = int(instance.is_active) - elif activity == -1: - pass - elif activity != instance.is_active: - activity = -1 + elif item.parent() is not parent_item: + current_parent = item.parent() + if current_parent is not None: + current_parent.takeRow(item.row()) + new_items[parent_id].append(item) - context_info = context_info_by_id[instance_id] + self._parent_id_by_id[instance_id] = parent_id - self._group_by_instance_id[instance_id] = group_name - # Remove instance id from `to_remove` if already exists and - # trigger update of widget - if instance_id in to_remove: - to_remove.remove(instance_id) - widget = self._widgets_by_id[instance_id] - widget.update_instance(instance, context_info) - continue + items_with_instance[instance.id] = ( + item, + instance, + is_orpaned_item, + ) - # Create new item and store it as new - item = QtGui.QStandardItem() - item.setData(instance.product_name, SORT_VALUE_ROLE) - item.setData(instance.product_name, GROUP_ROLE) - item.setData(instance_id, INSTANCE_ID_ROLE) - new_items.append(item) - new_items_with_instance.append((item, instance)) + item.setData(instance.product_name, SORT_VALUE_ROLE) + item.setData(instance.product_name, GROUP_ROLE) - # Set checkstate of group checkbox - state = QtCore.Qt.PartiallyChecked - if activity == 0: - state = QtCore.Qt.Unchecked - elif activity == 1: - state = QtCore.Qt.Checked + if not self._parent_grouping: + continue - widget = self._group_widgets[group_name] - widget.set_checkstate(state) - - # Remove items that were not found - idx_to_remove = [] - for instance_id in to_remove: - idx_to_remove.append(existing_mapping[instance_id]) - - # Remove them in reverse order to prevent row index changes - for idx in reversed(sorted(idx_to_remove)): - group_item.removeRows(idx, 1) - - # Cleanup instance related widgets - for instance_id in to_remove: - self._group_by_instance_id.pop(instance_id) - widget = self._widgets_by_id.pop(instance_id) - widget.deleteLater() + children = instances_by_parent_id.pop(instance_id, []) + for child in children: + _queue.append((child, item, instance_id)) # Process new instance items and add them to model and create # their widgets @@ -695,41 +683,106 @@ class InstanceListView(AbstractInstanceView): sort_at_the_end = True # Add items under group item - group_item.appendRows(new_items) + for parent_id, items in new_items.items(): + if parent_id is None or not self._parent_grouping: + parent_item = group_item + else: + parent_item = self._items_by_id[parent_id] - for item, instance in new_items_with_instance: - context_info = context_info_by_id[instance.id] - if not context_info.is_valid: - expand_groups.add(group_name) - item_index = self._instance_model.index( - item.row(), - item.column(), - group_index - ) - proxy_index = self._proxy_model.mapFromSource(item_index) - widget = InstanceListItemWidget( - instance, context_info, self._instance_view - ) - widget.set_active_toggle_enabled( - self._active_toggle_enabled - ) - widget.active_changed.connect(self._on_active_changed) - widget.double_clicked.connect(self.double_clicked) - self._instance_view.setIndexWidget(proxy_index, widget) - self._widgets_by_id[instance.id] = widget + parent_item.appendRows(items) - # Trigger sort at the end of refresh - if sort_at_the_end: - self._proxy_model.sort(0) + ids_order = [] + ids_queue = collections.deque() + ids_queue.extend(instance_ids_by_parent_id[None]) + while ids_queue: + parent_id = ids_queue.popleft() + ids_order.append(parent_id) + ids_queue.extend(instance_ids_by_parent_id[parent_id]) 
+ ids_order.extend(set(items_with_instance) - set(ids_order)) - # Expand groups marked for expanding - for group_name in expand_groups: - group_item = self._group_items[group_name] - proxy_index = self._proxy_model.mapFromSource(group_item.index()) + for instance_id in ids_order: + item, instance, is_orpaned_item = items_with_instance[instance_id] + context_info = context_info_by_id[instance.id] + # TODO expand all parents + if not context_info.is_valid: + expand_to_items.append(item) + parent_active = True + if is_orpaned_item: + parent_active = False + + parent_id = instance.parent_instance_id + if parent_id: + parent_widget = widgets_by_id.get(parent_id) + parent_active = False + if parent_widget is not None: + parent_active = parent_widget.is_active() + item_index = self._instance_model.indexFromItem(item) + proxy_index = self._proxy_model.mapFromSource(item_index) + widget = self._instance_view.indexWidget(proxy_index) + if isinstance(widget, InstanceListItemWidget): + widget.update_instance( + instance, + context_info, + parent_active, + ) + else: + widget = InstanceListItemWidget( + instance, + context_info, + parent_active, + self._instance_view + ) + widget.active_changed.connect(self._on_active_changed) + widget.double_clicked.connect(self.double_clicked) + self._instance_view.setIndexWidget(proxy_index, widget) + widget.set_active_toggle_enabled( + self._active_toggle_enabled + ) + + widgets_by_id[instance.id] = widget + self._widgets_by_id.pop(instance.id, None) + + for widget in self._widgets_by_id.values(): + widget.setVisible(False) + widget.deleteLater() + + self._widgets_by_id = widgets_by_id + self._instance_ids_by_parent_id = instance_ids_by_parent_id + + # Set checkstate of group checkbox + for group_name in self._group_items: + self._update_group_checkstate(group_name) + + # Expand items marked for expanding + items_to_expand = [] + _marked_ids = set() + for item in expand_to_items: + parent = item.parent() + _items = [] + while True: + # Parent is not set or is group (groups are separate) + if parent is None or parent.data(IS_GROUP_ROLE): + break + instance_id = parent.data(INSTANCE_ID_ROLE) + # Parent was already marked for expanding + if instance_id in _marked_ids: + break + _marked_ids.add(instance_id) + _items.append(parent) + parent = parent.parent() + + items_to_expand.extend(reversed(_items)) + + for item in items_to_expand: + proxy_index = self._proxy_model.mapFromSource(item.index()) self._instance_view.expand(proxy_index) - def _make_sure_context_item_exists(self): + # Trigger sort at the end of refresh + if sort_at_the_end: + self._proxy_model.sort(0) + + def _make_sure_context_item_exists(self) -> bool: if self._context_item is not None: return False @@ -752,7 +805,7 @@ class InstanceListView(AbstractInstanceView): self._context_item = context_item return True - def _update_convertor_items_group(self): + def _update_convertor_items_group(self) -> bool: created_new_items = False convertor_items_by_id = self._controller.get_convertor_items() group_item = self._convertor_group_item @@ -761,7 +814,7 @@ class InstanceListView(AbstractInstanceView): root_item = self._instance_model.invisibleRootItem() if not convertor_items_by_id: - root_item.removeRow(group_item.row()) + root_item.takeRow(group_item.row()) self._convertor_group_widget.deleteLater() self._convertor_group_widget = None self._convertor_items_by_id = {} @@ -785,9 +838,7 @@ class InstanceListView(AbstractInstanceView): CONVERTOR_ITEM_GROUP, self._instance_view ) 
widget.toggle_checkbox.setVisible(False) - widget.expand_changed.connect( - self._on_convertor_group_expand_request - ) + self._instance_view.setIndexWidget(proxy_index, widget) self._convertor_group_item = group_item @@ -798,7 +849,7 @@ class InstanceListView(AbstractInstanceView): child_identifier = child_item.data(CONVERTER_IDENTIFIER_ROLE) if child_identifier not in convertor_items_by_id: self._convertor_items_by_id.pop(child_identifier, None) - group_item.removeRows(row, 1) + group_item.takeRow(row) new_items = [] for identifier, convertor_item in convertor_items_by_id.items(): @@ -820,7 +871,7 @@ class InstanceListView(AbstractInstanceView): return created_new_items - def _make_sure_groups_exists(self, group_names): + def _make_sure_groups_exists(self, group_names: set[str]) -> bool: new_group_items = [] for group_name in group_names: if group_name in self._group_items: @@ -853,14 +904,16 @@ class InstanceListView(AbstractInstanceView): widget.set_active_toggle_enabled( self._active_toggle_enabled ) - widget.expand_changed.connect(self._on_group_expand_request) widget.toggle_requested.connect(self._on_group_toggle_request) + widget.expand_change_requested.connect( + self._on_expand_toggle_request + ) self._group_widgets[group_name] = widget self._instance_view.setIndexWidget(proxy_index, widget) return True - def _remove_groups_except(self, group_names): + def _remove_groups_except(self, group_names: set[str]) -> None: # Remove groups that are not available anymore root_item = self._instance_model.invisibleRootItem() for group_name in tuple(self._group_items.keys()): @@ -868,42 +921,197 @@ class InstanceListView(AbstractInstanceView): continue group_item = self._group_items.pop(group_name) - root_item.removeRow(group_item.row()) + root_item.takeRow(group_item.row()) widget = self._group_widgets.pop(group_name) + widget.setVisible(False) widget.deleteLater() + def _remove_instances_except(self, instance_items: list[InstanceItem]): + parent_id_by_id = { + item.id: item.parent_instance_id + for item in instance_items + } + instance_ids = set(parent_id_by_id) + all_removed_ids = set(self._items_by_id) - instance_ids + queue = collections.deque() + for group_item in self._group_items.values(): + queue.append((group_item, None)) + while queue: + parent_item, parent_id = queue.popleft() + children = [ + parent_item.child(row) + for row in range(parent_item.rowCount()) + ] + for child in children: + instance_id = child.data(INSTANCE_ID_ROLE) + if instance_id not in parent_id_by_id: + parent_item.takeRow(child.row()) + elif parent_id != parent_id_by_id[instance_id]: + parent_item.takeRow(child.row()) + + queue.append((child, instance_id)) + + for instance_id in all_removed_ids: + self._items_by_id.pop(instance_id) + self._parent_id_by_id.pop(instance_id) + self._group_by_instance_id.pop(instance_id, None) + widget = self._widgets_by_id.pop(instance_id, None) + if widget is not None: + widget.setVisible(False) + widget.deleteLater() + + def _add_missing_parent_item(self) -> QtGui.QStandardItem: + label = "! Orphaned instances !" 
+ if self._missing_parent_item is None: + item = QtGui.QStandardItem() + item.setData(label, GROUP_ROLE) + item.setData("_", SORT_VALUE_ROLE) + item.setData(True, IS_GROUP_ROLE) + item.setFlags(QtCore.Qt.ItemIsEnabled) + self._missing_parent_item = item + + if self._missing_parent_item.row() < 0: + root_item = self._instance_model.invisibleRootItem() + root_item.appendRow(self._missing_parent_item) + index = self._missing_parent_item.index() + proxy_index = self._proxy_model.mapFromSource(index) + widget = InstanceListGroupWidget(label, self._instance_view) + widget.toggle_checkbox.setVisible(False) + self._instance_view.setIndexWidget(proxy_index, widget) + return self._missing_parent_item + + def _remove_missing_parent_item(self) -> None: + if self._missing_parent_item is None: + return + + row = self._missing_parent_item.row() + if row < 0: + return + + parent = self._missing_parent_item.parent() + if parent is None: + parent = self._instance_model.invisibleRootItem() + index = self._missing_parent_item.index() + proxy_index = self._proxy_model.mapFromSource(index) + widget = self._instance_view.indexWidget(proxy_index) + if widget is not None: + widget.setVisible(False) + widget.deleteLater() + parent.takeRow(self._missing_parent_item.row()) + _queue = collections.deque() + _queue.append(self._missing_parent_item) + while _queue: + item = _queue.popleft() + for _ in range(item.rowCount()): + child = item.child(0) + _queue.append(child) + item.takeRow(0) + + self._missing_parent_item = None + def refresh_instance_states(self, instance_ids=None): """Trigger update of all instances.""" if instance_ids is not None: instance_ids = set(instance_ids) - context_info_by_id = self._controller.get_instances_context_info() + + context_info_by_id = self._controller.get_instances_context_info( + instance_ids + ) instance_items_by_id = self._controller.get_instance_items_by_id( instance_ids ) - for instance_id, widget in self._widgets_by_id.items(): - if instance_ids is not None and instance_id not in instance_ids: - continue - widget.update_instance( - instance_items_by_id[instance_id], - context_info_by_id[instance_id], - ) + instance_ids = set(instance_items_by_id) + available_ids = set(instance_ids) + + _queue = collections.deque() + _queue.append((set(self._instance_ids_by_parent_id[None]), True)) + + discarted_ids = set() + while _queue: + if not instance_ids: + break + + children_ids, parent_active = _queue.popleft() + for instance_id in children_ids: + widget = self._widgets_by_id[instance_id] + # Parent active state changed -> traverse children too + add_children = False + if instance_id in instance_ids: + add_children = ( + parent_active is not widget.is_parent_active() + ) + if instance_id in available_ids: + available_ids.discard(instance_id) + widget.update_instance( + instance_items_by_id[instance_id], + context_info_by_id[instance_id], + parent_active, + ) + + instance_ids.discard(instance_id) + discarted_ids.add(instance_id) + + if parent_active is not widget.is_parent_active(): + widget.set_parent_is_active(parent_active) + add_children = True + + if not add_children: + if not instance_ids: + break + continue + + _children = set(self._instance_ids_by_parent_id[instance_id]) + if _children: + instance_ids |= _children + _queue.append((_children, widget.is_active())) + + if not instance_ids: + break + + def parent_grouping_enabled(self) -> bool: + return self._parent_grouping + + def set_parent_grouping(self, parent_grouping: bool) -> None: + self._parent_grouping = parent_grouping def 
_on_active_changed(self, changed_instance_id, new_value): - selected_instance_ids, _, _ = self.get_selected_items() + self._toggle_active_state(new_value, changed_instance_id) + + def _toggle_active_state( + self, + new_value: Optional[bool], + active_id: Optional[str] = None, + instance_ids: Optional[set[str]] = None, + ) -> None: + if instance_ids is None: + instance_ids, _, _ = self.get_selected_items() + if active_id and active_id not in instance_ids: + instance_ids = {active_id} active_by_id = {} - found = False - for instance_id in selected_instance_ids: - active_by_id[instance_id] = new_value - if not found and instance_id == changed_instance_id: - found = True + _queue = collections.deque() + _queue.append((set(self._instance_ids_by_parent_id[None]), True)) - if not found: - active_by_id = {changed_instance_id: new_value} + while _queue: + children_ids, parent_active = _queue.popleft() + for instance_id in children_ids: + widget = self._widgets_by_id[instance_id] + widget.set_parent_is_active(parent_active) + if instance_id in instance_ids: + value = new_value + if value is None: + value = not widget.is_active() + widget.set_active(value) + active_by_id[instance_id] = value + + children = set( + self._instance_ids_by_parent_id[instance_id] + ) + if children: + _queue.append((children, widget.is_active())) self._controller.set_instances_active_state(active_by_id) - self._change_active_instances(active_by_id, new_value) group_names = set() for instance_id in active_by_id: group_name = self._group_by_instance_id.get(instance_id) @@ -913,93 +1121,55 @@ class InstanceListView(AbstractInstanceView): for group_name in group_names: self._update_group_checkstate(group_name) - def _change_active_instances(self, instance_ids, new_value): - if not instance_ids: - return - - for instance_id in instance_ids: - widget = self._widgets_by_id.get(instance_id) - if widget: - widget.set_active(new_value) - def _on_selection_change(self, *_args): self.selection_changed.emit() - def _on_group_expand_request(self, group_name, expanded): + def _on_expand_toggle_request(self, group_name): group_item = self._group_items.get(group_name) if not group_item: return - - group_index = self._instance_model.index( - group_item.row(), group_item.column() - ) - proxy_index = self._proxy_model.mapFromSource(group_index) - self._instance_view.setExpanded(proxy_index, expanded) - - def _on_convertor_group_expand_request(self, _, expanded): - group_item = self._convertor_group_item - if not group_item: - return - group_index = self._instance_model.index( - group_item.row(), group_item.column() - ) - proxy_index = self._proxy_model.mapFromSource(group_index) - self._instance_view.setExpanded(proxy_index, expanded) + proxy_index = self._proxy_model.mapFromSource(group_item.index()) + new_state = not self._instance_view.isExpanded(proxy_index) + self._instance_view.setExpanded(proxy_index, new_state) def _on_group_toggle_request(self, group_name, state): state = checkstate_int_to_enum(state) if state == QtCore.Qt.PartiallyChecked: return - if state == QtCore.Qt.Checked: - active = True - else: - active = False - group_item = self._group_items.get(group_name) if not group_item: return - active_by_id = {} - all_changed = True + active = state == QtCore.Qt.Checked + + instance_ids = set() for row in range(group_item.rowCount()): - item = group_item.child(row) - instance_id = item.data(INSTANCE_ID_ROLE) - widget = self._widgets_by_id.get(instance_id) - if widget is None: - continue - if widget.is_checkbox_enabled(): - 
active_by_id[instance_id] = active - else: - all_changed = False + child = group_item.child(row) + instance_id = child.data(INSTANCE_ID_ROLE) + instance_ids.add(instance_id) - self._controller.set_instances_active_state(active_by_id) - - self._change_active_instances(active_by_id, active) + self._toggle_active_state(active, instance_ids=instance_ids) proxy_index = self._proxy_model.mapFromSource(group_item.index()) if not self._instance_view.isExpanded(proxy_index): self._instance_view.expand(proxy_index) - if not all_changed: - # If not all instances were changed, update group checkstate - self._update_group_checkstate(group_name) - - def has_items(self): + def has_items(self) -> bool: if self._convertor_group_widget is not None: return True if self._group_items: return True return False - def get_selected_items(self): + def get_selected_items(self) -> tuple[list[str], bool, list[str]]: """Get selected instance ids and context selection. Returns: - tuple: Selected instance ids and boolean if context - is selected. - """ + tuple[list[str], bool, list[str]]: Selected instance ids, + boolean if context is selected and selected convertor ids. + """ instance_ids = [] convertor_identifiers = [] context_selected = False @@ -1123,7 +1293,7 @@ class InstanceListView(AbstractInstanceView): | QtCore.QItemSelectionModel.Rows ) - def set_active_toggle_enabled(self, enabled): + def set_active_toggle_enabled(self, enabled: bool) -> None: if self._active_toggle_enabled is enabled: return diff --git a/client/ayon_core/tools/publisher/widgets/overview_widget.py b/client/ayon_core/tools/publisher/widgets/overview_widget.py index 46395328e0..01799ac908 100644 --- a/client/ayon_core/tools/publisher/widgets/overview_widget.py +++ b/client/ayon_core/tools/publisher/widgets/overview_widget.py @@ -1,3 +1,7 @@ +from __future__ import annotations + +from typing import Generator + from qtpy import QtWidgets, QtCore from ayon_core.tools.publisher.abstract import AbstractPublisherFrontend @@ -6,6 +10,7 @@ from .border_label_widget import BorderedLabelWidget from .card_view_widgets import InstanceCardView from .list_view_widgets import InstanceListView from .widgets import ( + AbstractInstanceView, CreateInstanceBtn, RemoveInstanceBtn, ChangeViewBtn, @@ -43,10 +48,16 @@ class OverviewWidget(QtWidgets.QFrame): product_view_cards = InstanceCardView(controller, product_views_widget) product_list_view = InstanceListView(controller, product_views_widget) + product_list_view.set_parent_grouping(False) + product_list_view_grouped = InstanceListView( + controller, product_views_widget + ) + product_list_view_grouped.set_parent_grouping(True) product_views_layout = QtWidgets.QStackedLayout() product_views_layout.addWidget(product_view_cards) product_views_layout.addWidget(product_list_view) + product_views_layout.addWidget(product_list_view_grouped) product_views_layout.setCurrentWidget(product_view_cards) # Buttons at the bottom of product view @@ -118,6 +129,12 @@ class OverviewWidget(QtWidgets.QFrame): product_list_view.double_clicked.connect( self.publish_tab_requested ) + product_list_view_grouped.selection_changed.connect( + self._on_product_change + ) + product_list_view_grouped.double_clicked.connect( + self.publish_tab_requested + ) product_view_cards.selection_changed.connect( self._on_product_change ) @@ -159,16 +176,22 @@ class OverviewWidget(QtWidgets.QFrame): "create.model.instance.requirement.changed", self._on_instance_requirement_changed ) + controller.register_event_callback( + 
"create.model.instance.parent.changed", + self._on_instance_parent_changed + ) self._product_content_widget = product_content_widget self._product_content_layout = product_content_layout self._product_view_cards = product_view_cards self._product_list_view = product_list_view + self._product_list_view_grouped = product_list_view_grouped self._product_views_layout = product_views_layout self._create_btn = create_btn self._delete_btn = delete_btn + self._change_view_btn = change_view_btn self._product_attributes_widget = product_attributes_widget self._create_widget = create_widget @@ -246,7 +269,7 @@ class OverviewWidget(QtWidgets.QFrame): ) def has_items(self): - view = self._product_views_layout.currentWidget() + view = self._get_current_view() return view.has_items() def _on_create_clicked(self): @@ -361,17 +384,18 @@ class OverviewWidget(QtWidgets.QFrame): def _on_instance_requirement_changed(self, event): self._refresh_instance_states(event["instance_ids"]) - def _refresh_instance_states(self, instance_ids): - current_idx = self._product_views_layout.currentIndex() - for idx in range(self._product_views_layout.count()): - if idx == current_idx: - continue - widget = self._product_views_layout.widget(idx) - if widget.refreshed: - widget.set_refreshed(False) + def _on_instance_parent_changed(self, event): + self._refresh_instance_states(event["instance_ids"]) - current_widget = self._product_views_layout.widget(current_idx) - current_widget.refresh_instance_states(instance_ids) + def _refresh_instance_states(self, instance_ids): + current_view = self._get_current_view() + for view in self._iter_views(): + if view is current_view: + current_view = view + elif view.refreshed: + view.set_refreshed(False) + + current_view.refresh_instance_states(instance_ids) def _on_convert_requested(self): self.convert_requested.emit() @@ -385,7 +409,7 @@ class OverviewWidget(QtWidgets.QFrame): convertor plugins. 
""" - view = self._product_views_layout.currentWidget() + view = self._get_current_view() return view.get_selected_items() def get_selected_legacy_convertors(self): @@ -400,12 +424,12 @@ class OverviewWidget(QtWidgets.QFrame): return convertor_identifiers def _change_view_type(self): + old_view = self._get_current_view() + idx = self._product_views_layout.currentIndex() new_idx = (idx + 1) % self._product_views_layout.count() - old_view = self._product_views_layout.currentWidget() - new_view = self._product_views_layout.widget(new_idx) - + new_view = self._get_view_by_idx(new_idx) if not new_view.refreshed: new_view.refresh() new_view.set_refreshed(True) @@ -418,22 +442,52 @@ class OverviewWidget(QtWidgets.QFrame): new_view.set_selected_items( instance_ids, context_selected, convertor_identifiers ) + view_type = "list" + if new_view is self._product_list_view_grouped: + view_type = "card" + elif new_view is self._product_list_view: + view_type = "list-parent-grouping" + self._change_view_btn.set_view_type(view_type) self._product_views_layout.setCurrentIndex(new_idx) self._on_product_change() + def _iter_views(self) -> Generator[AbstractInstanceView, None, None]: + for idx in range(self._product_views_layout.count()): + widget = self._product_views_layout.widget(idx) + if not isinstance(widget, AbstractInstanceView): + raise TypeError( + "Current widget is not instance of 'AbstractInstanceView'" + ) + yield widget + + def _get_current_view(self) -> AbstractInstanceView: + widget = self._product_views_layout.currentWidget() + if isinstance(widget, AbstractInstanceView): + return widget + raise TypeError( + "Current widget is not instance of 'AbstractInstanceView'" + ) + + def _get_view_by_idx(self, idx: int) -> AbstractInstanceView: + widget = self._product_views_layout.widget(idx) + if isinstance(widget, AbstractInstanceView): + return widget + raise TypeError( + "Current widget is not instance of 'AbstractInstanceView'" + ) + def _refresh_instances(self): if self._refreshing_instances: return self._refreshing_instances = True - for idx in range(self._product_views_layout.count()): - widget = self._product_views_layout.widget(idx) - widget.set_refreshed(False) + for view in self._iter_views(): + view.set_refreshed(False) - view = self._product_views_layout.currentWidget() + view = self._get_current_view() view.refresh() view.set_refreshed(True) @@ -444,25 +498,22 @@ class OverviewWidget(QtWidgets.QFrame): # Give a change to process Resize Request QtWidgets.QApplication.processEvents() - # Trigger update geometry of - widget = self._product_views_layout.currentWidget() - widget.updateGeometry() + # Trigger update geometry + view.updateGeometry() def _on_publish_start(self): """Publish started.""" self._create_btn.setEnabled(False) self._product_attributes_wrap.setEnabled(False) - for idx in range(self._product_views_layout.count()): - widget = self._product_views_layout.widget(idx) - widget.set_active_toggle_enabled(False) + for view in self._iter_views(): + view.set_active_toggle_enabled(False) def _on_controller_reset_start(self): """Controller reset started.""" - for idx in range(self._product_views_layout.count()): - widget = self._product_views_layout.widget(idx) - widget.set_active_toggle_enabled(True) + for view in self._iter_views(): + view.set_active_toggle_enabled(True) def _on_publish_reset(self): """Context in controller has been reseted.""" @@ -477,7 +528,19 @@ class OverviewWidget(QtWidgets.QFrame): self._refresh_instances() def _on_instances_added(self): + view = 
self._get_current_view() + is_card_view = False + count = 0 + if isinstance(view, InstanceCardView): + is_card_view = True + count = view.get_current_instance_count() + self._refresh_instances() + if is_card_view and count < 10: + new_count = view.get_current_instance_count() + if new_count > count and new_count >= 10: + self._change_view_type() + def _on_instances_removed(self): self._refresh_instances() diff --git a/client/ayon_core/tools/publisher/widgets/report_page.py b/client/ayon_core/tools/publisher/widgets/report_page.py index 1e46e7e52c..033ddab0ef 100644 --- a/client/ayon_core/tools/publisher/widgets/report_page.py +++ b/client/ayon_core/tools/publisher/widgets/report_page.py @@ -1147,6 +1147,8 @@ class LogItemMessage(QtWidgets.QTextEdit): QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum ) + self.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff) + self.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff) document = self.document() document.documentLayout().documentSizeChanged.connect( self._adjust_minimum_size diff --git a/client/ayon_core/tools/publisher/widgets/tasks_model.py b/client/ayon_core/tools/publisher/widgets/tasks_model.py index 8bfa81116a..1803e46c5f 100644 --- a/client/ayon_core/tools/publisher/widgets/tasks_model.py +++ b/client/ayon_core/tools/publisher/widgets/tasks_model.py @@ -146,19 +146,19 @@ class TasksModel(QtGui.QStandardItemModel): self._controller.get_current_project_name() ) } - icon_name_by_task_name = {} + type_item_by_task_name = {} for task_items in task_items_by_folder_path.values(): for task_item in task_items: task_name = task_item.name if ( task_name not in new_task_names - or task_name in icon_name_by_task_name + or task_name in type_item_by_task_name ): continue task_type_name = task_item.task_type task_type_item = task_type_items.get(task_type_name) if task_type_item: - icon_name_by_task_name[task_name] = task_type_item.icon + type_item_by_task_name[task_name] = task_type_item for task_name in new_task_names: item = self._items_by_name.get(task_name) @@ -171,13 +171,18 @@ class TasksModel(QtGui.QStandardItemModel): if not task_name: continue - icon_name = icon_name_by_task_name.get(task_name) - icon = None + icon = icon_name = icon_color = None + task_type_item = type_item_by_task_name.get(task_name) + if task_type_item is not None: + icon_name = task_type_item.icon + icon_color = task_type_item.color if icon_name: + if not icon_color: + icon_color = get_default_entity_icon_color() icon = get_qt_icon({ "type": "material-symbols", "name": icon_name, - "color": get_default_entity_icon_color(), + "color": icon_color, }) if icon is None: icon = default_icon diff --git a/client/ayon_core/tools/publisher/widgets/widgets.py b/client/ayon_core/tools/publisher/widgets/widgets.py index a9d34c4c66..793b0f501b 100644 --- a/client/ayon_core/tools/publisher/widgets/widgets.py +++ b/client/ayon_core/tools/publisher/widgets/widgets.py @@ -10,6 +10,7 @@ from ayon_core.tools.flickcharm import FlickCharm from ayon_core.tools.utils import ( IconButton, PixmapLabel, + get_qt_icon, ) from ayon_core.tools.publisher.constants import ResetKeySequence @@ -287,12 +288,32 @@ class RemoveInstanceBtn(PublishIconBtn): self.setToolTip("Remove selected instances") -class ChangeViewBtn(PublishIconBtn): - """Create toggle view button.""" +class ChangeViewBtn(IconButton): + """Toggle views button.""" def __init__(self, parent=None): - icon_path = get_icon_path("change_view") - super().__init__(icon_path, parent) - self.setToolTip("Swap between 
views") + super().__init__(parent) + self.set_view_type("list") + + def set_view_type(self, view_type): + if view_type == "list": + # icon_name = "data_table" + icon_name = "dehaze" + tooltip = "Change to list view" + elif view_type == "card": + icon_name = "view_agenda" + tooltip = "Change to card view" + else: + icon_name = "segment" + tooltip = "Change to parent grouping view" + + # "format_align_right" + # "segment" + icon = get_qt_icon({ + "type": "material-symbols", + "name": icon_name, + }) + self.setIcon(icon) + self.setToolTip(tooltip) class AbstractInstanceView(QtWidgets.QWidget): @@ -370,6 +391,20 @@ class AbstractInstanceView(QtWidgets.QWidget): "{} Method 'set_active_toggle_enabled' is not implemented." ).format(self.__class__.__name__)) + def refresh_instance_states(self, instance_ids=None): + """Refresh instance states. + + Args: + instance_ids: Optional[Iterable[str]]: Instance ids to refresh. + If not passed then all instances are refreshed. + + """ + + raise NotImplementedError( + f"{self.__class__.__name__} Method 'refresh_instance_states'" + " is not implemented." + ) + class ClickableLineEdit(QtWidgets.QLineEdit): """QLineEdit capturing left mouse click. diff --git a/client/ayon_core/tools/push_to_project/control.py b/client/ayon_core/tools/push_to_project/control.py index fb080d158b..b4e0d56dfd 100644 --- a/client/ayon_core/tools/push_to_project/control.py +++ b/client/ayon_core/tools/push_to_project/control.py @@ -1,4 +1,5 @@ import threading +from typing import Dict import ayon_api @@ -13,10 +14,11 @@ from .models import ( UserPublishValuesModel, IntegrateModel, ) +from .models.integrate import ProjectPushItemProcess class PushToContextController: - def __init__(self, project_name=None, version_id=None): + def __init__(self, project_name=None, version_ids=None): self._event_system = self._create_event_system() self._projects_model = ProjectsModel(self) @@ -27,18 +29,20 @@ class PushToContextController: self._user_values = UserPublishValuesModel(self) self._src_project_name = None - self._src_version_id = None + self._src_version_ids = [] self._src_folder_entity = None self._src_folder_task_entities = {} - self._src_product_entity = None - self._src_version_entity = None + self._src_version_entities = [] + self._src_product_entities = {} self._src_label = None self._submission_enabled = False self._process_thread = None self._process_item_id = None - self.set_source(project_name, version_id) + self._use_original_name = False + + self.set_source(project_name, version_ids) # Events system def emit_event(self, topic, data=None, source=None): @@ -51,38 +55,47 @@ class PushToContextController: def register_event_callback(self, topic, callback): self._event_system.add_callback(topic, callback) - def set_source(self, project_name, version_id): + def set_source(self, project_name, version_ids): """Set source project and version. + There is currently assumption that tool is working on products of same + folder. + Args: project_name (Union[str, None]): Source project name. - version_id (Union[str, None]): Source version id. + version_ids (Optional[list[str]]): Version ids. 
""" - + if not project_name or not version_ids: + return if ( project_name == self._src_project_name - and version_id == self._src_version_id + and version_ids == self._src_version_ids ): return self._src_project_name = project_name - self._src_version_id = version_id + self._src_version_ids = version_ids self._src_label = None folder_entity = None task_entities = {} - product_entity = None - version_entity = None - if project_name and version_id: - version_entity = ayon_api.get_version_by_id( - project_name, version_id + product_entities = [] + version_entities = [] + if project_name and self._src_version_ids: + version_entities = list(ayon_api.get_versions( + project_name, version_ids=self._src_version_ids)) + + if version_entities: + product_ids = [ + version_entity["productId"] + for version_entity in version_entities + ] + product_entities = list(ayon_api.get_products( + project_name, product_ids=product_ids) ) - if version_entity: - product_entity = ayon_api.get_product_by_id( - project_name, version_entity["productId"] - ) - - if product_entity: + if product_entities: + # all products for same folder + product_entity = product_entities[0] folder_entity = ayon_api.get_folder_by_id( project_name, product_entity["folderId"] ) @@ -97,15 +110,18 @@ class PushToContextController: self._src_folder_entity = folder_entity self._src_folder_task_entities = task_entities - self._src_product_entity = product_entity - self._src_version_entity = version_entity + self._src_version_entities = version_entities + self._src_product_entities = { + product["id"]: product + for product in product_entities + } if folder_entity: self._user_values.set_new_folder_name(folder_entity["name"]) variant = self._get_src_variant() if variant: self._user_values.set_variant(variant) - comment = version_entity["attrib"].get("comment") + comment = version_entities[0]["attrib"].get("comment") if comment: self._user_values.set_comment(comment) @@ -113,7 +129,7 @@ class PushToContextController: "source.changed", { "project_name": project_name, - "version_id": version_id + "version_ids": self._src_version_ids } ) @@ -142,6 +158,14 @@ class PushToContextController: def get_user_values(self): return self._user_values.get_data() + def original_names_required(self): + """Checks if original product names must be used. + + Currently simple check if multiple versions, but if multiple products + with different product_type were used, it wouldn't be necessary. 
+ """ + return len(self._src_version_entities) > 1 + def set_user_value_folder_name(self, folder_name): self._user_values.set_new_folder_name(folder_name) self._invalidate() @@ -165,8 +189,9 @@ class PushToContextController: def set_selected_task(self, task_id, task_name): self._selection_model.set_selected_task(task_id, task_name) - def get_process_item_status(self, item_id): - return self._integrate_model.get_item_status(item_id) + def get_process_items(self) -> Dict[str, ProjectPushItemProcess]: + """Returns dict of all ProjectPushItemProcess items """ + return self._integrate_model.get_items() # Processing methods def submit(self, wait=True): @@ -176,29 +201,33 @@ class PushToContextController: if self._process_thread is not None: return - item_id = self._integrate_model.create_process_item( - self._src_project_name, - self._src_version_id, - self._selection_model.get_selected_project_name(), - self._selection_model.get_selected_folder_id(), - self._selection_model.get_selected_task_name(), - self._user_values.variant, - comment=self._user_values.comment, - new_folder_name=self._user_values.new_folder_name, - dst_version=1 - ) + item_ids = [] + for src_version_entity in self._src_version_entities: + item_id = self._integrate_model.create_process_item( + self._src_project_name, + src_version_entity["id"], + self._selection_model.get_selected_project_name(), + self._selection_model.get_selected_folder_id(), + self._selection_model.get_selected_task_name(), + self._user_values.variant, + comment=self._user_values.comment, + new_folder_name=self._user_values.new_folder_name, + dst_version=1, + use_original_name=self._use_original_name, + ) + item_ids.append(item_id) - self._process_item_id = item_id + self._process_item_ids = item_ids self._emit_event("submit.started") if wait: self._submit_callback() - self._process_item_id = None + self._process_item_ids = [] return item_id thread = threading.Thread(target=self._submit_callback) self._process_thread = thread thread.start() - return item_id + return item_ids def wait_for_process_thread(self): if self._process_thread is None: @@ -207,7 +236,7 @@ class PushToContextController: self._process_thread = None def _prepare_source_label(self): - if not self._src_project_name or not self._src_version_id: + if not self._src_project_name or not self._src_version_ids: return "Source is not defined" folder_entity = self._src_folder_entity @@ -215,14 +244,21 @@ class PushToContextController: return "Source is invalid" folder_path = folder_entity["path"] - product_entity = self._src_product_entity - version_entity = self._src_version_entity - return "Source: {}{}/{}/v{:0>3}".format( - self._src_project_name, - folder_path, - product_entity["name"], - version_entity["version"] - ) + src_labels = [] + for version_entity in self._src_version_entities: + product_entity = self._src_product_entities.get( + version_entity["productId"] + ) + src_labels.append( + "Source: {}{}/{}/v{:0>3}".format( + self._src_project_name, + folder_path, + product_entity["name"], + version_entity["version"], + ) + ) + + return "\n".join(src_labels) def _get_task_info_from_repre_entities( self, task_entities, repre_entities @@ -256,7 +292,8 @@ class PushToContextController: def _get_src_variant(self): project_name = self._src_project_name - version_entity = self._src_version_entity + # parse variant only from first version + version_entity = self._src_version_entities[0] task_entities = self._src_folder_task_entities repre_entities = ayon_api.get_representations( project_name, 
version_ids={version_entity["id"]} @@ -264,9 +301,12 @@ class PushToContextController: task_name, task_type = self._get_task_info_from_repre_entities( task_entities, repre_entities ) + product_entity = self._src_product_entities.get( + version_entity["productId"] + ) project_settings = get_project_settings(project_name) - product_type = self._src_product_entity["productType"] + product_type = product_entity["productType"] template = get_product_name_template( self._src_project_name, product_type, @@ -300,7 +340,7 @@ class PushToContextController: print("Failed format", exc) return "" - product_name = self._src_product_entity["name"] + product_name = product_entity["name"] if ( (product_s and not product_name.startswith(product_s)) or (product_e and not product_name.endswith(product_e)) @@ -314,9 +354,6 @@ class PushToContextController: return product_name def _check_submit_validations(self): - if not self._user_values.is_valid: - return False - if not self._selection_model.get_selected_project_name(): return False @@ -325,6 +362,13 @@ class PushToContextController: and not self._selection_model.get_selected_folder_id() ): return False + + if self._use_original_name: + return True + + if not self._user_values.is_valid: + return False + return True def _invalidate(self): @@ -338,13 +382,14 @@ class PushToContextController: ) def _submit_callback(self): - process_item_id = self._process_item_id - if process_item_id is None: - return - self._integrate_model.integrate_item(process_item_id) + process_item_ids = self._process_item_ids + for process_item_id in process_item_ids: + self._integrate_model.integrate_item(process_item_id) + self._emit_event("submit.finished", {}) - if process_item_id == self._process_item_id: - self._process_item_id = None + + if process_item_ids is self._process_item_ids: + self._process_item_ids = [] def _emit_event(self, topic, data=None): if data is None: diff --git a/client/ayon_core/tools/push_to_project/main.py b/client/ayon_core/tools/push_to_project/main.py index a6ff38c16f..d3c9d3a537 100644 --- a/client/ayon_core/tools/push_to_project/main.py +++ b/client/ayon_core/tools/push_to_project/main.py @@ -4,28 +4,28 @@ from ayon_core.tools.utils import get_ayon_qt_app from ayon_core.tools.push_to_project.ui import PushToContextSelectWindow -def main_show(project_name, version_id): +def main_show(project_name, version_ids): app = get_ayon_qt_app() window = PushToContextSelectWindow() window.show() - window.set_source(project_name, version_id) + window.set_source(project_name, version_ids) app.exec_() @click.command() @click.option("--project", help="Source project name") -@click.option("--version", help="Source version id") -def main(project, version): +@click.option("--versions", help="Source version ids") +def main(project, versions): """Run PushToProject tool to integrate version in different project. Args: project (str): Source project name. - version (str): Version id. 
+ versions (str): Comma-separated version ids for the same context. """ - main_show(project, version) + main_show(project, versions.split(",")) if __name__ == "__main__": diff --git a/client/ayon_core/tools/push_to_project/models/integrate.py b/client/ayon_core/tools/push_to_project/models/integrate.py index 6bd4279219..cacce44942 100644 --- a/client/ayon_core/tools/push_to_project/models/integrate.py +++ b/client/ayon_core/tools/push_to_project/models/integrate.py @@ -3,8 +3,10 @@ import re import copy import itertools import sys +import tempfile import traceback import uuid +from typing import Optional, Any import ayon_api from ayon_api.utils import create_entity_id @@ -21,6 +23,7 @@ from ayon_core.lib import ( source_hash, ) from ayon_core.lib.file_transaction import FileTransaction +from ayon_core.pipeline.thumbnails import get_thumbnail_path from ayon_core.settings import get_project_settings from ayon_core.pipeline import Anatomy from ayon_core.pipeline.version_start import get_versioning_start @@ -88,6 +91,7 @@ class ProjectPushItem: new_folder_name, dst_version, item_id=None, + use_original_name=False ): if not item_id: item_id = uuid.uuid4().hex @@ -102,6 +106,7 @@ class ProjectPushItem: self.comment = comment or "" self.item_id = item_id self._repr_value = None + self.use_original_name = use_original_name @property def _repr(self): @@ -113,7 +118,8 @@ class ProjectPushItem: str(self.dst_folder_id), str(self.new_folder_name), str(self.dst_task_name), - str(self.dst_version) + str(self.dst_version), + self.use_original_name ]) return self._repr_value @@ -132,6 +138,7 @@ class ProjectPushItem: "comment": self.comment, "new_folder_name": self.new_folder_name, "item_id": self.item_id, + "use_original_name": self.use_original_name } @classmethod @@ -219,8 +226,8 @@ class ProjectPushRepreItem: but filenames are not template based. Args: - repre_entity (Dict[str, Ant]): Representation entity. - roots (Dict[str, str]): Project roots (based on project anatomy). + repre_entity (dict[str, Any]): Representation entity. + roots (dict[str, str]): Project roots (based on project anatomy).
""" def __init__(self, repre_entity, roots): @@ -311,7 +318,7 @@ class ProjectPushRepreItem: if self._src_files is not None: return self._src_files, self._resource_files - repre_context = self._repre_entity["context"] + repre_context = self.repre_entity["context"] if "frame" in repre_context or "udim" in repre_context: src_files, resource_files = self._get_source_files_with_frames() else: @@ -328,7 +335,7 @@ class ProjectPushRepreItem: udim_placeholder = "__udim__" src_files = [] resource_files = [] - template = self._repre_entity["attrib"]["template"] + template = self.repre_entity["attrib"]["template"] # Remove padding from 'udim' and 'frame' formatting keys # - "{frame:0>4}" -> "{frame}" for key in ("udim", "frame"): @@ -336,7 +343,7 @@ class ProjectPushRepreItem: replacement = "{{{}}}".format(key) template = re.sub(sub_part, replacement, template) - repre_context = self._repre_entity["context"] + repre_context = self.repre_entity["context"] fill_repre_context = copy.deepcopy(repre_context) if "frame" in fill_repre_context: fill_repre_context["frame"] = frame_placeholder @@ -357,7 +364,7 @@ class ProjectPushRepreItem: .replace(udim_placeholder, "(?P[0-9]+)") ) src_basename_regex = re.compile("^{}$".format(src_basename)) - for file_info in self._repre_entity["files"]: + for file_info in self.repre_entity["files"]: filepath_template = self._clean_path(file_info["path"]) filepath = self._clean_path( filepath_template.format(root=self._roots) @@ -371,7 +378,6 @@ class ProjectPushRepreItem: resource_files.append(ResourceFile(filepath, relative_path)) continue - filepath = os.path.join(src_dirpath, basename) frame = None udim = None for item in src_basename_regex.finditer(basename): @@ -389,8 +395,8 @@ class ProjectPushRepreItem: def _get_source_files(self): src_files = [] resource_files = [] - template = self._repre_entity["attrib"]["template"] - repre_context = self._repre_entity["context"] + template = self.repre_entity["attrib"]["template"] + repre_context = self.repre_entity["context"] fill_repre_context = copy.deepcopy(repre_context) fill_roots = fill_repre_context["root"] for root_name in tuple(fill_roots.keys()): @@ -399,7 +405,7 @@ class ProjectPushRepreItem: fill_repre_context) repre_path = self._clean_path(repre_path) src_dirpath = os.path.dirname(repre_path) - for file_info in self._repre_entity["files"]: + for file_info in self.repre_entity["files"]: filepath_template = self._clean_path(file_info["path"]) filepath = self._clean_path( filepath_template.format(root=self._roots)) @@ -477,6 +483,8 @@ class ProjectPushItemProcess: self._log_info("Destination project was found") self._fill_or_create_destination_folder() self._log_info("Destination folder was determined") + self._fill_or_create_destination_task() + self._log_info("Destination task was determined") self._determine_product_type() self._determine_publish_template_name() self._determine_product_name() @@ -492,8 +500,11 @@ class ProjectPushItemProcess: except Exception as exc: _exc, _value, _tb = sys.exc_info() + product_name = self._src_product_entity["name"] self._status.set_failed( - "Unhandled error happened: {}".format(str(exc)), + "Unhandled error happened for `{}`: {}".format( + product_name, str(exc) + ), (_exc, _value, _tb) ) @@ -642,10 +653,10 @@ class ProjectPushItemProcess: def _create_folder( self, - src_folder_entity, - project_entity, - parent_folder_entity, - folder_name + src_folder_entity: dict[str, Any], + project_entity: dict[str, Any], + parent_folder_entity: dict[str, Any], + folder_name: str ): 
parent_id = None if parent_folder_entity: @@ -694,12 +705,19 @@ class ProjectPushItemProcess: if new_folder_name != folder_name: folder_label = folder_name - # TODO find out how to define folder type + src_folder_type = src_folder_entity["folderType"] + dst_folder_type = self._get_dst_folder_type( + project_entity, + src_folder_type + ) + new_thumbnail_id = self._create_new_folder_thumbnail( + project_entity, src_folder_entity) folder_entity = new_folder_entity( folder_name, - "Folder", + dst_folder_type, parent_id=parent_id, - attribs=new_folder_attrib + attribs=new_folder_attrib, + thumbnail_id=new_thumbnail_id ) if folder_label: folder_entity["label"] = folder_label @@ -719,10 +737,59 @@ class ProjectPushItemProcess: folder_entity["path"] = "/".join([parent_path, folder_name]) return folder_entity + def _create_new_folder_thumbnail( + self, + project_entity: dict[str, Any], + src_folder_entity: dict[str, Any] + ) -> Optional[str]: + """Copy thumbnail possibly set on folder. + + Could be different from representation thumbnails, and it is only shown + when folder is selected. + """ + if not src_folder_entity["thumbnailId"]: + return None + + thumbnail = ayon_api.get_folder_thumbnail( + self._item.src_project_name, + src_folder_entity["id"], + src_folder_entity["thumbnailId"] + ) + if not thumbnail.id: + return None + + with tempfile.NamedTemporaryFile(delete=False) as tmp_file: + tmp_file.write(thumbnail.content) + temp_file_path = tmp_file.name + + new_thumbnail_id = None + try: + new_thumbnail_id = ayon_api.create_thumbnail( + project_entity["name"], temp_file_path) + finally: + if os.path.exists(temp_file_path): + os.remove(temp_file_path) + return new_thumbnail_id + + def _get_dst_folder_type( + self, + project_entity: dict[str, Any], + src_folder_type: str + ) -> str: + """Get new folder type.""" + for folder_type in project_entity["folderTypes"]: + if folder_type["name"].lower() == src_folder_type.lower(): + return folder_type["name"] + + self._status.set_failed( + f"'{src_folder_type}' folder type is not configured in " + f"project Anatomy." 
+ ) + raise PushToProjectError(self._status.fail_reason) + def _fill_or_create_destination_folder(self): dst_project_name = self._item.dst_project_name dst_folder_id = self._item.dst_folder_id - dst_task_name = self._item.dst_task_name new_folder_name = self._item.new_folder_name if not dst_folder_id and not new_folder_name: self._status.set_failed( @@ -753,9 +820,11 @@ class ProjectPushItemProcess: new_folder_name ) self._folder_entity = folder_entity - if not dst_task_name: - self._task_info = {} - return + + def _fill_or_create_destination_task(self): + folder_entity = self._folder_entity + dst_task_name = self._item.dst_task_name + dst_project_name = self._item.dst_project_name folder_path = folder_entity["path"] folder_tasks = { @@ -764,6 +833,20 @@ class ProjectPushItemProcess: dst_project_name, folder_ids=[folder_entity["id"]] ) } + + if not dst_task_name: + src_task_info = self._get_src_task_info() + if not src_task_info: # really no task selected nor on source + self._task_info = {} + return + + dst_task_name = src_task_info["name"] + if dst_task_name.lower() not in folder_tasks: + task_info = self._make_sure_task_exists( + folder_entity, src_task_info + ) + folder_tasks[dst_task_name.lower()] = task_info + task_info = folder_tasks.get(dst_task_name.lower()) if not task_info: self._status.set_failed( @@ -782,7 +865,10 @@ class ProjectPushItemProcess: task_type["name"]: task_type for task_type in self._project_entity["taskTypes"] } - task_type_info = task_types_by_name.get(task_type_name, {}) + task_type_info = copy.deepcopy( + task_types_by_name.get(task_type_name, {}) + ) + task_type_info.pop("name") # do not overwrite real task name task_info.update(task_type_info) self._task_info = task_info @@ -816,31 +902,34 @@ class ProjectPushItemProcess: self._template_name = template_name def _determine_product_name(self): - product_type = self._product_type - task_info = self._task_info - task_name = task_type = None - if task_info: - task_name = task_info["name"] - task_type = task_info["taskType"] + if self._item.use_original_name: + product_name = self._src_product_entity["name"] + else: + product_type = self._product_type + task_info = self._task_info + task_name = task_type = None + if task_info: + task_name = task_info["name"] + task_type = task_info["taskType"] - try: - product_name = get_product_name( - self._item.dst_project_name, - task_name, - task_type, - self.host_name, - product_type, - self._item.variant, - project_settings=self._project_settings - ) - except TaskNotSetError: - self._status.set_failed( - "Target product name template requires task name. To continue" - " you have to select target task or change settings" - " ayon+settings://core/tools/creator/product_name_profiles" - f"?project={self._item.dst_project_name}." - ) - raise PushToProjectError(self._status.fail_reason) + try: + product_name = get_product_name( + self._item.dst_project_name, + task_name, + task_type, + self.host_name, + product_type, + self._item.variant, + project_settings=self._project_settings + ) + except TaskNotSetError: + self._status.set_failed( + "Target product name template requires task name. To " + "continue you have to select target task or change settings " # noqa: E501 + " ayon+settings://core/tools/creator/product_name_profiles" # noqa: E501 + f"?project={self._item.dst_project_name}." 
+ ) + raise PushToProjectError(self._status.fail_reason) self._log_info( f"Push will be integrating to product with name '{product_name}'" @@ -914,17 +1003,22 @@ class ProjectPushItemProcess: version = get_versioning_start( project_name, self.host_name, - task_name=self._task_info["name"], - task_type=self._task_info["taskType"], + task_name=self._task_info.get("name"), + task_type=self._task_info.get("taskType"), product_type=product_type, - product_name=product_entity["name"] + product_name=product_entity["name"], ) existing_version_entity = ayon_api.get_version_by_name( project_name, version, product_id ) + thumbnail_id = self._copy_version_thumbnail() + # Update existing version if existing_version_entity: + updata_data = {"attrib": dst_attrib} + if thumbnail_id: + updata_data["thumbnailId"] = thumbnail_id self._operations.update_entity( project_name, "version", @@ -934,17 +1028,65 @@ class ProjectPushItemProcess: existing_version_entity["attrib"].update(dst_attrib) self._version_entity = existing_version_entity return + copied_tags = self._get_transferable_tags(src_version_entity) + copied_status = self._get_transferable_status(src_version_entity) version_entity = new_version_entity( version, product_id, + author=src_version_entity["author"], + status=copied_status, + tags=copied_tags, + task_id=self._task_info.get("id"), attribs=dst_attrib, + thumbnail_id=thumbnail_id, ) self._operations.create_entity( project_name, "version", version_entity ) self._version_entity = version_entity + def _make_sure_task_exists( + self, + folder_entity: dict[str, Any], + task_info: dict[str, Any], + ) -> dict[str, Any]: + """Creates destination task from source task information""" + project_name = self._item.dst_project_name + found_task_type = False + src_task_type = task_info["taskType"] + for task_type in self._project_entity["taskTypes"]: + if task_type["name"].lower() == src_task_type.lower(): + found_task_type = True + break + + if not found_task_type: + self._status.set_failed( + f"'{src_task_type}' task type is not configured in " + "project Anatomy." 
+ ) + + raise PushToProjectError(self._status.fail_reason) + + task_info = self._operations.create_task( + project_name, + task_info["name"], + folder_id=folder_entity["id"], + task_type=src_task_type, + attrib=task_info["attrib"], + ) + self._task_info = task_info.data + return self._task_info + + def _get_src_task_info(self): + src_version_entity = self._src_version_entity + if not src_version_entity["taskId"]: + return None + src_task = ayon_api.get_task_by_id( + self._item.src_project_name, src_version_entity["taskId"] + ) + return src_task + def _integrate_representations(self): try: self._real_integrate_representations() @@ -1005,10 +1147,18 @@ class ProjectPushItemProcess: self, anatomy, template_name, formatting_data, file_template ): processed_repre_items = [] + repre_context = None for repre_item in self._src_repre_items: repre_entity = repre_item.repre_entity repre_name = repre_entity["name"] repre_format_data = copy.deepcopy(formatting_data) + + if not repre_context: + repre_context = self._update_repre_context( + copy.deepcopy(repre_entity), + formatting_data + ) + repre_format_data["representation"] = repre_name for src_file in repre_item.src_files: ext = os.path.splitext(src_file.path)[-1] @@ -1024,7 +1174,6 @@ class ProjectPushItemProcess: "publish", template_name, "directory" ) folder_path = template_obj.format_strict(formatting_data) - repre_context = folder_path.used_values folder_path_rootless = folder_path.rootless repre_filepaths = [] published_path = None @@ -1047,7 +1196,6 @@ class ProjectPushItemProcess: ) if published_path is None or frame == repre_item.frame: published_path = dst_filepath - repre_context.update(filename.used_values) repre_filepaths.append((dst_filepath, dst_rootless_path)) self._file_transaction.add(src_file.path, dst_filepath) @@ -1134,7 +1282,7 @@ class ProjectPushItemProcess: self._item.dst_project_name, "representation", entity_id, - changes + changes, ) existing_repre_names = set(existing_repres_by_low_name.keys()) @@ -1147,6 +1295,69 @@ class ProjectPushItemProcess: {"active": False} ) + def _copy_version_thumbnail(self) -> Optional[str]: + thumbnail_id = self._src_version_entity["thumbnailId"] + if not thumbnail_id: + return None + path = get_thumbnail_path( + self._item.src_project_name, + "version", + self._src_version_entity["id"], + thumbnail_id + ) + if not path: + return None + return ayon_api.create_thumbnail( + self._item.dst_project_name, + path + ) + + def _update_repre_context(self, repre_entity, formatting_data): + """Replace old context value with new ones. + + Folder might change, project definitely changes etc. 
+ """ + repre_context = repre_entity["context"] + for context_key, context_value in repre_context.items(): + if context_value and isinstance(context_value, dict): + for context_sub_key in context_value.keys(): + value_to_update = formatting_data.get(context_key, {}).get( + context_sub_key + ) + if value_to_update: + repre_context[context_key][context_sub_key] = ( + value_to_update + ) + else: + value_to_update = formatting_data.get(context_key) + if value_to_update: + repre_context[context_key] = value_to_update + if "task" not in formatting_data: + repre_context.pop("task", None) + return repre_context + + def _get_transferable_tags(self, src_version_entity): + """Copy over only tags present in destination project""" + dst_project_tags = [ + tag["name"] for tag in self._project_entity["tags"] + ] + copied_tags = [] + for src_tag in src_version_entity["tags"]: + if src_tag in dst_project_tags: + copied_tags.append(src_tag) + return copied_tags + + def _get_transferable_status(self, src_version_entity): + """Copy over status, first status if not matching found""" + dst_project_statuses = { + status["name"]: status + for status in self._project_entity["statuses"] + } + copied_status = dst_project_statuses.get(src_version_entity["status"]) + if copied_status: + return copied_status["name"] + return None + class IntegrateModel: def __init__(self, controller): @@ -1170,6 +1381,7 @@ class IntegrateModel: comment, new_folder_name, dst_version, + use_original_name ): """Create new item for integration. @@ -1183,6 +1395,7 @@ class IntegrateModel: comment (Union[str, None]): Comment. new_folder_name (Union[str, None]): New folder name. dst_version (int): Destination version number. + use_original_name (bool): If original product names should be used Returns: str: Item id. The id can be used to trigger integration or get @@ -1198,7 +1411,8 @@ class IntegrateModel: variant, comment=comment, new_folder_name=new_folder_name, - dst_version=dst_version + dst_version=dst_version, + use_original_name=use_original_name ) process_item = ProjectPushItemProcess(self, item) self._process_items[item.item_id] = process_item @@ -1216,17 +1430,6 @@ class IntegrateModel: return item.integrate() - def get_item_status(self, item_id): - """Status of an item. - - Args: - item_id (str): Item id for which status should be returned. - - Returns: - dict[str, Any]: Status data. 
- """ - - item = self._process_items.get(item_id) - if item is not None: - return item.get_status_data() - return None + def get_items(self) -> dict[str, ProjectPushItemProcess]: + """Returns dict of all ProjectPushItemProcess items.""" + return self._process_items diff --git a/client/ayon_core/tools/push_to_project/ui/window.py b/client/ayon_core/tools/push_to_project/ui/window.py index a69c512fcd..f382ccce64 100644 --- a/client/ayon_core/tools/push_to_project/ui/window.py +++ b/client/ayon_core/tools/push_to_project/ui/window.py @@ -85,6 +85,13 @@ class PushToContextSelectWindow(QtWidgets.QWidget): header_widget = QtWidgets.QWidget(main_context_widget) + library_only_label = QtWidgets.QLabel( + "Show only libraries", + header_widget + ) + library_only_checkbox = NiceCheckbox( + True, parent=header_widget) + header_label = QtWidgets.QLabel( controller.get_source_label(), header_widget @@ -92,7 +99,9 @@ header_layout = QtWidgets.QHBoxLayout(header_widget) header_layout.setContentsMargins(0, 0, 0, 0) - header_layout.addWidget(header_label) + header_layout.addWidget(header_label, 1) + header_layout.addWidget(library_only_label, 0) + header_layout.addWidget(library_only_checkbox, 0) main_splitter = QtWidgets.QSplitter( QtCore.Qt.Horizontal, main_context_widget @@ -124,6 +133,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget): inputs_widget = QtWidgets.QWidget(main_splitter) new_folder_checkbox = NiceCheckbox(True, parent=inputs_widget) + original_names_checkbox = NiceCheckbox(False, parent=inputs_widget) folder_name_input = PlaceholderLineEdit(inputs_widget) folder_name_input.setPlaceholderText("< Name of new folder >") @@ -142,6 +152,8 @@ class PushToContextSelectWindow(QtWidgets.QWidget): inputs_layout.addRow("Create new folder", new_folder_checkbox) inputs_layout.addRow("New folder name", folder_name_input) inputs_layout.addRow("Variant", variant_input) + inputs_layout.addRow( + "Use original product names", original_names_checkbox) inputs_layout.addRow("Comment", comment_input) main_splitter.addWidget(context_widget) @@ -196,6 +208,10 @@ class PushToContextSelectWindow(QtWidgets.QWidget): show_detail_btn.setToolTip( "Show error detail dialog to copy full error." ) + original_names_checkbox.setToolTip( + "Required for multi copy, doesn't allow changing " + "variant values."
+ ) overlay_close_btn = QtWidgets.QPushButton( "Close", overlay_btns_widget @@ -240,6 +256,9 @@ class PushToContextSelectWindow(QtWidgets.QWidget): folder_name_input.textChanged.connect(self._on_new_folder_change) variant_input.textChanged.connect(self._on_variant_change) comment_input.textChanged.connect(self._on_comment_change) + library_only_checkbox.stateChanged.connect(self._on_library_only_change) + original_names_checkbox.stateChanged.connect( + self._on_original_names_change) publish_btn.clicked.connect(self._on_select_click) cancel_btn.clicked.connect(self._on_close_click) @@ -288,6 +307,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget): self._new_folder_checkbox = new_folder_checkbox self._folder_name_input = folder_name_input self._comment_input = comment_input + self._use_original_names_checkbox = original_names_checkbox self._publish_btn = publish_btn @@ -316,7 +336,6 @@ class PushToContextSelectWindow(QtWidgets.QWidget): self._main_thread_timer = main_thread_timer self._main_thread_timer_can_stop = True self._last_submit_message = None - self._process_item_id = None self._variant_is_valid = None self._folder_is_valid = None @@ -327,17 +346,17 @@ class PushToContextSelectWindow(QtWidgets.QWidget): overlay_try_btn.setVisible(False) # Support of public api function of controller - def set_source(self, project_name, version_id): + def set_source(self, project_name, version_ids): """Set source project and version. Call the method on controller. Args: project_name (Union[str, None]): Name of project. - version_id (Union[str, None]): Version id. + version_ids (Union[str, None]): comma separated Version ids. """ - self._controller.set_source(project_name, version_id) + self._controller.set_source(project_name, version_ids) def showEvent(self, event): super(PushToContextSelectWindow, self).showEvent(event) @@ -352,10 +371,12 @@ class PushToContextSelectWindow(QtWidgets.QWidget): self._folder_name_input.setText(new_folder_name or "") self._variant_input.setText(variant or "") self._invalidate_variant(user_values["is_variant_valid"]) + self._invalidate_use_original_names( + self._use_original_names_checkbox.isChecked()) self._invalidate_new_folder_name( new_folder_name, user_values["is_new_folder_name_valid"] ) - + self._controller._invalidate() self._projects_combobox.refresh() def _on_first_show(self): @@ -394,6 +415,15 @@ class PushToContextSelectWindow(QtWidgets.QWidget): self._comment_input_text = text self._user_input_changed_timer.start() + def _on_library_only_change(self, state: int) -> None: + """Change toggle state, reset filter, recalculate dropdown""" + state = bool(state) + self._projects_combobox.set_standard_filter_enabled(state) + + def _on_original_names_change(self, state: int) -> None: + use_original_name = bool(state) + self._invalidate_use_original_names(use_original_name) + def _on_user_input_timer(self): folder_name_enabled = self._new_folder_name_enabled folder_name = self._new_folder_name_input_text @@ -456,17 +486,27 @@ class PushToContextSelectWindow(QtWidgets.QWidget): state = "" if folder_name is not None: state = "valid" if is_valid else "invalid" - set_style_property( - self._folder_name_input, "state", state - ) + set_style_property(self._folder_name_input, "state", state) def _invalidate_variant(self, is_valid): - if self._variant_is_valid is is_valid: - return self._variant_is_valid = is_valid state = "valid" if is_valid else "invalid" set_style_property(self._variant_input, "state", state) + def _invalidate_use_original_names(self, 
use_original_names): + """Checks if original names must be used. + + Invalidates Variant if necessary + """ + if self._controller.original_names_required(): + use_original_names = True + + self._variant_input.setEnabled(not use_original_names) + self._invalidate_variant(not use_original_names) + + self._controller._use_original_name = use_original_names + self._use_original_names_checkbox.setChecked(use_original_names) + def _on_submission_change(self, event): self._publish_btn.setEnabled(event["enabled"]) @@ -495,31 +535,43 @@ class PushToContextSelectWindow(QtWidgets.QWidget): self._overlay_label.setText(self._last_submit_message) self._last_submit_message = None - process_status = self._controller.get_process_item_status( - self._process_item_id - ) - push_failed = process_status["failed"] - fail_traceback = process_status["full_traceback"] + failed_pushes = [] + fail_tracebacks = [] + for process_item in self._controller.get_process_items().values(): + process_status = process_item.get_status_data() + if process_status["failed"]: + failed_pushes.append(process_status) + # push_failed = process_status["failed"] + # fail_traceback = process_status["full_traceback"] if self._main_thread_timer_can_stop: self._main_thread_timer.stop() self._overlay_close_btn.setVisible(True) - if push_failed: + if failed_pushes: self._overlay_try_btn.setVisible(True) - if fail_traceback: + fail_tracebacks = [ + process_status["full_traceback"] + for process_status in failed_pushes + if process_status["full_traceback"] + ] + if fail_tracebacks: self._show_detail_btn.setVisible(True) - if push_failed: - reason = process_status["fail_reason"] - if fail_traceback: + if failed_pushes: + reasons = [ + process_status["fail_reason"] + for process_status in failed_pushes + ] + if fail_tracebacks: + reason = "\n".join(reasons) message = ( "Unhandled error happened." " Check error detail for more information." 
                    )
                    self._error_detail_dialog.set_detail(
-                        reason, fail_traceback
+                        reason, "\n".join(fail_tracebacks)
                    )
                else:
-                    message = f"Push Failed:\n{reason}"
+                    message = "Push Failed:\n" + "\n".join(reasons)
                self._overlay_label.setText(message)
                set_style_property(self._overlay_close_btn, "state", "error")
@@ -534,7 +586,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
         self._main_thread_timer_can_stop = False
         self._main_thread_timer.start()
         self._main_layout.setCurrentWidget(self._overlay_widget)
-        self._overlay_label.setText("Submittion started")
+        self._overlay_label.setText("Submission started")
     def _on_controller_submit_end(self):
         self._main_thread_timer_can_stop = True
diff --git a/client/ayon_core/tools/sceneinventory/control.py b/client/ayon_core/tools/sceneinventory/control.py
index 60d9bc77a9..606c9e7298 100644
--- a/client/ayon_core/tools/sceneinventory/control.py
+++ b/client/ayon_core/tools/sceneinventory/control.py
@@ -1,12 +1,18 @@
+from typing import Optional
+
 import ayon_api
 from ayon_core.lib.events import QueuedEventSystem
-from ayon_core.host import HostBase
+from ayon_core.host import ILoadHost
 from ayon_core.pipeline import (
     registered_host,
     get_current_context,
 )
-from ayon_core.tools.common_models import HierarchyModel, ProjectsModel
+from ayon_core.tools.common_models import (
+    HierarchyModel,
+    ProjectsModel,
+    ProductTypeIconMapping,
+)
 from .models import SiteSyncModel, ContainersModel
@@ -35,7 +41,7 @@ class SceneInventoryController:
             self._projects_model = ProjectsModel(self)
         self._event_system = self._create_event_system()
-    def get_host(self) -> HostBase:
+    def get_host(self) -> ILoadHost:
         return self._host
     def emit_event(self, topic, data=None, source=None):
@@ -93,6 +99,13 @@ class SceneInventoryController:
             project_name, None
         )
+    def get_product_type_icons_mapping(
+        self, project_name: Optional[str]
+    ) -> ProductTypeIconMapping:
+        return self._projects_model.get_product_type_icons_mapping(
+            project_name
+        )
+
     # Containers methods
     def get_containers(self):
         return self._containers_model.get_containers()
diff --git a/client/ayon_core/tools/sceneinventory/delegates.py b/client/ayon_core/tools/sceneinventory/delegates.py
index 6f91587613..9bc4294fda 100644
--- a/client/ayon_core/tools/sceneinventory/delegates.py
+++ b/client/ayon_core/tools/sceneinventory/delegates.py
@@ -1,10 +1,14 @@
 from qtpy import QtWidgets, QtCore, QtGui
-from .model import VERSION_LABEL_ROLE
+from ayon_core.tools.utils import get_qt_icon
+
+from .model import VERSION_LABEL_ROLE, CONTAINER_VERSION_LOCKED_ROLE
 class VersionDelegate(QtWidgets.QStyledItemDelegate):
     """A delegate that display version integer formatted as version string."""
+    _locked_icon = None
+
     def paint(self, painter, option, index):
         fg_color = index.data(QtCore.Qt.ForegroundRole)
         if fg_color:
@@ -45,10 +49,35 @@ class VersionDelegate(QtWidgets.QStyledItemDelegate):
             QtWidgets.QStyle.PM_FocusFrameHMargin, option, option.widget
         ) + 1
+        text_rect_f = text_rect.adjusted(
+            text_margin, 0, - text_margin, 0
+        )
+
         painter.drawText(
-            text_rect.adjusted(text_margin, 0, - text_margin, 0),
+            text_rect_f,
             option.displayAlignment,
             text
         )
+        if index.data(CONTAINER_VERSION_LOCKED_ROLE) is True:
+            icon = self._get_locked_icon()
+            size = max(text_rect_f.height() // 2, 16)
+            margin = (text_rect_f.height() - size) // 2
+
+            icon_rect = QtCore.QRect(
+                text_rect_f.right() - size,
+                text_rect_f.top() + margin,
+                size,
+                size
+            )
+            icon.paint(painter, icon_rect)
         painter.restore()
+
+    def _get_locked_icon(cls):
+        if cls._locked_icon is None:
+
cls._locked_icon = get_qt_icon({ + "type": "material-symbols", + "name": "lock", + "color": "white", + }) + return cls._locked_icon diff --git a/client/ayon_core/tools/sceneinventory/model.py b/client/ayon_core/tools/sceneinventory/model.py index 885553acaf..27211165bf 100644 --- a/client/ayon_core/tools/sceneinventory/model.py +++ b/client/ayon_core/tools/sceneinventory/model.py @@ -37,6 +37,7 @@ REMOTE_SITE_ICON_ROLE = QtCore.Qt.UserRole + 23 # containers inbetween refresh. ITEM_UNIQUE_NAME_ROLE = QtCore.Qt.UserRole + 24 PROJECT_NAME_ROLE = QtCore.Qt.UserRole + 25 +CONTAINER_VERSION_LOCKED_ROLE = QtCore.Qt.UserRole + 26 class InventoryModel(QtGui.QStandardItemModel): @@ -214,9 +215,6 @@ class InventoryModel(QtGui.QStandardItemModel): group_icon = qtawesome.icon( "fa.object-group", color=self._default_icon_color ) - product_type_icon = qtawesome.icon( - "fa.folder", color="#0091B2" - ) group_item_font = QtGui.QFont() group_item_font.setBold(True) @@ -294,6 +292,10 @@ class InventoryModel(QtGui.QStandardItemModel): item.setData(container_item.object_name, OBJECT_NAME_ROLE) item.setData(True, IS_CONTAINER_ITEM_ROLE) item.setData(unique_name, ITEM_UNIQUE_NAME_ROLE) + item.setData( + container_item.version_locked, + CONTAINER_VERSION_LOCKED_ROLE + ) container_model_items.append(item) progress = progress_by_id[repre_id] @@ -303,7 +305,7 @@ class InventoryModel(QtGui.QStandardItemModel): remote_site_progress = "{}%".format( max(progress["remote_site"], 0) * 100 ) - + product_type_icon = get_qt_icon(repre_info.product_type_icon) group_item = QtGui.QStandardItem() group_item.setColumnCount(root_item.columnCount()) group_item.setData(group_name, QtCore.Qt.DisplayRole) diff --git a/client/ayon_core/tools/sceneinventory/models/containers.py b/client/ayon_core/tools/sceneinventory/models/containers.py index f841f87c8e..0e19f381cd 100644 --- a/client/ayon_core/tools/sceneinventory/models/containers.py +++ b/client/ayon_core/tools/sceneinventory/models/containers.py @@ -95,7 +95,8 @@ class ContainerItem: namespace, object_name, item_id, - project_name + project_name, + version_locked, ): self.representation_id = representation_id self.loader_name = loader_name @@ -103,6 +104,7 @@ class ContainerItem: self.namespace = namespace self.item_id = item_id self.project_name = project_name + self.version_locked = version_locked @classmethod def from_container_data(cls, current_project_name, container): @@ -114,7 +116,8 @@ class ContainerItem: item_id=uuid.uuid4().hex, project_name=container.get( "project_name", current_project_name - ) + ), + version_locked=container.get("version_locked", False), ) @@ -126,6 +129,7 @@ class RepresentationInfo: product_id, product_name, product_type, + product_type_icon, product_group, version_id, representation_name, @@ -135,6 +139,7 @@ class RepresentationInfo: self.product_id = product_id self.product_name = product_name self.product_type = product_type + self.product_type_icon = product_type_icon self.product_group = product_group self.version_id = version_id self.representation_name = representation_name @@ -153,7 +158,17 @@ class RepresentationInfo: @classmethod def new_invalid(cls): - return cls(None, None, None, None, None, None, None, None) + return cls( + None, + None, + None, + None, + None, + None, + None, + None, + None, + ) class VersionItem: @@ -229,6 +244,9 @@ class ContainersModel: def get_representation_info_items(self, project_name, representation_ids): output = {} missing_repre_ids = set() + icons_mapping = self._controller.get_product_type_icons_mapping( + 
project_name + ) for repre_id in representation_ids: try: uuid.UUID(repre_id) @@ -253,6 +271,7 @@ class ContainersModel: "product_id": None, "product_name": None, "product_type": None, + "product_type_icon": None, "product_group": None, "version_id": None, "representation_name": None, @@ -265,10 +284,17 @@ class ContainersModel: kwargs["folder_id"] = folder["id"] kwargs["folder_path"] = folder["path"] if product: + product_type = product["productType"] + product_base_type = product.get("productBaseType") + icon = icons_mapping.get_icon( + product_base_type=product_base_type, + product_type=product_type, + ) group = product["attrib"]["productGroup"] kwargs["product_id"] = product["id"] kwargs["product_name"] = product["name"] kwargs["product_type"] = product["productType"] + kwargs["product_type_icon"] = icon kwargs["product_group"] = group if version: kwargs["version_id"] = version["id"] diff --git a/client/ayon_core/tools/sceneinventory/select_version_dialog.py b/client/ayon_core/tools/sceneinventory/select_version_dialog.py index 68284ad1fe..18a39e495c 100644 --- a/client/ayon_core/tools/sceneinventory/select_version_dialog.py +++ b/client/ayon_core/tools/sceneinventory/select_version_dialog.py @@ -127,6 +127,7 @@ class SelectVersionComboBox(QtWidgets.QComboBox): status_text_rect.setLeft(icon_rect.right() + 2) if status_text_rect.width() <= 0: + painter.restore() return if status_text_rect.width() < metrics.width(status_name): @@ -144,6 +145,7 @@ class SelectVersionComboBox(QtWidgets.QComboBox): QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter, status_name ) + painter.restore() def set_current_index(self, index): model = self._combo_view.model() diff --git a/client/ayon_core/tools/sceneinventory/view.py b/client/ayon_core/tools/sceneinventory/view.py index fdd1bdbe75..22bc170230 100644 --- a/client/ayon_core/tools/sceneinventory/view.py +++ b/client/ayon_core/tools/sceneinventory/view.py @@ -17,6 +17,7 @@ from ayon_core.tools.utils.lib import ( format_version, preserve_expanded_rows, preserve_selection, + get_qt_icon, ) from ayon_core.tools.utils.delegates import StatusDelegate @@ -46,7 +47,7 @@ class SceneInventoryView(QtWidgets.QTreeView): hierarchy_view_changed = QtCore.Signal(bool) def __init__(self, controller, parent): - super(SceneInventoryView, self).__init__(parent=parent) + super().__init__(parent=parent) # view settings self.setIndentation(12) @@ -524,7 +525,14 @@ class SceneInventoryView(QtWidgets.QTreeView): submenu = QtWidgets.QMenu("Actions", self) for action in custom_actions: color = action.color or DEFAULT_COLOR - icon = qtawesome.icon("fa.%s" % action.icon, color=color) + icon_def = action.icon + if not isinstance(action.icon, dict): + icon_def = { + "type": "awesome-font", + "name": icon_def, + "color": color, + } + icon = get_qt_icon(icon_def) action_item = QtWidgets.QAction(icon, action.label, submenu) action_item.triggered.connect( partial( @@ -622,7 +630,7 @@ class SceneInventoryView(QtWidgets.QTreeView): if isinstance(result, (list, set)): self._select_items_by_action(result) - if isinstance(result, dict): + elif isinstance(result, dict): self._select_items_by_action( result["objectNames"], result["options"] ) diff --git a/client/ayon_core/tools/subsetmanager/README.md b/client/ayon_core/tools/subsetmanager/README.md deleted file mode 100644 index 35b80ea114..0000000000 --- a/client/ayon_core/tools/subsetmanager/README.md +++ /dev/null @@ -1,19 +0,0 @@ -Subset manager --------------- - -Simple UI showing list of created subset that will be published via Pyblish. 
-Useful for applications (Photoshop, AfterEffects, TVPaint, Harmony) which are -storing metadata about instance hidden from user. - -This UI allows listing all created subset and removal of them if needed ( -in case use doesn't want to publish anymore, its using workfile as a starting -file for different task and instances should be completely different etc. -) - -Host is expected to implemented: -- `list_instances` - returning list of dictionaries (instances), must contain - unique uuid field - example: - ```[{"uuid":"15","active":true,"subset":"imageBG","family":"image","id":"ayon.create.instance","asset":"Town"}]``` -- `remove_instance(instance)` - removes instance from file's metadata - instance is a dictionary, with uuid field \ No newline at end of file diff --git a/client/ayon_core/tools/subsetmanager/__init__.py b/client/ayon_core/tools/subsetmanager/__init__.py deleted file mode 100644 index 6cfca7db66..0000000000 --- a/client/ayon_core/tools/subsetmanager/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from .window import ( - show, - SubsetManagerWindow -) - -__all__ = ( - "show", - "SubsetManagerWindow" -) diff --git a/client/ayon_core/tools/subsetmanager/model.py b/client/ayon_core/tools/subsetmanager/model.py deleted file mode 100644 index 4964abd86d..0000000000 --- a/client/ayon_core/tools/subsetmanager/model.py +++ /dev/null @@ -1,56 +0,0 @@ -import uuid - -from qtpy import QtCore, QtGui - -from ayon_core.pipeline import registered_host - -ITEM_ID_ROLE = QtCore.Qt.UserRole + 1 - - -class InstanceModel(QtGui.QStandardItemModel): - def __init__(self, *args, **kwargs): - super(InstanceModel, self).__init__(*args, **kwargs) - self._instances_by_item_id = {} - - def get_instance_by_id(self, item_id): - return self._instances_by_item_id.get(item_id) - - def refresh(self): - self.clear() - - self._instances_by_item_id = {} - - instances = None - host = registered_host() - list_instances = getattr(host, "list_instances", None) - if list_instances: - instances = list_instances() - - if not instances: - return - - items = [] - for instance_data in instances: - item_id = str(uuid.uuid4()) - product_name = ( - instance_data.get("productName") - or instance_data.get("subset") - ) - label = instance_data.get("label") or product_name - item = QtGui.QStandardItem(label) - item.setEnabled(True) - item.setEditable(False) - item.setData(item_id, ITEM_ID_ROLE) - items.append(item) - self._instances_by_item_id[item_id] = instance_data - - if items: - self.invisibleRootItem().appendRows(items) - - def headerData(self, section, orientation, role): - if role == QtCore.Qt.DisplayRole and section == 0: - return "Instance" - - return super(InstanceModel, self).headerData( - section, orientation, role - ) diff --git a/client/ayon_core/tools/subsetmanager/widgets.py b/client/ayon_core/tools/subsetmanager/widgets.py deleted file mode 100644 index 1067474c44..0000000000 --- a/client/ayon_core/tools/subsetmanager/widgets.py +++ /dev/null @@ -1,110 +0,0 @@ -import json -from qtpy import QtWidgets, QtCore - - -class InstanceDetail(QtWidgets.QWidget): - save_triggered = QtCore.Signal() - - def __init__(self, parent=None): - super(InstanceDetail, self).__init__(parent) - - details_widget = QtWidgets.QPlainTextEdit(self) - details_widget.setObjectName("SubsetManagerDetailsText") - - save_btn = QtWidgets.QPushButton("Save", self) - - self._block_changes = False - self._editable = False - self._item_id = None - - layout = QtWidgets.QVBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - 
layout.addWidget(details_widget, 1) - layout.addWidget(save_btn, 0, QtCore.Qt.AlignRight) - - save_btn.clicked.connect(self._on_save_clicked) - details_widget.textChanged.connect(self._on_text_change) - - self._details_widget = details_widget - self._save_btn = save_btn - - self.set_editable(False) - - def _on_save_clicked(self): - if self.is_valid(): - self.save_triggered.emit() - - def set_editable(self, enabled=True): - self._editable = enabled - self.update_state() - - def update_state(self, valid=None): - editable = self._editable - if not self._item_id: - editable = False - - self._save_btn.setVisible(editable) - self._details_widget.setReadOnly(not editable) - if valid is None: - valid = self.is_valid() - - self._save_btn.setEnabled(valid) - self._set_invalid_detail(valid) - - def _set_invalid_detail(self, valid): - state = "" - if not valid: - state = "invalid" - - current_state = self._details_widget.property("state") - if current_state != state: - self._details_widget.setProperty("state", state) - self._details_widget.style().polish(self._details_widget) - - def set_details(self, container, item_id): - self._item_id = item_id - - text = "Nothing selected" - if item_id: - try: - text = json.dumps(container, indent=4) - except Exception: - text = str(container) - - self._block_changes = True - self._details_widget.setPlainText(text) - self._block_changes = False - - self.update_state() - - def instance_data_from_text(self): - try: - jsoned = json.loads(self._details_widget.toPlainText()) - except Exception: - jsoned = None - return jsoned - - def item_id(self): - return self._item_id - - def is_valid(self): - if not self._item_id: - return True - - value = self._details_widget.toPlainText() - valid = False - try: - jsoned = json.loads(value) - if jsoned and isinstance(jsoned, dict): - valid = True - - except Exception: - pass - return valid - - def _on_text_change(self): - if self._block_changes or not self._item_id: - return - - valid = self.is_valid() - self.update_state(valid) diff --git a/client/ayon_core/tools/subsetmanager/window.py b/client/ayon_core/tools/subsetmanager/window.py deleted file mode 100644 index 164ffa95a7..0000000000 --- a/client/ayon_core/tools/subsetmanager/window.py +++ /dev/null @@ -1,218 +0,0 @@ -import os -import sys - -from qtpy import QtWidgets, QtCore -import qtawesome - -from ayon_core import style -from ayon_core.pipeline import registered_host -from ayon_core.tools.utils import PlaceholderLineEdit -from ayon_core.tools.utils.lib import ( - iter_model_rows, - qt_app_context -) -from ayon_core.tools.utils.models import RecursiveSortFilterProxyModel -from .model import ( - InstanceModel, - ITEM_ID_ROLE -) -from .widgets import InstanceDetail - - -module = sys.modules[__name__] -module.window = None - - -class SubsetManagerWindow(QtWidgets.QDialog): - def __init__(self, parent=None): - super(SubsetManagerWindow, self).__init__(parent=parent) - self.setWindowTitle("Subset Manager 0.1") - self.setObjectName("SubsetManager") - if not parent: - self.setWindowFlags( - self.windowFlags() | QtCore.Qt.WindowStaysOnTopHint - ) - - self.resize(780, 430) - - # Trigger refresh on first called show - self._first_show = True - - left_side_widget = QtWidgets.QWidget(self) - - # Header part - header_widget = QtWidgets.QWidget(left_side_widget) - - # Filter input - filter_input = PlaceholderLineEdit(header_widget) - filter_input.setPlaceholderText("Filter products..") - - # Refresh button - icon = qtawesome.icon("fa.refresh", color="white") - refresh_btn = 
QtWidgets.QPushButton(header_widget) - refresh_btn.setIcon(icon) - - header_layout = QtWidgets.QHBoxLayout(header_widget) - header_layout.setContentsMargins(0, 0, 0, 0) - header_layout.addWidget(filter_input) - header_layout.addWidget(refresh_btn) - - # Instances view - view = QtWidgets.QTreeView(left_side_widget) - view.setIndentation(0) - view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) - - model = InstanceModel(view) - proxy = RecursiveSortFilterProxyModel() - proxy.setSourceModel(model) - proxy.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive) - - view.setModel(proxy) - - left_side_layout = QtWidgets.QVBoxLayout(left_side_widget) - left_side_layout.setContentsMargins(0, 0, 0, 0) - left_side_layout.addWidget(header_widget) - left_side_layout.addWidget(view) - - details_widget = InstanceDetail(self) - - layout = QtWidgets.QHBoxLayout(self) - layout.addWidget(left_side_widget, 0) - layout.addWidget(details_widget, 1) - - filter_input.textChanged.connect(proxy.setFilterFixedString) - refresh_btn.clicked.connect(self._on_refresh_clicked) - view.clicked.connect(self._on_activated) - view.customContextMenuRequested.connect(self.on_context_menu) - details_widget.save_triggered.connect(self._on_save) - - self._model = model - self._proxy = proxy - self._view = view - self._details_widget = details_widget - self._refresh_btn = refresh_btn - - def _on_refresh_clicked(self): - self.refresh() - - def _on_activated(self, index): - container = None - item_id = None - if index.isValid(): - item_id = index.data(ITEM_ID_ROLE) - container = self._model.get_instance_by_id(item_id) - - self._details_widget.set_details(container, item_id) - - def _on_save(self): - host = registered_host() - if not hasattr(host, "save_instances"): - print("BUG: Host does not have \"save_instances\" method") - return - - current_index = self._view.selectionModel().currentIndex() - if not current_index.isValid(): - return - - item_id = current_index.data(ITEM_ID_ROLE) - if item_id != self._details_widget.item_id(): - return - - item_data = self._details_widget.instance_data_from_text() - new_instances = [] - for index in iter_model_rows(self._model, 0): - _item_id = index.data(ITEM_ID_ROLE) - if _item_id == item_id: - instance_data = item_data - else: - instance_data = self._model.get_instance_by_id(item_id) - new_instances.append(instance_data) - - host.save_instances(new_instances) - - def on_context_menu(self, point): - point_index = self._view.indexAt(point) - item_id = point_index.data(ITEM_ID_ROLE) - instance_data = self._model.get_instance_by_id(item_id) - if instance_data is None: - return - - # Prepare menu - menu = QtWidgets.QMenu(self) - actions = [] - host = registered_host() - if hasattr(host, "remove_instance"): - action = QtWidgets.QAction("Remove instance", menu) - action.setData(host.remove_instance) - actions.append(action) - - if hasattr(host, "select_instance"): - action = QtWidgets.QAction("Select instance", menu) - action.setData(host.select_instance) - actions.append(action) - - if not actions: - actions.append(QtWidgets.QAction("* Nothing to do", menu)) - - for action in actions: - menu.addAction(action) - - # Show menu under mouse - global_point = self._view.mapToGlobal(point) - action = menu.exec_(global_point) - if not action or not action.data(): - return - - # Process action - # TODO catch exceptions - function = action.data() - function(instance_data) - - # Reset modified data - self.refresh() - - def refresh(self): - self._details_widget.set_details(None, None) - self._model.refresh() - - 
host = registered_host() - dev_mode = os.environ.get("AVALON_DEVELOP_MODE") or "" - editable = False - if dev_mode.lower() in ("1", "yes", "true", "on"): - editable = hasattr(host, "save_instances") - self._details_widget.set_editable(editable) - - def showEvent(self, *args, **kwargs): - super(SubsetManagerWindow, self).showEvent(*args, **kwargs) - if self._first_show: - self._first_show = False - self.setStyleSheet(style.load_stylesheet()) - self.refresh() - - -def show(root=None, debug=False, parent=None): - """Display Scene Inventory GUI - - Arguments: - debug (bool, optional): Run in debug-mode, - defaults to False - parent (QtCore.QObject, optional): When provided parent the interface - to this QObject. - - """ - - try: - module.window.close() - del module.window - except (RuntimeError, AttributeError): - pass - - with qt_app_context(): - window = SubsetManagerWindow(parent) - window.show() - - module.window = window - - # Pull window to the front. - module.window.raise_() - module.window.activateWindow() diff --git a/client/ayon_core/tools/tray/ui/tray.py b/client/ayon_core/tools/tray/ui/tray.py index aad89b6081..cea8d4f747 100644 --- a/client/ayon_core/tools/tray/ui/tray.py +++ b/client/ayon_core/tools/tray/ui/tray.py @@ -240,6 +240,16 @@ class TrayManager: self.log.warning("Other tray started meanwhile. Exiting.") self.exit() + project_bundle = os.getenv("AYON_BUNDLE_NAME") + studio_bundle = os.getenv("AYON_STUDIO_BUNDLE_NAME") + if studio_bundle and project_bundle != studio_bundle: + self.log.info( + f"Project bundle '{project_bundle}' is defined, but tray" + " cannot be running in project scope. Restarting tray to use" + " studio bundle." + ) + self.restart() + def get_services_submenu(self): return self._services_submenu @@ -270,11 +280,18 @@ class TrayManager: elif is_staging_enabled(): additional_args.append("--use-staging") + if "--project" in additional_args: + idx = additional_args.index("--project") + additional_args.pop(idx) + additional_args.pop(idx) + args.extend(additional_args) envs = dict(os.environ.items()) for key in { "AYON_BUNDLE_NAME", + "AYON_STUDIO_BUNDLE_NAME", + "AYON_PROJECT_NAME", }: envs.pop(key, None) @@ -329,6 +346,7 @@ class TrayManager: return json_response({ "username": self._cached_username, "bundle": os.getenv("AYON_BUNDLE_NAME"), + "studio_bundle": os.getenv("AYON_STUDIO_BUNDLE_NAME"), "dev_mode": is_dev_mode_enabled(), "staging_mode": is_staging_enabled(), "addons": { @@ -516,6 +534,8 @@ class TrayManager: "AYON_SERVER_URL", "AYON_API_KEY", "AYON_BUNDLE_NAME", + "AYON_STUDIO_BUNDLE_NAME", + "AYON_PROJECT_NAME", }: os.environ.pop(key, None) self.restart() @@ -549,6 +569,8 @@ class TrayManager: envs = dict(os.environ.items()) for key in { "AYON_BUNDLE_NAME", + "AYON_STUDIO_BUNDLE_NAME", + "AYON_PROJECT_NAME", }: envs.pop(key, None) diff --git a/client/ayon_core/tools/utils/delegates.py b/client/ayon_core/tools/utils/delegates.py index 1cc18b5722..059fc1da0e 100644 --- a/client/ayon_core/tools/utils/delegates.py +++ b/client/ayon_core/tools/utils/delegates.py @@ -186,8 +186,15 @@ class StatusDelegate(QtWidgets.QStyledItemDelegate): ) fm = QtGui.QFontMetrics(option.font) if text_rect.width() < fm.width(text): - text = self._get_status_short_name(index) - if text_rect.width() < fm.width(text): + short_text = self._get_status_short_name(index) + if short_text: + text = short_text + + text = fm.elidedText( + text, QtCore.Qt.ElideRight, text_rect.width() + ) + # Allow at least one character + if len(text) < 2: text = "" fg_color = 
self._get_status_color(index) diff --git a/client/ayon_core/tools/utils/dialogs.py b/client/ayon_core/tools/utils/dialogs.py index 5dd0ddd54e..6dc3cf1d8b 100644 --- a/client/ayon_core/tools/utils/dialogs.py +++ b/client/ayon_core/tools/utils/dialogs.py @@ -41,7 +41,7 @@ class ScrollMessageBox(QtWidgets.QDialog): """ def __init__(self, icon, title, messages, cancelable=False): - super(ScrollMessageBox, self).__init__() + super().__init__() self.setWindowTitle(title) self.icon = icon @@ -49,8 +49,6 @@ class ScrollMessageBox(QtWidgets.QDialog): self.setWindowFlags(QtCore.Qt.WindowTitleHint) - layout = QtWidgets.QVBoxLayout(self) - scroll_widget = QtWidgets.QScrollArea(self) scroll_widget.setWidgetResizable(True) content_widget = QtWidgets.QWidget(self) @@ -63,14 +61,8 @@ class ScrollMessageBox(QtWidgets.QDialog): content_layout.addWidget(label_widget) message_len = max(message_len, len(message)) - # guess size of scrollable area - # WARNING: 'desktop' method probably won't work in PySide6 - desktop = QtWidgets.QApplication.desktop() - max_width = desktop.availableGeometry().width() - scroll_widget.setMinimumWidth( - min(max_width, message_len * 6) - ) - layout.addWidget(scroll_widget) + # Set minimum width + scroll_widget.setMinimumWidth(360) buttons = QtWidgets.QDialogButtonBox.Ok if cancelable: @@ -86,7 +78,9 @@ class ScrollMessageBox(QtWidgets.QDialog): btn.clicked.connect(self._on_copy_click) btn_box.addButton(btn, QtWidgets.QDialogButtonBox.NoRole) - layout.addWidget(btn_box) + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.addWidget(scroll_widget, 1) + main_layout.addWidget(btn_box, 0) def _on_copy_click(self): clipboard = QtWidgets.QApplication.clipboard() @@ -104,7 +98,7 @@ class SimplePopup(QtWidgets.QDialog): on_clicked = QtCore.Signal() def __init__(self, parent=None, *args, **kwargs): - super(SimplePopup, self).__init__(parent=parent, *args, **kwargs) + super().__init__(parent=parent, *args, **kwargs) # Set default title self.setWindowTitle("Popup") @@ -161,7 +155,7 @@ class SimplePopup(QtWidgets.QDialog): geo = self._calculate_window_geometry() self.setGeometry(geo) - return super(SimplePopup, self).showEvent(event) + return super().showEvent(event) def _on_clicked(self): """Callback for when the 'show' button is clicked. 
@@ -228,9 +222,7 @@ class PopupUpdateKeys(SimplePopup): on_clicked_state = QtCore.Signal(bool) def __init__(self, parent=None, *args, **kwargs): - super(PopupUpdateKeys, self).__init__( - parent=parent, *args, **kwargs - ) + super().__init__(parent=parent, *args, **kwargs) layout = self.layout() diff --git a/client/ayon_core/tools/utils/host_tools.py b/client/ayon_core/tools/utils/host_tools.py index 3d356555f3..96b7615e3c 100644 --- a/client/ayon_core/tools/utils/host_tools.py +++ b/client/ayon_core/tools/utils/host_tools.py @@ -31,9 +31,7 @@ class HostToolsHelper: # Prepare attributes for all tools self._workfiles_tool = None self._loader_tool = None - self._creator_tool = None self._publisher_tool = None - self._subset_manager_tool = None self._scene_inventory_tool = None self._experimental_tools_dialog = None @@ -96,49 +94,6 @@ class HostToolsHelper: loader_tool.refresh() - def get_creator_tool(self, parent): - """Create, cache and return creator tool window.""" - if self._creator_tool is None: - from ayon_core.tools.creator import CreatorWindow - - creator_window = CreatorWindow(parent=parent or self._parent) - self._creator_tool = creator_window - - return self._creator_tool - - def show_creator(self, parent=None): - """Show tool to create new instantes for publishing.""" - with qt_app_context(): - creator_tool = self.get_creator_tool(parent) - creator_tool.refresh() - creator_tool.show() - - # Pull window to the front. - creator_tool.raise_() - creator_tool.activateWindow() - - def get_subset_manager_tool(self, parent): - """Create, cache and return subset manager tool window.""" - if self._subset_manager_tool is None: - from ayon_core.tools.subsetmanager import SubsetManagerWindow - - subset_manager_window = SubsetManagerWindow( - parent=parent or self._parent - ) - self._subset_manager_tool = subset_manager_window - - return self._subset_manager_tool - - def show_subset_manager(self, parent=None): - """Show tool display/remove existing created instances.""" - with qt_app_context(): - subset_manager_tool = self.get_subset_manager_tool(parent) - subset_manager_tool.show() - - # Pull window to the front. 
- subset_manager_tool.raise_() - subset_manager_tool.activateWindow() - def get_scene_inventory_tool(self, parent): """Create, cache and return scene inventory tool window.""" if self._scene_inventory_tool is None: @@ -261,35 +216,29 @@ class HostToolsHelper: if tool_name == "workfiles": return self.get_workfiles_tool(parent, *args, **kwargs) - elif tool_name == "loader": + if tool_name == "loader": return self.get_loader_tool(parent, *args, **kwargs) - elif tool_name == "libraryloader": + if tool_name == "libraryloader": return self.get_library_loader_tool(parent, *args, **kwargs) - elif tool_name == "creator": - return self.get_creator_tool(parent, *args, **kwargs) - - elif tool_name == "subsetmanager": - return self.get_subset_manager_tool(parent, *args, **kwargs) - - elif tool_name == "sceneinventory": + if tool_name == "sceneinventory": return self.get_scene_inventory_tool(parent, *args, **kwargs) - elif tool_name == "publish": - self.log.info("Can't return publish tool window.") - - # "new" publisher - elif tool_name == "publisher": + if tool_name == "publisher": return self.get_publisher_tool(parent, *args, **kwargs) - elif tool_name == "experimental_tools": + if tool_name == "experimental_tools": return self.get_experimental_tools_dialog(parent, *args, **kwargs) - else: - self.log.warning( - "Can't show unknown tool name: \"{}\"".format(tool_name) - ) + if tool_name == "publish": + self.log.info("Can't return publish tool window.") + return None + + self.log.warning( + "Can't show unknown tool name: \"{}\"".format(tool_name) + ) + return None def show_tool_by_name(self, tool_name, parent=None, *args, **kwargs): """Show tool by it's name. @@ -305,12 +254,6 @@ class HostToolsHelper: elif tool_name == "libraryloader": self.show_library_loader(parent, *args, **kwargs) - elif tool_name == "creator": - self.show_creator(parent, *args, **kwargs) - - elif tool_name == "subsetmanager": - self.show_subset_manager(parent, *args, **kwargs) - elif tool_name == "sceneinventory": self.show_scene_inventory(parent, *args, **kwargs) @@ -379,14 +322,6 @@ def show_library_loader(parent=None): _SingletonPoint.show_tool_by_name("libraryloader", parent) -def show_creator(parent=None): - _SingletonPoint.show_tool_by_name("creator", parent) - - -def show_subset_manager(parent=None): - _SingletonPoint.show_tool_by_name("subsetmanager", parent) - - def show_scene_inventory(parent=None): _SingletonPoint.show_tool_by_name("sceneinventory", parent) diff --git a/client/ayon_core/tools/utils/nice_checkbox.py b/client/ayon_core/tools/utils/nice_checkbox.py index 3d9d63b6bc..c33533b0e4 100644 --- a/client/ayon_core/tools/utils/nice_checkbox.py +++ b/client/ayon_core/tools/utils/nice_checkbox.py @@ -1,4 +1,5 @@ -from math import floor, sqrt, ceil +from math import floor, ceil + from qtpy import QtWidgets, QtCore, QtGui from ayon_core.style import get_objected_colors @@ -9,12 +10,15 @@ class NiceCheckbox(QtWidgets.QFrame): clicked = QtCore.Signal() _checked_bg_color = None + _checked_bg_color_disabled = None _unchecked_bg_color = None + _unchecked_bg_color_disabled = None _checker_color = None + _checker_color_disabled = None _checker_hover_color = None def __init__(self, checked=False, draw_icons=False, parent=None): - super(NiceCheckbox, self).__init__(parent) + super().__init__(parent) self.setObjectName("NiceCheckbox") self.setAttribute(QtCore.Qt.WA_TranslucentBackground) @@ -48,8 +52,6 @@ class NiceCheckbox(QtWidgets.QFrame): self._pressed = False self._under_mouse = False - self.icon_scale_factor = sqrt(2) / 2 
- icon_path_stroker = QtGui.QPainterPathStroker() icon_path_stroker.setCapStyle(QtCore.Qt.RoundCap) icon_path_stroker.setJoinStyle(QtCore.Qt.RoundJoin) @@ -61,35 +63,6 @@ class NiceCheckbox(QtWidgets.QFrame): self._base_size = QtCore.QSize(90, 50) self._load_colors() - @classmethod - def _load_colors(cls): - if cls._checked_bg_color is not None: - return - - colors_info = get_objected_colors("nice-checkbox") - - cls._checked_bg_color = colors_info["bg-checked"].get_qcolor() - cls._unchecked_bg_color = colors_info["bg-unchecked"].get_qcolor() - - cls._checker_color = colors_info["bg-checker"].get_qcolor() - cls._checker_hover_color = colors_info["bg-checker-hover"].get_qcolor() - - @property - def checked_bg_color(self): - return self._checked_bg_color - - @property - def unchecked_bg_color(self): - return self._unchecked_bg_color - - @property - def checker_color(self): - return self._checker_color - - @property - def checker_hover_color(self): - return self._checker_hover_color - def setTristate(self, tristate=True): if self._is_tristate != tristate: self._is_tristate = tristate @@ -121,14 +94,14 @@ class NiceCheckbox(QtWidgets.QFrame): def setFixedHeight(self, *args, **kwargs): self._fixed_height_set = True - super(NiceCheckbox, self).setFixedHeight(*args, **kwargs) + super().setFixedHeight(*args, **kwargs) if not self._fixed_width_set: width = self.get_width_hint_by_height(self.height()) self.setFixedWidth(width) def setFixedWidth(self, *args, **kwargs): self._fixed_width_set = True - super(NiceCheckbox, self).setFixedWidth(*args, **kwargs) + super().setFixedWidth(*args, **kwargs) if not self._fixed_height_set: height = self.get_height_hint_by_width(self.width()) self.setFixedHeight(height) @@ -136,7 +109,7 @@ class NiceCheckbox(QtWidgets.QFrame): def setFixedSize(self, *args, **kwargs): self._fixed_height_set = True self._fixed_width_set = True - super(NiceCheckbox, self).setFixedSize(*args, **kwargs) + super().setFixedSize(*args, **kwargs) def steps(self): return self._steps @@ -242,7 +215,7 @@ class NiceCheckbox(QtWidgets.QFrame): if event.buttons() & QtCore.Qt.LeftButton: self._pressed = True self.repaint() - super(NiceCheckbox, self).mousePressEvent(event) + super().mousePressEvent(event) def mouseReleaseEvent(self, event): if self._pressed and not event.buttons() & QtCore.Qt.LeftButton: @@ -252,7 +225,7 @@ class NiceCheckbox(QtWidgets.QFrame): self.clicked.emit() event.accept() return - super(NiceCheckbox, self).mouseReleaseEvent(event) + super().mouseReleaseEvent(event) def mouseMoveEvent(self, event): if self._pressed: @@ -261,19 +234,19 @@ class NiceCheckbox(QtWidgets.QFrame): self._under_mouse = under_mouse self.repaint() - super(NiceCheckbox, self).mouseMoveEvent(event) + super().mouseMoveEvent(event) def enterEvent(self, event): self._under_mouse = True if self.isEnabled(): self.repaint() - super(NiceCheckbox, self).enterEvent(event) + super().enterEvent(event) def leaveEvent(self, event): self._under_mouse = False if self.isEnabled(): self.repaint() - super(NiceCheckbox, self).leaveEvent(event) + super().leaveEvent(event) def _on_animation_timeout(self): if self._checkstate == QtCore.Qt.Checked: @@ -302,24 +275,13 @@ class NiceCheckbox(QtWidgets.QFrame): @staticmethod def steped_color(color1, color2, offset_ratio): - red_dif = ( - color1.red() - color2.red() - ) - green_dif = ( - color1.green() - color2.green() - ) - blue_dif = ( - color1.blue() - color2.blue() - ) - red = int(color2.red() + ( - red_dif * offset_ratio - )) - green = int(color2.green() + ( - green_dif * 
offset_ratio - )) - blue = int(color2.blue() + ( - blue_dif * offset_ratio - )) + red_dif = color1.red() - color2.red() + green_dif = color1.green() - color2.green() + blue_dif = color1.blue() - color2.blue() + + red = int(color2.red() + (red_dif * offset_ratio)) + green = int(color2.green() + (green_dif * offset_ratio)) + blue = int(color2.blue() + (blue_dif * offset_ratio)) return QtGui.QColor(red, green, blue) @@ -334,20 +296,28 @@ class NiceCheckbox(QtWidgets.QFrame): painter = QtGui.QPainter(self) painter.setRenderHint(QtGui.QPainter.Antialiasing) + painter.setPen(QtCore.Qt.NoPen) # Draw inner background - if self._current_step == self._steps: - bg_color = self.checked_bg_color + if not self.isEnabled(): + bg_color = ( + self._checked_bg_color_disabled + if self._current_step == self._steps + else self._unchecked_bg_color_disabled + ) + + elif self._current_step == self._steps: + bg_color = self._checked_bg_color elif self._current_step == 0: - bg_color = self.unchecked_bg_color + bg_color = self._unchecked_bg_color else: offset_ratio = float(self._current_step) / self._steps # Animation bg bg_color = self.steped_color( - self.checked_bg_color, - self.unchecked_bg_color, + self._checked_bg_color, + self._unchecked_bg_color, offset_ratio ) @@ -378,14 +348,20 @@ class NiceCheckbox(QtWidgets.QFrame): -margin_size_c, -margin_size_c ) - if checkbox_rect.width() > checkbox_rect.height(): - radius = floor(checkbox_rect.height() * 0.5) - else: - radius = floor(checkbox_rect.width() * 0.5) + slider_rect = QtCore.QRect(checkbox_rect) + slider_offset = int( + ceil(min(slider_rect.width(), slider_rect.height())) * 0.08 + ) + if slider_offset < 1: + slider_offset = 1 + slider_rect.adjust( + slider_offset, slider_offset, + -slider_offset, -slider_offset + ) + radius = floor(min(slider_rect.width(), slider_rect.height()) * 0.5) - painter.setPen(QtCore.Qt.NoPen) painter.setBrush(bg_color) - painter.drawRoundedRect(checkbox_rect, radius, radius) + painter.drawRoundedRect(slider_rect, radius, radius) # Draw checker checker_size = size_without_margins - (margin_size_c * 2) @@ -394,9 +370,8 @@ class NiceCheckbox(QtWidgets.QFrame): - (margin_size_c * 2) - checker_size ) - if self._current_step == 0: - x_offset = 0 - else: + x_offset = 0 + if self._current_step != 0: x_offset = (float(area_width) / self._steps) * self._current_step pos_x = checkbox_rect.x() + x_offset + margin_size_c @@ -404,55 +379,80 @@ class NiceCheckbox(QtWidgets.QFrame): checker_rect = QtCore.QRect(pos_x, pos_y, checker_size, checker_size) - under_mouse = self.isEnabled() and self._under_mouse - if under_mouse: - checker_color = self.checker_hover_color - else: - checker_color = self.checker_color + checker_color = self._checker_color + if not self.isEnabled(): + checker_color = self._checker_color_disabled + elif self._under_mouse: + checker_color = self._checker_hover_color painter.setBrush(checker_color) painter.drawEllipse(checker_rect) if self._draw_icons: painter.setBrush(bg_color) - icon_path = self._get_icon_path(painter, checker_rect) + icon_path = self._get_icon_path(checker_rect) painter.drawPath(icon_path) - # Draw shadow overlay - if not self.isEnabled(): - level = 33 - alpha = 127 - painter.setPen(QtCore.Qt.transparent) - painter.setBrush(QtGui.QColor(level, level, level, alpha)) - painter.drawRoundedRect(checkbox_rect, radius, radius) - painter.end() - def _get_icon_path(self, painter, checker_rect): + @classmethod + def _load_colors(cls): + if cls._checked_bg_color is not None: + return + + colors_info = 
get_objected_colors("nice-checkbox") + + disabled_color = QtGui.QColor(33, 33, 33, 127) + + cls._checked_bg_color = colors_info["bg-checked"].get_qcolor() + cls._checked_bg_color_disabled = cls._merge_colors( + cls._checked_bg_color, disabled_color + ) + cls._unchecked_bg_color = colors_info["bg-unchecked"].get_qcolor() + cls._unchecked_bg_color_disabled = cls._merge_colors( + cls._unchecked_bg_color, disabled_color + ) + + cls._checker_color = colors_info["bg-checker"].get_qcolor() + cls._checker_color_disabled = cls._merge_colors( + cls._checker_color, disabled_color + ) + cls._checker_hover_color = colors_info["bg-checker-hover"].get_qcolor() + + @staticmethod + def _merge_colors(color_1, color_2): + a = color_2.alphaF() + return QtGui.QColor( + floor((color_1.red() + (color_2.red() * a)) * 0.5), + floor((color_1.green() + (color_2.green() * a)) * 0.5), + floor((color_1.blue() + (color_2.blue() * a)) * 0.5), + color_1.alpha() + ) + + def _get_icon_path(self, checker_rect): self.icon_path_stroker.setWidth(checker_rect.height() / 5) if self._current_step == self._steps: - return self._get_enabled_icon_path(painter, checker_rect) + return self._get_enabled_icon_path(checker_rect) if self._current_step == 0: - return self._get_disabled_icon_path(painter, checker_rect) + return self._get_disabled_icon_path(checker_rect) if self._current_step == self._middle_step: - return self._get_middle_circle_path(painter, checker_rect) + return self._get_middle_circle_path(checker_rect) disabled_step = self._steps - self._current_step enabled_step = self._steps - disabled_step half_steps = self._steps + 1 - ((self._steps + 1) % 2) if enabled_step > disabled_step: return self._get_enabled_icon_path( - painter, checker_rect, enabled_step, half_steps - ) - else: - return self._get_disabled_icon_path( - painter, checker_rect, disabled_step, half_steps + checker_rect, enabled_step, half_steps ) + return self._get_disabled_icon_path( + checker_rect, disabled_step, half_steps + ) - def _get_middle_circle_path(self, painter, checker_rect): + def _get_middle_circle_path(self, checker_rect): width = self.icon_path_stroker.width() path = QtGui.QPainterPath() path.addEllipse(checker_rect.center(), width, width) @@ -460,7 +460,7 @@ class NiceCheckbox(QtWidgets.QFrame): return path def _get_enabled_icon_path( - self, painter, checker_rect, step=None, half_steps=None + self, checker_rect, step=None, half_steps=None ): fifteenth = float(checker_rect.height()) / 15 # Left point @@ -509,7 +509,7 @@ class NiceCheckbox(QtWidgets.QFrame): return self.icon_path_stroker.createStroke(path) def _get_disabled_icon_path( - self, painter, checker_rect, step=None, half_steps=None + self, checker_rect, step=None, half_steps=None ): center_point = QtCore.QPointF( float(checker_rect.width()) / 2, diff --git a/client/ayon_core/tools/utils/tasks_widget.py b/client/ayon_core/tools/utils/tasks_widget.py index 744eb6060a..d77ce1e1f4 100644 --- a/client/ayon_core/tools/utils/tasks_widget.py +++ b/client/ayon_core/tools/utils/tasks_widget.py @@ -234,10 +234,11 @@ class TasksQtModel(QtGui.QStandardItemModel): ) icon = None if task_type_item is not None: + color = task_type_item.color or get_default_entity_icon_color() icon = get_qt_icon({ "type": "material-symbols", "name": task_type_item.icon, - "color": get_default_entity_icon_color() + "color": color, }) if icon is None: diff --git a/client/ayon_core/tools/utils/widgets.py b/client/ayon_core/tools/utils/widgets.py index de2c42c91f..4b787ff830 100644 --- 
a/client/ayon_core/tools/utils/widgets.py +++ b/client/ayon_core/tools/utils/widgets.py @@ -418,7 +418,7 @@ class ExpandingTextEdit(QtWidgets.QTextEdit): """QTextEdit which does not have sroll area but expands height.""" def __init__(self, parent=None): - super(ExpandingTextEdit, self).__init__(parent) + super().__init__(parent) size_policy = self.sizePolicy() size_policy.setHeightForWidth(True) @@ -441,14 +441,18 @@ class ExpandingTextEdit(QtWidgets.QTextEdit): margins = self.contentsMargins() document_width = 0 - if width >= margins.left() + margins.right(): - document_width = width - margins.left() - margins.right() + margins_size = margins.left() + margins.right() + if width >= margins_size: + document_width = width - margins_size document = self.document().clone() document.setTextWidth(document_width) return math.ceil( - margins.top() + document.size().height() + margins.bottom() + margins.top() + + document.size().height() + + margins.bottom() + + 2 ) def sizeHint(self): diff --git a/client/ayon_core/tools/workfiles/abstract.py b/client/ayon_core/tools/workfiles/abstract.py index 152ca33d99..863d6bb9bc 100644 --- a/client/ayon_core/tools/workfiles/abstract.py +++ b/client/ayon_core/tools/workfiles/abstract.py @@ -4,76 +4,6 @@ from abc import ABC, abstractmethod from ayon_core.style import get_default_entity_icon_color -class WorkfileInfo: - """Information about workarea file with possible additional from database. - - Args: - folder_id (str): Folder id. - task_id (str): Task id. - filepath (str): Filepath. - filesize (int): File size. - creation_time (float): Creation time (timestamp). - modification_time (float): Modification time (timestamp). - created_by (Union[str, none]): User who created the file. - updated_by (Union[str, none]): User who last updated the file. - note (str): Note. - """ - - def __init__( - self, - folder_id, - task_id, - filepath, - filesize, - creation_time, - modification_time, - created_by, - updated_by, - note, - ): - self.folder_id = folder_id - self.task_id = task_id - self.filepath = filepath - self.filesize = filesize - self.creation_time = creation_time - self.modification_time = modification_time - self.created_by = created_by - self.updated_by = updated_by - self.note = note - - def to_data(self): - """Converts WorkfileInfo item to data. - - Returns: - dict[str, Any]: Folder item data. - """ - - return { - "folder_id": self.folder_id, - "task_id": self.task_id, - "filepath": self.filepath, - "filesize": self.filesize, - "creation_time": self.creation_time, - "modification_time": self.modification_time, - "created_by": self.created_by, - "updated_by": self.updated_by, - "note": self.note, - } - - @classmethod - def from_data(cls, data): - """Re-creates WorkfileInfo item from data. - - Args: - data (dict[str, Any]): Workfile info item data. - - Returns: - WorkfileInfo: Workfile info item. - """ - - return cls(**data) - - class FolderItem: """Item representing folder entity on a server. @@ -87,8 +17,8 @@ class FolderItem: label (str): Folder label. icon_name (str): Name of icon from font awesome. icon_color (str): Hex color string that will be used for icon. - """ + """ def __init__( self, entity_id, parent_id, name, label, icon_name, icon_color ): @@ -104,8 +34,8 @@ class FolderItem: Returns: dict[str, Any]: Folder item data. - """ + """ return { "entity_id": self.entity_id, "parent_id": self.parent_id, @@ -124,8 +54,8 @@ class FolderItem: Returns: FolderItem: Folder item. 
- """ + """ return cls(**data) @@ -144,8 +74,8 @@ class TaskItem: parent_id (str): Parent folder id. icon_name (str): Name of icon from font awesome. icon_color (str): Hex color string that will be used for icon. - """ + """ def __init__( self, task_id, name, task_type, parent_id, icon_name, icon_color ): @@ -163,8 +93,8 @@ class TaskItem: Returns: str: Task id. - """ + """ return self.task_id @property @@ -173,8 +103,8 @@ class TaskItem: Returns: str: Label of task item. - """ + """ if self._label is None: self._label = "{} ({})".format(self.name, self.task_type) return self._label @@ -184,8 +114,8 @@ class TaskItem: Returns: dict[str, Any]: Task item data. - """ + """ return { "task_id": self.task_id, "name": self.name, @@ -204,116 +134,11 @@ class TaskItem: Returns: TaskItem: Task item. - """ + """ return cls(**data) -class FileItem: - """File item that represents a file. - - Can be used for both Workarea and Published workfile. Workarea file - will always exist on disk which is not the case for Published workfile. - - Args: - dirpath (str): Directory path of file. - filename (str): Filename. - modified (float): Modified timestamp. - created_by (Optional[str]): Username. - representation_id (Optional[str]): Representation id of published - workfile. - filepath (Optional[str]): Prepared filepath. - exists (Optional[bool]): If file exists on disk. - """ - - def __init__( - self, - dirpath, - filename, - modified, - created_by=None, - updated_by=None, - representation_id=None, - filepath=None, - exists=None - ): - self.filename = filename - self.dirpath = dirpath - self.modified = modified - self.created_by = created_by - self.updated_by = updated_by - self.representation_id = representation_id - self._filepath = filepath - self._exists = exists - - @property - def filepath(self): - """Filepath of file. - - Returns: - str: Full path to a file. - """ - - if self._filepath is None: - self._filepath = os.path.join(self.dirpath, self.filename) - return self._filepath - - @property - def exists(self): - """File is available. - - Returns: - bool: If file exists on disk. - """ - - if self._exists is None: - self._exists = os.path.exists(self.filepath) - return self._exists - - def to_data(self): - """Converts file item to data. - - Returns: - dict[str, Any]: File item data. - """ - - return { - "filename": self.filename, - "dirpath": self.dirpath, - "modified": self.modified, - "created_by": self.created_by, - "representation_id": self.representation_id, - "filepath": self.filepath, - "exists": self.exists, - } - - @classmethod - def from_data(cls, data): - """Re-creates file item from data. - - Args: - data (dict[str, Any]): File item data. - - Returns: - FileItem: File item. - """ - - required_keys = { - "filename", - "dirpath", - "modified", - "representation_id" - } - missing_keys = required_keys - set(data.keys()) - if missing_keys: - raise KeyError("Missing keys: {}".format(missing_keys)) - - return cls(**{ - key: data[key] - for key in required_keys - }) - - class WorkareaFilepathResult: """Result of workarea file formatting. @@ -323,8 +148,8 @@ class WorkareaFilepathResult: exists (bool): True if file exists. filepath (str): Filepath. If not provided it will be constructed from root and filename. - """ + """ def __init__(self, root, filename, exists, filepath=None): if not filepath and root and filename: filepath = os.path.join(root, filename) @@ -341,8 +166,8 @@ class AbstractWorkfilesCommon(ABC): Returns: bool: True if host is valid. 
- """ + """ pass @abstractmethod @@ -353,8 +178,8 @@ class AbstractWorkfilesCommon(ABC): Returns: Iterable[str]: List of extensions. - """ + """ pass @abstractmethod @@ -363,8 +188,8 @@ class AbstractWorkfilesCommon(ABC): Returns: bool: True if save is enabled. - """ + """ pass @abstractmethod @@ -373,8 +198,8 @@ class AbstractWorkfilesCommon(ABC): Args: enabled (bool): Enable save workfile when True. - """ + """ pass @@ -386,6 +211,7 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: str: Name of host. + """ pass @@ -395,8 +221,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: str: Name of project. - """ + """ pass @abstractmethod @@ -406,8 +232,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: Union[str, None]: Folder id or None if host does not have any context. - """ + """ pass @abstractmethod @@ -417,8 +243,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: Union[str, None]: Task name or None if host does not have any context. - """ + """ pass @abstractmethod @@ -428,8 +254,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: Union[str, None]: Path to workfile or None if host does not have opened specific file. - """ + """ pass @property @@ -439,8 +265,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: Anatomy: Project anatomy. - """ + """ pass @property @@ -450,8 +276,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: dict[str, Any]: Project settings. - """ + """ pass @abstractmethod @@ -463,8 +289,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: dict[str, Any]: Project entity data. - """ + """ pass @abstractmethod @@ -477,8 +303,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: dict[str, Any]: Folder entity data. - """ + """ pass @abstractmethod @@ -491,10 +317,24 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: dict[str, Any]: Task entity data. - """ + """ pass + @abstractmethod + def get_workfile_entities(self, task_id: str): + """Workfile entities for given task. + + Args: + task_id (str): Task id. + + Returns: + list[dict[str, Any]]: List of workfile entities. + + """ + pass + + @abstractmethod def emit_event(self, topic, data=None, source=None): """Emit event. @@ -502,8 +342,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): topic (str): Event topic used for callbacks filtering. data (Optional[dict[str, Any]]): Event data. source (Optional[str]): Event source. - """ + """ pass @@ -530,8 +370,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): topic (str): Name of topic. callback (Callable): Callback that will be called when event is triggered. - """ + """ pass @abstractmethod @@ -592,8 +432,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: List[str]: File extensions that can be used as workfile for current host. - """ + """ pass # Selection information @@ -603,8 +443,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: Union[str, None]: Folder id or None if no folder is selected. - """ + """ pass @abstractmethod @@ -616,8 +456,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Args: folder_id (Union[str, None]): Folder id or None if no folder is selected. - """ + """ pass @abstractmethod @@ -626,8 +466,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: Union[str, None]: Task id or None if no folder is selected. 
- """ + """ pass @abstractmethod @@ -649,8 +489,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): is selected. task_name (Union[str, None]): Task name or None if no task is selected. - """ + """ pass @abstractmethod @@ -659,18 +499,22 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: Union[str, None]: Selected workfile path. - """ + """ pass @abstractmethod - def set_selected_workfile_path(self, path): + def set_selected_workfile_path( + self, rootless_path, path, workfile_entity_id + ): """Change selected workfile path. Args: + rootless_path (Union[str, None]): Selected workfile rootless path. path (Union[str, None]): Selected workfile path. - """ + workfile_entity_id (Union[str, None]): Workfile entity id. + """ pass @abstractmethod @@ -680,8 +524,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: Union[str, None]: Representation id or None if no representation is selected. - """ + """ pass @abstractmethod @@ -691,8 +535,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Args: representation_id (Union[str, None]): Selected workfile representation id. - """ + """ pass def get_selected_context(self): @@ -700,8 +544,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: dict[str, Union[str, None]]: Selected context. - """ + """ return { "folder_id": self.get_selected_folder_id(), "task_id": self.get_selected_task_id(), @@ -737,8 +581,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): files UI element. representation_id (Optional[str]): Representation id. Used for published filed UI element. - """ + """ pass @abstractmethod @@ -750,8 +594,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: dict[str, Any]: Expected selection data. - """ + """ pass @abstractmethod @@ -760,8 +604,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Args: folder_id (str): Folder id which was selected. - """ + """ pass @abstractmethod @@ -771,8 +615,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Args: folder_id (str): Folder id under which task is. task_name (str): Task name which was selected. - """ + """ pass @abstractmethod @@ -785,8 +629,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): folder_id (str): Folder id under which representation is. task_name (str): Task name under which representation is. representation_id (str): Representation id which was selected. - """ + """ pass @abstractmethod @@ -797,8 +641,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): folder_id (str): Folder id under which workfile is. task_name (str): Task name under which workfile is. workfile_name (str): Workfile filename which was selected. - """ + """ pass @abstractmethod @@ -823,8 +667,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: list[FolderItem]: Minimum possible information needed for visualisation of folder hierarchy. - """ + """ pass @abstractmethod @@ -843,8 +687,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: list[TaskItem]: Minimum possible information needed for visualisation of tasks. - """ + """ pass @abstractmethod @@ -853,8 +697,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: bool: Has unsaved changes. - """ + """ pass @abstractmethod @@ -867,8 +711,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: str: Workarea directory. 
- """ + """ pass @abstractmethod @@ -881,9 +725,9 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): sender (Optional[str]): Who requested workarea file items. Returns: - list[FileItem]: List of workarea file items. - """ + list[WorkfileInfo]: List of workarea file items. + """ pass @abstractmethod @@ -899,8 +743,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: dict[str, Any]: Data for Save As operation. - """ + """ pass @abstractmethod @@ -925,12 +769,12 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: WorkareaFilepathResult: Result of the operation. - """ + """ pass @abstractmethod - def get_published_file_items(self, folder_id, task_id): + def get_published_file_items(self, folder_id: str, task_id: str): """Get published file items. Args: @@ -938,44 +782,52 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): task_id (Union[str, None]): Task id. Returns: - list[FileItem]: List of published file items. - """ + list[PublishedWorkfileInfo]: List of published file items. + """ pass @abstractmethod - def get_workfile_info(self, folder_id, task_name, filepath): + def get_workfile_info(self, folder_id, task_id, rootless_path): """Workfile info from database. Args: folder_id (str): Folder id. - task_name (str): Task id. - filepath (str): Workfile path. + task_id (str): Task id. + rootless_path (str): Workfile path. Returns: - Union[WorkfileInfo, None]: Workfile info or None if was passed + Optional[WorkfileInfo]: Workfile info or None if was passed invalid context. - """ + """ pass @abstractmethod - def save_workfile_info(self, folder_id, task_name, filepath, note): + def save_workfile_info( + self, + task_id, + rootless_path, + version=None, + comment=None, + description=None, + ): """Save workfile info to database. At this moment the only information which can be saved about - workfile is 'note'. + workfile is 'description'. - When 'note' is 'None' it is only validated if workfile info exists, - and if not then creates one with empty note. + If value of 'version', 'comment' or 'description' is 'None' it is not + added/updated to entity. Args: - folder_id (str): Folder id. - task_name (str): Task id. - filepath (str): Workfile path. - note (Union[str, None]): Note. - """ + task_id (str): Task id. + rootless_path (str): Rootless workfile path. + version (Optional[int]): Version of workfile. + comment (Optional[str]): User's comment (subversion). + description (Optional[str]): Workfile description. + """ pass # General commands @@ -985,8 +837,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Triggers 'controller.reset.started' event at the beginning and 'controller.reset.finished' at the end. - """ + """ pass # Controller actions @@ -998,8 +850,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): folder_id (str): Folder id. task_id (str): Task id. filepath (str): Workfile path. - """ + """ pass @abstractmethod @@ -1013,22 +865,27 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): self, folder_id, task_id, + rootless_workdir, workdir, filename, - template_key, - artist_note, + version, + comment, + description, ): """Save current state of workfile to workarea. Args: folder_id (str): Folder id. task_id (str): Task id. - workdir (str): Workarea directory. + rootless_workdir (str): Workarea directory. filename (str): Workarea filename. template_key (str): Template key used to get the workdir and filename. - """ + version (Optional[int]): Version of workfile. 
+ comment (Optional[str]): User's comment (subversion). + description (Optional[str]): Workfile description. + """ pass @abstractmethod @@ -1040,8 +897,10 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): task_id, workdir, filename, - template_key, - artist_note, + rootless_workdir, + version, + comment, + description, ): """Action to copy published workfile representation to workarea. @@ -1055,23 +914,40 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): task_id (str): Task id. workdir (str): Workarea directory. filename (str): Workarea filename. - template_key (str): Template key. - artist_note (str): Artist note. - """ + rootless_workdir (str): Rootless workdir. + version (int): Workfile version. + comment (str): User's comment (subversion). + description (str): Description note. + """ pass @abstractmethod - def duplicate_workfile(self, src_filepath, workdir, filename, artist_note): + def duplicate_workfile( + self, + folder_id, + task_id, + src_filepath, + rootless_workdir, + workdir, + filename, + description, + version, + comment + ): """Duplicate workfile. Workfiles is not opened when done. Args: + folder_id (str): Folder id. + task_id (str): Task id. src_filepath (str): Source workfile path. + rootless_workdir (str): Rootless workdir. workdir (str): Destination workdir. filename (str): Destination filename. - artist_note (str): Artist note. + version (int): Workfile version. + comment (str): User's comment (subversion). + description (str): Workfile description. """ - pass diff --git a/client/ayon_core/tools/workfiles/control.py b/client/ayon_core/tools/workfiles/control.py index 3a7459da0c..f0e0f0e416 100644 --- a/client/ayon_core/tools/workfiles/control.py +++ b/client/ayon_core/tools/workfiles/control.py @@ -1,33 +1,28 @@ import os -import shutil import ayon_api from ayon_core.host import IWorkfileHost -from ayon_core.lib import Logger, emit_event +from ayon_core.lib import Logger, get_ayon_username from ayon_core.lib.events import QueuedEventSystem -from ayon_core.settings import get_project_settings from ayon_core.pipeline import Anatomy, registered_host -from ayon_core.pipeline.context_tools import ( - change_current_context, - get_current_host_name, - get_global_context, -) -from ayon_core.pipeline.workfile import create_workdir_extra_folders - +from ayon_core.pipeline.context_tools import get_global_context +from ayon_core.settings import get_project_settings from ayon_core.tools.common_models import ( - HierarchyModel, HierarchyExpectedSelection, + HierarchyModel, ProjectsModel, UsersModel, ) from .abstract import ( - AbstractWorkfilesFrontend, AbstractWorkfilesBackend, + AbstractWorkfilesFrontend, ) from .models import SelectionModel, WorkfilesModel +NOT_SET = object() + class WorkfilesToolExpectedSelection(HierarchyExpectedSelection): def __init__(self, controller): @@ -140,12 +135,7 @@ class BaseWorkfileController( if host is None: host = registered_host() - host_is_valid = False - if host is not None: - missing_methods = ( - IWorkfileHost.get_missing_workfile_methods(host) - ) - host_is_valid = len(missing_methods) == 0 + host_is_valid = isinstance(host, IWorkfileHost) self._host = host self._host_is_valid = host_is_valid @@ -154,6 +144,7 @@ class BaseWorkfileController( self._project_settings = None self._event_system = None self._log = None + self._username = NOT_SET self._current_project_name = None self._current_folder_path = None @@ -182,7 +173,7 @@ class BaseWorkfileController( return UsersModel(self) def _create_workfiles_model(self): - 
return WorkfilesModel(self) + return WorkfilesModel(self._host, self) def _create_expected_selection_obj(self): return WorkfilesToolExpectedSelection(self) @@ -293,28 +284,14 @@ class BaseWorkfileController( # Host information def get_workfile_extensions(self): - host = self._host - if isinstance(host, IWorkfileHost): - return host.get_workfile_extensions() - return host.file_extensions() + return self._host.get_workfile_extensions() def has_unsaved_changes(self): - host = self._host - if isinstance(host, IWorkfileHost): - return host.workfile_has_unsaved_changes() - return host.has_unsaved_changes() + return self._host.workfile_has_unsaved_changes() # Current context def get_host_name(self): - host = self._host - if isinstance(host, IWorkfileHost): - return host.name - return get_current_host_name() - - def _get_host_current_context(self): - if hasattr(self._host, "get_current_context"): - return self._host.get_current_context() - return get_global_context() + return self._host.name def get_current_project_name(self): return self._current_project_name @@ -326,10 +303,7 @@ class BaseWorkfileController( return self._current_task_name def get_current_workfile(self): - host = self._host - if isinstance(host, IWorkfileHost): - return host.get_current_workfile() - return host.current_file() + return self._workfiles_model.get_current_workfile() # Selection information def get_selected_folder_id(self): @@ -350,8 +324,12 @@ class BaseWorkfileController( def get_selected_workfile_path(self): return self._selection_model.get_selected_workfile_path() - def set_selected_workfile_path(self, path): - self._selection_model.set_selected_workfile_path(path) + def set_selected_workfile_path( + self, rootless_path, path, workfile_entity_id + ): + self._selection_model.set_selected_workfile_path( + rootless_path, path, workfile_entity_id + ) def get_selected_representation_id(self): return self._selection_model.get_selected_representation_id() @@ -424,7 +402,7 @@ class BaseWorkfileController( def get_workarea_file_items(self, folder_id, task_name, sender=None): task_id = self._get_task_id(folder_id, task_name) return self._workfiles_model.get_workarea_file_items( - folder_id, task_id, task_name + folder_id, task_id ) def get_workarea_save_as_data(self, folder_id, task_id): @@ -450,28 +428,34 @@ class BaseWorkfileController( ) def get_published_file_items(self, folder_id, task_id): - task_name = None - if task_id: - task = self.get_task_entity( - self.get_current_project_name(), task_id - ) - task_name = task.get("name") - return self._workfiles_model.get_published_file_items( - folder_id, task_name) + folder_id, task_id + ) - def get_workfile_info(self, folder_id, task_name, filepath): - task_id = self._get_task_id(folder_id, task_name) + def get_workfile_info(self, folder_id, task_id, rootless_path): return self._workfiles_model.get_workfile_info( - folder_id, task_id, filepath + folder_id, task_id, rootless_path ) - def save_workfile_info(self, folder_id, task_name, filepath, note): - task_id = self._get_task_id(folder_id, task_name) + def save_workfile_info( + self, + task_id, + rootless_path, + version=None, + comment=None, + description=None, + ): self._workfiles_model.save_workfile_info( - folder_id, task_id, filepath, note + task_id, + rootless_path, + version, + comment, + description, ) + def get_workfile_entities(self, task_id): + return self._workfiles_model.get_workfile_entities(task_id) + def reset(self): if not self._host_is_valid: self._emit_event("controller.reset.started") @@ -509,6 +493,7 @@ 
class BaseWorkfileController( self._projects_model.reset() self._hierarchy_model.reset() + self._workfiles_model.reset() if not expected_folder_id: expected_folder_id = folder_id @@ -528,53 +513,31 @@ class BaseWorkfileController( # Controller actions def open_workfile(self, folder_id, task_id, filepath): - self._emit_event("open_workfile.started") - - failed = False - try: - self._open_workfile(folder_id, task_id, filepath) - - except Exception: - failed = True - self.log.warning("Open of workfile failed", exc_info=True) - - self._emit_event( - "open_workfile.finished", - {"failed": failed}, - ) + self._workfiles_model.open_workfile(folder_id, task_id, filepath) def save_current_workfile(self): - current_file = self.get_current_workfile() - self._host_save_workfile(current_file) + self._workfiles_model.save_current_workfile() def save_as_workfile( self, folder_id, task_id, + rootless_workdir, workdir, filename, - template_key, - artist_note, + version, + comment, + description, ): - self._emit_event("save_as.started") - - failed = False - try: - self._save_as_workfile( - folder_id, - task_id, - workdir, - filename, - template_key, - artist_note=artist_note, - ) - except Exception: - failed = True - self.log.warning("Save as failed", exc_info=True) - - self._emit_event( - "save_as.finished", - {"failed": failed}, + self._workfiles_model.save_as_workfile( + folder_id, + task_id, + rootless_workdir, + workdir, + filename, + version, + comment, + description, ) def copy_workfile_representation( @@ -585,63 +548,61 @@ class BaseWorkfileController( task_id, workdir, filename, - template_key, - artist_note, + rootless_workdir, + version, + comment, + description, ): - self._emit_event("copy_representation.started") - - failed = False - try: - self._save_as_workfile( - folder_id, - task_id, - workdir, - filename, - template_key, - artist_note, - src_filepath=representation_filepath - ) - except Exception: - failed = True - self.log.warning( - "Copy of workfile representation failed", exc_info=True - ) - - self._emit_event( - "copy_representation.finished", - {"failed": failed}, + self._workfiles_model.copy_workfile_representation( + representation_id, + representation_filepath, + folder_id, + task_id, + workdir, + filename, + rootless_workdir, + version, + comment, + description, ) - def duplicate_workfile(self, src_filepath, workdir, filename, artist_note): - self._emit_event("workfile_duplicate.started") - - failed = False - try: - dst_filepath = os.path.join(workdir, filename) - shutil.copy(src_filepath, dst_filepath) - except Exception: - failed = True - self.log.warning("Duplication of workfile failed", exc_info=True) - - self._emit_event( - "workfile_duplicate.finished", - {"failed": failed}, + def duplicate_workfile( + self, + folder_id, + task_id, + src_filepath, + rootless_workdir, + workdir, + filename, + version, + comment, + description + ): + self._workfiles_model.duplicate_workfile( + folder_id, + task_id, + src_filepath, + rootless_workdir, + workdir, + filename, + version, + comment, + description, ) - # Helper host methods that resolve 'IWorkfileHost' interface - def _host_open_workfile(self, filepath): - host = self._host - if isinstance(host, IWorkfileHost): - host.open_workfile(filepath) - else: - host.open_file(filepath) + def get_my_tasks_entity_ids(self, project_name: str): + username = self._get_my_username() + assignees = [] + if username: + assignees.append(username) + return self._hierarchy_model.get_entity_ids_for_assignees( + project_name, assignees + ) - def 
_host_save_workfile(self, filepath): - host = self._host - if isinstance(host, IWorkfileHost): - host.save_workfile(filepath) - else: - host.save_file(filepath) + def _get_my_username(self): + if self._username is NOT_SET: + self._username = get_ayon_username() + return self._username def _emit_event(self, topic, data=None): self.emit_event(topic, data, "controller") @@ -657,6 +618,11 @@ class BaseWorkfileController( return None return task_item.id + def _get_host_current_context(self): + if hasattr(self._host, "get_current_context"): + return self._host.get_current_context() + return get_global_context() + # Expected selection # - expected selection is used to restore selection after refresh # or when current context should be used @@ -665,123 +631,3 @@ class BaseWorkfileController( "expected_selection_changed", self._expected_selection.get_expected_selection_data(), ) - - def _get_event_context_data( - self, project_name, folder_id, task_id, folder=None, task=None - ): - if folder is None: - folder = self.get_folder_entity(project_name, folder_id) - if task is None: - task = self.get_task_entity(project_name, task_id) - return { - "project_name": project_name, - "folder_id": folder_id, - "folder_path": folder["path"], - "task_id": task_id, - "task_name": task["name"], - "host_name": self.get_host_name(), - } - - def _open_workfile(self, folder_id, task_id, filepath): - project_name = self.get_current_project_name() - event_data = self._get_event_context_data( - project_name, folder_id, task_id - ) - event_data["filepath"] = filepath - - emit_event("workfile.open.before", event_data, source="workfiles.tool") - - # Change context - task_name = event_data["task_name"] - if ( - folder_id != self.get_current_folder_id() - or task_name != self.get_current_task_name() - ): - self._change_current_context(project_name, folder_id, task_id) - - self._host_open_workfile(filepath) - - emit_event("workfile.open.after", event_data, source="workfiles.tool") - - def _save_as_workfile( - self, - folder_id: str, - task_id: str, - workdir: str, - filename: str, - template_key: str, - artist_note: str, - src_filepath=None, - ): - # Trigger before save event - project_name = self.get_current_project_name() - folder = self.get_folder_entity(project_name, folder_id) - task = self.get_task_entity(project_name, task_id) - task_name = task["name"] - - # QUESTION should the data be different for 'before' and 'after'? 
- event_data = self._get_event_context_data( - project_name, folder_id, task_id, folder, task - ) - event_data.update({ - "filename": filename, - "workdir_path": workdir, - }) - - emit_event("workfile.save.before", event_data, source="workfiles.tool") - - # Create workfiles root folder - if not os.path.exists(workdir): - self.log.debug("Initializing work directory: %s", workdir) - os.makedirs(workdir) - - # Change context - if ( - folder_id != self.get_current_folder_id() - or task_name != self.get_current_task_name() - ): - self._change_current_context( - project_name, folder_id, task_id, template_key - ) - - # Save workfile - dst_filepath = os.path.join(workdir, filename) - if src_filepath: - shutil.copyfile(src_filepath, dst_filepath) - self._host_open_workfile(dst_filepath) - else: - self._host_save_workfile(dst_filepath) - - # Make sure workfile info exists - if not artist_note: - artist_note = None - self.save_workfile_info( - folder_id, task_name, dst_filepath, note=artist_note - ) - - # Create extra folders - create_workdir_extra_folders( - workdir, - self.get_host_name(), - task["taskType"], - task_name, - project_name - ) - - # Trigger after save events - emit_event("workfile.save.after", event_data, source="workfiles.tool") - - def _change_current_context( - self, project_name, folder_id, task_id, template_key=None - ): - # Change current context - folder_entity = self.get_folder_entity(project_name, folder_id) - task_entity = self.get_task_entity(project_name, task_id) - change_current_context( - folder_entity, - task_entity, - template_key=template_key - ) - self._current_folder_id = folder_entity["id"] - self._current_folder_path = folder_entity["path"] - self._current_task_name = task_entity["name"] diff --git a/client/ayon_core/tools/workfiles/models/selection.py b/client/ayon_core/tools/workfiles/models/selection.py index 2f0896842d..9a6440b2a1 100644 --- a/client/ayon_core/tools/workfiles/models/selection.py +++ b/client/ayon_core/tools/workfiles/models/selection.py @@ -62,7 +62,9 @@ class SelectionModel(object): def get_selected_workfile_path(self): return self._workfile_path - def set_selected_workfile_path(self, path): + def set_selected_workfile_path( + self, rootless_path, path, workfile_entity_id + ): if path == self._workfile_path: return @@ -72,9 +74,11 @@ class SelectionModel(object): { "project_name": self._controller.get_current_project_name(), "path": path, + "rootless_path": rootless_path, "folder_id": self._folder_id, "task_name": self._task_name, "task_id": self._task_id, + "workfile_entity_id": workfile_entity_id, }, self.event_source ) diff --git a/client/ayon_core/tools/workfiles/models/workfiles.py b/client/ayon_core/tools/workfiles/models/workfiles.py index cc034571f3..5b5591fe43 100644 --- a/client/ayon_core/tools/workfiles/models/workfiles.py +++ b/client/ayon_core/tools/workfiles/models/workfiles.py @@ -1,13 +1,31 @@ +from __future__ import annotations import os -import re import copy -import uuid +import platform +import typing +from typing import Optional, Any -import arrow import ayon_api -from ayon_api.operations import OperationsSession -from ayon_core.lib import get_ayon_username +from ayon_core.lib import ( + get_ayon_username, + NestedCacheItem, + CacheItem, + Logger, +) +from ayon_core.host import ( + IWorkfileHost, + WorkfileInfo, + PublishedWorkfileInfo, +) +from ayon_core.host.interfaces import ( + OpenWorkfileOptionalData, + ListWorkfilesOptionalData, + ListPublishedWorkfilesOptionalData, + SaveWorkfileOptionalData, + 
CopyWorkfileOptionalData, + CopyPublishedWorkfileOptionalData, +) from ayon_core.pipeline.template_data import ( get_template_data, get_task_template_data, @@ -16,147 +34,335 @@ from ayon_core.pipeline.template_data import ( from ayon_core.pipeline.workfile import ( get_workdir_with_workdir_data, get_workfile_template_key, - get_last_workfile_with_version, + save_workfile_info, ) from ayon_core.pipeline.version_start import get_versioning_start from ayon_core.tools.workfiles.abstract import ( WorkareaFilepathResult, - FileItem, - WorkfileInfo, + AbstractWorkfilesBackend, ) +if typing.TYPE_CHECKING: + from ayon_core.pipeline import Anatomy + _NOT_SET = object() -class CommentMatcher(object): - """Use anatomy and work file data to parse comments from filenames. +class WorkfilesModel: + """Workfiles model.""" - Args: - extensions (set[str]): Set of extensions. - file_template (AnatomyStringTemplate): File template. - data (dict[str, Any]): Data to fill the template with. + def __init__( + self, + host: IWorkfileHost, + controller: AbstractWorkfilesBackend + ): + self._host: IWorkfileHost = host + self._controller: AbstractWorkfilesBackend = controller - """ - def __init__(self, extensions, file_template, data): - self.fname_regex = None - - if "{comment}" not in file_template: - # Don't look for comment if template doesn't allow it - return - - # Create a regex group for extensions - any_extension = "(?:{})".format( - "|".join(re.escape(ext.lstrip(".")) for ext in extensions) - ) - - # Use placeholders that will never be in the filename - temp_data = copy.deepcopy(data) - temp_data["comment"] = "<>" - temp_data["version"] = "<>" - temp_data["ext"] = "<>" - - fname_pattern = file_template.format_strict(temp_data) - fname_pattern = re.escape(fname_pattern) - - # Replace comment and version with something we can match with regex - replacements = { - "<>": "(.+)", - "<>": "[0-9]+", - "<>": any_extension, - } - for src, dest in replacements.items(): - fname_pattern = fname_pattern.replace(re.escape(src), dest) - - # Match from beginning to end of string to be safe - fname_pattern = "^{}$".format(fname_pattern) - - self.fname_regex = re.compile(fname_pattern) - - def parse_comment(self, filepath): - """Parse the {comment} part from a filename""" - if not self.fname_regex: - return - - fname = os.path.basename(filepath) - match = self.fname_regex.match(fname) - if match: - return match.group(1) - - -class WorkareaModel: - """Workfiles model looking for workfiles in workare folder. - - Workarea folder is usually task and host specific, defined by - anatomy templates. Is looking for files with extensions defined - by host integration. 
- """ - - def __init__(self, controller): - self._controller = controller + self._log = Logger.get_logger("WorkfilesModel") extensions = None if controller.is_host_valid(): extensions = controller.get_workfile_extensions() - self._extensions = extensions + self._extensions: Optional[set[str]] = extensions + + self._current_username = _NOT_SET + + # Workarea self._base_data = None self._fill_data_by_folder_id = {} self._task_data_by_folder_id = {} self._workdir_by_context = {} + self._workarea_file_items_mapping = {} + self._workarea_file_items_cache = NestedCacheItem( + levels=1, default_factory=list + ) - @property - def project_name(self): - return self._controller.get_current_project_name() + # Published workfiles + self._repre_by_id = {} + self._published_workfile_items_cache = NestedCacheItem( + levels=1, default_factory=list + ) + + # Entities + self._workfile_entities_by_task_id = {} def reset(self): self._base_data = None self._fill_data_by_folder_id = {} self._task_data_by_folder_id = {} + self._workdir_by_context = {} + self._workarea_file_items_mapping = {} + self._workarea_file_items_cache.reset() - def _get_base_data(self): - if self._base_data is None: - base_data = get_template_data( - ayon_api.get_project(self.project_name) - ) - base_data["app"] = self._controller.get_host_name() - self._base_data = base_data - return copy.deepcopy(self._base_data) + self._repre_by_id = {} + self._published_workfile_items_cache.reset() - def _get_folder_data(self, folder_id): - fill_data = self._fill_data_by_folder_id.get(folder_id) - if fill_data is None: - folder = self._controller.get_folder_entity( - self.project_name, folder_id - ) - fill_data = get_folder_template_data(folder, self.project_name) - self._fill_data_by_folder_id[folder_id] = fill_data - return copy.deepcopy(fill_data) + self._workfile_entities_by_task_id = {} - def _get_task_data(self, project_entity, folder_id, task_id): - task_data = self._task_data_by_folder_id.setdefault(folder_id, {}) - if task_id not in task_data: - task = self._controller.get_task_entity( - self.project_name, task_id - ) - if task: - task_data[task_id] = get_task_template_data( - project_entity, task) - return copy.deepcopy(task_data[task_id]) + # Host functionality + def get_current_workfile(self): + return self._host.get_current_workfile() - def _prepare_fill_data(self, folder_id, task_id): - if not folder_id or not task_id: - return {} + def open_workfile(self, folder_id, task_id, filepath): + self._emit_event("open_workfile.started") - base_data = self._get_base_data() - project_name = base_data["project"]["name"] - folder_data = self._get_folder_data(folder_id) + failed = False + try: + self._open_workfile(folder_id, task_id, filepath) + + except Exception: + failed = True + self._log.warning("Open of workfile failed", exc_info=True) + + self._emit_event( + "open_workfile.finished", + {"failed": failed}, + ) + + def save_current_workfile(self): + current_file = self.get_current_workfile() + self._host.save_workfile(current_file) + + def save_as_workfile( + self, + folder_id, + task_id, + rootless_workdir, + workdir, + filename, + version, + comment, + description, + ): + self._emit_event("save_as.started") + + filepath = os.path.join(workdir, filename) + rootless_path = f"{rootless_workdir}/{filename}" + project_name = self._controller.get_current_project_name() project_entity = self._controller.get_project_entity(project_name) - task_data = self._get_task_data(project_entity, folder_id, task_id) + folder_entity = 
self._controller.get_folder_entity( + project_name, folder_id + ) + task_entity = self._controller.get_task_entity( + project_name, task_id + ) - base_data.update(folder_data) - base_data.update(task_data) + prepared_data = SaveWorkfileOptionalData( + project_entity=project_entity, + anatomy=self._controller.project_anatomy, + project_settings=self._controller.project_settings, + rootless_path=rootless_path, + workfile_entities=self.get_workfile_entities(task_id), + ) + failed = False + try: + self._host.save_workfile_with_context( + filepath, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + prepared_data=prepared_data, + ) + self._update_workfile_info( + task_id, rootless_path, description + ) + self._update_current_context( + folder_id, folder_entity["path"], task_entity["name"] + ) - return base_data + except Exception: + failed = True + self._log.warning("Save as failed", exc_info=True) - def get_workarea_dir_by_context(self, folder_id, task_id): + self._emit_event( + "save_as.finished", + {"failed": failed}, + ) + + def copy_workfile_representation( + self, + representation_id, + representation_filepath, + folder_id, + task_id, + workdir, + filename, + rootless_workdir, + version, + comment, + description, + ): + self._emit_event("copy_representation.started") + + project_name = self._project_name + project_entity = self._controller.get_project_entity(project_name) + folder_entity = self._controller.get_folder_entity( + project_name, folder_id + ) + task_entity = self._controller.get_task_entity( + project_name, task_id + ) + repre_entity = self._repre_by_id.get(representation_id) + dst_filepath = os.path.join(workdir, filename) + rootless_path = f"{rootless_workdir}/{filename}" + + prepared_data = CopyPublishedWorkfileOptionalData( + project_entity=project_entity, + anatomy=self._controller.project_anatomy, + project_settings=self._controller.project_settings, + rootless_path=rootless_path, + representation_path=representation_filepath, + workfile_entities=self.get_workfile_entities(task_id), + src_anatomy=self._controller.project_anatomy, + ) + failed = False + try: + self._host.copy_workfile_representation( + project_name, + repre_entity, + dst_filepath, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + prepared_data=prepared_data, + ) + self._update_workfile_info( + task_id, rootless_path, description + ) + self._update_current_context( + folder_id, folder_entity["path"], task_entity["name"] + ) + + except Exception: + failed = True + self._log.warning( + "Copy of workfile representation failed", exc_info=True + ) + + self._emit_event( + "copy_representation.finished", + {"failed": failed}, + ) + + def duplicate_workfile( + self, + folder_id, + task_id, + src_filepath, + rootless_workdir, + workdir, + filename, + version, + comment, + description + ): + self._emit_event("workfile_duplicate.started") + + project_name = self._controller.get_current_project_name() + project_entity = self._controller.get_project_entity(project_name) + folder_entity = self._controller.get_folder_entity( + project_name, folder_id + ) + task_entity = self._controller.get_task_entity(project_name, task_id) + workfile_entities = self.get_workfile_entities(task_id) + rootless_path = f"{rootless_workdir}/{filename}" + workfile_path = os.path.join(workdir, filename) + + prepared_data = CopyWorkfileOptionalData( + project_entity=project_entity, + project_settings=self._controller.project_settings, + 
anatomy=self._controller.project_anatomy, + rootless_path=rootless_path, + workfile_entities=workfile_entities, + ) + failed = False + try: + self._host.copy_workfile( + src_filepath, + workfile_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + prepared_data=prepared_data, + ) + + except Exception: + failed = True + self._log.warning("Duplication of workfile failed", exc_info=True) + + self._emit_event( + "workfile_duplicate.finished", + {"failed": failed}, + ) + + def get_workfile_entities(self, task_id: str): + if not task_id: + return [] + workfile_entities = self._workfile_entities_by_task_id.get(task_id) + if workfile_entities is None: + workfile_entities = list(ayon_api.get_workfiles_info( + self._project_name, + task_ids=[task_id], + )) + self._workfile_entities_by_task_id[task_id] = workfile_entities + return workfile_entities + + def get_workfile_info( + self, + folder_id: Optional[str], + task_id: Optional[str], + rootless_path: Optional[str] + ): + if not folder_id or not task_id or not rootless_path: + return None + + mapping = self._workarea_file_items_mapping.get(task_id) + if mapping is None: + self._cache_file_items(folder_id, task_id) + mapping = self._workarea_file_items_mapping[task_id] + return mapping.get(rootless_path) + + def save_workfile_info( + self, + task_id: str, + rootless_path: str, + version: Optional[int], + comment: Optional[str], + description: Optional[str], + ): + self._save_workfile_info( + task_id, + rootless_path, + version, + comment, + description, + ) + + self._update_file_description( + task_id, rootless_path, description + ) + + def get_workarea_dir_by_context( + self, folder_id: str, task_id: str + ) -> Optional[str]: + """Workarea dir for passed context. + + The directory path is based on project anatomy templates. + + Args: + folder_id (str): Folder id. + task_id (str): Task id. + + Returns: + Optional[str]: Workarea dir path or None for invalid context. + + """ if not folder_id or not task_id: return None folder_mapping = self._workdir_by_context.setdefault(folder_id, {}) @@ -168,164 +374,48 @@ class WorkareaModel: workdir = get_workdir_with_workdir_data( workdir_data, - self.project_name, + self._project_name, anatomy=self._controller.project_anatomy, ) folder_mapping[task_id] = workdir return workdir - def get_file_items(self, folder_id, task_id, task_name): - items = [] - if not folder_id or not task_id: - return items - - workdir = self.get_workarea_dir_by_context(folder_id, task_id) - if not os.path.exists(workdir): - return items - - for filename in os.listdir(workdir): - # We want to support both files and folders. e.g. Silhoutte uses - # folders as its project files. So we do not check whether it is - # a file or not. 
- filepath = os.path.join(workdir, filename) - - ext = os.path.splitext(filename)[1].lower() - if ext not in self._extensions: - continue - - workfile_info = self._controller.get_workfile_info( - folder_id, task_name, filepath - ) - modified = os.path.getmtime(filepath) - items.append(FileItem( - workdir, - filename, - modified, - workfile_info.created_by, - workfile_info.updated_by, - )) - return items - - def _get_template_key(self, fill_data): - task_type = fill_data.get("task", {}).get("type") - # TODO cache - return get_workfile_template_key( - self.project_name, - task_type, - self._controller.get_host_name(), - ) - - def _get_last_workfile_version( - self, workdir, file_template, fill_data, extensions - ): - """ - - Todos: - Validate if logic of this function is correct. It does return - last version + 1 which might be wrong. + def get_workarea_file_items(self, folder_id, task_id): + """Workfile items for passed context from workarea. Args: - workdir (str): Workdir path. - file_template (str): File template. - fill_data (dict[str, Any]): Fill data. - extensions (set[str]): Extensions. + folder_id (Optional[str]): Folder id. + task_id (Optional[str]): Task id. Returns: - int: Next workfile version. + list[WorkfileInfo]: List of file items matching workarea of passed + context. """ - version = get_last_workfile_with_version( - workdir, file_template, fill_data, extensions - )[1] + return self._cache_file_items(folder_id, task_id) - if version is None: - task_info = fill_data.get("task", {}) - version = get_versioning_start( - self.project_name, - self._controller.get_host_name(), - task_name=task_info.get("name"), - task_type=task_info.get("type"), - product_type="workfile", - project_settings=self._controller.project_settings, - ) - else: - version += 1 - return version - - def _get_comments_from_root( - self, - file_template, - extensions, - fill_data, - root, - current_filename, - ): - """Get comments from root directory. - - Args: - file_template (AnatomyStringTemplate): File template. - extensions (set[str]): Extensions. - fill_data (dict[str, Any]): Fill data. - root (str): Root directory. - current_filename (str): Current filename. - - Returns: - Tuple[list[str], Union[str, None]]: Comment hints and current - comment. 
- - """ - current_comment = None - filenames = [] - if root and os.path.exists(root): - for filename in os.listdir(root): - path = os.path.join(root, filename) - if not os.path.isfile(path): - continue - - ext = os.path.splitext(filename)[-1].lower() - if ext in extensions: - filenames.append(filename) - - if not filenames: - return [], current_comment - - matcher = CommentMatcher(extensions, file_template, fill_data) - - comment_hints = set() - for filename in filenames: - comment = matcher.parse_comment(filename) - if comment: - comment_hints.add(comment) - if filename == current_filename: - current_comment = comment - - return list(comment_hints), current_comment - - def _get_workdir(self, anatomy, template_key, fill_data): - directory_template = anatomy.get_template_item( - "work", template_key, "directory" - ) - return directory_template.format_strict(fill_data).normalized() - - def get_workarea_save_as_data(self, folder_id, task_id): + def get_workarea_save_as_data( + self, folder_id: Optional[str], task_id: Optional[str] + ) -> dict[str, Any]: folder_entity = None task_entity = None if folder_id: folder_entity = self._controller.get_folder_entity( - self.project_name, folder_id + self._project_name, folder_id ) if folder_entity and task_id: task_entity = self._controller.get_task_entity( - self.project_name, task_id + self._project_name, task_id ) - if not folder_entity or not task_entity: + if not folder_entity or not task_entity or self._extensions is None: return { "template_key": None, "template_has_version": None, "template_has_comment": None, "ext": None, "workdir": None, + "rootless_workdir": None, "comment": None, "comment_hints": None, "last_version": None, @@ -349,6 +439,17 @@ class WorkareaModel: workdir = self._get_workdir(anatomy, template_key, fill_data) + rootless_workdir = workdir + if platform.system().lower() == "windows": + rootless_workdir = rootless_workdir.replace("\\", "/") + + used_roots = workdir.used_values.get("root") + if used_roots: + used_root_name = next(iter(used_roots)) + root_value = used_roots[used_root_name] + workdir_end = rootless_workdir[len(root_value):].lstrip("/") + rootless_workdir = f"{{root[{used_root_name}]}}/{workdir_end}" + file_template = anatomy.get_template_item( "work", template_key, "file" ) @@ -357,15 +458,21 @@ class WorkareaModel: template_has_version = "{version" in file_template_str template_has_comment = "{comment" in file_template_str - comment_hints, comment = self._get_comments_from_root( - file_template, - extensions, - fill_data, - workdir, - current_filename, - ) + file_items = self.get_workarea_file_items(folder_id, task_id) + comment_hints = set() + comment = None + for item in file_items: + filepath = item.filepath + filename = os.path.basename(filepath) + if filename == current_filename: + comment = item.comment + + if item.comment: + comment_hints.add(item.comment) + comment_hints = list(comment_hints) + last_version = self._get_last_workfile_version( - workdir, file_template_str, fill_data, extensions + file_items, task_entity ) return { @@ -374,6 +481,7 @@ class WorkareaModel: "template_has_comment": template_has_comment, "ext": current_ext, "workdir": workdir, + "rootless_workdir": rootless_workdir, "comment": comment, "comment_hints": comment_hints, "last_version": last_version, @@ -382,13 +490,13 @@ class WorkareaModel: def fill_workarea_filepath( self, - folder_id, - task_id, - extension, - use_last_version, - version, - comment, - ): + folder_id: str, + task_id: str, + extension: str, + use_last_version: 
bool, + version: int, + comment: str, + ) -> WorkareaFilepathResult: """Fill workarea filepath based on context. Args: @@ -415,8 +523,12 @@ class WorkareaModel: ) if use_last_version: + file_items = self.get_workarea_file_items(folder_id, task_id) + task_entity = self._controller.get_task_entity( + self._project_name, task_id + ) version = self._get_last_workfile_version( - workdir, file_template.template, fill_data, self._extensions + file_items, task_entity ) fill_data["version"] = version fill_data["ext"] = extension.lstrip(".") @@ -439,374 +551,350 @@ class WorkareaModel: exists ) - -class WorkfileEntitiesModel: - """Workfile entities model. - - Args: - control (AbstractWorkfileController): Controller object. - """ - - def __init__(self, controller): - self._controller = controller - self._cache = {} - self._items = {} - self._current_username = _NOT_SET - - def _get_workfile_info_identifier( - self, folder_id, task_id, rootless_path - ): - return "_".join([folder_id, task_id, rootless_path]) - - def _get_rootless_path(self, filepath): - anatomy = self._controller.project_anatomy - - workdir, filename = os.path.split(filepath) - _, rootless_dir = anatomy.find_root_template_from_path(workdir) - return "/".join([ - os.path.normpath(rootless_dir).replace("\\", "/"), - filename - ]) - - def _prepare_workfile_info_item( - self, folder_id, task_id, workfile_info, filepath - ): - note = "" - created_by = None - updated_by = None - if workfile_info: - note = workfile_info["attrib"].get("description") or "" - created_by = workfile_info.get("createdBy") - updated_by = workfile_info.get("updatedBy") - - filestat = os.stat(filepath) - return WorkfileInfo( - folder_id, - task_id, - filepath, - filesize=filestat.st_size, - creation_time=filestat.st_ctime, - modification_time=filestat.st_mtime, - created_by=created_by, - updated_by=updated_by, - note=note - ) - - def _get_workfile_info(self, folder_id, task_id, identifier): - workfile_info = self._cache.get(identifier) - if workfile_info is not None: - return workfile_info - - for workfile_info in ayon_api.get_workfiles_info( - self._controller.get_current_project_name(), - task_ids=[task_id], - fields=["id", "path", "attrib", "createdBy", "updatedBy"], - ): - workfile_identifier = self._get_workfile_info_identifier( - folder_id, task_id, workfile_info["path"] - ) - self._cache[workfile_identifier] = workfile_info - return self._cache.get(identifier) - - def get_workfile_info( - self, folder_id, task_id, filepath, rootless_path=None - ): - if not folder_id or not task_id or not filepath: - return None - - if rootless_path is None: - rootless_path = self._get_rootless_path(filepath) - - identifier = self._get_workfile_info_identifier( - folder_id, task_id, rootless_path) - item = self._items.get(identifier) - if item is None: - workfile_info = self._get_workfile_info( - folder_id, task_id, identifier - ) - item = self._prepare_workfile_info_item( - folder_id, task_id, workfile_info, filepath - ) - self._items[identifier] = item - return item - - def save_workfile_info(self, folder_id, task_id, filepath, note): - rootless_path = self._get_rootless_path(filepath) - identifier = self._get_workfile_info_identifier( - folder_id, task_id, rootless_path - ) - workfile_info = self._get_workfile_info( - folder_id, task_id, identifier - ) - if not workfile_info: - self._cache[identifier] = self._create_workfile_info_entity( - task_id, rootless_path, note or "") - self._items.pop(identifier, None) - return - - old_note = workfile_info.get("attrib", 
{}).get("note") - - new_workfile_info = copy.deepcopy(workfile_info) - update_data = {} - if note is not None and old_note != note: - update_data["attrib"] = {"description": note} - attrib = new_workfile_info.setdefault("attrib", {}) - attrib["description"] = note - - username = self._get_current_username() - # Automatically fix 'createdBy' and 'updatedBy' fields - # NOTE both fields were not automatically filled by server - # until 1.1.3 release. - if workfile_info.get("createdBy") is None: - update_data["createdBy"] = username - new_workfile_info["createdBy"] = username - - if workfile_info.get("updatedBy") != username: - update_data["updatedBy"] = username - new_workfile_info["updatedBy"] = username - - if not update_data: - return - - self._cache[identifier] = new_workfile_info - self._items.pop(identifier, None) - - project_name = self._controller.get_current_project_name() - - session = OperationsSession() - session.update_entity( - project_name, - "workfile", - workfile_info["id"], - update_data, - ) - session.commit() - - def _create_workfile_info_entity(self, task_id, rootless_path, note): - extension = os.path.splitext(rootless_path)[1] - - project_name = self._controller.get_current_project_name() - - username = self._get_current_username() - workfile_info = { - "id": uuid.uuid4().hex, - "path": rootless_path, - "taskId": task_id, - "attrib": { - "extension": extension, - "description": note - }, - # TODO remove 'createdBy' and 'updatedBy' fields when server is - # or above 1.1.3 . - "createdBy": username, - "updatedBy": username, - } - - session = OperationsSession() - session.create_entity(project_name, "workfile", workfile_info) - session.commit() - return workfile_info - - def _get_current_username(self): - if self._current_username is _NOT_SET: - self._current_username = get_ayon_username() - return self._current_username - - -class PublishWorkfilesModel: - """Model for handling of published workfiles. - - Todos: - Cache workfiles products and representations for some time. - Note Representations won't change. Only what can change are - versions. 
- """ - - def __init__(self, controller): - self._controller = controller - self._cached_extensions = None - self._cached_repre_extensions = None - - @property - def _extensions(self): - if self._cached_extensions is None: - exts = self._controller.get_workfile_extensions() or [] - self._cached_extensions = exts - return self._cached_extensions - - @property - def _repre_extensions(self): - if self._cached_repre_extensions is None: - self._cached_repre_extensions = { - ext.lstrip(".") for ext in self._extensions - } - return self._cached_repre_extensions - - def _file_item_from_representation( - self, repre_entity, project_anatomy, author, task_name=None - ): - if task_name is not None: - task_info = repre_entity["context"].get("task") - if not task_info or task_info["name"] != task_name: - return None - - # Filter by extension - extensions = self._repre_extensions - workfile_path = None - for repre_file in repre_entity["files"]: - ext = ( - os.path.splitext(repre_file["name"])[1] - .lower() - .lstrip(".") - ) - if ext in extensions: - workfile_path = repre_file["path"] - break - - if not workfile_path: - return None - - try: - workfile_path = workfile_path.format( - root=project_anatomy.roots) - except Exception as exc: - print("Failed to format workfile path: {}".format(exc)) - - dirpath, filename = os.path.split(workfile_path) - created_at = arrow.get(repre_entity["createdAt"]).to("local") - return FileItem( - dirpath, - filename, - created_at.float_timestamp, - author, - None, - repre_entity["id"] - ) - - def get_file_items(self, folder_id, task_name): - # TODO refactor to use less server API calls - project_name = self._controller.get_current_project_name() - # Get subset docs of folder - product_entities = ayon_api.get_products( - project_name, - folder_ids={folder_id}, - product_types={"workfile"}, - fields={"id", "name"} - ) - - output = [] - product_ids = {product["id"] for product in product_entities} - if not product_ids: - return output - - # Get version docs of products with their families - version_entities = ayon_api.get_versions( - project_name, - product_ids=product_ids, - fields={"id", "author"} - ) - versions_by_id = { - version["id"]: version - for version in version_entities - } - if not versions_by_id: - return output - - # Query representations of filtered versions and add filter for - # extension - repre_entities = ayon_api.get_representations( - project_name, - version_ids=set(versions_by_id) - ) - project_anatomy = self._controller.project_anatomy - - # Filter queried representations by task name if task is set - file_items = [] - for repre_entity in repre_entities: - version_id = repre_entity["versionId"] - version_entity = versions_by_id[version_id] - file_item = self._file_item_from_representation( - repre_entity, - project_anatomy, - version_entity["author"], - task_name, - ) - if file_item is not None: - file_items.append(file_item) - - return file_items - - -class WorkfilesModel: - """Workfiles model.""" - - def __init__(self, controller): - self._controller = controller - - self._entities_model = WorkfileEntitiesModel(controller) - self._workarea_model = WorkareaModel(controller) - self._published_model = PublishWorkfilesModel(controller) - - def get_workfile_info(self, folder_id, task_id, filepath): - return self._entities_model.get_workfile_info( - folder_id, task_id, filepath - ) - - def save_workfile_info(self, folder_id, task_id, filepath, note): - self._entities_model.save_workfile_info( - folder_id, task_id, filepath, note - ) - - def 
get_workarea_dir_by_context(self, folder_id, task_id): - """Workarea dir for passed context. - - The directory path is based on project anatomy templates. + def get_published_file_items( + self, folder_id: str, task_id: str + ) -> list[PublishedWorkfileInfo]: + """Published workfiles for passed context. Args: folder_id (str): Folder id. task_id (str): Task id. Returns: - Union[str, None]: Workarea dir path or None for invalid context. + list[PublishedWorkfileInfo]: List of files for published workfiles. + """ + if not folder_id: + return [] - return self._workarea_model.get_workarea_dir_by_context( - folder_id, task_id) + cache = self._published_workfile_items_cache[folder_id] + if not cache.is_valid: + project_name = self._project_name + anatomy = self._controller.project_anatomy - def get_workarea_file_items(self, folder_id, task_id, task_name): - """Workfile items for passed context from workarea. + product_entities = list(ayon_api.get_products( + project_name, + folder_ids={folder_id}, + product_types={"workfile"}, + fields={"id", "name"} + )) - Args: - folder_id (Union[str, None]): Folder id. - task_id (Union[str, None]): Task id. - task_name (Union[str, None]): Task name. + version_entities = [] + product_ids = {product["id"] for product in product_entities} + if product_ids: + # Get version docs of products with their families + version_entities = list(ayon_api.get_versions( + project_name, + product_ids=product_ids, + fields={"id", "author", "taskId"}, + )) - Returns: - list[FileItem]: List of file items matching workarea of passed - context. - """ - return self._workarea_model.get_file_items( - folder_id, task_id, task_name + repre_entities = [] + if version_entities: + repre_entities = list(ayon_api.get_representations( + project_name, + version_ids={v["id"] for v in version_entities} + )) + + self._repre_by_id.update({ + repre_entity["id"]: repre_entity + for repre_entity in repre_entities + }) + project_entity = self._controller.get_project_entity(project_name) + + prepared_data = ListPublishedWorkfilesOptionalData( + project_entity=project_entity, + anatomy=anatomy, + project_settings=self._controller.project_settings, + product_entities=product_entities, + version_entities=version_entities, + repre_entities=repre_entities, + ) + cache.update_data(self._host.list_published_workfiles( + project_name, + folder_id, + prepared_data=prepared_data, + )) + + items = cache.get_data() + + if task_id: + items = [ + item + for item in items + if item.task_id == task_id + ] + return items + + @property + def _project_name(self) -> str: + return self._controller.get_current_project_name() + + @property + def _host_name(self) -> str: + return self._host.name + + def _emit_event(self, topic, data=None): + self._controller.emit_event(topic, data, "workfiles") + + def _get_current_username(self) -> str: + if self._current_username is _NOT_SET: + self._current_username = get_ayon_username() + return self._current_username + + # --- Host --- + def _open_workfile(self, folder_id: str, task_id: str, filepath: str): + # TODO move to workfiles pipeline + project_name = self._project_name + project_entity = self._controller.get_project_entity(project_name) + folder_entity = self._controller.get_folder_entity( + project_name, folder_id + ) + task_entity = self._controller.get_task_entity( + project_name, task_id + ) + prepared_data = OpenWorkfileOptionalData( + project_entity=project_entity, + anatomy=self._controller.project_anatomy, + project_settings=self._controller.project_settings, + ) + 
self._host.open_workfile_with_context( + filepath, folder_entity, task_entity, prepared_data=prepared_data + ) + self._update_current_context( + folder_id, folder_entity["path"], task_entity["name"] ) - def get_workarea_save_as_data(self, folder_id, task_id): - return self._workarea_model.get_workarea_save_as_data( - folder_id, task_id) + def _update_current_context(self, folder_id, folder_path, task_name): + self._current_folder_id = folder_id + self._current_folder_path = folder_path + self._current_task_name = task_name - def fill_workarea_filepath(self, *args, **kwargs): - return self._workarea_model.fill_workarea_filepath( - *args, **kwargs + # --- Workarea --- + def _reset_workarea_file_items(self, task_id: str): + cache: CacheItem = self._workarea_file_items_cache[task_id] + cache.set_invalid() + self._workarea_file_items_mapping.pop(task_id, None) + + def _get_base_data(self) -> dict[str, Any]: + if self._base_data is None: + base_data = get_template_data( + ayon_api.get_project(self._project_name), + host_name=self._host_name, + ) + self._base_data = base_data + return copy.deepcopy(self._base_data) + + def _get_folder_data(self, folder_id: str) -> dict[str, Any]: + fill_data = self._fill_data_by_folder_id.get(folder_id) + if fill_data is None: + folder = self._controller.get_folder_entity( + self._project_name, folder_id + ) + fill_data = get_folder_template_data(folder, self._project_name) + self._fill_data_by_folder_id[folder_id] = fill_data + return copy.deepcopy(fill_data) + + def _get_task_data( + self, + project_entity: dict[str, Any], + folder_id: str, + task_id: str + ) -> dict[str, Any]: + task_data = self._task_data_by_folder_id.setdefault(folder_id, {}) + if task_id not in task_data: + task_entity = self._controller.get_task_entity( + self._project_name, task_id + ) + if task_entity: + task_data[task_id] = get_task_template_data( + project_entity, task_entity + ) + return copy.deepcopy(task_data[task_id]) + + def _prepare_fill_data( + self, folder_id: str, task_id: str + ) -> dict[str, Any]: + if not folder_id or not task_id: + return {} + + base_data = self._get_base_data() + project_name = base_data["project"]["name"] + folder_data = self._get_folder_data(folder_id) + project_entity = self._controller.get_project_entity(project_name) + task_data = self._get_task_data(project_entity, folder_id, task_id) + + base_data.update(folder_data) + base_data.update(task_data) + + return base_data + + def _cache_file_items( + self, folder_id: Optional[str], task_id: Optional[str] + ) -> list[WorkfileInfo]: + if not folder_id or not task_id: + return [] + + cache: CacheItem = self._workarea_file_items_cache[task_id] + if cache.is_valid: + return cache.get_data() + + project_entity = self._controller.get_project_entity( + self._project_name + ) + folder_entity = self._controller.get_folder_entity( + self._project_name, folder_id + ) + task_entity = self._controller.get_task_entity( + self._project_name, task_id + ) + anatomy = self._controller.project_anatomy + project_settings = self._controller.project_settings + workfile_entities = self._controller.get_workfile_entities(task_id) + + fill_data = self._prepare_fill_data(folder_id, task_id) + template_key = self._get_template_key(fill_data) + + prepared_data = ListWorkfilesOptionalData( + project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + template_key=template_key, + workfile_entities=workfile_entities, ) - def get_published_file_items(self, folder_id, task_name): - """Published workfiles for 
passed context. + items = self._host.list_workfiles( + self._project_name, + folder_entity, + task_entity, + prepared_data=prepared_data, + ) + cache.update_data(items) - Args: - folder_id (str): Folder id. - task_name (str): Task name. + # Cache items by entity ids and rootless path + self._workarea_file_items_mapping[task_id] = { + item.rootless_path: item + for item in items + } - Returns: - list[FileItem]: List of files for published workfiles. + return items + + def _get_template_key(self, fill_data: dict[str, Any]) -> str: + task_type = fill_data.get("task", {}).get("type") + # TODO cache + return get_workfile_template_key( + self._project_name, + task_type, + self._host_name, + project_settings=self._controller.project_settings, + ) + + def _get_last_workfile_version( + self, file_items: list[WorkfileInfo], task_entity: dict[str, Any] + ) -> int: """ - return self._published_model.get_file_items(folder_id, task_name) + Todos: + Validate if logic of this function is correct. It does return + last version + 1 which might be wrong. + + Args: + file_items (list[WorkfileInfo]): Workfile items. + task_entity (dict[str, Any]): Task entity. + + Returns: + int: Next workfile version. + + """ + versions = { + item.version + for item in file_items + if item.version is not None + } + if versions: + return max(versions) + 1 + + return get_versioning_start( + self._project_name, + self._host_name, + task_name=task_entity["name"], + task_type=task_entity["taskType"], + product_type="workfile", + project_settings=self._controller.project_settings, + ) + + def _get_workdir( + self, anatomy: "Anatomy", template_key: str, fill_data: dict[str, Any] + ): + directory_template = anatomy.get_template_item( + "work", template_key, "directory" + ) + return directory_template.format_strict(fill_data).normalized() + + def _update_workfile_info( + self, + task_id: str, + rootless_path: str, + description: str, + ): + self._update_file_description(task_id, rootless_path, description) + self._reset_workarea_file_items(task_id) + + # Update workfile entity cache if are cached + if task_id in self._workfile_entities_by_task_id: + workfile_entities = self.get_workfile_entities(task_id) + + target_workfile_entity = None + for workfile_entity in workfile_entities: + if rootless_path == workfile_entity["path"]: + target_workfile_entity = workfile_entity + break + + if target_workfile_entity is None: + self._workfile_entities_by_task_id.pop(task_id, None) + self.get_workfile_entities(task_id) + else: + target_workfile_entity["attrib"]["description"] = description + + def _update_file_description( + self, task_id: str, rootless_path: str, description: str + ): + mapping = self._workarea_file_items_mapping.get(task_id) + if not mapping: + return + item = mapping.get(rootless_path) + if item is not None: + item.description = description + + # --- Workfile entities --- + def _save_workfile_info( + self, + task_id: str, + rootless_path: str, + version: Optional[int], + comment: Optional[str], + description: Optional[str], + ): + workfile_entity = save_workfile_info( + self._controller.get_current_project_name(), + task_id, + rootless_path, + self._controller.get_host_name(), + version=version, + comment=comment, + description=description, + workfile_entities=self.get_workfile_entities(task_id), + ) + # Update cache + workfile_entities = self.get_workfile_entities(task_id) + match_idx = None + for idx, entity in enumerate(workfile_entities): + if entity["id"] == workfile_entity["id"]: + # Update existing entity + match_idx = 
idx + break + + if match_idx is None: + workfile_entities.append(workfile_entity) + else: + workfile_entities[match_idx] = workfile_entity diff --git a/client/ayon_core/tools/workfiles/widgets/files_widget.py b/client/ayon_core/tools/workfiles/widgets/files_widget.py index f0b74f4289..9c12fa575c 100644 --- a/client/ayon_core/tools/workfiles/widgets/files_widget.py +++ b/client/ayon_core/tools/workfiles/widgets/files_widget.py @@ -200,6 +200,9 @@ class FilesWidget(QtWidgets.QWidget): self._open_workfile(folder_id, task_id, path) def _on_current_open_requests(self): + # TODO validate if item under mouse is enabled + # - this uses selected item, but that does not have to be the one + # under mouse self._on_workarea_open_clicked() def _on_duplicate_request(self): @@ -210,11 +213,18 @@ class FilesWidget(QtWidgets.QWidget): result = self._exec_save_as_dialog() if result is None: return + folder_id = self._selected_folder_id + task_id = self._selected_task_id self._controller.duplicate_workfile( + folder_id, + task_id, filepath, + result["rootless_workdir"], result["workdir"], result["filename"], - artist_note=result["artist_note"] + version=result["version"], + comment=result["comment"], + description=result["description"] ) def _on_workarea_browse_clicked(self): @@ -259,10 +269,12 @@ class FilesWidget(QtWidgets.QWidget): self._controller.save_as_workfile( result["folder_id"], result["task_id"], + result["rootless_workdir"], result["workdir"], result["filename"], - result["template_key"], - artist_note=result["artist_note"] + version=result["version"], + comment=result["comment"], + description=result["description"] ) def _on_workarea_path_changed(self, event): @@ -275,10 +287,11 @@ class FilesWidget(QtWidgets.QWidget): def _update_published_btns_state(self): enabled = ( self._valid_representation_id - and self._valid_selected_context and self._is_save_enabled ) - self._published_btn_copy_n_open.setEnabled(enabled) + self._published_btn_copy_n_open.setEnabled( + enabled and self._valid_selected_context + ) self._published_btn_change_context.setEnabled(enabled) def _update_workarea_btns_state(self): @@ -314,12 +327,16 @@ class FilesWidget(QtWidgets.QWidget): result["task_id"], result["workdir"], result["filename"], - result["template_key"], - artist_note=result["artist_note"] + result["rootless_workdir"], + version=result["version"], + comment=result["comment"], + description=result["description"], ) def _on_save_as_request(self): - self._on_published_save_clicked() + # Make sure the save is enabled + if self._is_save_enabled and self._valid_selected_context: + self._on_published_save_clicked() def _set_select_contex_mode(self, enabled): if self._select_context_mode is enabled: diff --git a/client/ayon_core/tools/workfiles/widgets/files_widget_published.py b/client/ayon_core/tools/workfiles/widgets/files_widget_published.py index 07122046be..250204a7d7 100644 --- a/client/ayon_core/tools/workfiles/widgets/files_widget_published.py +++ b/client/ayon_core/tools/workfiles/widgets/files_widget_published.py @@ -1,3 +1,5 @@ +import os + import qtawesome from qtpy import QtWidgets, QtCore, QtGui @@ -205,24 +207,25 @@ class PublishedFilesModel(QtGui.QStandardItemModel): new_items.append(item) item.setColumnCount(self.columnCount()) item.setData(self._file_icon, QtCore.Qt.DecorationRole) - item.setData(file_item.filename, QtCore.Qt.DisplayRole) item.setData(repre_id, REPRE_ID_ROLE) - if file_item.exists: + if file_item.available: flags = QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable else: flags = 
QtCore.Qt.NoItemFlags - author = file_item.created_by + author = file_item.author user_item = user_items_by_name.get(author) if user_item is not None and user_item.full_name: author = user_item.full_name - item.setFlags(flags) + filename = os.path.basename(file_item.filepath) + item.setFlags(flags) + item.setData(filename, QtCore.Qt.DisplayRole) item.setData(file_item.filepath, FILEPATH_ROLE) item.setData(author, AUTHOR_ROLE) - item.setData(file_item.modified, DATE_MODIFIED_ROLE) + item.setData(file_item.file_modified, DATE_MODIFIED_ROLE) self._items_by_id[repre_id] = item diff --git a/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py b/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py index 7f76b6a8ab..47d4902812 100644 --- a/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py +++ b/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py @@ -1,3 +1,5 @@ +import os + import qtawesome from qtpy import QtWidgets, QtCore, QtGui @@ -10,8 +12,10 @@ from ayon_core.tools.utils.delegates import PrettyTimeDelegate FILENAME_ROLE = QtCore.Qt.UserRole + 1 FILEPATH_ROLE = QtCore.Qt.UserRole + 2 -AUTHOR_ROLE = QtCore.Qt.UserRole + 3 -DATE_MODIFIED_ROLE = QtCore.Qt.UserRole + 4 +ROOTLESS_PATH_ROLE = QtCore.Qt.UserRole + 3 +AUTHOR_ROLE = QtCore.Qt.UserRole + 4 +DATE_MODIFIED_ROLE = QtCore.Qt.UserRole + 5 +WORKFILE_ENTITY_ID_ROLE = QtCore.Qt.UserRole + 6 class WorkAreaFilesModel(QtGui.QStandardItemModel): @@ -198,7 +202,7 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): items_to_remove = set(self._items_by_filename.keys()) new_items = [] for file_item in file_items: - filename = file_item.filename + filename = os.path.basename(file_item.filepath) if filename in self._items_by_filename: items_to_remove.discard(filename) item = self._items_by_filename[filename] @@ -206,23 +210,28 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): item = QtGui.QStandardItem() new_items.append(item) item.setColumnCount(self.columnCount()) - item.setFlags( - QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable - ) item.setData(self._file_icon, QtCore.Qt.DecorationRole) - item.setData(file_item.filename, QtCore.Qt.DisplayRole) - item.setData(file_item.filename, FILENAME_ROLE) + item.setData(filename, QtCore.Qt.DisplayRole) + item.setData(filename, FILENAME_ROLE) + flags = QtCore.Qt.ItemIsSelectable + if file_item.available: + flags |= QtCore.Qt.ItemIsEnabled + item.setFlags(flags) updated_by = file_item.updated_by user_item = user_items_by_name.get(updated_by) if user_item is not None and user_item.full_name: updated_by = user_item.full_name + item.setData( + file_item.workfile_entity_id, WORKFILE_ENTITY_ID_ROLE + ) item.setData(file_item.filepath, FILEPATH_ROLE) + item.setData(file_item.rootless_path, ROOTLESS_PATH_ROLE) + item.setData(file_item.file_modified, DATE_MODIFIED_ROLE) item.setData(updated_by, AUTHOR_ROLE) - item.setData(file_item.modified, DATE_MODIFIED_ROLE) - self._items_by_filename[file_item.filename] = item + self._items_by_filename[filename] = item if new_items: root_item.appendRows(new_items) @@ -354,14 +363,18 @@ class WorkAreaFilesWidget(QtWidgets.QWidget): def _get_selected_info(self): selection_model = self._view.selectionModel() - filepath = None - filename = None + workfile_entity_id = filename = rootless_path = filepath = None for index in selection_model.selectedIndexes(): filepath = index.data(FILEPATH_ROLE) + rootless_path = index.data(ROOTLESS_PATH_ROLE) filename = index.data(FILENAME_ROLE) + workfile_entity_id = 
index.data(WORKFILE_ENTITY_ID_ROLE) + return { "filepath": filepath, + "rootless_path": rootless_path, "filename": filename, + "workfile_entity_id": workfile_entity_id, } def get_selected_path(self): @@ -374,8 +387,12 @@ class WorkAreaFilesWidget(QtWidgets.QWidget): return self._get_selected_info()["filepath"] def _on_selection_change(self): - filepath = self.get_selected_path() - self._controller.set_selected_workfile_path(filepath) + info = self._get_selected_info() + self._controller.set_selected_workfile_path( + info["rootless_path"], + info["filepath"], + info["workfile_entity_id"], + ) def _on_mouse_double_click(self, event): if event.button() == QtCore.Qt.LeftButton: @@ -430,19 +447,25 @@ class WorkAreaFilesWidget(QtWidgets.QWidget): ) def _on_model_refresh(self): - if ( - not self._change_selection_on_refresh - or self._proxy_model.rowCount() < 1 - ): + if not self._change_selection_on_refresh: return # Find the row with latest date modified + indexes = [ + self._proxy_model.index(idx, 0) + for idx in range(self._proxy_model.rowCount()) + ] + filtered_indexes = [ + index + for index in indexes + if self._proxy_model.flags(index) & QtCore.Qt.ItemIsEnabled + ] + if not filtered_indexes: + return + latest_index = max( - ( - self._proxy_model.index(idx, 0) - for idx in range(self._proxy_model.rowCount()) - ), - key=lambda model_index: model_index.data(DATE_MODIFIED_ROLE) + filtered_indexes, + key=lambda model_index: model_index.data(DATE_MODIFIED_ROLE) or 0 ) # Select row of latest modified diff --git a/client/ayon_core/tools/workfiles/widgets/save_as_dialog.py b/client/ayon_core/tools/workfiles/widgets/save_as_dialog.py index bddff816fe..24d64319ca 100644 --- a/client/ayon_core/tools/workfiles/widgets/save_as_dialog.py +++ b/client/ayon_core/tools/workfiles/widgets/save_as_dialog.py @@ -108,6 +108,7 @@ class SaveAsDialog(QtWidgets.QDialog): self._ext_value = None self._filename = None self._workdir = None + self._rootless_workdir = None self._result = None @@ -144,8 +145,8 @@ class SaveAsDialog(QtWidgets.QDialog): version_layout.addWidget(last_version_check) # Artist note widget - artist_note_input = PlaceholderPlainTextEdit(inputs_widget) - artist_note_input.setPlaceholderText( + description_input = PlaceholderPlainTextEdit(inputs_widget) + description_input.setPlaceholderText( "Provide a note about this workfile.") # Preview widget @@ -166,7 +167,7 @@ class SaveAsDialog(QtWidgets.QDialog): subversion_label = QtWidgets.QLabel("Subversion:", inputs_widget) extension_label = QtWidgets.QLabel("Extension:", inputs_widget) preview_label = QtWidgets.QLabel("Preview:", inputs_widget) - artist_note_label = QtWidgets.QLabel("Artist Note:", inputs_widget) + description_label = QtWidgets.QLabel("Artist Note:", inputs_widget) # Build inputs inputs_layout = QtWidgets.QGridLayout(inputs_widget) @@ -178,8 +179,8 @@ class SaveAsDialog(QtWidgets.QDialog): inputs_layout.addWidget(extension_combobox, 2, 1) inputs_layout.addWidget(preview_label, 3, 0) inputs_layout.addWidget(preview_widget, 3, 1) - inputs_layout.addWidget(artist_note_label, 4, 0, 1, 2) - inputs_layout.addWidget(artist_note_input, 5, 0, 1, 2) + inputs_layout.addWidget(description_label, 4, 0, 1, 2) + inputs_layout.addWidget(description_input, 5, 0, 1, 2) # Build layout main_layout = QtWidgets.QVBoxLayout(self) @@ -214,13 +215,13 @@ class SaveAsDialog(QtWidgets.QDialog): self._extension_combobox = extension_combobox self._subversion_input = subversion_input self._preview_widget = preview_widget - self._artist_note_input = 
artist_note_input + self._description_input = description_input self._version_label = version_label self._subversion_label = subversion_label self._extension_label = extension_label self._preview_label = preview_label - self._artist_note_label = artist_note_label + self._description_label = description_label # Post init setup @@ -255,6 +256,7 @@ class SaveAsDialog(QtWidgets.QDialog): self._folder_id = folder_id self._task_id = task_id self._workdir = data["workdir"] + self._rootless_workdir = data["rootless_workdir"] self._comment_value = data["comment"] self._ext_value = data["ext"] self._template_key = data["template_key"] @@ -329,10 +331,13 @@ class SaveAsDialog(QtWidgets.QDialog): self._result = { "filename": self._filename, "workdir": self._workdir, + "rootless_workdir": self._rootless_workdir, "folder_id": self._folder_id, "task_id": self._task_id, "template_key": self._template_key, - "artist_note": self._artist_note_input.toPlainText(), + "version": self._version_value, + "comment": self._comment_value, + "description": self._description_input.toPlainText(), } self.close() diff --git a/client/ayon_core/tools/workfiles/widgets/side_panel.py b/client/ayon_core/tools/workfiles/widgets/side_panel.py index 7ba60b5544..b1b91d9721 100644 --- a/client/ayon_core/tools/workfiles/widgets/side_panel.py +++ b/client/ayon_core/tools/workfiles/widgets/side_panel.py @@ -4,6 +4,8 @@ from qtpy import QtWidgets, QtCore def file_size_to_string(file_size): + if not file_size: + return "N/A" size = 0 size_ending_mapping = { "KB": 1024 ** 1, @@ -43,44 +45,47 @@ class SidePanelWidget(QtWidgets.QWidget): details_input = QtWidgets.QPlainTextEdit(self) details_input.setReadOnly(True) - artist_note_widget = QtWidgets.QWidget(self) - note_label = QtWidgets.QLabel("Artist note", artist_note_widget) - note_input = QtWidgets.QPlainTextEdit(artist_note_widget) - btn_note_save = QtWidgets.QPushButton("Save note", artist_note_widget) + description_widget = QtWidgets.QWidget(self) + description_label = QtWidgets.QLabel("Artist note", description_widget) + description_input = QtWidgets.QPlainTextEdit(description_widget) + btn_description_save = QtWidgets.QPushButton( + "Save note", description_widget + ) - artist_note_layout = QtWidgets.QVBoxLayout(artist_note_widget) - artist_note_layout.setContentsMargins(0, 0, 0, 0) - artist_note_layout.addWidget(note_label, 0) - artist_note_layout.addWidget(note_input, 1) - artist_note_layout.addWidget( - btn_note_save, 0, alignment=QtCore.Qt.AlignRight + description_layout = QtWidgets.QVBoxLayout(description_widget) + description_layout.setContentsMargins(0, 0, 0, 0) + description_layout.addWidget(description_label, 0) + description_layout.addWidget(description_input, 1) + description_layout.addWidget( + btn_description_save, 0, alignment=QtCore.Qt.AlignRight ) main_layout = QtWidgets.QVBoxLayout(self) main_layout.setContentsMargins(0, 0, 0, 0) main_layout.addWidget(details_label, 0) main_layout.addWidget(details_input, 1) - main_layout.addWidget(artist_note_widget, 1) + main_layout.addWidget(description_widget, 1) - note_input.textChanged.connect(self._on_note_change) - btn_note_save.clicked.connect(self._on_save_click) + description_input.textChanged.connect(self._on_description_change) + btn_description_save.clicked.connect(self._on_save_click) controller.register_event_callback( "selection.workarea.changed", self._on_selection_change ) self._details_input = details_input - self._artist_note_widget = artist_note_widget - self._note_input = note_input - self._btn_note_save = 
btn_note_save + self._description_widget = description_widget + self._description_input = description_input + self._btn_description_save = btn_description_save self._folder_id = None - self._task_name = None + self._task_id = None self._filepath = None - self._orig_note = "" + self._rootless_path = None + self._orig_description = "" self._controller = controller - self._set_context(None, None, None) + self._set_context(None, None, None, None) def set_published_mode(self, published_mode): """Change published mode. @@ -89,64 +94,69 @@ class SidePanelWidget(QtWidgets.QWidget): published_mode (bool): Published mode enabled. """ - self._artist_note_widget.setVisible(not published_mode) + self._description_widget.setVisible(not published_mode) def _on_selection_change(self, event): folder_id = event["folder_id"] - task_name = event["task_name"] + task_id = event["task_id"] filepath = event["path"] + rootless_path = event["rootless_path"] - self._set_context(folder_id, task_name, filepath) + self._set_context(folder_id, task_id, rootless_path, filepath) - def _on_note_change(self): - text = self._note_input.toPlainText() - self._btn_note_save.setEnabled(self._orig_note != text) + def _on_description_change(self): + text = self._description_input.toPlainText() + self._btn_description_save.setEnabled(self._orig_description != text) def _on_save_click(self): - note = self._note_input.toPlainText() + description = self._description_input.toPlainText() self._controller.save_workfile_info( - self._folder_id, - self._task_name, - self._filepath, - note + self._task_id, + self._rootless_path, + description=description, ) - self._orig_note = note - self._btn_note_save.setEnabled(False) + self._orig_description = description + self._btn_description_save.setEnabled(False) - def _set_context(self, folder_id, task_name, filepath): + def _set_context(self, folder_id, task_id, rootless_path, filepath): workfile_info = None # Check if folder, task and file are selected - if bool(folder_id) and bool(task_name) and bool(filepath): + if folder_id and task_id and rootless_path: workfile_info = self._controller.get_workfile_info( - folder_id, task_name, filepath + folder_id, task_id, rootless_path ) enabled = workfile_info is not None self._details_input.setEnabled(enabled) - self._note_input.setEnabled(enabled) - self._btn_note_save.setEnabled(enabled) + self._description_input.setEnabled(enabled) + self._btn_description_save.setEnabled(enabled) self._folder_id = folder_id - self._task_name = task_name + self._task_id = task_id self._filepath = filepath + self._rootless_path = rootless_path # Disable inputs and remove texts if any required arguments are # missing if not enabled: - self._orig_note = "" + self._orig_description = "" self._details_input.setPlainText("") - self._note_input.setPlainText("") + self._description_input.setPlainText("") return - note = workfile_info.note - size_value = file_size_to_string(workfile_info.filesize) + description = workfile_info.description + size_value = file_size_to_string(workfile_info.file_size) # Append html string datetime_format = "%b %d %Y %H:%M:%S" - creation_time = datetime.datetime.fromtimestamp( - workfile_info.creation_time) - modification_time = datetime.datetime.fromtimestamp( - workfile_info.modification_time) + file_created = workfile_info.file_created + modification_time = workfile_info.file_modified + if file_created: + file_created = datetime.datetime.fromtimestamp(file_created) + + if modification_time: + modification_time = datetime.datetime.fromtimestamp( 
+ modification_time) user_items_by_name = self._controller.get_user_items_by_name() @@ -156,33 +166,38 @@ class SidePanelWidget(QtWidgets.QWidget): return user_item.full_name return username - created_lines = [ - creation_time.strftime(datetime_format) - ] + created_lines = [] if workfile_info.created_by: - created_lines.insert( - 0, convert_username(workfile_info.created_by) + created_lines.append( + convert_username(workfile_info.created_by) ) + if file_created: + created_lines.append(file_created.strftime(datetime_format)) - modified_lines = [ - modification_time.strftime(datetime_format) - ] + if created_lines: + created_lines.insert(0, "Created:") + + modified_lines = [] if workfile_info.updated_by: - modified_lines.insert( - 0, convert_username(workfile_info.updated_by) + modified_lines.append( + convert_username(workfile_info.updated_by) ) + if modification_time: + modified_lines.append( + modification_time.strftime(datetime_format) + ) + if modified_lines: + modified_lines.insert(0, "Modified:") lines = ( "Size:", size_value, - "Created:", "
".join(created_lines), - "Modified:", "
".join(modified_lines), ) - self._orig_note = note - self._note_input.setPlainText(note) + self._orig_description = description + self._description_input.setPlainText(description) # Set as empty string self._details_input.setPlainText("") - self._details_input.appendHtml("
".join(lines)) + self._details_input.appendHtml("
".join(lines)) diff --git a/client/ayon_core/tools/workfiles/widgets/window.py b/client/ayon_core/tools/workfiles/widgets/window.py index 1649a059cb..00362ea866 100644 --- a/client/ayon_core/tools/workfiles/widgets/window.py +++ b/client/ayon_core/tools/workfiles/widgets/window.py @@ -1,21 +1,21 @@ -from qtpy import QtCore, QtWidgets, QtGui -from ayon_core import style, resources -from ayon_core.tools.utils import ( - PlaceholderLineEdit, - MessageOverlayObject, -) +from qtpy import QtCore, QtGui, QtWidgets -from ayon_core.tools.workfiles.control import BaseWorkfileController +from ayon_core import resources, style from ayon_core.tools.utils import ( - GoToCurrentButton, - RefreshButton, FoldersWidget, + GoToCurrentButton, + MessageOverlayObject, + NiceCheckbox, + PlaceholderLineEdit, + RefreshButton, TasksWidget, ) +from ayon_core.tools.utils.lib import checkstate_int_to_enum +from ayon_core.tools.workfiles.control import BaseWorkfileController -from .side_panel import SidePanelWidget from .files_widget import FilesWidget +from .side_panel import SidePanelWidget from .utils import BaseOverlayFrame @@ -107,7 +107,7 @@ class WorkfilesToolWindow(QtWidgets.QWidget): split_widget.addWidget(tasks_widget) split_widget.addWidget(col_3_widget) split_widget.addWidget(side_panel) - split_widget.setSizes([255, 175, 550, 190]) + split_widget.setSizes([350, 175, 550, 190]) body_layout.addWidget(split_widget) @@ -157,6 +157,8 @@ class WorkfilesToolWindow(QtWidgets.QWidget): self._home_body_widget = home_body_widget self._split_widget = split_widget + self._project_name = self._controller.get_current_project_name() + self._tasks_widget = tasks_widget self._side_panel = side_panel @@ -186,11 +188,24 @@ class WorkfilesToolWindow(QtWidgets.QWidget): controller, col_widget, handle_expected_selection=True ) + my_tasks_tooltip = ( + "Filter folders and task to only those you are assigned to." 
+ ) + + my_tasks_label = QtWidgets.QLabel("My tasks") + my_tasks_label.setToolTip(my_tasks_tooltip) + + my_tasks_checkbox = NiceCheckbox(folder_widget) + my_tasks_checkbox.setChecked(False) + my_tasks_checkbox.setToolTip(my_tasks_tooltip) + header_layout = QtWidgets.QHBoxLayout(header_widget) header_layout.setContentsMargins(0, 0, 0, 0) header_layout.addWidget(folder_filter_input, 1) header_layout.addWidget(go_to_current_btn, 0) header_layout.addWidget(refresh_btn, 0) + header_layout.addWidget(my_tasks_label, 0) + header_layout.addWidget(my_tasks_checkbox, 0) col_layout = QtWidgets.QVBoxLayout(col_widget) col_layout.setContentsMargins(0, 0, 0, 0) @@ -200,6 +215,9 @@ class WorkfilesToolWindow(QtWidgets.QWidget): folder_filter_input.textChanged.connect(self._on_folder_filter_change) go_to_current_btn.clicked.connect(self._on_go_to_current_clicked) refresh_btn.clicked.connect(self._on_refresh_clicked) + my_tasks_checkbox.stateChanged.connect( + self._on_my_tasks_checkbox_state_changed + ) self._folder_filter_input = folder_filter_input self._folders_widget = folder_widget @@ -340,9 +358,8 @@ class WorkfilesToolWindow(QtWidgets.QWidget): if not self._host_is_valid: return - self._folders_widget.set_project_name( - self._controller.get_current_project_name() - ) + self._project_name = self._controller.get_current_project_name() + self._folders_widget.set_project_name(self._project_name) def _on_save_as_finished(self, event): if event["failed"]: @@ -385,3 +402,16 @@ class WorkfilesToolWindow(QtWidgets.QWidget): ) else: self.close() + + def _on_my_tasks_checkbox_state_changed(self, state): + folder_ids = None + task_ids = None + state = checkstate_int_to_enum(state) + if state == QtCore.Qt.Checked: + entity_ids = self._controller.get_my_tasks_entity_ids( + self._project_name + ) + folder_ids = entity_ids["folder_ids"] + task_ids = entity_ids["task_ids"] + self._folders_widget.set_folder_ids_filter(folder_ids) + self._tasks_widget.set_task_ids_filter(task_ids) diff --git a/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined-Regular.ttf b/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined-Regular.ttf deleted file mode 100644 index 26f767e075..0000000000 Binary files a/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined-Regular.ttf and /dev/null differ diff --git a/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined-Regular.codepoints b/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined.codepoints similarity index 93% rename from client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined-Regular.codepoints rename to client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined.codepoints index d5ede9bf32..ec2d854772 100644 --- a/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined-Regular.codepoints +++ b/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined.codepoints @@ -85,12 +85,13 @@ account_circle_off f7b3 account_tree e97a action_key f502 activity_zone e1e6 +acupuncture f2c4 acute e4cb ad e65a ad_group e65b ad_group_off eae5 ad_off f7b2 -ad_units ef39 +ad_units f2eb adaptive_audio_mic f4cc adaptive_audio_mic_off f4cb adb e60e @@ -127,7 +128,7 @@ add_row_below f422 add_shopping_cart e854 add_task f23a add_to_drive e65c -add_to_home_screen e1fe +add_to_home_screen f2b9 add_to_photos e39d add_to_queue e05c add_triangle f48e @@ -208,10 
+209,36 @@ amp_stories ea13 analytics ef3e anchor f1cd android e859 +android_cell_4_bar ef06 +android_cell_4_bar_alert ef09 +android_cell_4_bar_off ef08 +android_cell_4_bar_plus ef07 +android_cell_5_bar ef02 +android_cell_5_bar_alert ef05 +android_cell_5_bar_off ef04 +android_cell_5_bar_plus ef03 +android_cell_dual_4_bar ef0d +android_cell_dual_4_bar_alert ef0f +android_cell_dual_4_bar_plus ef0e +android_cell_dual_5_bar ef0a +android_cell_dual_5_bar_alert ef0c +android_cell_dual_5_bar_plus ef0b +android_wifi_3_bar ef16 +android_wifi_3_bar_alert ef1b +android_wifi_3_bar_lock ef1a +android_wifi_3_bar_off ef19 +android_wifi_3_bar_plus ef18 +android_wifi_3_bar_question ef17 +android_wifi_4_bar ef10 +android_wifi_4_bar_alert ef15 +android_wifi_4_bar_lock ef14 +android_wifi_4_bar_off ef13 +android_wifi_4_bar_plus ef12 +android_wifi_4_bar_question ef11 animated_images f49a animation e71c announcement e87f -aod efda +aod f2e6 aod_tablet f89f aod_watch f6ac apartment ea40 @@ -219,14 +246,15 @@ api f1b7 apk_document f88e apk_install f88f app_badging f72f -app_blocking ef3f -app_promo e981 +app_blocking f2e5 +app_promo f2cd app_registration ef40 -app_settings_alt ef41 -app_shortcut eae4 +app_settings_alt f2d9 +app_shortcut f2df apparel ef7b approval e982 approval_delegation f84a +approval_delegation_off f2c5 apps e5c3 apps_outage e7cc aq f55a @@ -265,6 +293,9 @@ arrow_range f69b arrow_right e5df arrow_right_alt e941 arrow_selector_tool f82f +arrow_shape_up eef6 +arrow_shape_up_stack eef7 +arrow_shape_up_stack_2 eef8 arrow_split ea04 arrow_top_left f72e arrow_top_right f72d @@ -287,6 +318,7 @@ aspect_ratio e85b assessment f0cc assignment e85d assignment_add f848 +assignment_globe eeec assignment_ind e85e assignment_late e85f assignment_return e860 @@ -336,6 +368,7 @@ auto_read_pause f219 auto_read_play f216 auto_schedule e214 auto_stories e666 +auto_stories_off f267 auto_timer ef7f auto_towing e71e auto_transmission f53f @@ -352,6 +385,7 @@ av_timer e01b avc f4af avg_pace f6bb avg_time f813 +award_meal f241 award_star f612 azm f6ec baby_changing_station f19b @@ -370,6 +404,7 @@ backup e864 backup_table ef43 badge ea67 badge_critical_battery f156 +badminton f2a8 bakery_dining ea53 balance eaf6 balcony e58f @@ -382,9 +417,11 @@ barcode_reader f85c barcode_scanner e70c barefoot f871 batch_prediction f0f5 +bath_bedrock f286 bath_outdoor f6fb bath_private f6fa bath_public_large f6f9 +bath_soak f2a0 bathroom efdd bathtub ea41 battery_0_bar ebdc @@ -410,6 +447,19 @@ battery_android_5 f308 battery_android_6 f307 battery_android_alert f306 battery_android_bolt f305 +battery_android_frame_1 f257 +battery_android_frame_2 f256 +battery_android_frame_3 f255 +battery_android_frame_4 f254 +battery_android_frame_5 f253 +battery_android_frame_6 f252 +battery_android_frame_alert f251 +battery_android_frame_bolt f250 +battery_android_frame_full f24f +battery_android_frame_plus f24e +battery_android_frame_question f24d +battery_android_frame_share f24c +battery_android_frame_shield f24b battery_android_full f304 battery_android_plus f303 battery_android_question f302 @@ -449,6 +499,7 @@ bedroom_parent efe2 bedtime f159 bedtime_off eb76 beenhere e52d +beer_meal f285 bento f1f4 bia f6eb bid_landscape e678 @@ -490,7 +541,7 @@ book_3 f53d book_4 f53c book_5 f53b book_6 f3df -book_online f217 +book_online f2e4 book_ribbon f3e7 bookmark e8e7 bookmark_add e598 @@ -537,6 +588,7 @@ breaking_news ea08 breaking_news_alt_1 f0ba breastfeeding f856 brick f388 +briefcase_meal f246 brightness_1 e3fa brightness_2 f036 brightness_3 e3a8 
@@ -564,6 +616,7 @@ brush e3ae bubble ef83 bubble_chart e6dd bubbles f64e +bucket_check ef2a bug_report e868 build f8cd build_circle ef48 @@ -586,7 +639,11 @@ cake_add f85b calculate ea5f calendar_add_on ef85 calendar_apps_script f0bb +calendar_check f243 calendar_clock f540 +calendar_lock f242 +calendar_meal f296 +calendar_meal_2 f240 calendar_month ebcc calendar_today e935 calendar_view_day e936 @@ -607,10 +664,10 @@ call_to_action e06c camera e3af camera_alt e412 camera_enhance e8fc -camera_front e3b1 +camera_front f2c9 camera_indoor efe9 camera_outdoor efea -camera_rear e3b2 +camera_rear f2c8 camera_roll e3b3 camera_video f7a6 cameraswitch efeb @@ -628,7 +685,9 @@ car_crash ebf2 car_defrost_left f344 car_defrost_low_left f343 car_defrost_low_right f342 +car_defrost_mid_left f278 car_defrost_mid_low_left f341 +car_defrost_mid_low_right f277 car_defrost_mid_right f340 car_defrost_right f33f car_fan_low_left f33e @@ -674,17 +733,21 @@ center_focus_strong e3b4 center_focus_weak e3b5 chair efed chair_alt efee +chair_counter f29f +chair_fireplace f29e +chair_umbrella f29d chalet e585 change_circle e2e7 change_history e86b charger e2ae -charging_station f19d +charging_station f2e3 chart_data e473 chat e0c9 chat_add_on f0f3 chat_apps_script f0bd chat_bubble e0cb chat_bubble_outline e0cb +chat_dashed eeed chat_error f7ac chat_info f52b chat_paste_go f6bd @@ -695,6 +758,7 @@ check_box_outline_blank e835 check_circle f0be check_circle_filled f0be check_circle_outline f0be +check_circle_unread f27e check_in_out f6f6 check_indeterminate_small f88a check_small f88b @@ -707,13 +771,22 @@ checkroom f19e cheer f6a8 chef_hat f357 chess f5e7 +chess_bishop f261 +chess_bishop_2 f262 +chess_king f25f +chess_king_2 f260 +chess_knight f25e chess_pawn f3b6 +chess_pawn_2 f25d +chess_queen f25c +chess_rook f25b chevron_backward f46b chevron_forward f46a chevron_left e5cb chevron_right e5cc child_care eb41 child_friendly eb42 +child_hat ef30 chip_extraction f821 chips e993 chrome_reader_mode e86d @@ -839,6 +912,7 @@ control_camera e074 control_point e3ba control_point_duplicate e3bb controller_gen e83d +conversation ef2f conversion_path f0c1 conversion_path_off f7b4 convert_to_text f41f @@ -984,13 +1058,13 @@ detector_status e1e8 developer_board e30d developer_board_off e4ff developer_guide e99e -developer_mode e1b0 +developer_mode f2e2 developer_mode_tv e874 device_band f2f5 device_hub e335 device_reset e8b3 device_thermostat e1ff -device_unknown e339 +device_unknown f2e1 devices e326 devices_fold ebde devices_fold_2 f406 @@ -1004,10 +1078,14 @@ dialer_sip e0bb dialogs e99f dialpad e0bc diamond ead5 +diamond_shine f2b2 dictionary f539 difference eb7d digital_out_of_home f1de digital_wellbeing ef86 +dine_heart f29c +dine_in f295 +dine_lamp f29b dining eff4 dinner_dining ea57 directions e52e @@ -1057,7 +1135,7 @@ do_not_disturb_on f08f do_not_disturb_on_total_silence effb do_not_step f19f do_not_touch f1b0 -dock e30e +dock f2e0 dock_to_bottom f7e6 dock_to_left f7e5 dock_to_right f7e4 @@ -1112,6 +1190,8 @@ drive_file_move_rtl e9a1 drive_file_rename_outline e9a2 drive_folder_upload e9a3 drive_fusiontable e678 +drone f25a +drone_2 f259 dropdown e9a4 dropper_eye f351 dry f1b3 @@ -1139,8 +1219,8 @@ ecg f80f ecg_heart f6e9 eco ea35 eda f6e8 -edgesensor_high f005 -edgesensor_low f006 +edgesensor_high f2ef +edgesensor_low f2ee edit f097 edit_arrow_down f380 edit_arrow_up f37f @@ -1266,6 +1346,8 @@ extension e87b extension_off e4f5 eye_tracking f4c9 eyeglasses f6ee +eyeglasses_2 f2c7 +eyeglasses_2_sound f265 face f008 
face_2 f8da face_3 f8db @@ -1285,6 +1367,7 @@ fact_check f0c5 factory ebbc falling f60d familiar_face_and_zone e21c +family_group eef2 family_history e0ad family_home eb26 family_link eb19 @@ -1379,6 +1462,7 @@ fit_screen ea10 fit_width f779 fitness_center eb43 fitness_tracker f463 +fitness_trackers eef1 flag f0c6 flag_2 f40f flag_check f3d8 @@ -1515,6 +1599,8 @@ forward_media f6f4 forward_to_inbox f187 foundation f200 fragrance f345 +frame_bug eeef +frame_exclamation eeee frame_inspect f772 frame_person f8a6 frame_person_mic f4d5 @@ -1541,8 +1627,10 @@ gallery_thumbnail f86f gamepad e30f games e30f garage f011 +garage_check f28d garage_door e714 garage_home e82d +garage_money f28c garden_cart f8a9 gas_meter ec19 gastroenterology e0f1 @@ -1621,9 +1709,12 @@ h_plus_mobiledata f019 h_plus_mobiledata_badge f7df hail e9b1 hallway e6f8 +hanami_dango f23f hand_bones f894 hand_gesture ef9c hand_gesture_off f3f3 +hand_meal f294 +hand_package f293 handheld_controller f4c6 handshake ebcb handwriting_recognition eb02 @@ -1655,6 +1746,7 @@ headset_off e33a healing e3f3 health_and_beauty ef9d health_and_safety e1d5 +health_cross f2c3 health_metrics f6e2 heap_snapshot_large f76e heap_snapshot_multiple f76d @@ -1662,11 +1754,14 @@ heap_snapshot_thumbnail f76c hearing e023 hearing_aid f464 hearing_aid_disabled f3b0 +hearing_aid_disabled_left f2ec +hearing_aid_left f2ed hearing_disabled f104 heart_broken eac2 heart_check f60a heart_minus f883 heart_plus f884 +heart_smile f292 heat f537 heat_pump ec18 heat_pump_balance e27e @@ -1682,6 +1777,7 @@ hexagon eb39 hide ef9e hide_image f022 hide_source f023 +high_chair f29a high_density f79c high_quality e024 high_res f54b @@ -1770,6 +1866,7 @@ iframe_off f71c image e3f4 image_arrow_up f317 image_aspect_ratio e3f5 +image_inset f247 image_not_supported f116 image_search e43f imagesearch_roller e9b4 @@ -1815,7 +1912,7 @@ insert_photo e3f4 insert_text f827 insights f092 install_desktop eb71 -install_mobile eb72 +install_mobile f2cd instant_mix e026 integration_instructions ef54 interactive_space f7ff @@ -1830,6 +1927,8 @@ ios_share e6b8 iron e583 iso e3f6 jamboard_kiosk e9b5 +japanese_curry f284 +japanese_flag f283 javascript eb7c join f84f join_full f84f @@ -1838,6 +1937,7 @@ join_left eaf2 join_right eaea joystick f5ee jump_to_element f719 +kanji_alcohol f23e kayaking e50c kebab_dining e842 keep f026 @@ -2065,9 +2165,11 @@ magnification_small f83c magnify_docked f7d6 magnify_fullscreen f7d5 mail e159 +mail_asterisk eef4 mail_lock ec0a mail_off f48b mail_outline e159 +mail_shield f249 male e58e man e4eb man_2 f8e1 @@ -2079,6 +2181,8 @@ manage_search f02f manga f5e3 manufacturing e726 map e55b +map_pin_heart f298 +map_pin_review f297 map_search f3ca maps_home_work f030 maps_ugc ef58 @@ -2097,11 +2201,14 @@ markunread_mailbox e89b masked_transitions e72e masked_transitions_add f42b masks f218 +massage f2c2 match_case f6f1 match_case_off f36f match_word f6f0 matter e907 maximize e930 +meal_dinner f23d +meal_lunch f23c measuring_tape f6af media_bluetooth_off f031 media_bluetooth_on f032 @@ -2120,6 +2227,7 @@ memory_alt f7a3 menstrual_health f6e1 menu e5d2 menu_book ea19 +menu_book_2 f291 menu_open e9bd merge eb98 merge_type e252 @@ -2151,17 +2259,57 @@ mist e188 mitre f547 mixture_med e4c8 mms e618 -mobile_friendly e200 +mobile e7ba +mobile_2 f2db +mobile_3 f2da +mobile_alert f2d3 +mobile_arrow_down f2cd +mobile_arrow_right f2d2 +mobile_arrow_up_right f2b9 +mobile_block f2e5 +mobile_camera f44e +mobile_camera_front f2c9 +mobile_camera_rear f2c8 +mobile_cancel f2ea 
+mobile_cast f2cc +mobile_charge f2e3 +mobile_chat f79f +mobile_check f073 +mobile_code f2e2 +mobile_dots f2d0 +mobile_friendly f073 +mobile_gear f2d9 mobile_hand f323 mobile_hand_left f313 mobile_hand_left_off f312 mobile_hand_off f314 +mobile_info f2dc +mobile_landscape ed3e +mobile_layout f2bf +mobile_lock_landscape f2d8 +mobile_lock_portrait f2be mobile_loupe f322 +mobile_menu f2d1 mobile_off e201 -mobile_screen_share e0e7 +mobile_question f2e1 +mobile_rotate f2d5 +mobile_rotate_lock f2d6 +mobile_screen_share f2df mobile_screensaver f321 +mobile_sensor_hi f2ef +mobile_sensor_lo f2ee +mobile_share f2df +mobile_share_stack f2de +mobile_sound f2e8 mobile_sound_2 f318 +mobile_sound_off f7aa mobile_speaker f320 +mobile_text f2eb +mobile_text_2 f2e6 +mobile_theft f2a9 +mobile_ticket f2e4 +mobile_vibrate f2cb +mobile_wrench f2b0 mobiledata_off f034 mode f097 mode_comment e253 @@ -2186,6 +2334,7 @@ money e57d money_bag f3ee money_off f038 money_off_csred f038 +money_range f245 monitor ef5b monitor_heart eaa2 monitor_weight f039 @@ -2199,6 +2348,7 @@ mood_bad e7f3 moon_stars f34f mop e28d moped eb28 +moped_package f28b more e619 more_down f196 more_horiz e5d3 @@ -2220,6 +2370,7 @@ motion_sensor_idle e783 motion_sensor_urgent e78e motorcycle e91b mountain_flag f5e2 +mountain_steam f282 mouse e323 mouse_lock f490 mouse_lock_off f48f @@ -2241,6 +2392,7 @@ movie_edit f840 movie_filter e43a movie_info e02d movie_off f499 +movie_speaker f2a3 moving e501 moving_beds e73d moving_ministry e73e @@ -2252,6 +2404,7 @@ multiple_airports efab multiple_stop f1b9 museum ea36 music_cast eb1a +music_history f2c1 music_note e405 music_note_add f391 music_off e440 @@ -2288,6 +2441,11 @@ nest_display f124 nest_display_max f125 nest_doorbell_visitor f8bd nest_eco_leaf f8be +nest_farsight_cool f27d +nest_farsight_dual f27c +nest_farsight_eco f27b +nest_farsight_heat f27a +nest_farsight_seasonal f279 nest_farsight_weather f8bf nest_found_savings f8c0 nest_gale_wifi f579 @@ -2356,7 +2514,7 @@ night_sight_max f6c3 nightlife ea62 nightlight f03d nightlight_round f03d -nights_stay ea46 +nights_stay f174 no_accounts f03e no_adult_content f8fe no_backpack f237 @@ -2410,8 +2568,9 @@ odt e6e9 offline_bolt e932 offline_pin e90a offline_pin_off f4d0 -offline_share e9c5 +offline_share f2de oil_barrel ec15 +okonomiyaki f281 on_device_training ebfd on_hub_device e6c3 oncology e114 @@ -2424,7 +2583,7 @@ open_in_full f1ce open_in_new e89e open_in_new_down f70f open_in_new_off e4f6 -open_in_phone e702 +open_in_phone f2d2 open_jam efae open_run f4b7 open_with e89f @@ -2461,10 +2620,12 @@ pacemaker e656 package e48f package_2 f569 padding e9c8 +padel f2a7 page_control e731 page_footer f383 page_header f384 page_info f614 +page_menu_ios eefb pageless f509 pages e7f9 pageview e8a0 @@ -2481,10 +2642,15 @@ panorama_photosphere e9c9 panorama_vertical e40e panorama_wide_angle e40f paragliding e50f +parent_child_dining f22d park ea63 +parking_meter f28a +parking_sign f289 +parking_valet f288 partly_cloudy_day f172 partly_cloudy_night f174 partner_exchange f7f9 +partner_heart ef2e partner_reports efaf party_mode e7fa passkey f87f @@ -2499,6 +2665,8 @@ pause_circle_filled e1a2 pause_circle_outline e1a2 pause_presentation e0ea payment e8a1 +payment_arrow_down f2c0 +payment_card f2a1 payments ef63 pedal_bike eb29 pediatrics e11d @@ -2514,12 +2682,13 @@ people ea21 people_alt ea21 people_outline ea21 percent eb58 +percent_discount f244 performance_max e51a pergola e203 perm_camera_mic e8a2 perm_contact_calendar e8a3 perm_data_setting e8a4 
-perm_device_information e8a5 +perm_device_information f2dc perm_identity f0d3 perm_media e8a7 perm_phone_msg e8a8 @@ -2539,6 +2708,7 @@ person_celebrate f7fe person_check f565 person_edit f4fa person_filled f0d3 +person_heart f290 person_off e510 person_outline f0d3 person_pin e55a @@ -2561,24 +2731,24 @@ pets e91d phishing ead7 phone f0d4 phone_alt f0d4 -phone_android e324 +phone_android f2db phone_bluetooth_speaker e61b phone_callback e649 phone_disabled e9cc phone_enabled e9cd phone_forwarded e61c phone_in_talk e61d -phone_iphone e325 +phone_iphone f2da phone_locked e61e phone_missed e61f phone_paused e620 phonelink e326 -phonelink_erase e0db -phonelink_lock e0dc -phonelink_off e327 -phonelink_ring e0dd +phonelink_erase f2ea +phonelink_lock f2be +phonelink_off f7a5 +phonelink_ring f2e8 phonelink_ring_off f7aa -phonelink_setup ef41 +phonelink_setup f2d9 photo e432 photo_album e411 photo_auto_merge f530 @@ -2596,6 +2766,7 @@ php eb8f physical_therapy e11e piano e521 piano_off e520 +pickleball f2a6 picture_as_pdf e415 picture_in_picture e8aa picture_in_picture_alt e911 @@ -2626,6 +2797,7 @@ pivot_table_chart e9ce place f1db place_item f1f0 plagiarism ea5a +plane_contrails f2ac planet f387 planner_banner_ad_pt e692 planner_review e694 @@ -2637,6 +2809,8 @@ play_lesson f047 play_music e6ee play_pause f137 play_shapes f7fc +playground f28e +playground_2 f28f playing_cards f5dc playlist_add e03b playlist_add_check e065 @@ -2818,6 +2992,7 @@ report_problem f083 request_page f22c request_quote f1b6 reset_brightness f482 +reset_exposure f266 reset_focus f481 reset_image f824 reset_iso f480 @@ -2830,6 +3005,7 @@ reset_wrench f56c resize f707 respiratory_rate e127 responsive_layout e9da +rest_area f22a restart_alt f053 restaurant e56c restaurant_menu e561 @@ -2913,11 +3089,11 @@ science_off f542 scooter f471 score e269 scoreboard ebd0 -screen_lock_landscape e1be -screen_lock_portrait e1bf -screen_lock_rotation e1c0 +screen_lock_landscape f2d8 +screen_lock_portrait f2be +screen_lock_rotation f2d6 screen_record f679 -screen_rotation e1c1 +screen_rotation f2d5 screen_rotation_alt ebee screen_rotation_up f678 screen_search_desktop ef70 @@ -2941,6 +3117,7 @@ search e8b6 search_activity f3e5 search_check f800 search_check_2 f469 +search_gear eefa search_hands_free e696 search_insights f4bc search_off ea76 @@ -2952,9 +3129,9 @@ seat_vent_left f32d seat_vent_right f32c security e32a security_key f503 -security_update f072 +security_update f2cd security_update_good f073 -security_update_warning f074 +security_update_warning f2d3 segment e94b select f74d select_all e162 @@ -2970,7 +3147,7 @@ send e163 send_and_archive ea0c send_money e8b7 send_time_extension eadb -send_to_mobile f05c +send_to_mobile f2d2 sensor_door f1b5 sensor_occupied ec10 sensor_window f1b4 @@ -3005,7 +3182,7 @@ settings_b_roll f625 settings_backup_restore e8ba settings_bluetooth e8bb settings_brightness e8bd -settings_cell e8bc +settings_cell f2d1 settings_cinematic_blur f624 settings_ethernet e8be settings_heart f522 @@ -3022,6 +3199,7 @@ settings_phone e8c5 settings_photo_camera f834 settings_power e8c6 settings_remote e8c7 +settings_seating ef2d settings_slow_motion f623 settings_suggest f05e settings_system_daydream e1c3 @@ -3042,6 +3220,7 @@ share_location f05f share_off f6cb share_reviews f8a4 share_windows f613 +shaved_ice f225 sheets_rtl f823 shelf_auto_hide f703 shelf_position f702 @@ -3052,6 +3231,7 @@ shield_locked f592 shield_moon eaa9 shield_person f650 shield_question f529 +shield_toggle f2ad shield_watch f30f 
shield_with_heart e78f shield_with_house e78d @@ -3081,6 +3261,7 @@ shutter_speed_minus f57d sick f220 side_navigation e9e2 sign_language ebe5 +sign_language_2 f258 signal_cellular_0_bar f0a8 signal_cellular_1_bar f0a9 signal_cellular_2_bar f0aa @@ -3140,9 +3321,9 @@ smart_card_reader f4a5 smart_card_reader_off f4a6 smart_display f06a smart_outlet e844 -smart_screen f06b +smart_screen f2d0 smart_toy f06c -smartphone e32c +smartphone e7ba smartphone_camera f44e smb_share f74b smoke_free eb4a @@ -3157,9 +3338,11 @@ snowing_heavy f61c snowmobile e503 snowshoeing e514 soap f1b2 +soba ef36 social_distance e1cb social_leaderboard f6a0 solar_power ec0f +solo_dining ef35 sort e164 sort_by_alpha e053 sos ebf7 @@ -3283,10 +3466,10 @@ stat_3 e69a stat_minus_1 e69b stat_minus_2 e69c stat_minus_3 e69d -stay_current_landscape e0d3 -stay_current_portrait e0d4 -stay_primary_landscape e0d5 -stay_primary_portrait e0d6 +stay_current_landscape ed3e +stay_current_portrait e7ba +stay_primary_landscape ed3e +stay_primary_portrait f2d3 steering_wheel_heat f32b step f6fe step_into f701 @@ -3340,6 +3523,7 @@ subtitles e048 subtitles_gear f355 subtitles_off ef72 subway e56f +subway_walk f287 summarize f071 sunny e81a sunny_snowing e819 @@ -3395,11 +3579,12 @@ sync_disabled e628 sync_lock eaee sync_problem e629 sync_saved_locally f820 +sync_saved_locally_off f264 syringe e133 -system_security_update f072 +system_security_update f2cd system_security_update_good f073 -system_security_update_warning f074 -system_update f072 +system_security_update_warning f2d3 +system_update f2cd system_update_alt e8d7 tab e8d8 tab_close f745 @@ -3421,9 +3606,11 @@ table_convert f3c7 table_edit f3c6 table_eye f466 table_lamp e1f2 +table_large f299 table_restaurant eac6 table_rows f101 table_rows_narrow f73f +table_sign ef2c table_view f1be tablet e32f tablet_android e330 @@ -3434,13 +3621,15 @@ tactic f564 tag e9ef tag_faces ea22 takeout_dining ea74 +takeout_dining_2 ef34 tamper_detection_off e82e tamper_detection_on f8c8 -tap_and_play e62b +tap_and_play f2cc tapas f1e9 target e719 task f075 task_alt e2e6 +tatami_seat ef33 taunt f69f taxi_alert ef74 team_dashboard e013 @@ -3507,6 +3696,7 @@ thumb_up_filled f577 thumb_up_off f577 thumb_up_off_alt f577 thumbnail_bar f734 +thumbs_up_double eefc thumbs_up_down e8dd thunderstorm ebdb tibia f89b @@ -3519,9 +3709,11 @@ time_to_leave eff7 timelapse e422 timeline e922 timer e425 +timer_1 f2af timer_10 e423 timer_10_alt_1 efbf timer_10_select f07a +timer_2 f2ae timer_3 e424 timer_3_alt_1 efc0 timer_3_select f07b @@ -3544,6 +3736,7 @@ toggle_on e9f6 token ea25 toll e8e0 tonality e427 +tonality_2 f2b4 toolbar e9f7 tools_flat_head f8cb tools_installation_kit e2ab @@ -3593,6 +3786,7 @@ transition_fade f50c transition_push f50b transition_slide f50a translate e8e2 +translate_indic f263 transportation e21d travel ef93 travel_explore e2db @@ -3637,6 +3831,7 @@ two_wheeler e9f9 type_specimen f8f0 u_turn_left eba1 u_turn_right eba2 +udon ef32 ulna_radius f89d ulna_radius_alt f89e umbrella f1ad @@ -3693,7 +3888,7 @@ vertical_distribute e076 vertical_shades ec0e vertical_shades_closed ec0d vertical_split e949 -vibration e62d +vibration f2cb video_call e070 video_camera_back f07f video_camera_back_add f40c @@ -3738,6 +3933,7 @@ view_stream e8f2 view_timeline eb85 view_week e8f3 vignette e435 +vignette_2 f2b3 villa e586 visibility e8f4 visibility_lock f653 @@ -3780,7 +3976,9 @@ warning f083 warning_amber f083 warning_off f7ad wash f1b1 +washoku f280 watch e334 +watch_arrow f2ca watch_button_press f6aa 
watch_check f468 watch_later efd6 @@ -3867,6 +4065,7 @@ window f088 window_closed e77e window_open e78c window_sensor e2bb +windshield_defrost_auto f248 windshield_defrost_front f32a windshield_defrost_rear f329 windshield_heat_front f328 @@ -3888,7 +4087,9 @@ wrap_text e25b wrist f69c wrong_location ef78 wysiwyg f1c3 +yakitori ef31 yard f089 +yoshoku f27f your_trips eb2b youtube_activity f85a youtube_searched_for e8fa diff --git a/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined-Regular.json b/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined.json similarity index 93% rename from client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined-Regular.json rename to client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined.json index 2eb48b234b..a9a95206e6 100644 --- a/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined-Regular.json +++ b/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined.json @@ -86,12 +86,13 @@ "account_tree": 59770, "action_key": 62722, "activity_zone": 57830, + "acupuncture": 62148, "acute": 58571, "ad": 58970, "ad_group": 58971, "ad_group_off": 60133, "ad_off": 63410, - "ad_units": 61241, + "ad_units": 62187, "adaptive_audio_mic": 62668, "adaptive_audio_mic_off": 62667, "adb": 58894, @@ -128,7 +129,7 @@ "add_shopping_cart": 59476, "add_task": 62010, "add_to_drive": 58972, - "add_to_home_screen": 57854, + "add_to_home_screen": 62137, "add_to_photos": 58269, "add_to_queue": 57436, "add_triangle": 62606, @@ -209,10 +210,36 @@ "analytics": 61246, "anchor": 61901, "android": 59481, + "android_cell_4_bar": 61190, + "android_cell_4_bar_alert": 61193, + "android_cell_4_bar_off": 61192, + "android_cell_4_bar_plus": 61191, + "android_cell_5_bar": 61186, + "android_cell_5_bar_alert": 61189, + "android_cell_5_bar_off": 61188, + "android_cell_5_bar_plus": 61187, + "android_cell_dual_4_bar": 61197, + "android_cell_dual_4_bar_alert": 61199, + "android_cell_dual_4_bar_plus": 61198, + "android_cell_dual_5_bar": 61194, + "android_cell_dual_5_bar_alert": 61196, + "android_cell_dual_5_bar_plus": 61195, + "android_wifi_3_bar": 61206, + "android_wifi_3_bar_alert": 61211, + "android_wifi_3_bar_lock": 61210, + "android_wifi_3_bar_off": 61209, + "android_wifi_3_bar_plus": 61208, + "android_wifi_3_bar_question": 61207, + "android_wifi_4_bar": 61200, + "android_wifi_4_bar_alert": 61205, + "android_wifi_4_bar_lock": 61204, + "android_wifi_4_bar_off": 61203, + "android_wifi_4_bar_plus": 61202, + "android_wifi_4_bar_question": 61201, "animated_images": 62618, "animation": 59164, "announcement": 59519, - "aod": 61402, + "aod": 62182, "aod_tablet": 63647, "aod_watch": 63148, "apartment": 59968, @@ -220,14 +247,15 @@ "apk_document": 63630, "apk_install": 63631, "app_badging": 63279, - "app_blocking": 61247, - "app_promo": 59777, + "app_blocking": 62181, + "app_promo": 62157, "app_registration": 61248, - "app_settings_alt": 61249, - "app_shortcut": 60132, + "app_settings_alt": 62169, + "app_shortcut": 62175, "apparel": 61307, "approval": 59778, "approval_delegation": 63562, + "approval_delegation_off": 62149, "apps": 58819, "apps_outage": 59340, "aq": 62810, @@ -266,6 +294,9 @@ "arrow_right": 58847, "arrow_right_alt": 59713, "arrow_selector_tool": 63535, + "arrow_shape_up": 61174, + "arrow_shape_up_stack": 61175, + "arrow_shape_up_stack_2": 61176, "arrow_split": 59908, "arrow_top_left": 63278, "arrow_top_right": 63277, @@ -288,6 
+319,7 @@ "assessment": 61644, "assignment": 59485, "assignment_add": 63560, + "assignment_globe": 61164, "assignment_ind": 59486, "assignment_late": 59487, "assignment_return": 59488, @@ -337,6 +369,7 @@ "auto_read_play": 61974, "auto_schedule": 57876, "auto_stories": 58982, + "auto_stories_off": 62055, "auto_timer": 61311, "auto_towing": 59166, "auto_transmission": 62783, @@ -353,6 +386,7 @@ "avc": 62639, "avg_pace": 63163, "avg_time": 63507, + "award_meal": 62017, "award_star": 62994, "azm": 63212, "baby_changing_station": 61851, @@ -371,6 +405,7 @@ "backup_table": 61251, "badge": 60007, "badge_critical_battery": 61782, + "badminton": 62120, "bakery_dining": 59987, "balance": 60150, "balcony": 58767, @@ -383,9 +418,11 @@ "barcode_scanner": 59148, "barefoot": 63601, "batch_prediction": 61685, + "bath_bedrock": 62086, "bath_outdoor": 63227, "bath_private": 63226, "bath_public_large": 63225, + "bath_soak": 62112, "bathroom": 61405, "bathtub": 59969, "battery_0_bar": 60380, @@ -411,6 +448,19 @@ "battery_android_6": 62215, "battery_android_alert": 62214, "battery_android_bolt": 62213, + "battery_android_frame_1": 62039, + "battery_android_frame_2": 62038, + "battery_android_frame_3": 62037, + "battery_android_frame_4": 62036, + "battery_android_frame_5": 62035, + "battery_android_frame_6": 62034, + "battery_android_frame_alert": 62033, + "battery_android_frame_bolt": 62032, + "battery_android_frame_full": 62031, + "battery_android_frame_plus": 62030, + "battery_android_frame_question": 62029, + "battery_android_frame_share": 62028, + "battery_android_frame_shield": 62027, "battery_android_full": 62212, "battery_android_plus": 62211, "battery_android_question": 62210, @@ -450,6 +500,7 @@ "bedtime": 61785, "bedtime_off": 60278, "beenhere": 58669, + "beer_meal": 62085, "bento": 61940, "bia": 63211, "bid_landscape": 59000, @@ -491,7 +542,7 @@ "book_4": 62780, "book_5": 62779, "book_6": 62431, - "book_online": 61975, + "book_online": 62180, "book_ribbon": 62439, "bookmark": 59623, "bookmark_add": 58776, @@ -538,6 +589,7 @@ "breaking_news_alt_1": 61626, "breastfeeding": 63574, "brick": 62344, + "briefcase_meal": 62022, "brightness_1": 58362, "brightness_2": 61494, "brightness_3": 58280, @@ -565,6 +617,7 @@ "bubble": 61315, "bubble_chart": 59101, "bubbles": 63054, + "bucket_check": 61226, "bug_report": 59496, "build": 63693, "build_circle": 61256, @@ -587,7 +640,11 @@ "calculate": 59999, "calendar_add_on": 61317, "calendar_apps_script": 61627, + "calendar_check": 62019, "calendar_clock": 62784, + "calendar_lock": 62018, + "calendar_meal": 62102, + "calendar_meal_2": 62016, "calendar_month": 60364, "calendar_today": 59701, "calendar_view_day": 59702, @@ -608,10 +665,10 @@ "camera": 58287, "camera_alt": 58386, "camera_enhance": 59644, - "camera_front": 58289, + "camera_front": 62153, "camera_indoor": 61417, "camera_outdoor": 61418, - "camera_rear": 58290, + "camera_rear": 62152, "camera_roll": 58291, "camera_video": 63398, "cameraswitch": 61419, @@ -629,7 +686,9 @@ "car_defrost_left": 62276, "car_defrost_low_left": 62275, "car_defrost_low_right": 62274, + "car_defrost_mid_left": 62072, "car_defrost_mid_low_left": 62273, + "car_defrost_mid_low_right": 62071, "car_defrost_mid_right": 62272, "car_defrost_right": 62271, "car_fan_low_left": 62270, @@ -675,17 +734,21 @@ "center_focus_weak": 58293, "chair": 61421, "chair_alt": 61422, + "chair_counter": 62111, + "chair_fireplace": 62110, + "chair_umbrella": 62109, "chalet": 58757, "change_circle": 58087, "change_history": 59499, "charger": 58030, - 
"charging_station": 61853, + "charging_station": 62179, "chart_data": 58483, "chat": 57545, "chat_add_on": 61683, "chat_apps_script": 61629, "chat_bubble": 57547, "chat_bubble_outline": 57547, + "chat_dashed": 61165, "chat_error": 63404, "chat_info": 62763, "chat_paste_go": 63165, @@ -696,6 +759,7 @@ "check_circle": 61630, "check_circle_filled": 61630, "check_circle_outline": 61630, + "check_circle_unread": 62078, "check_in_out": 63222, "check_indeterminate_small": 63626, "check_small": 63627, @@ -708,13 +772,22 @@ "cheer": 63144, "chef_hat": 62295, "chess": 62951, + "chess_bishop": 62049, + "chess_bishop_2": 62050, + "chess_king": 62047, + "chess_king_2": 62048, + "chess_knight": 62046, "chess_pawn": 62390, + "chess_pawn_2": 62045, + "chess_queen": 62044, + "chess_rook": 62043, "chevron_backward": 62571, "chevron_forward": 62570, "chevron_left": 58827, "chevron_right": 58828, "child_care": 60225, "child_friendly": 60226, + "child_hat": 61232, "chip_extraction": 63521, "chips": 59795, "chrome_reader_mode": 59501, @@ -840,6 +913,7 @@ "control_point": 58298, "control_point_duplicate": 58299, "controller_gen": 59453, + "conversation": 61231, "conversion_path": 61633, "conversion_path_off": 63412, "convert_to_text": 62495, @@ -985,13 +1059,13 @@ "developer_board": 58125, "developer_board_off": 58623, "developer_guide": 59806, - "developer_mode": 57776, + "developer_mode": 62178, "developer_mode_tv": 59508, "device_band": 62197, "device_hub": 58165, "device_reset": 59571, "device_thermostat": 57855, - "device_unknown": 58169, + "device_unknown": 62177, "devices": 58150, "devices_fold": 60382, "devices_fold_2": 62470, @@ -1005,10 +1079,14 @@ "dialogs": 59807, "dialpad": 57532, "diamond": 60117, + "diamond_shine": 62130, "dictionary": 62777, "difference": 60285, "digital_out_of_home": 61918, "digital_wellbeing": 61318, + "dine_heart": 62108, + "dine_in": 62101, + "dine_lamp": 62107, "dining": 61428, "dinner_dining": 59991, "directions": 58670, @@ -1058,7 +1136,7 @@ "do_not_disturb_on_total_silence": 61435, "do_not_step": 61855, "do_not_touch": 61872, - "dock": 58126, + "dock": 62176, "dock_to_bottom": 63462, "dock_to_left": 63461, "dock_to_right": 63460, @@ -1113,6 +1191,8 @@ "drive_file_rename_outline": 59810, "drive_folder_upload": 59811, "drive_fusiontable": 59000, + "drone": 62042, + "drone_2": 62041, "dropdown": 59812, "dropper_eye": 62289, "dry": 61875, @@ -1140,8 +1220,8 @@ "ecg_heart": 63209, "eco": 59957, "eda": 63208, - "edgesensor_high": 61445, - "edgesensor_low": 61446, + "edgesensor_high": 62191, + "edgesensor_low": 62190, "edit": 61591, "edit_arrow_down": 62336, "edit_arrow_up": 62335, @@ -1267,6 +1347,8 @@ "extension_off": 58613, "eye_tracking": 62665, "eyeglasses": 63214, + "eyeglasses_2": 62151, + "eyeglasses_2_sound": 62053, "face": 61448, "face_2": 63706, "face_3": 63707, @@ -1286,6 +1368,7 @@ "factory": 60348, "falling": 62989, "familiar_face_and_zone": 57884, + "family_group": 61170, "family_history": 57517, "family_home": 60198, "family_link": 60185, @@ -1380,6 +1463,7 @@ "fit_width": 63353, "fitness_center": 60227, "fitness_tracker": 62563, + "fitness_trackers": 61169, "flag": 61638, "flag_2": 62479, "flag_check": 62424, @@ -1516,6 +1600,8 @@ "forward_to_inbox": 61831, "foundation": 61952, "fragrance": 62277, + "frame_bug": 61167, + "frame_exclamation": 61166, "frame_inspect": 63346, "frame_person": 63654, "frame_person_mic": 62677, @@ -1542,8 +1628,10 @@ "gamepad": 58127, "games": 58127, "garage": 61457, + "garage_check": 62093, "garage_door": 59156, "garage_home": 59437, 
+ "garage_money": 62092, "garden_cart": 63657, "gas_meter": 60441, "gastroenterology": 57585, @@ -1622,9 +1710,12 @@ "h_plus_mobiledata_badge": 63455, "hail": 59825, "hallway": 59128, + "hanami_dango": 62015, "hand_bones": 63636, "hand_gesture": 61340, "hand_gesture_off": 62451, + "hand_meal": 62100, + "hand_package": 62099, "handheld_controller": 62662, "handshake": 60363, "handwriting_recognition": 60162, @@ -1656,6 +1747,7 @@ "healing": 58355, "health_and_beauty": 61341, "health_and_safety": 57813, + "health_cross": 62147, "health_metrics": 63202, "heap_snapshot_large": 63342, "heap_snapshot_multiple": 63341, @@ -1663,11 +1755,14 @@ "hearing": 57379, "hearing_aid": 62564, "hearing_aid_disabled": 62384, + "hearing_aid_disabled_left": 62188, + "hearing_aid_left": 62189, "hearing_disabled": 61700, "heart_broken": 60098, "heart_check": 62986, "heart_minus": 63619, "heart_plus": 63620, + "heart_smile": 62098, "heat": 62775, "heat_pump": 60440, "heat_pump_balance": 57982, @@ -1683,6 +1778,7 @@ "hide": 61342, "hide_image": 61474, "hide_source": 61475, + "high_chair": 62106, "high_density": 63388, "high_quality": 57380, "high_res": 62795, @@ -1771,6 +1867,7 @@ "image": 58356, "image_arrow_up": 62231, "image_aspect_ratio": 58357, + "image_inset": 62023, "image_not_supported": 61718, "image_search": 58431, "imagesearch_roller": 59828, @@ -1816,7 +1913,7 @@ "insert_text": 63527, "insights": 61586, "install_desktop": 60273, - "install_mobile": 60274, + "install_mobile": 62157, "instant_mix": 57382, "integration_instructions": 61268, "interactive_space": 63487, @@ -1831,6 +1928,8 @@ "iron": 58755, "iso": 58358, "jamboard_kiosk": 59829, + "japanese_curry": 62084, + "japanese_flag": 62083, "javascript": 60284, "join": 63567, "join_full": 63567, @@ -1839,6 +1938,7 @@ "join_right": 60138, "joystick": 62958, "jump_to_element": 63257, + "kanji_alcohol": 62014, "kayaking": 58636, "kebab_dining": 59458, "keep": 61478, @@ -2066,9 +2166,11 @@ "magnify_docked": 63446, "magnify_fullscreen": 63445, "mail": 57689, + "mail_asterisk": 61172, "mail_lock": 60426, "mail_off": 62603, "mail_outline": 57689, + "mail_shield": 62025, "male": 58766, "man": 58603, "man_2": 63713, @@ -2080,6 +2182,8 @@ "manga": 62947, "manufacturing": 59174, "map": 58715, + "map_pin_heart": 62104, + "map_pin_review": 62103, "map_search": 62410, "maps_home_work": 61488, "maps_ugc": 61272, @@ -2098,11 +2202,14 @@ "masked_transitions": 59182, "masked_transitions_add": 62507, "masks": 61976, + "massage": 62146, "match_case": 63217, "match_case_off": 62319, "match_word": 63216, "matter": 59655, "maximize": 59696, + "meal_dinner": 62013, + "meal_lunch": 62012, "measuring_tape": 63151, "media_bluetooth_off": 61489, "media_bluetooth_on": 61490, @@ -2121,6 +2228,7 @@ "menstrual_health": 63201, "menu": 58834, "menu_book": 59929, + "menu_book_2": 62097, "menu_open": 59837, "merge": 60312, "merge_type": 57938, @@ -2152,17 +2260,57 @@ "mitre": 62791, "mixture_med": 58568, "mms": 58904, - "mobile_friendly": 57856, + "mobile": 59322, + "mobile_2": 62171, + "mobile_3": 62170, + "mobile_alert": 62163, + "mobile_arrow_down": 62157, + "mobile_arrow_right": 62162, + "mobile_arrow_up_right": 62137, + "mobile_block": 62181, + "mobile_camera": 62542, + "mobile_camera_front": 62153, + "mobile_camera_rear": 62152, + "mobile_cancel": 62186, + "mobile_cast": 62156, + "mobile_charge": 62179, + "mobile_chat": 63391, + "mobile_check": 61555, + "mobile_code": 62178, + "mobile_dots": 62160, + "mobile_friendly": 61555, + "mobile_gear": 62169, "mobile_hand": 62243, 
"mobile_hand_left": 62227, "mobile_hand_left_off": 62226, "mobile_hand_off": 62228, + "mobile_info": 62172, + "mobile_landscape": 60734, + "mobile_layout": 62143, + "mobile_lock_landscape": 62168, + "mobile_lock_portrait": 62142, "mobile_loupe": 62242, + "mobile_menu": 62161, "mobile_off": 57857, - "mobile_screen_share": 57575, + "mobile_question": 62177, + "mobile_rotate": 62165, + "mobile_rotate_lock": 62166, + "mobile_screen_share": 62175, "mobile_screensaver": 62241, + "mobile_sensor_hi": 62191, + "mobile_sensor_lo": 62190, + "mobile_share": 62175, + "mobile_share_stack": 62174, + "mobile_sound": 62184, "mobile_sound_2": 62232, + "mobile_sound_off": 63402, "mobile_speaker": 62240, + "mobile_text": 62187, + "mobile_text_2": 62182, + "mobile_theft": 62121, + "mobile_ticket": 62180, + "mobile_vibrate": 62155, + "mobile_wrench": 62128, "mobiledata_off": 61492, "mode": 61591, "mode_comment": 57939, @@ -2187,6 +2335,7 @@ "money_bag": 62446, "money_off": 61496, "money_off_csred": 61496, + "money_range": 62021, "monitor": 61275, "monitor_heart": 60066, "monitor_weight": 61497, @@ -2200,6 +2349,7 @@ "moon_stars": 62287, "mop": 57997, "moped": 60200, + "moped_package": 62091, "more": 58905, "more_down": 61846, "more_horiz": 58835, @@ -2221,6 +2371,7 @@ "motion_sensor_urgent": 59278, "motorcycle": 59675, "mountain_flag": 62946, + "mountain_steam": 62082, "mouse": 58147, "mouse_lock": 62608, "mouse_lock_off": 62607, @@ -2242,6 +2393,7 @@ "movie_filter": 58426, "movie_info": 57389, "movie_off": 62617, + "movie_speaker": 62115, "moving": 58625, "moving_beds": 59197, "moving_ministry": 59198, @@ -2253,6 +2405,7 @@ "multiple_stop": 61881, "museum": 59958, "music_cast": 60186, + "music_history": 62145, "music_note": 58373, "music_note_add": 62353, "music_off": 58432, @@ -2289,6 +2442,11 @@ "nest_display_max": 61733, "nest_doorbell_visitor": 63677, "nest_eco_leaf": 63678, + "nest_farsight_cool": 62077, + "nest_farsight_dual": 62076, + "nest_farsight_eco": 62075, + "nest_farsight_heat": 62074, + "nest_farsight_seasonal": 62073, "nest_farsight_weather": 63679, "nest_found_savings": 63680, "nest_gale_wifi": 62841, @@ -2357,7 +2515,7 @@ "nightlife": 60002, "nightlight": 61501, "nightlight_round": 61501, - "nights_stay": 59974, + "nights_stay": 61812, "no_accounts": 61502, "no_adult_content": 63742, "no_backpack": 62007, @@ -2411,8 +2569,9 @@ "offline_bolt": 59698, "offline_pin": 59658, "offline_pin_off": 62672, - "offline_share": 59845, + "offline_share": 62174, "oil_barrel": 60437, + "okonomiyaki": 62081, "on_device_training": 60413, "on_hub_device": 59075, "oncology": 57620, @@ -2425,7 +2584,7 @@ "open_in_new": 59550, "open_in_new_down": 63247, "open_in_new_off": 58614, - "open_in_phone": 59138, + "open_in_phone": 62162, "open_jam": 61358, "open_run": 62647, "open_with": 59551, @@ -2462,10 +2621,12 @@ "package": 58511, "package_2": 62825, "padding": 59848, + "padel": 62119, "page_control": 59185, "page_footer": 62339, "page_header": 62340, "page_info": 62996, + "page_menu_ios": 61179, "pageless": 62729, "pages": 59385, "pageview": 59552, @@ -2482,10 +2643,15 @@ "panorama_vertical": 58382, "panorama_wide_angle": 58383, "paragliding": 58639, + "parent_child_dining": 61997, "park": 60003, + "parking_meter": 62090, + "parking_sign": 62089, + "parking_valet": 62088, "partly_cloudy_day": 61810, "partly_cloudy_night": 61812, "partner_exchange": 63481, + "partner_heart": 61230, "partner_reports": 61359, "party_mode": 59386, "passkey": 63615, @@ -2500,6 +2666,8 @@ "pause_circle_outline": 57762, 
"pause_presentation": 57578, "payment": 59553, + "payment_arrow_down": 62144, + "payment_card": 62113, "payments": 61283, "pedal_bike": 60201, "pediatrics": 57629, @@ -2515,12 +2683,13 @@ "people_alt": 59937, "people_outline": 59937, "percent": 60248, + "percent_discount": 62020, "performance_max": 58650, "pergola": 57859, "perm_camera_mic": 59554, "perm_contact_calendar": 59555, "perm_data_setting": 59556, - "perm_device_information": 59557, + "perm_device_information": 62172, "perm_identity": 61651, "perm_media": 59559, "perm_phone_msg": 59560, @@ -2540,6 +2709,7 @@ "person_check": 62821, "person_edit": 62714, "person_filled": 61651, + "person_heart": 62096, "person_off": 58640, "person_outline": 61651, "person_pin": 58714, @@ -2562,24 +2732,24 @@ "phishing": 60119, "phone": 61652, "phone_alt": 61652, - "phone_android": 58148, + "phone_android": 62171, "phone_bluetooth_speaker": 58907, "phone_callback": 58953, "phone_disabled": 59852, "phone_enabled": 59853, "phone_forwarded": 58908, "phone_in_talk": 58909, - "phone_iphone": 58149, + "phone_iphone": 62170, "phone_locked": 58910, "phone_missed": 58911, "phone_paused": 58912, "phonelink": 58150, - "phonelink_erase": 57563, - "phonelink_lock": 57564, - "phonelink_off": 58151, - "phonelink_ring": 57565, + "phonelink_erase": 62186, + "phonelink_lock": 62142, + "phonelink_off": 63397, + "phonelink_ring": 62184, "phonelink_ring_off": 63402, - "phonelink_setup": 61249, + "phonelink_setup": 62169, "photo": 58418, "photo_album": 58385, "photo_auto_merge": 62768, @@ -2597,6 +2767,7 @@ "physical_therapy": 57630, "piano": 58657, "piano_off": 58656, + "pickleball": 62118, "picture_as_pdf": 58389, "picture_in_picture": 59562, "picture_in_picture_alt": 59665, @@ -2627,6 +2798,7 @@ "place": 61915, "place_item": 61936, "plagiarism": 59994, + "plane_contrails": 62124, "planet": 62343, "planner_banner_ad_pt": 59026, "planner_review": 59028, @@ -2638,6 +2810,8 @@ "play_music": 59118, "play_pause": 61751, "play_shapes": 63484, + "playground": 62094, + "playground_2": 62095, "playing_cards": 62940, "playlist_add": 57403, "playlist_add_check": 57445, @@ -2819,6 +2993,7 @@ "request_page": 61996, "request_quote": 61878, "reset_brightness": 62594, + "reset_exposure": 62054, "reset_focus": 62593, "reset_image": 63524, "reset_iso": 62592, @@ -2831,6 +3006,7 @@ "resize": 63239, "respiratory_rate": 57639, "responsive_layout": 59866, + "rest_area": 61994, "restart_alt": 61523, "restaurant": 58732, "restaurant_menu": 58721, @@ -2914,11 +3090,11 @@ "scooter": 62577, "score": 57961, "scoreboard": 60368, - "screen_lock_landscape": 57790, - "screen_lock_portrait": 57791, - "screen_lock_rotation": 57792, + "screen_lock_landscape": 62168, + "screen_lock_portrait": 62142, + "screen_lock_rotation": 62166, "screen_record": 63097, - "screen_rotation": 57793, + "screen_rotation": 62165, "screen_rotation_alt": 60398, "screen_rotation_up": 63096, "screen_search_desktop": 61296, @@ -2942,6 +3118,7 @@ "search_activity": 62437, "search_check": 63488, "search_check_2": 62569, + "search_gear": 61178, "search_hands_free": 59030, "search_insights": 62652, "search_off": 60022, @@ -2953,9 +3130,9 @@ "seat_vent_right": 62252, "security": 58154, "security_key": 62723, - "security_update": 61554, + "security_update": 62157, "security_update_good": 61555, - "security_update_warning": 61556, + "security_update_warning": 62163, "segment": 59723, "select": 63309, "select_all": 57698, @@ -2971,7 +3148,7 @@ "send_and_archive": 59916, "send_money": 59575, "send_time_extension": 60123, - 
"send_to_mobile": 61532, + "send_to_mobile": 62162, "sensor_door": 61877, "sensor_occupied": 60432, "sensor_window": 61876, @@ -3006,7 +3183,7 @@ "settings_backup_restore": 59578, "settings_bluetooth": 59579, "settings_brightness": 59581, - "settings_cell": 59580, + "settings_cell": 62161, "settings_cinematic_blur": 63012, "settings_ethernet": 59582, "settings_heart": 62754, @@ -3023,6 +3200,7 @@ "settings_photo_camera": 63540, "settings_power": 59590, "settings_remote": 59591, + "settings_seating": 61229, "settings_slow_motion": 63011, "settings_suggest": 61534, "settings_system_daydream": 57795, @@ -3043,6 +3221,7 @@ "share_off": 63179, "share_reviews": 63652, "share_windows": 62995, + "shaved_ice": 61989, "sheets_rtl": 63523, "shelf_auto_hide": 63235, "shelf_position": 63234, @@ -3053,6 +3232,7 @@ "shield_moon": 60073, "shield_person": 63056, "shield_question": 62761, + "shield_toggle": 62125, "shield_watch": 62223, "shield_with_heart": 59279, "shield_with_house": 59277, @@ -3082,6 +3262,7 @@ "sick": 61984, "side_navigation": 59874, "sign_language": 60389, + "sign_language_2": 62040, "signal_cellular_0_bar": 61608, "signal_cellular_1_bar": 61609, "signal_cellular_2_bar": 61610, @@ -3141,9 +3322,9 @@ "smart_card_reader_off": 62630, "smart_display": 61546, "smart_outlet": 59460, - "smart_screen": 61547, + "smart_screen": 62160, "smart_toy": 61548, - "smartphone": 58156, + "smartphone": 59322, "smartphone_camera": 62542, "smb_share": 63307, "smoke_free": 60234, @@ -3158,9 +3339,11 @@ "snowmobile": 58627, "snowshoeing": 58644, "soap": 61874, + "soba": 61238, "social_distance": 57803, "social_leaderboard": 63136, "solar_power": 60431, + "solo_dining": 61237, "sort": 57700, "sort_by_alpha": 57427, "sos": 60407, @@ -3284,10 +3467,10 @@ "stat_minus_1": 59035, "stat_minus_2": 59036, "stat_minus_3": 59037, - "stay_current_landscape": 57555, - "stay_current_portrait": 57556, - "stay_primary_landscape": 57557, - "stay_primary_portrait": 57558, + "stay_current_landscape": 60734, + "stay_current_portrait": 59322, + "stay_primary_landscape": 60734, + "stay_primary_portrait": 62163, "steering_wheel_heat": 62251, "step": 63230, "step_into": 63233, @@ -3341,6 +3524,7 @@ "subtitles_gear": 62293, "subtitles_off": 61298, "subway": 58735, + "subway_walk": 62087, "summarize": 61553, "sunny": 59418, "sunny_snowing": 59417, @@ -3396,11 +3580,12 @@ "sync_lock": 60142, "sync_problem": 58921, "sync_saved_locally": 63520, + "sync_saved_locally_off": 62052, "syringe": 57651, - "system_security_update": 61554, + "system_security_update": 62157, "system_security_update_good": 61555, - "system_security_update_warning": 61556, - "system_update": 61554, + "system_security_update_warning": 62163, + "system_update": 62157, "system_update_alt": 59607, "tab": 59608, "tab_close": 63301, @@ -3422,9 +3607,11 @@ "table_edit": 62406, "table_eye": 62566, "table_lamp": 57842, + "table_large": 62105, "table_restaurant": 60102, "table_rows": 61697, "table_rows_narrow": 63295, + "table_sign": 61228, "table_view": 61886, "tablet": 58159, "tablet_android": 58160, @@ -3435,13 +3622,15 @@ "tag": 59887, "tag_faces": 59938, "takeout_dining": 60020, + "takeout_dining_2": 61236, "tamper_detection_off": 59438, "tamper_detection_on": 63688, - "tap_and_play": 58923, + "tap_and_play": 62156, "tapas": 61929, "target": 59161, "task": 61557, "task_alt": 58086, + "tatami_seat": 61235, "taunt": 63135, "taxi_alert": 61300, "team_dashboard": 57363, @@ -3508,6 +3697,7 @@ "thumb_up_off": 62839, "thumb_up_off_alt": 62839, "thumbnail_bar": 63284, + 
"thumbs_up_double": 61180, "thumbs_up_down": 59613, "thunderstorm": 60379, "tibia": 63643, @@ -3520,9 +3710,11 @@ "timelapse": 58402, "timeline": 59682, "timer": 58405, + "timer_1": 62127, "timer_10": 58403, "timer_10_alt_1": 61375, "timer_10_select": 61562, + "timer_2": 62126, "timer_3": 58404, "timer_3_alt_1": 61376, "timer_3_select": 61563, @@ -3545,6 +3737,7 @@ "token": 59941, "toll": 59616, "tonality": 58407, + "tonality_2": 62132, "toolbar": 59895, "tools_flat_head": 63691, "tools_installation_kit": 58027, @@ -3594,6 +3787,7 @@ "transition_push": 62731, "transition_slide": 62730, "translate": 59618, + "translate_indic": 62051, "transportation": 57885, "travel": 61331, "travel_explore": 58075, @@ -3638,6 +3832,7 @@ "type_specimen": 63728, "u_turn_left": 60321, "u_turn_right": 60322, + "udon": 61234, "ulna_radius": 63645, "ulna_radius_alt": 63646, "umbrella": 61869, @@ -3694,7 +3889,7 @@ "vertical_shades": 60430, "vertical_shades_closed": 60429, "vertical_split": 59721, - "vibration": 58925, + "vibration": 62155, "video_call": 57456, "video_camera_back": 61567, "video_camera_back_add": 62476, @@ -3739,6 +3934,7 @@ "view_timeline": 60293, "view_week": 59635, "vignette": 58421, + "vignette_2": 62131, "villa": 58758, "visibility": 59636, "visibility_lock": 63059, @@ -3781,7 +3977,9 @@ "warning_amber": 61571, "warning_off": 63405, "wash": 61873, + "washoku": 62080, "watch": 58164, + "watch_arrow": 62154, "watch_button_press": 63146, "watch_check": 62568, "watch_later": 61398, @@ -3868,6 +4066,7 @@ "window_closed": 59262, "window_open": 59276, "window_sensor": 58043, + "windshield_defrost_auto": 62024, "windshield_defrost_front": 62250, "windshield_defrost_rear": 62249, "windshield_heat_front": 62248, @@ -3889,7 +4088,9 @@ "wrist": 63132, "wrong_location": 61304, "wysiwyg": 61891, + "yakitori": 61233, "yard": 61577, + "yoshoku": 62079, "your_trips": 60203, "youtube_activity": 63578, "youtube_searched_for": 59642, diff --git a/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined.ttf b/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined.ttf new file mode 100644 index 0000000000..1b940f6148 Binary files /dev/null and b/client/ayon_core/vendor/python/qtmaterialsymbols/resources/MaterialSymbolsOutlined.ttf differ diff --git a/client/ayon_core/vendor/python/qtmaterialsymbols/resources/__init__.py b/client/ayon_core/vendor/python/qtmaterialsymbols/resources/__init__.py index 581b37c213..6da4c6986b 100644 --- a/client/ayon_core/vendor/python/qtmaterialsymbols/resources/__init__.py +++ b/client/ayon_core/vendor/python/qtmaterialsymbols/resources/__init__.py @@ -5,32 +5,12 @@ CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) def get_font_filepath( - font_name: Optional[str] = "MaterialSymbolsOutlined-Regular" + font_name: Optional[str] = "MaterialSymbolsOutlined" ) -> str: return os.path.join(CURRENT_DIR, f"{font_name}.ttf") def get_mapping_filepath( - font_name: Optional[str] = "MaterialSymbolsOutlined-Regular" + font_name: Optional[str] = "MaterialSymbolsOutlined" ) -> str: return os.path.join(CURRENT_DIR, f"{font_name}.json") - - -def regenerate_mapping(): - """Regenerate the MaterialSymbolsOutlined.json file, assuming - MaterialSymbolsOutlined.codepoints and the TrueType font file have been - updated to support the new symbols. 
- """ - import json - jfile = get_mapping_filepath() - cpfile = jfile.replace(".json", ".codepoints") - with open(cpfile, "r") as cpf: - codepoints = cpf.read() - - mapping = {} - for cp in codepoints.splitlines(): - name, code = cp.split() - mapping[name] = int(f"0x{code}", 16) - - with open(jfile, "w") as jf: - json.dump(mapping, jf, indent=4) diff --git a/client/ayon_core/version.py b/client/ayon_core/version.py index 509c4a8d14..6aa30b935a 100644 --- a/client/ayon_core/version.py +++ b/client/ayon_core/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring AYON addon 'core' version.""" -__version__ = "1.4.1+dev" +__version__ = "1.6.7+dev" diff --git a/client/pyproject.toml b/client/pyproject.toml index 6416d9b8e1..c98591b707 100644 --- a/client/pyproject.toml +++ b/client/pyproject.toml @@ -19,3 +19,6 @@ OpenTimelineIO = "0.16.0" opencolorio = "^2.3.2,<2.4.0" Pillow = "9.5.0" websocket-client = ">=0.40.0,<2" + +[ayon.runtimeDependencies.darwin] +pyobjc-core = "^11.1" diff --git a/mkdocs.yml b/mkdocs.yml index 8e4c2663bc..a3b89b5455 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -11,12 +11,12 @@ theme: - media: "(prefers-color-scheme: dark)" scheme: slate toggle: - icon: material/toggle-switch-off-outline + icon: material/weather-sunny name: Switch to light mode - media: "(prefers-color-scheme: light)" scheme: default toggle: - icon: material/toggle-switch + icon: material/weather-night name: Switch to dark mode logo: img/ay-symbol-blackw-full.png favicon: img/favicon.ico diff --git a/mkdocs_requirements.txt b/mkdocs_requirements.txt new file mode 100644 index 0000000000..829d02951a --- /dev/null +++ b/mkdocs_requirements.txt @@ -0,0 +1,9 @@ +mkdocs-material >= 9.6.7 +mkdocs-autoapi >= 0.4.0 +mkdocstrings-python >= 1.16.2 +mkdocs-minify-plugin >= 0.8.0 +markdown-checklist >= 0.4.4 +mdx-gh-links >= 0.4 +pymdown-extensions >= 10.14.3 +mike >= 2.1.3 +mkdocstrings-shell >= 1.0.2 diff --git a/package.py b/package.py index 039bf0379c..ff3fad5b19 100644 --- a/package.py +++ b/package.py @@ -1,11 +1,13 @@ name = "core" title = "Core" -version = "1.4.1+dev" +version = "1.6.7+dev" client_dir = "ayon_core" plugin_for = ["ayon_server"] +project_can_override_addon_version = True + ayon_server_version = ">=1.8.4,<2.0.0" ayon_launcher_version = ">=1.0.2" ayon_required_addons = {} diff --git a/pyproject.toml b/pyproject.toml index 9609729420..6656f15249 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ [tool.poetry] name = "ayon-core" -version = "1.4.1+dev" +version = "1.6.7+dev" description = "" authors = ["Ynput Team "] readme = "README.md" @@ -19,6 +19,7 @@ python = ">=3.9.1,<3.10" pytest = "^8.0" pytest-print = "^1.0" ayon-python-api = "^1.0" +arrow = "0.17.0" # linting dependencies ruff = "^0.11.7" pre-commit = "^4" @@ -26,17 +27,6 @@ codespell = "^2.2.6" semver = "^3.0.2" mypy = "^1.14.0" mock = "^5.0.0" -tomlkit = "^0.13.2" -requests = "^2.32.3" -mkdocs-material = "^9.6.7" -mkdocs-autoapi = "^0.4.0" -mkdocstrings-python = "^1.16.2" -mkdocs-minify-plugin = "^0.8.0" -markdown-checklist = "^0.4.4" -mdx-gh-links = "^0.4" -pymdown-extensions = "^10.14.3" -mike = "^2.1.3" -mkdocstrings-shell = "^1.0.2" nxtools = "^1.6" [tool.poetry.group.test.dependencies] diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index d690d79607..ee422a0acf 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -747,6 +747,11 @@ class ExtractReviewProfileModel(BaseSettingsModel): hosts: list[str] = SettingsField( 
default_factory=list, title="Host names" ) + task_types: list[str] = SettingsField( + default_factory=list, + title="Task Types", + enum_resolver=task_types_enum, + ) outputs: list[ExtractReviewOutputDefModel] = SettingsField( default_factory=list, title="Output Definitions" ) @@ -1348,6 +1353,7 @@ DEFAULT_PUBLISH_VALUES = { { "product_types": [], "hosts": [], + "task_types": [], "outputs": [ { "name": "png", diff --git a/server/settings/tools.py b/server/settings/tools.py index 815ef40f8e..f40c7c3627 100644 --- a/server/settings/tools.py +++ b/server/settings/tools.py @@ -454,7 +454,7 @@ DEFAULT_TOOLS_VALUES = { "hosts": [], "task_types": [], "tasks": [], - "template": "{product[type]}{Task[name]}{Variant}" + "template": "{product[type]}{Task[name]}{Variant}<_{Aov}>" }, { "product_types": [ diff --git a/tests/client/ayon_core/pipeline/editorial/test_extract_otio_review.py b/tests/client/ayon_core/pipeline/editorial/test_extract_otio_review.py index 6a74df7f43..ed441edc63 100644 --- a/tests/client/ayon_core/pipeline/editorial/test_extract_otio_review.py +++ b/tests/client/ayon_core/pipeline/editorial/test_extract_otio_review.py @@ -246,75 +246,75 @@ def test_multiple_review_clips_no_gap(): expected = [ # 10 head black frames generated from gap (991-1000) '/path/to/ffmpeg -t 0.4 -r 25.0 -f lavfi' - ' -i color=c=black:s=1280x720 -tune ' + ' -i color=c=black:s=1920x1080 -tune ' 'stillimage -start_number 991 -pix_fmt rgba C:/result/output.%04d.png', # Alternance 25fps tiff sequence and 24fps exr sequence # for 100 frames each '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' f'C:\\no_tc{os.sep}output.%04d.tif ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 1001 -pix_fmt rgba C:/result/output.%04d.png', '/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i ' f'C:\\with_tc{os.sep}output.%04d.exr ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 1102 -pix_fmt rgba C:/result/output.%04d.png', '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' f'C:\\no_tc{os.sep}output.%04d.tif ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 1198 -pix_fmt rgba C:/result/output.%04d.png', '/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i ' f'C:\\with_tc{os.sep}output.%04d.exr ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 1299 -pix_fmt rgba C:/result/output.%04d.png', # Repeated 25fps tiff sequence multiple times till the end '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' f'C:\\no_tc{os.sep}output.%04d.tif ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 1395 -pix_fmt rgba C:/result/output.%04d.png', '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' f'C:\\no_tc{os.sep}output.%04d.tif ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 1496 -pix_fmt rgba C:/result/output.%04d.png', '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' f'C:\\no_tc{os.sep}output.%04d.tif ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 1597 -pix_fmt rgba C:/result/output.%04d.png', '/path/to/ffmpeg 
-start_number 1000 -framerate 25.0 -i ' f'C:\\no_tc{os.sep}output.%04d.tif ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 1698 -pix_fmt rgba C:/result/output.%04d.png', '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' f'C:\\no_tc{os.sep}output.%04d.tif ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 1799 -pix_fmt rgba C:/result/output.%04d.png', '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' f'C:\\no_tc{os.sep}output.%04d.tif ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 1900 -pix_fmt rgba C:/result/output.%04d.png', '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' f'C:\\no_tc{os.sep}output.%04d.tif ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 2001 -pix_fmt rgba C:/result/output.%04d.png', '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' f'C:\\no_tc{os.sep}output.%04d.tif ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 2102 -pix_fmt rgba C:/result/output.%04d.png', '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' f'C:\\no_tc{os.sep}output.%04d.tif ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 2203 -pix_fmt rgba C:/result/output.%04d.png' ] @@ -348,12 +348,12 @@ def test_multiple_review_clips_with_gap(): '/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i ' f'C:\\with_tc{os.sep}output.%04d.exr ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 1003 -pix_fmt rgba C:/result/output.%04d.png', '/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i ' f'C:\\with_tc{os.sep}output.%04d.exr ' - '-vf scale=1280:720:flags=lanczos -compression_level 5 ' + '-vf scale=1920:1080:flags=lanczos -compression_level 5 ' '-start_number 1091 -pix_fmt rgba C:/result/output.%04d.png' ]