diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 9fb6ee645d..933448a6a9 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -35,6 +35,9 @@ body: label: Version description: What version are you running? Look to AYON Tray options: + - 1.5.2 + - 1.5.1 + - 1.5.0 - 1.4.1 - 1.4.0 - 1.3.2 diff --git a/client/ayon_core/addon/base.py b/client/ayon_core/addon/base.py index 72270fa585..57968b0e09 100644 --- a/client/ayon_core/addon/base.py +++ b/client/ayon_core/addon/base.py @@ -8,6 +8,7 @@ import inspect import logging import threading import collections +import warnings from uuid import uuid4 from abc import ABC, abstractmethod from typing import Optional @@ -815,10 +816,26 @@ class AddonsManager: Unknown keys are logged out. + Deprecated: + Use targeted methods 'collect_launcher_action_paths', + 'collect_create_plugin_paths', 'collect_load_plugin_paths', + 'collect_publish_plugin_paths' and + 'collect_inventory_action_paths' to collect plugin paths. + Returns: dict: Output is dictionary with keys "publish", "create", "load", "actions" and "inventory" each containing list of paths. + """ + warnings.warn( + "Used deprecated method 'collect_plugin_paths'. Please use" + " targeted methods 'collect_launcher_action_paths'," + " 'collect_create_plugin_paths', 'collect_load_plugin_paths'" + " 'collect_publish_plugin_paths' and" + " 'collect_inventory_action_paths'", + DeprecationWarning, + stacklevel=2 + ) # Output structure output = { "publish": [], @@ -874,24 +891,28 @@ class AddonsManager: if not isinstance(addon, IPluginPaths): continue + paths = None method = getattr(addon, method_name) try: paths = method(*args, **kwargs) except Exception: self.log.warning( - ( - "Failed to get plugin paths from addon" - " '{}' using '{}'." 
- ).format(addon.__class__.__name__, method_name), + "Failed to get plugin paths from addon" + f" '{addon.name}' using '{method_name}'.", exc_info=True ) + + if not paths: continue - if paths: - # Convert to list if value is not list - if not isinstance(paths, (list, tuple, set)): - paths = [paths] - output.extend(paths) + if isinstance(paths, str): + paths = [paths] + self.log.warning( + f"Addon '{addon.name}' returned invalid output type" + f" from '{method_name}'." + f" Got 'str' expected 'list[str]'." + ) + output.extend(paths) return output def collect_launcher_action_paths(self): diff --git a/client/ayon_core/addon/interfaces.py b/client/ayon_core/addon/interfaces.py index 232c056fb4..bf08ccd48c 100644 --- a/client/ayon_core/addon/interfaces.py +++ b/client/ayon_core/addon/interfaces.py @@ -1,6 +1,7 @@ """Addon interfaces for AYON.""" from __future__ import annotations +import warnings from abc import ABCMeta, abstractmethod from typing import TYPE_CHECKING, Callable, Optional, Type @@ -39,26 +40,29 @@ class AYONInterface(metaclass=_AYONInterfaceMeta): class IPluginPaths(AYONInterface): - """Addon has plugin paths to return. + """Addon wants to register plugin paths.""" - Expected result is dictionary with keys "publish", "create", "load", - "actions" or "inventory" and values as list or string. - { - "publish": ["path/to/publish_plugins"] - } - """ - - @abstractmethod def get_plugin_paths(self) -> dict[str, list[str]]: """Return plugin paths for addon. + This method was abstract (required) in the past, so raise the required + 'core' addon version when 'get_plugin_paths' is removed from + addon. + + Deprecated: + Please implement specific methods 'get_create_plugin_paths', + 'get_load_plugin_paths', 'get_inventory_action_paths' and + 'get_publish_plugin_paths' to return plugin paths. + Returns: dict[str, list[str]]: Plugin paths for addon. 
""" + return {} def _get_plugin_paths_by_type( - self, plugin_type: str) -> list[str]: + self, plugin_type: str + ) -> list[str]: """Get plugin paths by type. Args: @@ -78,6 +82,24 @@ class IPluginPaths(AYONInterface): if not isinstance(paths, (list, tuple, set)): paths = [paths] + + new_function_name = "get_launcher_action_paths" + if plugin_type == "create": + new_function_name = "get_create_plugin_paths" + elif plugin_type == "load": + new_function_name = "get_load_plugin_paths" + elif plugin_type == "publish": + new_function_name = "get_publish_plugin_paths" + elif plugin_type == "inventory": + new_function_name = "get_inventory_action_paths" + + warnings.warn( + f"Addon '{self.name}' returns '{plugin_type}' paths using" + " 'get_plugin_paths' method. Please implement" + f" '{new_function_name}' instead.", + DeprecationWarning, + stacklevel=2 + ) return paths def get_launcher_action_paths(self) -> list[str]: diff --git a/client/ayon_core/hooks/pre_add_last_workfile_arg.py b/client/ayon_core/hooks/pre_add_last_workfile_arg.py index d6110ea367..c6afaaa083 100644 --- a/client/ayon_core/hooks/pre_add_last_workfile_arg.py +++ b/client/ayon_core/hooks/pre_add_last_workfile_arg.py @@ -33,6 +33,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook): "cinema4d", "silhouette", "gaffer", + "loki", } launch_types = {LaunchTypes.local} diff --git a/client/ayon_core/hooks/pre_ocio_hook.py b/client/ayon_core/hooks/pre_ocio_hook.py index 12f9454a88..85fcef47f2 100644 --- a/client/ayon_core/hooks/pre_ocio_hook.py +++ b/client/ayon_core/hooks/pre_ocio_hook.py @@ -24,6 +24,7 @@ class OCIOEnvHook(PreLaunchHook): "cinema4d", "silhouette", "gaffer", + "loki", } launch_types = set() diff --git a/client/ayon_core/host/__init__.py b/client/ayon_core/host/__init__.py index da1237c739..ef5c324028 100644 --- a/client/ayon_core/host/__init__.py +++ b/client/ayon_core/host/__init__.py @@ -1,9 +1,12 @@ +from .constants import ContextChangeReason from .host import ( HostBase, ) from 
.interfaces import ( IWorkfileHost, + WorkfileInfo, + PublishedWorkfileInfo, ILoadHost, IPublishHost, INewPublisher, @@ -13,9 +16,13 @@ from .dirmap import HostDirmap __all__ = ( + "ContextChangeReason", + "HostBase", "IWorkfileHost", + "WorkfileInfo", + "PublishedWorkfileInfo", "ILoadHost", "IPublishHost", "INewPublisher", diff --git a/client/ayon_core/host/constants.py b/client/ayon_core/host/constants.py new file mode 100644 index 0000000000..2564c5d54d --- /dev/null +++ b/client/ayon_core/host/constants.py @@ -0,0 +1,15 @@ +from enum import Enum + + +class StrEnum(str, Enum): + """A string-based Enum class that allows for string comparison.""" + + def __str__(self) -> str: + return self.value + + +class ContextChangeReason(StrEnum): + """Reasons for context change in the host.""" + undefined = "undefined" + workfile_open = "workfile.opened" + workfile_save = "workfile.saved" diff --git a/client/ayon_core/host/host.py b/client/ayon_core/host/host.py index 5a29de6cd7..7fc4b19bdd 100644 --- a/client/ayon_core/host/host.py +++ b/client/ayon_core/host/host.py @@ -1,10 +1,37 @@ +from __future__ import annotations + import os import logging import contextlib -from abc import ABC, abstractproperty +from abc import ABC, abstractmethod +from dataclasses import dataclass +import typing +from typing import Optional, Any -# NOTE can't import 'typing' because of issues in Maya 2020 -# - shiboken crashes on 'typing' module import +import ayon_api + +from ayon_core.lib import emit_event + +from .constants import ContextChangeReason + +if typing.TYPE_CHECKING: + from ayon_core.pipeline import Anatomy + + from typing import TypedDict + + class HostContextData(TypedDict): + project_name: str + folder_path: Optional[str] + task_name: Optional[str] + + +@dataclass +class ContextChangeData: + project_entity: dict[str, Any] + folder_entity: dict[str, Any] + task_entity: dict[str, Any] + reason: ContextChangeReason + anatomy: Anatomy class HostBase(ABC): @@ -92,8 +119,9 @@ class 
HostBase(ABC): self._log = logging.getLogger(self.__class__.__name__) return self._log - @abstractproperty - def name(self): + @property + @abstractmethod + def name(self) -> str: """Host name.""" pass @@ -106,7 +134,7 @@ class HostBase(ABC): return os.environ.get("AYON_PROJECT_NAME") - def get_current_folder_path(self): + def get_current_folder_path(self) -> Optional[str]: """ Returns: Union[str, None]: Current asset name. @@ -114,7 +142,7 @@ class HostBase(ABC): return os.environ.get("AYON_FOLDER_PATH") - def get_current_task_name(self): + def get_current_task_name(self) -> Optional[str]: """ Returns: Union[str, None]: Current task name. @@ -122,7 +150,7 @@ class HostBase(ABC): return os.environ.get("AYON_TASK_NAME") - def get_current_context(self): + def get_current_context(self) -> "HostContextData": """Get current context information. This method should be used to get current context of host. Usage of @@ -141,6 +169,75 @@ class HostBase(ABC): "task_name": self.get_current_task_name() } + def set_current_context( + self, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + reason: ContextChangeReason = ContextChangeReason.undefined, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional[Anatomy] = None, + ) -> "HostContextData": + """Set current context information. + + This method should be used to set current context of host. Usage of + this method can be crucial for host implementations in DCCs where + can be opened multiple workfiles at one moment and change of context + can't be caught properly. + + Notes: + This method should not care about change of workdir and expect any + of the arguments. + + Args: + folder_entity (Optional[dict[str, Any]]): Folder entity. + task_entity (Optional[dict[str, Any]]): Task entity. + reason (ContextChangeReason): Reason for context change. + project_entity (Optional[dict[str, Any]]): Project entity data. + anatomy (Optional[Anatomy]): Anatomy instance for the project. 
+ + Returns: + dict[str, Optional[str]]: Context information with project name, + folder path and task name. + + """ + from ayon_core.pipeline import Anatomy + + folder_path = folder_entity["path"] + task_name = task_entity["name"] + + context = self.get_current_context() + # Don't do anything if context did not change + if ( + context["folder_path"] == folder_path + and context["task_name"] == task_name + ): + return context + + project_name = self.get_current_project_name() + if project_entity is None: + project_entity = ayon_api.get_project(project_name) + + if anatomy is None: + anatomy = Anatomy(project_name, project_entity=project_entity) + + context_change_data = ContextChangeData( + project_entity, + folder_entity, + task_entity, + reason, + anatomy, + ) + self._before_context_change(context_change_data) + self._set_current_context(context_change_data) + self._after_context_change(context_change_data) + + return self._emit_context_change_event( + project_name, + folder_path, + task_name, + ) + def get_context_title(self): """Context title shown for UI purposes. @@ -187,3 +284,91 @@ class HostBase(ABC): yield finally: pass + + def _emit_context_change_event( + self, + project_name: str, + folder_path: Optional[str], + task_name: Optional[str], + ) -> "HostContextData": + """Emit context change event. + + Args: + project_name (str): Name of the project. + folder_path (Optional[str]): Path of the folder. + task_name (Optional[str]): Name of the task. + + Returns: + HostContextData: Data send to context change event. + + """ + data = { + "project_name": project_name, + "folder_path": folder_path, + "task_name": task_name, + } + emit_event("taskChanged", data) + return data + + def _set_current_context( + self, context_change_data: ContextChangeData + ) -> None: + """Method that changes the context in host. + + Can be overriden for hosts that do need different handling of context + than using environment variables. 
+ + Args: + context_change_data (ContextChangeData): Context change related + data. + + """ + project_name = self.get_current_project_name() + folder_path = None + task_name = None + if context_change_data.folder_entity: + folder_path = context_change_data.folder_entity["path"] + if context_change_data.task_entity: + task_name = context_change_data.task_entity["name"] + + envs = { + "AYON_PROJECT_NAME": project_name, + "AYON_FOLDER_PATH": folder_path, + "AYON_TASK_NAME": task_name, + } + + # Update the Session and environments. Pop from environments all + # keys with value set to None. + for key, value in envs.items(): + if value is None: + os.environ.pop(key, None) + else: + os.environ[key] = value + + def _before_context_change(self, context_change_data: ContextChangeData): + """Before context is changed. + + This method is called before the context is changed in the host. + + Can be overridden to implement host specific logic. + + Args: + context_change_data (ContextChangeData): Object with information + about context change. + + """ + pass + + def _after_context_change(self, context_change_data: ContextChangeData): + """After context is changed. + + This method is called after the context is changed in the host. + + Can be overridden to implement host specific logic. + + Args: + context_change_data (ContextChangeData): Object with information + about context change. 
+ + """ + pass diff --git a/client/ayon_core/host/interfaces/__init__.py b/client/ayon_core/host/interfaces/__init__.py new file mode 100644 index 0000000000..8f11ad4e2f --- /dev/null +++ b/client/ayon_core/host/interfaces/__init__.py @@ -0,0 +1,66 @@ +from .exceptions import MissingMethodsError +from .workfiles import ( + IWorkfileHost, + WorkfileInfo, + PublishedWorkfileInfo, + + OpenWorkfileOptionalData, + ListWorkfilesOptionalData, + ListPublishedWorkfilesOptionalData, + SaveWorkfileOptionalData, + CopyWorkfileOptionalData, + CopyPublishedWorkfileOptionalData, + + get_open_workfile_context, + get_list_workfiles_context, + get_list_published_workfiles_context, + get_save_workfile_context, + get_copy_workfile_context, + get_copy_repre_workfile_context, + + OpenWorkfileContext, + ListWorkfilesContext, + ListPublishedWorkfilesContext, + SaveWorkfileContext, + CopyWorkfileContext, + CopyPublishedWorkfileContext, +) +from .interfaces import ( + IPublishHost, + INewPublisher, + ILoadHost, +) + + +__all__ = ( + "MissingMethodsError", + + "IWorkfileHost", + "WorkfileInfo", + "PublishedWorkfileInfo", + + "OpenWorkfileOptionalData", + "ListWorkfilesOptionalData", + "ListPublishedWorkfilesOptionalData", + "SaveWorkfileOptionalData", + "CopyWorkfileOptionalData", + "CopyPublishedWorkfileOptionalData", + + "get_open_workfile_context", + "get_list_workfiles_context", + "get_list_published_workfiles_context", + "get_save_workfile_context", + "get_copy_workfile_context", + "get_copy_repre_workfile_context", + + "OpenWorkfileContext", + "ListWorkfilesContext", + "ListPublishedWorkfilesContext", + "SaveWorkfileContext", + "CopyWorkfileContext", + "CopyPublishedWorkfileContext", + + "IPublishHost", + "INewPublisher", + "ILoadHost", +) diff --git a/client/ayon_core/host/interfaces/exceptions.py b/client/ayon_core/host/interfaces/exceptions.py new file mode 100644 index 0000000000..eec4564142 --- /dev/null +++ b/client/ayon_core/host/interfaces/exceptions.py @@ -0,0 +1,15 @@ +class 
MissingMethodsError(ValueError): + """Exception when host miss some required methods for a specific workflow. + + Args: + host (HostBase): Host implementation where are missing methods. + missing_methods (list[str]): List of missing methods. + """ + + def __init__(self, host, missing_methods): + joined_missing = ", ".join( + ['"{}"'.format(item) for item in missing_methods] + ) + super().__init__( + f"Host \"{host.name}\" miss methods {joined_missing}" + ) diff --git a/client/ayon_core/host/interfaces.py b/client/ayon_core/host/interfaces/interfaces.py similarity index 50% rename from client/ayon_core/host/interfaces.py rename to client/ayon_core/host/interfaces/interfaces.py index c077dfeae9..a41dffe92a 100644 --- a/client/ayon_core/host/interfaces.py +++ b/client/ayon_core/host/interfaces/interfaces.py @@ -1,28 +1,6 @@ -from abc import ABC, abstractmethod +from abc import abstractmethod - -class MissingMethodsError(ValueError): - """Exception when host miss some required methods for specific workflow. - - Args: - host (HostBase): Host implementation where are missing methods. - missing_methods (list[str]): List of missing methods. - """ - - def __init__(self, host, missing_methods): - joined_missing = ", ".join( - ['"{}"'.format(item) for item in missing_methods] - ) - host_name = getattr(host, "name", None) - if not host_name: - try: - host_name = host.__file__.replace("\\", "/").split("/")[-3] - except Exception: - host_name = str(host) - message = ( - "Host \"{}\" miss methods {}".format(host_name, joined_missing) - ) - super(MissingMethodsError, self).__init__(message) +from .exceptions import MissingMethodsError class ILoadHost: @@ -105,181 +83,6 @@ class ILoadHost: return self.get_containers() -class IWorkfileHost(ABC): - """Implementation requirements to be able use workfile utils and tool.""" - - @staticmethod - def get_missing_workfile_methods(host): - """Look for missing methods on "old type" host implementation. 
- - Method is used for validation of implemented functions related to - workfiles. Checks only existence of methods. - - Args: - Union[ModuleType, HostBase]: Object of host where to look for - required methods. - - Returns: - list[str]: Missing method implementations for workfiles workflow. - """ - - if isinstance(host, IWorkfileHost): - return [] - - required = [ - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root", - ] - missing = [] - for name in required: - if not hasattr(host, name): - missing.append(name) - return missing - - @staticmethod - def validate_workfile_methods(host): - """Validate methods of "old type" host for workfiles workflow. - - Args: - Union[ModuleType, HostBase]: Object of host to validate. - - Raises: - MissingMethodsError: If there are missing methods on host - implementation. - """ - - missing = IWorkfileHost.get_missing_workfile_methods(host) - if missing: - raise MissingMethodsError(host, missing) - - @abstractmethod - def get_workfile_extensions(self): - """Extensions that can be used as save. - - Questions: - This could potentially use 'HostDefinition'. - """ - - return [] - - @abstractmethod - def save_workfile(self, dst_path=None): - """Save currently opened scene. - - Args: - dst_path (str): Where the current scene should be saved. Or use - current path if 'None' is passed. - """ - - pass - - @abstractmethod - def open_workfile(self, filepath): - """Open passed filepath in the host. - - Args: - filepath (str): Path to workfile. - """ - - pass - - @abstractmethod - def get_current_workfile(self): - """Retrieve path to current opened file. - - Returns: - str: Path to file which is currently opened. - None: If nothing is opened. - """ - - return None - - def workfile_has_unsaved_changes(self): - """Currently opened scene is saved. - - Not all hosts can know if current scene is saved because the API of - DCC does not support it. 
- - Returns: - bool: True if scene is saved and False if has unsaved - modifications. - None: Can't tell if workfiles has modifications. - """ - - return None - - def work_root(self, session): - """Modify workdir per host. - - Default implementation keeps workdir untouched. - - Warnings: - We must handle this modification with more sophisticated way - because this can't be called out of DCC so opening of last workfile - (calculated before DCC is launched) is complicated. Also breaking - defined work template is not a good idea. - Only place where it's really used and can make sense is Maya. There - workspace.mel can modify subfolders where to look for maya files. - - Args: - session (dict): Session context data. - - Returns: - str: Path to new workdir. - """ - - return session["AYON_WORKDIR"] - - # --- Deprecated method names --- - def file_extensions(self): - """Deprecated variant of 'get_workfile_extensions'. - - Todo: - Remove when all usages are replaced. - """ - return self.get_workfile_extensions() - - def save_file(self, dst_path=None): - """Deprecated variant of 'save_workfile'. - - Todo: - Remove when all usages are replaced. - """ - - self.save_workfile(dst_path) - - def open_file(self, filepath): - """Deprecated variant of 'open_workfile'. - - Todo: - Remove when all usages are replaced. - """ - - return self.open_workfile(filepath) - - def current_file(self): - """Deprecated variant of 'get_current_workfile'. - - Todo: - Remove when all usages are replaced. - """ - - return self.get_current_workfile() - - def has_unsaved_changes(self): - """Deprecated variant of 'workfile_has_unsaved_changes'. - - Todo: - Remove when all usages are replaced. - """ - - return self.workfile_has_unsaved_changes() - - class IPublishHost: """Functions related to new creation system in new publisher. 
diff --git a/client/ayon_core/host/interfaces/workfiles.py b/client/ayon_core/host/interfaces/workfiles.py new file mode 100644 index 0000000000..82d71d152a --- /dev/null +++ b/client/ayon_core/host/interfaces/workfiles.py @@ -0,0 +1,1797 @@ +from __future__ import annotations + +import os +import platform +import shutil +import typing +import warnings +import functools +from abc import abstractmethod +from dataclasses import dataclass, asdict +from typing import Optional, Any + +import ayon_api +import arrow + +from ayon_core.lib import emit_event +from ayon_core.settings import get_project_settings +from ayon_core.host.constants import ContextChangeReason + +if typing.TYPE_CHECKING: + from ayon_core.pipeline import Anatomy + + +def deprecated(reason): + def decorator(func): + message = f"Call to deprecated function {func.__name__} ({reason})." + + @functools.wraps(func) + def new_func(*args, **kwargs): + warnings.simplefilter("always", DeprecationWarning) + warnings.warn( + message, + category=DeprecationWarning, + stacklevel=2 + ) + warnings.simplefilter("default", DeprecationWarning) + return func(*args, **kwargs) + + return new_func + + return decorator + + +# Wrappers for optional arguments that might change in future +class _WorkfileOptionalData: + """Base class for optional data used in workfile operations.""" + def __init__( + self, + *, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional["Anatomy"] = None, + project_settings: Optional[dict[str, Any]] = None, + **kwargs + ): + if kwargs: + cls_name = self.__class__.__name__ + keys = ", ".join(['"{}"'.format(k) for k in kwargs.keys()]) + warnings.warn( + f"Unknown keywords passed to {cls_name}: {keys}", + ) + + self.project_entity = project_entity + self.anatomy = anatomy + self.project_settings = project_settings + + def get_project_data( + self, project_name: str + ) -> tuple[dict[str, Any], "Anatomy", dict[str, Any]]: + from ayon_core.pipeline import Anatomy + + project_entity = 
self.project_entity + anatomy = self.anatomy + project_settings = self.project_settings + + if project_entity is None: + project_entity = ayon_api.get_project(project_name) + + if anatomy is None: + anatomy = Anatomy( + project_name, + project_entity=project_entity + ) + + if project_settings is None: + project_settings = get_project_settings(project_name) + return ( + project_entity, + anatomy, + project_settings, + ) + + +class OpenWorkfileOptionalData(_WorkfileOptionalData): + """Optional data for opening workfile.""" + data_version = 1 + + +class ListWorkfilesOptionalData(_WorkfileOptionalData): + """Optional data to list workfiles.""" + data_version = 1 + + def __init__( + self, + *, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional["Anatomy"] = None, + project_settings: Optional[dict[str, Any]] = None, + template_key: Optional[str] = None, + workfile_entities: Optional[list[dict[str, Any]]] = None, + **kwargs + ): + super().__init__( + project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + **kwargs + ) + self.template_key = template_key + self.workfile_entities = workfile_entities + + def get_template_key( + self, + project_name: str, + task_type: str, + host_name: str, + project_settings: dict[str, Any], + ) -> str: + from ayon_core.pipeline.workfile import get_workfile_template_key + + if self.template_key is not None: + return self.template_key + + return get_workfile_template_key( + project_name=project_name, + task_type=task_type, + host_name=host_name, + project_settings=project_settings, + ) + + def get_workfile_entities( + self, project_name: str, task_id: str + ) -> list[dict[str, Any]]: + """Fill workfile entities if not provided.""" + if self.workfile_entities is not None: + return self.workfile_entities + return list(ayon_api.get_workfiles_info( + project_name, task_ids=[task_id] + )) + + +class ListPublishedWorkfilesOptionalData(_WorkfileOptionalData): + """Optional data to list published 
workfiles.""" + data_version = 1 + + def __init__( + self, + *, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional["Anatomy"] = None, + project_settings: Optional[dict[str, Any]] = None, + product_entities: Optional[list[dict[str, Any]]] = None, + version_entities: Optional[list[dict[str, Any]]] = None, + repre_entities: Optional[list[dict[str, Any]]] = None, + **kwargs + ): + super().__init__( + project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + **kwargs + ) + + self.product_entities = product_entities + self.version_entities = version_entities + self.repre_entities = repre_entities + + def get_entities( + self, + project_name: str, + folder_id: str, + ) -> tuple[ + list[dict[str, Any]], + list[dict[str, Any]], + list[dict[str, Any]] + ]: + product_entities = self.product_entities + if product_entities is None: + product_entities = list(ayon_api.get_products( + project_name, + folder_ids={folder_id}, + product_types={"workfile"}, + fields={"id", "name"}, + )) + + version_entities = self.version_entities + if version_entities is None: + product_ids = {p["id"] for p in product_entities} + version_entities = list(ayon_api.get_versions( + project_name, + product_ids=product_ids, + fields={"id", "author", "taskId"}, + )) + + repre_entities = self.repre_entities + if repre_entities is None: + version_ids = {v["id"] for v in version_entities} + repre_entities = list(ayon_api.get_representations( + project_name, + version_ids=version_ids, + )) + return product_entities, version_entities, repre_entities + + +class SaveWorkfileOptionalData(_WorkfileOptionalData): + """Optional data to save workfile.""" + data_version = 1 + + def __init__( + self, + *, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional["Anatomy"] = None, + project_settings: Optional[dict[str, Any]] = None, + rootless_path: Optional[str] = None, + workfile_entities: Optional[list[dict[str, Any]]] = None, + **kwargs + ): + 
super().__init__( + project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + **kwargs + ) + + self.rootless_path = rootless_path + self.workfile_entities = workfile_entities + + def get_workfile_entities(self, project_name: str, task_id: str): + """Fill workfile entities if not provided.""" + if self.workfile_entities is not None: + return self.workfile_entities + return list(ayon_api.get_workfiles_info( + project_name, task_ids=[task_id] + )) + + def get_rootless_path( + self, + workfile_path: str, + project_name: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + host_name: str, + project_entity: dict[str, Any], + project_settings: dict[str, Any], + anatomy: "Anatomy", + ): + from ayon_core.pipeline.workfile.utils import ( + find_workfile_rootless_path + ) + + if self.rootless_path is not None: + return self.rootless_path + + return find_workfile_rootless_path( + workfile_path, + project_name, + folder_entity, + task_entity, + host_name, + project_entity=project_entity, + project_settings=project_settings, + anatomy=anatomy, + ) + + +class CopyWorkfileOptionalData(SaveWorkfileOptionalData): + """Optional data to copy workfile.""" + data_version = 1 + + +class CopyPublishedWorkfileOptionalData(SaveWorkfileOptionalData): + """Optional data to copy published workfile.""" + data_version = 1 + + def __init__( + self, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional["Anatomy"] = None, + project_settings: Optional[dict[str, Any]] = None, + rootless_path: Optional[str] = None, + workfile_entities: Optional[list[dict[str, Any]]] = None, + src_anatomy: Optional["Anatomy"] = None, + src_representation_path: Optional[str] = None, + **kwargs + ): + super().__init__( + rootless_path=rootless_path, + workfile_entities=workfile_entities, + project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + **kwargs + ) + self.src_anatomy = src_anatomy + self.src_representation_path = 
src_representation_path + + def get_source_data( + self, + current_anatomy: Optional["Anatomy"], + project_name: str, + representation_entity: dict[str, Any], + ) -> tuple["Anatomy", str]: + from ayon_core.pipeline import Anatomy + from ayon_core.pipeline.load import ( + get_representation_path_with_anatomy + ) + + src_anatomy = self.src_anatomy + + if ( + src_anatomy is None + and current_anatomy is not None + and current_anatomy.project_name == project_name + ): + src_anatomy = current_anatomy + else: + src_anatomy = Anatomy(project_name) + + repre_path = self.src_representation_path + if repre_path is None: + repre_path = get_representation_path_with_anatomy( + representation_entity, + src_anatomy, + ) + return src_anatomy, repre_path + + +# Dataclasses used during workfile operations +@dataclass +class OpenWorkfileContext: + data_version: int + project_name: str + filepath: str + project_entity: dict[str, Any] + folder_entity: dict[str, Any] + task_entity: dict[str, Any] + anatomy: "Anatomy" + project_settings: dict[str, Any] + + +@dataclass +class ListWorkfilesContext: + data_version: int + project_name: str + project_entity: dict[str, Any] + folder_entity: dict[str, Any] + task_entity: dict[str, Any] + anatomy: "Anatomy" + project_settings: dict[str, Any] + template_key: str + workfile_entities: list[dict[str, Any]] + + +@dataclass +class ListPublishedWorkfilesContext: + data_version: int + project_name: str + project_entity: dict[str, Any] + folder_id: str + anatomy: "Anatomy" + project_settings: dict[str, Any] + product_entities: list[dict[str, Any]] + version_entities: list[dict[str, Any]] + repre_entities: list[dict[str, Any]] + + +@dataclass +class SaveWorkfileContext: + data_version: int + project_name: str + project_entity: dict[str, Any] + folder_entity: dict[str, Any] + task_entity: dict[str, Any] + anatomy: "Anatomy" + project_settings: dict[str, Any] + dst_path: str + rootless_path: str + workfile_entities: list[dict[str, Any]] + + +@dataclass 
+class CopyWorkfileContext(SaveWorkfileContext): + src_path: str + version: Optional[int] + comment: Optional[str] + description: Optional[str] + open_workfile: bool + + +@dataclass +class CopyPublishedWorkfileContext(CopyWorkfileContext): + src_project_name: str + src_representation_entity: dict[str, Any] + src_anatomy: "Anatomy" + + +def get_open_workfile_context( + project_name: str, + filepath: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + prepared_data: Optional[OpenWorkfileOptionalData], +) -> OpenWorkfileContext: + if prepared_data is None: + prepared_data = OpenWorkfileOptionalData() + ( + project_entity, anatomy, project_settings + ) = prepared_data.get_project_data(project_name) + return OpenWorkfileContext( + data_version=prepared_data.data_version, + filepath=filepath, + folder_entity=folder_entity, + task_entity=task_entity, + project_name=project_name, + project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + ) + + +def get_list_workfiles_context( + project_name: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + host_name: str, + prepared_data: Optional[ListWorkfilesOptionalData], +) -> ListWorkfilesContext: + if prepared_data is None: + prepared_data = ListWorkfilesOptionalData() + ( + project_entity, anatomy, project_settings + ) = prepared_data.get_project_data(project_name) + + template_key = prepared_data.get_template_key( + project_name, + task_entity["taskType"], + host_name, + project_settings, + ) + workfile_entities = prepared_data.get_workfile_entities( + project_name, task_entity["id"] + ) + return ListWorkfilesContext( + data_version=prepared_data.data_version, + project_entity=project_entity, + folder_entity=folder_entity, + task_entity=task_entity, + project_name=project_name, + anatomy=anatomy, + project_settings=project_settings, + template_key=template_key, + workfile_entities=workfile_entities, + ) + + +def get_list_published_workfiles_context( + 
project_name: str, + folder_id: str, + prepared_data: Optional[ListPublishedWorkfilesOptionalData], +) -> ListPublishedWorkfilesContext: + if prepared_data is None: + prepared_data = ListPublishedWorkfilesOptionalData() + ( + project_entity, anatomy, project_settings + ) = prepared_data.get_project_data(project_name) + ( + product_entities, + version_entities, + repre_entities, + ) = prepared_data.get_entities(project_name, folder_id) + + return ListPublishedWorkfilesContext( + data_version=prepared_data.data_version, + project_name=project_name, + project_entity=project_entity, + folder_id=folder_id, + anatomy=anatomy, + project_settings=project_settings, + product_entities=product_entities, + version_entities=version_entities, + repre_entities=repre_entities, + ) + + +def get_save_workfile_context( + project_name: str, + filepath: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + host_name: str, + prepared_data: Optional[SaveWorkfileOptionalData], +) -> SaveWorkfileContext: + if prepared_data is None: + prepared_data = SaveWorkfileOptionalData() + + ( + project_entity, anatomy, project_settings + ) = prepared_data.get_project_data(project_name) + + rootless_path = prepared_data.get_rootless_path( + filepath, + project_name, + folder_entity, + task_entity, + host_name, + project_entity, + project_settings, + anatomy, + ) + workfile_entities = prepared_data.get_workfile_entities( + project_name, task_entity["id"] + ) + return SaveWorkfileContext( + data_version=prepared_data.data_version, + project_name=project_name, + project_entity=project_entity, + folder_entity=folder_entity, + task_entity=task_entity, + anatomy=anatomy, + project_settings=project_settings, + dst_path=filepath, + rootless_path=rootless_path, + workfile_entities=workfile_entities, + ) + + +def get_copy_workfile_context( + project_name: str, + src_path: str, + dst_path: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + version: Optional[int], + 
comment: Optional[str], + description: Optional[str], + open_workfile: bool, + host_name: str, + prepared_data: Optional[CopyWorkfileOptionalData], +) -> CopyWorkfileContext: + if prepared_data is None: + prepared_data = CopyWorkfileOptionalData() + context: SaveWorkfileContext = get_save_workfile_context( + project_name, + dst_path, + folder_entity, + task_entity, + host_name, + prepared_data, + ) + return CopyWorkfileContext( + data_version=prepared_data.data_version, + src_path=src_path, + project_name=context.project_name, + project_entity=context.project_entity, + folder_entity=context.folder_entity, + task_entity=context.task_entity, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + anatomy=context.anatomy, + project_settings=context.project_settings, + dst_path=context.dst_path, + rootless_path=context.rootless_path, + workfile_entities=context.workfile_entities, + ) + + +def get_copy_repre_workfile_context( + project_name: str, + src_project_name: str, + src_representation_entity: dict[str, Any], + dst_path: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + version: Optional[int], + comment: Optional[str], + description: Optional[str], + open_workfile: bool, + host_name: str, + prepared_data: Optional[CopyPublishedWorkfileOptionalData], +) -> CopyPublishedWorkfileContext: + if prepared_data is None: + prepared_data = CopyPublishedWorkfileOptionalData() + + context: SaveWorkfileContext = get_save_workfile_context( + project_name, + dst_path, + folder_entity, + task_entity, + host_name, + prepared_data, + ) + src_anatomy, repre_path = prepared_data.get_source_data( + context.anatomy, + src_project_name, + src_representation_entity, + ) + return CopyPublishedWorkfileContext( + data_version=prepared_data.data_version, + src_project_name=src_project_name, + src_representation_entity=src_representation_entity, + src_path=repre_path, + dst_path=context.dst_path, + 
project_name=context.project_name, + project_entity=context.project_entity, + folder_entity=context.folder_entity, + task_entity=context.task_entity, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + anatomy=context.anatomy, + project_settings=context.project_settings, + rootless_path=context.rootless_path, + workfile_entities=context.workfile_entities, + src_anatomy=src_anatomy, + ) + + +@dataclass +class WorkfileInfo: + """Information about workfile. + + Host can open, copy and use the workfile using this information object. + + Attributes: + filepath (str): Path to the workfile. + rootless_path (str): Path to the workfile without the root. And without + backslashes on Windows. + version (Optional[int]): Version of the workfile. + comment (Optional[str]): Comment of the workfile. + file_size (Optional[float]): Size of the workfile in bytes. + file_created (Optional[float]): Timestamp when the workfile was + created on the filesystem. + file_modified (Optional[float]): Timestamp when the workfile was + modified on the filesystem. + workfile_entity_id (Optional[str]): Workfile entity id. If None then + the workfile is not in the database. + description (str): Description of the workfile. + created_by (Optional[str]): User id of the user who created the + workfile entity. + updated_by (Optional[str]): User id of the user who updated the + workfile entity. + available (bool): True if workfile is available on the machine. 
+ + """ + filepath: str + rootless_path: str + version: Optional[int] + comment: Optional[str] + file_size: Optional[float] + file_created: Optional[float] + file_modified: Optional[float] + workfile_entity_id: Optional[str] + description: str + created_by: Optional[str] + updated_by: Optional[str] + available: bool + + @classmethod + def new( + cls, + filepath: str, + rootless_path: str, + *, + version: Optional[int], + comment: Optional[str], + available: bool, + workfile_entity: dict[str, Any], + ): + file_size = file_modified = file_created = None + if filepath and os.path.exists(filepath): + filestat = os.stat(filepath) + file_size = filestat.st_size + file_created = filestat.st_ctime + file_modified = filestat.st_mtime + + if workfile_entity is None: + workfile_entity = {} + + attrib = {} + if workfile_entity: + attrib = workfile_entity["attrib"] + + return cls( + filepath=filepath, + rootless_path=rootless_path, + version=version, + comment=comment, + file_size=file_size, + file_created=file_created, + file_modified=file_modified, + workfile_entity_id=workfile_entity.get("id"), + description=attrib.get("description") or "", + created_by=workfile_entity.get("createdBy"), + updated_by=workfile_entity.get("updatedBy"), + available=available, + ) + + def to_data(self) -> dict[str, Any]: + """Converts file item to data. + + Returns: + dict[str, Any]: Workfile item data. + + """ + return asdict(self) + + @classmethod + def from_data(cls, data: dict[str, Any]) -> WorkfileInfo: + """Converts data to workfile item. + + Args: + data (dict[str, Any]): Workfile item data. + + Returns: + WorkfileInfo: File item. + + """ + return WorkfileInfo(**data) + + +@dataclass +class PublishedWorkfileInfo: + """Information about published workfile. + + Host can copy and use the workfile using this information object. + + Attributes: + project_name (str): Name of the project where workfile lives. + folder_id (str): Folder id under which is workfile stored. 
+ task_id (Optional[str]): Task id under which is workfile stored. + representation_id (str): Representation id of the workfile. + filepath (str): Path to the workfile. + created_at (float): Timestamp when the workfile representation + was created. + author (str): Author of the workfile representation. + available (bool): True if workfile is available on the machine. + file_size (Optional[float]): Size of the workfile in bytes. + file_created (Optional[float]): Timestamp when the workfile was + created on the filesystem. + file_modified (Optional[float]): Timestamp when the workfile was + modified on the filesystem. + + """ + project_name: str + folder_id: str + task_id: Optional[str] + representation_id: str + filepath: str + created_at: float + author: str + available: bool + file_size: Optional[float] + file_created: Optional[float] + file_modified: Optional[float] + + @classmethod + def new( + cls, + project_name: str, + folder_id: str, + task_id: Optional[str], + repre_entity: dict[str, Any], + *, + filepath: str, + author: str, + available: bool, + file_size: Optional[float], + file_modified: Optional[float], + file_created: Optional[float], + ) -> "PublishedWorkfileInfo": + created_at = arrow.get(repre_entity["createdAt"]).to("local") + + return cls( + project_name=project_name, + folder_id=folder_id, + task_id=task_id, + representation_id=repre_entity["id"], + filepath=filepath, + created_at=created_at.float_timestamp, + author=author, + available=available, + file_size=file_size, + file_created=file_created, + file_modified=file_modified, + ) + + def to_data(self) -> dict[str, Any]: + """Converts file item to data. + + Returns: + dict[str, Any]: Workfile item data. + + """ + return asdict(self) + + @classmethod + def from_data(cls, data: dict[str, Any]) -> "PublishedWorkfileInfo": + """Converts data to workfile item. + + Args: + data (dict[str, Any]): Workfile item data. + + Returns: + PublishedWorkfileInfo: File item. 
+ + """ + return PublishedWorkfileInfo(**data) + + +class IWorkfileHost: + """Implementation requirements to be able to use workfiles utils and tool. + + Some of the methods are pre-implemented as they generally do the same in + all host integrations. + + """ + @abstractmethod + def save_workfile(self, dst_path: Optional[str] = None) -> None: + """Save the currently opened scene. + + Args: + dst_path (str): Where the current scene should be saved. Or use + the current path if 'None' is passed. + + """ + pass + + @abstractmethod + def open_workfile(self, filepath: str) -> None: + """Open passed filepath in the host. + + Args: + filepath (str): Path to workfile. + + """ + pass + + @abstractmethod + def get_current_workfile(self) -> Optional[str]: + """Retrieve a path to current opened file. + + Returns: + Optional[str]: Path to the file which is currently opened. None if + nothing is opened or the current workfile is unsaved. + + """ + return None + + def workfile_has_unsaved_changes(self) -> Optional[bool]: + """Currently opened scene is saved. + + Not all hosts can know if the current scene is saved because the API + of DCC does not support it. + + Returns: + Optional[bool]: True if scene is saved and False if has unsaved + modifications. None if can't tell if workfiles has + modifications. + + """ + return None + + def get_workfile_extensions(self) -> list[str]: + """Extensions that can be used to save the workfile to. + + Notes: + Method may not be used if 'list_workfiles' and + 'list_published_workfiles' are re-implemented with different + logic. + + Returns: + list[str]: List of extensions that can be used for saving. 
+ + """ + return [] + + def save_workfile_with_context( + self, + filepath: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + prepared_data: Optional[SaveWorkfileOptionalData] = None, + ) -> None: + """Save the current workfile with context. + + Arguments 'rootless_path', 'workfile_entities', 'project_entity' + and 'anatomy' can be filled to enhance efficiency if you already + have access to the values. + + Argument 'project_settings' is used to calculate 'rootless_path' + if it is not provided. + + Notes: + Should this method care about context change? + + Args: + filepath (str): Where the current scene should be saved. + folder_entity (dict[str, Any]): Folder entity. + task_entity (dict[str, Any]): Task entity. + version (Optional[int]): Version of the workfile. Information + for workfile entity. Recommended to fill. + comment (Optional[str]): Comment for the workfile. + Usually used in the filename template. + description (Optional[str]): Artist note for the workfile entity. + prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data + for speed enhancements. 
+ + """ + project_name = self.get_current_project_name() + save_workfile_context = get_save_workfile_context( + project_name, + filepath, + folder_entity, + task_entity, + host_name=self.name, + prepared_data=prepared_data, + ) + + self._before_workfile_save(save_workfile_context) + event_data = self._get_workfile_event_data( + project_name, + folder_entity, + task_entity, + filepath, + ) + self._emit_workfile_save_event(event_data, after_save=False) + + workdir = os.path.dirname(filepath) + if not os.path.exists(workdir): + os.makedirs(workdir, exist_ok=True) + + # Set 'AYON_WORKDIR' environment variable + os.environ["AYON_WORKDIR"] = workdir + + self.set_current_context( + folder_entity, + task_entity, + reason=ContextChangeReason.workfile_save, + project_entity=save_workfile_context.project_entity, + anatomy=save_workfile_context.anatomy, + ) + + self.save_workfile(filepath) + + self._save_workfile_entity( + save_workfile_context, + version, + comment, + description, + ) + self._after_workfile_save(save_workfile_context) + self._emit_workfile_save_event(event_data) + + def open_workfile_with_context( + self, + filepath: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + prepared_data: Optional[OpenWorkfileOptionalData] = None, + ) -> None: + """Open passed filepath in the host with context. + + This function should be used to open workfile in different context. + + Notes: + Should this method care about context change? + + Args: + filepath (str): Path to workfile. + folder_entity (dict[str, Any]): Folder id. + task_entity (dict[str, Any]): Task id. + prepared_data (Optional[WorkfileOptionalData]): Prepared data + for speed enhancements. 
+ + """ + context = self.get_current_context() + project_name = context["project_name"] + + open_workfile_context = get_open_workfile_context( + project_name, + filepath, + folder_entity, + task_entity, + prepared_data=prepared_data, + ) + + workdir = os.path.dirname(filepath) + # Set 'AYON_WORKDIR' environment variable + os.environ["AYON_WORKDIR"] = workdir + + event_data = self._get_workfile_event_data( + project_name, folder_entity, task_entity, filepath + ) + self._before_workfile_open(open_workfile_context) + self._emit_workfile_open_event(event_data, after_open=False) + + self.set_current_context( + folder_entity, + task_entity, + reason=ContextChangeReason.workfile_open, + project_entity=open_workfile_context.project_entity, + anatomy=open_workfile_context.anatomy, + ) + + self.open_workfile(filepath) + + self._after_workfile_open(open_workfile_context) + self._emit_workfile_open_event(event_data) + + def list_workfiles( + self, + project_name: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + prepared_data: Optional[ListWorkfilesOptionalData] = None, + ) -> list[WorkfileInfo]: + """List workfiles in the given task. + + The method should also return workfiles that are not available on + disk, but are in the AYON database. + + Notes: + - Better method name? + - This method is pre-implemented as the logic can be shared across + 95% of host integrations. Ad-hoc implementation to give host + integration workfile api functionality. + + Args: + project_name (str): Project name. + folder_entity (dict[str, Any]): Folder entity. + task_entity (dict[str, Any]): Task entity. + prepared_data (Optional[ListWorkfilesOptionalData]): Prepared + data for speed enhancements. + + Returns: + list[WorkfileInfo]: List of workfiles. 
+
+        """
+        from ayon_core.pipeline.template_data import get_template_data
+        from ayon_core.pipeline.workfile.path_resolving import (
+            get_workdir_with_workdir_data,
+            WorkfileDataParser,
+        )
+
+        extensions = self.get_workfile_extensions()
+        if not extensions:
+            return []
+
+        list_workfiles_context = get_list_workfiles_context(
+            project_name,
+            folder_entity,
+            task_entity,
+            host_name=self.name,
+            prepared_data=prepared_data,
+        )
+
+        workfile_entities_by_path = {}
+        for workfile_entity in list_workfiles_context.workfile_entities:
+            rootless_path = workfile_entity["path"]
+            path = os.path.normpath(
+                list_workfiles_context.anatomy.fill_root(rootless_path)
+            )
+            workfile_entities_by_path[path] = workfile_entity
+
+        workdir_data = get_template_data(
+            list_workfiles_context.project_entity,
+            folder_entity,
+            task_entity,
+            host_name=self.name,
+        )
+        workdir = get_workdir_with_workdir_data(
+            workdir_data,
+            project_name,
+            anatomy=list_workfiles_context.anatomy,
+            template_key=list_workfiles_context.template_key,
+            project_settings=list_workfiles_context.project_settings,
+        )
+
+        file_template = list_workfiles_context.anatomy.get_template_item(
+            "work", list_workfiles_context.template_key, "file"
+        )
+        rootless_workdir = workdir.rootless
+        if platform.system().lower() == "windows":
+            rootless_workdir = rootless_workdir.replace("\\", "/")
+
+        filenames = []
+        if os.path.exists(workdir):
+            filenames = list(os.listdir(workdir))
+
+        data_parser = WorkfileDataParser(file_template, workdir_data)
+        items = []
+        for filename in filenames:
+            # TODO add 'default' support for folders
+            ext = os.path.splitext(filename)[1].lower()
+            if ext not in extensions:
+                continue
+
+            filepath = os.path.join(workdir, filename)
+
+            rootless_path = f"{rootless_workdir}/{filename}"
+            workfile_entity = workfile_entities_by_path.pop(
+                filepath, None
+            )
+            version = comment = None
+            if workfile_entity is not None:
+                _data = workfile_entity["data"]
+                version = _data.get("version")
+                comment = 
_data.get("comment") + + if version is None: + parsed_data = data_parser.parse_data(filename) + version = parsed_data.version + comment = parsed_data.comment + + item = WorkfileInfo.new( + filepath, + rootless_path, + version=version, + comment=comment, + available=True, + workfile_entity=workfile_entity, + ) + items.append(item) + + for filepath, workfile_entity in workfile_entities_by_path.items(): + # Workfile entity is not in the filesystem + # but it is in the database + rootless_path = workfile_entity["path"] + ext = os.path.splitext(rootless_path)[1].lower() + if ext not in extensions: + continue + + _data = workfile_entity["data"] + version = _data.get("version") + comment = _data.get("comment") + if version is None: + filename = os.path.basename(rootless_path) + parsed_data = data_parser.parse_data(filename) + version = parsed_data.version + comment = parsed_data.comment + + available = os.path.exists(filepath) + items.append(WorkfileInfo.new( + filepath, + rootless_path, + version=version, + comment=comment, + available=available, + workfile_entity=workfile_entity, + )) + + return items + + def list_published_workfiles( + self, + project_name: str, + folder_id: str, + *, + prepared_data: Optional[ListPublishedWorkfilesOptionalData] = None, + ) -> list[PublishedWorkfileInfo]: + """List published workfiles for the given folder. + + The default implementation looks for products with the 'workfile' + product type. + + Pre-fetched entities have mandatory fields to be fetched: + - Version: 'id', 'author', 'taskId' + - Representation: 'id', 'versionId', 'files' + + Args: + project_name (str): Project name. + folder_id (str): Folder id. + prepared_data (Optional[ListPublishedWorkfilesOptionalData]): + Prepared data for speed enhancements. + + Returns: + list[PublishedWorkfileInfo]: Published workfile information for + the given context. 
+ + """ + list_workfiles_context = get_list_published_workfiles_context( + project_name, + folder_id, + prepared_data=prepared_data, + ) + if not list_workfiles_context.repre_entities: + return [] + + versions_by_id = { + version_entity["id"]: version_entity + for version_entity in list_workfiles_context.version_entities + } + extensions = { + ext.lstrip(".") + for ext in self.get_workfile_extensions() + } + items = [] + for repre_entity in list_workfiles_context.repre_entities: + version_id = repre_entity["versionId"] + version_entity = versions_by_id[version_id] + task_id = version_entity["taskId"] + + # Filter by extension + workfile_path = None + for repre_file in repre_entity["files"]: + ext = ( + os.path.splitext(repre_file["name"])[1] + .lower() + .lstrip(".") + ) + if ext in extensions: + workfile_path = repre_file["path"] + break + + if not workfile_path: + continue + + try: + workfile_path = workfile_path.format( + root=list_workfiles_context.anatomy.roots + ) + except Exception: + self.log.warning( + "Failed to format workfile path.", exc_info=True + ) + + is_available = False + file_size = file_modified = file_created = None + if workfile_path and os.path.exists(workfile_path): + filestat = os.stat(workfile_path) + is_available = True + file_size = filestat.st_size + file_created = filestat.st_ctime + file_modified = filestat.st_mtime + + workfile_item = PublishedWorkfileInfo.new( + project_name, + folder_id, + task_id, + repre_entity, + filepath=workfile_path, + author=version_entity["author"], + available=is_available, + file_size=file_size, + file_created=file_created, + file_modified=file_modified, + ) + items.append(workfile_item) + + return items + + def copy_workfile( + self, + src_path: str, + dst_path: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + open_workfile: bool = True, + prepared_data: 
Optional[CopyWorkfileOptionalData] = None, + ) -> None: + """Save workfile path with target folder and task context. + + It is expected that workfile is saved to the current project, but + can be copied from the other project. + + Arguments 'rootless_path', 'workfile_entities', 'project_entity' + and 'anatomy' can be filled to enhance efficiency if you already + have access to the values. + + Argument 'project_settings' is used to calculate 'rootless_path' + if it is not provided. + + Args: + src_path (str): Path to the source scene. + dst_path (str): Where the scene should be saved. + folder_entity (dict[str, Any]): Folder entity. + task_entity (dict[str, Any]): Task entity. + version (Optional[int]): Version of the workfile. Information + for workfile entity. Recommended to fill. + comment (Optional[str]): Comment for the workfile. + description (Optional[str]): Artist note for the workfile entity. + open_workfile (bool): Open workfile when copied. + prepared_data (Optional[CopyWorkfileOptionalData]): Prepared data + for speed enhancements. 
+ + """ + project_name = self.get_current_project_name() + copy_workfile_context: CopyWorkfileContext = get_copy_workfile_context( + project_name, + src_path, + dst_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + host_name=self.name, + prepared_data=prepared_data, + ) + self._copy_workfile( + copy_workfile_context, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + ) + + def copy_workfile_representation( + self, + src_project_name: str, + src_representation_entity: dict[str, Any], + dst_path: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + open_workfile: bool = True, + prepared_data: Optional[CopyPublishedWorkfileOptionalData] = None, + ) -> None: + """Copy workfile representation. + + Use representation as a source for the workfile. + + Arguments 'rootless_path', 'workfile_entities', 'project_entity' + and 'anatomy' can be filled to enhance efficiency if you already + have access to the values. + + Argument 'project_settings' is used to calculate 'rootless_path' + if it is not provided. + + Args: + src_project_name (str): Project name. + src_representation_entity (dict[str, Any]): Representation + entity. + dst_path (str): Where the scene should be saved. + folder_entity (dict[str, Any): Folder entity. + task_entity (dict[str, Any]): Task entity. + version (Optional[int]): Version of the workfile. Information + for workfile entity. Recommended to fill. + comment (Optional[str]): Comment for the workfile. + description (Optional[str]): Artist note for the workfile entity. + open_workfile (bool): Open workfile when copied. + prepared_data (Optional[CopyPublishedWorkfileOptionalData]): + Prepared data for speed enhancements. 
+ + """ + project_name = self.get_current_project_name() + copy_repre_workfile_context: CopyPublishedWorkfileContext = ( + get_copy_repre_workfile_context( + project_name, + src_project_name, + src_representation_entity, + dst_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + host_name=self.name, + prepared_data=prepared_data, + ) + ) + self._copy_workfile( + copy_repre_workfile_context, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + ) + + # --- Deprecated method names --- + @deprecated("Use 'get_workfile_extensions' instead") + def file_extensions(self): + """Deprecated variant of 'get_workfile_extensions'. + + Todo: + Remove when all usages are replaced. + + """ + return self.get_workfile_extensions() + + @deprecated("Use 'save_workfile' instead") + def save_file(self, dst_path=None): + """Deprecated variant of 'save_workfile'. + + Todo: + Remove when all usages are replaced + + """ + self.save_workfile(dst_path) + + @deprecated("Use 'open_workfile' instead") + def open_file(self, filepath): + """Deprecated variant of 'open_workfile'. + + Todo: + Remove when all usages are replaced. + + """ + return self.open_workfile(filepath) + + @deprecated("Use 'get_current_workfile' instead") + def current_file(self): + """Deprecated variant of 'get_current_workfile'. + + Todo: + Remove when all usages are replaced. + + """ + return self.get_current_workfile() + + @deprecated("Use 'workfile_has_unsaved_changes' instead") + def has_unsaved_changes(self): + """Deprecated variant of 'workfile_has_unsaved_changes'. + + Todo: + Remove when all usages are replaced. 
+
+        """
+        return self.workfile_has_unsaved_changes()
+
+    def _copy_workfile(
+        self,
+        copy_workfile_context: CopyWorkfileContext,
+        *,
+        version: Optional[int],
+        comment: Optional[str],
+        description: Optional[str],
+        open_workfile: bool,
+    ) -> None:
+        """Save workfile path with target folder and task context.
+
+        It is expected that workfile is saved to the current project, but
+        can be copied from the other project.
+
+        Arguments 'rootless_path', 'workfile_entities', 'project_entity'
+        and 'anatomy' can be filled to enhance efficiency if you already
+        have access to the values.
+
+        Argument 'project_settings' is used to calculate 'rootless_path'
+        if it is not provided.
+
+        Args:
+            copy_workfile_context (CopyWorkfileContext): Copy workfile
+                context with all prepared data.
+            version (Optional[int]): Version of the workfile. Information
+                for workfile entity. Recommended to fill.
+            comment (Optional[str]): Comment for the workfile.
+            description (Optional[str]): Artist note for the workfile entity.
+            open_workfile (bool): Open workfile when copied.
+
+        """
+        self._before_workfile_copy(copy_workfile_context)
+        event_data = self._get_workfile_event_data(
+            copy_workfile_context.project_name,
+            copy_workfile_context.folder_entity,
+            copy_workfile_context.task_entity,
+            copy_workfile_context.dst_path,
+        )
+        self._emit_workfile_save_event(event_data, after_save=False)
+
+        dst_dir = os.path.dirname(copy_workfile_context.dst_path)
+        if not os.path.exists(dst_dir):
+            os.makedirs(dst_dir, exist_ok=True)
+        shutil.copy(
+            copy_workfile_context.src_path,
+            copy_workfile_context.dst_path
+        )
+
+        self._save_workfile_entity(
+            copy_workfile_context,
+            version,
+            comment,
+            description,
+        )
+        self._after_workfile_copy(copy_workfile_context)
+        self._emit_workfile_save_event(event_data)
+
+        if not open_workfile:
+            return
+
+        self.open_workfile_with_context(
+            copy_workfile_context.dst_path,
+            copy_workfile_context.folder_entity,
+            copy_workfile_context.task_entity,
+        )
+
+    def _save_workfile_entity(
+        self,
+        save_workfile_context: SaveWorkfileContext,
+        version: Optional[int],
+        comment: Optional[str],
+        description: Optional[str],
+    ) -> Optional[dict[str, Any]]:
+        """Create or update workfile entity to AYON based on provided data.
+
+        Args:
+            save_workfile_context (SaveWorkfileContext): Save workfile
+                context with all prepared data.
+            version (Optional[int]): Version of the workfile.
+            comment (Optional[str]): Comment for the workfile.
+            description (Optional[str]): Artist note for the workfile entity.
+
+        Returns:
+            Optional[dict[str, Any]]: Workfile entity.
+ + """ + from ayon_core.pipeline.workfile.utils import ( + save_workfile_info + ) + + project_name = self.get_current_project_name() + if not description: + description = None + + if not comment: + comment = None + + rootless_path = save_workfile_context.rootless_path + # It is not possible to create workfile infor without rootless path + workfile_info = None + if not rootless_path: + return workfile_info + + if platform.system().lower() == "windows": + rootless_path = rootless_path.replace("\\", "/") + + workfile_info = save_workfile_info( + project_name, + save_workfile_context.task_entity["id"], + rootless_path, + self.name, + version, + comment, + description, + workfile_entities=save_workfile_context.workfile_entities, + ) + return workfile_info + + def _create_extra_folders( + self, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + workdir: str, + ) -> None: + """Create extra folders in the workdir. + + This method should be called when workfile is saved or copied. + + Args: + folder_entity (dict[str, Any]): Folder entity. + task_entity (dict[str, Any]): Task entity. + workdir (str): Workdir where workfile/s will be stored. + + """ + from ayon_core.pipeline.workfile.path_resolving import ( + create_workdir_extra_folders + ) + + project_name = self.get_current_project_name() + + # Create extra folders + create_workdir_extra_folders( + workdir, + self.name, + task_entity["taskType"], + task_entity["name"], + project_name + ) + + def _get_workfile_event_data( + self, + project_name: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + filepath: str, + ) -> dict[str, Optional[str]]: + """Prepare workfile event data. + + Args: + project_name (str): Name of the project where workfile lives. + folder_entity (dict[str, Any]): Folder entity. + task_entity (dict[str, Any]): Task entity. + filepath (str): Path to the workfile. + + Returns: + dict[str, Optional[str]]: Data for workfile event. 
+ + """ + workdir, filename = os.path.split(filepath) + return { + "project_name": project_name, + "folder_id": folder_entity["id"], + "folder_path": folder_entity["path"], + "task_id": task_entity["id"], + "task_name": task_entity["name"], + "host_name": self.name, + "filepath": filepath, + "filename": filename, + "workdir_path": workdir, + } + + def _before_workfile_open( + self, open_workfile_context: OpenWorkfileContext + ) -> None: + """Before workfile is opened. + + This method is called before the workfile is opened in the host. + + Can be overridden to implement host specific logic. + + Args: + open_workfile_context (OpenWorkfileContext): Context and path of + workfile to open. + + """ + pass + + def _after_workfile_open( + self, open_workfile_context: OpenWorkfileContext + ) -> None: + """After workfile is opened. + + This method is called after the workfile is opened in the host. + + Can be overridden to implement host specific logic. + + Args: + open_workfile_context (OpenWorkfileContext): Context and path of + opened workfile. + + """ + pass + + def _before_workfile_save( + self, save_workfile_context: SaveWorkfileContext + ) -> None: + """Before workfile is saved. + + This method is called before the workfile is saved in the host. + + Can be overridden to implement host specific logic. + + Args: + save_workfile_context (SaveWorkfileContext): Workfile path with + target folder and task context. + + """ + pass + + def _after_workfile_save( + self, save_workfile_context: SaveWorkfileContext + ) -> None: + """After workfile is saved. + + This method is called after the workfile is saved in the host. + + Can be overridden to implement host specific logic. + + Args: + save_workfile_context (SaveWorkfileContext): Workfile path with + target folder and task context. 
+ + """ + workdir = os.path.dirname(save_workfile_context.dst_path) + self._create_extra_folders( + save_workfile_context.folder_entity, + save_workfile_context.task_entity, + workdir + ) + + def _before_workfile_copy( + self, copy_workfile_context: CopyWorkfileContext + ) -> None: + """Before workfile is copied. + + This method is called before the workfile is copied by host + integration. + + Can be overridden to implement host specific logic. + + Args: + copy_workfile_context (CopyWorkfileContext): Source and destination + path with context before workfile is copied. + + """ + pass + + def _after_workfile_copy( + self, copy_workfile_context: CopyWorkfileContext + ) -> None: + """After workfile is copied. + + This method is called after the workfile is copied by host + integration. + + Can be overridden to implement host specific logic. + + Args: + copy_workfile_context (CopyWorkfileContext): Source and destination + path with context after workfile is copied. + + """ + workdir = os.path.dirname(copy_workfile_context.dst_path) + self._create_extra_folders( + copy_workfile_context.folder_entity, + copy_workfile_context.task_entity, + workdir, + ) + + def _emit_workfile_open_event( + self, + event_data: dict[str, Optional[str]], + after_open: bool = True, + ) -> None: + """Emit workfile save event. + + Emit event before and after workfile is opened. + + This method is not meant to be overridden. + + Other addons can listen to this event and do additional steps. + + Args: + event_data (dict[str, Optional[str]]): Prepare event data. + after_open (bool): Emit event after workfile is opened. 
+ + """ + topics = [] + topic_end = "before" + if after_open: + topics.append("workfile.opened") + topic_end = "after" + + # Keep backwards compatible event topic + topics.append(f"workfile.open.{topic_end}") + + for topic in topics: + emit_event(topic, event_data) + + def _emit_workfile_save_event( + self, + event_data: dict[str, Optional[str]], + after_save: bool = True, + ) -> None: + """Emit workfile save event. + + Emit event before and after workfile is saved or copied. + + This method is not meant to be overridden. + + Other addons can listen to this event and do additional steps. + + Args: + event_data (dict[str, Optional[str]]): Prepare event data. + after_save (bool): Emit event after workfile is saved. + + """ + topics = [] + topic_end = "before" + if after_save: + topics.append("workfile.saved") + topic_end = "after" + + # Keep backwards compatible event topic + topics.append(f"workfile.save.{topic_end}") + + for topic in topics: + emit_event(topic, event_data) diff --git a/client/ayon_core/lib/local_settings.py b/client/ayon_core/lib/local_settings.py index d994145d4b..1edfc3c1b6 100644 --- a/client/ayon_core/lib/local_settings.py +++ b/client/ayon_core/lib/local_settings.py @@ -8,6 +8,7 @@ import warnings from datetime import datetime from abc import ABC, abstractmethod from functools import lru_cache +from typing import Optional, Any import platformdirs import ayon_api @@ -15,22 +16,31 @@ import ayon_api _PLACEHOLDER = object() -def _get_ayon_appdirs(*args): +# TODO should use 'KeyError' or 'Exception' as base +class RegistryItemNotFound(ValueError): + """Raised when the item is not found in the keyring.""" + + +class _Cache: + username = None + + +def _get_ayon_appdirs(*args: str) -> str: return os.path.join( platformdirs.user_data_dir("AYON", "Ynput"), *args ) -def get_ayon_appdirs(*args): +def get_ayon_appdirs(*args: str) -> str: """Local app data directory of AYON client. 
Deprecated: Use 'get_launcher_local_dir' or 'get_launcher_storage_dir' based on - use-case. Deprecation added 24/08/09 (0.4.4-dev.1). + a use-case. Deprecation added 24/08/09 (0.4.4-dev.1). Args: - *args (Iterable[str]): Subdirectories/files in local app data dir. + *args (Iterable[str]): Subdirectories/files in the local app data dir. Returns: str: Path to directory/file in local app data dir. @@ -48,7 +58,7 @@ def get_ayon_appdirs(*args): def get_launcher_storage_dir(*subdirs: str) -> str: - """Get storage directory for launcher. + """Get a storage directory for launcher. Storage directory is used for storing shims, addons, dependencies, etc. @@ -73,14 +83,14 @@ def get_launcher_storage_dir(*subdirs: str) -> str: def get_launcher_local_dir(*subdirs: str) -> str: - """Get local directory for launcher. + """Get a local directory for launcher. - Local directory is used for storing machine or user specific data. + Local directory is used for storing machine or user-specific data. - The location is user specific. + The location is user-specific. Note: - This function should be called at least once on bootstrap. + This function should be called at least once on the bootstrap. Args: *subdirs (str): Subdirectories relative to local dir. @@ -97,7 +107,7 @@ def get_launcher_local_dir(*subdirs: str) -> str: def get_addons_resources_dir(addon_name: str, *args) -> str: - """Get directory for storing resources for addons. + """Get a directory for storing resources for addons. Some addons might need to store ad-hoc resources that are not part of addon client package (e.g. because of size). Studio might define @@ -107,7 +117,7 @@ def get_addons_resources_dir(addon_name: str, *args) -> str: Args: addon_name (str): Addon name. - *args (str): Subfolders in resources directory. + *args (str): Subfolders in the resources directory. Returns: str: Path to resources directory. 
@@ -120,6 +130,10 @@ def get_addons_resources_dir(addon_name: str, *args) -> str: return os.path.join(addons_resources_dir, addon_name, *args) +class _FakeException(Exception): + """Placeholder exception used if real exception is not available.""" + + class AYONSecureRegistry: """Store information using keyring. @@ -130,9 +144,10 @@ class AYONSecureRegistry: identify which data were created by AYON. Args: - name(str): Name of registry used as identifier for data. + name(str): Name of registry used as the identifier for data. + """ - def __init__(self, name): + def __init__(self, name: str) -> None: try: import keyring @@ -148,13 +163,12 @@ class AYONSecureRegistry: keyring.set_keyring(Windows.WinVaultKeyring()) # Force "AYON" prefix - self._name = "/".join(("AYON", name)) + self._name = f"AYON/{name}" - def set_item(self, name, value): - # type: (str, str) -> None - """Set sensitive item into system's keyring. + def set_item(self, name: str, value: str) -> None: + """Set sensitive item into the system's keyring. - This uses `Keyring module`_ to save sensitive stuff into system's + This uses `Keyring module`_ to save sensitive stuff into the system's keyring. Args: @@ -168,22 +182,26 @@ class AYONSecureRegistry: import keyring keyring.set_password(self._name, name, value) + self.get_item.cache_clear() @lru_cache(maxsize=32) - def get_item(self, name, default=_PLACEHOLDER): - """Get value of sensitive item from system's keyring. + def get_item( + self, name: str, default: Any = _PLACEHOLDER + ) -> Optional[str]: + """Get value of sensitive item from the system's keyring. See also `Keyring module`_ Args: name (str): Name of the item. - default (Any): Default value if item is not available. + default (Any): Default value if the item is not available. Returns: value (str): Value of the item. Raises: - ValueError: If item doesn't exist and default is not defined. + RegistryItemNotFound: If the item doesn't exist and default + is not defined. .. 
_Keyring module: https://github.com/jaraco/keyring @@ -191,21 +209,29 @@ class AYONSecureRegistry: """ import keyring - value = keyring.get_password(self._name, name) + # Capture 'ItemNotFoundException' exception (on linux) + try: + from secretstorage.exceptions import ItemNotFoundException + except ImportError: + ItemNotFoundException = _FakeException + + try: + value = keyring.get_password(self._name, name) + except ItemNotFoundException: + value = None + if value is not None: return value if default is not _PLACEHOLDER: return default - # NOTE Should raise `KeyError` - raise ValueError( - "Item {}:{} does not exist in keyring.".format(self._name, name) + raise RegistryItemNotFound( + f"Item {self._name}:{name} not found in keyring." ) - def delete_item(self, name): - # type: (str) -> None - """Delete value stored in system's keyring. + def delete_item(self, name: str) -> None: + """Delete value stored in the system's keyring. See also `Keyring module`_ @@ -223,47 +249,38 @@ class AYONSecureRegistry: class ASettingRegistry(ABC): - """Abstract class defining structure of **SettingRegistry** class. - - It is implementing methods to store secure items into keyring, otherwise - mechanism for storing common items must be implemented in abstract - methods. - - Attributes: - _name (str): Registry names. + """Abstract class to defining structure of registry class. """ - - def __init__(self, name): - # type: (str) -> ASettingRegistry - super(ASettingRegistry, self).__init__() - + def __init__(self, name: str) -> None: self._name = name - self._items = {} - - def set_item(self, name, value): - # type: (str, str) -> None - """Set item to settings registry. - - Args: - name (str): Name of the item. - value (str): Value of the item. 
- - """ - self._set_item(name, value) @abstractmethod - def _set_item(self, name, value): - # type: (str, str) -> None - # Implement it - pass + def _get_item(self, name: str) -> Any: + """Get item value from registry.""" - def __setitem__(self, name, value): - self._items[name] = value + @abstractmethod + def _set_item(self, name: str, value: str) -> None: + """Set item value to registry.""" + + @abstractmethod + def _delete_item(self, name: str) -> None: + """Delete item from registry.""" + + def __getitem__(self, name: str) -> Any: + return self._get_item(name) + + def __setitem__(self, name: str, value: str) -> None: self._set_item(name, value) - def get_item(self, name): - # type: (str) -> str + def __delitem__(self, name: str) -> None: + self._delete_item(name) + + @property + def name(self) -> str: + return self._name + + def get_item(self, name: str) -> str: """Get item from settings registry. Args: @@ -273,22 +290,22 @@ class ASettingRegistry(ABC): value (str): Value of the item. Raises: - ValueError: If item doesn't exist. + RegistryItemNotFound: If the item doesn't exist. """ return self._get_item(name) - @abstractmethod - def _get_item(self, name): - # type: (str) -> str - # Implement it - pass + def set_item(self, name: str, value: str) -> None: + """Set item to settings registry. - def __getitem__(self, name): - return self._get_item(name) + Args: + name (str): Name of the item. + value (str): Value of the item. - def delete_item(self, name): - # type: (str) -> None + """ + self._set_item(name, value) + + def delete_item(self, name: str) -> None: """Delete item from settings registry. Args: @@ -297,16 +314,6 @@ class ASettingRegistry(ABC): """ self._delete_item(name) - @abstractmethod - def _delete_item(self, name): - # type: (str) -> None - """Delete item from settings.""" - pass - - def __delitem__(self, name): - del self._items[name] - self._delete_item(name) - class IniSettingRegistry(ASettingRegistry): """Class using :mod:`configparser`. 
@@ -314,20 +321,17 @@ class IniSettingRegistry(ASettingRegistry): This class is using :mod:`configparser` (ini) files to store items. """ - - def __init__(self, name, path): - # type: (str, str) -> IniSettingRegistry - super(IniSettingRegistry, self).__init__(name) + def __init__(self, name: str, path: str) -> None: + super().__init__(name) # get registry file - self._registry_file = os.path.join(path, "{}.ini".format(name)) + self._registry_file = os.path.join(path, f"{name}.ini") if not os.path.exists(self._registry_file): with open(self._registry_file, mode="w") as cfg: print("# Settings registry", cfg) now = datetime.now().strftime("%d/%m/%Y %H:%M:%S") - print("# {}".format(now), cfg) + print(f"# {now}", cfg) - def set_item_section(self, section, name, value): - # type: (str, str, str) -> None + def set_item_section(self, section: str, name: str, value: str) -> None: """Set item to specific section of ini registry. If section doesn't exists, it is created. @@ -350,12 +354,10 @@ class IniSettingRegistry(ASettingRegistry): with open(self._registry_file, mode="w") as cfg: config.write(cfg) - def _set_item(self, name, value): - # type: (str, str) -> None + def _set_item(self, name: str, value: str) -> None: self.set_item_section("MAIN", name, value) - def set_item(self, name, value): - # type: (str, str) -> None + def set_item(self, name: str, value: str) -> None: """Set item to settings ini file. This saves item to ``DEFAULT`` section of ini as each item there @@ -368,10 +370,9 @@ class IniSettingRegistry(ASettingRegistry): """ # this does the some, overridden just for different docstring. # we cast value to str as ini options values must be strings. - super(IniSettingRegistry, self).set_item(name, str(value)) + super().set_item(name, str(value)) - def get_item(self, name): - # type: (str) -> str + def get_item(self, name: str) -> str: """Gets item from settings ini file. 
This gets settings from ``DEFAULT`` section of ini file as each item @@ -384,19 +385,18 @@ class IniSettingRegistry(ASettingRegistry): str: Value of item. Raises: - ValueError: If value doesn't exist. + RegistryItemNotFound: If value doesn't exist. """ - return super(IniSettingRegistry, self).get_item(name) + return super().get_item(name) @lru_cache(maxsize=32) - def get_item_from_section(self, section, name): - # type: (str, str) -> str + def get_item_from_section(self, section: str, name: str) -> str: """Get item from section of ini file. This will read ini file and try to get item value from specified - section. If that section or item doesn't exist, :exc:`ValueError` - is risen. + section. If that section or item doesn't exist, + :exc:`RegistryItemNotFound` is risen. Args: section (str): Name of ini section. @@ -406,7 +406,7 @@ class IniSettingRegistry(ASettingRegistry): str: Item value. Raises: - ValueError: If value doesn't exist. + RegistryItemNotFound: If value doesn't exist. """ config = configparser.ConfigParser() @@ -414,16 +414,15 @@ class IniSettingRegistry(ASettingRegistry): try: value = config[section][name] except KeyError: - raise ValueError( - "Registry doesn't contain value {}:{}".format(section, name)) + raise RegistryItemNotFound( + f"Registry doesn't contain value {section}:{name}" + ) return value - def _get_item(self, name): - # type: (str) -> str + def _get_item(self, name: str) -> str: return self.get_item_from_section("MAIN", name) - def delete_item_from_section(self, section, name): - # type: (str, str) -> None + def delete_item_from_section(self, section: str, name: str) -> None: """Delete item from section in ini file. Args: @@ -431,7 +430,7 @@ class IniSettingRegistry(ASettingRegistry): name (str): Name of the item. Raises: - ValueError: If item doesn't exist. + RegistryItemNotFound: If the item doesn't exist. 
""" self.get_item_from_section.cache_clear() @@ -440,8 +439,9 @@ class IniSettingRegistry(ASettingRegistry): try: _ = config[section][name] except KeyError: - raise ValueError( - "Registry doesn't contain value {}:{}".format(section, name)) + raise RegistryItemNotFound( + f"Registry doesn't contain value {section}:{name}" + ) config.remove_option(section, name) # if section is empty, delete it @@ -457,29 +457,28 @@ class IniSettingRegistry(ASettingRegistry): class JSONSettingRegistry(ASettingRegistry): - """Class using json file as storage.""" + """Class using a json file as storage.""" - def __init__(self, name, path): - # type: (str, str) -> JSONSettingRegistry - super(JSONSettingRegistry, self).__init__(name) - #: str: name of registry file - self._registry_file = os.path.join(path, "{}.json".format(name)) + def __init__(self, name: str, path: str) -> None: + super().__init__(name) + self._registry_file = os.path.join(path, f"{name}.json") now = datetime.now().strftime("%d/%m/%Y %H:%M:%S") header = { "__metadata__": {"generated": now}, "registry": {} } - if not os.path.exists(os.path.dirname(self._registry_file)): - os.makedirs(os.path.dirname(self._registry_file), exist_ok=True) + # Use 'os.path.dirname' in case someone uses slashes in 'name' + dirpath = os.path.dirname(self._registry_file) + if not os.path.exists(dirpath): + os.makedirs(dirpath, exist_ok=True) if not os.path.exists(self._registry_file): with open(self._registry_file, mode="w") as cfg: json.dump(header, cfg, indent=4) @lru_cache(maxsize=32) - def _get_item(self, name): - # type: (str) -> object - """Get item value from registry json. + def _get_item(self, name: str) -> str: + """Get item value from the registry. 
Note: See :meth:`ayon_core.lib.JSONSettingRegistry.get_item` @@ -490,29 +489,13 @@ class JSONSettingRegistry(ASettingRegistry): try: value = data["registry"][name] except KeyError: - raise ValueError( - "Registry doesn't contain value {}".format(name)) + raise RegistryItemNotFound( + f"Registry doesn't contain value {name}" + ) return value - def get_item(self, name): - # type: (str) -> object - """Get item value from registry json. - - Args: - name (str): Name of the item. - - Returns: - value of the item - - Raises: - ValueError: If item is not found in registry file. - - """ - return self._get_item(name) - - def _set_item(self, name, value): - # type: (str, object) -> None - """Set item value to registry json. + def _set_item(self, name: str, value: str) -> None: + """Set item value to the registry. Note: See :meth:`ayon_core.lib.JSONSettingRegistry.set_item` @@ -524,41 +507,39 @@ class JSONSettingRegistry(ASettingRegistry): cfg.truncate(0) cfg.seek(0) json.dump(data, cfg, indent=4) - - def set_item(self, name, value): - # type: (str, object) -> None - """Set item and its value into json registry file. - - Args: - name (str): name of the item. - value (Any): value of the item. - - """ - self._set_item(name, value) - - def _delete_item(self, name): - # type: (str) -> None self._get_item.cache_clear() + + def _delete_item(self, name: str) -> None: with open(self._registry_file, "r+") as cfg: data = json.load(cfg) del data["registry"][name] cfg.truncate(0) cfg.seek(0) json.dump(data, cfg, indent=4) + self._get_item.cache_clear() class AYONSettingsRegistry(JSONSettingRegistry): """Class handling AYON general settings registry. Args: - name (Optional[str]): Name of the registry. - """ + name (Optional[str]): Name of the registry. Using 'None' or not + passing name is deprecated. 
- def __init__(self, name=None): + """ + def __init__(self, name: Optional[str] = None) -> None: if not name: name = "AYON_settings" + warnings.warn( + ( + "Used 'AYONSettingsRegistry' without 'name' argument." + " The argument will be required in future versions." + ), + DeprecationWarning, + stacklevel=2, + ) path = get_launcher_storage_dir() - super(AYONSettingsRegistry, self).__init__(name, path) + super().__init__(name, path) def get_local_site_id(): @@ -591,10 +572,26 @@ def get_local_site_id(): def get_ayon_username(): """AYON username used for templates and publishing. - Uses curet ayon api username. + Uses current ayon api username. Returns: str: Username. """ - return ayon_api.get_user()["name"] + # Look for username in the connection stack + # - this is used when service is working as other user + # (e.g. in background sync) + # TODO @iLLiCiTiT - do not use private attribute of 'ServerAPI', rather + # use public method to get username from connection stack. + con = ayon_api.get_server_api_connection() + user_stack = getattr(con, "_as_user_stack", None) + if user_stack is not None: + username = user_stack.username + if username is not None: + return username + + # Cache the username to avoid multiple API calls + # - it is not expected that user would change + if _Cache.username is None: + _Cache.username = ayon_api.get_user()["name"] + return _Cache.username diff --git a/client/ayon_core/lib/path_templates.py b/client/ayon_core/lib/path_templates.py index 9e3e455a6c..c6e9e14eac 100644 --- a/client/ayon_core/lib/path_templates.py +++ b/client/ayon_core/lib/path_templates.py @@ -3,6 +3,7 @@ import re import copy import numbers import warnings +import platform from string import Formatter import typing from typing import List, Dict, Any, Set @@ -12,6 +13,7 @@ if typing.TYPE_CHECKING: SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)") OPTIONAL_PATTERN = re.compile(r"(<.*?[^{0]*>)[^0-9]*?") +_IS_WINDOWS = platform.system().lower() == "windows" class 
TemplateUnsolved(Exception):
@@ -277,8 +279,11 @@ class TemplateResult(str):
         """Convert to normalized path."""
 
         cls = self.__class__
+        path = str(self)
+        if _IS_WINDOWS:
+            path = path.replace("\\", "/")
         return cls(
-            os.path.normpath(self.replace("\\", "/")),
+            os.path.normpath(path),
             self.template,
             self.solved,
             self.used_values,
diff --git a/client/ayon_core/pipeline/anatomy/__init__.py b/client/ayon_core/pipeline/anatomy/__init__.py
index 7000f51495..36bc2a138d 100644
--- a/client/ayon_core/pipeline/anatomy/__init__.py
+++ b/client/ayon_core/pipeline/anatomy/__init__.py
@@ -6,6 +6,7 @@ from .exceptions import (
     AnatomyTemplateUnsolved,
 )
 from .anatomy import Anatomy
+from .templates import AnatomyTemplateResult, AnatomyStringTemplate
 
 
 __all__ = (
@@ -16,4 +17,7 @@ __all__ = (
     "AnatomyTemplateUnsolved",
 
     "Anatomy",
+
+    "AnatomyTemplateResult",
+    "AnatomyStringTemplate",
 )
diff --git a/client/ayon_core/pipeline/anatomy/templates.py b/client/ayon_core/pipeline/anatomy/templates.py
index d89b70719e..e3ec005089 100644
--- a/client/ayon_core/pipeline/anatomy/templates.py
+++ b/client/ayon_core/pipeline/anatomy/templates.py
@@ -1,6 +1,7 @@
 import os
 import re
 import copy
+import platform
 import collections
 import numbers
@@ -15,6 +16,7 @@ from .exceptions import (
     AnatomyTemplateUnsolved,
 )
 
+_IS_WINDOWS = platform.system().lower() == "windows"
 _PLACEHOLDER = object()
@@ -526,6 +528,14 @@ class AnatomyTemplates:
             root_key = "{" + root_key + "}"
             output = output.replace(str(used_value), root_key)
 
+        # Make sure rootless path is with forward slashes
+        # NOTE(review): 'str.replace' returns a new string, the result must
+        #   be assigned back, otherwise this is a no-op.
+        if _IS_WINDOWS:
+            output = output.replace("\\", "/")
+
+        # Make sure there are no double slashes
+        while "//" in output:
+            output = output.replace("//", "/")
+
         return output
 
     def format(self, data, strict=True):
diff --git a/client/ayon_core/pipeline/context_tools.py b/client/ayon_core/pipeline/context_tools.py
index 66556bbb35..423e8f7216 100644
--- a/client/ayon_core/pipeline/context_tools.py
+++ 
b/client/ayon_core/pipeline/context_tools.py @@ -1,9 +1,12 @@ """Core pipeline functionality""" +from __future__ import annotations import os import logging import platform import uuid +import warnings +from typing import Optional, Any import ayon_api import pyblish.api @@ -14,8 +17,6 @@ from ayon_core.host import HostBase from ayon_core.lib import ( is_in_tests, initialize_ayon_connection, - emit_event, - version_up ) from ayon_core.addon import load_addons, AddonsManager from ayon_core.settings import get_project_settings @@ -23,13 +24,7 @@ from ayon_core.settings import get_project_settings from .publish.lib import filter_pyblish_plugins from .anatomy import Anatomy from .template_data import get_template_data_with_names -from .workfile import ( - get_workdir, - get_custom_workfile_template_by_string_context, - get_workfile_template_key_from_context, - get_last_workfile, - MissingWorkdirError, -) +from .workfile import get_custom_workfile_template_by_string_context from . import ( register_loader_plugin_path, register_inventory_action_path, @@ -75,7 +70,7 @@ def _get_addons_manager(): def register_root(path): - """Register currently active root""" + """DEPRECATED Register currently active root.""" log.info("Registering root: %s" % path) _registered_root["_"] = path @@ -94,18 +89,29 @@ def registered_root(): Returns: dict[str, str]: Root paths. - """ + """ + warnings.warn( + "Used deprecated function 'registered_root'. Please use 'Anatomy'" + " to get roots.", + DeprecationWarning, + stacklevel=2, + ) return _registered_root["_"] -def install_host(host): +def install_host(host: HostBase) -> None: """Install `host` into the running Python session. Args: host (HostBase): A host interface object. """ + if not isinstance(host, HostBase): + log.error( + f"Host must be a subclass of 'HostBase', got '{type(host)}'." 
+ ) + global _is_installed _is_installed = True @@ -183,7 +189,7 @@ def install_ayon_plugins(project_name=None, host_name=None): register_inventory_action_path(INVENTORY_PATH) if host_name is None: - host_name = os.environ.get("AYON_HOST_NAME") + host_name = get_current_host_name() addons_manager = _get_addons_manager() publish_plugin_dirs = addons_manager.collect_publish_plugin_paths( @@ -366,6 +372,24 @@ def get_current_task_name(): return get_global_context()["task_name"] +def get_current_project_settings() -> dict[str, Any]: + """Project settings for the current context project. + + Returns: + dict[str, Any]: Project settings for the current context project. + + Raises: + ValueError: If current project is not set. + + """ + project_name = get_current_project_name() + if not project_name: + raise ValueError( + "Current project is not set. Can't get project settings." + ) + return get_project_settings(project_name) + + def get_current_project_entity(fields=None): """Helper function to get project document based on global Session. @@ -505,66 +529,64 @@ def get_current_context_custom_workfile_template(project_settings=None): ) -def change_current_context(folder_entity, task_entity, template_key=None): +_PLACEHOLDER = object() + + +def change_current_context( + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + *, + template_key: Optional[str] = _PLACEHOLDER, + reason: Optional[str] = None, + project_entity: Optional[dict[str, Any]] = None, + anatomy: Optional[Anatomy] = None, +) -> dict[str, str]: """Update active Session to a new task work area. - This updates the live Session to a different task under folder. + This updates the live Session to a different task under a folder. + + Notes: + * This function does a lot of things related to workfiles which + extends arguments options a lot. + * We might want to implement 'set_current_context' on host integration + instead. 
But `AYON_WORKDIR`, which is related to 'IWorkfileHost', + would not be available in that case which might break some + logic. Args: folder_entity (Dict[str, Any]): Folder entity to set. task_entity (Dict[str, Any]): Task entity to set. - template_key (Union[str, None]): Prepared template key to be used for - workfile template in Anatomy. + template_key (Optional[str]): DEPRECATED: Prepared template key to + be used for workfile template in Anatomy. + reason (Optional[str]): Reason for changing context. + anatomy (Optional[Anatomy]): Anatomy object used for workdir + calculation. + project_entity (Optional[dict[str, Any]]): Project entity used for + workdir calculation. Returns: - Dict[str, str]: The changed key, values in the current Session. - """ + dict[str, str]: New context data. - project_name = get_current_project_name() - workdir = None - folder_path = None - task_name = None - if folder_entity: - folder_path = folder_entity["path"] - if task_entity: - task_name = task_entity["name"] - project_entity = ayon_api.get_project(project_name) - host_name = get_current_host_name() - workdir = get_workdir( - project_entity, - folder_entity, - task_entity, - host_name, - template_key=template_key + """ + if template_key is not _PLACEHOLDER: + warnings.warn( + ( + "Used deprecated argument 'template_key' in" + " 'change_current_context'." + " It is not necessary to pass it in anymore." + ), + DeprecationWarning, + stacklevel=2, ) - envs = { - "AYON_PROJECT_NAME": project_name, - "AYON_FOLDER_PATH": folder_path, - "AYON_TASK_NAME": task_name, - "AYON_WORKDIR": workdir, - } - - # Update the Session and environments. Pop from environments all keys with - # value set to None. 
- for key, value in envs.items(): - if value is None: - os.environ.pop(key, None) - else: - os.environ[key] = value - - data = envs.copy() - - # Convert env keys to human readable keys - data["project_name"] = project_name - data["folder_path"] = folder_path - data["task_name"] = task_name - data["workdir_path"] = workdir - - # Emit session change - emit_event("taskChanged", data) - - return data + host = registered_host() + return host.set_current_context( + folder_entity, + task_entity, + reason=reason, + project_entity=project_entity, + anatomy=anatomy, + ) def get_process_id(): @@ -583,53 +605,16 @@ def get_process_id(): def version_up_current_workfile(): - """Function to increment and save workfile + """DEPRECATED Function to increment and save workfile. + + Please use 'save_next_version' from 'ayon_core.pipeline.workfile' instead. + """ - host = registered_host() - - project_name = get_current_project_name() - folder_path = get_current_folder_path() - task_name = get_current_task_name() - host_name = get_current_host_name() - - template_key = get_workfile_template_key_from_context( - project_name, - folder_path, - task_name, - host_name, + warnings.warn( + "Used deprecated 'version_up_current_workfile' please use" + " 'save_next_version' from 'ayon_core.pipeline.workfile' instead.", + DeprecationWarning, + stacklevel=2, ) - anatomy = Anatomy(project_name) - - data = get_template_data_with_names( - project_name, folder_path, task_name, host_name - ) - data["root"] = anatomy.roots - - work_template = anatomy.get_template_item("work", template_key) - - # Define saving file extension - extensions = host.get_workfile_extensions() - current_file = host.get_current_workfile() - if current_file: - extensions = [os.path.splitext(current_file)[-1]] - - work_root = work_template["directory"].format_strict(data) - file_template = work_template["file"].template - last_workfile_path = get_last_workfile( - work_root, file_template, data, extensions, True - ) - # 
`get_last_workfile` will return the first expected file version - # if no files exist yet. In that case, if they do not exist we will - # want to save v001 - new_workfile_path = last_workfile_path - if os.path.exists(new_workfile_path): - new_workfile_path = version_up(new_workfile_path) - - # Raise an error if the parent folder doesn't exist as `host.save_workfile` - # is not supposed/able to create missing folders. - parent_folder = os.path.dirname(new_workfile_path) - if not os.path.exists(parent_folder): - raise MissingWorkdirError( - f"Work area directory '{parent_folder}' does not exist.") - - host.save_workfile(new_workfile_path) + from ayon_core.pipeline.workfile import save_next_version + save_next_version() diff --git a/client/ayon_core/pipeline/editorial.py b/client/ayon_core/pipeline/editorial.py index 8b6cfc52f1..b553fae3fb 100644 --- a/client/ayon_core/pipeline/editorial.py +++ b/client/ayon_core/pipeline/editorial.py @@ -7,6 +7,10 @@ import opentimelineio as otio from opentimelineio import opentime as _ot +# https://github.com/AcademySoftwareFoundation/OpenTimelineIO/issues/1822 +OTIO_EPSILON = 1e-9 + + def otio_range_to_frame_range(otio_range): start = _ot.to_frames( otio_range.start_time, otio_range.start_time.rate) diff --git a/client/ayon_core/pipeline/load/utils.py b/client/ayon_core/pipeline/load/utils.py index 3c50d76fb5..836fc5e096 100644 --- a/client/ayon_core/pipeline/load/utils.py +++ b/client/ayon_core/pipeline/load/utils.py @@ -720,11 +720,13 @@ def get_representation_path(representation, root=None): str: fullpath of the representation """ - if root is None: - from ayon_core.pipeline import registered_root + from ayon_core.pipeline import get_current_project_name, Anatomy - root = registered_root() + anatomy = Anatomy(get_current_project_name()) + return get_representation_path_with_anatomy( + representation, anatomy + ) def path_from_representation(): try: @@ -772,7 +774,7 @@ def get_representation_path(representation, root=None): 
dir_path, file_name = os.path.split(path) if not os.path.exists(dir_path): - return + return None base_name, ext = os.path.splitext(file_name) file_name_items = None @@ -782,7 +784,7 @@ def get_representation_path(representation, root=None): file_name_items = base_name.split("%") if not file_name_items: - return + return None filename_start = file_name_items[0] diff --git a/client/ayon_core/pipeline/plugin_discover.py b/client/ayon_core/pipeline/plugin_discover.py index f531600276..03da7fce79 100644 --- a/client/ayon_core/pipeline/plugin_discover.py +++ b/client/ayon_core/pipeline/plugin_discover.py @@ -51,7 +51,7 @@ class DiscoverResult: "*** Discovered {} plugins".format(len(self.plugins)) ) for cls in self.plugins: - lines.append("- {}".format(cls.__class__.__name__)) + lines.append("- {}".format(cls.__name__)) # Plugin that were defined to be ignored if self.ignored_plugins or full_report: diff --git a/client/ayon_core/pipeline/publish/lib.py b/client/ayon_core/pipeline/publish/lib.py index 49143c4426..3b82d961f8 100644 --- a/client/ayon_core/pipeline/publish/lib.py +++ b/client/ayon_core/pipeline/publish/lib.py @@ -5,6 +5,7 @@ import sys import inspect import copy import warnings +import hashlib import xml.etree.ElementTree from typing import TYPE_CHECKING, Optional, Union, List @@ -243,32 +244,38 @@ def publish_plugins_discover( for path in paths: path = os.path.normpath(path) - if not os.path.isdir(path): - continue + filenames = [] + if os.path.isdir(path): + filenames.extend( + name + for name in os.listdir(path) + if ( + os.path.isfile(os.path.join(path, name)) + and not name.startswith("_") + ) + ) + else: + filenames.append(os.path.basename(path)) + path = os.path.dirname(path) - for fname in os.listdir(path): - if fname.startswith("_"): - continue - - abspath = os.path.join(path, fname) - - if not os.path.isfile(abspath): - continue - - mod_name, mod_ext = os.path.splitext(fname) - - if mod_ext != ".py": + dirpath_hash = 
hashlib.md5(path.encode("utf-8")).hexdigest() + for filename in filenames: + basename, ext = os.path.splitext(filename) + if ext.lower() != ".py": continue + filepath = os.path.join(path, filename) + module_name = f"{dirpath_hash}.{basename}" try: module = import_filepath( - abspath, mod_name, sys_module_name=mod_name) + filepath, module_name, sys_module_name=module_name + ) except Exception as err: # noqa: BLE001 # we need broad exception to catch all possible errors. - result.crashed_file_paths[abspath] = sys.exc_info() + result.crashed_file_paths[filepath] = sys.exc_info() - log.debug('Skipped: "%s" (%s)', mod_name, err) + log.debug('Skipped: "%s" (%s)', filepath, err) continue for plugin in pyblish.plugin.plugins_from_module(module): @@ -354,12 +361,18 @@ def get_plugin_settings(plugin, project_settings, log, category=None): # Use project settings based on a category name if category: try: - return ( + output = ( project_settings [category] ["publish"] [plugin.__name__] ) + warnings.warn( + "Please fill 'settings_category'" + f" for plugin '{plugin.__name__}'.", + DeprecationWarning + ) + return output except KeyError: pass @@ -384,12 +397,18 @@ def get_plugin_settings(plugin, project_settings, log, category=None): category_from_file = "core" try: - return ( + output = ( project_settings [category_from_file] [plugin_kind] [plugin.__name__] ) + warnings.warn( + "Please fill 'settings_category'" + f" for plugin '{plugin.__name__}'.", + DeprecationWarning + ) + return output except KeyError: pass return {} @@ -1048,7 +1067,7 @@ def main_cli_publish( discover_result = publish_plugins_discover() publish_plugins = discover_result.plugins - print("\n".join(discover_result.get_report(only_errors=False))) + print(discover_result.get_report(only_errors=False)) # Error exit as soon as any error occurs. 
error_format = ("Failed {plugin.__name__}: " diff --git a/client/ayon_core/pipeline/workfile/__init__.py b/client/ayon_core/pipeline/workfile/__init__.py index aa7e150bca..7acaf69a7c 100644 --- a/client/ayon_core/pipeline/workfile/__init__.py +++ b/client/ayon_core/pipeline/workfile/__init__.py @@ -4,6 +4,8 @@ from .path_resolving import ( get_workdir_with_workdir_data, get_workdir, + get_last_workfile_with_version_from_paths, + get_last_workfile_from_paths, get_last_workfile_with_version, get_last_workfile, @@ -11,12 +13,21 @@ from .path_resolving import ( get_custom_workfile_template_by_string_context, create_workdir_extra_folders, + + get_comments_from_workfile_paths, ) from .utils import ( should_use_last_workfile_on_launch, should_open_workfiles_tool_on_launch, MissingWorkdirError, + + save_workfile_info, + save_current_workfile_to, + save_workfile_with_current_context, + save_next_version, + copy_workfile_to_context, + find_workfile_rootless_path, ) from .build_workfile import BuildWorkfile @@ -37,18 +48,29 @@ __all__ = ( "get_workdir_with_workdir_data", "get_workdir", + "get_last_workfile_with_version_from_paths", + "get_last_workfile_from_paths", "get_last_workfile_with_version", "get_last_workfile", + "find_workfile_rootless_path", "get_custom_workfile_template", "get_custom_workfile_template_by_string_context", "create_workdir_extra_folders", + "get_comments_from_workfile_paths", + "should_use_last_workfile_on_launch", "should_open_workfiles_tool_on_launch", "MissingWorkdirError", + "save_workfile_info", + "save_current_workfile_to", + "save_workfile_with_current_context", + "save_next_version", + "copy_workfile_to_context", + "BuildWorkfile", "discover_workfile_build_plugins", diff --git a/client/ayon_core/pipeline/workfile/path_resolving.py b/client/ayon_core/pipeline/workfile/path_resolving.py index 9b2fe25199..b806f1ebf0 100644 --- a/client/ayon_core/pipeline/workfile/path_resolving.py +++ b/client/ayon_core/pipeline/workfile/path_resolving.py @@ -1,8 
+1,12 @@ +from __future__ import annotations import os import re import copy import platform +import warnings +import typing from typing import Optional, Dict, Any +from dataclasses import dataclass import ayon_api @@ -15,6 +19,9 @@ from ayon_core.lib import ( from ayon_core.pipeline import version_start, Anatomy from ayon_core.pipeline.template_data import get_template_data +if typing.TYPE_CHECKING: + from ayon_core.pipeline.anatomy import AnatomyTemplateResult + def get_workfile_template_key_from_context( project_name: str, @@ -111,7 +118,7 @@ def get_workdir_with_workdir_data( anatomy=None, template_key=None, project_settings=None -): +) -> "AnatomyTemplateResult": """Fill workdir path from entered data and project's anatomy. It is possible to pass only project's name instead of project's anatomy but @@ -130,9 +137,9 @@ def get_workdir_with_workdir_data( if 'template_key' is not passed. Returns: - TemplateResult: Workdir path. - """ + AnatomyTemplateResult: Workdir path. + """ if not anatomy: anatomy = Anatomy(project_name) @@ -147,7 +154,7 @@ def get_workdir_with_workdir_data( template_obj = anatomy.get_template_item( "work", template_key, "directory" ) - # Output is TemplateResult object which contain useful data + # Output is AnatomyTemplateResult object which contain useful data output = template_obj.format_strict(workdir_data) if output: return output.normalized() @@ -155,14 +162,14 @@ def get_workdir_with_workdir_data( def get_workdir( - project_entity, - folder_entity, - task_entity, - host_name, + project_entity: dict[str, Any], + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + host_name: str, anatomy=None, template_key=None, project_settings=None -): +) -> "AnatomyTemplateResult": """Fill workdir path from entered data and project's anatomy. Args: @@ -174,8 +181,8 @@ def get_workdir( is stored under `AYON_HOST_NAME` key. anatomy (Anatomy): Optional argument. Anatomy object is created using project name from `project_entity`. 
It is preferred to pass this - argument as initialization of a new Anatomy object may be time - consuming. + argument as initialization of a new Anatomy object may be + time-consuming. template_key (str): Key of work templates in anatomy templates. Default value is defined in `get_workdir_with_workdir_data`. project_settings(Dict[str, Any]): Prepared project settings for @@ -183,9 +190,9 @@ def get_workdir( if 'template_key' is not passed. Returns: - TemplateResult: Workdir path. - """ + AnatomyTemplateResult: Workdir path. + """ if not anatomy: anatomy = Anatomy( project_entity["name"], project_entity=project_entity @@ -197,7 +204,7 @@ def get_workdir( task_entity, host_name, ) - # Output is TemplateResult object which contain useful data + # Output is AnatomyTemplateResult object which contain useful data return get_workdir_with_workdir_data( workdir_data, anatomy.project_name, @@ -207,12 +214,141 @@ def get_workdir( ) -def get_last_workfile_with_version( - workdir, file_template, fill_data, extensions -): +@dataclass +class WorkfileParsedData: + version: Optional[int] = None + comment: Optional[str] = None + ext: Optional[str] = None + + +class WorkfileDataParser: + """Parse dynamic data from existing filenames based on template. + + Args: + file_template (str): Workfile file template. + data (dict[str, Any]): Data to fill the template with. 
+
+    """
+    def __init__(
+        self,
+        file_template: str,
+        data: dict[str, Any],
+    ):
+        data = copy.deepcopy(data)
+        file_template = str(file_template)
+        # Use placeholders that will never be in the filename
+        ext_replacement = "CIextID"
+        version_replacement = "CIversionID"
+        comment_replacement = "CIcommentID"
+        data["version"] = version_replacement
+        data["comment"] = comment_replacement
+        for pattern, replacement in (
+            # Replace `.{ext}` with `{ext}` so we are sure dot is not
+            # at the end
+            (r"\.?{ext}", ext_replacement),
+        ):
+            file_template = re.sub(pattern, replacement, file_template)
+
+        file_template = StringTemplate(file_template)
+        # Prepare template that does contain 'comment'
+        comment_template = re.escape(str(file_template.format_strict(data)))
+        # Prepare template that does not contain 'comment'
+        # - comment is usually marked as optional and in that case the regex
+        # to find the comment is different based on the filename
+        # - if filename contains comment then 'comment_template' will match
+        # - if filename does not contain comment then 'file_template' will
+        # match
+        data.pop("comment")
+        file_template = re.escape(str(file_template.format_strict(data)))
+        for src, replacement in (
+            (ext_replacement, r"(?P<ext>\..*)"),
+            (version_replacement, r"(?P<version>[0-9]+)"),
+            (comment_replacement, r"(?P<comment>.+?)"),
+        ):
+            comment_template = comment_template.replace(src, replacement)
+            file_template = file_template.replace(src, replacement)
+
+        kwargs = {}
+        if platform.system().lower() == "windows":
+            kwargs["flags"] = re.IGNORECASE
+
+        # Match from beginning to end of string to be safe
+        self._comment_template = re.compile(f"^{comment_template}$", **kwargs)
+        self._file_template = re.compile(f"^{file_template}$", **kwargs)
+
+    def parse_data(self, filename: str) -> WorkfileParsedData:
+        """Parse the dynamic data from a filename."""
+        match = self._comment_template.match(filename)
+        if not match:
+            match = self._file_template.match(filename)
+
+        if not match:
+            return 
WorkfileParsedData() + + kwargs = match.groupdict() + version = kwargs.get("version") + if version is not None: + kwargs["version"] = int(version) + return WorkfileParsedData(**kwargs) + + +def parse_dynamic_data_from_workfile( + filename: str, + file_template: str, + template_data: dict[str, Any], +) -> WorkfileParsedData: + """Parse dynamic data from a workfile filename. + + Dynamic data are 'version', 'comment' and 'ext'. + + Args: + filename (str): Workfile filename. + file_template (str): Workfile file template. + template_data (dict[str, Any]): Data to fill the template with. + + Returns: + WorkfileParsedData: Dynamic data parsed from the filename. + + """ + parser = WorkfileDataParser(file_template, template_data) + return parser.parse_data(filename) + + +def parse_dynamic_data_from_workfiles( + filenames: list[str], + file_template: str, + template_data: dict[str, Any], +) -> dict[str, WorkfileParsedData]: + """Parse dynamic data from a workfiles filenames. + + Dynamic data are 'version', 'comment' and 'ext'. + + Args: + filenames (list[str]): Workfiles filenames. + file_template (str): Workfile file template. + template_data (dict[str, Any]): Data to fill the template with. + + Returns: + dict[str, WorkfileParsedData]: Dynamic data parsed from the filenames + by filename. + + """ + parser = WorkfileDataParser(file_template, template_data) + return { + filename: parser.parse_data(filename) + for filename in filenames + } + + +def get_last_workfile_with_version_from_paths( + filepaths: list[str], + file_template: str, + template_data: dict[str, Any], + extensions: set[str], +) -> tuple[Optional[str], Optional[int]]: """Return last workfile version. - Usign workfile template and it's filling data find most possible last + Using the workfile template and its template data find most possible last version of workfile which was created for the context. 
Functionality is fully based on knowing which keys are optional or what @@ -222,50 +358,43 @@ def get_last_workfile_with_version( last workfile. Args: - workdir (str): Path to dir where workfiles are stored. + filepaths (list[str]): Workfile paths. file_template (str): Template of file name. - fill_data (Dict[str, Any]): Data for filling template. - extensions (Iterable[str]): All allowed file extensions of workfile. + template_data (Dict[str, Any]): Data for filling template. + extensions (set[str]): All allowed file extensions of workfile. Returns: - Tuple[Union[str, None], Union[int, None]]: Last workfile with version + tuple[Optional[str], Optional[int]]: Last workfile with version if there is any workfile otherwise None for both. - """ - if not os.path.exists(workdir): + """ + if not filepaths: return None, None dotted_extensions = set() for ext in extensions: if not ext.startswith("."): - ext = ".{}".format(ext) - dotted_extensions.add(ext) - - # Fast match on extension - filenames = [ - filename - for filename in os.listdir(workdir) - if os.path.splitext(filename)[-1] in dotted_extensions - ] + ext = f".{ext}" + dotted_extensions.add(re.escape(ext)) # Build template without optionals, version to digits only regex # and comment to any definable value. 
 # Escape extensions dot for regex
-    regex_exts = [
-        "\\" + ext
-        for ext in dotted_extensions
-    ]
-    ext_expression = "(?:" + "|".join(regex_exts) + ")"
+    ext_expression = "(?:" + "|".join(dotted_extensions) + ")"
+
+    for pattern, replacement in (
+        # Replace `.{ext}` with `{ext}` so we are sure dot is not at the end
+        (r"\.?{ext}", ext_expression),
+        # Replace optional keys with optional content regex
+        (r"<.*?>", r".*?"),
+        # Replace `{version}` with group regex
+        (r"{version.*?}", r"([0-9]+)"),
+        (r"{comment.*?}", r".+?"),
+    ):
+        file_template = re.sub(pattern, replacement, file_template)

-    # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end
-    file_template = re.sub(r"\.?{ext}", ext_expression, file_template)
-    # Replace optional keys with optional content regex
-    file_template = re.sub(r"<.*?>", r".*?", file_template)
-    # Replace `{version}` with group regex
-    file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template)
-    file_template = re.sub(r"{comment.*?}", r".+?", file_template)
     file_template = StringTemplate.format_strict_template(
-        file_template, fill_data
+        file_template, template_data
     )

     # Match with ignore case on Windows due to the Windows
@@ -278,64 +407,189 @@
     # Get highest version among existing matching files
     version = None
-    output_filenames = []
-    for filename in sorted(filenames):
+    output_filepaths = []
+    for filepath in sorted(filepaths):
+        filename = os.path.basename(filepath)
         match = re.match(file_template, filename, **kwargs)
         if not match:
             continue

         if not match.groups():
-            output_filenames.append(filename)
+            output_filepaths.append(filepath)
             continue

         file_version = int(match.group(1))
         if version is None or file_version > version:
-            output_filenames[:] = []
+            output_filepaths.clear()
             version = file_version

         if file_version == version:
-            output_filenames.append(filename)
+            output_filepaths.append(filepath)

-    output_filename = None
-    if 
len(output_filenames) == 1:
-        output_filename = output_filenames[0]
-    else:
-        last_time = None
-        for _output_filename in output_filenames:
-            full_path = os.path.join(workdir, _output_filename)
-            mod_time = os.path.getmtime(full_path)
-            if last_time is None or last_time < mod_time:
-                output_filename = _output_filename
-                last_time = mod_time
+    # Use file modification time to use most recent file if there are
+    # multiple workfiles with the same version
+    output_filepath = None
+    last_time = None
+    for _output_filepath in output_filepaths:
+        mod_time = None
+        if os.path.exists(_output_filepath):
+            mod_time = os.path.getmtime(_output_filepath)
+        if (
+            last_time is None
+            or (mod_time is not None and last_time < mod_time)
+        ):
+            output_filepath = _output_filepath
+            last_time = mod_time

-    return output_filename, version
+    return output_filepath, version


-def get_last_workfile(
-    workdir, file_template, fill_data, extensions, full_path=False
-):
-    """Return last workfile filename.
+def get_last_workfile_from_paths(
+    filepaths: list[str],
+    file_template: str,
+    template_data: dict[str, Any],
+    extensions: set[str],
+) -> Optional[str]:
+    """Return the last workfile filename.

-    Returns file with version 1 if there is not workfile yet.
+    Returns the file with version 1 if there is not workfile yet.
+
+    Args:
+        filepaths (list[str]): Paths to workfiles.
+        file_template (str): Template of file name.
+        template_data (dict[str, Any]): Data for filling template.
+        extensions (set[str]): All allowed file extensions of workfile.
+
+    Returns:
+        Optional[str]: Last workfile path.
+
+    """
+    filepath, _version = get_last_workfile_with_version_from_paths(
+        filepaths, file_template, template_data, extensions
+    )
+    return filepath
+
+
+def _filter_dir_files_by_ext(
+    dirpath: str,
+    extensions: set[str],
+) -> tuple[list[str], set[str]]:
+    """Filter files by extensions.
+
+    Args:
+        dirpath (str): Directory path where to look for files.
+        extensions (set[str]): Set of file extensions.
+ + Returns: + tuple[list[str], set[str]]: Filtered list of file paths. + + """ + dotted_extensions = set() + for ext in extensions: + if not ext.startswith("."): + ext = f".{ext}" + dotted_extensions.add(ext) + + if not os.path.exists(dirpath): + return [], dotted_extensions + + filtered_paths = [ + os.path.join(dirpath, filename) + for filename in os.listdir(dirpath) + if os.path.splitext(filename)[-1] in dotted_extensions + ] + return filtered_paths, dotted_extensions + + +def get_last_workfile_with_version( + workdir: str, + file_template: str, + template_data: dict[str, Any], + extensions: set[str], +) -> tuple[Optional[str], Optional[int]]: + """Return last workfile version. + + Using the workfile template and its filling data to find the most possible + last version of workfile which was created for the context. + + Functionality is fully based on knowing which keys are optional or what + values are expected as value. + + The last modified file is used if more files can be considered as + last workfile. Args: workdir (str): Path to dir where workfiles are stored. file_template (str): Template of file name. - fill_data (Dict[str, Any]): Data for filling template. - extensions (Iterable[str]): All allowed file extensions of workfile. - full_path (Optional[bool]): Full path to file is returned if - set to True. + template_data (dict[str, Any]): Data for filling template. + extensions (set[str]): All allowed file extensions of workfile. Returns: - str: Last or first workfile as filename of full path to filename. + tuple[Optional[str], Optional[int]]: Last workfile with version + if there is any workfile otherwise None for both. 
""" - filename, _version = get_last_workfile_with_version( - workdir, file_template, fill_data, extensions + if not os.path.exists(workdir): + return None, None + + filepaths, dotted_extensions = _filter_dir_files_by_ext( + workdir, extensions ) - if filename is None: - data = copy.deepcopy(fill_data) + + return get_last_workfile_with_version_from_paths( + filepaths, + file_template, + template_data, + dotted_extensions, + ) + + +def get_last_workfile( + workdir: str, + file_template: str, + template_data: dict[str, Any], + extensions: set[str], + full_path: bool = False, +) -> str: + """Return last the workfile filename. + + Returns first file name/path if there are not workfiles yet. + + Args: + workdir (str): Path to dir where workfiles are stored. + file_template (str): Template of file name. + template_data (Dict[str, Any]): Data for filling template. + extensions (Iterable[str]): All allowed file extensions of workfile. + full_path (bool): Return full path to the file or only filename. + + Returns: + str: Last or first workfile file name or path based on + 'full_path' value. + + """ + # TODO (iLLiCiTiT): Remove the argument 'full_path' and return only full + # path. As far as I can tell it is always called with 'full_path' set + # to 'True'. + # - it has to be 2 step operation, first warn about having it 'False', and + # then warn about having it filled. 
+ if full_path is False: + warnings.warn( + "Argument 'full_path' will be removed and will return" + " only full path in future.", + DeprecationWarning, + ) + + filepaths, dotted_extensions = _filter_dir_files_by_ext( + workdir, extensions + ) + filepath = get_last_workfile_from_paths( + filepaths, + file_template, + template_data, + dotted_extensions + ) + if filepath is None: + data = copy.deepcopy(template_data) data["version"] = version_start.get_versioning_start( data["project"]["name"], data["app"], @@ -344,15 +598,15 @@ def get_last_workfile( product_type="workfile" ) data.pop("comment", None) - if not data.get("ext"): - data["ext"] = extensions[0] + if data.get("ext") is None: + data["ext"] = next(iter(extensions), "") data["ext"] = data["ext"].lstrip(".") filename = StringTemplate.format_strict_template(file_template, data) + filepath = os.path.join(workdir, filename) if full_path: - return os.path.normpath(os.path.join(workdir, filename)) - - return filename + return os.path.normpath(filepath) + return os.path.basename(filepath) def get_custom_workfile_template( @@ -389,11 +643,10 @@ def get_custom_workfile_template( project_settings(Dict[str, Any]): Preloaded project settings. Returns: - str: Path to template or None if none of profiles match current - context. Existence of formatted path is not validated. - None: If no profile is matching context. - """ + Optional[str]: Path to template or None if none of profiles match + current context. Existence of formatted path is not validated. + """ log = Logger.get_logger("CustomWorkfileResolve") project_name = project_entity["name"] @@ -562,3 +815,112 @@ def create_workdir_extra_folders( fullpath = os.path.join(workdir, subfolder) if not os.path.exists(fullpath): os.makedirs(fullpath) + + +class CommentMatcher: + """Use anatomy and work file data to parse comments from filenames. + + Args: + extensions (set[str]): Set of extensions. + file_template (StringTemplate): Workfile file template. 
+
+        data (dict[str, Any]): Data to fill the template with.
+
+    """
+    def __init__(
+        self,
+        extensions: set[str],
+        file_template: StringTemplate,
+        data: dict[str, Any]
+    ):
+        warnings.warn(
+            "Class 'CommentMatcher' is deprecated. Please"
+            " use 'parse_dynamic_data_from_workfiles' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        self._fname_regex = None
+
+        if "{comment}" not in file_template:
+            # Don't look for comment if template doesn't allow it
+            return
+
+        # Create a regex group for extensions
+        any_extension = "(?:{})".format(
+            "|".join(re.escape(ext.lstrip(".")) for ext in extensions)
+        )
+
+        # Use placeholders that will never be in the filename
+        temp_data = copy.deepcopy(data)
+        temp_data["comment"] = "<<comment>>"
+        temp_data["version"] = "<<version>>"
+        temp_data["ext"] = "<<ext>>"
+
+        fname_pattern = re.escape(
+            file_template.format_strict(temp_data)
+        )
+
+        # Replace comment and version with something we can match with regex
+        replacements = (
+            ("<<comment>>", r"(?P<comment>.+)"),
+            ("<<version>>", r"[0-9]+"),
+            ("<<ext>>", any_extension),
+        )
+        for src, dest in replacements:
+            fname_pattern = fname_pattern.replace(re.escape(src), dest)
+
+        # Match from beginning to end of string to be safe
+        self._fname_regex = re.compile(f"^{fname_pattern}$")
+
+    def parse_comment(self, filename: str) -> Optional[str]:
+        """Parse the {comment} part from a filename."""
+        if self._fname_regex:
+            match = self._fname_regex.match(filename)
+            if match:
+                return match.group("comment")
+        return None
+
+
+def get_comments_from_workfile_paths(
+    filepaths: list[str],
+    extensions: set[str],
+    file_template: StringTemplate,
+    template_data: dict[str, Any],
+    current_filename: Optional[str] = None,
+) -> tuple[list[str], str]:
+    """DEPRECATED Collect comments from workfile filenames.
+
+    Based on 'current_filename' is also returned "current comment".
+
+    Args:
+        filepaths (list[str]): List of filepaths to parse.
+        extensions (set[str]): Set of file extensions.
+        file_template (StringTemplate): Workfile file template.
+ template_data (dict[str, Any]): Data to fill the template with. + current_filename (str): Filename to check for the current comment. + + Returns: + tuple[list[str], str]: List of comments and the current comment. + + """ + warnings.warn( + "Function 'get_comments_from_workfile_paths' is deprecated. Please" + " use 'parse_dynamic_data_from_workfiles' instead.", + DeprecationWarning, + stacklevel=2, + ) + current_comment = "" + if not filepaths: + return [], current_comment + + matcher = CommentMatcher(extensions, file_template, template_data) + + comment_hints = set() + for filepath in filepaths: + filename = os.path.basename(filepath) + comment = matcher.parse_comment(filename) + if comment: + comment_hints.add(comment) + if filename == current_filename: + current_comment = comment + + return list(comment_hints), current_comment diff --git a/client/ayon_core/pipeline/workfile/utils.py b/client/ayon_core/pipeline/workfile/utils.py index 25be061dec..6666853998 100644 --- a/client/ayon_core/pipeline/workfile/utils.py +++ b/client/ayon_core/pipeline/workfile/utils.py @@ -1,5 +1,30 @@ -from ayon_core.lib import filter_profiles +from __future__ import annotations +import os +import platform +import uuid +import typing +from typing import Optional, Any + +import ayon_api +from ayon_api.operations import OperationsSession + +from ayon_core.lib import filter_profiles, get_ayon_username from ayon_core.settings import get_project_settings +from ayon_core.host.interfaces import ( + SaveWorkfileOptionalData, + ListWorkfilesOptionalData, + CopyWorkfileOptionalData, +) +from ayon_core.pipeline.version_start import get_versioning_start +from ayon_core.pipeline.template_data import get_template_data + +from .path_resolving import ( + get_workdir, + get_workfile_template_key, +) + +if typing.TYPE_CHECKING: + from ayon_core.pipeline import Anatomy class MissingWorkdirError(Exception): @@ -7,14 +32,61 @@ class MissingWorkdirError(Exception): pass +def get_workfiles_info( + 
workfile_path: str,
+    project_name: str,
+    task_id: str,
+    *,
+    anatomy: Optional["Anatomy"] = None,
+    workfile_entities: Optional[list[dict[str, Any]]] = None,
+) -> Optional[dict[str, Any]]:
+    """Find workfile info entity for a workfile path.
+
+    Args:
+        workfile_path (str): Workfile path.
+        project_name (str): The name of the project.
+        task_id (str): Task id under which is workfile created.
+        anatomy (Optional[Anatomy]): Project anatomy used to get roots.
+        workfile_entities (Optional[list[dict[str, Any]]]): Pre-fetched
+            workfile entities related to the task.
+
+    Returns:
+        Optional[dict[str, Any]]: Workfile info entity if found, otherwise
+            `None`.
+
+    """
+    if anatomy is None:
+        from ayon_core.pipeline import Anatomy; anatomy = Anatomy(project_name)
+
+    if workfile_entities is None:
+        workfile_entities = list(ayon_api.get_workfiles_info(
+            project_name,
+            task_ids=[task_id],
+        ))
+
+    if platform.system().lower() == "windows":
+        workfile_path = workfile_path.replace("\\", "/")
+        workfile_path = workfile_path.lower()
+
+    for workfile_entity in workfile_entities:
+        path = workfile_entity["path"]
+        filled_path = anatomy.fill_root(path)
+        if platform.system().lower() == "windows":
+            filled_path = filled_path.replace("\\", "/")
+            filled_path = filled_path.lower()
+        if filled_path == workfile_path:
+            return workfile_entity
+    return None
+
+
 def should_use_last_workfile_on_launch(
-    project_name,
-    host_name,
-    task_name,
-    task_type,
-    default_output=False,
-    project_settings=None,
-):
+    project_name: str,
+    host_name: str,
+    task_name: str,
+    task_type: str,
+    default_output: bool = False,
+    project_settings: Optional[dict[str, Any]] = None,
+) -> bool:
     """Define if host should start last version workfile if possible.

     Default output is `False`.
Can be overridden with environment variable @@ -124,3 +196,608 @@ def should_open_workfiles_tool_on_launch( if output is None: return default_output return output + + +def save_workfile_info( + project_name: str, + task_id: str, + rootless_path: str, + host_name: str, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + username: Optional[str] = None, + workfile_entities: Optional[list[dict[str, Any]]] = None, +) -> dict[str, Any]: + """Save workfile info entity for a workfile path. + + Args: + project_name (str): The name of the project. + task_id (str): Task id under which is workfile created. + rootless_path (str): Rootless path of the workfile. + host_name (str): Name of host which is saving the workfile. + version (Optional[int]): Workfile version. + comment (Optional[str]): Workfile comment. + description (Optional[str]): Workfile description. + username (Optional[str]): Username of user who saves the workfile. + If not provided, current user is used. + workfile_entities (Optional[list[dict[str, Any]]]): Pre-fetched + workfile entities related to task. + + Returns: + dict[str, Any]: Workfile info entity. 
+ + """ + if workfile_entities is None: + workfile_entities = list(ayon_api.get_workfiles_info( + project_name, + task_ids=[task_id], + )) + + workfile_entity = next( + ( + _ent + for _ent in workfile_entities + if _ent["path"] == rootless_path + ), + None + ) + + if username is None: + username = get_ayon_username() + + if not workfile_entity: + return _create_workfile_info_entity( + project_name, + task_id, + host_name, + rootless_path, + username, + version, + comment, + description, + ) + + data = { + key: value + for key, value in ( + ("host_name", host_name), + ("version", version), + ("comment", comment), + ) + if value is not None + } + + old_data = workfile_entity["data"] + + changed_data = {} + for key, value in data.items(): + if key not in old_data or old_data[key] != value: + changed_data[key] = value + + update_data = {} + if changed_data: + update_data["data"] = changed_data + + old_description = workfile_entity["attrib"].get("description") + if description is not None and old_description != description: + update_data["attrib"] = {"description": description} + workfile_entity["attrib"]["description"] = description + + # Automatically fix 'createdBy' and 'updatedBy' fields + # NOTE both fields were not automatically filled by server + # until 1.1.3 release. 
+ if workfile_entity.get("createdBy") is None: + update_data["createdBy"] = username + workfile_entity["createdBy"] = username + + if workfile_entity.get("updatedBy") != username: + update_data["updatedBy"] = username + workfile_entity["updatedBy"] = username + + if not update_data: + return workfile_entity + + session = OperationsSession() + session.update_entity( + project_name, + "workfile", + workfile_entity["id"], + update_data, + ) + session.commit() + return workfile_entity + + +def save_current_workfile_to( + workfile_path: str, + folder_path: str, + task_name: str, + *, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + prepared_data: Optional[SaveWorkfileOptionalData] = None, +) -> None: + """Save current workfile to new location or context. + + Args: + workfile_path (str): Destination workfile path. + folder_path (str): Target folder path. + task_name (str): Target task name. + version (Optional[int]): Workfile version. + comment (optional[str]): Workfile comment. + description (Optional[str]): Workfile description. + prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data + for speed enhancements. 
+ + """ + from ayon_core.pipeline.context_tools import registered_host + + host = registered_host() + context = host.get_current_context() + project_name = context["project_name"] + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + host.save_workfile_with_context( + workfile_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + prepared_data=prepared_data, + ) + + +def save_workfile_with_current_context( + workfile_path: str, + *, + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + prepared_data: Optional[SaveWorkfileOptionalData] = None, +) -> None: + """Save current workfile to new location using current context. + + Helper function to save workfile using current context. Calls + 'save_current_workfile_to' at the end. + + Args: + workfile_path (str): Destination workfile path. + version (Optional[int]): Workfile version. + comment (optional[str]): Workfile comment. + description (Optional[str]): Workfile description. + prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data + for speed enhancements. 
+ + """ + from ayon_core.pipeline.context_tools import registered_host + + host = registered_host() + context = host.get_current_context() + project_name = context["project_name"] + folder_path = context["folder_path"] + task_name = context["task_name"] + folder_entity = task_entity = None + if folder_path: + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + if folder_entity and task_name: + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + + host.save_workfile_with_context( + workfile_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + prepared_data=prepared_data, + ) + + +def save_next_version( + version: Optional[int] = None, + comment: Optional[str] = None, + description: Optional[str] = None, + *, + prepared_data: Optional[SaveWorkfileOptionalData] = None, +) -> None: + """Save workfile using current context, version and comment. + + Helper function to save a workfile using the current context. Last + workfile version + 1 is used if is not passed in. + + Args: + version (Optional[int]): Workfile version that will be used. Last + version + 1 is used if is not passed in. + comment (optional[str]): Workfile comment. Pass '""' to clear comment. + The current workfile comment is used if it is not passed. + description (Optional[str]): Workfile description. + prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data + for speed enhancements. 
+ + """ + from ayon_core.pipeline import Anatomy + from ayon_core.pipeline.context_tools import registered_host + + host = registered_host() + current_path = host.get_current_workfile() + if not current_path: + current_path = None + else: + current_path = os.path.normpath(current_path) + + context = host.get_current_context() + project_name = context["project_name"] + folder_path = context["folder_path"] + task_name = context["task_name"] + if prepared_data is None: + prepared_data = SaveWorkfileOptionalData() + + project_entity = prepared_data.project_entity + anatomy = prepared_data.anatomy + project_settings = prepared_data.project_settings + + if project_entity is None: + project_entity = ayon_api.get_project(project_name) + prepared_data.project_entity = project_entity + + if project_settings is None: + project_settings = get_project_settings(project_name) + prepared_data.project_settings = project_settings + + if anatomy is None: + anatomy = Anatomy(project_name, project_entity=project_entity) + prepared_data.anatomy = anatomy + + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + + template_key = get_workfile_template_key( + project_name, + task_entity["taskType"], + host.name, + project_settings=project_settings + ) + file_template = anatomy.get_template_item("work", template_key, "file") + template_data = get_template_data( + project_entity, + folder_entity, + task_entity, + host.name, + project_settings, + ) + workdir = get_workdir( + project_entity, + folder_entity, + task_entity, + host.name, + anatomy=anatomy, + template_key=template_key, + project_settings=project_settings, + ) + rootless_dir = workdir.rootless + last_workfile = None + current_workfile = None + if version is None or comment is None: + workfiles = host.list_workfiles( + project_name, folder_entity, task_entity, + prepared_data=ListWorkfilesOptionalData( + 
project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + template_key=template_key, + ) + ) + for workfile in workfiles: + if current_workfile is None and workfile.filepath == current_path: + current_workfile = workfile + + if workfile.version is None: + continue + + if ( + last_workfile is None + or last_workfile.version < workfile.version + ): + last_workfile = workfile + + if version is None and last_workfile is not None: + version = last_workfile.version + 1 + + if version is None: + version = get_versioning_start( + project_name, + host.name, + task_name=task_entity["name"], + task_type=task_entity["taskType"], + product_type="workfile" + ) + + # Re-use comment from the current workfile if is not passed in + if comment is None and current_workfile is not None: + comment = current_workfile.comment + + template_data["version"] = version + if comment: + template_data["comment"] = comment + + # Resolve extension + # - Don't fill any if the host does not have defined any -> e.g. if host + # uses directory instead of a file. + # 1. Use the current file extension. + # 2. Use the last known workfile extension. + # 3. Use the first extensions from 'get_workfile_extensions'. 
+    ext = None
+    workfile_extensions = host.get_workfile_extensions()
+    if workfile_extensions:
+        if current_path:
+            ext = os.path.splitext(current_path)[1]
+        elif last_workfile is not None:
+            ext = os.path.splitext(last_workfile.filepath)[1]
+        else:
+            ext = next(iter(workfile_extensions))
+        ext = ext.lstrip(".")
+
+    if ext:
+        template_data["ext"] = ext
+
+    filename = file_template.format_strict(template_data)
+    workfile_path = os.path.join(workdir, filename)
+    rootless_path = f"{rootless_dir}/{filename}"
+    if platform.system().lower() == "windows":
+        rootless_path = rootless_path.replace("\\", "/")
+    prepared_data.rootless_path = rootless_path
+
+    host.save_workfile_with_context(
+        workfile_path,
+        folder_entity,
+        task_entity,
+        version=version,
+        comment=comment,
+        description=description,
+        prepared_data=prepared_data,
+    )
+
+
+def copy_workfile_to_context(
+    src_workfile_path: str,
+    folder_entity: dict[str, Any],
+    task_entity: dict[str, Any],
+    *,
+    version: Optional[int] = None,
+    comment: Optional[str] = None,
+    description: Optional[str] = None,
+    open_workfile: bool = True,
+    prepared_data: Optional[CopyWorkfileOptionalData] = None,
+) -> None:
+    """Copy workfile to a context.
+
+    Copy workfile to a specified folder and task. Destination path is
+    calculated based on passed information.
+
+    Args:
+        src_workfile_path (str): Source workfile path.
+        folder_entity (dict[str, Any]): Target folder entity.
+        task_entity (dict[str, Any]): Target task entity.
+        version (Optional[int]): Workfile version. Use next version if not
+            passed.
+        comment (Optional[str]): Workfile comment.
+        description (Optional[str]): Workfile description.
+        prepared_data (Optional[CopyWorkfileOptionalData]): Prepared data
+            for speed enhancements. Rootless path is calculated in this
+            function.
+ + """ + from ayon_core.pipeline import Anatomy + from ayon_core.pipeline.context_tools import registered_host + + host = registered_host() + project_name = host.get_current_project_name() + + anatomy = prepared_data.anatomy + if anatomy is None: + if prepared_data.project_entity is None: + prepared_data.project_entity = ayon_api.get_project( + project_name + ) + anatomy = Anatomy( + project_name, project_entity=prepared_data.project_entity + ) + prepared_data.anatomy = anatomy + + project_settings = prepared_data.project_settings + if project_settings is None: + project_settings = get_project_settings(project_name) + prepared_data.project_settings = project_settings + + if version is None: + list_prepared_data = None + if prepared_data is not None: + list_prepared_data = ListWorkfilesOptionalData( + project_entity=prepared_data.project_entity, + anatomy=prepared_data.anatomy, + project_settings=prepared_data.project_settings, + workfile_entities=prepared_data.workfile_entities, + ) + + workfiles = host.list_workfiles( + project_name, + folder_entity, + task_entity, + prepared_data=list_prepared_data + ) + if workfiles: + version = max( + workfile.version + for workfile in workfiles + ) + 1 + else: + version = get_versioning_start( + project_name, + host.name, + task_name=task_entity["name"], + task_type=task_entity["taskType"], + product_type="workfile" + ) + + task_type = task_entity["taskType"] + template_key = get_workfile_template_key( + project_name, + task_type, + host.name, + project_settings=prepared_data.project_settings + ) + + template_data = get_template_data( + prepared_data.project_entity, + folder_entity, + task_entity, + host.name, + prepared_data.project_settings, + ) + template_data["version"] = version + if comment: + template_data["comment"] = comment + + workfile_extensions = host.get_workfile_extensions() + if workfile_extensions: + ext = os.path.splitext(src_workfile_path)[1].lstrip(".") + template_data["ext"] = ext + + workfile_template = 
anatomy.get_template_item( + "work", template_key, "path" + ) + workfile_path = workfile_template.format_strict(template_data) + prepared_data.rootless_path = workfile_path.rootless + host.copy_workfile( + src_workfile_path, + workfile_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + open_workfile=open_workfile, + prepared_data=prepared_data, + ) + + +def find_workfile_rootless_path( + workfile_path: str, + project_name: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], + host_name: str, + *, + project_entity: Optional[dict[str, Any]] = None, + project_settings: Optional[dict[str, Any]] = None, + anatomy: Optional["Anatomy"] = None, +) -> str: + """Find rootless workfile path.""" + if anatomy is None: + from ayon_core.pipeline import Anatomy + + anatomy = Anatomy(project_name, project_entity=project_entity) + + task_type = task_entity["taskType"] + template_key = get_workfile_template_key( + project_name, + task_type, + host_name, + project_settings=project_settings + ) + dir_template = anatomy.get_template_item( + "work", template_key, "directory" + ) + result = dir_template.format({"root": anatomy.roots}) + used_root = result.used_values.get("root") + rootless_path = str(workfile_path) + if platform.system().lower() == "windows": + rootless_path = rootless_path.replace("\\", "/") + + root_key = root_value = None + if used_root is not None: + root_key, root_value = next(iter(used_root.items())) + if platform.system().lower() == "windows": + root_value = root_value.replace("\\", "/") + + if root_value and rootless_path.startswith(root_value): + rootless_path = rootless_path[len(root_value):].lstrip("/") + rootless_path = f"{{root[{root_key}]}}/{rootless_path}" + else: + success, result = anatomy.find_root_template_from_path(rootless_path) + if success: + rootless_path = result + return rootless_path + + +def _create_workfile_info_entity( + project_name: str, + task_id: str, + host_name: str, 
+    rootless_path: str,
+    username: str,
+    version: Optional[int],
+    comment: Optional[str],
+    description: Optional[str],
+) -> dict[str, Any]:
+    """Create workfile entity data.
+
+    Args:
+        project_name (str): Project name.
+        task_id (str): Task id.
+        host_name (str): Host name.
+        rootless_path (str): Rootless workfile path.
+        username (str): Username.
+        version (Optional[int]): Workfile version.
+        comment (Optional[str]): Workfile comment.
+        description (Optional[str]): Workfile description.
+
+    Returns:
+        dict[str, Any]: Created workfile entity data.
+
+    """
+    extension = os.path.splitext(rootless_path)[1]
+
+    attrib = {}
+    for key, value in (
+        ("extension", extension),
+        ("description", description),
+    ):
+        if value is not None:
+            attrib[key] = value
+
+    data = {
+        "host_name": host_name,
+        "version": version,
+        "comment": comment,
+    }
+
+    workfile_info = {
+        "id": uuid.uuid4().hex,
+        "path": rootless_path,
+        "taskId": task_id,
+        "attrib": attrib,
+        "data": data,
+        # TODO remove 'createdBy' and 'updatedBy' fields when server is
+        # at or above 1.1.3.
+ "createdBy": username, + "updatedBy": username, + } + + session = OperationsSession() + session.create_entity( + project_name, "workfile", workfile_info + ) + session.commit() + return workfile_info diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 8cea7de86b..b0fad8d2a1 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -631,7 +631,7 @@ class AbstractTemplateBuilder(ABC): """Open template file with registered host.""" template_preset = self.get_template_preset() template_path = template_preset["path"] - self.host.open_file(template_path) + self.host.open_workfile(template_path) @abstractmethod def import_template(self, template_path): diff --git a/client/ayon_core/plugins/publish/cleanup.py b/client/ayon_core/plugins/publish/cleanup.py index 681fe700a3..03eaaf9c6e 100644 --- a/client/ayon_core/plugins/publish/cleanup.py +++ b/client/ayon_core/plugins/publish/cleanup.py @@ -38,6 +38,8 @@ class CleanUp(pyblish.api.InstancePlugin): "webpublisher", "shell" ] + settings_category = "core" + exclude_families = ["clip"] optional = True active = True diff --git a/client/ayon_core/plugins/publish/cleanup_farm.py b/client/ayon_core/plugins/publish/cleanup_farm.py index e655437ced..8d1c8de425 100644 --- a/client/ayon_core/plugins/publish/cleanup_farm.py +++ b/client/ayon_core/plugins/publish/cleanup_farm.py @@ -13,6 +13,8 @@ class CleanUpFarm(pyblish.api.ContextPlugin): order = pyblish.api.IntegratorOrder + 11 label = "Clean Up Farm" + + settings_category = "core" enabled = True # Keep "filesequence" for backwards compatibility of older jobs diff --git a/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py b/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py index 2fcf562dd0..2cb2297bf7 100644 --- 
a/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py +++ b/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py @@ -46,6 +46,8 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.49 label = "Collect Anatomy Instance data" + settings_category = "core" + follow_workfile_version = False def process(self, context): diff --git a/client/ayon_core/plugins/publish/collect_audio.py b/client/ayon_core/plugins/publish/collect_audio.py index 069082af37..2949ff1196 100644 --- a/client/ayon_core/plugins/publish/collect_audio.py +++ b/client/ayon_core/plugins/publish/collect_audio.py @@ -41,6 +41,7 @@ class CollectAudio(pyblish.api.ContextPlugin): "max", "batchdelivery", ] + settings_category = "core" audio_product_name = "audioMain" diff --git a/client/ayon_core/plugins/publish/collect_frames_fix.py b/client/ayon_core/plugins/publish/collect_frames_fix.py index 0f7d5b692a..4270af5541 100644 --- a/client/ayon_core/plugins/publish/collect_frames_fix.py +++ b/client/ayon_core/plugins/publish/collect_frames_fix.py @@ -23,6 +23,7 @@ class CollectFramesFixDef( targets = ["local"] hosts = ["nuke"] families = ["render", "prerender"] + settings_category = "core" rewrite_version_enable = False diff --git a/client/ayon_core/plugins/publish/collect_otio_frame_ranges.py b/client/ayon_core/plugins/publish/collect_otio_frame_ranges.py index 0a4efc2172..d68970d428 100644 --- a/client/ayon_core/plugins/publish/collect_otio_frame_ranges.py +++ b/client/ayon_core/plugins/publish/collect_otio_frame_ranges.py @@ -8,13 +8,7 @@ This module contains a unified plugin that handles: from pprint import pformat -import opentimelineio as otio import pyblish.api -from ayon_core.pipeline.editorial import ( - get_media_range_with_retimes, - otio_range_to_frame_range, - otio_range_with_handles, -) def validate_otio_clip(instance, logger): @@ -74,6 +68,8 @@ class CollectOtioRanges(pyblish.api.InstancePlugin): if not 
validate_otio_clip(instance, self.log): return + import opentimelineio as otio + otio_clip = instance.data["otioClip"] # Collect timeline ranges if workfile start frame is available @@ -100,6 +96,11 @@ class CollectOtioRanges(pyblish.api.InstancePlugin): def _collect_timeline_ranges(self, instance, otio_clip): """Collect basic timeline frame ranges.""" + from ayon_core.pipeline.editorial import ( + otio_range_to_frame_range, + otio_range_with_handles, + ) + workfile_start = instance.data["workfileFrameStart"] # Get timeline ranges @@ -129,6 +130,8 @@ class CollectOtioRanges(pyblish.api.InstancePlugin): def _collect_source_ranges(self, instance, otio_clip): """Collect source media frame ranges.""" + import opentimelineio as otio + # Get source ranges otio_src_range = otio_clip.source_range otio_available_range = otio_clip.available_range() @@ -178,6 +181,8 @@ class CollectOtioRanges(pyblish.api.InstancePlugin): def _collect_retimed_ranges(self, instance, otio_clip): """Handle retimed clip frame ranges.""" + from ayon_core.pipeline.editorial import get_media_range_with_retimes + retimed_attributes = get_media_range_with_retimes(otio_clip, 0, 0) self.log.debug(f"Retimed attributes: {retimed_attributes}") diff --git a/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py b/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py index 1abb8e29d2..524381f656 100644 --- a/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py +++ b/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py @@ -1,7 +1,9 @@ import ayon_api import ayon_api.utils +from ayon_core.host import ILoadHost from ayon_core.pipeline import registered_host + import pyblish.api @@ -27,16 +29,23 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): def process(self, context): host = registered_host() if host is None: - self.log.warn("No registered host.") + self.log.warning("No registered host.") return - if not hasattr(host, "ls"): - host_name = 
host.__name__ - self.log.warn("Host %r doesn't have ls() implemented." % host_name) + if not isinstance(host, ILoadHost): + host_name = host.name + self.log.warning( + f"Host {host_name} does not implement ILoadHost. " + "Skipping querying of loaded versions in scene." + ) + return + + containers = list(host.get_containers()) + if not containers: + # Opt out early if there are no containers + self.log.debug("No loaded containers found in scene.") return - loaded_versions = [] - containers = list(host.ls()) repre_ids = { container["representation"] for container in containers @@ -61,6 +70,7 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): # QUESTION should we add same representation id when loaded multiple # times? + loaded_versions = [] for con in containers: repre_id = con["representation"] repre_entity = repre_entities_by_id.get(repre_id) @@ -80,4 +90,5 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): } loaded_versions.append(version) + self.log.debug(f"Collected {len(loaded_versions)} loaded versions.") context.data["loadedVersions"] = loaded_versions diff --git a/client/ayon_core/plugins/publish/collect_scene_version.py b/client/ayon_core/plugins/publish/collect_scene_version.py index 7979b66abe..e6e81ea074 100644 --- a/client/ayon_core/plugins/publish/collect_scene_version.py +++ b/client/ayon_core/plugins/publish/collect_scene_version.py @@ -12,9 +12,10 @@ class CollectSceneVersion(pyblish.api.ContextPlugin): """ order = pyblish.api.CollectorOrder - label = 'Collect Scene Version' + label = "Collect Scene Version" # configurable in Settings hosts = ["*"] + settings_category = "core" # in some cases of headless publishing (for example webpublisher using PS) # you want to ignore version from name and let integrate use next version diff --git a/client/ayon_core/plugins/publish/extract_burnin.py b/client/ayon_core/plugins/publish/extract_burnin.py index 20bd4a3614..351d85a97f 100644 --- 
a/client/ayon_core/plugins/publish/extract_burnin.py +++ b/client/ayon_core/plugins/publish/extract_burnin.py @@ -57,6 +57,7 @@ class ExtractBurnin(publish.Extractor): "unreal", "batchdelivery", ] + settings_category = "core" optional = True diff --git a/client/ayon_core/plugins/publish/extract_color_transcode.py b/client/ayon_core/plugins/publish/extract_color_transcode.py index 8a276cf608..bbb6f9585b 100644 --- a/client/ayon_core/plugins/publish/extract_color_transcode.py +++ b/client/ayon_core/plugins/publish/extract_color_transcode.py @@ -55,6 +55,8 @@ class ExtractOIIOTranscode(publish.Extractor): label = "Transcode color spaces" order = pyblish.api.ExtractorOrder + 0.019 + settings_category = "core" + optional = True # Supported extensions diff --git a/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py b/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py index 472694d334..3a450a4f33 100644 --- a/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py +++ b/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py @@ -158,6 +158,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): """ # Not all hosts can import this module. import opentimelineio as otio + from ayon_core.pipeline.editorial import OTIO_EPSILON output = [] # go trough all audio tracks @@ -172,6 +173,14 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): clip_start = otio_clip.source_range.start_time fps = clip_start.rate conformed_av_start = media_av_start.rescaled_to(fps) + + # Avoid rounding issue on media available range. 
+ if clip_start.almost_equal( + conformed_av_start, + OTIO_EPSILON + ): + conformed_av_start = clip_start + # ffmpeg ignores embedded tc start = clip_start - conformed_av_start duration = otio_clip.source_range.duration diff --git a/client/ayon_core/plugins/publish/extract_otio_review.py b/client/ayon_core/plugins/publish/extract_otio_review.py index f217be551c..90215bd2c9 100644 --- a/client/ayon_core/plugins/publish/extract_otio_review.py +++ b/client/ayon_core/plugins/publish/extract_otio_review.py @@ -23,7 +23,10 @@ from ayon_core.lib import ( get_ffmpeg_tool_args, run_subprocess, ) -from ayon_core.pipeline import publish +from ayon_core.pipeline import ( + KnownPublishError, + publish, +) class ExtractOTIOReview( @@ -97,8 +100,11 @@ class ExtractOTIOReview( # skip instance if no reviewable data available if ( - not isinstance(otio_review_clips[0], otio.schema.Clip) - and len(otio_review_clips) == 1 + len(otio_review_clips) == 1 + and ( + not isinstance(otio_review_clips[0], otio.schema.Clip) + or otio_review_clips[0].media_reference.is_missing_reference + ) ): self.log.warning( "Instance `{}` has nothing to process".format(instance)) @@ -248,7 +254,7 @@ class ExtractOTIOReview( # Single video way. # Extraction via FFmpeg. - else: + elif hasattr(media_ref, "target_url"): path = media_ref.target_url # Set extract range from 0 (FFmpeg ignores # embedded timecode). @@ -352,6 +358,7 @@ class ExtractOTIOReview( import opentimelineio as otio from ayon_core.pipeline.editorial import ( trim_media_range, + OTIO_EPSILON, ) def _round_to_frame(rational_time): @@ -370,6 +377,13 @@ class ExtractOTIOReview( avl_start = avl_range.start_time + # Avoid rounding issue on media available range. + if start.almost_equal( + avl_start, + OTIO_EPSILON + ): + avl_start = start + # An additional gap is required before the available # range to conform source start point and head handles. 
if start < avl_start: @@ -388,6 +402,14 @@ class ExtractOTIOReview( # (media duration is shorter then clip requirement). end_point = start + duration avl_end_point = avl_range.end_time_exclusive() + + # Avoid rounding issue on media available range. + if end_point.almost_equal( + avl_end_point, + OTIO_EPSILON + ): + avl_end_point = end_point + if end_point > avl_end_point: gap_duration = end_point - avl_end_point duration -= gap_duration @@ -444,7 +466,7 @@ class ExtractOTIOReview( command = get_ffmpeg_tool_args("ffmpeg") input_extension = None - if sequence: + if sequence is not None: input_dir, collection, sequence_fps = sequence in_frame_start = min(collection.indexes) @@ -478,7 +500,7 @@ class ExtractOTIOReview( "-i", input_path ]) - elif video: + elif video is not None: video_path, otio_range = video frame_start = otio_range.start_time.value input_fps = otio_range.start_time.rate @@ -496,7 +518,7 @@ class ExtractOTIOReview( "-i", video_path ]) - elif gap: + elif gap is not None: sec_duration = frames_to_seconds(gap, self.actual_fps) # form command for rendering gap files @@ -510,6 +532,9 @@ class ExtractOTIOReview( "-tune", "stillimage" ]) + else: + raise KnownPublishError("Sequence, video or gap is required.") + if video or sequence: command.extend([ "-vf", f"scale={self.to_width}:{self.to_height}:flags=lanczos", diff --git a/client/ayon_core/plugins/publish/extract_review.py b/client/ayon_core/plugins/publish/extract_review.py index 164f3a07cb..04e534054e 100644 --- a/client/ayon_core/plugins/publish/extract_review.py +++ b/client/ayon_core/plugins/publish/extract_review.py @@ -162,8 +162,10 @@ class ExtractReview(pyblish.api.InstancePlugin): "flame", "unreal", "batchdelivery", + "photoshop" ] + settings_category = "core" # Supported extensions image_exts = {"exr", "jpg", "jpeg", "png", "dpx", "tga", "tiff", "tif"} video_exts = {"mov", "mp4"} @@ -202,15 +204,21 @@ class ExtractReview(pyblish.api.InstancePlugin): def _get_outputs_for_instance(self, instance): 
host_name = instance.context.data["hostName"] product_type = instance.data["productType"] + task_type = None + task_entity = instance.data.get("taskEntity") + if task_entity: + task_type = task_entity["taskType"] self.log.debug("Host: \"{}\"".format(host_name)) self.log.debug("Product type: \"{}\"".format(product_type)) + self.log.debug("Task type: \"{}\"".format(task_type)) profile = filter_profiles( self.profiles, { "hosts": host_name, "product_types": product_type, + "task_types": task_type }, logger=self.log) if not profile: diff --git a/client/ayon_core/plugins/publish/extract_thumbnail.py b/client/ayon_core/plugins/publish/extract_thumbnail.py index 7925b14caa..705fea1f72 100644 --- a/client/ayon_core/plugins/publish/extract_thumbnail.py +++ b/client/ayon_core/plugins/publish/extract_thumbnail.py @@ -38,10 +38,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): "substancedesigner", "nuke", "aftereffects", + "photoshop", "unreal", "houdini", "batchdelivery", ] + settings_category = "core" enabled = False integrate_thumbnail = False diff --git a/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py b/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py index ec1fddc6b1..0dc9a5e34d 100644 --- a/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py +++ b/client/ayon_core/plugins/publish/extract_usd_layer_contributions.py @@ -256,6 +256,7 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin, label = "Collect USD Layer Contributions (Asset/Shot)" families = ["usd"] enabled = True + settings_category = "core" # A contribution defines a contribution into a (department) layer which # will get layered into the target product, usually the asset or shot. 
@@ -633,6 +634,8 @@ class ExtractUSDLayerContribution(publish.Extractor): label = "Extract USD Layer Contributions (Asset/Shot)" order = pyblish.api.ExtractorOrder + 0.45 + settings_category = "core" + use_ayon_entity_uri = False def process(self, instance): @@ -795,6 +798,8 @@ class ExtractUSDAssetContribution(publish.Extractor): label = "Extract USD Asset/Shot Contributions" order = ExtractUSDLayerContribution.order + 0.01 + settings_category = "core" + use_ayon_entity_uri = False def process(self, instance): diff --git a/client/ayon_core/plugins/publish/integrate_hero_version.py b/client/ayon_core/plugins/publish/integrate_hero_version.py index 43f93da293..90e6f15568 100644 --- a/client/ayon_core/plugins/publish/integrate_hero_version.py +++ b/client/ayon_core/plugins/publish/integrate_hero_version.py @@ -61,6 +61,8 @@ class IntegrateHeroVersion( # Must happen after IntegrateNew order = pyblish.api.IntegratorOrder + 0.1 + settings_category = "core" + optional = True active = True diff --git a/client/ayon_core/plugins/publish/integrate_inputlinks.py b/client/ayon_core/plugins/publish/integrate_inputlinks.py index a3b6a228d6..be399a95fc 100644 --- a/client/ayon_core/plugins/publish/integrate_inputlinks.py +++ b/client/ayon_core/plugins/publish/integrate_inputlinks.py @@ -105,7 +105,7 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin): created links by its type """ if workfile_instance is None: - self.log.warn("No workfile in this publish session.") + self.log.warning("No workfile in this publish session.") return workfile_version_id = workfile_instance.data["versionEntity"]["id"] diff --git a/client/ayon_core/plugins/publish/integrate_product_group.py b/client/ayon_core/plugins/publish/integrate_product_group.py index 90887a359d..8904d21d69 100644 --- a/client/ayon_core/plugins/publish/integrate_product_group.py +++ b/client/ayon_core/plugins/publish/integrate_product_group.py @@ -24,6 +24,8 @@ class IntegrateProductGroup(pyblish.api.InstancePlugin): 
order = pyblish.api.IntegratorOrder - 0.1 label = "Product Group" + settings_category = "core" + # Attributes set by settings product_grouping_profiles = None diff --git a/client/ayon_core/plugins/publish/preintegrate_thumbnail_representation.py b/client/ayon_core/plugins/publish/preintegrate_thumbnail_representation.py index 8bd67c0183..900febc236 100644 --- a/client/ayon_core/plugins/publish/preintegrate_thumbnail_representation.py +++ b/client/ayon_core/plugins/publish/preintegrate_thumbnail_representation.py @@ -22,6 +22,8 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin): label = "Override Integrate Thumbnail Representations" order = pyblish.api.IntegratorOrder - 0.1 + settings_category = "core" + integrate_profiles = [] def process(self, instance): diff --git a/client/ayon_core/plugins/publish/validate_containers.py b/client/ayon_core/plugins/publish/validate_containers.py index 520e7a7ce9..fda3d93627 100644 --- a/client/ayon_core/plugins/publish/validate_containers.py +++ b/client/ayon_core/plugins/publish/validate_containers.py @@ -31,6 +31,7 @@ class ValidateOutdatedContainers( label = "Validate Outdated Containers" order = pyblish.api.ValidatorOrder + settings_category = "core" optional = True actions = [ShowInventory] diff --git a/client/ayon_core/plugins/publish/validate_file_saved.py b/client/ayon_core/plugins/publish/validate_file_saved.py index f8fdd27342..28734ba714 100644 --- a/client/ayon_core/plugins/publish/validate_file_saved.py +++ b/client/ayon_core/plugins/publish/validate_file_saved.py @@ -37,7 +37,7 @@ class ValidateCurrentSaveFile(pyblish.api.ContextPlugin): label = "Validate File Saved" order = pyblish.api.ValidatorOrder - 0.1 hosts = ["fusion", "houdini", "max", "maya", "nuke", "substancepainter", - "cinema4d", "silhouette", "gaffer", "blender"] + "cinema4d", "silhouette", "gaffer", "blender", "loki"] actions = [SaveByVersionUpAction, ShowWorkfilesAction] def process(self, context): diff --git 
a/client/ayon_core/plugins/publish/validate_intent.py b/client/ayon_core/plugins/publish/validate_intent.py index 71df652e92..fa5e5af093 100644 --- a/client/ayon_core/plugins/publish/validate_intent.py +++ b/client/ayon_core/plugins/publish/validate_intent.py @@ -14,6 +14,8 @@ class ValidateIntent(pyblish.api.ContextPlugin): order = pyblish.api.ValidatorOrder label = "Validate Intent" + settings_category = "core" + enabled = False # Can be modified by settings diff --git a/client/ayon_core/plugins/publish/validate_unique_subsets.py b/client/ayon_core/plugins/publish/validate_unique_subsets.py index 4067dd75a5..26c9ada116 100644 --- a/client/ayon_core/plugins/publish/validate_unique_subsets.py +++ b/client/ayon_core/plugins/publish/validate_unique_subsets.py @@ -34,7 +34,11 @@ class ValidateProductUniqueness(pyblish.api.ContextPlugin): for instance in context: # Ignore disabled instances - if not instance.data.get('publish', True): + if not instance.data.get("publish", True): + continue + + # Ignore instances not marked to integrate + if not instance.data.get("integrate", True): continue # Ignore instance without folder data diff --git a/client/ayon_core/plugins/publish/validate_version.py b/client/ayon_core/plugins/publish/validate_version.py index 0359f8fb53..d63c4e1f03 100644 --- a/client/ayon_core/plugins/publish/validate_version.py +++ b/client/ayon_core/plugins/publish/validate_version.py @@ -17,6 +17,7 @@ class ValidateVersion(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): order = pyblish.api.ValidatorOrder label = "Validate Version" + settings_category = "core" optional = False active = True diff --git a/client/ayon_core/settings/lib.py b/client/ayon_core/settings/lib.py index 72af07799f..fbbd860397 100644 --- a/client/ayon_core/settings/lib.py +++ b/client/ayon_core/settings/lib.py @@ -4,6 +4,7 @@ import logging import collections import copy import time +import warnings import ayon_api @@ -175,17 +176,22 @@ def 
get_project_environments(project_name, project_settings=None): def get_current_project_settings(): - """Project settings for current context project. + """DEPRECATE Project settings for current context project. + + Function requires access to pipeline context which is in + 'ayon_core.pipeline'. + + Returns: + dict[str, Any]: Project settings for current context project. - Project name should be stored in environment variable `AYON_PROJECT_NAME`. - This function should be used only in host context where environment - variable must be set and should not happen that any part of process will - change the value of the environment variable. """ - project_name = os.environ.get("AYON_PROJECT_NAME") - if not project_name: - raise ValueError( - "Missing context project in environment" - " variable `AYON_PROJECT_NAME`." - ) - return get_project_settings(project_name) + warnings.warn( + "Used deprecated function 'get_current_project_settings' in" + " 'ayon_core.settings'. The function was moved to" + " 'ayon_core.pipeline.context_tools'.", + DeprecationWarning, + stacklevel=2 + ) + from ayon_core.pipeline.context_tools import get_current_project_settings + + return get_current_project_settings() diff --git a/client/ayon_core/tools/launcher/models/actions.py b/client/ayon_core/tools/launcher/models/actions.py index adb8d371ed..1a8e423751 100644 --- a/client/ayon_core/tools/launcher/models/actions.py +++ b/client/ayon_core/tools/launcher/models/actions.py @@ -399,7 +399,11 @@ class ActionsModel: return cache.get_data() try: - response = ayon_api.post("actions/list", **request_data) + # 'variant' query is supported since AYON backend 1.10.4 + query = urlencode({"variant": self._variant}) + response = ayon_api.post( + f"actions/list?{query}", **request_data + ) response.raise_for_status() except Exception: self.log.warning("Failed to collect webactions.", exc_info=True) diff --git a/client/ayon_core/tools/workfiles/abstract.py b/client/ayon_core/tools/workfiles/abstract.py index 
152ca33d99..863d6bb9bc 100644 --- a/client/ayon_core/tools/workfiles/abstract.py +++ b/client/ayon_core/tools/workfiles/abstract.py @@ -4,76 +4,6 @@ from abc import ABC, abstractmethod from ayon_core.style import get_default_entity_icon_color -class WorkfileInfo: - """Information about workarea file with possible additional from database. - - Args: - folder_id (str): Folder id. - task_id (str): Task id. - filepath (str): Filepath. - filesize (int): File size. - creation_time (float): Creation time (timestamp). - modification_time (float): Modification time (timestamp). - created_by (Union[str, none]): User who created the file. - updated_by (Union[str, none]): User who last updated the file. - note (str): Note. - """ - - def __init__( - self, - folder_id, - task_id, - filepath, - filesize, - creation_time, - modification_time, - created_by, - updated_by, - note, - ): - self.folder_id = folder_id - self.task_id = task_id - self.filepath = filepath - self.filesize = filesize - self.creation_time = creation_time - self.modification_time = modification_time - self.created_by = created_by - self.updated_by = updated_by - self.note = note - - def to_data(self): - """Converts WorkfileInfo item to data. - - Returns: - dict[str, Any]: Folder item data. - """ - - return { - "folder_id": self.folder_id, - "task_id": self.task_id, - "filepath": self.filepath, - "filesize": self.filesize, - "creation_time": self.creation_time, - "modification_time": self.modification_time, - "created_by": self.created_by, - "updated_by": self.updated_by, - "note": self.note, - } - - @classmethod - def from_data(cls, data): - """Re-creates WorkfileInfo item from data. - - Args: - data (dict[str, Any]): Workfile info item data. - - Returns: - WorkfileInfo: Workfile info item. - """ - - return cls(**data) - - class FolderItem: """Item representing folder entity on a server. @@ -87,8 +17,8 @@ class FolderItem: label (str): Folder label. icon_name (str): Name of icon from font awesome. 
icon_color (str): Hex color string that will be used for icon. - """ + """ def __init__( self, entity_id, parent_id, name, label, icon_name, icon_color ): @@ -104,8 +34,8 @@ class FolderItem: Returns: dict[str, Any]: Folder item data. - """ + """ return { "entity_id": self.entity_id, "parent_id": self.parent_id, @@ -124,8 +54,8 @@ class FolderItem: Returns: FolderItem: Folder item. - """ + """ return cls(**data) @@ -144,8 +74,8 @@ class TaskItem: parent_id (str): Parent folder id. icon_name (str): Name of icon from font awesome. icon_color (str): Hex color string that will be used for icon. - """ + """ def __init__( self, task_id, name, task_type, parent_id, icon_name, icon_color ): @@ -163,8 +93,8 @@ class TaskItem: Returns: str: Task id. - """ + """ return self.task_id @property @@ -173,8 +103,8 @@ class TaskItem: Returns: str: Label of task item. - """ + """ if self._label is None: self._label = "{} ({})".format(self.name, self.task_type) return self._label @@ -184,8 +114,8 @@ class TaskItem: Returns: dict[str, Any]: Task item data. - """ + """ return { "task_id": self.task_id, "name": self.name, @@ -204,116 +134,11 @@ class TaskItem: Returns: TaskItem: Task item. - """ + """ return cls(**data) -class FileItem: - """File item that represents a file. - - Can be used for both Workarea and Published workfile. Workarea file - will always exist on disk which is not the case for Published workfile. - - Args: - dirpath (str): Directory path of file. - filename (str): Filename. - modified (float): Modified timestamp. - created_by (Optional[str]): Username. - representation_id (Optional[str]): Representation id of published - workfile. - filepath (Optional[str]): Prepared filepath. - exists (Optional[bool]): If file exists on disk. 
- """ - - def __init__( - self, - dirpath, - filename, - modified, - created_by=None, - updated_by=None, - representation_id=None, - filepath=None, - exists=None - ): - self.filename = filename - self.dirpath = dirpath - self.modified = modified - self.created_by = created_by - self.updated_by = updated_by - self.representation_id = representation_id - self._filepath = filepath - self._exists = exists - - @property - def filepath(self): - """Filepath of file. - - Returns: - str: Full path to a file. - """ - - if self._filepath is None: - self._filepath = os.path.join(self.dirpath, self.filename) - return self._filepath - - @property - def exists(self): - """File is available. - - Returns: - bool: If file exists on disk. - """ - - if self._exists is None: - self._exists = os.path.exists(self.filepath) - return self._exists - - def to_data(self): - """Converts file item to data. - - Returns: - dict[str, Any]: File item data. - """ - - return { - "filename": self.filename, - "dirpath": self.dirpath, - "modified": self.modified, - "created_by": self.created_by, - "representation_id": self.representation_id, - "filepath": self.filepath, - "exists": self.exists, - } - - @classmethod - def from_data(cls, data): - """Re-creates file item from data. - - Args: - data (dict[str, Any]): File item data. - - Returns: - FileItem: File item. - """ - - required_keys = { - "filename", - "dirpath", - "modified", - "representation_id" - } - missing_keys = required_keys - set(data.keys()) - if missing_keys: - raise KeyError("Missing keys: {}".format(missing_keys)) - - return cls(**{ - key: data[key] - for key in required_keys - }) - - class WorkareaFilepathResult: """Result of workarea file formatting. @@ -323,8 +148,8 @@ class WorkareaFilepathResult: exists (bool): True if file exists. filepath (str): Filepath. If not provided it will be constructed from root and filename. 
- """ + """ def __init__(self, root, filename, exists, filepath=None): if not filepath and root and filename: filepath = os.path.join(root, filename) @@ -341,8 +166,8 @@ class AbstractWorkfilesCommon(ABC): Returns: bool: True if host is valid. - """ + """ pass @abstractmethod @@ -353,8 +178,8 @@ class AbstractWorkfilesCommon(ABC): Returns: Iterable[str]: List of extensions. - """ + """ pass @abstractmethod @@ -363,8 +188,8 @@ class AbstractWorkfilesCommon(ABC): Returns: bool: True if save is enabled. - """ + """ pass @abstractmethod @@ -373,8 +198,8 @@ class AbstractWorkfilesCommon(ABC): Args: enabled (bool): Enable save workfile when True. - """ + """ pass @@ -386,6 +211,7 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: str: Name of host. + """ pass @@ -395,8 +221,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: str: Name of project. - """ + """ pass @abstractmethod @@ -406,8 +232,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: Union[str, None]: Folder id or None if host does not have any context. - """ + """ pass @abstractmethod @@ -417,8 +243,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: Union[str, None]: Task name or None if host does not have any context. - """ + """ pass @abstractmethod @@ -428,8 +254,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: Union[str, None]: Path to workfile or None if host does not have opened specific file. - """ + """ pass @property @@ -439,8 +265,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: Anatomy: Project anatomy. - """ + """ pass @property @@ -450,8 +276,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: dict[str, Any]: Project settings. - """ + """ pass @abstractmethod @@ -463,8 +289,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: dict[str, Any]: Project entity data. 
- """ + """ pass @abstractmethod @@ -477,8 +303,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: dict[str, Any]: Folder entity data. - """ + """ pass @abstractmethod @@ -491,10 +317,24 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): Returns: dict[str, Any]: Task entity data. - """ + """ pass + @abstractmethod + def get_workfile_entities(self, task_id: str): + """Workfile entities for given task. + + Args: + task_id (str): Task id. + + Returns: + list[dict[str, Any]]: List of workfile entities. + + """ + pass + + @abstractmethod def emit_event(self, topic, data=None, source=None): """Emit event. @@ -502,8 +342,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon): topic (str): Event topic used for callbacks filtering. data (Optional[dict[str, Any]]): Event data. source (Optional[str]): Event source. - """ + """ pass @@ -530,8 +370,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): topic (str): Name of topic. callback (Callable): Callback that will be called when event is triggered. - """ + """ pass @abstractmethod @@ -592,8 +432,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: List[str]: File extensions that can be used as workfile for current host. - """ + """ pass # Selection information @@ -603,8 +443,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: Union[str, None]: Folder id or None if no folder is selected. - """ + """ pass @abstractmethod @@ -616,8 +456,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Args: folder_id (Union[str, None]): Folder id or None if no folder is selected. - """ + """ pass @abstractmethod @@ -626,8 +466,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: Union[str, None]: Task id or None if no folder is selected. - """ + """ pass @abstractmethod @@ -649,8 +489,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): is selected. task_name (Union[str, None]): Task name or None if no task is selected. 
- """ + """ pass @abstractmethod @@ -659,18 +499,22 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: Union[str, None]: Selected workfile path. - """ + """ pass @abstractmethod - def set_selected_workfile_path(self, path): + def set_selected_workfile_path( + self, rootless_path, path, workfile_entity_id + ): """Change selected workfile path. Args: + rootless_path (Union[str, None]): Selected workfile rootless path. path (Union[str, None]): Selected workfile path. - """ + workfile_entity_id (Union[str, None]): Workfile entity id. + """ pass @abstractmethod @@ -680,8 +524,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: Union[str, None]: Representation id or None if no representation is selected. - """ + """ pass @abstractmethod @@ -691,8 +535,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Args: representation_id (Union[str, None]): Selected workfile representation id. - """ + """ pass def get_selected_context(self): @@ -700,8 +544,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: dict[str, Union[str, None]]: Selected context. - """ + """ return { "folder_id": self.get_selected_folder_id(), "task_id": self.get_selected_task_id(), @@ -737,8 +581,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): files UI element. representation_id (Optional[str]): Representation id. Used for published filed UI element. - """ + """ pass @abstractmethod @@ -750,8 +594,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: dict[str, Any]: Expected selection data. - """ + """ pass @abstractmethod @@ -760,8 +604,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Args: folder_id (str): Folder id which was selected. - """ + """ pass @abstractmethod @@ -771,8 +615,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Args: folder_id (str): Folder id under which task is. task_name (str): Task name which was selected. 
- """ + """ pass @abstractmethod @@ -785,8 +629,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): folder_id (str): Folder id under which representation is. task_name (str): Task name under which representation is. representation_id (str): Representation id which was selected. - """ + """ pass @abstractmethod @@ -797,8 +641,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): folder_id (str): Folder id under which workfile is. task_name (str): Task name under which workfile is. workfile_name (str): Workfile filename which was selected. - """ + """ pass @abstractmethod @@ -823,8 +667,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: list[FolderItem]: Minimum possible information needed for visualisation of folder hierarchy. - """ + """ pass @abstractmethod @@ -843,8 +687,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: list[TaskItem]: Minimum possible information needed for visualisation of tasks. - """ + """ pass @abstractmethod @@ -853,8 +697,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: bool: Has unsaved changes. - """ + """ pass @abstractmethod @@ -867,8 +711,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: str: Workarea directory. - """ + """ pass @abstractmethod @@ -881,9 +725,9 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): sender (Optional[str]): Who requested workarea file items. Returns: - list[FileItem]: List of workarea file items. - """ + list[WorkfileInfo]: List of workarea file items. + """ pass @abstractmethod @@ -899,8 +743,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: dict[str, Any]: Data for Save As operation. - """ + """ pass @abstractmethod @@ -925,12 +769,12 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Returns: WorkareaFilepathResult: Result of the operation. 
- """ + """ pass @abstractmethod - def get_published_file_items(self, folder_id, task_id): + def get_published_file_items(self, folder_id: str, task_id: str): """Get published file items. Args: @@ -938,44 +782,52 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): task_id (Union[str, None]): Task id. Returns: - list[FileItem]: List of published file items. - """ + list[PublishedWorkfileInfo]: List of published file items. + """ pass @abstractmethod - def get_workfile_info(self, folder_id, task_name, filepath): + def get_workfile_info(self, folder_id, task_id, rootless_path): """Workfile info from database. Args: folder_id (str): Folder id. - task_name (str): Task id. - filepath (str): Workfile path. + task_id (str): Task id. + rootless_path (str): Workfile path. Returns: - Union[WorkfileInfo, None]: Workfile info or None if was passed + Optional[WorkfileInfo]: Workfile info or None if was passed invalid context. - """ + """ pass @abstractmethod - def save_workfile_info(self, folder_id, task_name, filepath, note): + def save_workfile_info( + self, + task_id, + rootless_path, + version=None, + comment=None, + description=None, + ): """Save workfile info to database. At this moment the only information which can be saved about - workfile is 'note'. + workfile is 'description'. - When 'note' is 'None' it is only validated if workfile info exists, - and if not then creates one with empty note. + If value of 'version', 'comment' or 'description' is 'None' it is not + added/updated to entity. Args: - folder_id (str): Folder id. - task_name (str): Task id. - filepath (str): Workfile path. - note (Union[str, None]): Note. - """ + task_id (str): Task id. + rootless_path (str): Rootless workfile path. + version (Optional[int]): Version of workfile. + comment (Optional[str]): User's comment (subversion). + description (Optional[str]): Workfile description. 
+ """ pass # General commands @@ -985,8 +837,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): Triggers 'controller.reset.started' event at the beginning and 'controller.reset.finished' at the end. - """ + """ pass # Controller actions @@ -998,8 +850,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): folder_id (str): Folder id. task_id (str): Task id. filepath (str): Workfile path. - """ + """ pass @abstractmethod @@ -1013,22 +865,27 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): self, folder_id, task_id, + rootless_workdir, workdir, filename, - template_key, - artist_note, + version, + comment, + description, ): """Save current state of workfile to workarea. Args: folder_id (str): Folder id. task_id (str): Task id. - workdir (str): Workarea directory. + rootless_workdir (str): Workarea directory. filename (str): Workarea filename. template_key (str): Template key used to get the workdir and filename. - """ + version (Optional[int]): Version of workfile. + comment (Optional[str]): User's comment (subversion). + description (Optional[str]): Workfile description. + """ pass @abstractmethod @@ -1040,8 +897,10 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): task_id, workdir, filename, - template_key, - artist_note, + rootless_workdir, + version, + comment, + description, ): """Action to copy published workfile representation to workarea. @@ -1055,23 +914,40 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon): task_id (str): Task id. workdir (str): Workarea directory. filename (str): Workarea filename. - template_key (str): Template key. - artist_note (str): Artist note. - """ + rootless_workdir (str): Rootless workdir. + version (int): Workfile version. + comment (str): User's comment (subversion). + description (str): Description note. 
+ """ pass @abstractmethod - def duplicate_workfile(self, src_filepath, workdir, filename, artist_note): + def duplicate_workfile( + self, + folder_id, + task_id, + src_filepath, + rootless_workdir, + workdir, + filename, + description, + version, + comment + ): """Duplicate workfile. Workfiles is not opened when done. Args: + folder_id (str): Folder id. + task_id (str): Task id. src_filepath (str): Source workfile path. + rootless_workdir (str): Rootless workdir. workdir (str): Destination workdir. filename (str): Destination filename. - artist_note (str): Artist note. + version (int): Workfile version. + comment (str): User's comment (subversion). + description (str): Workfile description. """ - pass diff --git a/client/ayon_core/tools/workfiles/control.py b/client/ayon_core/tools/workfiles/control.py index 3a7459da0c..4391e6b5fd 100644 --- a/client/ayon_core/tools/workfiles/control.py +++ b/client/ayon_core/tools/workfiles/control.py @@ -1,19 +1,13 @@ import os -import shutil import ayon_api from ayon_core.host import IWorkfileHost -from ayon_core.lib import Logger, emit_event +from ayon_core.lib import Logger from ayon_core.lib.events import QueuedEventSystem from ayon_core.settings import get_project_settings from ayon_core.pipeline import Anatomy, registered_host -from ayon_core.pipeline.context_tools import ( - change_current_context, - get_current_host_name, - get_global_context, -) -from ayon_core.pipeline.workfile import create_workdir_extra_folders +from ayon_core.pipeline.context_tools import get_global_context from ayon_core.tools.common_models import ( HierarchyModel, @@ -140,12 +134,7 @@ class BaseWorkfileController( if host is None: host = registered_host() - host_is_valid = False - if host is not None: - missing_methods = ( - IWorkfileHost.get_missing_workfile_methods(host) - ) - host_is_valid = len(missing_methods) == 0 + host_is_valid = isinstance(host, IWorkfileHost) self._host = host self._host_is_valid = host_is_valid @@ -182,7 +171,7 @@ class 
BaseWorkfileController( return UsersModel(self) def _create_workfiles_model(self): - return WorkfilesModel(self) + return WorkfilesModel(self._host, self) def _create_expected_selection_obj(self): return WorkfilesToolExpectedSelection(self) @@ -293,28 +282,14 @@ class BaseWorkfileController( # Host information def get_workfile_extensions(self): - host = self._host - if isinstance(host, IWorkfileHost): - return host.get_workfile_extensions() - return host.file_extensions() + return self._host.get_workfile_extensions() def has_unsaved_changes(self): - host = self._host - if isinstance(host, IWorkfileHost): - return host.workfile_has_unsaved_changes() - return host.has_unsaved_changes() + return self._host.workfile_has_unsaved_changes() # Current context def get_host_name(self): - host = self._host - if isinstance(host, IWorkfileHost): - return host.name - return get_current_host_name() - - def _get_host_current_context(self): - if hasattr(self._host, "get_current_context"): - return self._host.get_current_context() - return get_global_context() + return self._host.name def get_current_project_name(self): return self._current_project_name @@ -326,10 +301,7 @@ class BaseWorkfileController( return self._current_task_name def get_current_workfile(self): - host = self._host - if isinstance(host, IWorkfileHost): - return host.get_current_workfile() - return host.current_file() + return self._workfiles_model.get_current_workfile() # Selection information def get_selected_folder_id(self): @@ -350,8 +322,12 @@ class BaseWorkfileController( def get_selected_workfile_path(self): return self._selection_model.get_selected_workfile_path() - def set_selected_workfile_path(self, path): - self._selection_model.set_selected_workfile_path(path) + def set_selected_workfile_path( + self, rootless_path, path, workfile_entity_id + ): + self._selection_model.set_selected_workfile_path( + rootless_path, path, workfile_entity_id + ) def get_selected_representation_id(self): return 
self._selection_model.get_selected_representation_id() @@ -424,7 +400,7 @@ class BaseWorkfileController( def get_workarea_file_items(self, folder_id, task_name, sender=None): task_id = self._get_task_id(folder_id, task_name) return self._workfiles_model.get_workarea_file_items( - folder_id, task_id, task_name + folder_id, task_id ) def get_workarea_save_as_data(self, folder_id, task_id): @@ -450,28 +426,34 @@ class BaseWorkfileController( ) def get_published_file_items(self, folder_id, task_id): - task_name = None - if task_id: - task = self.get_task_entity( - self.get_current_project_name(), task_id - ) - task_name = task.get("name") - return self._workfiles_model.get_published_file_items( - folder_id, task_name) + folder_id, task_id + ) - def get_workfile_info(self, folder_id, task_name, filepath): - task_id = self._get_task_id(folder_id, task_name) + def get_workfile_info(self, folder_id, task_id, rootless_path): return self._workfiles_model.get_workfile_info( - folder_id, task_id, filepath + folder_id, task_id, rootless_path ) - def save_workfile_info(self, folder_id, task_name, filepath, note): - task_id = self._get_task_id(folder_id, task_name) + def save_workfile_info( + self, + task_id, + rootless_path, + version=None, + comment=None, + description=None, + ): self._workfiles_model.save_workfile_info( - folder_id, task_id, filepath, note + task_id, + rootless_path, + version, + comment, + description, ) + def get_workfile_entities(self, task_id): + return self._workfiles_model.get_workfile_entities(task_id) + def reset(self): if not self._host_is_valid: self._emit_event("controller.reset.started") @@ -509,6 +491,7 @@ class BaseWorkfileController( self._projects_model.reset() self._hierarchy_model.reset() + self._workfiles_model.reset() if not expected_folder_id: expected_folder_id = folder_id @@ -528,53 +511,31 @@ class BaseWorkfileController( # Controller actions def open_workfile(self, folder_id, task_id, filepath): - 
self._emit_event("open_workfile.started") - - failed = False - try: - self._open_workfile(folder_id, task_id, filepath) - - except Exception: - failed = True - self.log.warning("Open of workfile failed", exc_info=True) - - self._emit_event( - "open_workfile.finished", - {"failed": failed}, - ) + self._workfiles_model.open_workfile(folder_id, task_id, filepath) def save_current_workfile(self): - current_file = self.get_current_workfile() - self._host_save_workfile(current_file) + self._workfiles_model.save_current_workfile() def save_as_workfile( self, folder_id, task_id, + rootless_workdir, workdir, filename, - template_key, - artist_note, + version, + comment, + description, ): - self._emit_event("save_as.started") - - failed = False - try: - self._save_as_workfile( - folder_id, - task_id, - workdir, - filename, - template_key, - artist_note=artist_note, - ) - except Exception: - failed = True - self.log.warning("Save as failed", exc_info=True) - - self._emit_event( - "save_as.finished", - {"failed": failed}, + self._workfiles_model.save_as_workfile( + folder_id, + task_id, + rootless_workdir, + workdir, + filename, + version, + comment, + description, ) def copy_workfile_representation( @@ -585,64 +546,48 @@ class BaseWorkfileController( task_id, workdir, filename, - template_key, - artist_note, + rootless_workdir, + version, + comment, + description, ): - self._emit_event("copy_representation.started") - - failed = False - try: - self._save_as_workfile( - folder_id, - task_id, - workdir, - filename, - template_key, - artist_note, - src_filepath=representation_filepath - ) - except Exception: - failed = True - self.log.warning( - "Copy of workfile representation failed", exc_info=True - ) - - self._emit_event( - "copy_representation.finished", - {"failed": failed}, + self._workfiles_model.copy_workfile_representation( + representation_id, + representation_filepath, + folder_id, + task_id, + workdir, + filename, + rootless_workdir, + version, + comment, + 
description, ) - def duplicate_workfile(self, src_filepath, workdir, filename, artist_note): - self._emit_event("workfile_duplicate.started") - - failed = False - try: - dst_filepath = os.path.join(workdir, filename) - shutil.copy(src_filepath, dst_filepath) - except Exception: - failed = True - self.log.warning("Duplication of workfile failed", exc_info=True) - - self._emit_event( - "workfile_duplicate.finished", - {"failed": failed}, + def duplicate_workfile( + self, + folder_id, + task_id, + src_filepath, + rootless_workdir, + workdir, + filename, + version, + comment, + description + ): + self._workfiles_model.duplicate_workfile( + folder_id, + task_id, + src_filepath, + rootless_workdir, + workdir, + filename, + version, + comment, + description, ) - # Helper host methods that resolve 'IWorkfileHost' interface - def _host_open_workfile(self, filepath): - host = self._host - if isinstance(host, IWorkfileHost): - host.open_workfile(filepath) - else: - host.open_file(filepath) - - def _host_save_workfile(self, filepath): - host = self._host - if isinstance(host, IWorkfileHost): - host.save_workfile(filepath) - else: - host.save_file(filepath) - def _emit_event(self, topic, data=None): self.emit_event(topic, data, "controller") @@ -657,6 +602,11 @@ class BaseWorkfileController( return None return task_item.id + def _get_host_current_context(self): + if hasattr(self._host, "get_current_context"): + return self._host.get_current_context() + return get_global_context() + # Expected selection # - expected selection is used to restore selection after refresh # or when current context should be used @@ -665,123 +615,3 @@ class BaseWorkfileController( "expected_selection_changed", self._expected_selection.get_expected_selection_data(), ) - - def _get_event_context_data( - self, project_name, folder_id, task_id, folder=None, task=None - ): - if folder is None: - folder = self.get_folder_entity(project_name, folder_id) - if task is None: - task = 
self.get_task_entity(project_name, task_id) - return { - "project_name": project_name, - "folder_id": folder_id, - "folder_path": folder["path"], - "task_id": task_id, - "task_name": task["name"], - "host_name": self.get_host_name(), - } - - def _open_workfile(self, folder_id, task_id, filepath): - project_name = self.get_current_project_name() - event_data = self._get_event_context_data( - project_name, folder_id, task_id - ) - event_data["filepath"] = filepath - - emit_event("workfile.open.before", event_data, source="workfiles.tool") - - # Change context - task_name = event_data["task_name"] - if ( - folder_id != self.get_current_folder_id() - or task_name != self.get_current_task_name() - ): - self._change_current_context(project_name, folder_id, task_id) - - self._host_open_workfile(filepath) - - emit_event("workfile.open.after", event_data, source="workfiles.tool") - - def _save_as_workfile( - self, - folder_id: str, - task_id: str, - workdir: str, - filename: str, - template_key: str, - artist_note: str, - src_filepath=None, - ): - # Trigger before save event - project_name = self.get_current_project_name() - folder = self.get_folder_entity(project_name, folder_id) - task = self.get_task_entity(project_name, task_id) - task_name = task["name"] - - # QUESTION should the data be different for 'before' and 'after'? 
- event_data = self._get_event_context_data( - project_name, folder_id, task_id, folder, task - ) - event_data.update({ - "filename": filename, - "workdir_path": workdir, - }) - - emit_event("workfile.save.before", event_data, source="workfiles.tool") - - # Create workfiles root folder - if not os.path.exists(workdir): - self.log.debug("Initializing work directory: %s", workdir) - os.makedirs(workdir) - - # Change context - if ( - folder_id != self.get_current_folder_id() - or task_name != self.get_current_task_name() - ): - self._change_current_context( - project_name, folder_id, task_id, template_key - ) - - # Save workfile - dst_filepath = os.path.join(workdir, filename) - if src_filepath: - shutil.copyfile(src_filepath, dst_filepath) - self._host_open_workfile(dst_filepath) - else: - self._host_save_workfile(dst_filepath) - - # Make sure workfile info exists - if not artist_note: - artist_note = None - self.save_workfile_info( - folder_id, task_name, dst_filepath, note=artist_note - ) - - # Create extra folders - create_workdir_extra_folders( - workdir, - self.get_host_name(), - task["taskType"], - task_name, - project_name - ) - - # Trigger after save events - emit_event("workfile.save.after", event_data, source="workfiles.tool") - - def _change_current_context( - self, project_name, folder_id, task_id, template_key=None - ): - # Change current context - folder_entity = self.get_folder_entity(project_name, folder_id) - task_entity = self.get_task_entity(project_name, task_id) - change_current_context( - folder_entity, - task_entity, - template_key=template_key - ) - self._current_folder_id = folder_entity["id"] - self._current_folder_path = folder_entity["path"] - self._current_task_name = task_entity["name"] diff --git a/client/ayon_core/tools/workfiles/models/selection.py b/client/ayon_core/tools/workfiles/models/selection.py index 2f0896842d..9a6440b2a1 100644 --- a/client/ayon_core/tools/workfiles/models/selection.py +++ 
b/client/ayon_core/tools/workfiles/models/selection.py @@ -62,7 +62,9 @@ class SelectionModel(object): def get_selected_workfile_path(self): return self._workfile_path - def set_selected_workfile_path(self, path): + def set_selected_workfile_path( + self, rootless_path, path, workfile_entity_id + ): if path == self._workfile_path: return @@ -72,9 +74,11 @@ class SelectionModel(object): { "project_name": self._controller.get_current_project_name(), "path": path, + "rootless_path": rootless_path, "folder_id": self._folder_id, "task_name": self._task_name, "task_id": self._task_id, + "workfile_entity_id": workfile_entity_id, }, self.event_source ) diff --git a/client/ayon_core/tools/workfiles/models/workfiles.py b/client/ayon_core/tools/workfiles/models/workfiles.py index cc034571f3..d33a532222 100644 --- a/client/ayon_core/tools/workfiles/models/workfiles.py +++ b/client/ayon_core/tools/workfiles/models/workfiles.py @@ -1,13 +1,32 @@ +from __future__ import annotations import os -import re import copy -import uuid +import platform +import typing +from typing import Optional, Any -import arrow import ayon_api -from ayon_api.operations import OperationsSession -from ayon_core.lib import get_ayon_username +from ayon_core.lib import ( + get_ayon_username, + NestedCacheItem, + CacheItem, + Logger, +) +from ayon_core.host import ( + HostBase, + IWorkfileHost, + WorkfileInfo, + PublishedWorkfileInfo, +) +from ayon_core.host.interfaces import ( + OpenWorkfileOptionalData, + ListWorkfilesOptionalData, + ListPublishedWorkfilesOptionalData, + SaveWorkfileOptionalData, + CopyWorkfileOptionalData, + CopyPublishedWorkfileOptionalData, +) from ayon_core.pipeline.template_data import ( get_template_data, get_task_template_data, @@ -16,147 +35,339 @@ from ayon_core.pipeline.template_data import ( from ayon_core.pipeline.workfile import ( get_workdir_with_workdir_data, get_workfile_template_key, - get_last_workfile_with_version, + save_workfile_info, ) from 
ayon_core.pipeline.version_start import get_versioning_start from ayon_core.tools.workfiles.abstract import ( WorkareaFilepathResult, - FileItem, - WorkfileInfo, + AbstractWorkfilesBackend, ) +if typing.TYPE_CHECKING: + from ayon_core.pipeline import Anatomy + _NOT_SET = object() -class CommentMatcher(object): - """Use anatomy and work file data to parse comments from filenames. - - Args: - extensions (set[str]): Set of extensions. - file_template (AnatomyStringTemplate): File template. - data (dict[str, Any]): Data to fill the template with. - - """ - def __init__(self, extensions, file_template, data): - self.fname_regex = None - - if "{comment}" not in file_template: - # Don't look for comment if template doesn't allow it - return - - # Create a regex group for extensions - any_extension = "(?:{})".format( - "|".join(re.escape(ext.lstrip(".")) for ext in extensions) - ) - - # Use placeholders that will never be in the filename - temp_data = copy.deepcopy(data) - temp_data["comment"] = "<>" - temp_data["version"] = "<>" - temp_data["ext"] = "<>" - - fname_pattern = file_template.format_strict(temp_data) - fname_pattern = re.escape(fname_pattern) - - # Replace comment and version with something we can match with regex - replacements = { - "<>": "(.+)", - "<>": "[0-9]+", - "<>": any_extension, - } - for src, dest in replacements.items(): - fname_pattern = fname_pattern.replace(re.escape(src), dest) - - # Match from beginning to end of string to be safe - fname_pattern = "^{}$".format(fname_pattern) - - self.fname_regex = re.compile(fname_pattern) - - def parse_comment(self, filepath): - """Parse the {comment} part from a filename""" - if not self.fname_regex: - return - - fname = os.path.basename(filepath) - match = self.fname_regex.match(fname) - if match: - return match.group(1) +class HostType(HostBase, IWorkfileHost): + pass -class WorkareaModel: - """Workfiles model looking for workfiles in workare folder. 
+class WorkfilesModel: + """Workfiles model.""" - Workarea folder is usually task and host specific, defined by - anatomy templates. Is looking for files with extensions defined - by host integration. - """ + def __init__( + self, + host: HostType, + controller: AbstractWorkfilesBackend + ): + self._host: HostType = host + self._controller: AbstractWorkfilesBackend = controller - def __init__(self, controller): - self._controller = controller + self._log = Logger.get_logger("WorkfilesModel") extensions = None if controller.is_host_valid(): extensions = controller.get_workfile_extensions() - self._extensions = extensions + self._extensions: Optional[set[str]] = extensions + + self._current_username = _NOT_SET + + # Workarea self._base_data = None self._fill_data_by_folder_id = {} self._task_data_by_folder_id = {} self._workdir_by_context = {} + self._workarea_file_items_mapping = {} + self._workarea_file_items_cache = NestedCacheItem( + levels=1, default_factory=list + ) - @property - def project_name(self): - return self._controller.get_current_project_name() + # Published workfiles + self._repre_by_id = {} + self._published_workfile_items_cache = NestedCacheItem( + levels=1, default_factory=list + ) + + # Entities + self._workfile_entities_by_task_id = {} def reset(self): self._base_data = None self._fill_data_by_folder_id = {} self._task_data_by_folder_id = {} + self._workdir_by_context = {} + self._workarea_file_items_mapping = {} + self._workarea_file_items_cache.reset() - def _get_base_data(self): - if self._base_data is None: - base_data = get_template_data( - ayon_api.get_project(self.project_name) - ) - base_data["app"] = self._controller.get_host_name() - self._base_data = base_data - return copy.deepcopy(self._base_data) + self._repre_by_id = {} + self._published_workfile_items_cache.reset() - def _get_folder_data(self, folder_id): - fill_data = self._fill_data_by_folder_id.get(folder_id) - if fill_data is None: - folder = 
self._controller.get_folder_entity( - self.project_name, folder_id - ) - fill_data = get_folder_template_data(folder, self.project_name) - self._fill_data_by_folder_id[folder_id] = fill_data - return copy.deepcopy(fill_data) + self._workfile_entities_by_task_id = {} - def _get_task_data(self, project_entity, folder_id, task_id): - task_data = self._task_data_by_folder_id.setdefault(folder_id, {}) - if task_id not in task_data: - task = self._controller.get_task_entity( - self.project_name, task_id - ) - if task: - task_data[task_id] = get_task_template_data( - project_entity, task) - return copy.deepcopy(task_data[task_id]) + # Host functionality + def get_current_workfile(self): + return self._host.get_current_workfile() - def _prepare_fill_data(self, folder_id, task_id): - if not folder_id or not task_id: - return {} + def open_workfile(self, folder_id, task_id, filepath): + self._emit_event("open_workfile.started") - base_data = self._get_base_data() - project_name = base_data["project"]["name"] - folder_data = self._get_folder_data(folder_id) + failed = False + try: + self._open_workfile(folder_id, task_id, filepath) + + except Exception: + failed = True + self._log.warning("Open of workfile failed", exc_info=True) + + self._emit_event( + "open_workfile.finished", + {"failed": failed}, + ) + + def save_current_workfile(self): + current_file = self.get_current_workfile() + self._host.save_workfile(current_file) + + def save_as_workfile( + self, + folder_id, + task_id, + rootless_workdir, + workdir, + filename, + version, + comment, + description, + ): + self._emit_event("save_as.started") + + filepath = os.path.join(workdir, filename) + rootless_path = f"{rootless_workdir}/{filename}" + project_name = self._controller.get_current_project_name() project_entity = self._controller.get_project_entity(project_name) - task_data = self._get_task_data(project_entity, folder_id, task_id) + folder_entity = self._controller.get_folder_entity( + project_name, folder_id + )
+ task_entity = self._controller.get_task_entity( + project_name, task_id + ) - base_data.update(folder_data) - base_data.update(task_data) + prepared_data = SaveWorkfileOptionalData( + project_entity=project_entity, + anatomy=self._controller.project_anatomy, + project_settings=self._controller.project_settings, + rootless_path=rootless_path, + workfile_entities=self.get_workfile_entities(task_id), + ) + failed = False + try: + self._host.save_workfile_with_context( + filepath, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + prepared_data=prepared_data, + ) + self._update_workfile_info( + task_id, rootless_path, description + ) + self._update_current_context( + folder_id, folder_entity["path"], task_entity["name"] + ) - return base_data + except Exception: + failed = True + self._log.warning("Save as failed", exc_info=True) - def get_workarea_dir_by_context(self, folder_id, task_id): + self._emit_event( + "save_as.finished", + {"failed": failed}, + ) + + def copy_workfile_representation( + self, + representation_id, + representation_filepath, + folder_id, + task_id, + workdir, + filename, + rootless_workdir, + version, + comment, + description, + ): + self._emit_event("copy_representation.started") + + project_name = self._project_name + project_entity = self._controller.get_project_entity(project_name) + folder_entity = self._controller.get_folder_entity( + project_name, folder_id + ) + task_entity = self._controller.get_task_entity( + project_name, task_id + ) + repre_entity = self._repre_by_id.get(representation_id) + dst_filepath = os.path.join(workdir, filename) + rootless_path = f"{rootless_workdir}/{filename}" + + prepared_data = CopyPublishedWorkfileOptionalData( + project_entity=project_entity, + anatomy=self._controller.project_anatomy, + project_settings=self._controller.project_settings, + rootless_path=rootless_path, + representation_path=representation_filepath, +
workfile_entities=self.get_workfile_entities(task_id), + src_anatomy=self._controller.project_anatomy, + ) + failed = False + try: + self._host.copy_workfile_representation( + project_name, + repre_entity, + dst_filepath, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + prepared_data=prepared_data, + ) + self._update_workfile_info( + task_id, rootless_path, description + ) + self._update_current_context( + folder_id, folder_entity["path"], task_entity["name"] + ) + + except Exception: + failed = True + self._log.warning( + "Copy of workfile representation failed", exc_info=True + ) + + self._emit_event( + "copy_representation.finished", + {"failed": failed}, + ) + + def duplicate_workfile( + self, + folder_id, + task_id, + src_filepath, + rootless_workdir, + workdir, + filename, + version, + comment, + description + ): + self._emit_event("workfile_duplicate.started") + + project_name = self._controller.get_current_project_name() + project_entity = self._controller.get_project_entity(project_name) + folder_entity = self._controller.get_folder_entity( + project_name, folder_id + ) + task_entity = self._controller.get_task_entity(project_name, task_id) + workfile_entities = self.get_workfile_entities(task_id) + rootless_path = f"{rootless_workdir}/{filename}" + workfile_path = os.path.join(workdir, filename) + + prepared_data = CopyWorkfileOptionalData( + project_entity=project_entity, + project_settings=self._controller.project_settings, + anatomy=self._controller.project_anatomy, + rootless_path=rootless_path, + workfile_entities=workfile_entities, + ) + failed = False + try: + self._host.copy_workfile( + src_filepath, + workfile_path, + folder_entity, + task_entity, + version=version, + comment=comment, + description=description, + prepared_data=prepared_data, + ) + + except Exception: + failed = True + self._log.warning("Duplication of workfile failed", exc_info=True) + + self._emit_event( +
"workfile_duplicate.finished", + {"failed": failed}, + ) + + def get_workfile_entities(self, task_id: str): + if not task_id: + return [] + workfile_entities = self._workfile_entities_by_task_id.get(task_id) + if workfile_entities is None: + workfile_entities = list(ayon_api.get_workfiles_info( + self._project_name, + task_ids=[task_id], + )) + self._workfile_entities_by_task_id[task_id] = workfile_entities + return workfile_entities + + def get_workfile_info( + self, + folder_id: Optional[str], + task_id: Optional[str], + rootless_path: Optional[str] + ): + if not folder_id or not task_id or not rootless_path: + return None + + mapping = self._workarea_file_items_mapping.get(task_id) + if mapping is None: + self._cache_file_items(folder_id, task_id) + mapping = self._workarea_file_items_mapping[task_id] + return mapping.get(rootless_path) + + def save_workfile_info( + self, + task_id: str, + rootless_path: str, + version: Optional[int], + comment: Optional[str], + description: Optional[str], + ): + self._save_workfile_info( + task_id, + rootless_path, + version, + comment, + description, + ) + + self._update_file_description( + task_id, rootless_path, description + ) + + def get_workarea_dir_by_context( + self, folder_id: str, task_id: str + ) -> Optional[str]: + """Workarea dir for passed context. + + The directory path is based on project anatomy templates. + + Args: + folder_id (str): Folder id. + task_id (str): Task id. + + Returns: + Optional[str]: Workarea dir path or None for invalid context. 
+ + """ if not folder_id or not task_id: return None folder_mapping = self._workdir_by_context.setdefault(folder_id, {}) @@ -168,164 +379,48 @@ class WorkareaModel: workdir = get_workdir_with_workdir_data( workdir_data, - self.project_name, + self._project_name, anatomy=self._controller.project_anatomy, ) folder_mapping[task_id] = workdir return workdir - def get_file_items(self, folder_id, task_id, task_name): - items = [] - if not folder_id or not task_id: - return items - - workdir = self.get_workarea_dir_by_context(folder_id, task_id) - if not os.path.exists(workdir): - return items - - for filename in os.listdir(workdir): - # We want to support both files and folders. e.g. Silhoutte uses - # folders as its project files. So we do not check whether it is - # a file or not. - filepath = os.path.join(workdir, filename) - - ext = os.path.splitext(filename)[1].lower() - if ext not in self._extensions: - continue - - workfile_info = self._controller.get_workfile_info( - folder_id, task_name, filepath - ) - modified = os.path.getmtime(filepath) - items.append(FileItem( - workdir, - filename, - modified, - workfile_info.created_by, - workfile_info.updated_by, - )) - return items - - def _get_template_key(self, fill_data): - task_type = fill_data.get("task", {}).get("type") - # TODO cache - return get_workfile_template_key( - self.project_name, - task_type, - self._controller.get_host_name(), - ) - - def _get_last_workfile_version( - self, workdir, file_template, fill_data, extensions - ): - """ - - Todos: - Validate if logic of this function is correct. It does return - last version + 1 which might be wrong. + def get_workarea_file_items(self, folder_id, task_id): + """Workfile items for passed context from workarea. Args: - workdir (str): Workdir path. - file_template (str): File template. - fill_data (dict[str, Any]): Fill data. - extensions (set[str]): Extensions. + folder_id (Optional[str]): Folder id. + task_id (Optional[str]): Task id. 
Returns: - int: Next workfile version. + list[WorkfileInfo]: List of file items matching workarea of passed + context. """ - version = get_last_workfile_with_version( - workdir, file_template, fill_data, extensions - )[1] + return self._cache_file_items(folder_id, task_id) - if version is None: - task_info = fill_data.get("task", {}) - version = get_versioning_start( - self.project_name, - self._controller.get_host_name(), - task_name=task_info.get("name"), - task_type=task_info.get("type"), - product_type="workfile", - project_settings=self._controller.project_settings, - ) - else: - version += 1 - return version - - def _get_comments_from_root( - self, - file_template, - extensions, - fill_data, - root, - current_filename, - ): - """Get comments from root directory. - - Args: - file_template (AnatomyStringTemplate): File template. - extensions (set[str]): Extensions. - fill_data (dict[str, Any]): Fill data. - root (str): Root directory. - current_filename (str): Current filename. - - Returns: - Tuple[list[str], Union[str, None]]: Comment hints and current - comment. 
- - """ - current_comment = None - filenames = [] - if root and os.path.exists(root): - for filename in os.listdir(root): - path = os.path.join(root, filename) - if not os.path.isfile(path): - continue - - ext = os.path.splitext(filename)[-1].lower() - if ext in extensions: - filenames.append(filename) - - if not filenames: - return [], current_comment - - matcher = CommentMatcher(extensions, file_template, fill_data) - - comment_hints = set() - for filename in filenames: - comment = matcher.parse_comment(filename) - if comment: - comment_hints.add(comment) - if filename == current_filename: - current_comment = comment - - return list(comment_hints), current_comment - - def _get_workdir(self, anatomy, template_key, fill_data): - directory_template = anatomy.get_template_item( - "work", template_key, "directory" - ) - return directory_template.format_strict(fill_data).normalized() - - def get_workarea_save_as_data(self, folder_id, task_id): + def get_workarea_save_as_data( + self, folder_id: Optional[str], task_id: Optional[str] + ) -> dict[str, Any]: folder_entity = None task_entity = None if folder_id: folder_entity = self._controller.get_folder_entity( - self.project_name, folder_id + self._project_name, folder_id ) if folder_entity and task_id: task_entity = self._controller.get_task_entity( - self.project_name, task_id + self._project_name, task_id ) - if not folder_entity or not task_entity: + if not folder_entity or not task_entity or self._extensions is None: return { "template_key": None, "template_has_version": None, "template_has_comment": None, "ext": None, "workdir": None, + "rootless_workdir": None, "comment": None, "comment_hints": None, "last_version": None, @@ -349,6 +444,17 @@ class WorkareaModel: workdir = self._get_workdir(anatomy, template_key, fill_data) + rootless_workdir = workdir + if platform.system().lower() == "windows": + rootless_workdir = rootless_workdir.replace("\\", "/") + + used_roots = workdir.used_values.get("root") + if 
used_roots: + used_root_name = next(iter(used_roots)) + root_value = used_roots[used_root_name] + workdir_end = rootless_workdir[len(root_value):].lstrip("/") + rootless_workdir = f"{{root[{used_root_name}]}}/{workdir_end}" + file_template = anatomy.get_template_item( "work", template_key, "file" ) @@ -357,15 +463,21 @@ class WorkareaModel: template_has_version = "{version" in file_template_str template_has_comment = "{comment" in file_template_str - comment_hints, comment = self._get_comments_from_root( - file_template, - extensions, - fill_data, - workdir, - current_filename, - ) + file_items = self.get_workarea_file_items(folder_id, task_id) + comment_hints = set() + comment = None + for item in file_items: + filepath = item.filepath + filename = os.path.basename(filepath) + if filename == current_filename: + comment = item.comment + + if item.comment: + comment_hints.add(item.comment) + comment_hints = list(comment_hints) + last_version = self._get_last_workfile_version( - workdir, file_template_str, fill_data, extensions + file_items, task_entity ) return { @@ -374,6 +486,7 @@ class WorkareaModel: "template_has_comment": template_has_comment, "ext": current_ext, "workdir": workdir, + "rootless_workdir": rootless_workdir, "comment": comment, "comment_hints": comment_hints, "last_version": last_version, @@ -382,13 +495,13 @@ class WorkareaModel: def fill_workarea_filepath( self, - folder_id, - task_id, - extension, - use_last_version, - version, - comment, - ): + folder_id: str, + task_id: str, + extension: str, + use_last_version: bool, + version: int, + comment: str, + ) -> WorkareaFilepathResult: """Fill workarea filepath based on context. 
Args: @@ -415,8 +528,12 @@ class WorkareaModel: ) if use_last_version: + file_items = self.get_workarea_file_items(folder_id, task_id) + task_entity = self._controller.get_task_entity( + self._project_name, task_id + ) version = self._get_last_workfile_version( - workdir, file_template.template, fill_data, self._extensions + file_items, task_entity ) fill_data["version"] = version fill_data["ext"] = extension.lstrip(".") @@ -439,374 +556,350 @@ class WorkareaModel: exists ) - -class WorkfileEntitiesModel: - """Workfile entities model. - - Args: - control (AbstractWorkfileController): Controller object. - """ - - def __init__(self, controller): - self._controller = controller - self._cache = {} - self._items = {} - self._current_username = _NOT_SET - - def _get_workfile_info_identifier( - self, folder_id, task_id, rootless_path - ): - return "_".join([folder_id, task_id, rootless_path]) - - def _get_rootless_path(self, filepath): - anatomy = self._controller.project_anatomy - - workdir, filename = os.path.split(filepath) - _, rootless_dir = anatomy.find_root_template_from_path(workdir) - return "/".join([ - os.path.normpath(rootless_dir).replace("\\", "/"), - filename - ]) - - def _prepare_workfile_info_item( - self, folder_id, task_id, workfile_info, filepath - ): - note = "" - created_by = None - updated_by = None - if workfile_info: - note = workfile_info["attrib"].get("description") or "" - created_by = workfile_info.get("createdBy") - updated_by = workfile_info.get("updatedBy") - - filestat = os.stat(filepath) - return WorkfileInfo( - folder_id, - task_id, - filepath, - filesize=filestat.st_size, - creation_time=filestat.st_ctime, - modification_time=filestat.st_mtime, - created_by=created_by, - updated_by=updated_by, - note=note - ) - - def _get_workfile_info(self, folder_id, task_id, identifier): - workfile_info = self._cache.get(identifier) - if workfile_info is not None: - return workfile_info - - for workfile_info in ayon_api.get_workfiles_info( - 
self._controller.get_current_project_name(), - task_ids=[task_id], - fields=["id", "path", "attrib", "createdBy", "updatedBy"], - ): - workfile_identifier = self._get_workfile_info_identifier( - folder_id, task_id, workfile_info["path"] - ) - self._cache[workfile_identifier] = workfile_info - return self._cache.get(identifier) - - def get_workfile_info( - self, folder_id, task_id, filepath, rootless_path=None - ): - if not folder_id or not task_id or not filepath: - return None - - if rootless_path is None: - rootless_path = self._get_rootless_path(filepath) - - identifier = self._get_workfile_info_identifier( - folder_id, task_id, rootless_path) - item = self._items.get(identifier) - if item is None: - workfile_info = self._get_workfile_info( - folder_id, task_id, identifier - ) - item = self._prepare_workfile_info_item( - folder_id, task_id, workfile_info, filepath - ) - self._items[identifier] = item - return item - - def save_workfile_info(self, folder_id, task_id, filepath, note): - rootless_path = self._get_rootless_path(filepath) - identifier = self._get_workfile_info_identifier( - folder_id, task_id, rootless_path - ) - workfile_info = self._get_workfile_info( - folder_id, task_id, identifier - ) - if not workfile_info: - self._cache[identifier] = self._create_workfile_info_entity( - task_id, rootless_path, note or "") - self._items.pop(identifier, None) - return - - old_note = workfile_info.get("attrib", {}).get("note") - - new_workfile_info = copy.deepcopy(workfile_info) - update_data = {} - if note is not None and old_note != note: - update_data["attrib"] = {"description": note} - attrib = new_workfile_info.setdefault("attrib", {}) - attrib["description"] = note - - username = self._get_current_username() - # Automatically fix 'createdBy' and 'updatedBy' fields - # NOTE both fields were not automatically filled by server - # until 1.1.3 release. 
- if workfile_info.get("createdBy") is None: - update_data["createdBy"] = username - new_workfile_info["createdBy"] = username - - if workfile_info.get("updatedBy") != username: - update_data["updatedBy"] = username - new_workfile_info["updatedBy"] = username - - if not update_data: - return - - self._cache[identifier] = new_workfile_info - self._items.pop(identifier, None) - - project_name = self._controller.get_current_project_name() - - session = OperationsSession() - session.update_entity( - project_name, - "workfile", - workfile_info["id"], - update_data, - ) - session.commit() - - def _create_workfile_info_entity(self, task_id, rootless_path, note): - extension = os.path.splitext(rootless_path)[1] - - project_name = self._controller.get_current_project_name() - - username = self._get_current_username() - workfile_info = { - "id": uuid.uuid4().hex, - "path": rootless_path, - "taskId": task_id, - "attrib": { - "extension": extension, - "description": note - }, - # TODO remove 'createdBy' and 'updatedBy' fields when server is - # or above 1.1.3 . - "createdBy": username, - "updatedBy": username, - } - - session = OperationsSession() - session.create_entity(project_name, "workfile", workfile_info) - session.commit() - return workfile_info - - def _get_current_username(self): - if self._current_username is _NOT_SET: - self._current_username = get_ayon_username() - return self._current_username - - -class PublishWorkfilesModel: - """Model for handling of published workfiles. - - Todos: - Cache workfiles products and representations for some time. - Note Representations won't change. Only what can change are - versions. 
- """ - - def __init__(self, controller): - self._controller = controller - self._cached_extensions = None - self._cached_repre_extensions = None - - @property - def _extensions(self): - if self._cached_extensions is None: - exts = self._controller.get_workfile_extensions() or [] - self._cached_extensions = exts - return self._cached_extensions - - @property - def _repre_extensions(self): - if self._cached_repre_extensions is None: - self._cached_repre_extensions = { - ext.lstrip(".") for ext in self._extensions - } - return self._cached_repre_extensions - - def _file_item_from_representation( - self, repre_entity, project_anatomy, author, task_name=None - ): - if task_name is not None: - task_info = repre_entity["context"].get("task") - if not task_info or task_info["name"] != task_name: - return None - - # Filter by extension - extensions = self._repre_extensions - workfile_path = None - for repre_file in repre_entity["files"]: - ext = ( - os.path.splitext(repre_file["name"])[1] - .lower() - .lstrip(".") - ) - if ext in extensions: - workfile_path = repre_file["path"] - break - - if not workfile_path: - return None - - try: - workfile_path = workfile_path.format( - root=project_anatomy.roots) - except Exception as exc: - print("Failed to format workfile path: {}".format(exc)) - - dirpath, filename = os.path.split(workfile_path) - created_at = arrow.get(repre_entity["createdAt"]).to("local") - return FileItem( - dirpath, - filename, - created_at.float_timestamp, - author, - None, - repre_entity["id"] - ) - - def get_file_items(self, folder_id, task_name): - # TODO refactor to use less server API calls - project_name = self._controller.get_current_project_name() - # Get subset docs of folder - product_entities = ayon_api.get_products( - project_name, - folder_ids={folder_id}, - product_types={"workfile"}, - fields={"id", "name"} - ) - - output = [] - product_ids = {product["id"] for product in product_entities} - if not product_ids: - return output - - # Get 
version docs of products with their families - version_entities = ayon_api.get_versions( - project_name, - product_ids=product_ids, - fields={"id", "author"} - ) - versions_by_id = { - version["id"]: version - for version in version_entities - } - if not versions_by_id: - return output - - # Query representations of filtered versions and add filter for - # extension - repre_entities = ayon_api.get_representations( - project_name, - version_ids=set(versions_by_id) - ) - project_anatomy = self._controller.project_anatomy - - # Filter queried representations by task name if task is set - file_items = [] - for repre_entity in repre_entities: - version_id = repre_entity["versionId"] - version_entity = versions_by_id[version_id] - file_item = self._file_item_from_representation( - repre_entity, - project_anatomy, - version_entity["author"], - task_name, - ) - if file_item is not None: - file_items.append(file_item) - - return file_items - - -class WorkfilesModel: - """Workfiles model.""" - - def __init__(self, controller): - self._controller = controller - - self._entities_model = WorkfileEntitiesModel(controller) - self._workarea_model = WorkareaModel(controller) - self._published_model = PublishWorkfilesModel(controller) - - def get_workfile_info(self, folder_id, task_id, filepath): - return self._entities_model.get_workfile_info( - folder_id, task_id, filepath - ) - - def save_workfile_info(self, folder_id, task_id, filepath, note): - self._entities_model.save_workfile_info( - folder_id, task_id, filepath, note - ) - - def get_workarea_dir_by_context(self, folder_id, task_id): - """Workarea dir for passed context. - - The directory path is based on project anatomy templates. + def get_published_file_items( + self, folder_id: str, task_id: str + ) -> list[PublishedWorkfileInfo]: + """Published workfiles for passed context. Args: folder_id (str): Folder id. task_id (str): Task id. Returns: - Union[str, None]: Workarea dir path or None for invalid context. 
+ list[PublishedWorkfileInfo]: List of files for published workfiles. + """ + if not folder_id: + return [] - return self._workarea_model.get_workarea_dir_by_context( - folder_id, task_id) + cache = self._published_workfile_items_cache[folder_id] + if not cache.is_valid: + project_name = self._project_name + anatomy = self._controller.project_anatomy - def get_workarea_file_items(self, folder_id, task_id, task_name): - """Workfile items for passed context from workarea. + product_entities = list(ayon_api.get_products( + project_name, + folder_ids={folder_id}, + product_types={"workfile"}, + fields={"id", "name"} + )) - Args: - folder_id (Union[str, None]): Folder id. - task_id (Union[str, None]): Task id. - task_name (Union[str, None]): Task name. + version_entities = [] + product_ids = {product["id"] for product in product_entities} + if product_ids: + # Get version docs of products with their families + version_entities = list(ayon_api.get_versions( + project_name, + product_ids=product_ids, + fields={"id", "author", "taskId"}, + )) - Returns: - list[FileItem]: List of file items matching workarea of passed - context. 
- """ - return self._workarea_model.get_file_items( - folder_id, task_id, task_name + repre_entities = [] + if version_entities: + repre_entities = list(ayon_api.get_representations( + project_name, + version_ids={v["id"] for v in version_entities} + )) + + self._repre_by_id.update({ + repre_entity["id"]: repre_entity + for repre_entity in repre_entities + }) + project_entity = self._controller.get_project_entity(project_name) + + prepared_data = ListPublishedWorkfilesOptionalData( + project_entity=project_entity, + anatomy=anatomy, + project_settings=self._controller.project_settings, + product_entities=product_entities, + version_entities=version_entities, + repre_entities=repre_entities, + ) + cache.update_data(self._host.list_published_workfiles( + project_name, + folder_id, + prepared_data=prepared_data, + )) + + items = cache.get_data() + + if task_id: + items = [ + item + for item in items + if item.task_id == task_id + ] + return items + + @property + def _project_name(self) -> str: + return self._controller.get_current_project_name() + + @property + def _host_name(self) -> str: + return self._host.name + + def _emit_event(self, topic, data=None): + self._controller.emit_event(topic, data, "workfiles") + + def _get_current_username(self) -> str: + if self._current_username is _NOT_SET: + self._current_username = get_ayon_username() + return self._current_username + + # --- Host --- + def _open_workfile(self, folder_id: str, task_id: str, filepath: str): + # TODO move to workfiles pipeline + project_name = self._project_name + project_entity = self._controller.get_project_entity(project_name) + folder_entity = self._controller.get_folder_entity( + project_name, folder_id + ) + task_entity = self._controller.get_task_entity( + project_name, task_id + ) + prepared_data = OpenWorkfileOptionalData( + project_entity=project_entity, + anatomy=self._controller.project_anatomy, + project_settings=self._controller.project_settings, + ) + 
self._host.open_workfile_with_context( + filepath, folder_entity, task_entity, prepared_data=prepared_data + ) + self._update_current_context( + folder_id, folder_entity["path"], task_entity["name"] ) - def get_workarea_save_as_data(self, folder_id, task_id): - return self._workarea_model.get_workarea_save_as_data( - folder_id, task_id) + def _update_current_context(self, folder_id, folder_path, task_name): + self._current_folder_id = folder_id + self._current_folder_path = folder_path + self._current_task_name = task_name - def fill_workarea_filepath(self, *args, **kwargs): - return self._workarea_model.fill_workarea_filepath( - *args, **kwargs + # --- Workarea --- + def _reset_workarea_file_items(self, task_id: str): + cache: CacheItem = self._workarea_file_items_cache[task_id] + cache.set_invalid() + self._workarea_file_items_mapping.pop(task_id, None) + + def _get_base_data(self) -> dict[str, Any]: + if self._base_data is None: + base_data = get_template_data( + ayon_api.get_project(self._project_name), + host_name=self._host_name, + ) + self._base_data = base_data + return copy.deepcopy(self._base_data) + + def _get_folder_data(self, folder_id: str) -> dict[str, Any]: + fill_data = self._fill_data_by_folder_id.get(folder_id) + if fill_data is None: + folder = self._controller.get_folder_entity( + self._project_name, folder_id + ) + fill_data = get_folder_template_data(folder, self._project_name) + self._fill_data_by_folder_id[folder_id] = fill_data + return copy.deepcopy(fill_data) + + def _get_task_data( + self, + project_entity: dict[str, Any], + folder_id: str, + task_id: str + ) -> dict[str, Any]: + task_data = self._task_data_by_folder_id.setdefault(folder_id, {}) + if task_id not in task_data: + task_entity = self._controller.get_task_entity( + self._project_name, task_id + ) + if task_entity: + task_data[task_id] = get_task_template_data( + project_entity, task_entity + ) + return copy.deepcopy(task_data[task_id]) + + def _prepare_fill_data( + self, 
folder_id: str, task_id: str + ) -> dict[str, Any]: + if not folder_id or not task_id: + return {} + + base_data = self._get_base_data() + project_name = base_data["project"]["name"] + folder_data = self._get_folder_data(folder_id) + project_entity = self._controller.get_project_entity(project_name) + task_data = self._get_task_data(project_entity, folder_id, task_id) + + base_data.update(folder_data) + base_data.update(task_data) + + return base_data + + def _cache_file_items( + self, folder_id: Optional[str], task_id: Optional[str] + ) -> list[WorkfileInfo]: + if not folder_id or not task_id: + return [] + + cache: CacheItem = self._workarea_file_items_cache[task_id] + if cache.is_valid: + return cache.get_data() + + project_entity = self._controller.get_project_entity( + self._project_name + ) + folder_entity = self._controller.get_folder_entity( + self._project_name, folder_id + ) + task_entity = self._controller.get_task_entity( + self._project_name, task_id + ) + anatomy = self._controller.project_anatomy + project_settings = self._controller.project_settings + workfile_entities = self._controller.get_workfile_entities(task_id) + + fill_data = self._prepare_fill_data(folder_id, task_id) + template_key = self._get_template_key(fill_data) + + prepared_data = ListWorkfilesOptionalData( + project_entity=project_entity, + anatomy=anatomy, + project_settings=project_settings, + template_key=template_key, + workfile_entities=workfile_entities, ) - def get_published_file_items(self, folder_id, task_name): - """Published workfiles for passed context. + items = self._host.list_workfiles( + self._project_name, + folder_entity, + task_entity, + prepared_data=prepared_data, + ) + cache.update_data(items) - Args: - folder_id (str): Folder id. - task_name (str): Task name. 
+ # Cache items by entity ids and rootless path + self._workarea_file_items_mapping[task_id] = { + item.rootless_path: item + for item in items + } - Returns: - list[FileItem]: List of files for published workfiles. + return items + + def _get_template_key(self, fill_data: dict[str, Any]) -> str: + task_type = fill_data.get("task", {}).get("type") + # TODO cache + return get_workfile_template_key( + self._project_name, + task_type, + self._host_name, + project_settings=self._controller.project_settings, + ) + + def _get_last_workfile_version( + self, file_items: list[WorkfileInfo], task_entity: dict[str, Any] + ) -> int: """ - return self._published_model.get_file_items(folder_id, task_name) + Todos: + Validate if logic of this function is correct. It does return + last version + 1 which might be wrong. + + Args: + file_items (list[WorkfileInfo]): Workfile items. + task_entity (dict[str, Any]): Task entity. + + Returns: + int: Next workfile version. + + """ + versions = { + item.version + for item in file_items + if item.version is not None + } + if versions: + return max(versions) + 1 + + return get_versioning_start( + self._project_name, + self._host_name, + task_name=task_entity["name"], + task_type=task_entity["taskType"], + product_type="workfile", + project_settings=self._controller.project_settings, + ) + + def _get_workdir( + self, anatomy: "Anatomy", template_key: str, fill_data: dict[str, Any] + ): + directory_template = anatomy.get_template_item( + "work", template_key, "directory" + ) + return directory_template.format_strict(fill_data).normalized() + + def _update_workfile_info( + self, + task_id: str, + rootless_path: str, + description: str, + ): + self._update_file_description(task_id, rootless_path, description) + self._reset_workarea_file_items(task_id) + + # Update workfile entity cache if are cached + if task_id in self._workfile_entities_by_task_id: + workfile_entities = self.get_workfile_entities(task_id) + + target_workfile_entity = None + 
for workfile_entity in workfile_entities: + if rootless_path == workfile_entity["path"]: + target_workfile_entity = workfile_entity + break + + if target_workfile_entity is None: + self._workfile_entities_by_task_id.pop(task_id, None) + self.get_workfile_entities(task_id) + else: + target_workfile_entity["attrib"]["description"] = description + + def _update_file_description( + self, task_id: str, rootless_path: str, description: str + ): + mapping = self._workarea_file_items_mapping.get(task_id) + if not mapping: + return + item = mapping.get(rootless_path) + if item is not None: + item.description = description + + # --- Workfile entities --- + def _save_workfile_info( + self, + task_id: str, + rootless_path: str, + version: Optional[int], + comment: Optional[str], + description: Optional[str], + ): + workfile_entity = save_workfile_info( + self._controller.get_current_project_name(), + task_id, + rootless_path, + self._controller.get_host_name(), + version=version, + comment=comment, + description=description, + workfile_entities=self.get_workfile_entities(task_id), + ) + # Update cache + workfile_entities = self.get_workfile_entities(task_id) + match_idx = None + for idx, entity in enumerate(workfile_entities): + if entity["id"] == workfile_entity["id"]: + # Update existing entity + match_idx = idx + break + + if match_idx is None: + workfile_entities.append(workfile_entity) + else: + workfile_entities[match_idx] = workfile_entity diff --git a/client/ayon_core/tools/workfiles/widgets/files_widget.py b/client/ayon_core/tools/workfiles/widgets/files_widget.py index f0b74f4289..0c8ad392e2 100644 --- a/client/ayon_core/tools/workfiles/widgets/files_widget.py +++ b/client/ayon_core/tools/workfiles/widgets/files_widget.py @@ -200,6 +200,9 @@ class FilesWidget(QtWidgets.QWidget): self._open_workfile(folder_id, task_id, path) def _on_current_open_requests(self): + # TODO validate if item under mouse is enabled + # - this uses selected item, but that does not have to be 
the one + # under mouse self._on_workarea_open_clicked() def _on_duplicate_request(self): @@ -210,11 +213,18 @@ class FilesWidget(QtWidgets.QWidget): result = self._exec_save_as_dialog() if result is None: return + folder_id = self._selected_folder_id + task_id = self._selected_task_id self._controller.duplicate_workfile( + folder_id, + task_id, filepath, + result["rootless_workdir"], result["workdir"], result["filename"], - artist_note=result["artist_note"] + version=result["version"], + comment=result["comment"], + description=result["description"] ) def _on_workarea_browse_clicked(self): @@ -259,10 +269,12 @@ class FilesWidget(QtWidgets.QWidget): self._controller.save_as_workfile( result["folder_id"], result["task_id"], + result["rootless_workdir"], result["workdir"], result["filename"], - result["template_key"], - artist_note=result["artist_note"] + version=result["version"], + comment=result["comment"], + description=result["description"] ) def _on_workarea_path_changed(self, event): @@ -314,12 +326,16 @@ class FilesWidget(QtWidgets.QWidget): result["task_id"], result["workdir"], result["filename"], - result["template_key"], - artist_note=result["artist_note"] + result["rootless_workdir"], + version=result["version"], + comment=result["comment"], + description=result["description"], ) def _on_save_as_request(self): - self._on_published_save_clicked() + # Make sure the save is enabled + if self._is_save_enabled and self._valid_selected_context: + self._on_published_save_clicked() def _set_select_contex_mode(self, enabled): if self._select_context_mode is enabled: diff --git a/client/ayon_core/tools/workfiles/widgets/files_widget_published.py b/client/ayon_core/tools/workfiles/widgets/files_widget_published.py index 07122046be..250204a7d7 100644 --- a/client/ayon_core/tools/workfiles/widgets/files_widget_published.py +++ b/client/ayon_core/tools/workfiles/widgets/files_widget_published.py @@ -1,3 +1,5 @@ +import os + import qtawesome from qtpy import QtWidgets, 
QtCore, QtGui @@ -205,24 +207,25 @@ class PublishedFilesModel(QtGui.QStandardItemModel): new_items.append(item) item.setColumnCount(self.columnCount()) item.setData(self._file_icon, QtCore.Qt.DecorationRole) - item.setData(file_item.filename, QtCore.Qt.DisplayRole) item.setData(repre_id, REPRE_ID_ROLE) - if file_item.exists: + if file_item.available: flags = QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable else: flags = QtCore.Qt.NoItemFlags - author = file_item.created_by + author = file_item.author user_item = user_items_by_name.get(author) if user_item is not None and user_item.full_name: author = user_item.full_name - item.setFlags(flags) + filename = os.path.basename(file_item.filepath) + item.setFlags(flags) + item.setData(filename, QtCore.Qt.DisplayRole) item.setData(file_item.filepath, FILEPATH_ROLE) item.setData(author, AUTHOR_ROLE) - item.setData(file_item.modified, DATE_MODIFIED_ROLE) + item.setData(file_item.file_modified, DATE_MODIFIED_ROLE) self._items_by_id[repre_id] = item diff --git a/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py b/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py index 7f76b6a8ab..47d4902812 100644 --- a/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py +++ b/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py @@ -1,3 +1,5 @@ +import os + import qtawesome from qtpy import QtWidgets, QtCore, QtGui @@ -10,8 +12,10 @@ from ayon_core.tools.utils.delegates import PrettyTimeDelegate FILENAME_ROLE = QtCore.Qt.UserRole + 1 FILEPATH_ROLE = QtCore.Qt.UserRole + 2 -AUTHOR_ROLE = QtCore.Qt.UserRole + 3 -DATE_MODIFIED_ROLE = QtCore.Qt.UserRole + 4 +ROOTLESS_PATH_ROLE = QtCore.Qt.UserRole + 3 +AUTHOR_ROLE = QtCore.Qt.UserRole + 4 +DATE_MODIFIED_ROLE = QtCore.Qt.UserRole + 5 +WORKFILE_ENTITY_ID_ROLE = QtCore.Qt.UserRole + 6 class WorkAreaFilesModel(QtGui.QStandardItemModel): @@ -198,7 +202,7 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): items_to_remove = 
set(self._items_by_filename.keys()) new_items = [] for file_item in file_items: - filename = file_item.filename + filename = os.path.basename(file_item.filepath) if filename in self._items_by_filename: items_to_remove.discard(filename) item = self._items_by_filename[filename] @@ -206,23 +210,28 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel): item = QtGui.QStandardItem() new_items.append(item) item.setColumnCount(self.columnCount()) - item.setFlags( - QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable - ) item.setData(self._file_icon, QtCore.Qt.DecorationRole) - item.setData(file_item.filename, QtCore.Qt.DisplayRole) - item.setData(file_item.filename, FILENAME_ROLE) + item.setData(filename, QtCore.Qt.DisplayRole) + item.setData(filename, FILENAME_ROLE) + flags = QtCore.Qt.ItemIsSelectable + if file_item.available: + flags |= QtCore.Qt.ItemIsEnabled + item.setFlags(flags) updated_by = file_item.updated_by user_item = user_items_by_name.get(updated_by) if user_item is not None and user_item.full_name: updated_by = user_item.full_name + item.setData( + file_item.workfile_entity_id, WORKFILE_ENTITY_ID_ROLE + ) item.setData(file_item.filepath, FILEPATH_ROLE) + item.setData(file_item.rootless_path, ROOTLESS_PATH_ROLE) + item.setData(file_item.file_modified, DATE_MODIFIED_ROLE) item.setData(updated_by, AUTHOR_ROLE) - item.setData(file_item.modified, DATE_MODIFIED_ROLE) - self._items_by_filename[file_item.filename] = item + self._items_by_filename[filename] = item if new_items: root_item.appendRows(new_items) @@ -354,14 +363,18 @@ class WorkAreaFilesWidget(QtWidgets.QWidget): def _get_selected_info(self): selection_model = self._view.selectionModel() - filepath = None - filename = None + workfile_entity_id = filename = rootless_path = filepath = None for index in selection_model.selectedIndexes(): filepath = index.data(FILEPATH_ROLE) + rootless_path = index.data(ROOTLESS_PATH_ROLE) filename = index.data(FILENAME_ROLE) + workfile_entity_id = 
index.data(WORKFILE_ENTITY_ID_ROLE) + return { "filepath": filepath, + "rootless_path": rootless_path, "filename": filename, + "workfile_entity_id": workfile_entity_id, } def get_selected_path(self): @@ -374,8 +387,12 @@ class WorkAreaFilesWidget(QtWidgets.QWidget): return self._get_selected_info()["filepath"] def _on_selection_change(self): - filepath = self.get_selected_path() - self._controller.set_selected_workfile_path(filepath) + info = self._get_selected_info() + self._controller.set_selected_workfile_path( + info["rootless_path"], + info["filepath"], + info["workfile_entity_id"], + ) def _on_mouse_double_click(self, event): if event.button() == QtCore.Qt.LeftButton: @@ -430,19 +447,25 @@ class WorkAreaFilesWidget(QtWidgets.QWidget): ) def _on_model_refresh(self): - if ( - not self._change_selection_on_refresh - or self._proxy_model.rowCount() < 1 - ): + if not self._change_selection_on_refresh: return # Find the row with latest date modified + indexes = [ + self._proxy_model.index(idx, 0) + for idx in range(self._proxy_model.rowCount()) + ] + filtered_indexes = [ + index + for index in indexes + if self._proxy_model.flags(index) & QtCore.Qt.ItemIsEnabled + ] + if not filtered_indexes: + return + latest_index = max( - ( - self._proxy_model.index(idx, 0) - for idx in range(self._proxy_model.rowCount()) - ), - key=lambda model_index: model_index.data(DATE_MODIFIED_ROLE) + filtered_indexes, + key=lambda model_index: model_index.data(DATE_MODIFIED_ROLE) or 0 ) # Select row of latest modified diff --git a/client/ayon_core/tools/workfiles/widgets/save_as_dialog.py b/client/ayon_core/tools/workfiles/widgets/save_as_dialog.py index bddff816fe..24d64319ca 100644 --- a/client/ayon_core/tools/workfiles/widgets/save_as_dialog.py +++ b/client/ayon_core/tools/workfiles/widgets/save_as_dialog.py @@ -108,6 +108,7 @@ class SaveAsDialog(QtWidgets.QDialog): self._ext_value = None self._filename = None self._workdir = None + self._rootless_workdir = None self._result = None @@ 
-144,8 +145,8 @@ class SaveAsDialog(QtWidgets.QDialog): version_layout.addWidget(last_version_check) # Artist note widget - artist_note_input = PlaceholderPlainTextEdit(inputs_widget) - artist_note_input.setPlaceholderText( + description_input = PlaceholderPlainTextEdit(inputs_widget) + description_input.setPlaceholderText( "Provide a note about this workfile.") # Preview widget @@ -166,7 +167,7 @@ class SaveAsDialog(QtWidgets.QDialog): subversion_label = QtWidgets.QLabel("Subversion:", inputs_widget) extension_label = QtWidgets.QLabel("Extension:", inputs_widget) preview_label = QtWidgets.QLabel("Preview:", inputs_widget) - artist_note_label = QtWidgets.QLabel("Artist Note:", inputs_widget) + description_label = QtWidgets.QLabel("Artist Note:", inputs_widget) # Build inputs inputs_layout = QtWidgets.QGridLayout(inputs_widget) @@ -178,8 +179,8 @@ class SaveAsDialog(QtWidgets.QDialog): inputs_layout.addWidget(extension_combobox, 2, 1) inputs_layout.addWidget(preview_label, 3, 0) inputs_layout.addWidget(preview_widget, 3, 1) - inputs_layout.addWidget(artist_note_label, 4, 0, 1, 2) - inputs_layout.addWidget(artist_note_input, 5, 0, 1, 2) + inputs_layout.addWidget(description_label, 4, 0, 1, 2) + inputs_layout.addWidget(description_input, 5, 0, 1, 2) # Build layout main_layout = QtWidgets.QVBoxLayout(self) @@ -214,13 +215,13 @@ class SaveAsDialog(QtWidgets.QDialog): self._extension_combobox = extension_combobox self._subversion_input = subversion_input self._preview_widget = preview_widget - self._artist_note_input = artist_note_input + self._description_input = description_input self._version_label = version_label self._subversion_label = subversion_label self._extension_label = extension_label self._preview_label = preview_label - self._artist_note_label = artist_note_label + self._description_label = description_label # Post init setup @@ -255,6 +256,7 @@ class SaveAsDialog(QtWidgets.QDialog): self._folder_id = folder_id self._task_id = task_id self._workdir = 
data["workdir"] + self._rootless_workdir = data["rootless_workdir"] self._comment_value = data["comment"] self._ext_value = data["ext"] self._template_key = data["template_key"] @@ -329,10 +331,13 @@ class SaveAsDialog(QtWidgets.QDialog): self._result = { "filename": self._filename, "workdir": self._workdir, + "rootless_workdir": self._rootless_workdir, "folder_id": self._folder_id, "task_id": self._task_id, "template_key": self._template_key, - "artist_note": self._artist_note_input.toPlainText(), + "version": self._version_value, + "comment": self._comment_value, + "description": self._description_input.toPlainText(), } self.close() diff --git a/client/ayon_core/tools/workfiles/widgets/side_panel.py b/client/ayon_core/tools/workfiles/widgets/side_panel.py index 7ba60b5544..b1b91d9721 100644 --- a/client/ayon_core/tools/workfiles/widgets/side_panel.py +++ b/client/ayon_core/tools/workfiles/widgets/side_panel.py @@ -4,6 +4,8 @@ from qtpy import QtWidgets, QtCore def file_size_to_string(file_size): + if not file_size: + return "N/A" size = 0 size_ending_mapping = { "KB": 1024 ** 1, @@ -43,44 +45,47 @@ class SidePanelWidget(QtWidgets.QWidget): details_input = QtWidgets.QPlainTextEdit(self) details_input.setReadOnly(True) - artist_note_widget = QtWidgets.QWidget(self) - note_label = QtWidgets.QLabel("Artist note", artist_note_widget) - note_input = QtWidgets.QPlainTextEdit(artist_note_widget) - btn_note_save = QtWidgets.QPushButton("Save note", artist_note_widget) + description_widget = QtWidgets.QWidget(self) + description_label = QtWidgets.QLabel("Artist note", description_widget) + description_input = QtWidgets.QPlainTextEdit(description_widget) + btn_description_save = QtWidgets.QPushButton( + "Save note", description_widget + ) - artist_note_layout = QtWidgets.QVBoxLayout(artist_note_widget) - artist_note_layout.setContentsMargins(0, 0, 0, 0) - artist_note_layout.addWidget(note_label, 0) - artist_note_layout.addWidget(note_input, 1) - 
artist_note_layout.addWidget( - btn_note_save, 0, alignment=QtCore.Qt.AlignRight + description_layout = QtWidgets.QVBoxLayout(description_widget) + description_layout.setContentsMargins(0, 0, 0, 0) + description_layout.addWidget(description_label, 0) + description_layout.addWidget(description_input, 1) + description_layout.addWidget( + btn_description_save, 0, alignment=QtCore.Qt.AlignRight ) main_layout = QtWidgets.QVBoxLayout(self) main_layout.setContentsMargins(0, 0, 0, 0) main_layout.addWidget(details_label, 0) main_layout.addWidget(details_input, 1) - main_layout.addWidget(artist_note_widget, 1) + main_layout.addWidget(description_widget, 1) - note_input.textChanged.connect(self._on_note_change) - btn_note_save.clicked.connect(self._on_save_click) + description_input.textChanged.connect(self._on_description_change) + btn_description_save.clicked.connect(self._on_save_click) controller.register_event_callback( "selection.workarea.changed", self._on_selection_change ) self._details_input = details_input - self._artist_note_widget = artist_note_widget - self._note_input = note_input - self._btn_note_save = btn_note_save + self._description_widget = description_widget + self._description_input = description_input + self._btn_description_save = btn_description_save self._folder_id = None - self._task_name = None + self._task_id = None self._filepath = None - self._orig_note = "" + self._rootless_path = None + self._orig_description = "" self._controller = controller - self._set_context(None, None, None) + self._set_context(None, None, None, None) def set_published_mode(self, published_mode): """Change published mode. @@ -89,64 +94,69 @@ class SidePanelWidget(QtWidgets.QWidget): published_mode (bool): Published mode enabled. 
""" - self._artist_note_widget.setVisible(not published_mode) + self._description_widget.setVisible(not published_mode) def _on_selection_change(self, event): folder_id = event["folder_id"] - task_name = event["task_name"] + task_id = event["task_id"] filepath = event["path"] + rootless_path = event["rootless_path"] - self._set_context(folder_id, task_name, filepath) + self._set_context(folder_id, task_id, rootless_path, filepath) - def _on_note_change(self): - text = self._note_input.toPlainText() - self._btn_note_save.setEnabled(self._orig_note != text) + def _on_description_change(self): + text = self._description_input.toPlainText() + self._btn_description_save.setEnabled(self._orig_description != text) def _on_save_click(self): - note = self._note_input.toPlainText() + description = self._description_input.toPlainText() self._controller.save_workfile_info( - self._folder_id, - self._task_name, - self._filepath, - note + self._task_id, + self._rootless_path, + description=description, ) - self._orig_note = note - self._btn_note_save.setEnabled(False) + self._orig_description = description + self._btn_description_save.setEnabled(False) - def _set_context(self, folder_id, task_name, filepath): + def _set_context(self, folder_id, task_id, rootless_path, filepath): workfile_info = None # Check if folder, task and file are selected - if bool(folder_id) and bool(task_name) and bool(filepath): + if folder_id and task_id and rootless_path: workfile_info = self._controller.get_workfile_info( - folder_id, task_name, filepath + folder_id, task_id, rootless_path ) enabled = workfile_info is not None self._details_input.setEnabled(enabled) - self._note_input.setEnabled(enabled) - self._btn_note_save.setEnabled(enabled) + self._description_input.setEnabled(enabled) + self._btn_description_save.setEnabled(enabled) self._folder_id = folder_id - self._task_name = task_name + self._task_id = task_id self._filepath = filepath + self._rootless_path = rootless_path # Disable inputs 
and remove texts if any required arguments are # missing if not enabled: - self._orig_note = "" + self._orig_description = "" self._details_input.setPlainText("") - self._note_input.setPlainText("") + self._description_input.setPlainText("") return - note = workfile_info.note - size_value = file_size_to_string(workfile_info.filesize) + description = workfile_info.description + size_value = file_size_to_string(workfile_info.file_size) # Append html string datetime_format = "%b %d %Y %H:%M:%S" - creation_time = datetime.datetime.fromtimestamp( - workfile_info.creation_time) - modification_time = datetime.datetime.fromtimestamp( - workfile_info.modification_time) + file_created = workfile_info.file_created + modification_time = workfile_info.file_modified + if file_created: + file_created = datetime.datetime.fromtimestamp(file_created) + + if modification_time: + modification_time = datetime.datetime.fromtimestamp( + modification_time) user_items_by_name = self._controller.get_user_items_by_name() @@ -156,33 +166,38 @@ class SidePanelWidget(QtWidgets.QWidget): return user_item.full_name return username - created_lines = [ - creation_time.strftime(datetime_format) - ] + created_lines = [] if workfile_info.created_by: - created_lines.insert( - 0, convert_username(workfile_info.created_by) + created_lines.append( + convert_username(workfile_info.created_by) ) + if file_created: + created_lines.append(file_created.strftime(datetime_format)) - modified_lines = [ - modification_time.strftime(datetime_format) - ] + if created_lines: + created_lines.insert(0, "Created:") + + modified_lines = [] if workfile_info.updated_by: - modified_lines.insert( - 0, convert_username(workfile_info.updated_by) + modified_lines.append( + convert_username(workfile_info.updated_by) ) + if modification_time: + modified_lines.append( + modification_time.strftime(datetime_format) + ) + if modified_lines: + modified_lines.insert(0, "Modified:") lines = ( "Size:", size_value, - "Created:", "
".join(created_lines), - "Modified:", "
".join(modified_lines), ) - self._orig_note = note - self._note_input.setPlainText(note) + self._orig_description = description + self._description_input.setPlainText(description) # Set as empty string self._details_input.setPlainText("") - self._details_input.appendHtml("
".join(lines)) + self._details_input.appendHtml("
".join(lines)) diff --git a/client/ayon_core/version.py b/client/ayon_core/version.py index 509c4a8d14..9f1bac6805 100644 --- a/client/ayon_core/version.py +++ b/client/ayon_core/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring AYON addon 'core' version.""" -__version__ = "1.4.1+dev" +__version__ = "1.5.2+dev" diff --git a/package.py b/package.py index 039bf0379c..7bd806159f 100644 --- a/package.py +++ b/package.py @@ -1,6 +1,6 @@ name = "core" title = "Core" -version = "1.4.1+dev" +version = "1.5.2+dev" client_dir = "ayon_core" diff --git a/pyproject.toml b/pyproject.toml index 9609729420..e67fcc2138 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ [tool.poetry] name = "ayon-core" -version = "1.4.1+dev" +version = "1.5.2+dev" description = "" authors = ["Ynput Team "] readme = "README.md" @@ -19,6 +19,7 @@ python = ">=3.9.1,<3.10" pytest = "^8.0" pytest-print = "^1.0" ayon-python-api = "^1.0" +arrow = "0.17.0" # linting dependencies ruff = "^0.11.7" pre-commit = "^4" diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index d690d79607..ee422a0acf 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -747,6 +747,11 @@ class ExtractReviewProfileModel(BaseSettingsModel): hosts: list[str] = SettingsField( default_factory=list, title="Host names" ) + task_types: list[str] = SettingsField( + default_factory=list, + title="Task Types", + enum_resolver=task_types_enum, + ) outputs: list[ExtractReviewOutputDefModel] = SettingsField( default_factory=list, title="Output Definitions" ) @@ -1348,6 +1353,7 @@ DEFAULT_PUBLISH_VALUES = { { "product_types": [], "hosts": [], + "task_types": [], "outputs": [ { "name": "png",