Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 08:24:53 +01:00)

Merge branch 'develop' into bugfix/some-batchdelivery-related-issues

Commit 77bb806267
64 changed files with 4890 additions and 1894 deletions
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 3 changed lines)

@@ -35,6 +35,9 @@ body:
       label: Version
       description: What version are you running? Look to AYON Tray
       options:
+        - 1.5.2
+        - 1.5.1
+        - 1.5.0
         - 1.4.1
         - 1.4.0
         - 1.3.2
@@ -8,6 +8,7 @@ import inspect
 import logging
 import threading
 import collections
+import warnings
 from uuid import uuid4
 from abc import ABC, abstractmethod
 from typing import Optional

@@ -815,10 +816,26 @@ class AddonsManager:

         Unknown keys are logged out.

+        Deprecated:
+            Use targeted methods 'collect_launcher_action_paths',
+            'collect_create_plugin_paths', 'collect_load_plugin_paths',
+            'collect_publish_plugin_paths' and
+            'collect_inventory_action_paths' to collect plugin paths.
+
         Returns:
             dict: Output is dictionary with keys "publish", "create", "load",
                 "actions" and "inventory" each containing list of paths.

         """
+        warnings.warn(
+            "Used deprecated method 'collect_plugin_paths'. Please use"
+            " targeted methods 'collect_launcher_action_paths',"
+            " 'collect_create_plugin_paths', 'collect_load_plugin_paths'"
+            " 'collect_publish_plugin_paths' and"
+            " 'collect_inventory_action_paths'",
+            DeprecationWarning,
+            stacklevel=2
+        )
         # Output structure
         output = {
             "publish": [],

@@ -874,24 +891,28 @@ class AddonsManager:
             if not isinstance(addon, IPluginPaths):
                 continue

+            paths = None
             method = getattr(addon, method_name)
             try:
                 paths = method(*args, **kwargs)
             except Exception:
                 self.log.warning(
-                    (
-                        "Failed to get plugin paths from addon"
-                        " '{}' using '{}'."
-                    ).format(addon.__class__.__name__, method_name),
+                    "Failed to get plugin paths from addon"
+                    f" '{addon.name}' using '{method_name}'.",
                     exc_info=True
                 )

+            if not paths:
                 continue

-            if paths:
-                # Convert to list if value is not list
-                if not isinstance(paths, (list, tuple, set)):
-                    paths = [paths]
-                output.extend(paths)
+            if isinstance(paths, str):
+                paths = [paths]
+                self.log.warning(
+                    f"Addon '{addon.name}' returned invalid output type"
+                    f" from '{method_name}'."
+                    f" Got 'str' expected 'list[str]'."
+                )
+            output.extend(paths)
         return output

     def collect_launcher_action_paths(self):
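The deprecation above points callers at the targeted collectors named in the warning message. A hedged sketch of how calling code might migrate; the bare AddonsManager() construction and the host name argument are assumptions for illustration, not taken from this diff:

    from ayon_core.addon import AddonsManager

    manager = AddonsManager()

    # Before: one dict with all categories (now emits DeprecationWarning)
    paths_by_type = manager.collect_plugin_paths()
    publish_paths = paths_by_type["publish"]

    # After: ask only for the category that is actually needed
    # (some targeted collectors may expect a host name argument; treat the
    # exact signatures as assumptions and check the AddonsManager API)
    publish_paths = manager.collect_publish_plugin_paths("maya")
    action_paths = manager.collect_launcher_action_paths()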
@@ -1,6 +1,7 @@
 """Addon interfaces for AYON."""
 from __future__ import annotations

+import warnings
 from abc import ABCMeta, abstractmethod
 from typing import TYPE_CHECKING, Callable, Optional, Type

@@ -39,26 +40,29 @@ class AYONInterface(metaclass=_AYONInterfaceMeta):


 class IPluginPaths(AYONInterface):
-    """Addon has plugin paths to return.
-
-    Expected result is dictionary with keys "publish", "create", "load",
-    "actions" or "inventory" and values as list or string.
-    {
-        "publish": ["path/to/publish_plugins"]
-    }
-    """
+    """Addon wants to register plugin paths."""

-    @abstractmethod
     def get_plugin_paths(self) -> dict[str, list[str]]:
         """Return plugin paths for addon.

+        This method was abstract (required) in the past, so raise the required
+            'core' addon version when 'get_plugin_paths' is removed from
+            addon.
+
+        Deprecated:
+            Please implement specific methods 'get_create_plugin_paths',
+            'get_load_plugin_paths', 'get_inventory_action_paths' and
+            'get_publish_plugin_paths' to return plugin paths.
+
         Returns:
             dict[str, list[str]]: Plugin paths for addon.

         """
+        return {}

     def _get_plugin_paths_by_type(
-        self, plugin_type: str) -> list[str]:
+        self, plugin_type: str
+    ) -> list[str]:
         """Get plugin paths by type.

         Args:

@@ -78,6 +82,24 @@ class IPluginPaths(AYONInterface):

         if not isinstance(paths, (list, tuple, set)):
             paths = [paths]

+        new_function_name = "get_launcher_action_paths"
+        if plugin_type == "create":
+            new_function_name = "get_create_plugin_paths"
+        elif plugin_type == "load":
+            new_function_name = "get_load_plugin_paths"
+        elif plugin_type == "publish":
+            new_function_name = "get_publish_plugin_paths"
+        elif plugin_type == "inventory":
+            new_function_name = "get_inventory_action_paths"
+
+        warnings.warn(
+            f"Addon '{self.name}' returns '{plugin_type}' paths using"
+            " 'get_plugin_paths' method. Please implement"
+            f" '{new_function_name}' instead.",
+            DeprecationWarning,
+            stacklevel=2
+        )
         return paths

     def get_launcher_action_paths(self) -> list[str]:
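On the addon side the same migration applies: implement the targeted getters instead of a single get_plugin_paths dict. A hedged sketch; the AYONAddon base class, the host_name parameter on the publish getter, and all paths are assumptions for illustration rather than part of this diff:

    import os

    from ayon_core.addon import AYONAddon, IPluginPaths

    _CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))


    class ExampleAddon(AYONAddon, IPluginPaths):
        name = "example"

        # Old style: one dict for everything. Relying on it now routes
        # through '_get_plugin_paths_by_type' and emits DeprecationWarning.
        # def get_plugin_paths(self):
        #     return {
        #         "publish": [os.path.join(_CURRENT_DIR, "plugins", "publish")]
        #     }

        def get_publish_plugin_paths(self, host_name):
            # 'host_name' (assumed signature) allows host-specific folders
            return [os.path.join(_CURRENT_DIR, "plugins", "publish")]

        def get_launcher_action_paths(self):
            return [os.path.join(_CURRENT_DIR, "launcher_actions")]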
@@ -33,6 +33,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
         "cinema4d",
         "silhouette",
         "gaffer",
+        "loki",
     }
     launch_types = {LaunchTypes.local}
@@ -24,6 +24,7 @@ class OCIOEnvHook(PreLaunchHook):
         "cinema4d",
         "silhouette",
         "gaffer",
+        "loki",
     }
     launch_types = set()
@@ -1,9 +1,12 @@
+from .constants import ContextChangeReason
 from .host import (
     HostBase,
 )
 from .interfaces import (
     IWorkfileHost,
+    WorkfileInfo,
+    PublishedWorkfileInfo,
     ILoadHost,
     IPublishHost,
     INewPublisher,

@@ -13,9 +16,13 @@ from .dirmap import HostDirmap


 __all__ = (
+    "ContextChangeReason",
+
     "HostBase",

     "IWorkfileHost",
+    "WorkfileInfo",
+    "PublishedWorkfileInfo",
     "ILoadHost",
     "IPublishHost",
     "INewPublisher",
client/ayon_core/host/constants.py (new file, 15 lines)

from enum import Enum


class StrEnum(str, Enum):
    """A string-based Enum class that allows for string comparison."""

    def __str__(self) -> str:
        return self.value


class ContextChangeReason(StrEnum):
    """Reasons for context change in the host."""
    undefined = "undefined"
    workfile_open = "workfile.opened"
    workfile_save = "workfile.saved"
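Because ContextChangeReason subclasses str, its members compare and format as plain strings, so hosts can pass either the enum member or its raw value. A small sketch of that behavior, assuming the import path of the new file above:

    from ayon_core.host.constants import ContextChangeReason

    reason = ContextChangeReason.workfile_save
    assert reason == "workfile.saved"        # plain string comparison works
    assert str(reason) == "workfile.saved"   # __str__ returns the raw value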
@@ -1,10 +1,37 @@
+from __future__ import annotations
+
 import os
 import logging
 import contextlib
-from abc import ABC, abstractproperty
+from abc import ABC, abstractmethod
+from dataclasses import dataclass
+import typing
+from typing import Optional, Any

-# NOTE can't import 'typing' because of issues in Maya 2020
-# - shiboken crashes on 'typing' module import
+import ayon_api
+
+from ayon_core.lib import emit_event
+
+from .constants import ContextChangeReason
+
+if typing.TYPE_CHECKING:
+    from ayon_core.pipeline import Anatomy
+
+    from typing import TypedDict
+
+    class HostContextData(TypedDict):
+        project_name: str
+        folder_path: Optional[str]
+        task_name: Optional[str]
+
+
+@dataclass
+class ContextChangeData:
+    project_entity: dict[str, Any]
+    folder_entity: dict[str, Any]
+    task_entity: dict[str, Any]
+    reason: ContextChangeReason
+    anatomy: Anatomy


 class HostBase(ABC):
@@ -92,8 +119,9 @@ class HostBase(ABC):
             self._log = logging.getLogger(self.__class__.__name__)
         return self._log

-    @abstractproperty
-    def name(self):
+    @property
+    @abstractmethod
+    def name(self) -> str:
         """Host name."""

         pass
@@ -106,7 +134,7 @@ class HostBase(ABC):

         return os.environ.get("AYON_PROJECT_NAME")

-    def get_current_folder_path(self):
+    def get_current_folder_path(self) -> Optional[str]:
         """
         Returns:
             Union[str, None]: Current asset name.
@@ -114,7 +142,7 @@ class HostBase(ABC):

         return os.environ.get("AYON_FOLDER_PATH")

-    def get_current_task_name(self):
+    def get_current_task_name(self) -> Optional[str]:
         """
         Returns:
             Union[str, None]: Current task name.
@@ -122,7 +150,7 @@ class HostBase(ABC):

         return os.environ.get("AYON_TASK_NAME")

-    def get_current_context(self):
+    def get_current_context(self) -> "HostContextData":
         """Get current context information.

         This method should be used to get current context of host. Usage of
@@ -141,6 +169,75 @@ class HostBase(ABC):
             "task_name": self.get_current_task_name()
         }

+    def set_current_context(
+        self,
+        folder_entity: dict[str, Any],
+        task_entity: dict[str, Any],
+        *,
+        reason: ContextChangeReason = ContextChangeReason.undefined,
+        project_entity: Optional[dict[str, Any]] = None,
+        anatomy: Optional[Anatomy] = None,
+    ) -> "HostContextData":
+        """Set current context information.
+
+        This method should be used to set current context of host. Usage of
+        this method can be crucial for host implementations in DCCs where
+        can be opened multiple workfiles at one moment and change of context
+        can't be caught properly.
+
+        Notes:
+            This method should not care about change of workdir and expect any
+                of the arguments.
+
+        Args:
+            folder_entity (Optional[dict[str, Any]]): Folder entity.
+            task_entity (Optional[dict[str, Any]]): Task entity.
+            reason (ContextChangeReason): Reason for context change.
+            project_entity (Optional[dict[str, Any]]): Project entity data.
+            anatomy (Optional[Anatomy]): Anatomy instance for the project.
+
+        Returns:
+            dict[str, Optional[str]]: Context information with project name,
+                folder path and task name.
+
+        """
+        from ayon_core.pipeline import Anatomy
+
+        folder_path = folder_entity["path"]
+        task_name = task_entity["name"]
+
+        context = self.get_current_context()
+        # Don't do anything if context did not change
+        if (
+            context["folder_path"] == folder_path
+            and context["task_name"] == task_name
+        ):
+            return context
+
+        project_name = self.get_current_project_name()
+        if project_entity is None:
+            project_entity = ayon_api.get_project(project_name)
+
+        if anatomy is None:
+            anatomy = Anatomy(project_name, project_entity=project_entity)
+
+        context_change_data = ContextChangeData(
+            project_entity,
+            folder_entity,
+            task_entity,
+            reason,
+            anatomy,
+        )
+        self._before_context_change(context_change_data)
+        self._set_current_context(context_change_data)
+        self._after_context_change(context_change_data)
+
+        return self._emit_context_change_event(
+            project_name,
+            folder_path,
+            task_name,
+        )
+
     def get_context_title(self):
         """Context title shown for UI purposes.
@@ -187,3 +284,91 @@ class HostBase(ABC):
             yield
         finally:
             pass

+    def _emit_context_change_event(
+        self,
+        project_name: str,
+        folder_path: Optional[str],
+        task_name: Optional[str],
+    ) -> "HostContextData":
+        """Emit context change event.
+
+        Args:
+            project_name (str): Name of the project.
+            folder_path (Optional[str]): Path of the folder.
+            task_name (Optional[str]): Name of the task.
+
+        Returns:
+            HostContextData: Data send to context change event.
+
+        """
+        data = {
+            "project_name": project_name,
+            "folder_path": folder_path,
+            "task_name": task_name,
+        }
+        emit_event("taskChanged", data)
+        return data
+
+    def _set_current_context(
+        self, context_change_data: ContextChangeData
+    ) -> None:
+        """Method that changes the context in host.
+
+        Can be overriden for hosts that do need different handling of context
+        than using environment variables.
+
+        Args:
+            context_change_data (ContextChangeData): Context change related
+                data.
+
+        """
+        project_name = self.get_current_project_name()
+        folder_path = None
+        task_name = None
+        if context_change_data.folder_entity:
+            folder_path = context_change_data.folder_entity["path"]
+        if context_change_data.task_entity:
+            task_name = context_change_data.task_entity["name"]
+
+        envs = {
+            "AYON_PROJECT_NAME": project_name,
+            "AYON_FOLDER_PATH": folder_path,
+            "AYON_TASK_NAME": task_name,
+        }
+
+        # Update the Session and environments. Pop from environments all
+        #   keys with value set to None.
+        for key, value in envs.items():
+            if value is None:
+                os.environ.pop(key, None)
+            else:
+                os.environ[key] = value
+
+    def _before_context_change(self, context_change_data: ContextChangeData):
+        """Before context is changed.
+
+        This method is called before the context is changed in the host.
+
+        Can be overridden to implement host specific logic.
+
+        Args:
+            context_change_data (ContextChangeData): Object with information
+                about context change.
+
+        """
+        pass
+
+    def _after_context_change(self, context_change_data: ContextChangeData):
+        """After context is changed.
+
+        This method is called after the context is changed in the host.
+
+        Can be overridden to implement host specific logic.
+
+        Args:
+            context_change_data (ContextChangeData): Object with information
+                about context change.
+
+        """
+        pass
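The new context-change flow gives DCC integrations one public entry point plus before/after hooks. A hedged sketch of a host using it; the class, the placeholder entities, and the assumption that name is the only abstract member are illustrative, and real code would fetch the entities from the AYON server or receive them from the Workfiles tool:

    from ayon_core.host import HostBase, ContextChangeReason


    class MyDCCHost(HostBase):
        name = "mydcc"

        def _before_context_change(self, context_change_data):
            # e.g. remember unsaved-changes state before switching
            print(f"Leaving context, reason: {context_change_data.reason}")

        def _after_context_change(self, context_change_data):
            # e.g. refresh menus/UI for the new folder and task
            print("Now in", context_change_data.folder_entity["path"])


    # Illustrative entities; a real host queries them via ayon_api
    folder_entity = {"id": "...", "path": "/assets/characters/hero"}
    task_entity = {"id": "...", "name": "modeling"}

    host = MyDCCHost()
    host.set_current_context(
        folder_entity,
        task_entity,
        reason=ContextChangeReason.workfile_open,
    )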
client/ayon_core/host/interfaces/__init__.py (new file, 66 lines)

from .exceptions import MissingMethodsError
from .workfiles import (
    IWorkfileHost,
    WorkfileInfo,
    PublishedWorkfileInfo,

    OpenWorkfileOptionalData,
    ListWorkfilesOptionalData,
    ListPublishedWorkfilesOptionalData,
    SaveWorkfileOptionalData,
    CopyWorkfileOptionalData,
    CopyPublishedWorkfileOptionalData,

    get_open_workfile_context,
    get_list_workfiles_context,
    get_list_published_workfiles_context,
    get_save_workfile_context,
    get_copy_workfile_context,
    get_copy_repre_workfile_context,

    OpenWorkfileContext,
    ListWorkfilesContext,
    ListPublishedWorkfilesContext,
    SaveWorkfileContext,
    CopyWorkfileContext,
    CopyPublishedWorkfileContext,
)
from .interfaces import (
    IPublishHost,
    INewPublisher,
    ILoadHost,
)


__all__ = (
    "MissingMethodsError",

    "IWorkfileHost",
    "WorkfileInfo",
    "PublishedWorkfileInfo",

    "OpenWorkfileOptionalData",
    "ListWorkfilesOptionalData",
    "ListPublishedWorkfilesOptionalData",
    "SaveWorkfileOptionalData",
    "CopyWorkfileOptionalData",
    "CopyPublishedWorkfileOptionalData",

    "get_open_workfile_context",
    "get_list_workfiles_context",
    "get_list_published_workfiles_context",
    "get_save_workfile_context",
    "get_copy_workfile_context",
    "get_copy_repre_workfile_context",

    "OpenWorkfileContext",
    "ListWorkfilesContext",
    "ListPublishedWorkfilesContext",
    "SaveWorkfileContext",
    "CopyWorkfileContext",
    "CopyPublishedWorkfileContext",

    "IPublishHost",
    "INewPublisher",
    "ILoadHost",
)
client/ayon_core/host/interfaces/exceptions.py (new file, 15 lines)

class MissingMethodsError(ValueError):
    """Exception when host miss some required methods for a specific workflow.

    Args:
        host (HostBase): Host implementation where are missing methods.
        missing_methods (list[str]): List of missing methods.
    """

    def __init__(self, host, missing_methods):
        joined_missing = ", ".join(
            ['"{}"'.format(item) for item in missing_methods]
        )
        super().__init__(
            f"Host \"{host.name}\" miss methods {joined_missing}"
        )
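Note that the relocated exception now reads host.name directly; the old fallback to guessing a name from the module path is gone, so the object passed in is expected to expose a name attribute. A short illustrative sketch (the dummy host is hypothetical):

    from ayon_core.host.interfaces import MissingMethodsError


    class _DummyHost:
        name = "maya"


    try:
        raise MissingMethodsError(_DummyHost(), ["open_workfile", "save_workfile"])
    except MissingMethodsError as exc:
        # Host "maya" miss methods "open_workfile", "save_workfile"
        print(exc)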
@@ -1,28 +1,6 @@
-from abc import ABC, abstractmethod
+from abc import abstractmethod

-
-class MissingMethodsError(ValueError):
-    """Exception when host miss some required methods for specific workflow.
-
-    Args:
-        host (HostBase): Host implementation where are missing methods.
-        missing_methods (list[str]): List of missing methods.
-    """
-
-    def __init__(self, host, missing_methods):
-        joined_missing = ", ".join(
-            ['"{}"'.format(item) for item in missing_methods]
-        )
-        host_name = getattr(host, "name", None)
-        if not host_name:
-            try:
-                host_name = host.__file__.replace("\\", "/").split("/")[-3]
-            except Exception:
-                host_name = str(host)
-        message = (
-            "Host \"{}\" miss methods {}".format(host_name, joined_missing)
-        )
-        super(MissingMethodsError, self).__init__(message)
+from .exceptions import MissingMethodsError


 class ILoadHost:
@@ -105,181 +83,6 @@ class ILoadHost:
         return self.get_containers()


-class IWorkfileHost(ABC):
-    """Implementation requirements to be able use workfile utils and tool."""
-
-    @staticmethod
-    def get_missing_workfile_methods(host):
-        """Look for missing methods on "old type" host implementation.
-
-        Method is used for validation of implemented functions related to
-        workfiles. Checks only existence of methods.
-
-        Args:
-            Union[ModuleType, HostBase]: Object of host where to look for
-                required methods.
-
-        Returns:
-            list[str]: Missing method implementations for workfiles workflow.
-        """
-
-        if isinstance(host, IWorkfileHost):
-            return []
-
-        required = [
-            "open_file",
-            "save_file",
-            "current_file",
-            "has_unsaved_changes",
-            "file_extensions",
-            "work_root",
-        ]
-        missing = []
-        for name in required:
-            if not hasattr(host, name):
-                missing.append(name)
-        return missing
-
-    @staticmethod
-    def validate_workfile_methods(host):
-        """Validate methods of "old type" host for workfiles workflow.
-
-        Args:
-            Union[ModuleType, HostBase]: Object of host to validate.
-
-        Raises:
-            MissingMethodsError: If there are missing methods on host
-                implementation.
-        """
-
-        missing = IWorkfileHost.get_missing_workfile_methods(host)
-        if missing:
-            raise MissingMethodsError(host, missing)
-
-    @abstractmethod
-    def get_workfile_extensions(self):
-        """Extensions that can be used as save.
-
-        Questions:
-            This could potentially use 'HostDefinition'.
-        """
-
-        return []
-
-    @abstractmethod
-    def save_workfile(self, dst_path=None):
-        """Save currently opened scene.
-
-        Args:
-            dst_path (str): Where the current scene should be saved. Or use
-                current path if 'None' is passed.
-        """
-
-        pass
-
-    @abstractmethod
-    def open_workfile(self, filepath):
-        """Open passed filepath in the host.
-
-        Args:
-            filepath (str): Path to workfile.
-        """
-
-        pass
-
-    @abstractmethod
-    def get_current_workfile(self):
-        """Retrieve path to current opened file.
-
-        Returns:
-            str: Path to file which is currently opened.
-            None: If nothing is opened.
-        """
-
-        return None
-
-    def workfile_has_unsaved_changes(self):
-        """Currently opened scene is saved.
-
-        Not all hosts can know if current scene is saved because the API of
-        DCC does not support it.
-
-        Returns:
-            bool: True if scene is saved and False if has unsaved
-                modifications.
-            None: Can't tell if workfiles has modifications.
-        """
-
-        return None
-
-    def work_root(self, session):
-        """Modify workdir per host.
-
-        Default implementation keeps workdir untouched.
-
-        Warnings:
-            We must handle this modification with more sophisticated way
-            because this can't be called out of DCC so opening of last workfile
-            (calculated before DCC is launched) is complicated. Also breaking
-            defined work template is not a good idea.
-            Only place where it's really used and can make sense is Maya. There
-            workspace.mel can modify subfolders where to look for maya files.
-
-        Args:
-            session (dict): Session context data.
-
-        Returns:
-            str: Path to new workdir.
-        """
-
-        return session["AYON_WORKDIR"]
-
-    # --- Deprecated method names ---
-    def file_extensions(self):
-        """Deprecated variant of 'get_workfile_extensions'.
-
-        Todo:
-            Remove when all usages are replaced.
-        """
-        return self.get_workfile_extensions()
-
-    def save_file(self, dst_path=None):
-        """Deprecated variant of 'save_workfile'.
-
-        Todo:
-            Remove when all usages are replaced.
-        """
-
-        self.save_workfile(dst_path)
-
-    def open_file(self, filepath):
-        """Deprecated variant of 'open_workfile'.
-
-        Todo:
-            Remove when all usages are replaced.
-        """
-
-        return self.open_workfile(filepath)
-
-    def current_file(self):
-        """Deprecated variant of 'get_current_workfile'.
-
-        Todo:
-            Remove when all usages are replaced.
-        """
-
-        return self.get_current_workfile()
-
-    def has_unsaved_changes(self):
-        """Deprecated variant of 'workfile_has_unsaved_changes'.
-
-        Todo:
-            Remove when all usages are replaced.
-        """
-
-        return self.workfile_has_unsaved_changes()
-

 class IPublishHost:
     """Functions related to new creation system in new publisher.
client/ayon_core/host/interfaces/workfiles.py (new file, 1797 lines)

File diff suppressed because it is too large.
@@ -8,6 +8,7 @@ import warnings
 from datetime import datetime
 from abc import ABC, abstractmethod
 from functools import lru_cache
+from typing import Optional, Any

 import platformdirs
 import ayon_api
@@ -15,22 +16,31 @@ import ayon_api
 _PLACEHOLDER = object()


-def _get_ayon_appdirs(*args):
+# TODO should use 'KeyError' or 'Exception' as base
+class RegistryItemNotFound(ValueError):
+    """Raised when the item is not found in the keyring."""
+
+
+class _Cache:
+    username = None
+
+
+def _get_ayon_appdirs(*args: str) -> str:
     return os.path.join(
         platformdirs.user_data_dir("AYON", "Ynput"),
         *args
     )


-def get_ayon_appdirs(*args):
+def get_ayon_appdirs(*args: str) -> str:
     """Local app data directory of AYON client.

     Deprecated:
         Use 'get_launcher_local_dir' or 'get_launcher_storage_dir' based on
-        use-case. Deprecation added 24/08/09 (0.4.4-dev.1).
+        a use-case. Deprecation added 24/08/09 (0.4.4-dev.1).

     Args:
-        *args (Iterable[str]): Subdirectories/files in local app data dir.
+        *args (Iterable[str]): Subdirectories/files in the local app data dir.

     Returns:
         str: Path to directory/file in local app data dir.
@@ -48,7 +58,7 @@ def get_ayon_appdirs(*args):


 def get_launcher_storage_dir(*subdirs: str) -> str:
-    """Get storage directory for launcher.
+    """Get a storage directory for launcher.

     Storage directory is used for storing shims, addons, dependencies, etc.

@@ -73,14 +83,14 @@ def get_launcher_storage_dir(*subdirs: str) -> str:


 def get_launcher_local_dir(*subdirs: str) -> str:
-    """Get local directory for launcher.
+    """Get a local directory for launcher.

-    Local directory is used for storing machine or user specific data.
+    Local directory is used for storing machine or user-specific data.

-    The location is user specific.
+    The location is user-specific.

     Note:
-        This function should be called at least once on bootstrap.
+        This function should be called at least once on the bootstrap.

     Args:
         *subdirs (str): Subdirectories relative to local dir.
@@ -97,7 +107,7 @@ def get_launcher_local_dir(*subdirs: str) -> str:


 def get_addons_resources_dir(addon_name: str, *args) -> str:
-    """Get directory for storing resources for addons.
+    """Get a directory for storing resources for addons.

     Some addons might need to store ad-hoc resources that are not part of
     addon client package (e.g. because of size). Studio might define
@@ -107,7 +117,7 @@ def get_addons_resources_dir(addon_name: str, *args) -> str:

     Args:
         addon_name (str): Addon name.
-        *args (str): Subfolders in resources directory.
+        *args (str): Subfolders in the resources directory.

     Returns:
         str: Path to resources directory.
@@ -120,6 +130,10 @@ def get_addons_resources_dir(addon_name: str, *args) -> str:
     return os.path.join(addons_resources_dir, addon_name, *args)


+class _FakeException(Exception):
+    """Placeholder exception used if real exception is not available."""
+
+
 class AYONSecureRegistry:
     """Store information using keyring.

@@ -130,9 +144,10 @@ class AYONSecureRegistry:
     identify which data were created by AYON.

     Args:
-        name(str): Name of registry used as identifier for data.
+        name(str): Name of registry used as the identifier for data.
+
     """
-    def __init__(self, name):
+    def __init__(self, name: str) -> None:
         try:
             import keyring

@@ -148,13 +163,12 @@ class AYONSecureRegistry:
             keyring.set_keyring(Windows.WinVaultKeyring())

         # Force "AYON" prefix
-        self._name = "/".join(("AYON", name))
+        self._name = f"AYON/{name}"

-    def set_item(self, name, value):
-        # type: (str, str) -> None
-        """Set sensitive item into system's keyring.
+    def set_item(self, name: str, value: str) -> None:
+        """Set sensitive item into the system's keyring.

-        This uses `Keyring module`_ to save sensitive stuff into system's
+        This uses `Keyring module`_ to save sensitive stuff into the system's
         keyring.

         Args:
@@ -168,22 +182,26 @@ class AYONSecureRegistry:
         import keyring

         keyring.set_password(self._name, name, value)
+        self.get_item.cache_clear()

     @lru_cache(maxsize=32)
-    def get_item(self, name, default=_PLACEHOLDER):
-        """Get value of sensitive item from system's keyring.
+    def get_item(
+        self, name: str, default: Any = _PLACEHOLDER
+    ) -> Optional[str]:
+        """Get value of sensitive item from the system's keyring.

         See also `Keyring module`_

         Args:
             name (str): Name of the item.
-            default (Any): Default value if item is not available.
+            default (Any): Default value if the item is not available.

         Returns:
             value (str): Value of the item.

         Raises:
-            ValueError: If item doesn't exist and default is not defined.
+            RegistryItemNotFound: If the item doesn't exist and default
+                is not defined.

         .. _Keyring module:
             https://github.com/jaraco/keyring
@@ -191,21 +209,29 @@ class AYONSecureRegistry:
         """
         import keyring

-        value = keyring.get_password(self._name, name)
+        # Capture 'ItemNotFoundException' exception (on linux)
+        try:
+            from secretstorage.exceptions import ItemNotFoundException
+        except ImportError:
+            ItemNotFoundException = _FakeException
+
+        try:
+            value = keyring.get_password(self._name, name)
+        except ItemNotFoundException:
+            value = None
+
         if value is not None:
             return value

         if default is not _PLACEHOLDER:
             return default

-        # NOTE Should raise `KeyError`
-        raise ValueError(
-            "Item {}:{} does not exist in keyring.".format(self._name, name)
+        raise RegistryItemNotFound(
+            f"Item {self._name}:{name} not found in keyring."
         )

-    def delete_item(self, name):
-        # type: (str) -> None
-        """Delete value stored in system's keyring.
+    def delete_item(self, name: str) -> None:
+        """Delete value stored in the system's keyring.

         See also `Keyring module`_

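In calling code, the default argument is the usual way to avoid handling the new exception. A hedged usage sketch; the import path, registry name and item names are illustrative, and a working keyring backend is assumed:

    from ayon_core.lib import AYONSecureRegistry

    registry = AYONSecureRegistry("example")
    registry.set_item("api_token", "secret-value")

    token = registry.get_item("api_token", None)       # stored value
    missing = registry.get_item("not-stored", None)    # falls back to default
    assert missing is None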
@@ -223,47 +249,38 @@ class AYONSecureRegistry:


 class ASettingRegistry(ABC):
-    """Abstract class defining structure of **SettingRegistry** class.
+    """Abstract class to defining structure of registry class.

-    It is implementing methods to store secure items into keyring, otherwise
-    mechanism for storing common items must be implemented in abstract
-    methods.
-
-    Attributes:
-        _name (str): Registry names.
-
     """
-
-    def __init__(self, name):
-        # type: (str) -> ASettingRegistry
-        super(ASettingRegistry, self).__init__()
-
+    def __init__(self, name: str) -> None:
         self._name = name
-        self._items = {}
-
-    def set_item(self, name, value):
-        # type: (str, str) -> None
-        """Set item to settings registry.
-
-        Args:
-            name (str): Name of the item.
-            value (str): Value of the item.
-
-        """
-        self._set_item(name, value)

     @abstractmethod
-    def _set_item(self, name, value):
-        # type: (str, str) -> None
-        # Implement it
-        pass
+    def _get_item(self, name: str) -> Any:
+        """Get item value from registry."""

-    def __setitem__(self, name, value):
-        self._items[name] = value
+    @abstractmethod
+    def _set_item(self, name: str, value: str) -> None:
+        """Set item value to registry."""
+
+    @abstractmethod
+    def _delete_item(self, name: str) -> None:
+        """Delete item from registry."""
+
+    def __getitem__(self, name: str) -> Any:
+        return self._get_item(name)
+
+    def __setitem__(self, name: str, value: str) -> None:
         self._set_item(name, value)

-    def get_item(self, name):
-        # type: (str) -> str
+    def __delitem__(self, name: str) -> None:
+        self._delete_item(name)
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    def get_item(self, name: str) -> str:
         """Get item from settings registry.

         Args:
@@ -273,22 +290,22 @@ class ASettingRegistry(ABC):
             value (str): Value of the item.

         Raises:
-            ValueError: If item doesn't exist.
+            RegistryItemNotFound: If the item doesn't exist.

         """
         return self._get_item(name)

-    @abstractmethod
-    def _get_item(self, name):
-        # type: (str) -> str
-        # Implement it
-        pass
+    def set_item(self, name: str, value: str) -> None:
+        """Set item to settings registry.

-    def __getitem__(self, name):
-        return self._get_item(name)
+        Args:
+            name (str): Name of the item.
+            value (str): Value of the item.

-    def delete_item(self, name):
-        # type: (str) -> None
+        """
+        self._set_item(name, value)
+
+    def delete_item(self, name: str) -> None:
         """Delete item from settings registry.

         Args:
@@ -297,16 +314,6 @@ class ASettingRegistry(ABC):
         """
         self._delete_item(name)

-    @abstractmethod
-    def _delete_item(self, name):
-        # type: (str) -> None
-        """Delete item from settings."""
-        pass
-
-    def __delitem__(self, name):
-        del self._items[name]
-        self._delete_item(name)
-

 class IniSettingRegistry(ASettingRegistry):
     """Class using :mod:`configparser`.
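The refactor leaves _get_item, _set_item and _delete_item as the abstract surface, with get_item/set_item/delete_item and the mapping dunders built on top. A hedged sketch of a minimal in-memory subclass (purely illustrative and not part of ayon-core; the import path is assumed):

    from ayon_core.lib.local_settings import ASettingRegistry


    class MemorySettingRegistry(ASettingRegistry):
        """Illustrative registry keeping items in a plain dict."""

        def __init__(self, name: str) -> None:
            super().__init__(name)
            self._data = {}

        def _get_item(self, name):
            # Missing keys raise, mirroring the file-backed registries
            return self._data[name]

        def _set_item(self, name, value):
            self._data[name] = value

        def _delete_item(self, name):
            del self._data[name]


    registry = MemorySettingRegistry("example")
    registry["resolution"] = "1920x1080"       # routed through _set_item
    assert registry.get_item("resolution") == "1920x1080"
    del registry["resolution"]                 # routed through _delete_item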
@@ -314,20 +321,17 @@ class IniSettingRegistry(ASettingRegistry):
     This class is using :mod:`configparser` (ini) files to store items.

     """
-    def __init__(self, name, path):
-        # type: (str, str) -> IniSettingRegistry
-        super(IniSettingRegistry, self).__init__(name)
+    def __init__(self, name: str, path: str) -> None:
+        super().__init__(name)
         # get registry file
-        self._registry_file = os.path.join(path, "{}.ini".format(name))
+        self._registry_file = os.path.join(path, f"{name}.ini")
         if not os.path.exists(self._registry_file):
             with open(self._registry_file, mode="w") as cfg:
                 print("# Settings registry", cfg)
                 now = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
-                print("# {}".format(now), cfg)
+                print(f"# {now}", cfg)

-    def set_item_section(self, section, name, value):
-        # type: (str, str, str) -> None
+    def set_item_section(self, section: str, name: str, value: str) -> None:
         """Set item to specific section of ini registry.

         If section doesn't exists, it is created.
@@ -350,12 +354,10 @@ class IniSettingRegistry(ASettingRegistry):
         with open(self._registry_file, mode="w") as cfg:
             config.write(cfg)

-    def _set_item(self, name, value):
-        # type: (str, str) -> None
+    def _set_item(self, name: str, value: str) -> None:
         self.set_item_section("MAIN", name, value)

-    def set_item(self, name, value):
-        # type: (str, str) -> None
+    def set_item(self, name: str, value: str) -> None:
         """Set item to settings ini file.

         This saves item to ``DEFAULT`` section of ini as each item there
@@ -368,10 +370,9 @@ class IniSettingRegistry(ASettingRegistry):
         """
         # this does the some, overridden just for different docstring.
         # we cast value to str as ini options values must be strings.
-        super(IniSettingRegistry, self).set_item(name, str(value))
+        super().set_item(name, str(value))

-    def get_item(self, name):
-        # type: (str) -> str
+    def get_item(self, name: str) -> str:
         """Gets item from settings ini file.

         This gets settings from ``DEFAULT`` section of ini file as each item
@@ -384,19 +385,18 @@ class IniSettingRegistry(ASettingRegistry):
             str: Value of item.

         Raises:
-            ValueError: If value doesn't exist.
+            RegistryItemNotFound: If value doesn't exist.

         """
-        return super(IniSettingRegistry, self).get_item(name)
+        return super().get_item(name)

     @lru_cache(maxsize=32)
-    def get_item_from_section(self, section, name):
-        # type: (str, str) -> str
+    def get_item_from_section(self, section: str, name: str) -> str:
         """Get item from section of ini file.

         This will read ini file and try to get item value from specified
-        section. If that section or item doesn't exist, :exc:`ValueError`
-        is risen.
+        section. If that section or item doesn't exist,
+        :exc:`RegistryItemNotFound` is risen.

         Args:
             section (str): Name of ini section.
@@ -406,7 +406,7 @@ class IniSettingRegistry(ASettingRegistry):
             str: Item value.

         Raises:
-            ValueError: If value doesn't exist.
+            RegistryItemNotFound: If value doesn't exist.

         """
         config = configparser.ConfigParser()
@@ -414,16 +414,15 @@ class IniSettingRegistry(ASettingRegistry):
         try:
             value = config[section][name]
         except KeyError:
-            raise ValueError(
-                "Registry doesn't contain value {}:{}".format(section, name))
+            raise RegistryItemNotFound(
+                f"Registry doesn't contain value {section}:{name}"
+            )
         return value

-    def _get_item(self, name):
-        # type: (str) -> str
+    def _get_item(self, name: str) -> str:
         return self.get_item_from_section("MAIN", name)

-    def delete_item_from_section(self, section, name):
-        # type: (str, str) -> None
+    def delete_item_from_section(self, section: str, name: str) -> None:
         """Delete item from section in ini file.

         Args:
@@ -431,7 +430,7 @@ class IniSettingRegistry(ASettingRegistry):
             name (str): Name of the item.

         Raises:
-            ValueError: If item doesn't exist.
+            RegistryItemNotFound: If the item doesn't exist.

         """
         self.get_item_from_section.cache_clear()
@@ -440,8 +439,9 @@ class IniSettingRegistry(ASettingRegistry):
         try:
             _ = config[section][name]
         except KeyError:
-            raise ValueError(
-                "Registry doesn't contain value {}:{}".format(section, name))
+            raise RegistryItemNotFound(
+                f"Registry doesn't contain value {section}:{name}"
+            )
         config.remove_option(section, name)

         # if section is empty, delete it
@@ -457,29 +457,28 @@ class IniSettingRegistry(ASettingRegistry):


 class JSONSettingRegistry(ASettingRegistry):
-    """Class using json file as storage."""
+    """Class using a json file as storage."""

-    def __init__(self, name, path):
-        # type: (str, str) -> JSONSettingRegistry
-        super(JSONSettingRegistry, self).__init__(name)
-        #: str: name of registry file
-        self._registry_file = os.path.join(path, "{}.json".format(name))
+    def __init__(self, name: str, path: str) -> None:
+        super().__init__(name)
+        self._registry_file = os.path.join(path, f"{name}.json")
         now = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
         header = {
             "__metadata__": {"generated": now},
             "registry": {}
         }

-        if not os.path.exists(os.path.dirname(self._registry_file)):
-            os.makedirs(os.path.dirname(self._registry_file), exist_ok=True)
+        # Use 'os.path.dirname' in case someone uses slashes in 'name'
+        dirpath = os.path.dirname(self._registry_file)
+        if not os.path.exists(dirpath):
+            os.makedirs(dirpath, exist_ok=True)
         if not os.path.exists(self._registry_file):
             with open(self._registry_file, mode="w") as cfg:
                 json.dump(header, cfg, indent=4)

     @lru_cache(maxsize=32)
-    def _get_item(self, name):
-        # type: (str) -> object
-        """Get item value from registry json.
+    def _get_item(self, name: str) -> str:
+        """Get item value from the registry.

         Note:
             See :meth:`ayon_core.lib.JSONSettingRegistry.get_item`
@@ -490,29 +489,13 @@ class JSONSettingRegistry(ASettingRegistry):
         try:
             value = data["registry"][name]
         except KeyError:
-            raise ValueError(
-                "Registry doesn't contain value {}".format(name))
+            raise RegistryItemNotFound(
+                f"Registry doesn't contain value {name}"
+            )
         return value

-    def get_item(self, name):
-        # type: (str) -> object
-        """Get item value from registry json.
-
-        Args:
-            name (str): Name of the item.
-
-        Returns:
-            value of the item
-
-        Raises:
-            ValueError: If item is not found in registry file.
-
-        """
-        return self._get_item(name)
-
-    def _set_item(self, name, value):
-        # type: (str, object) -> None
-        """Set item value to registry json.
+    def _set_item(self, name: str, value: str) -> None:
+        """Set item value to the registry.

         Note:
             See :meth:`ayon_core.lib.JSONSettingRegistry.set_item`
@@ -524,41 +507,39 @@ class JSONSettingRegistry(ASettingRegistry):
             cfg.truncate(0)
             cfg.seek(0)
             json.dump(data, cfg, indent=4)

-    def set_item(self, name, value):
-        # type: (str, object) -> None
-        """Set item and its value into json registry file.
-
-        Args:
-            name (str): name of the item.
-            value (Any): value of the item.
-
-        """
-        self._set_item(name, value)
-
-    def _delete_item(self, name):
-        # type: (str) -> None
         self._get_item.cache_clear()
+
+    def _delete_item(self, name: str) -> None:
         with open(self._registry_file, "r+") as cfg:
             data = json.load(cfg)
             del data["registry"][name]
             cfg.truncate(0)
             cfg.seek(0)
             json.dump(data, cfg, indent=4)
+        self._get_item.cache_clear()


 class AYONSettingsRegistry(JSONSettingRegistry):
     """Class handling AYON general settings registry.

     Args:
-        name (Optional[str]): Name of the registry.
-    """
+        name (Optional[str]): Name of the registry. Using 'None' or not
+            passing name is deprecated.

-    def __init__(self, name=None):
+    """
+    def __init__(self, name: Optional[str] = None) -> None:
         if not name:
             name = "AYON_settings"
+            warnings.warn(
+                (
+                    "Used 'AYONSettingsRegistry' without 'name' argument."
+                    " The argument will be required in future versions."
+                ),
+                DeprecationWarning,
+                stacklevel=2,
+            )
         path = get_launcher_storage_dir()
-        super(AYONSettingsRegistry, self).__init__(name, path)
+        super().__init__(name, path)


 def get_local_site_id():
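Callers should now give AYONSettingsRegistry an explicit name to avoid the new deprecation warning, and can catch RegistryItemNotFound for missing keys. A hedged sketch; the import paths and names are assumptions for illustration:

    from ayon_core.lib import AYONSettingsRegistry
    from ayon_core.lib.local_settings import RegistryItemNotFound

    # Deprecated: falls back to "AYON_settings" and emits a warning
    # registry = AYONSettingsRegistry()

    registry = AYONSettingsRegistry("example_tool")
    registry.set_item("last_project", "demo_Commercial")

    try:
        value = registry.get_item("last_project")
    except RegistryItemNotFound:
        value = None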
@@ -591,10 +572,26 @@ def get_local_site_id():
 def get_ayon_username():
     """AYON username used for templates and publishing.

-    Uses curet ayon api username.
+    Uses current ayon api username.

     Returns:
         str: Username.

     """
-    return ayon_api.get_user()["name"]
+    # Look for username in the connection stack
+    # - this is used when service is working as other user
+    #   (e.g. in background sync)
+    # TODO @iLLiCiTiT - do not use private attribute of 'ServerAPI', rather
+    #   use public method to get username from connection stack.
+    con = ayon_api.get_server_api_connection()
+    user_stack = getattr(con, "_as_user_stack", None)
+    if user_stack is not None:
+        username = user_stack.username
+        if username is not None:
+            return username
+
+    # Cache the username to avoid multiple API calls
+    # - it is not expected that user would change
+    if _Cache.username is None:
+        _Cache.username = ayon_api.get_user()["name"]
+    return _Cache.username
@@ -3,6 +3,7 @@ import re
 import copy
 import numbers
 import warnings
+import platform
 from string import Formatter
 import typing
 from typing import List, Dict, Any, Set
@@ -12,6 +13,7 @@ if typing.TYPE_CHECKING:

 SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)")
 OPTIONAL_PATTERN = re.compile(r"(<.*?[^{0]*>)[^0-9]*?")
+_IS_WINDOWS = platform.system().lower() == "windows"


 class TemplateUnsolved(Exception):
@@ -277,8 +279,11 @@ class TemplateResult(str):
         """Convert to normalized path."""

         cls = self.__class__
+        path = str(self)
+        if _IS_WINDOWS:
+            path = path.replace("\\", "/")
         return cls(
-            os.path.normpath(self.replace("\\", "/")),
+            os.path.normpath(path),
             self.template,
             self.solved,
             self.used_values,
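The practical effect of the hunk above is that backslash replacement now happens only on Windows, where os.path.normpath would otherwise emit backslash separators; on POSIX a backslash is an ordinary filename character and is left untouched. A quick illustration with plain strings, independent of TemplateResult:

    import os

    p = "C:/proj\\shots/sh010"
    if os.name == "nt":
        # normpath on Windows emits backslashes, hence the pre-conversion
        assert os.path.normpath(p.replace("\\", "/")) == "C:\\proj\\shots\\sh010"
    else:
        # On POSIX the backslash is just a character and stays in the name
        assert os.path.normpath(p) == "C:/proj\\shots/sh010"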
@@ -6,6 +6,7 @@ from .exceptions import (
     AnatomyTemplateUnsolved,
 )
 from .anatomy import Anatomy
+from .templates import AnatomyTemplateResult, AnatomyStringTemplate


 __all__ = (
@ -16,4 +17,7 @@ __all__ = (
|
||||||
"AnatomyTemplateUnsolved",
|
"AnatomyTemplateUnsolved",
|
||||||
|
|
||||||
"Anatomy",
|
"Anatomy",
|
||||||
|
|
||||||
|
"AnatomyTemplateResult",
|
||||||
|
"AnatomyStringTemplate",
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import copy
|
import copy
|
||||||
|
import platform
|
||||||
import collections
|
import collections
|
||||||
import numbers
|
import numbers
|
||||||
|
|
||||||
|
|
@ -15,6 +16,7 @@ from .exceptions import (
|
||||||
AnatomyTemplateUnsolved,
|
AnatomyTemplateUnsolved,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
_IS_WINDOWS = platform.system().lower() == "windows"
|
||||||
_PLACEHOLDER = object()
|
_PLACEHOLDER = object()
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -526,6 +528,14 @@ class AnatomyTemplates:
|
||||||
root_key = "{" + root_key + "}"
|
root_key = "{" + root_key + "}"
|
||||||
output = output.replace(str(used_value), root_key)
|
output = output.replace(str(used_value), root_key)
|
||||||
|
|
||||||
|
# Make sure rootless path is with forward slashes
|
||||||
|
if _IS_WINDOWS:
|
||||||
|
output = output.replace("\\", "/")
|
||||||
|
|
||||||
|
# Make sure there are no double slashes
|
||||||
|
while "//" in output:
|
||||||
|
output = output.replace("//", "/")
|
||||||
|
|
||||||
return output
|
return output
|
||||||
|
|
||||||
def format(self, data, strict=True):
|
def format(self, data, strict=True):
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,12 @@
|
||||||
"""Core pipeline functionality"""
|
"""Core pipeline functionality"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import logging
|
import logging
|
||||||
import platform
|
import platform
|
||||||
import uuid
|
import uuid
|
||||||
|
import warnings
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
import pyblish.api
|
import pyblish.api
|
||||||
|
|
@ -14,8 +17,6 @@ from ayon_core.host import HostBase
|
||||||
from ayon_core.lib import (
|
from ayon_core.lib import (
|
||||||
is_in_tests,
|
is_in_tests,
|
||||||
initialize_ayon_connection,
|
initialize_ayon_connection,
|
||||||
emit_event,
|
|
||||||
version_up
|
|
||||||
)
|
)
|
||||||
from ayon_core.addon import load_addons, AddonsManager
|
from ayon_core.addon import load_addons, AddonsManager
|
||||||
from ayon_core.settings import get_project_settings
|
from ayon_core.settings import get_project_settings
|
||||||
|
|
@ -23,13 +24,7 @@ from ayon_core.settings import get_project_settings
|
||||||
from .publish.lib import filter_pyblish_plugins
|
from .publish.lib import filter_pyblish_plugins
|
||||||
from .anatomy import Anatomy
|
from .anatomy import Anatomy
|
||||||
from .template_data import get_template_data_with_names
|
from .template_data import get_template_data_with_names
|
||||||
from .workfile import (
|
from .workfile import get_custom_workfile_template_by_string_context
|
||||||
get_workdir,
|
|
||||||
get_custom_workfile_template_by_string_context,
|
|
||||||
get_workfile_template_key_from_context,
|
|
||||||
get_last_workfile,
|
|
||||||
MissingWorkdirError,
|
|
||||||
)
|
|
||||||
from . import (
|
from . import (
|
||||||
register_loader_plugin_path,
|
register_loader_plugin_path,
|
||||||
register_inventory_action_path,
|
register_inventory_action_path,
|
||||||
|
|
@ -75,7 +70,7 @@ def _get_addons_manager():
|
||||||
|
|
||||||
|
|
||||||
def register_root(path):
|
def register_root(path):
|
||||||
"""Register currently active root"""
|
"""DEPRECATED Register currently active root."""
|
||||||
log.info("Registering root: %s" % path)
|
log.info("Registering root: %s" % path)
|
||||||
_registered_root["_"] = path
|
_registered_root["_"] = path
|
||||||
|
|
||||||
|
|
@ -94,18 +89,29 @@ def registered_root():
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
dict[str, str]: Root paths.
|
dict[str, str]: Root paths.
|
||||||
"""
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
warnings.warn(
|
||||||
|
"Used deprecated function 'registered_root'. Please use 'Anatomy'"
|
||||||
|
" to get roots.",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2,
|
||||||
|
)
|
||||||
return _registered_root["_"]
|
return _registered_root["_"]
|
||||||
|
|
||||||
|
|
||||||
def install_host(host):
|
def install_host(host: HostBase) -> None:
|
||||||
"""Install `host` into the running Python session.
|
"""Install `host` into the running Python session.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
host (HostBase): A host interface object.
|
host (HostBase): A host interface object.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
if not isinstance(host, HostBase):
|
||||||
|
log.error(
|
||||||
|
f"Host must be a subclass of 'HostBase', got '{type(host)}'."
|
||||||
|
)
|
||||||
|
|
||||||
global _is_installed
|
global _is_installed
|
||||||
|
|
||||||
_is_installed = True
|
_is_installed = True
|
||||||
|
|
@ -183,7 +189,7 @@ def install_ayon_plugins(project_name=None, host_name=None):
|
||||||
register_inventory_action_path(INVENTORY_PATH)
|
register_inventory_action_path(INVENTORY_PATH)
|
||||||
|
|
||||||
if host_name is None:
|
if host_name is None:
|
||||||
host_name = os.environ.get("AYON_HOST_NAME")
|
host_name = get_current_host_name()
|
||||||
|
|
||||||
addons_manager = _get_addons_manager()
|
addons_manager = _get_addons_manager()
|
||||||
publish_plugin_dirs = addons_manager.collect_publish_plugin_paths(
|
publish_plugin_dirs = addons_manager.collect_publish_plugin_paths(
|
||||||
|
|
@@ -366,6 +372,24 @@ def get_current_task_name():
     return get_global_context()["task_name"]


+def get_current_project_settings() -> dict[str, Any]:
+    """Project settings for the current context project.
+
+    Returns:
+        dict[str, Any]: Project settings for the current context project.
+
+    Raises:
+        ValueError: If current project is not set.
+
+    """
+    project_name = get_current_project_name()
+    if not project_name:
+        raise ValueError(
+            "Current project is not set. Can't get project settings."
+        )
+    return get_project_settings(project_name)
+
+
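A short usage sketch for the new helper; the import path and the 'core' settings key are assumptions based on how the surrounding module is used elsewhere:

    from ayon_core.pipeline.context_tools import get_current_project_settings

    try:
        project_settings = get_current_project_settings()
    except ValueError:
        # No project is set in the current context (e.g. outside a host).
        project_settings = {}

    core_settings = project_settings.get("core", {})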
def get_current_project_entity(fields=None):
|
def get_current_project_entity(fields=None):
|
||||||
"""Helper function to get project document based on global Session.
|
"""Helper function to get project document based on global Session.
|
||||||
|
|
||||||
|
|
@ -505,66 +529,64 @@ def get_current_context_custom_workfile_template(project_settings=None):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def change_current_context(folder_entity, task_entity, template_key=None):
|
_PLACEHOLDER = object()
|
||||||
|
|
||||||
|
|
||||||
|
def change_current_context(
|
||||||
|
folder_entity: dict[str, Any],
|
||||||
|
task_entity: dict[str, Any],
|
||||||
|
*,
|
||||||
|
template_key: Optional[str] = _PLACEHOLDER,
|
||||||
|
reason: Optional[str] = None,
|
||||||
|
project_entity: Optional[dict[str, Any]] = None,
|
||||||
|
anatomy: Optional[Anatomy] = None,
|
||||||
|
) -> dict[str, str]:
|
||||||
"""Update active Session to a new task work area.
|
"""Update active Session to a new task work area.
|
||||||
|
|
||||||
This updates the live Session to a different task under folder.
|
This updates the live Session to a different task under a folder.
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
* This function does a lot of things related to workfiles which
|
||||||
|
extends arguments options a lot.
|
||||||
|
* We might want to implement 'set_current_context' on host integration
|
||||||
|
instead. But `AYON_WORKDIR`, which is related to 'IWorkfileHost',
|
||||||
|
would not be available in that case which might break some
|
||||||
|
logic.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
folder_entity (Dict[str, Any]): Folder entity to set.
|
folder_entity (Dict[str, Any]): Folder entity to set.
|
||||||
task_entity (Dict[str, Any]): Task entity to set.
|
task_entity (Dict[str, Any]): Task entity to set.
|
||||||
template_key (Union[str, None]): Prepared template key to be used for
|
template_key (Optional[str]): DEPRECATED: Prepared template key to
|
||||||
workfile template in Anatomy.
|
be used for workfile template in Anatomy.
|
||||||
|
reason (Optional[str]): Reason for changing context.
|
||||||
|
anatomy (Optional[Anatomy]): Anatomy object used for workdir
|
||||||
|
calculation.
|
||||||
|
project_entity (Optional[dict[str, Any]]): Project entity used for
|
||||||
|
workdir calculation.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Dict[str, str]: The changed key, values in the current Session.
|
dict[str, str]: New context data.
|
||||||
"""
|
|
||||||
|
|
||||||
project_name = get_current_project_name()
|
"""
|
||||||
workdir = None
|
if template_key is not _PLACEHOLDER:
|
||||||
folder_path = None
|
warnings.warn(
|
||||||
task_name = None
|
(
|
||||||
if folder_entity:
|
"Used deprecated argument 'template_key' in"
|
||||||
folder_path = folder_entity["path"]
|
" 'change_current_context'."
|
||||||
if task_entity:
|
" It is not necessary to pass it in anymore."
|
||||||
task_name = task_entity["name"]
|
),
|
||||||
project_entity = ayon_api.get_project(project_name)
|
DeprecationWarning,
|
||||||
host_name = get_current_host_name()
|
stacklevel=2,
|
||||||
workdir = get_workdir(
|
|
||||||
project_entity,
|
|
||||||
folder_entity,
|
|
||||||
task_entity,
|
|
||||||
host_name,
|
|
||||||
template_key=template_key
|
|
||||||
)
|
)
|
||||||
|
|
||||||
envs = {
|
host = registered_host()
|
||||||
"AYON_PROJECT_NAME": project_name,
|
return host.set_current_context(
|
||||||
"AYON_FOLDER_PATH": folder_path,
|
folder_entity,
|
||||||
"AYON_TASK_NAME": task_name,
|
task_entity,
|
||||||
"AYON_WORKDIR": workdir,
|
reason=reason,
|
||||||
}
|
project_entity=project_entity,
|
||||||
|
anatomy=anatomy,
|
||||||
# Update the Session and environments. Pop from environments all keys with
|
)
|
||||||
# value set to None.
|
|
||||||
for key, value in envs.items():
|
|
||||||
if value is None:
|
|
||||||
os.environ.pop(key, None)
|
|
||||||
else:
|
|
||||||
os.environ[key] = value
|
|
||||||
|
|
||||||
data = envs.copy()
|
|
||||||
|
|
||||||
# Convert env keys to human readable keys
|
|
||||||
data["project_name"] = project_name
|
|
||||||
data["folder_path"] = folder_path
|
|
||||||
data["task_name"] = task_name
|
|
||||||
data["workdir_path"] = workdir
|
|
||||||
|
|
||||||
# Emit session change
|
|
||||||
emit_event("taskChanged", data)
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
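With the rework, context switching is delegated to the registered host's 'set_current_context' and 'template_key' is only kept as a deprecated keyword. A hypothetical call site; the ayon_api entity lookups are illustrative:

    import ayon_api

    def switch_task(project_name: str, folder_path: str, task_name: str):
        folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
        task_entity = ayon_api.get_task_by_name(
            project_name, folder_entity["id"], task_name
        )
        # Keyword-only arguments; 'template_key' is deprecated and omitted.
        return change_current_context(
            folder_entity,
            task_entity,
            reason="user switched task",
        )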
def get_process_id():
|
def get_process_id():
|
||||||
|
|
@@ -583,53 +605,16 @@ def get_process_id():


 def version_up_current_workfile():
-    """Function to increment and save workfile
+    """DEPRECATED Function to increment and save workfile.

+    Please use 'save_next_version' from 'ayon_core.pipeline.workfile' instead.
+
     """
-    host = registered_host()
-    project_name = get_current_project_name()
-    folder_path = get_current_folder_path()
-    task_name = get_current_task_name()
-    host_name = get_current_host_name()
-
-    template_key = get_workfile_template_key_from_context(
-        project_name,
-        folder_path,
-        task_name,
-        host_name,
-    )
-    anatomy = Anatomy(project_name)
-
-    data = get_template_data_with_names(
-        project_name, folder_path, task_name, host_name
-    )
-    data["root"] = anatomy.roots
-
-    work_template = anatomy.get_template_item("work", template_key)
-
-    # Define saving file extension
-    extensions = host.get_workfile_extensions()
-    current_file = host.get_current_workfile()
-    if current_file:
-        extensions = [os.path.splitext(current_file)[-1]]
-
-    work_root = work_template["directory"].format_strict(data)
-    file_template = work_template["file"].template
-    last_workfile_path = get_last_workfile(
-        work_root, file_template, data, extensions, True
-    )
-    # `get_last_workfile` will return the first expected file version
-    # if no files exist yet. In that case, if they do not exist we will
-    # want to save v001
-    new_workfile_path = last_workfile_path
-    if os.path.exists(new_workfile_path):
-        new_workfile_path = version_up(new_workfile_path)
-
-    # Raise an error if the parent folder doesn't exist as `host.save_workfile`
-    # is not supposed/able to create missing folders.
-    parent_folder = os.path.dirname(new_workfile_path)
-    if not os.path.exists(parent_folder):
-        raise MissingWorkdirError(
-            f"Work area directory '{parent_folder}' does not exist.")
-
-    host.save_workfile(new_workfile_path)
+    warnings.warn(
+        "Used deprecated 'version_up_current_workfile' please use"
+        " 'save_next_version' from 'ayon_core.pipeline.workfile' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    from ayon_core.pipeline.workfile import save_next_version
+    save_next_version()
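Migrating a call site away from the deprecated helper is a one-liner, since the old function now only forwards to the new API:

    from ayon_core.pipeline.workfile import save_next_version

    # Increments and saves the current workfile through the registered host.
    save_next_version()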
@ -7,6 +7,10 @@ import opentimelineio as otio
|
||||||
from opentimelineio import opentime as _ot
|
from opentimelineio import opentime as _ot
|
||||||
|
|
||||||
|
|
||||||
|
# https://github.com/AcademySoftwareFoundation/OpenTimelineIO/issues/1822
|
||||||
|
OTIO_EPSILON = 1e-9
|
||||||
|
|
||||||
|
|
||||||
def otio_range_to_frame_range(otio_range):
|
def otio_range_to_frame_range(otio_range):
|
||||||
start = _ot.to_frames(
|
start = _ot.to_frames(
|
||||||
otio_range.start_time, otio_range.start_time.rate)
|
otio_range.start_time, otio_range.start_time.rate)
|
||||||
|
|
|
||||||
|
|
@ -720,11 +720,13 @@ def get_representation_path(representation, root=None):
|
||||||
str: fullpath of the representation
|
str: fullpath of the representation
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if root is None:
|
if root is None:
|
||||||
from ayon_core.pipeline import registered_root
|
from ayon_core.pipeline import get_current_project_name, Anatomy
|
||||||
|
|
||||||
root = registered_root()
|
anatomy = Anatomy(get_current_project_name())
|
||||||
|
return get_representation_path_with_anatomy(
|
||||||
|
representation, anatomy
|
||||||
|
)
|
||||||
|
|
||||||
def path_from_representation():
|
def path_from_representation():
|
||||||
try:
|
try:
|
||||||
|
|
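When the project is already known, the same resolution can be done explicitly, which also allows reusing one Anatomy object across many representations. A sketch under that assumption:

    from ayon_core.pipeline import Anatomy
    from ayon_core.pipeline.load import get_representation_path_with_anatomy

    def resolve_paths(project_name: str, representations: list) -> list:
        # Creating Anatomy is relatively expensive, so build it once.
        anatomy = Anatomy(project_name)
        return [
            get_representation_path_with_anatomy(repre, anatomy)
            for repre in representations
        ]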
@ -772,7 +774,7 @@ def get_representation_path(representation, root=None):
|
||||||
|
|
||||||
dir_path, file_name = os.path.split(path)
|
dir_path, file_name = os.path.split(path)
|
||||||
if not os.path.exists(dir_path):
|
if not os.path.exists(dir_path):
|
||||||
return
|
return None
|
||||||
|
|
||||||
base_name, ext = os.path.splitext(file_name)
|
base_name, ext = os.path.splitext(file_name)
|
||||||
file_name_items = None
|
file_name_items = None
|
||||||
|
|
@ -782,7 +784,7 @@ def get_representation_path(representation, root=None):
|
||||||
file_name_items = base_name.split("%")
|
file_name_items = base_name.split("%")
|
||||||
|
|
||||||
if not file_name_items:
|
if not file_name_items:
|
||||||
return
|
return None
|
||||||
|
|
||||||
filename_start = file_name_items[0]
|
filename_start = file_name_items[0]
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -51,7 +51,7 @@ class DiscoverResult:
|
||||||
"*** Discovered {} plugins".format(len(self.plugins))
|
"*** Discovered {} plugins".format(len(self.plugins))
|
||||||
)
|
)
|
||||||
for cls in self.plugins:
|
for cls in self.plugins:
|
||||||
lines.append("- {}".format(cls.__class__.__name__))
|
lines.append("- {}".format(cls.__name__))
|
||||||
|
|
||||||
# Plugin that were defined to be ignored
|
# Plugin that were defined to be ignored
|
||||||
if self.ignored_plugins or full_report:
|
if self.ignored_plugins or full_report:
|
||||||
|
|
|
||||||
|
|
@ -5,6 +5,7 @@ import sys
|
||||||
import inspect
|
import inspect
|
||||||
import copy
|
import copy
|
||||||
import warnings
|
import warnings
|
||||||
|
import hashlib
|
||||||
import xml.etree.ElementTree
|
import xml.etree.ElementTree
|
||||||
from typing import TYPE_CHECKING, Optional, Union, List
|
from typing import TYPE_CHECKING, Optional, Union, List
|
||||||
|
|
||||||
|
|
@ -243,32 +244,38 @@ def publish_plugins_discover(
|
||||||
|
|
||||||
for path in paths:
|
for path in paths:
|
||||||
path = os.path.normpath(path)
|
path = os.path.normpath(path)
|
||||||
if not os.path.isdir(path):
|
filenames = []
|
||||||
continue
|
if os.path.isdir(path):
|
||||||
|
filenames.extend(
|
||||||
|
name
|
||||||
|
for name in os.listdir(path)
|
||||||
|
if (
|
||||||
|
os.path.isfile(os.path.join(path, name))
|
||||||
|
and not name.startswith("_")
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
filenames.append(os.path.basename(path))
|
||||||
|
path = os.path.dirname(path)
|
||||||
|
|
||||||
for fname in os.listdir(path):
|
dirpath_hash = hashlib.md5(path.encode("utf-8")).hexdigest()
|
||||||
if fname.startswith("_"):
|
for filename in filenames:
|
||||||
continue
|
basename, ext = os.path.splitext(filename)
|
||||||
|
if ext.lower() != ".py":
|
||||||
abspath = os.path.join(path, fname)
|
|
||||||
|
|
||||||
if not os.path.isfile(abspath):
|
|
||||||
continue
|
|
||||||
|
|
||||||
mod_name, mod_ext = os.path.splitext(fname)
|
|
||||||
|
|
||||||
if mod_ext != ".py":
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
filepath = os.path.join(path, filename)
|
||||||
|
module_name = f"{dirpath_hash}.{basename}"
|
||||||
try:
|
try:
|
||||||
module = import_filepath(
|
module = import_filepath(
|
||||||
abspath, mod_name, sys_module_name=mod_name)
|
filepath, module_name, sys_module_name=module_name
|
||||||
|
)
|
||||||
|
|
||||||
except Exception as err: # noqa: BLE001
|
except Exception as err: # noqa: BLE001
|
||||||
# we need broad exception to catch all possible errors.
|
# we need broad exception to catch all possible errors.
|
||||||
result.crashed_file_paths[abspath] = sys.exc_info()
|
result.crashed_file_paths[filepath] = sys.exc_info()
|
||||||
|
|
||||||
log.debug('Skipped: "%s" (%s)', mod_name, err)
|
log.debug('Skipped: "%s" (%s)', filepath, err)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
for plugin in pyblish.plugin.plugins_from_module(module):
|
for plugin in pyblish.plugin.plugins_from_module(module):
|
||||||
|
|
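The hashed directory prefix keeps plugin files with the same basename but from different folders from colliding in 'sys.modules'. The naming scheme in isolation:

    import hashlib
    import os

    def unique_module_name(filepath: str) -> str:
        dirpath, filename = os.path.split(os.path.normpath(filepath))
        basename = os.path.splitext(filename)[0]
        dirpath_hash = hashlib.md5(dirpath.encode("utf-8")).hexdigest()
        return f"{dirpath_hash}.{basename}"

    # Two 'collect_scene.py' files from different plugin folders get
    # different module names.
    print(unique_module_name("/addons/publish/collect_scene.py"))
    print(unique_module_name("/studio/publish/collect_scene.py"))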
@ -354,12 +361,18 @@ def get_plugin_settings(plugin, project_settings, log, category=None):
|
||||||
# Use project settings based on a category name
|
# Use project settings based on a category name
|
||||||
if category:
|
if category:
|
||||||
try:
|
try:
|
||||||
return (
|
output = (
|
||||||
project_settings
|
project_settings
|
||||||
[category]
|
[category]
|
||||||
["publish"]
|
["publish"]
|
||||||
[plugin.__name__]
|
[plugin.__name__]
|
||||||
)
|
)
|
||||||
|
warnings.warn(
|
||||||
|
"Please fill 'settings_category'"
|
||||||
|
f" for plugin '{plugin.__name__}'.",
|
||||||
|
DeprecationWarning
|
||||||
|
)
|
||||||
|
return output
|
||||||
except KeyError:
|
except KeyError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
@ -384,12 +397,18 @@ def get_plugin_settings(plugin, project_settings, log, category=None):
|
||||||
category_from_file = "core"
|
category_from_file = "core"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return (
|
output = (
|
||||||
project_settings
|
project_settings
|
||||||
[category_from_file]
|
[category_from_file]
|
||||||
[plugin_kind]
|
[plugin_kind]
|
||||||
[plugin.__name__]
|
[plugin.__name__]
|
||||||
)
|
)
|
||||||
|
warnings.warn(
|
||||||
|
"Please fill 'settings_category'"
|
||||||
|
f" for plugin '{plugin.__name__}'.",
|
||||||
|
DeprecationWarning
|
||||||
|
)
|
||||||
|
return output
|
||||||
except KeyError:
|
except KeyError:
|
||||||
pass
|
pass
|
||||||
return {}
|
return {}
|
||||||
|
|
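The warning fires only when settings are found through the legacy category guess; declaring 'settings_category' on the plugin avoids it. A simplified sketch of the lookup it replaces; names are illustrative:

    def lookup_publish_plugin_settings(project_settings: dict, plugin) -> dict:
        category = getattr(plugin, "settings_category", None)
        if not category:
            # Without the attribute the deprecated module-path guess is used.
            return {}
        return (
            project_settings
            .get(category, {})
            .get("publish", {})
            .get(plugin.__name__, {})
        )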
@ -1048,7 +1067,7 @@ def main_cli_publish(
|
||||||
|
|
||||||
discover_result = publish_plugins_discover()
|
discover_result = publish_plugins_discover()
|
||||||
publish_plugins = discover_result.plugins
|
publish_plugins = discover_result.plugins
|
||||||
print("\n".join(discover_result.get_report(only_errors=False)))
|
print(discover_result.get_report(only_errors=False))
|
||||||
|
|
||||||
# Error exit as soon as any error occurs.
|
# Error exit as soon as any error occurs.
|
||||||
error_format = ("Failed {plugin.__name__}: "
|
error_format = ("Failed {plugin.__name__}: "
|
||||||
|
|
|
||||||
|
|
@ -4,6 +4,8 @@ from .path_resolving import (
|
||||||
get_workdir_with_workdir_data,
|
get_workdir_with_workdir_data,
|
||||||
get_workdir,
|
get_workdir,
|
||||||
|
|
||||||
|
get_last_workfile_with_version_from_paths,
|
||||||
|
get_last_workfile_from_paths,
|
||||||
get_last_workfile_with_version,
|
get_last_workfile_with_version,
|
||||||
get_last_workfile,
|
get_last_workfile,
|
||||||
|
|
||||||
|
|
@ -11,12 +13,21 @@ from .path_resolving import (
|
||||||
get_custom_workfile_template_by_string_context,
|
get_custom_workfile_template_by_string_context,
|
||||||
|
|
||||||
create_workdir_extra_folders,
|
create_workdir_extra_folders,
|
||||||
|
|
||||||
|
get_comments_from_workfile_paths,
|
||||||
)
|
)
|
||||||
|
|
||||||
from .utils import (
|
from .utils import (
|
||||||
should_use_last_workfile_on_launch,
|
should_use_last_workfile_on_launch,
|
||||||
should_open_workfiles_tool_on_launch,
|
should_open_workfiles_tool_on_launch,
|
||||||
MissingWorkdirError,
|
MissingWorkdirError,
|
||||||
|
|
||||||
|
save_workfile_info,
|
||||||
|
save_current_workfile_to,
|
||||||
|
save_workfile_with_current_context,
|
||||||
|
save_next_version,
|
||||||
|
copy_workfile_to_context,
|
||||||
|
find_workfile_rootless_path,
|
||||||
)
|
)
|
||||||
|
|
||||||
from .build_workfile import BuildWorkfile
|
from .build_workfile import BuildWorkfile
|
||||||
|
|
@ -37,18 +48,29 @@ __all__ = (
|
||||||
"get_workdir_with_workdir_data",
|
"get_workdir_with_workdir_data",
|
||||||
"get_workdir",
|
"get_workdir",
|
||||||
|
|
||||||
|
"get_last_workfile_with_version_from_paths",
|
||||||
|
"get_last_workfile_from_paths",
|
||||||
"get_last_workfile_with_version",
|
"get_last_workfile_with_version",
|
||||||
"get_last_workfile",
|
"get_last_workfile",
|
||||||
|
"find_workfile_rootless_path",
|
||||||
|
|
||||||
"get_custom_workfile_template",
|
"get_custom_workfile_template",
|
||||||
"get_custom_workfile_template_by_string_context",
|
"get_custom_workfile_template_by_string_context",
|
||||||
|
|
||||||
"create_workdir_extra_folders",
|
"create_workdir_extra_folders",
|
||||||
|
|
||||||
|
"get_comments_from_workfile_paths",
|
||||||
|
|
||||||
"should_use_last_workfile_on_launch",
|
"should_use_last_workfile_on_launch",
|
||||||
"should_open_workfiles_tool_on_launch",
|
"should_open_workfiles_tool_on_launch",
|
||||||
"MissingWorkdirError",
|
"MissingWorkdirError",
|
||||||
|
|
||||||
|
"save_workfile_info",
|
||||||
|
"save_current_workfile_to",
|
||||||
|
"save_workfile_with_current_context",
|
||||||
|
"save_next_version",
|
||||||
|
"copy_workfile_to_context",
|
||||||
|
|
||||||
"BuildWorkfile",
|
"BuildWorkfile",
|
||||||
|
|
||||||
"discover_workfile_build_plugins",
|
"discover_workfile_build_plugins",
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,12 @@
|
||||||
|
from __future__ import annotations
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import copy
|
import copy
|
||||||
import platform
|
import platform
|
||||||
|
import warnings
|
||||||
|
import typing
|
||||||
from typing import Optional, Dict, Any
|
from typing import Optional, Dict, Any
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
|
|
||||||
|
|
@ -15,6 +19,9 @@ from ayon_core.lib import (
|
||||||
from ayon_core.pipeline import version_start, Anatomy
|
from ayon_core.pipeline import version_start, Anatomy
|
||||||
from ayon_core.pipeline.template_data import get_template_data
|
from ayon_core.pipeline.template_data import get_template_data
|
||||||
|
|
||||||
|
if typing.TYPE_CHECKING:
|
||||||
|
from ayon_core.pipeline.anatomy import AnatomyTemplateResult
|
||||||
|
|
||||||
|
|
||||||
def get_workfile_template_key_from_context(
|
def get_workfile_template_key_from_context(
|
||||||
project_name: str,
|
project_name: str,
|
||||||
|
|
@ -111,7 +118,7 @@ def get_workdir_with_workdir_data(
|
||||||
anatomy=None,
|
anatomy=None,
|
||||||
template_key=None,
|
template_key=None,
|
||||||
project_settings=None
|
project_settings=None
|
||||||
):
|
) -> "AnatomyTemplateResult":
|
||||||
"""Fill workdir path from entered data and project's anatomy.
|
"""Fill workdir path from entered data and project's anatomy.
|
||||||
|
|
||||||
It is possible to pass only project's name instead of project's anatomy but
|
It is possible to pass only project's name instead of project's anatomy but
|
||||||
|
|
@ -130,9 +137,9 @@ def get_workdir_with_workdir_data(
|
||||||
if 'template_key' is not passed.
|
if 'template_key' is not passed.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
TemplateResult: Workdir path.
|
AnatomyTemplateResult: Workdir path.
|
||||||
"""
|
|
||||||
|
|
||||||
|
"""
|
||||||
if not anatomy:
|
if not anatomy:
|
||||||
anatomy = Anatomy(project_name)
|
anatomy = Anatomy(project_name)
|
||||||
|
|
||||||
|
|
@ -147,7 +154,7 @@ def get_workdir_with_workdir_data(
|
||||||
template_obj = anatomy.get_template_item(
|
template_obj = anatomy.get_template_item(
|
||||||
"work", template_key, "directory"
|
"work", template_key, "directory"
|
||||||
)
|
)
|
||||||
# Output is TemplateResult object which contain useful data
|
# Output is AnatomyTemplateResult object which contain useful data
|
||||||
output = template_obj.format_strict(workdir_data)
|
output = template_obj.format_strict(workdir_data)
|
||||||
if output:
|
if output:
|
||||||
return output.normalized()
|
return output.normalized()
|
||||||
|
|
@ -155,14 +162,14 @@ def get_workdir_with_workdir_data(
|
||||||
|
|
||||||
|
|
||||||
def get_workdir(
|
def get_workdir(
|
||||||
project_entity,
|
project_entity: dict[str, Any],
|
||||||
folder_entity,
|
folder_entity: dict[str, Any],
|
||||||
task_entity,
|
task_entity: dict[str, Any],
|
||||||
host_name,
|
host_name: str,
|
||||||
anatomy=None,
|
anatomy=None,
|
||||||
template_key=None,
|
template_key=None,
|
||||||
project_settings=None
|
project_settings=None
|
||||||
):
|
) -> "AnatomyTemplateResult":
|
||||||
"""Fill workdir path from entered data and project's anatomy.
|
"""Fill workdir path from entered data and project's anatomy.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
|
|
@ -174,8 +181,8 @@ def get_workdir(
|
||||||
is stored under `AYON_HOST_NAME` key.
|
is stored under `AYON_HOST_NAME` key.
|
||||||
anatomy (Anatomy): Optional argument. Anatomy object is created using
|
anatomy (Anatomy): Optional argument. Anatomy object is created using
|
||||||
project name from `project_entity`. It is preferred to pass this
|
project name from `project_entity`. It is preferred to pass this
|
||||||
argument as initialization of a new Anatomy object may be time
|
argument as initialization of a new Anatomy object may be
|
||||||
consuming.
|
time-consuming.
|
||||||
template_key (str): Key of work templates in anatomy templates. Default
|
template_key (str): Key of work templates in anatomy templates. Default
|
||||||
value is defined in `get_workdir_with_workdir_data`.
|
value is defined in `get_workdir_with_workdir_data`.
|
||||||
project_settings(Dict[str, Any]): Prepared project settings for
|
project_settings(Dict[str, Any]): Prepared project settings for
|
||||||
|
|
@ -183,9 +190,9 @@ def get_workdir(
|
||||||
if 'template_key' is not passed.
|
if 'template_key' is not passed.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
TemplateResult: Workdir path.
|
AnatomyTemplateResult: Workdir path.
|
||||||
"""
|
|
||||||
|
|
||||||
|
"""
|
||||||
if not anatomy:
|
if not anatomy:
|
||||||
anatomy = Anatomy(
|
anatomy = Anatomy(
|
||||||
project_entity["name"], project_entity=project_entity
|
project_entity["name"], project_entity=project_entity
|
||||||
|
|
@ -197,7 +204,7 @@ def get_workdir(
|
||||||
task_entity,
|
task_entity,
|
||||||
host_name,
|
host_name,
|
||||||
)
|
)
|
||||||
# Output is TemplateResult object which contain useful data
|
# Output is AnatomyTemplateResult object which contain useful data
|
||||||
return get_workdir_with_workdir_data(
|
return get_workdir_with_workdir_data(
|
||||||
workdir_data,
|
workdir_data,
|
||||||
anatomy.project_name,
|
anatomy.project_name,
|
||||||
|
|
@ -207,12 +214,141 @@ def get_workdir(
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def get_last_workfile_with_version(
|
@dataclass
|
||||||
workdir, file_template, fill_data, extensions
|
class WorkfileParsedData:
|
||||||
):
|
version: Optional[int] = None
|
||||||
|
comment: Optional[str] = None
|
||||||
|
ext: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class WorkfileDataParser:
|
||||||
|
"""Parse dynamic data from existing filenames based on template.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_template (str): Workfile file template.
|
||||||
|
data (dict[str, Any]): Data to fill the template with.
|
||||||
|
|
||||||
|
"""
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
file_template: str,
|
||||||
|
data: dict[str, Any],
|
||||||
|
):
|
||||||
|
data = copy.deepcopy(data)
|
||||||
|
file_template = str(file_template)
|
||||||
|
# Use placeholders that will never be in the filename
|
||||||
|
ext_replacement = "CIextID"
|
||||||
|
version_replacement = "CIversionID"
|
||||||
|
comment_replacement = "CIcommentID"
|
||||||
|
data["version"] = version_replacement
|
||||||
|
data["comment"] = comment_replacement
|
||||||
|
for pattern, replacement in (
|
||||||
|
# Replace `.{ext}` with `{ext}` so we are sure dot is not
|
||||||
|
# at the end
|
||||||
|
(r"\.?{ext}", ext_replacement),
|
||||||
|
):
|
||||||
|
file_template = re.sub(pattern, replacement, file_template)
|
||||||
|
|
||||||
|
file_template = StringTemplate(file_template)
|
||||||
|
# Prepare template that does contain 'comment'
|
||||||
|
comment_template = re.escape(str(file_template.format_strict(data)))
|
||||||
|
# Prepare template that does not contain 'comment'
|
||||||
|
# - comment is usually marked as optional and in that case the regex
|
||||||
|
# to find the comment is different based on the filename
|
||||||
|
# - if filename contains comment then 'comment_template' will match
|
||||||
|
# - if filename does not contain comment then 'file_template' will
|
||||||
|
# match
|
||||||
|
data.pop("comment")
|
||||||
|
file_template = re.escape(str(file_template.format_strict(data)))
|
||||||
|
for src, replacement in (
|
||||||
|
(ext_replacement, r"(?P<ext>\..*)"),
|
||||||
|
(version_replacement, r"(?P<version>[0-9]+)"),
|
||||||
|
(comment_replacement, r"(?P<comment>.+?)"),
|
||||||
|
):
|
||||||
|
comment_template = comment_template.replace(src, replacement)
|
||||||
|
file_template = file_template.replace(src, replacement)
|
||||||
|
|
||||||
|
kwargs = {}
|
||||||
|
if platform.system().lower() == "windows":
|
||||||
|
kwargs["flags"] = re.IGNORECASE
|
||||||
|
|
||||||
|
# Match from beginning to end of string to be safe
|
||||||
|
self._comment_template = re.compile(f"^{comment_template}$", **kwargs)
|
||||||
|
self._file_template = re.compile(f"^{file_template}$", **kwargs)
|
||||||
|
|
||||||
|
def parse_data(self, filename: str) -> WorkfileParsedData:
|
||||||
|
"""Parse the dynamic data from a filename."""
|
||||||
|
match = self._comment_template.match(filename)
|
||||||
|
if not match:
|
||||||
|
match = self._file_template.match(filename)
|
||||||
|
|
||||||
|
if not match:
|
||||||
|
return WorkfileParsedData()
|
||||||
|
|
||||||
|
kwargs = match.groupdict()
|
||||||
|
version = kwargs.get("version")
|
||||||
|
if version is not None:
|
||||||
|
kwargs["version"] = int(version)
|
||||||
|
return WorkfileParsedData(**kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_dynamic_data_from_workfile(
|
||||||
|
filename: str,
|
||||||
|
file_template: str,
|
||||||
|
template_data: dict[str, Any],
|
||||||
|
) -> WorkfileParsedData:
|
||||||
|
"""Parse dynamic data from a workfile filename.
|
||||||
|
|
||||||
|
Dynamic data are 'version', 'comment' and 'ext'.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filename (str): Workfile filename.
|
||||||
|
file_template (str): Workfile file template.
|
||||||
|
template_data (dict[str, Any]): Data to fill the template with.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
WorkfileParsedData: Dynamic data parsed from the filename.
|
||||||
|
|
||||||
|
"""
|
||||||
|
parser = WorkfileDataParser(file_template, template_data)
|
||||||
|
return parser.parse_data(filename)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_dynamic_data_from_workfiles(
|
||||||
|
filenames: list[str],
|
||||||
|
file_template: str,
|
||||||
|
template_data: dict[str, Any],
|
||||||
|
) -> dict[str, WorkfileParsedData]:
|
||||||
|
"""Parse dynamic data from a workfiles filenames.
|
||||||
|
|
||||||
|
Dynamic data are 'version', 'comment' and 'ext'.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filenames (list[str]): Workfiles filenames.
|
||||||
|
file_template (str): Workfile file template.
|
||||||
|
template_data (dict[str, Any]): Data to fill the template with.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict[str, WorkfileParsedData]: Dynamic data parsed from the filenames
|
||||||
|
by filename.
|
||||||
|
|
||||||
|
"""
|
||||||
|
parser = WorkfileDataParser(file_template, template_data)
|
||||||
|
return {
|
||||||
|
filename: parser.parse_data(filename)
|
||||||
|
for filename in filenames
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
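A usage sketch for the new parser; the template, filenames and template data are made up for the example, and the module path is an assumption (the function is defined in 'path_resolving'):

    from ayon_core.pipeline.workfile.path_resolving import (
        parse_dynamic_data_from_workfiles,
    )

    file_template = "{folder[name]}_{task[name]}<_{comment}>_v{version:0>3}.{ext}"
    template_data = {"folder": {"name": "sh010"}, "task": {"name": "anim"}}
    filenames = ["sh010_anim_v003.ma", "sh010_anim_retime_v004.ma"]

    parsed = parse_dynamic_data_from_workfiles(
        filenames, file_template, template_data
    )
    for filename, data in parsed.items():
        # 'data' is a WorkfileParsedData; unmatched parts stay None.
        print(filename, data.version, data.comment, data.ext)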
def get_last_workfile_with_version_from_paths(
|
||||||
|
filepaths: list[str],
|
||||||
|
file_template: str,
|
||||||
|
template_data: dict[str, Any],
|
||||||
|
extensions: set[str],
|
||||||
|
) -> tuple[Optional[str], Optional[int]]:
|
||||||
"""Return last workfile version.
|
"""Return last workfile version.
|
||||||
|
|
||||||
Usign workfile template and it's filling data find most possible last
|
Using the workfile template and its template data find most possible last
|
||||||
version of workfile which was created for the context.
|
version of workfile which was created for the context.
|
||||||
|
|
||||||
Functionality is fully based on knowing which keys are optional or what
|
Functionality is fully based on knowing which keys are optional or what
|
||||||
|
|
@ -222,50 +358,43 @@ def get_last_workfile_with_version(
|
||||||
last workfile.
|
last workfile.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
workdir (str): Path to dir where workfiles are stored.
|
filepaths (list[str]): Workfile paths.
|
||||||
file_template (str): Template of file name.
|
file_template (str): Template of file name.
|
||||||
fill_data (Dict[str, Any]): Data for filling template.
|
template_data (Dict[str, Any]): Data for filling template.
|
||||||
extensions (Iterable[str]): All allowed file extensions of workfile.
|
extensions (set[str]): All allowed file extensions of workfile.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Tuple[Union[str, None], Union[int, None]]: Last workfile with version
|
tuple[Optional[str], Optional[int]]: Last workfile with version
|
||||||
if there is any workfile otherwise None for both.
|
if there is any workfile otherwise None for both.
|
||||||
"""
|
|
||||||
|
|
||||||
if not os.path.exists(workdir):
|
"""
|
||||||
|
if not filepaths:
|
||||||
return None, None
|
return None, None
|
||||||
|
|
||||||
dotted_extensions = set()
|
dotted_extensions = set()
|
||||||
for ext in extensions:
|
for ext in extensions:
|
||||||
if not ext.startswith("."):
|
if not ext.startswith("."):
|
||||||
ext = ".{}".format(ext)
|
ext = f".{ext}"
|
||||||
dotted_extensions.add(ext)
|
dotted_extensions.add(re.escape(ext))
|
||||||
|
|
||||||
# Fast match on extension
|
|
||||||
filenames = [
|
|
||||||
filename
|
|
||||||
for filename in os.listdir(workdir)
|
|
||||||
if os.path.splitext(filename)[-1] in dotted_extensions
|
|
||||||
]
|
|
||||||
|
|
||||||
# Build template without optionals, version to digits only regex
|
# Build template without optionals, version to digits only regex
|
||||||
# and comment to any definable value.
|
# and comment to any definable value.
|
||||||
# Escape extensions dot for regex
|
# Escape extensions dot for regex
|
||||||
regex_exts = [
|
ext_expression = "(?:" + "|".join(dotted_extensions) + ")"
|
||||||
"\\" + ext
|
|
||||||
for ext in dotted_extensions
|
for pattern, replacement in (
|
||||||
]
|
# Replace `.{ext}` with `{ext}` so we are sure dot is not at the end
|
||||||
ext_expression = "(?:" + "|".join(regex_exts) + ")"
|
(r"\.?{ext}", ext_expression),
|
||||||
|
# Replace optional keys with optional content regex
|
||||||
|
(r"<.*?>", r".*?"),
|
||||||
|
# Replace `{version}` with group regex
|
||||||
|
(r"{version.*?}", r"([0-9]+)"),
|
||||||
|
(r"{comment.*?}", r".+?"),
|
||||||
|
):
|
||||||
|
file_template = re.sub(pattern, replacement, file_template)
|
||||||
|
|
||||||
# Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end
|
|
||||||
file_template = re.sub(r"\.?{ext}", ext_expression, file_template)
|
|
||||||
# Replace optional keys with optional content regex
|
|
||||||
file_template = re.sub(r"<.*?>", r".*?", file_template)
|
|
||||||
# Replace `{version}` with group regex
|
|
||||||
file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template)
|
|
||||||
file_template = re.sub(r"{comment.*?}", r".+?", file_template)
|
|
||||||
file_template = StringTemplate.format_strict_template(
|
file_template = StringTemplate.format_strict_template(
|
||||||
file_template, fill_data
|
file_template, template_data
|
||||||
)
|
)
|
||||||
|
|
||||||
# Match with ignore case on Windows due to the Windows
|
# Match with ignore case on Windows due to the Windows
|
||||||
|
|
@ -278,64 +407,189 @@ def get_last_workfile_with_version(
|
||||||
|
|
||||||
# Get highest version among existing matching files
|
# Get highest version among existing matching files
|
||||||
version = None
|
version = None
|
||||||
output_filenames = []
|
output_filepaths = []
|
||||||
for filename in sorted(filenames):
|
for filepath in sorted(filepaths):
|
||||||
|
filename = os.path.basename(filepath)
|
||||||
match = re.match(file_template, filename, **kwargs)
|
match = re.match(file_template, filename, **kwargs)
|
||||||
if not match:
|
if not match:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not match.groups():
|
if not match.groups():
|
||||||
output_filenames.append(filename)
|
output_filepaths.append(filename)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
file_version = int(match.group(1))
|
file_version = int(match.group(1))
|
||||||
if version is None or file_version > version:
|
if version is None or file_version > version:
|
||||||
output_filenames[:] = []
|
output_filepaths.clear()
|
||||||
version = file_version
|
version = file_version
|
||||||
|
|
||||||
if file_version == version:
|
if file_version == version:
|
||||||
output_filenames.append(filename)
|
output_filepaths.append(filepath)
|
||||||
|
|
||||||
output_filename = None
|
# Use file modification time to use most recent file if there are
|
||||||
if output_filenames:
|
# multiple workfiles with the same version
|
||||||
if len(output_filenames) == 1:
|
output_filepath = None
|
||||||
output_filename = output_filenames[0]
|
last_time = None
|
||||||
else:
|
for _output_filepath in output_filepaths:
|
||||||
last_time = None
|
mod_time = None
|
||||||
for _output_filename in output_filenames:
|
if os.path.exists(_output_filepath):
|
||||||
full_path = os.path.join(workdir, _output_filename)
|
mod_time = os.path.getmtime(_output_filepath)
|
||||||
mod_time = os.path.getmtime(full_path)
|
if (
|
||||||
if last_time is None or last_time < mod_time:
|
last_time is None
|
||||||
output_filename = _output_filename
|
or (mod_time is not None and last_time < mod_time)
|
||||||
last_time = mod_time
|
):
|
||||||
|
output_filepath = _output_filepath
|
||||||
|
last_time = mod_time
|
||||||
|
|
||||||
return output_filename, version
|
return output_filepath, version
|
||||||
|
|
||||||
|
|
||||||
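An illustrative call; paths and template are invented. The highest parsed version wins and, when several files share it, the most recently modified one is returned:

    from ayon_core.pipeline.workfile import (
        get_last_workfile_with_version_from_paths,
    )

    filepaths = [
        "/proj/sh010/work/anim/sh010_anim_v001.ma",
        "/proj/sh010/work/anim/sh010_anim_v002.ma",
    ]
    last_path, last_version = get_last_workfile_with_version_from_paths(
        filepaths,
        "{folder[name]}_{task[name]}_v{version:0>3}.{ext}",
        {"folder": {"name": "sh010"}, "task": {"name": "anim"}},
        {"ma"},
    )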
def get_last_workfile(
|
def get_last_workfile_from_paths(
|
||||||
workdir, file_template, fill_data, extensions, full_path=False
|
filepaths: list[str],
|
||||||
):
|
file_template: str,
|
||||||
"""Return last workfile filename.
|
template_data: dict[str, Any],
|
||||||
|
extensions: set[str],
|
||||||
|
) -> Optional[str]:
|
||||||
|
"""Return the last workfile filename.
|
||||||
|
|
||||||
Returns file with version 1 if there is not workfile yet.
|
Returns the file with version 1 if there is not workfile yet.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filepaths (list[str]): Paths to workfiles.
|
||||||
|
file_template (str): Template of file name.
|
||||||
|
template_data (dict[str, Any]): Data for filling template.
|
||||||
|
extensions (set[str]): All allowed file extensions of workfile.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Optional[str]: Last workfile path.
|
||||||
|
|
||||||
|
"""
|
||||||
|
filepath, _version = get_last_workfile_with_version_from_paths(
|
||||||
|
filepaths, file_template, template_data, extensions
|
||||||
|
)
|
||||||
|
return filepath
|
||||||
|
|
||||||
|
|
||||||
|
def _filter_dir_files_by_ext(
|
||||||
|
dirpath: str,
|
||||||
|
extensions: set[str],
|
||||||
|
) -> tuple[list[str], set[str]]:
|
||||||
|
"""Filter files by extensions.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
dirpath (str): List of file paths.
|
||||||
|
extensions (set[str]): Set of file extensions.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
tuple[list[str], set[str]]: Filtered list of file paths.
|
||||||
|
|
||||||
|
"""
|
||||||
|
dotted_extensions = set()
|
||||||
|
for ext in extensions:
|
||||||
|
if not ext.startswith("."):
|
||||||
|
ext = f".{ext}"
|
||||||
|
dotted_extensions.add(ext)
|
||||||
|
|
||||||
|
if not os.path.exists(dirpath):
|
||||||
|
return [], dotted_extensions
|
||||||
|
|
||||||
|
filtered_paths = [
|
||||||
|
os.path.join(dirpath, filename)
|
||||||
|
for filename in os.listdir(dirpath)
|
||||||
|
if os.path.splitext(filename)[-1] in dotted_extensions
|
||||||
|
]
|
||||||
|
return filtered_paths, dotted_extensions
|
||||||
|
|
||||||
|
|
||||||
|
def get_last_workfile_with_version(
|
||||||
|
workdir: str,
|
||||||
|
file_template: str,
|
||||||
|
template_data: dict[str, Any],
|
||||||
|
extensions: set[str],
|
||||||
|
) -> tuple[Optional[str], Optional[int]]:
|
||||||
|
"""Return last workfile version.
|
||||||
|
|
||||||
|
Using the workfile template and its filling data to find the most possible
|
||||||
|
last version of workfile which was created for the context.
|
||||||
|
|
||||||
|
Functionality is fully based on knowing which keys are optional or what
|
||||||
|
values are expected as value.
|
||||||
|
|
||||||
|
The last modified file is used if more files can be considered as
|
||||||
|
last workfile.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
workdir (str): Path to dir where workfiles are stored.
|
workdir (str): Path to dir where workfiles are stored.
|
||||||
file_template (str): Template of file name.
|
file_template (str): Template of file name.
|
||||||
fill_data (Dict[str, Any]): Data for filling template.
|
template_data (dict[str, Any]): Data for filling template.
|
||||||
extensions (Iterable[str]): All allowed file extensions of workfile.
|
extensions (set[str]): All allowed file extensions of workfile.
|
||||||
full_path (Optional[bool]): Full path to file is returned if
|
|
||||||
set to True.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: Last or first workfile as filename of full path to filename.
|
tuple[Optional[str], Optional[int]]: Last workfile with version
|
||||||
|
if there is any workfile otherwise None for both.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
filename, _version = get_last_workfile_with_version(
|
if not os.path.exists(workdir):
|
||||||
workdir, file_template, fill_data, extensions
|
return None, None
|
||||||
|
|
||||||
|
filepaths, dotted_extensions = _filter_dir_files_by_ext(
|
||||||
|
workdir, extensions
|
||||||
)
|
)
|
||||||
if filename is None:
|
|
||||||
data = copy.deepcopy(fill_data)
|
return get_last_workfile_with_version_from_paths(
|
||||||
|
filepaths,
|
||||||
|
file_template,
|
||||||
|
template_data,
|
||||||
|
dotted_extensions,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_last_workfile(
|
||||||
|
workdir: str,
|
||||||
|
file_template: str,
|
||||||
|
template_data: dict[str, Any],
|
||||||
|
extensions: set[str],
|
||||||
|
full_path: bool = False,
|
||||||
|
) -> str:
|
||||||
|
"""Return last the workfile filename.
|
||||||
|
|
||||||
|
Returns the first file name/path if there are no workfiles yet.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
workdir (str): Path to dir where workfiles are stored.
|
||||||
|
file_template (str): Template of file name.
|
||||||
|
template_data (Dict[str, Any]): Data for filling template.
|
||||||
|
extensions (Iterable[str]): All allowed file extensions of workfile.
|
||||||
|
full_path (bool): Return full path to the file or only filename.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: Last or first workfile file name or path based on
|
||||||
|
'full_path' value.
|
||||||
|
|
||||||
|
"""
|
||||||
|
# TODO (iLLiCiTiT): Remove the argument 'full_path' and return only full
|
||||||
|
# path. As far as I can tell it is always called with 'full_path' set
|
||||||
|
# to 'True'.
|
||||||
|
# - it has to be 2 step operation, first warn about having it 'False', and
|
||||||
|
# then warn about having it filled.
|
||||||
|
if full_path is False:
|
||||||
|
warnings.warn(
|
||||||
|
"Argument 'full_path' will be removed and will return"
|
||||||
|
" only full path in future.",
|
||||||
|
DeprecationWarning,
|
||||||
|
)
|
||||||
|
|
||||||
|
filepaths, dotted_extensions = _filter_dir_files_by_ext(
|
||||||
|
workdir, extensions
|
||||||
|
)
|
||||||
|
filepath = get_last_workfile_from_paths(
|
||||||
|
filepaths,
|
||||||
|
file_template,
|
||||||
|
template_data,
|
||||||
|
dotted_extensions
|
||||||
|
)
|
||||||
|
if filepath is None:
|
||||||
|
data = copy.deepcopy(template_data)
|
||||||
data["version"] = version_start.get_versioning_start(
|
data["version"] = version_start.get_versioning_start(
|
||||||
data["project"]["name"],
|
data["project"]["name"],
|
||||||
data["app"],
|
data["app"],
|
||||||
|
|
@ -344,15 +598,15 @@ def get_last_workfile(
|
||||||
product_type="workfile"
|
product_type="workfile"
|
||||||
)
|
)
|
||||||
data.pop("comment", None)
|
data.pop("comment", None)
|
||||||
if not data.get("ext"):
|
if data.get("ext") is None:
|
||||||
data["ext"] = extensions[0]
|
data["ext"] = next(iter(extensions), "")
|
||||||
data["ext"] = data["ext"].lstrip(".")
|
data["ext"] = data["ext"].lstrip(".")
|
||||||
filename = StringTemplate.format_strict_template(file_template, data)
|
filename = StringTemplate.format_strict_template(file_template, data)
|
||||||
|
filepath = os.path.join(workdir, filename)
|
||||||
|
|
||||||
if full_path:
|
if full_path:
|
||||||
return os.path.normpath(os.path.join(workdir, filename))
|
return os.path.normpath(filepath)
|
||||||
|
return os.path.basename(filepath)
|
||||||
return filename
|
|
||||||
|
|
||||||
|
|
||||||
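A call sketch with 'full_path=True', the form that avoids the deprecation warning. The template data is illustrative; when no workfile exists yet the function needs enough data (project, task, app) to format a first version:

    from ayon_core.pipeline.workfile import get_last_workfile

    last_workfile = get_last_workfile(
        "/proj/sh010/work/anim",
        "{folder[name]}_{task[name]}_v{version:0>3}.{ext}",
        {
            "project": {"name": "proj"},
            "folder": {"name": "sh010"},
            "task": {"name": "anim", "type": "Animation"},
            "app": "maya",
        },
        {"ma"},
        full_path=True,
    )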
def get_custom_workfile_template(
|
def get_custom_workfile_template(
|
||||||
|
|
@ -389,11 +643,10 @@ def get_custom_workfile_template(
|
||||||
project_settings(Dict[str, Any]): Preloaded project settings.
|
project_settings(Dict[str, Any]): Preloaded project settings.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: Path to template or None if none of profiles match current
|
Optional[str]: Path to template or None if none of profiles match
|
||||||
context. Existence of formatted path is not validated.
|
current context. Existence of formatted path is not validated.
|
||||||
None: If no profile is matching context.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
"""
|
||||||
log = Logger.get_logger("CustomWorkfileResolve")
|
log = Logger.get_logger("CustomWorkfileResolve")
|
||||||
|
|
||||||
project_name = project_entity["name"]
|
project_name = project_entity["name"]
|
||||||
|
|
@ -562,3 +815,112 @@ def create_workdir_extra_folders(
|
||||||
fullpath = os.path.join(workdir, subfolder)
|
fullpath = os.path.join(workdir, subfolder)
|
||||||
if not os.path.exists(fullpath):
|
if not os.path.exists(fullpath):
|
||||||
os.makedirs(fullpath)
|
os.makedirs(fullpath)
|
||||||
|
|
||||||
|
|
||||||
|
class CommentMatcher:
|
||||||
|
"""Use anatomy and work file data to parse comments from filenames.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
extensions (set[str]): Set of extensions.
|
||||||
|
file_template (StringTemplate): Workfile file template.
|
||||||
|
data (dict[str, Any]): Data to fill the template with.
|
||||||
|
|
||||||
|
"""
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
extensions: set[str],
|
||||||
|
file_template: StringTemplate,
|
||||||
|
data: dict[str, Any]
|
||||||
|
):
|
||||||
|
warnings.warn(
|
||||||
|
"Class 'CommentMatcher' is deprecated. Please"
|
||||||
|
" use 'parse_dynamic_data_from_workfiles' instead.",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2,
|
||||||
|
)
|
||||||
|
self._fname_regex = None
|
||||||
|
|
||||||
|
if "{comment}" not in file_template:
|
||||||
|
# Don't look for comment if template doesn't allow it
|
||||||
|
return
|
||||||
|
|
||||||
|
# Create a regex group for extensions
|
||||||
|
any_extension = "(?:{})".format(
|
||||||
|
"|".join(re.escape(ext.lstrip(".")) for ext in extensions)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Use placeholders that will never be in the filename
|
||||||
|
temp_data = copy.deepcopy(data)
|
||||||
|
temp_data["comment"] = "<<comment>>"
|
||||||
|
temp_data["version"] = "<<version>>"
|
||||||
|
temp_data["ext"] = "<<ext>>"
|
||||||
|
|
||||||
|
fname_pattern = re.escape(
|
||||||
|
file_template.format_strict(temp_data)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Replace comment and version with something we can match with regex
|
||||||
|
replacements = (
|
||||||
|
("<<comment>>", r"(?P<comment>.+)"),
|
||||||
|
("<<version>>", r"[0-9]+"),
|
||||||
|
("<<ext>>", any_extension),
|
||||||
|
)
|
||||||
|
for src, dest in replacements:
|
||||||
|
fname_pattern = fname_pattern.replace(re.escape(src), dest)
|
||||||
|
|
||||||
|
# Match from beginning to end of string to be safe
|
||||||
|
self._fname_regex = re.compile(f"^{fname_pattern}$")
|
||||||
|
|
||||||
|
def parse_comment(self, filename: str) -> Optional[str]:
|
||||||
|
"""Parse the {comment} part from a filename."""
|
||||||
|
if self._fname_regex:
|
||||||
|
match = self._fname_regex.match(filename)
|
||||||
|
if match:
|
||||||
|
return match.group("comment")
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_comments_from_workfile_paths(
|
||||||
|
filepaths: list[str],
|
||||||
|
extensions: set[str],
|
||||||
|
file_template: StringTemplate,
|
||||||
|
template_data: dict[str, Any],
|
||||||
|
current_filename: Optional[str] = None,
|
||||||
|
) -> tuple[list[str], str]:
|
||||||
|
"""DEPRECATED Collect comments from workfile filenames.
|
||||||
|
|
||||||
|
The "current comment" is also returned, based on 'current_filename'.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filepaths (list[str]): List of filepaths to parse.
|
||||||
|
extensions (set[str]): Set of file extensions.
|
||||||
|
file_template (StringTemplate): Workfile file template.
|
||||||
|
template_data (dict[str, Any]): Data to fill the template with.
|
||||||
|
current_filename (str): Filename to check for the current comment.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
tuple[list[str], str]: List of comments and the current comment.
|
||||||
|
|
||||||
|
"""
|
||||||
|
warnings.warn(
|
||||||
|
"Function 'get_comments_from_workfile_paths' is deprecated. Please"
|
||||||
|
" use 'parse_dynamic_data_from_workfiles' instead.",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2,
|
||||||
|
)
|
||||||
|
current_comment = ""
|
||||||
|
if not filepaths:
|
||||||
|
return [], current_comment
|
||||||
|
|
||||||
|
matcher = CommentMatcher(extensions, file_template, template_data)
|
||||||
|
|
||||||
|
comment_hints = set()
|
||||||
|
for filepath in filepaths:
|
||||||
|
filename = os.path.basename(filepath)
|
||||||
|
comment = matcher.parse_comment(filename)
|
||||||
|
if comment:
|
||||||
|
comment_hints.add(comment)
|
||||||
|
if filename == current_filename:
|
||||||
|
current_comment = comment
|
||||||
|
|
||||||
|
return list(comment_hints), current_comment
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,30 @@
|
||||||
from ayon_core.lib import filter_profiles
|
from __future__ import annotations
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
import uuid
|
||||||
|
import typing
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
import ayon_api
|
||||||
|
from ayon_api.operations import OperationsSession
|
||||||
|
|
||||||
|
from ayon_core.lib import filter_profiles, get_ayon_username
|
||||||
from ayon_core.settings import get_project_settings
|
from ayon_core.settings import get_project_settings
|
||||||
|
from ayon_core.host.interfaces import (
|
||||||
|
SaveWorkfileOptionalData,
|
||||||
|
ListWorkfilesOptionalData,
|
||||||
|
CopyWorkfileOptionalData,
|
||||||
|
)
|
||||||
|
from ayon_core.pipeline.version_start import get_versioning_start
|
||||||
|
from ayon_core.pipeline.template_data import get_template_data
|
||||||
|
|
||||||
|
from .path_resolving import (
|
||||||
|
get_workdir,
|
||||||
|
get_workfile_template_key,
|
||||||
|
)
|
||||||
|
|
||||||
|
if typing.TYPE_CHECKING:
|
||||||
|
from ayon_core.pipeline import Anatomy
|
||||||
|
|
||||||
|
|
||||||
class MissingWorkdirError(Exception):
|
class MissingWorkdirError(Exception):
|
||||||
|
|
@ -7,14 +32,61 @@ class MissingWorkdirError(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def get_workfiles_info(
|
||||||
|
workfile_path: str,
|
||||||
|
project_name: str,
|
||||||
|
task_id: str,
|
||||||
|
*,
|
||||||
|
anatomy: Optional["Anatomy"] = None,
|
||||||
|
workfile_entities: Optional[list[dict[str, Any]]] = None,
|
||||||
|
) -> Optional[dict[str, Any]]:
|
||||||
|
"""Find workfile info entity for a workfile path.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
workfile_path (str): Workfile path.
|
||||||
|
project_name (str): The name of the project.
|
||||||
|
task_id (str): Task id under which is workfile created.
|
||||||
|
anatomy (Optional[Anatomy]): Project anatomy used to get roots.
|
||||||
|
workfile_entities (Optional[list[dict[str, Any]]]): Pre-fetched
|
||||||
|
workfile entities related to the task.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Optional[dict[str, Any]]: Workfile info entity if found, otherwise
|
||||||
|
`None`.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if anatomy is None:
|
||||||
|
anatomy = Anatomy(project_name)
|
||||||
|
|
||||||
|
if workfile_entities is None:
|
||||||
|
workfile_entities = list(ayon_api.get_workfiles_info(
|
||||||
|
project_name,
|
||||||
|
task_ids=[task_id],
|
||||||
|
))
|
||||||
|
|
||||||
|
if platform.system().lower() == "windows":
|
||||||
|
workfile_path = workfile_path.replace("\\", "/")
|
||||||
|
workfile_path = workfile_path.lower()
|
||||||
|
|
||||||
|
for workfile_entity in workfile_entities:
|
||||||
|
path = workfile_entity["path"]
|
||||||
|
filled_path = anatomy.fill_root(path)
|
||||||
|
if platform.system().lower() == "windows":
|
||||||
|
filled_path = filled_path.replace("\\", "/")
|
||||||
|
filled_path = filled_path.lower()
|
||||||
|
if filled_path == workfile_path:
|
||||||
|
return workfile_entity
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
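The matching above compares workfile paths after filling anatomy roots, using forward slashes and, on Windows, ignoring case. The comparison rule in isolation; the helper name is illustrative:

    import platform

    def workfile_paths_match(path_a: str, path_b: str) -> bool:
        # The same file may be stored with either slash direction and, on
        # Windows, with differing case, so normalize both sides first.
        if platform.system().lower() == "windows":
            path_a = path_a.replace("\\", "/").lower()
            path_b = path_b.replace("\\", "/").lower()
        return path_a == path_b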
def should_use_last_workfile_on_launch(
    project_name,
    host_name,
    task_name,
    task_type,
    default_output=False,
    project_settings=None,
):
def should_use_last_workfile_on_launch(
    project_name: str,
    host_name: str,
    task_name: str,
    task_type: str,
    default_output: bool = False,
    project_settings: Optional[dict[str, Any]] = None,
) -> bool:
    """Define if host should start last version workfile if possible.

    Default output is `False`. Can be overridden with environment variable

@ -124,3 +196,608 @@ def should_open_workfiles_tool_on_launch(
    if output is None:
        return default_output
    return output


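For orientation, a minimal usage sketch of 'should_use_last_workfile_on_launch' as typed above; the import path and all argument values are assumptions for illustration, not part of this change.

from ayon_core.pipeline.workfile import should_use_last_workfile_on_launch

# Hypothetical context values; settings/profile resolution happens inside.
use_last = should_use_last_workfile_on_launch(
    project_name="demo_project",
    host_name="nuke",
    task_name="compositing",
    task_type="Compositing",
)
if use_last:
    print("Open the last existing workfile on host launch.")
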
def save_workfile_info(
    project_name: str,
    task_id: str,
    rootless_path: str,
    host_name: str,
    version: Optional[int] = None,
    comment: Optional[str] = None,
    description: Optional[str] = None,
    username: Optional[str] = None,
    workfile_entities: Optional[list[dict[str, Any]]] = None,
) -> dict[str, Any]:
    """Save workfile info entity for a workfile path.

    Args:
        project_name (str): The name of the project.
        task_id (str): Task id under which the workfile is created.
        rootless_path (str): Rootless path of the workfile.
        host_name (str): Name of host which is saving the workfile.
        version (Optional[int]): Workfile version.
        comment (Optional[str]): Workfile comment.
        description (Optional[str]): Workfile description.
        username (Optional[str]): Username of user who saves the workfile.
            If not provided, current user is used.
        workfile_entities (Optional[list[dict[str, Any]]]): Pre-fetched
            workfile entities related to the task.

    Returns:
        dict[str, Any]: Workfile info entity.

    """
    if workfile_entities is None:
        workfile_entities = list(ayon_api.get_workfiles_info(
            project_name,
            task_ids=[task_id],
        ))

    workfile_entity = next(
        (
            _ent
            for _ent in workfile_entities
            if _ent["path"] == rootless_path
        ),
        None
    )

    if username is None:
        username = get_ayon_username()

    if not workfile_entity:
        return _create_workfile_info_entity(
            project_name,
            task_id,
            host_name,
            rootless_path,
            username,
            version,
            comment,
            description,
        )

    data = {
        key: value
        for key, value in (
            ("host_name", host_name),
            ("version", version),
            ("comment", comment),
        )
        if value is not None
    }

    old_data = workfile_entity["data"]

    changed_data = {}
    for key, value in data.items():
        if key not in old_data or old_data[key] != value:
            changed_data[key] = value

    update_data = {}
    if changed_data:
        update_data["data"] = changed_data

    old_description = workfile_entity["attrib"].get("description")
    if description is not None and old_description != description:
        update_data["attrib"] = {"description": description}
        workfile_entity["attrib"]["description"] = description

    # Automatically fix 'createdBy' and 'updatedBy' fields
    # NOTE both fields were not automatically filled by server
    # until 1.1.3 release.
    if workfile_entity.get("createdBy") is None:
        update_data["createdBy"] = username
        workfile_entity["createdBy"] = username

    if workfile_entity.get("updatedBy") != username:
        update_data["updatedBy"] = username
        workfile_entity["updatedBy"] = username

    if not update_data:
        return workfile_entity

    session = OperationsSession()
    session.update_entity(
        project_name,
        "workfile",
        workfile_entity["id"],
        update_data,
    )
    session.commit()
    return workfile_entity


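A hedged example of registering a saved scene through 'save_workfile_info' above; the import path, ids and the rootless path are made-up placeholders.

from ayon_core.pipeline.workfile import save_workfile_info

workfile_entity = save_workfile_info(
    project_name="demo_project",                 # assumed project name
    task_id="0123456789abcdef0123456789abcdef",  # assumed task id
    rootless_path="{root[work]}/demo/sh010/work/comp/sh010_comp_v003.nk",
    host_name="nuke",
    version=3,
    comment="grade_update",
    description="Third compositing pass",
)
print(workfile_entity["id"])
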
def save_current_workfile_to(
    workfile_path: str,
    folder_path: str,
    task_name: str,
    *,
    version: Optional[int] = None,
    comment: Optional[str] = None,
    description: Optional[str] = None,
    prepared_data: Optional[SaveWorkfileOptionalData] = None,
) -> None:
    """Save current workfile to new location or context.

    Args:
        workfile_path (str): Destination workfile path.
        folder_path (str): Target folder path.
        task_name (str): Target task name.
        version (Optional[int]): Workfile version.
        comment (Optional[str]): Workfile comment.
        description (Optional[str]): Workfile description.
        prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data
            for speed enhancements.

    """
    from ayon_core.pipeline.context_tools import registered_host

    host = registered_host()
    context = host.get_current_context()
    project_name = context["project_name"]
    folder_entity = ayon_api.get_folder_by_path(
        project_name, folder_path
    )
    task_entity = ayon_api.get_task_by_name(
        project_name, folder_entity["id"], task_name
    )
    host.save_workfile_with_context(
        workfile_path,
        folder_entity,
        task_entity,
        version=version,
        comment=comment,
        description=description,
        prepared_data=prepared_data,
    )


def save_workfile_with_current_context(
    workfile_path: str,
    *,
    version: Optional[int] = None,
    comment: Optional[str] = None,
    description: Optional[str] = None,
    prepared_data: Optional[SaveWorkfileOptionalData] = None,
) -> None:
    """Save current workfile to new location using current context.

    Helper function to save a workfile using the current context. Calls
    'save_current_workfile_to' at the end.

    Args:
        workfile_path (str): Destination workfile path.
        version (Optional[int]): Workfile version.
        comment (Optional[str]): Workfile comment.
        description (Optional[str]): Workfile description.
        prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data
            for speed enhancements.

    """
    from ayon_core.pipeline.context_tools import registered_host

    host = registered_host()
    context = host.get_current_context()
    project_name = context["project_name"]
    folder_path = context["folder_path"]
    task_name = context["task_name"]
    folder_entity = task_entity = None
    if folder_path:
        folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
    if folder_entity and task_name:
        task_entity = ayon_api.get_task_by_name(
            project_name, folder_entity["id"], task_name
        )

    host.save_workfile_with_context(
        workfile_path,
        folder_entity,
        task_entity,
        version=version,
        comment=comment,
        description=description,
        prepared_data=prepared_data,
    )


def save_next_version(
    version: Optional[int] = None,
    comment: Optional[str] = None,
    description: Optional[str] = None,
    *,
    prepared_data: Optional[SaveWorkfileOptionalData] = None,
) -> None:
    """Save workfile using current context, version and comment.

    Helper function to save a workfile using the current context. Last
    workfile version + 1 is used if it is not passed in.

    Args:
        version (Optional[int]): Workfile version that will be used. Last
            version + 1 is used if it is not passed in.
        comment (Optional[str]): Workfile comment. Pass '""' to clear comment.
            The current workfile comment is used if it is not passed.
        description (Optional[str]): Workfile description.
        prepared_data (Optional[SaveWorkfileOptionalData]): Prepared data
            for speed enhancements.

    """
    from ayon_core.pipeline import Anatomy
    from ayon_core.pipeline.context_tools import registered_host

    host = registered_host()
    current_path = host.get_current_workfile()
    if not current_path:
        current_path = None
    else:
        current_path = os.path.normpath(current_path)

    context = host.get_current_context()
    project_name = context["project_name"]
    folder_path = context["folder_path"]
    task_name = context["task_name"]
    if prepared_data is None:
        prepared_data = SaveWorkfileOptionalData()

    project_entity = prepared_data.project_entity
    anatomy = prepared_data.anatomy
    project_settings = prepared_data.project_settings

    if project_entity is None:
        project_entity = ayon_api.get_project(project_name)
        prepared_data.project_entity = project_entity

    if project_settings is None:
        project_settings = get_project_settings(project_name)
        prepared_data.project_settings = project_settings

    if anatomy is None:
        anatomy = Anatomy(project_name, project_entity=project_entity)
        prepared_data.anatomy = anatomy

    folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
    task_entity = ayon_api.get_task_by_name(
        project_name, folder_entity["id"], task_name
    )

    template_key = get_workfile_template_key(
        project_name,
        task_entity["taskType"],
        host.name,
        project_settings=project_settings
    )
    file_template = anatomy.get_template_item("work", template_key, "file")
    template_data = get_template_data(
        project_entity,
        folder_entity,
        task_entity,
        host.name,
        project_settings,
    )
    workdir = get_workdir(
        project_entity,
        folder_entity,
        task_entity,
        host.name,
        anatomy=anatomy,
        template_key=template_key,
        project_settings=project_settings,
    )
    rootless_dir = workdir.rootless
    last_workfile = None
    current_workfile = None
    if version is None or comment is None:
        workfiles = host.list_workfiles(
            project_name, folder_entity, task_entity,
            prepared_data=ListWorkfilesOptionalData(
                project_entity=project_entity,
                anatomy=anatomy,
                project_settings=project_settings,
                template_key=template_key,
            )
        )
        for workfile in workfiles:
            if current_workfile is None and workfile.filepath == current_path:
                current_workfile = workfile

            if workfile.version is None:
                continue

            if (
                last_workfile is None
                or last_workfile.version < workfile.version
            ):
                last_workfile = workfile

        if version is None and last_workfile is not None:
            version = last_workfile.version + 1

    if version is None:
        version = get_versioning_start(
            project_name,
            host.name,
            task_name=task_entity["name"],
            task_type=task_entity["taskType"],
            product_type="workfile"
        )

    # Re-use comment from the current workfile if it is not passed in
    if comment is None and current_workfile is not None:
        comment = current_workfile.comment

    template_data["version"] = version
    if comment:
        template_data["comment"] = comment

    # Resolve extension
    # - Don't fill any if the host does not define any -> e.g. if the host
    #   uses a directory instead of a file.
    # 1. Use the current file extension.
    # 2. Use the last known workfile extension.
    # 3. Use the first extension from 'get_workfile_extensions'.
    ext = None
    workfile_extensions = host.get_workfile_extensions()
    if workfile_extensions:
        if current_path:
            ext = os.path.splitext(current_path)[1]
        elif last_workfile is not None:
            ext = os.path.splitext(last_workfile.filepath)[1]
        else:
            ext = next(iter(workfile_extensions))
        ext = ext.lstrip(".")

    if ext:
        template_data["ext"] = ext

    filename = file_template.format_strict(template_data)
    workfile_path = os.path.join(workdir, filename)
    rootless_path = f"{rootless_dir}/{filename}"
    if platform.system().lower() == "windows":
        rootless_path = rootless_path.replace("\\", "/")
    prepared_data.rootless_path = rootless_path

    host.save_workfile_with_context(
        workfile_path,
        folder_entity,
        task_entity,
        version=version,
        comment=comment,
        description=description,
        prepared_data=prepared_data,
    )


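The version picking in 'save_next_version' above boils down to "highest known workfile version + 1, otherwise the configured versioning start". A standalone sketch of that decision (names and values are illustrative only):

def resolve_next_version(known_versions, versioning_start=1):
    # Ignore workfiles that have no version parsed from their filename.
    versions = [version for version in known_versions if version is not None]
    if versions:
        return max(versions) + 1
    return versioning_start

assert resolve_next_version([1, 2, 5, None]) == 6
assert resolve_next_version([]) == 1
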
def copy_workfile_to_context(
    src_workfile_path: str,
    folder_entity: dict[str, Any],
    task_entity: dict[str, Any],
    *,
    version: Optional[int] = None,
    comment: Optional[str] = None,
    description: Optional[str] = None,
    open_workfile: bool = True,
    prepared_data: Optional[CopyWorkfileOptionalData] = None,
) -> None:
    """Copy workfile to a context.

    Copy workfile to a specified folder and task. Destination path is
    calculated based on passed information.

    Args:
        src_workfile_path (str): Source workfile path.
        folder_entity (dict[str, Any]): Target folder entity.
        task_entity (dict[str, Any]): Target task entity.
        version (Optional[int]): Workfile version. Use next version if not
            passed.
        comment (Optional[str]): Workfile comment.
        description (Optional[str]): Workfile description.
        prepared_data (Optional[CopyWorkfileOptionalData]): Prepared data
            for speed enhancements. Rootless path is calculated in this
            function.

    """
    from ayon_core.pipeline import Anatomy
    from ayon_core.pipeline.context_tools import registered_host

    host = registered_host()
    project_name = host.get_current_project_name()

    anatomy = prepared_data.anatomy
    if anatomy is None:
        if prepared_data.project_entity is None:
            prepared_data.project_entity = ayon_api.get_project(
                project_name
            )
        anatomy = Anatomy(
            project_name, project_entity=prepared_data.project_entity
        )
        prepared_data.anatomy = anatomy

    project_settings = prepared_data.project_settings
    if project_settings is None:
        project_settings = get_project_settings(project_name)
        prepared_data.project_settings = project_settings

    if version is None:
        list_prepared_data = None
        if prepared_data is not None:
            list_prepared_data = ListWorkfilesOptionalData(
                project_entity=prepared_data.project_entity,
                anatomy=prepared_data.anatomy,
                project_settings=prepared_data.project_settings,
                workfile_entities=prepared_data.workfile_entities,
            )

        workfiles = host.list_workfiles(
            project_name,
            folder_entity,
            task_entity,
            prepared_data=list_prepared_data
        )
        if workfiles:
            version = max(
                workfile.version
                for workfile in workfiles
            ) + 1
        else:
            version = get_versioning_start(
                project_name,
                host.name,
                task_name=task_entity["name"],
                task_type=task_entity["taskType"],
                product_type="workfile"
            )

    task_type = task_entity["taskType"]
    template_key = get_workfile_template_key(
        project_name,
        task_type,
        host.name,
        project_settings=prepared_data.project_settings
    )

    template_data = get_template_data(
        prepared_data.project_entity,
        folder_entity,
        task_entity,
        host.name,
        prepared_data.project_settings,
    )
    template_data["version"] = version
    if comment:
        template_data["comment"] = comment

    workfile_extensions = host.get_workfile_extensions()
    if workfile_extensions:
        ext = os.path.splitext(src_workfile_path)[1].lstrip(".")
        template_data["ext"] = ext

    workfile_template = anatomy.get_template_item(
        "work", template_key, "path"
    )
    workfile_path = workfile_template.format_strict(template_data)
    prepared_data.rootless_path = workfile_path.rootless
    host.copy_workfile(
        src_workfile_path,
        workfile_path,
        folder_entity,
        task_entity,
        version=version,
        comment=comment,
        description=description,
        open_workfile=open_workfile,
        prepared_data=prepared_data,
    )


def find_workfile_rootless_path(
    workfile_path: str,
    project_name: str,
    folder_entity: dict[str, Any],
    task_entity: dict[str, Any],
    host_name: str,
    *,
    project_entity: Optional[dict[str, Any]] = None,
    project_settings: Optional[dict[str, Any]] = None,
    anatomy: Optional["Anatomy"] = None,
) -> str:
    """Find rootless workfile path."""
    if anatomy is None:
        from ayon_core.pipeline import Anatomy

        anatomy = Anatomy(project_name, project_entity=project_entity)

    task_type = task_entity["taskType"]
    template_key = get_workfile_template_key(
        project_name,
        task_type,
        host_name,
        project_settings=project_settings
    )
    dir_template = anatomy.get_template_item(
        "work", template_key, "directory"
    )
    result = dir_template.format({"root": anatomy.roots})
    used_root = result.used_values.get("root")
    rootless_path = str(workfile_path)
    if platform.system().lower() == "windows":
        rootless_path = rootless_path.replace("\\", "/")

    root_key = root_value = None
    if used_root is not None:
        root_key, root_value = next(iter(used_root.items()))
        if platform.system().lower() == "windows":
            root_value = root_value.replace("\\", "/")

    if root_value and rootless_path.startswith(root_value):
        rootless_path = rootless_path[len(root_value):].lstrip("/")
        rootless_path = f"{{root[{root_key}]}}/{rootless_path}"
    else:
        success, result = anatomy.find_root_template_from_path(rootless_path)
        if success:
            rootless_path = result
    return rootless_path


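The helper above strips a resolved root from the path and re-inserts it in its '{root[key]}' template form. A simplified, self-contained sketch of that idea (root names and paths are invented):

def to_rootless(path, roots):
    # 'roots' maps a root name to its resolved directory on this machine.
    path = path.replace("\\", "/")
    for key, value in roots.items():
        value = value.replace("\\", "/").rstrip("/")
        if value and path.startswith(value):
            tail = path[len(value):].lstrip("/")
            return f"{{root[{key}]}}/{tail}"
    return path

print(to_rootless("P:/projects/demo/work/scene.nk", {"work": "P:/projects"}))
# -> {root[work]}/demo/work/scene.nk
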
def _create_workfile_info_entity(
    project_name: str,
    task_id: str,
    host_name: str,
    rootless_path: str,
    username: str,
    version: Optional[int],
    comment: Optional[str],
    description: Optional[str],
) -> dict[str, Any]:
    """Create workfile entity data.

    Args:
        project_name (str): Project name.
        task_id (str): Task id.
        host_name (str): Host name.
        rootless_path (str): Rootless workfile path.
        username (str): Username.
        version (Optional[int]): Workfile version.
        comment (Optional[str]): Workfile comment.
        description (Optional[str]): Workfile description.

    Returns:
        dict[str, Any]: Created workfile entity data.

    """
    extension = os.path.splitext(rootless_path)[1]

    attrib = {}
    for key, value in (
        ("extension", extension),
        ("description", description),
    ):
        if value is not None:
            attrib[key] = value

    data = {
        "host_name": host_name,
        "version": version,
        "comment": comment,
    }

    workfile_info = {
        "id": uuid.uuid4().hex,
        "path": rootless_path,
        "taskId": task_id,
        "attrib": attrib,
        "data": data,
        # TODO remove 'createdBy' and 'updatedBy' fields when server is
        #   1.1.3 or above.
        "createdBy": username,
        "updatedBy": username,
    }

    session = OperationsSession()
    session.create_entity(
        project_name, "workfile", workfile_info
    )
    session.commit()
    return workfile_info

@ -631,7 +631,7 @@ class AbstractTemplateBuilder(ABC):
        """Open template file with registered host."""
        template_preset = self.get_template_preset()
        template_path = template_preset["path"]
        self.host.open_file(template_path)
        self.host.open_workfile(template_path)

    @abstractmethod
    def import_template(self, template_path):

@ -38,6 +38,8 @@ class CleanUp(pyblish.api.InstancePlugin):
        "webpublisher",
        "shell"
    ]

    settings_category = "core"

    exclude_families = ["clip"]
    optional = True
    active = True

@ -13,6 +13,8 @@ class CleanUpFarm(pyblish.api.ContextPlugin):

    order = pyblish.api.IntegratorOrder + 11
    label = "Clean Up Farm"

    settings_category = "core"
    enabled = True

    # Keep "filesequence" for backwards compatibility of older jobs

@ -46,6 +46,8 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
    order = pyblish.api.CollectorOrder + 0.49
    label = "Collect Anatomy Instance data"

    settings_category = "core"

    follow_workfile_version = False

    def process(self, context):

@ -41,6 +41,7 @@ class CollectAudio(pyblish.api.ContextPlugin):
        "max",
        "batchdelivery",
    ]
    settings_category = "core"

    audio_product_name = "audioMain"

@ -23,6 +23,7 @@ class CollectFramesFixDef(
    targets = ["local"]
    hosts = ["nuke"]
    families = ["render", "prerender"]
    settings_category = "core"

    rewrite_version_enable = False

@ -8,13 +8,7 @@ This module contains a unified plugin that handles:

from pprint import pformat

import opentimelineio as otio
import pyblish.api
from ayon_core.pipeline.editorial import (
    get_media_range_with_retimes,
    otio_range_to_frame_range,
    otio_range_with_handles,
)


def validate_otio_clip(instance, logger):
@ -74,6 +68,8 @@ class CollectOtioRanges(pyblish.api.InstancePlugin):
        if not validate_otio_clip(instance, self.log):
            return

        import opentimelineio as otio

        otio_clip = instance.data["otioClip"]

        # Collect timeline ranges if workfile start frame is available
@ -100,6 +96,11 @@ class CollectOtioRanges(pyblish.api.InstancePlugin):

    def _collect_timeline_ranges(self, instance, otio_clip):
        """Collect basic timeline frame ranges."""
        from ayon_core.pipeline.editorial import (
            otio_range_to_frame_range,
            otio_range_with_handles,
        )

        workfile_start = instance.data["workfileFrameStart"]

        # Get timeline ranges
@ -129,6 +130,8 @@ class CollectOtioRanges(pyblish.api.InstancePlugin):

    def _collect_source_ranges(self, instance, otio_clip):
        """Collect source media frame ranges."""
        import opentimelineio as otio

        # Get source ranges
        otio_src_range = otio_clip.source_range
        otio_available_range = otio_clip.available_range()
@ -178,6 +181,8 @@ class CollectOtioRanges(pyblish.api.InstancePlugin):

    def _collect_retimed_ranges(self, instance, otio_clip):
        """Handle retimed clip frame ranges."""
        from ayon_core.pipeline.editorial import get_media_range_with_retimes

        retimed_attributes = get_media_range_with_retimes(otio_clip, 0, 0)
        self.log.debug(f"Retimed attributes: {retimed_attributes}")

@ -1,7 +1,9 @@
import ayon_api
import ayon_api.utils

from ayon_core.host import ILoadHost
from ayon_core.pipeline import registered_host

import pyblish.api


@ -27,16 +29,23 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
    def process(self, context):
        host = registered_host()
        if host is None:
            self.log.warn("No registered host.")
            self.log.warning("No registered host.")
            return

        if not hasattr(host, "ls"):
            host_name = host.__name__
            self.log.warn("Host %r doesn't have ls() implemented." % host_name)
        if not isinstance(host, ILoadHost):
            host_name = host.name
            self.log.warning(
                f"Host {host_name} does not implement ILoadHost. "
                "Skipping querying of loaded versions in scene."
            )
            return

        containers = list(host.get_containers())
        if not containers:
            # Opt out early if there are no containers
            self.log.debug("No loaded containers found in scene.")
            return

        loaded_versions = []
        containers = list(host.ls())
        repre_ids = {
            container["representation"]
            for container in containers
@ -61,6 +70,7 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):

        # QUESTION should we add same representation id when loaded multiple
        # times?
        loaded_versions = []
        for con in containers:
            repre_id = con["representation"]
            repre_entity = repre_entities_by_id.get(repre_id)
@ -80,4 +90,5 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
            }
            loaded_versions.append(version)

        self.log.debug(f"Collected {len(loaded_versions)} loaded versions.")
        context.data["loadedVersions"] = loaded_versions

@ -12,9 +12,10 @@ class CollectSceneVersion(pyblish.api.ContextPlugin):
    """

    order = pyblish.api.CollectorOrder
    label = 'Collect Scene Version'
    label = "Collect Scene Version"
    # configurable in Settings
    hosts = ["*"]
    settings_category = "core"

    # in some cases of headless publishing (for example webpublisher using PS)
    # you want to ignore version from name and let integrate use next version

@ -57,6 +57,7 @@ class ExtractBurnin(publish.Extractor):
        "unreal",
        "batchdelivery",
    ]
    settings_category = "core"

    optional = True

@ -55,6 +55,8 @@ class ExtractOIIOTranscode(publish.Extractor):
    label = "Transcode color spaces"
    order = pyblish.api.ExtractorOrder + 0.019

    settings_category = "core"

    optional = True

    # Supported extensions

@ -158,6 +158,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
        """
        # Not all hosts can import this module.
        import opentimelineio as otio
        from ayon_core.pipeline.editorial import OTIO_EPSILON

        output = []
        # go through all audio tracks
@ -172,6 +173,14 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
            clip_start = otio_clip.source_range.start_time
            fps = clip_start.rate
            conformed_av_start = media_av_start.rescaled_to(fps)

            # Avoid rounding issue on media available range.
            if clip_start.almost_equal(
                conformed_av_start,
                OTIO_EPSILON
            ):
                conformed_av_start = clip_start

            # ffmpeg ignores embedded tc
            start = clip_start - conformed_av_start
            duration = otio_clip.source_range.duration

@ -23,7 +23,10 @@ from ayon_core.lib import (
    get_ffmpeg_tool_args,
    run_subprocess,
)
from ayon_core.pipeline import publish
from ayon_core.pipeline import (
    KnownPublishError,
    publish,
)


class ExtractOTIOReview(
@ -97,8 +100,11 @@ class ExtractOTIOReview(

        # skip instance if no reviewable data available
        if (
            not isinstance(otio_review_clips[0], otio.schema.Clip)
            and len(otio_review_clips) == 1
            len(otio_review_clips) == 1
            and (
                not isinstance(otio_review_clips[0], otio.schema.Clip)
                or otio_review_clips[0].media_reference.is_missing_reference
            )
        ):
            self.log.warning(
                "Instance `{}` has nothing to process".format(instance))
@ -248,7 +254,7 @@ class ExtractOTIOReview(

            # Single video way.
            # Extraction via FFmpeg.
            else:
            elif hasattr(media_ref, "target_url"):
                path = media_ref.target_url
                # Set extract range from 0 (FFmpeg ignores
                # embedded timecode).
@ -352,6 +358,7 @@ class ExtractOTIOReview(
        import opentimelineio as otio
        from ayon_core.pipeline.editorial import (
            trim_media_range,
            OTIO_EPSILON,
        )

        def _round_to_frame(rational_time):
@ -370,6 +377,13 @@ class ExtractOTIOReview(

        avl_start = avl_range.start_time

        # Avoid rounding issue on media available range.
        if start.almost_equal(
            avl_start,
            OTIO_EPSILON
        ):
            avl_start = start

        # An additional gap is required before the available
        # range to conform source start point and head handles.
        if start < avl_start:
@ -388,6 +402,14 @@ class ExtractOTIOReview(
        # (media duration is shorter than clip requirement).
        end_point = start + duration
        avl_end_point = avl_range.end_time_exclusive()

        # Avoid rounding issue on media available range.
        if end_point.almost_equal(
            avl_end_point,
            OTIO_EPSILON
        ):
            avl_end_point = end_point

        if end_point > avl_end_point:
            gap_duration = end_point - avl_end_point
            duration -= gap_duration
@ -444,7 +466,7 @@ class ExtractOTIOReview(
        command = get_ffmpeg_tool_args("ffmpeg")

        input_extension = None
        if sequence:
        if sequence is not None:
            input_dir, collection, sequence_fps = sequence
            in_frame_start = min(collection.indexes)

@ -478,7 +500,7 @@ class ExtractOTIOReview(
                "-i", input_path
            ])

        elif video:
        elif video is not None:
            video_path, otio_range = video
            frame_start = otio_range.start_time.value
            input_fps = otio_range.start_time.rate
@ -496,7 +518,7 @@ class ExtractOTIOReview(
                "-i", video_path
            ])

        elif gap:
        elif gap is not None:
            sec_duration = frames_to_seconds(gap, self.actual_fps)

            # form command for rendering gap files
@ -510,6 +532,9 @@ class ExtractOTIOReview(
                "-tune", "stillimage"
            ])

        else:
            raise KnownPublishError("Sequence, video or gap is required.")

        if video or sequence:
            command.extend([
                "-vf", f"scale={self.to_width}:{self.to_height}:flags=lanczos",

@ -162,8 +162,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
        "flame",
        "unreal",
        "batchdelivery",
        "photoshop"
    ]

    settings_category = "core"
    # Supported extensions
    image_exts = {"exr", "jpg", "jpeg", "png", "dpx", "tga", "tiff", "tif"}
    video_exts = {"mov", "mp4"}
@ -202,15 +204,21 @@ class ExtractReview(pyblish.api.InstancePlugin):
    def _get_outputs_for_instance(self, instance):
        host_name = instance.context.data["hostName"]
        product_type = instance.data["productType"]
        task_type = None
        task_entity = instance.data.get("taskEntity")
        if task_entity:
            task_type = task_entity["taskType"]

        self.log.debug("Host: \"{}\"".format(host_name))
        self.log.debug("Product type: \"{}\"".format(product_type))
        self.log.debug("Task type: \"{}\"".format(task_type))

        profile = filter_profiles(
            self.profiles,
            {
                "hosts": host_name,
                "product_types": product_type,
                "task_types": task_type
            },
            logger=self.log)
        if not profile:

@ -38,10 +38,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
        "substancedesigner",
        "nuke",
        "aftereffects",
        "photoshop",
        "unreal",
        "houdini",
        "batchdelivery",
    ]
    settings_category = "core"
    enabled = False

    integrate_thumbnail = False

@ -256,6 +256,7 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
    label = "Collect USD Layer Contributions (Asset/Shot)"
    families = ["usd"]
    enabled = True
    settings_category = "core"

    # A contribution defines a contribution into a (department) layer which
    # will get layered into the target product, usually the asset or shot.
@ -633,6 +634,8 @@ class ExtractUSDLayerContribution(publish.Extractor):
    label = "Extract USD Layer Contributions (Asset/Shot)"
    order = pyblish.api.ExtractorOrder + 0.45

    settings_category = "core"

    use_ayon_entity_uri = False

    def process(self, instance):
@ -795,6 +798,8 @@ class ExtractUSDAssetContribution(publish.Extractor):
    label = "Extract USD Asset/Shot Contributions"
    order = ExtractUSDLayerContribution.order + 0.01

    settings_category = "core"

    use_ayon_entity_uri = False

    def process(self, instance):

@ -61,6 +61,8 @@ class IntegrateHeroVersion(
    # Must happen after IntegrateNew
    order = pyblish.api.IntegratorOrder + 0.1

    settings_category = "core"

    optional = True
    active = True

@ -105,7 +105,7 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin):
            created links by its type
        """
        if workfile_instance is None:
            self.log.warn("No workfile in this publish session.")
            self.log.warning("No workfile in this publish session.")
            return

        workfile_version_id = workfile_instance.data["versionEntity"]["id"]

@ -24,6 +24,8 @@ class IntegrateProductGroup(pyblish.api.InstancePlugin):
    order = pyblish.api.IntegratorOrder - 0.1
    label = "Product Group"

    settings_category = "core"

    # Attributes set by settings
    product_grouping_profiles = None

@ -22,6 +22,8 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin):
    label = "Override Integrate Thumbnail Representations"
    order = pyblish.api.IntegratorOrder - 0.1

    settings_category = "core"

    integrate_profiles = []

    def process(self, instance):

@ -31,6 +31,7 @@ class ValidateOutdatedContainers(

    label = "Validate Outdated Containers"
    order = pyblish.api.ValidatorOrder
    settings_category = "core"

    optional = True
    actions = [ShowInventory]

@ -37,7 +37,7 @@ class ValidateCurrentSaveFile(pyblish.api.ContextPlugin):
    label = "Validate File Saved"
    order = pyblish.api.ValidatorOrder - 0.1
    hosts = ["fusion", "houdini", "max", "maya", "nuke", "substancepainter",
             "cinema4d", "silhouette", "gaffer", "blender"]
             "cinema4d", "silhouette", "gaffer", "blender", "loki"]
    actions = [SaveByVersionUpAction, ShowWorkfilesAction]

    def process(self, context):

@ -14,6 +14,8 @@ class ValidateIntent(pyblish.api.ContextPlugin):
    order = pyblish.api.ValidatorOrder

    label = "Validate Intent"
    settings_category = "core"

    enabled = False

    # Can be modified by settings

@ -34,7 +34,11 @@ class ValidateProductUniqueness(pyblish.api.ContextPlugin):
        for instance in context:

            # Ignore disabled instances
            if not instance.data.get('publish', True):
            if not instance.data.get("publish", True):
                continue

            # Ignore instances not marked to integrate
            if not instance.data.get("integrate", True):
                continue

            # Ignore instance without folder data

@ -17,6 +17,7 @@ class ValidateVersion(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin):
    order = pyblish.api.ValidatorOrder

    label = "Validate Version"
    settings_category = "core"

    optional = False
    active = True

@ -4,6 +4,7 @@ import logging
import collections
import copy
import time
import warnings

import ayon_api


@ -175,17 +176,22 @@ def get_project_environments(project_name, project_settings=None):


def get_current_project_settings():
    """Project settings for current context project.

    Project name should be stored in environment variable `AYON_PROJECT_NAME`.
    This function should be used only in host context where environment
    variable must be set and should not happen that any part of process will
    change the value of the environment variable.
    """
    project_name = os.environ.get("AYON_PROJECT_NAME")
    if not project_name:
        raise ValueError(
            "Missing context project in environment"
            " variable `AYON_PROJECT_NAME`."
        )
    return get_project_settings(project_name)


def get_current_project_settings():
    """DEPRECATE Project settings for current context project.

    Function requires access to pipeline context which is in
    'ayon_core.pipeline'.

    Returns:
        dict[str, Any]: Project settings for current context project.

    """
    warnings.warn(
        "Used deprecated function 'get_current_project_settings' in"
        " 'ayon_core.settings'. The function was moved to"
        " 'ayon_core.pipeline.context_tools'.",
        DeprecationWarning,
        stacklevel=2
    )
    from ayon_core.pipeline.context_tools import get_current_project_settings

    return get_current_project_settings()

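Callers hitting the new DeprecationWarning can switch to the relocated helper; a minimal migration sketch (the settings layout accessed at the end is an assumption):

from ayon_core.pipeline.context_tools import get_current_project_settings

project_settings = get_current_project_settings()
# Settings are keyed by addon name, e.g. the "core" addon (assumed key).
print(project_settings.get("core", {}))
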
@ -399,7 +399,11 @@ class ActionsModel:
            return cache.get_data()

        try:
            response = ayon_api.post("actions/list", **request_data)
            # 'variant' query is supported since AYON backend 1.10.4
            query = urlencode({"variant": self._variant})
            response = ayon_api.post(
                f"actions/list?{query}", **request_data
            )
            response.raise_for_status()
        except Exception:
            self.log.warning("Failed to collect webactions.", exc_info=True)

@ -4,76 +4,6 @@ from abc import ABC, abstractmethod
from ayon_core.style import get_default_entity_icon_color


class WorkfileInfo:
    """Information about workarea file with possible additional from database.

    Args:
        folder_id (str): Folder id.
        task_id (str): Task id.
        filepath (str): Filepath.
        filesize (int): File size.
        creation_time (float): Creation time (timestamp).
        modification_time (float): Modification time (timestamp).
        created_by (Union[str, none]): User who created the file.
        updated_by (Union[str, none]): User who last updated the file.
        note (str): Note.
    """

    def __init__(
        self,
        folder_id,
        task_id,
        filepath,
        filesize,
        creation_time,
        modification_time,
        created_by,
        updated_by,
        note,
    ):
        self.folder_id = folder_id
        self.task_id = task_id
        self.filepath = filepath
        self.filesize = filesize
        self.creation_time = creation_time
        self.modification_time = modification_time
        self.created_by = created_by
        self.updated_by = updated_by
        self.note = note

    def to_data(self):
        """Converts WorkfileInfo item to data.

        Returns:
            dict[str, Any]: Folder item data.
        """

        return {
            "folder_id": self.folder_id,
            "task_id": self.task_id,
            "filepath": self.filepath,
            "filesize": self.filesize,
            "creation_time": self.creation_time,
            "modification_time": self.modification_time,
            "created_by": self.created_by,
            "updated_by": self.updated_by,
            "note": self.note,
        }

    @classmethod
    def from_data(cls, data):
        """Re-creates WorkfileInfo item from data.

        Args:
            data (dict[str, Any]): Workfile info item data.

        Returns:
            WorkfileInfo: Workfile info item.
        """

        return cls(**data)

class FolderItem:
|
class FolderItem:
|
||||||
"""Item representing folder entity on a server.
|
"""Item representing folder entity on a server.
|
||||||
|
|
||||||
|
|
@ -87,8 +17,8 @@ class FolderItem:
|
||||||
label (str): Folder label.
|
label (str): Folder label.
|
||||||
icon_name (str): Name of icon from font awesome.
|
icon_name (str): Name of icon from font awesome.
|
||||||
icon_color (str): Hex color string that will be used for icon.
|
icon_color (str): Hex color string that will be used for icon.
|
||||||
"""
|
|
||||||
|
|
||||||
|
"""
|
||||||
def __init__(
|
def __init__(
|
||||||
self, entity_id, parent_id, name, label, icon_name, icon_color
|
self, entity_id, parent_id, name, label, icon_name, icon_color
|
||||||
):
|
):
|
||||||
|
|
@ -104,8 +34,8 @@ class FolderItem:
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
dict[str, Any]: Folder item data.
|
dict[str, Any]: Folder item data.
|
||||||
"""
|
|
||||||
|
|
||||||
|
"""
|
||||||
return {
|
return {
|
||||||
"entity_id": self.entity_id,
|
"entity_id": self.entity_id,
|
||||||
"parent_id": self.parent_id,
|
"parent_id": self.parent_id,
|
||||||
|
|
@ -124,8 +54,8 @@ class FolderItem:
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
FolderItem: Folder item.
|
FolderItem: Folder item.
|
||||||
"""
|
|
||||||
|
|
||||||
|
"""
|
||||||
return cls(**data)
|
return cls(**data)
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -144,8 +74,8 @@ class TaskItem:
|
||||||
parent_id (str): Parent folder id.
|
parent_id (str): Parent folder id.
|
         icon_name (str): Name of icon from font awesome.
         icon_color (str): Hex color string that will be used for icon.
-    """
+
+    """
     def __init__(
         self, task_id, name, task_type, parent_id, icon_name, icon_color
     ):

@@ -163,8 +93,8 @@ class TaskItem:

         Returns:
             str: Task id.
-        """
+
+        """
         return self.task_id

     @property

@@ -173,8 +103,8 @@ class TaskItem:

         Returns:
             str: Label of task item.
-        """
+
+        """
         if self._label is None:
             self._label = "{} ({})".format(self.name, self.task_type)
         return self._label

@@ -184,8 +114,8 @@ class TaskItem:

         Returns:
             dict[str, Any]: Task item data.
-        """
+
+        """
         return {
             "task_id": self.task_id,
             "name": self.name,

@@ -204,116 +134,11 @@ class TaskItem:

         Returns:
             TaskItem: Task item.
-        """
+
+        """
         return cls(**data)


-class FileItem:
-    """File item that represents a file.
-
-    Can be used for both Workarea and Published workfile. Workarea file
-    will always exist on disk which is not the case for Published workfile.
-
-    Args:
-        dirpath (str): Directory path of file.
-        filename (str): Filename.
-        modified (float): Modified timestamp.
-        created_by (Optional[str]): Username.
-        representation_id (Optional[str]): Representation id of published
-            workfile.
-        filepath (Optional[str]): Prepared filepath.
-        exists (Optional[bool]): If file exists on disk.
-    """
-
-    def __init__(
-        self,
-        dirpath,
-        filename,
-        modified,
-        created_by=None,
-        updated_by=None,
-        representation_id=None,
-        filepath=None,
-        exists=None
-    ):
-        self.filename = filename
-        self.dirpath = dirpath
-        self.modified = modified
-        self.created_by = created_by
-        self.updated_by = updated_by
-        self.representation_id = representation_id
-        self._filepath = filepath
-        self._exists = exists
-
-    @property
-    def filepath(self):
-        """Filepath of file.
-
-        Returns:
-            str: Full path to a file.
-        """
-
-        if self._filepath is None:
-            self._filepath = os.path.join(self.dirpath, self.filename)
-        return self._filepath
-
-    @property
-    def exists(self):
-        """File is available.
-
-        Returns:
-            bool: If file exists on disk.
-        """
-
-        if self._exists is None:
-            self._exists = os.path.exists(self.filepath)
-        return self._exists
-
-    def to_data(self):
-        """Converts file item to data.
-
-        Returns:
-            dict[str, Any]: File item data.
-        """
-
-        return {
-            "filename": self.filename,
-            "dirpath": self.dirpath,
-            "modified": self.modified,
-            "created_by": self.created_by,
-            "representation_id": self.representation_id,
-            "filepath": self.filepath,
-            "exists": self.exists,
-        }
-
-    @classmethod
-    def from_data(cls, data):
-        """Re-creates file item from data.
-
-        Args:
-            data (dict[str, Any]): File item data.
-
-        Returns:
-            FileItem: File item.
-        """
-
-        required_keys = {
-            "filename",
-            "dirpath",
-            "modified",
-            "representation_id"
-        }
-        missing_keys = required_keys - set(data.keys())
-        if missing_keys:
-            raise KeyError("Missing keys: {}".format(missing_keys))
-
-        return cls(**{
-            key: data[key]
-            for key in required_keys
-        })
-
-
 class WorkareaFilepathResult:
     """Result of workarea file formatting.

@@ -323,8 +148,8 @@ class WorkareaFilepathResult:
         exists (bool): True if file exists.
         filepath (str): Filepath. If not provided it will be constructed
             from root and filename.
-    """
+
+    """
     def __init__(self, root, filename, exists, filepath=None):
         if not filepath and root and filename:
             filepath = os.path.join(root, filename)

@@ -341,8 +166,8 @@ class AbstractWorkfilesCommon(ABC):

         Returns:
             bool: True if host is valid.
-        """
+
+        """
         pass

     @abstractmethod

@@ -353,8 +178,8 @@ class AbstractWorkfilesCommon(ABC):

         Returns:
             Iterable[str]: List of extensions.
-        """
+
+        """
         pass

     @abstractmethod

@@ -363,8 +188,8 @@ class AbstractWorkfilesCommon(ABC):

         Returns:
             bool: True if save is enabled.
-        """
+
+        """
         pass

     @abstractmethod

@@ -373,8 +198,8 @@ class AbstractWorkfilesCommon(ABC):

         Args:
             enabled (bool): Enable save workfile when True.
-        """
+
+        """
         pass

@@ -386,6 +211,7 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):

         Returns:
             str: Name of host.
+
         """
         pass

@@ -395,8 +221,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):

         Returns:
             str: Name of project.
-        """
+
+        """
         pass

     @abstractmethod

@@ -406,8 +232,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):
         Returns:
             Union[str, None]: Folder id or None if host does not have
                 any context.
-        """
+
+        """
         pass

     @abstractmethod

@@ -417,8 +243,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):
         Returns:
             Union[str, None]: Task name or None if host does not have
                 any context.
-        """
+
+        """
         pass

     @abstractmethod

@@ -428,8 +254,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):
         Returns:
             Union[str, None]: Path to workfile or None if host does
                 not have opened specific file.
-        """
+
+        """
         pass

     @property

@@ -439,8 +265,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):

         Returns:
             Anatomy: Project anatomy.
-        """
+
+        """
         pass

     @property

@@ -450,8 +276,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):

         Returns:
             dict[str, Any]: Project settings.
-        """
+
+        """
         pass

     @abstractmethod

@@ -463,8 +289,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):

         Returns:
             dict[str, Any]: Project entity data.
-        """
+
+        """
         pass

     @abstractmethod

@@ -477,8 +303,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):

         Returns:
             dict[str, Any]: Folder entity data.
-        """
+
+        """
         pass

     @abstractmethod

@@ -491,10 +317,24 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):

         Returns:
             dict[str, Any]: Task entity data.
-        """
+
+        """
         pass

+    @abstractmethod
+    def get_workfile_entities(self, task_id: str):
+        """Workfile entities for given task.
+
+        Args:
+            task_id (str): Task id.
+
+        Returns:
+            list[dict[str, Any]]: List of workfile entities.
+
+        """
+        pass
+
+    @abstractmethod
     def emit_event(self, topic, data=None, source=None):
         """Emit event.

@@ -502,8 +342,8 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):
             topic (str): Event topic used for callbacks filtering.
             data (Optional[dict[str, Any]]): Event data.
             source (Optional[str]): Event source.
-        """
+
+        """
         pass

@@ -530,8 +370,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
             topic (str): Name of topic.
             callback (Callable): Callback that will be called when event
                 is triggered.
-        """
+
+        """
         pass

     @abstractmethod

@@ -592,8 +432,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
         Returns:
             List[str]: File extensions that can be used as workfile for
                 current host.
-        """
+
+        """
         pass

     # Selection information

@@ -603,8 +443,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

         Returns:
             Union[str, None]: Folder id or None if no folder is selected.
-        """
+
+        """
         pass

     @abstractmethod

@@ -616,8 +456,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
         Args:
             folder_id (Union[str, None]): Folder id or None if no folder
                 is selected.
-        """
+
+        """
         pass

     @abstractmethod

@@ -626,8 +466,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

         Returns:
             Union[str, None]: Task id or None if no folder is selected.
-        """
+
+        """
         pass

     @abstractmethod

@@ -649,8 +489,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
                 is selected.
             task_name (Union[str, None]): Task name or None if no task
                 is selected.
-        """
+
+        """
         pass

     @abstractmethod
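For context, a minimal sketch of how a backend could satisfy the new 'get_workfile_entities' contract by caching entities per task id. This is not the ayon-core implementation; the caching class and the 'ayon_api.get_workfiles_info' call with a 'task_ids' filter are assumptions for illustration and should be checked against the ayon-python-api documentation.

    # Sketch only; 'get_workfiles_info' and its 'task_ids' argument are assumed.
    import ayon_api

    class WorkfileEntitiesCache:
        def __init__(self, project_name):
            self._project_name = project_name
            self._cache = {}

        def get_workfile_entities(self, task_id):
            # Return cached entities, fetching them once per task id.
            if not task_id:
                return []
            if task_id not in self._cache:
                self._cache[task_id] = list(ayon_api.get_workfiles_info(
                    self._project_name, task_ids=[task_id]
                ))
            return self._cache[task_id]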
@@ -659,18 +499,22 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

         Returns:
             Union[str, None]: Selected workfile path.
-        """
+
+        """
         pass

     @abstractmethod
-    def set_selected_workfile_path(self, path):
+    def set_selected_workfile_path(
+        self, rootless_path, path, workfile_entity_id
+    ):
         """Change selected workfile path.

         Args:
+            rootless_path (Union[str, None]): Selected workfile rootless path.
             path (Union[str, None]): Selected workfile path.
-        """
+            workfile_entity_id (Union[str, None]): Workfile entity id.
+
+        """
         pass

     @abstractmethod

@@ -680,8 +524,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
         Returns:
             Union[str, None]: Representation id or None if no representation
                 is selected.
-        """
+
+        """
         pass

     @abstractmethod

@@ -691,8 +535,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
         Args:
             representation_id (Union[str, None]): Selected workfile
                 representation id.
-        """
+
+        """
         pass

     def get_selected_context(self):

@@ -700,8 +544,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

         Returns:
             dict[str, Union[str, None]]: Selected context.
-        """
+
+        """
         return {
             "folder_id": self.get_selected_folder_id(),
             "task_id": self.get_selected_task_id(),

@@ -737,8 +581,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
                 files UI element.
             representation_id (Optional[str]): Representation id. Used for
                 published filed UI element.
-        """
+
+        """
         pass

     @abstractmethod

@@ -750,8 +594,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

         Returns:
             dict[str, Any]: Expected selection data.
-        """
+
+        """
         pass

     @abstractmethod

@@ -760,8 +604,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

         Args:
             folder_id (str): Folder id which was selected.
-        """
+
+        """
         pass

     @abstractmethod

@@ -771,8 +615,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
         Args:
             folder_id (str): Folder id under which task is.
             task_name (str): Task name which was selected.
-        """
+
+        """
         pass

     @abstractmethod

@@ -785,8 +629,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
             folder_id (str): Folder id under which representation is.
             task_name (str): Task name under which representation is.
             representation_id (str): Representation id which was selected.
-        """
+
+        """
         pass

     @abstractmethod

@@ -797,8 +641,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
             folder_id (str): Folder id under which workfile is.
             task_name (str): Task name under which workfile is.
             workfile_name (str): Workfile filename which was selected.
-        """
+
+        """
         pass

     @abstractmethod

@@ -823,8 +667,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
         Returns:
             list[FolderItem]: Minimum possible information needed
                 for visualisation of folder hierarchy.
-        """
+
+        """
         pass

     @abstractmethod

@@ -843,8 +687,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
         Returns:
             list[TaskItem]: Minimum possible information needed
                 for visualisation of tasks.
-        """
+
+        """
         pass

     @abstractmethod

@@ -853,8 +697,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

         Returns:
             bool: Has unsaved changes.
-        """
+
+        """
         pass

     @abstractmethod

@@ -867,8 +711,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

         Returns:
             str: Workarea directory.
-        """
+
+        """
         pass

     @abstractmethod

@@ -881,9 +725,9 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
             sender (Optional[str]): Who requested workarea file items.

         Returns:
-            list[FileItem]: List of workarea file items.
-        """
+            list[WorkfileInfo]: List of workarea file items.
+
+        """
         pass

     @abstractmethod

@@ -899,8 +743,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

         Returns:
             dict[str, Any]: Data for Save As operation.
-        """
+
+        """
         pass

     @abstractmethod
@@ -925,12 +769,12 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

         Returns:
             WorkareaFilepathResult: Result of the operation.
-        """
+
+        """
         pass

     @abstractmethod
-    def get_published_file_items(self, folder_id, task_id):
+    def get_published_file_items(self, folder_id: str, task_id: str):
         """Get published file items.

         Args:

@@ -938,44 +782,52 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
             task_id (Union[str, None]): Task id.

         Returns:
-            list[FileItem]: List of published file items.
-        """
+            list[PublishedWorkfileInfo]: List of published file items.
+
+        """
         pass

     @abstractmethod
-    def get_workfile_info(self, folder_id, task_name, filepath):
+    def get_workfile_info(self, folder_id, task_id, rootless_path):
         """Workfile info from database.

         Args:
             folder_id (str): Folder id.
-            task_name (str): Task id.
-            filepath (str): Workfile path.
+            task_id (str): Task id.
+            rootless_path (str): Workfile path.

         Returns:
-            Union[WorkfileInfo, None]: Workfile info or None if was passed
+            Optional[WorkfileInfo]: Workfile info or None if was passed
                 invalid context.
-        """
+
+        """
         pass

     @abstractmethod
-    def save_workfile_info(self, folder_id, task_name, filepath, note):
+    def save_workfile_info(
+        self,
+        task_id,
+        rootless_path,
+        version=None,
+        comment=None,
+        description=None,
+    ):
         """Save workfile info to database.

         At this moment the only information which can be saved about
-        workfile is 'note'.
+        workfile is 'description'.

-        When 'note' is 'None' it is only validated if workfile info exists,
-        and if not then creates one with empty note.
+        If value of 'version', 'comment' or 'description' is 'None' it is not
+        added/updated to entity.

         Args:
-            folder_id (str): Folder id.
-            task_name (str): Task id.
-            filepath (str): Workfile path.
-            note (Union[str, None]): Note.
-        """
+            task_id (str): Task id.
+            rootless_path (str): Rootless workfile path.
+            version (Optional[int]): Version of workfile.
+            comment (Optional[str]): User's comment (subversion).
+            description (Optional[str]): Workfile description.
+
+        """
         pass

     # General commands
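A short usage sketch of the reworked info methods, assuming 'controller' is an implementation of this frontend interface; the function name, ids, path and values are placeholders.

    def store_workfile_description(controller, folder_id, task_id, rootless_path):
        # Values that stay None are simply not written to the workfile entity.
        controller.save_workfile_info(
            task_id,
            rootless_path,
            version=3,
            comment="anim-blocking",
            description="Blocking pass ready for review.",
        )
        # The entity can then be read back through the rootless path.
        return controller.get_workfile_info(folder_id, task_id, rootless_path)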
@@ -985,8 +837,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

         Triggers 'controller.reset.started' event at the beginning and
         'controller.reset.finished' at the end.
-        """
+
+        """
         pass

     # Controller actions

@@ -998,8 +850,8 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
             folder_id (str): Folder id.
             task_id (str): Task id.
             filepath (str): Workfile path.
-        """
+
+        """
         pass

     @abstractmethod

@@ -1013,22 +865,27 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
         self,
         folder_id,
         task_id,
+        rootless_workdir,
         workdir,
         filename,
-        template_key,
-        artist_note,
+        version,
+        comment,
+        description,
     ):
         """Save current state of workfile to workarea.

         Args:
             folder_id (str): Folder id.
             task_id (str): Task id.
-            workdir (str): Workarea directory.
+            rootless_workdir (str): Workarea directory.
             filename (str): Workarea filename.
             template_key (str): Template key used to get the workdir
                 and filename.
-        """
+            version (Optional[int]): Version of workfile.
+            comment (Optional[str]): User's comment (subversion).
+            description (Optional[str]): Workfile description.
+
+        """
         pass

     @abstractmethod

@@ -1040,8 +897,10 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
         task_id,
         workdir,
         filename,
-        template_key,
-        artist_note,
+        rootless_workdir,
+        version,
+        comment,
+        description,
     ):
         """Action to copy published workfile representation to workarea.

@@ -1055,23 +914,40 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
             task_id (str): Task id.
             workdir (str): Workarea directory.
             filename (str): Workarea filename.
-            template_key (str): Template key.
-            artist_note (str): Artist note.
-        """
+            rootless_workdir (str): Rootless workdir.
+            version (int): Workfile version.
+            comment (str): User's comment (subversion).
+            description (str): Description note.
+
+        """
         pass

     @abstractmethod
-    def duplicate_workfile(self, src_filepath, workdir, filename, artist_note):
+    def duplicate_workfile(
+        self,
+        folder_id,
+        task_id,
+        src_filepath,
+        rootless_workdir,
+        workdir,
+        filename,
+        description,
+        version,
+        comment
+    ):
         """Duplicate workfile.

         Workfiles is not opened when done.

         Args:
+            folder_id (str): Folder id.
+            task_id (str): Task id.
             src_filepath (str): Source workfile path.
+            rootless_workdir (str): Rootless workdir.
             workdir (str): Destination workdir.
             filename (str): Destination filename.
-            artist_note (str): Artist note.
+            version (int): Workfile version.
+            comment (str): User's comment (subversion).
+            description (str): Workfile description.
         """

         pass
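A sketch of how a call site migrates from the old 'template_key'/'artist_note' arguments to the new signature; 'controller' is assumed to implement this interface and the 'context' dict is a placeholder for values gathered by the UI.

    def save_next_version(controller, folder_id, task_id, context):
        # Old call shape:
        # controller.save_as_workfile(
        #     folder_id, task_id, workdir, filename, template_key, artist_note)
        controller.save_as_workfile(
            folder_id,
            task_id,
            context["rootless_workdir"],  # new: rootless variant of the workdir
            context["workdir"],
            context["filename"],
            version=context["version"],
            comment=context["comment"],
            description=context["description"],
        )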
@@ -1,19 +1,13 @@
 import os
-import shutil

 import ayon_api

 from ayon_core.host import IWorkfileHost
-from ayon_core.lib import Logger, emit_event
+from ayon_core.lib import Logger
 from ayon_core.lib.events import QueuedEventSystem
 from ayon_core.settings import get_project_settings
 from ayon_core.pipeline import Anatomy, registered_host
-from ayon_core.pipeline.context_tools import (
-    change_current_context,
-    get_current_host_name,
-    get_global_context,
-)
-from ayon_core.pipeline.workfile import create_workdir_extra_folders
+from ayon_core.pipeline.context_tools import get_global_context

 from ayon_core.tools.common_models import (
     HierarchyModel,

@@ -140,12 +134,7 @@ class BaseWorkfileController(
         if host is None:
             host = registered_host()

-        host_is_valid = False
-        if host is not None:
-            missing_methods = (
-                IWorkfileHost.get_missing_workfile_methods(host)
-            )
-            host_is_valid = len(missing_methods) == 0
+        host_is_valid = isinstance(host, IWorkfileHost)

         self._host = host
         self._host_is_valid = host_is_valid

@@ -182,7 +171,7 @@ class BaseWorkfileController(
         return UsersModel(self)

     def _create_workfiles_model(self):
-        return WorkfilesModel(self)
+        return WorkfilesModel(self._host, self)

     def _create_expected_selection_obj(self):
         return WorkfilesToolExpectedSelection(self)
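Because validity is now a plain 'isinstance(host, IWorkfileHost)' check, a host integration has to subclass the interface instead of merely exposing the legacy method names. A rough sketch of the minimum shape, with placeholder bodies; the method names are the ones the controller calls elsewhere in this diff, and whether 'HostBase' is combined with the interface is up to the integration.

    from ayon_core.host import HostBase, IWorkfileHost

    class ExampleHost(HostBase, IWorkfileHost):
        name = "examplehost"

        def get_workfile_extensions(self):
            return [".ma", ".mb"]

        def get_current_workfile(self):
            return None

        def workfile_has_unsaved_changes(self):
            return False

        def open_workfile(self, filepath):
            raise NotImplementedError("Placeholder for the DCC 'open' call.")

        def save_workfile(self, filepath):
            raise NotImplementedError("Placeholder for the DCC 'save as' call.")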
@@ -293,28 +282,14 @@ class BaseWorkfileController(

     # Host information
     def get_workfile_extensions(self):
-        host = self._host
-        if isinstance(host, IWorkfileHost):
-            return host.get_workfile_extensions()
-        return host.file_extensions()
+        return self._host.get_workfile_extensions()

     def has_unsaved_changes(self):
-        host = self._host
-        if isinstance(host, IWorkfileHost):
-            return host.workfile_has_unsaved_changes()
-        return host.has_unsaved_changes()
+        return self._host.workfile_has_unsaved_changes()

     # Current context
     def get_host_name(self):
-        host = self._host
-        if isinstance(host, IWorkfileHost):
-            return host.name
-        return get_current_host_name()
-
-    def _get_host_current_context(self):
-        if hasattr(self._host, "get_current_context"):
-            return self._host.get_current_context()
-        return get_global_context()
+        return self._host.name

     def get_current_project_name(self):
         return self._current_project_name

@@ -326,10 +301,7 @@ class BaseWorkfileController(
         return self._current_task_name

     def get_current_workfile(self):
-        host = self._host
-        if isinstance(host, IWorkfileHost):
-            return host.get_current_workfile()
-        return host.current_file()
+        return self._workfiles_model.get_current_workfile()

     # Selection information
     def get_selected_folder_id(self):

@@ -350,8 +322,12 @@ class BaseWorkfileController(
     def get_selected_workfile_path(self):
         return self._selection_model.get_selected_workfile_path()

-    def set_selected_workfile_path(self, path):
-        self._selection_model.set_selected_workfile_path(path)
+    def set_selected_workfile_path(
+        self, rootless_path, path, workfile_entity_id
+    ):
+        self._selection_model.set_selected_workfile_path(
+            rootless_path, path, workfile_entity_id
+        )

     def get_selected_representation_id(self):
         return self._selection_model.get_selected_representation_id()

@@ -424,7 +400,7 @@ class BaseWorkfileController(
     def get_workarea_file_items(self, folder_id, task_name, sender=None):
         task_id = self._get_task_id(folder_id, task_name)
         return self._workfiles_model.get_workarea_file_items(
-            folder_id, task_id, task_name
+            folder_id, task_id
         )

     def get_workarea_save_as_data(self, folder_id, task_id):

@@ -450,28 +426,34 @@ class BaseWorkfileController(
         )

     def get_published_file_items(self, folder_id, task_id):
-        task_name = None
-        if task_id:
-            task = self.get_task_entity(
-                self.get_current_project_name(), task_id
-            )
-            task_name = task.get("name")
-
         return self._workfiles_model.get_published_file_items(
-            folder_id, task_name)
+            folder_id, task_id
+        )

-    def get_workfile_info(self, folder_id, task_name, filepath):
-        task_id = self._get_task_id(folder_id, task_name)
+    def get_workfile_info(self, folder_id, task_id, rootless_path):
         return self._workfiles_model.get_workfile_info(
-            folder_id, task_id, filepath
+            folder_id, task_id, rootless_path
         )

-    def save_workfile_info(self, folder_id, task_name, filepath, note):
-        task_id = self._get_task_id(folder_id, task_name)
+    def save_workfile_info(
+        self,
+        task_id,
+        rootless_path,
+        version=None,
+        comment=None,
+        description=None,
+    ):
         self._workfiles_model.save_workfile_info(
-            folder_id, task_id, filepath, note
+            task_id,
+            rootless_path,
+            version,
+            comment,
+            description,
         )

+    def get_workfile_entities(self, task_id):
+        return self._workfiles_model.get_workfile_entities(task_id)
+
     def reset(self):
         if not self._host_is_valid:
             self._emit_event("controller.reset.started")

@@ -509,6 +491,7 @@ class BaseWorkfileController(

         self._projects_model.reset()
         self._hierarchy_model.reset()
+        self._workfiles_model.reset()

         if not expected_folder_id:
             expected_folder_id = folder_id

@@ -528,53 +511,31 @@ class BaseWorkfileController(

     # Controller actions
     def open_workfile(self, folder_id, task_id, filepath):
-        self._emit_event("open_workfile.started")
-
-        failed = False
-        try:
-            self._open_workfile(folder_id, task_id, filepath)
-
-        except Exception:
-            failed = True
-            self.log.warning("Open of workfile failed", exc_info=True)
-
-        self._emit_event(
-            "open_workfile.finished",
-            {"failed": failed},
-        )
+        self._workfiles_model.open_workfile(folder_id, task_id, filepath)

     def save_current_workfile(self):
-        current_file = self.get_current_workfile()
-        self._host_save_workfile(current_file)
+        self._workfiles_model.save_current_workfile()

     def save_as_workfile(
         self,
         folder_id,
         task_id,
+        rootless_workdir,
         workdir,
         filename,
-        template_key,
-        artist_note,
+        version,
+        comment,
+        description,
     ):
-        self._emit_event("save_as.started")
-
-        failed = False
-        try:
-            self._save_as_workfile(
-                folder_id,
-                task_id,
-                workdir,
-                filename,
-                template_key,
-                artist_note=artist_note,
-            )
-        except Exception:
-            failed = True
-            self.log.warning("Save as failed", exc_info=True)
-
-        self._emit_event(
-            "save_as.finished",
-            {"failed": failed},
-        )
+        self._workfiles_model.save_as_workfile(
+            folder_id,
+            task_id,
+            rootless_workdir,
+            workdir,
+            filename,
+            version,
+            comment,
+            description,
+        )

     def copy_workfile_representation(

@@ -585,64 +546,48 @@ class BaseWorkfileController(
         task_id,
         workdir,
         filename,
-        template_key,
-        artist_note,
+        rootless_workdir,
+        version,
+        comment,
+        description,
     ):
-        self._emit_event("copy_representation.started")
-
-        failed = False
-        try:
-            self._save_as_workfile(
-                folder_id,
-                task_id,
-                workdir,
-                filename,
-                template_key,
-                artist_note,
-                src_filepath=representation_filepath
-            )
-        except Exception:
-            failed = True
-            self.log.warning(
-                "Copy of workfile representation failed", exc_info=True
-            )
-
-        self._emit_event(
-            "copy_representation.finished",
-            {"failed": failed},
-        )
+        self._workfiles_model.copy_workfile_representation(
+            representation_id,
+            representation_filepath,
+            folder_id,
+            task_id,
+            workdir,
+            filename,
+            rootless_workdir,
+            version,
+            comment,
+            description,
+        )

-    def duplicate_workfile(self, src_filepath, workdir, filename, artist_note):
-        self._emit_event("workfile_duplicate.started")
-
-        failed = False
-        try:
-            dst_filepath = os.path.join(workdir, filename)
-            shutil.copy(src_filepath, dst_filepath)
-        except Exception:
-            failed = True
-            self.log.warning("Duplication of workfile failed", exc_info=True)
-
-        self._emit_event(
-            "workfile_duplicate.finished",
-            {"failed": failed},
-        )
+    def duplicate_workfile(
+        self,
+        folder_id,
+        task_id,
+        src_filepath,
+        rootless_workdir,
+        workdir,
+        filename,
+        version,
+        comment,
+        description
+    ):
+        self._workfiles_model.duplicate_workfile(
+            folder_id,
+            task_id,
+            src_filepath,
+            rootless_workdir,
+            workdir,
+            filename,
+            version,
+            comment,
+            description,
+        )

-    # Helper host methods that resolve 'IWorkfileHost' interface
-    def _host_open_workfile(self, filepath):
-        host = self._host
-        if isinstance(host, IWorkfileHost):
-            host.open_workfile(filepath)
-        else:
-            host.open_file(filepath)
-
-    def _host_save_workfile(self, filepath):
-        host = self._host
-        if isinstance(host, IWorkfileHost):
-            host.save_workfile(filepath)
-        else:
-            host.save_file(filepath)
-
     def _emit_event(self, topic, data=None):
         self.emit_event(topic, data, "controller")

@@ -657,6 +602,11 @@ class BaseWorkfileController(
             return None
         return task_item.id

+    def _get_host_current_context(self):
+        if hasattr(self._host, "get_current_context"):
+            return self._host.get_current_context()
+        return get_global_context()
+
     # Expected selection
     # - expected selection is used to restore selection after refresh
     #   or when current context should be used

@@ -665,123 +615,3 @@ class BaseWorkfileController(
             "expected_selection_changed",
             self._expected_selection.get_expected_selection_data(),
         )
-
-    def _get_event_context_data(
-        self, project_name, folder_id, task_id, folder=None, task=None
-    ):
-        if folder is None:
-            folder = self.get_folder_entity(project_name, folder_id)
-        if task is None:
-            task = self.get_task_entity(project_name, task_id)
-        return {
-            "project_name": project_name,
-            "folder_id": folder_id,
-            "folder_path": folder["path"],
-            "task_id": task_id,
-            "task_name": task["name"],
-            "host_name": self.get_host_name(),
-        }
-
-    def _open_workfile(self, folder_id, task_id, filepath):
-        project_name = self.get_current_project_name()
-        event_data = self._get_event_context_data(
-            project_name, folder_id, task_id
-        )
-        event_data["filepath"] = filepath
-
-        emit_event("workfile.open.before", event_data, source="workfiles.tool")
-
-        # Change context
-        task_name = event_data["task_name"]
-        if (
-            folder_id != self.get_current_folder_id()
-            or task_name != self.get_current_task_name()
-        ):
-            self._change_current_context(project_name, folder_id, task_id)
-
-        self._host_open_workfile(filepath)
-
-        emit_event("workfile.open.after", event_data, source="workfiles.tool")
-
-    def _save_as_workfile(
-        self,
-        folder_id: str,
-        task_id: str,
-        workdir: str,
-        filename: str,
-        template_key: str,
-        artist_note: str,
-        src_filepath=None,
-    ):
-        # Trigger before save event
-        project_name = self.get_current_project_name()
-        folder = self.get_folder_entity(project_name, folder_id)
-        task = self.get_task_entity(project_name, task_id)
-        task_name = task["name"]
-
-        # QUESTION should the data be different for 'before' and 'after'?
-        event_data = self._get_event_context_data(
-            project_name, folder_id, task_id, folder, task
-        )
-        event_data.update({
-            "filename": filename,
-            "workdir_path": workdir,
-        })
-
-        emit_event("workfile.save.before", event_data, source="workfiles.tool")
-
-        # Create workfiles root folder
-        if not os.path.exists(workdir):
-            self.log.debug("Initializing work directory: %s", workdir)
-            os.makedirs(workdir)
-
-        # Change context
-        if (
-            folder_id != self.get_current_folder_id()
-            or task_name != self.get_current_task_name()
-        ):
-            self._change_current_context(
-                project_name, folder_id, task_id, template_key
-            )
-
-        # Save workfile
-        dst_filepath = os.path.join(workdir, filename)
-        if src_filepath:
-            shutil.copyfile(src_filepath, dst_filepath)
-            self._host_open_workfile(dst_filepath)
-        else:
-            self._host_save_workfile(dst_filepath)
-
-        # Make sure workfile info exists
-        if not artist_note:
-            artist_note = None
-        self.save_workfile_info(
-            folder_id, task_name, dst_filepath, note=artist_note
-        )
-
-        # Create extra folders
-        create_workdir_extra_folders(
-            workdir,
-            self.get_host_name(),
-            task["taskType"],
-            task_name,
-            project_name
-        )
-
-        # Trigger after save events
-        emit_event("workfile.save.after", event_data, source="workfiles.tool")
-
-    def _change_current_context(
-        self, project_name, folder_id, task_id, template_key=None
-    ):
-        # Change current context
-        folder_entity = self.get_folder_entity(project_name, folder_id)
-        task_entity = self.get_task_entity(project_name, task_id)
-        change_current_context(
-            folder_entity,
-            task_entity,
-            template_key=template_key
-        )
-        self._current_folder_id = folder_entity["id"]
-        self._current_folder_path = folder_entity["path"]
-        self._current_task_name = task_entity["name"]
@@ -62,7 +62,9 @@ class SelectionModel(object):
     def get_selected_workfile_path(self):
         return self._workfile_path

-    def set_selected_workfile_path(self, path):
+    def set_selected_workfile_path(
+        self, rootless_path, path, workfile_entity_id
+    ):
         if path == self._workfile_path:
             return

@@ -72,9 +74,11 @@ class SelectionModel(object):
             {
                 "project_name": self._controller.get_current_project_name(),
                 "path": path,
+                "rootless_path": rootless_path,
                 "folder_id": self._folder_id,
                 "task_name": self._task_name,
                 "task_id": self._task_id,
+                "workfile_entity_id": workfile_entity_id,
             },
             self.event_source
         )

File diff suppressed because it is too large
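The selection event now carries the rootless path and the workfile entity id next to the absolute path. A sketch of a listener reading the extended payload; the topic string and the 'register_event_callback' name are assumptions, only the payload keys are taken from the hunk above.

    def connect_selection_listener(controller):
        def _on_workfile_selection_changed(event):
            # Keys added by this change sit next to the existing ones.
            print(event["path"], event["rootless_path"], event["workfile_entity_id"])

        # Hypothetical topic; the real one is defined on the selection model.
        controller.register_event_callback(
            "selection.workfile.changed", _on_workfile_selection_changed
        )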
@@ -200,6 +200,9 @@ class FilesWidget(QtWidgets.QWidget):
         self._open_workfile(folder_id, task_id, path)

     def _on_current_open_requests(self):
+        # TODO validate if item under mouse is enabled
+        # - this uses selected item, but that does not have to be the one
+        #   under mouse
         self._on_workarea_open_clicked()

     def _on_duplicate_request(self):

@@ -210,11 +213,18 @@ class FilesWidget(QtWidgets.QWidget):
         result = self._exec_save_as_dialog()
         if result is None:
             return
+        folder_id = self._selected_folder_id
+        task_id = self._selected_task_id
         self._controller.duplicate_workfile(
+            folder_id,
+            task_id,
             filepath,
+            result["rootless_workdir"],
             result["workdir"],
             result["filename"],
-            artist_note=result["artist_note"]
+            version=result["version"],
+            comment=result["comment"],
+            description=result["description"]
         )

     def _on_workarea_browse_clicked(self):

@@ -259,10 +269,12 @@ class FilesWidget(QtWidgets.QWidget):
         self._controller.save_as_workfile(
             result["folder_id"],
             result["task_id"],
+            result["rootless_workdir"],
             result["workdir"],
             result["filename"],
-            result["template_key"],
-            artist_note=result["artist_note"]
+            version=result["version"],
+            comment=result["comment"],
+            description=result["description"]
         )

     def _on_workarea_path_changed(self, event):

@@ -314,12 +326,16 @@ class FilesWidget(QtWidgets.QWidget):
             result["task_id"],
             result["workdir"],
             result["filename"],
-            result["template_key"],
-            artist_note=result["artist_note"]
+            result["rootless_workdir"],
+            version=result["version"],
+            comment=result["comment"],
+            description=result["description"],
         )

     def _on_save_as_request(self):
-        self._on_published_save_clicked()
+        # Make sure the save is enabled
+        if self._is_save_enabled and self._valid_selected_context:
+            self._on_published_save_clicked()

     def _set_select_contex_mode(self, enabled):
         if self._select_context_mode is enabled:
@@ -1,3 +1,5 @@
+import os
+
 import qtawesome
 from qtpy import QtWidgets, QtCore, QtGui

@@ -205,24 +207,25 @@ class PublishedFilesModel(QtGui.QStandardItemModel):
                 new_items.append(item)
                 item.setColumnCount(self.columnCount())
                 item.setData(self._file_icon, QtCore.Qt.DecorationRole)
-                item.setData(file_item.filename, QtCore.Qt.DisplayRole)
                 item.setData(repre_id, REPRE_ID_ROLE)

-            if file_item.exists:
+            if file_item.available:
                 flags = QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
             else:
                 flags = QtCore.Qt.NoItemFlags

-            author = file_item.created_by
+            author = file_item.author
             user_item = user_items_by_name.get(author)
             if user_item is not None and user_item.full_name:
                 author = user_item.full_name

-            item.setFlags(flags)
+            filename = os.path.basename(file_item.filepath)
+
+            item.setFlags(flags)
+            item.setData(filename, QtCore.Qt.DisplayRole)
             item.setData(file_item.filepath, FILEPATH_ROLE)
             item.setData(author, AUTHOR_ROLE)
-            item.setData(file_item.modified, DATE_MODIFIED_ROLE)
+            item.setData(file_item.file_modified, DATE_MODIFIED_ROLE)

             self._items_by_id[repre_id] = item
@@ -1,3 +1,5 @@
+import os
+
 import qtawesome
 from qtpy import QtWidgets, QtCore, QtGui

@@ -10,8 +12,10 @@ from ayon_core.tools.utils.delegates import PrettyTimeDelegate

 FILENAME_ROLE = QtCore.Qt.UserRole + 1
 FILEPATH_ROLE = QtCore.Qt.UserRole + 2
-AUTHOR_ROLE = QtCore.Qt.UserRole + 3
-DATE_MODIFIED_ROLE = QtCore.Qt.UserRole + 4
+ROOTLESS_PATH_ROLE = QtCore.Qt.UserRole + 3
+AUTHOR_ROLE = QtCore.Qt.UserRole + 4
+DATE_MODIFIED_ROLE = QtCore.Qt.UserRole + 5
+WORKFILE_ENTITY_ID_ROLE = QtCore.Qt.UserRole + 6


 class WorkAreaFilesModel(QtGui.QStandardItemModel):

@@ -198,7 +202,7 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
         items_to_remove = set(self._items_by_filename.keys())
         new_items = []
         for file_item in file_items:
-            filename = file_item.filename
+            filename = os.path.basename(file_item.filepath)
             if filename in self._items_by_filename:
                 items_to_remove.discard(filename)
                 item = self._items_by_filename[filename]

@@ -206,23 +210,28 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
                 item = QtGui.QStandardItem()
                 new_items.append(item)
                 item.setColumnCount(self.columnCount())
-                item.setFlags(
-                    QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
-                )
                 item.setData(self._file_icon, QtCore.Qt.DecorationRole)
-                item.setData(file_item.filename, QtCore.Qt.DisplayRole)
-                item.setData(file_item.filename, FILENAME_ROLE)
+                item.setData(filename, QtCore.Qt.DisplayRole)
+                item.setData(filename, FILENAME_ROLE)

+            flags = QtCore.Qt.ItemIsSelectable
+            if file_item.available:
+                flags |= QtCore.Qt.ItemIsEnabled
+            item.setFlags(flags)
             updated_by = file_item.updated_by
             user_item = user_items_by_name.get(updated_by)
             if user_item is not None and user_item.full_name:
                 updated_by = user_item.full_name

+            item.setData(
+                file_item.workfile_entity_id, WORKFILE_ENTITY_ID_ROLE
+            )
             item.setData(file_item.filepath, FILEPATH_ROLE)
+            item.setData(file_item.rootless_path, ROOTLESS_PATH_ROLE)
+            item.setData(file_item.file_modified, DATE_MODIFIED_ROLE)
             item.setData(updated_by, AUTHOR_ROLE)
-            item.setData(file_item.modified, DATE_MODIFIED_ROLE)

-            self._items_by_filename[file_item.filename] = item
+            self._items_by_filename[filename] = item

         if new_items:
             root_item.appendRows(new_items)

@@ -354,14 +363,18 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):

     def _get_selected_info(self):
         selection_model = self._view.selectionModel()
-        filepath = None
-        filename = None
+        workfile_entity_id = filename = rootless_path = filepath = None
         for index in selection_model.selectedIndexes():
             filepath = index.data(FILEPATH_ROLE)
+            rootless_path = index.data(ROOTLESS_PATH_ROLE)
             filename = index.data(FILENAME_ROLE)
+            workfile_entity_id = index.data(WORKFILE_ENTITY_ID_ROLE)

         return {
             "filepath": filepath,
+            "rootless_path": rootless_path,
             "filename": filename,
+            "workfile_entity_id": workfile_entity_id,
         }

     def get_selected_path(self):

@@ -374,8 +387,12 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
         return self._get_selected_info()["filepath"]

     def _on_selection_change(self):
-        filepath = self.get_selected_path()
-        self._controller.set_selected_workfile_path(filepath)
+        info = self._get_selected_info()
+        self._controller.set_selected_workfile_path(
+            info["rootless_path"],
+            info["filepath"],
+            info["workfile_entity_id"],
+        )

     def _on_mouse_double_click(self, event):
         if event.button() == QtCore.Qt.LeftButton:

@@ -430,19 +447,25 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
         )

     def _on_model_refresh(self):
-        if (
-            not self._change_selection_on_refresh
-            or self._proxy_model.rowCount() < 1
-        ):
+        if not self._change_selection_on_refresh:
             return

         # Find the row with latest date modified
+        indexes = [
+            self._proxy_model.index(idx, 0)
+            for idx in range(self._proxy_model.rowCount())
+        ]
+        filtered_indexes = [
+            index
+            for index in indexes
+            if self._proxy_model.flags(index) & QtCore.Qt.ItemIsEnabled
+        ]
+        if not filtered_indexes:
+            return
+
         latest_index = max(
-            (
-                self._proxy_model.index(idx, 0)
-                for idx in range(self._proxy_model.rowCount())
-            ),
-            key=lambda model_index: model_index.data(DATE_MODIFIED_ROLE)
+            filtered_indexes,
+            key=lambda model_index: model_index.data(DATE_MODIFIED_ROLE) or 0
        )

         # Select row of latest modified
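One detail worth noting in the refresh handler above: the 'or 0' fallback in the sort key keeps 'max()' from comparing 'None' with a float when a row has no modification time. A tiny standalone illustration:

    timestamps = [1715000000.0, None, 1716000000.0]
    # Without the fallback, max() raises TypeError when it compares None with a float.
    latest = max(timestamps, key=lambda value: value or 0)
    print(latest)  # 1716000000.0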
@@ -108,6 +108,7 @@ class SaveAsDialog(QtWidgets.QDialog):
         self._ext_value = None
         self._filename = None
         self._workdir = None
+        self._rootless_workdir = None

         self._result = None


@@ -144,8 +145,8 @@ class SaveAsDialog(QtWidgets.QDialog):
         version_layout.addWidget(last_version_check)

         # Artist note widget
-        artist_note_input = PlaceholderPlainTextEdit(inputs_widget)
-        artist_note_input.setPlaceholderText(
+        description_input = PlaceholderPlainTextEdit(inputs_widget)
+        description_input.setPlaceholderText(
             "Provide a note about this workfile.")

         # Preview widget

@@ -166,7 +167,7 @@ class SaveAsDialog(QtWidgets.QDialog):
         subversion_label = QtWidgets.QLabel("Subversion:", inputs_widget)
         extension_label = QtWidgets.QLabel("Extension:", inputs_widget)
         preview_label = QtWidgets.QLabel("Preview:", inputs_widget)
-        artist_note_label = QtWidgets.QLabel("Artist Note:", inputs_widget)
+        description_label = QtWidgets.QLabel("Artist Note:", inputs_widget)

         # Build inputs
         inputs_layout = QtWidgets.QGridLayout(inputs_widget)

@@ -178,8 +179,8 @@ class SaveAsDialog(QtWidgets.QDialog):
         inputs_layout.addWidget(extension_combobox, 2, 1)
         inputs_layout.addWidget(preview_label, 3, 0)
         inputs_layout.addWidget(preview_widget, 3, 1)
-        inputs_layout.addWidget(artist_note_label, 4, 0, 1, 2)
-        inputs_layout.addWidget(artist_note_input, 5, 0, 1, 2)
+        inputs_layout.addWidget(description_label, 4, 0, 1, 2)
+        inputs_layout.addWidget(description_input, 5, 0, 1, 2)

         # Build layout
         main_layout = QtWidgets.QVBoxLayout(self)

@@ -214,13 +215,13 @@ class SaveAsDialog(QtWidgets.QDialog):
         self._extension_combobox = extension_combobox
         self._subversion_input = subversion_input
         self._preview_widget = preview_widget
-        self._artist_note_input = artist_note_input
+        self._description_input = description_input

         self._version_label = version_label
         self._subversion_label = subversion_label
         self._extension_label = extension_label
         self._preview_label = preview_label
-        self._artist_note_label = artist_note_label
+        self._description_label = description_label

         # Post init setup


@@ -255,6 +256,7 @@ class SaveAsDialog(QtWidgets.QDialog):
         self._folder_id = folder_id
         self._task_id = task_id
         self._workdir = data["workdir"]
+        self._rootless_workdir = data["rootless_workdir"]
         self._comment_value = data["comment"]
         self._ext_value = data["ext"]
         self._template_key = data["template_key"]

@@ -329,10 +331,13 @@ class SaveAsDialog(QtWidgets.QDialog):
         self._result = {
             "filename": self._filename,
             "workdir": self._workdir,
+            "rootless_workdir": self._rootless_workdir,
             "folder_id": self._folder_id,
             "task_id": self._task_id,
             "template_key": self._template_key,
-            "artist_note": self._artist_note_input.toPlainText(),
+            "version": self._version_value,
+            "comment": self._comment_value,
+            "description": self._description_input.toPlainText(),
         }
         self.close()

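For context, an illustrative shape of the dialog result after this change; the keys follow the dict built above, while every value below is invented.

# Invented values; only the keys reflect the dialog result above.
result = {
    "filename": "sh010_compositing_v004.ma",
    "workdir": "/projects/demo/shots/sh010/work/compositing",
    "rootless_workdir": "{root[work]}/demo/shots/sh010/work/compositing",
    "folder_id": "1a2b3c",
    "task_id": "4d5e6f",
    "template_key": "work",
    "version": 4,
    "comment": "retime",
    "description": "Adjusted retime on the plate.",
}
workfile_path = "/".join([result["workdir"], result["filename"]])
print(workfile_path)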
@@ -4,6 +4,8 @@ from qtpy import QtWidgets, QtCore


 def file_size_to_string(file_size):
+    if not file_size:
+        return "N/A"
     size = 0
     size_ending_mapping = {
         "KB": 1024 ** 1,
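A self-contained sketch of a size formatter with the same early return for falsy values; the thresholds and formatting below are assumptions, not the module's exact table.

def format_file_size(file_size):
    # Mirror the guard above: None or 0 becomes "N/A" instead of "0 B".
    if not file_size:
        return "N/A"
    for ending, value in (("GB", 1024 ** 3), ("MB", 1024 ** 2), ("KB", 1024 ** 1)):
        if file_size >= value:
            return "{:.2f} {}".format(file_size / value, ending)
    return "{} B".format(file_size)


print(format_file_size(None))       # N/A
print(format_file_size(2_500_000))  # 2.38 MB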
@@ -43,44 +45,47 @@ class SidePanelWidget(QtWidgets.QWidget):
         details_input = QtWidgets.QPlainTextEdit(self)
         details_input.setReadOnly(True)

-        artist_note_widget = QtWidgets.QWidget(self)
-        note_label = QtWidgets.QLabel("Artist note", artist_note_widget)
-        note_input = QtWidgets.QPlainTextEdit(artist_note_widget)
-        btn_note_save = QtWidgets.QPushButton("Save note", artist_note_widget)
+        description_widget = QtWidgets.QWidget(self)
+        description_label = QtWidgets.QLabel("Artist note", description_widget)
+        description_input = QtWidgets.QPlainTextEdit(description_widget)
+        btn_description_save = QtWidgets.QPushButton(
+            "Save note", description_widget
+        )

-        artist_note_layout = QtWidgets.QVBoxLayout(artist_note_widget)
-        artist_note_layout.setContentsMargins(0, 0, 0, 0)
-        artist_note_layout.addWidget(note_label, 0)
-        artist_note_layout.addWidget(note_input, 1)
-        artist_note_layout.addWidget(
-            btn_note_save, 0, alignment=QtCore.Qt.AlignRight
+        description_layout = QtWidgets.QVBoxLayout(description_widget)
+        description_layout.setContentsMargins(0, 0, 0, 0)
+        description_layout.addWidget(description_label, 0)
+        description_layout.addWidget(description_input, 1)
+        description_layout.addWidget(
+            btn_description_save, 0, alignment=QtCore.Qt.AlignRight
         )

         main_layout = QtWidgets.QVBoxLayout(self)
         main_layout.setContentsMargins(0, 0, 0, 0)
         main_layout.addWidget(details_label, 0)
         main_layout.addWidget(details_input, 1)
-        main_layout.addWidget(artist_note_widget, 1)
+        main_layout.addWidget(description_widget, 1)

-        note_input.textChanged.connect(self._on_note_change)
-        btn_note_save.clicked.connect(self._on_save_click)
+        description_input.textChanged.connect(self._on_description_change)
+        btn_description_save.clicked.connect(self._on_save_click)

         controller.register_event_callback(
             "selection.workarea.changed", self._on_selection_change
         )

         self._details_input = details_input
-        self._artist_note_widget = artist_note_widget
-        self._note_input = note_input
-        self._btn_note_save = btn_note_save
+        self._description_widget = description_widget
+        self._description_input = description_input
+        self._btn_description_save = btn_description_save

         self._folder_id = None
-        self._task_name = None
+        self._task_id = None
         self._filepath = None
-        self._orig_note = ""
+        self._rootless_path = None
+        self._orig_description = ""
         self._controller = controller

-        self._set_context(None, None, None)
+        self._set_context(None, None, None, None)

     def set_published_mode(self, published_mode):
         """Change published mode.

@@ -89,64 +94,69 @@ class SidePanelWidget(QtWidgets.QWidget):
             published_mode (bool): Published mode enabled.
         """

-        self._artist_note_widget.setVisible(not published_mode)
+        self._description_widget.setVisible(not published_mode)

     def _on_selection_change(self, event):
         folder_id = event["folder_id"]
-        task_name = event["task_name"]
+        task_id = event["task_id"]
         filepath = event["path"]
+        rootless_path = event["rootless_path"]

-        self._set_context(folder_id, task_name, filepath)
+        self._set_context(folder_id, task_id, rootless_path, filepath)

-    def _on_note_change(self):
-        text = self._note_input.toPlainText()
-        self._btn_note_save.setEnabled(self._orig_note != text)
+    def _on_description_change(self):
+        text = self._description_input.toPlainText()
+        self._btn_description_save.setEnabled(self._orig_description != text)

     def _on_save_click(self):
-        note = self._note_input.toPlainText()
+        description = self._description_input.toPlainText()
         self._controller.save_workfile_info(
-            self._folder_id,
-            self._task_name,
-            self._filepath,
-            note
+            self._task_id,
+            self._rootless_path,
+            description=description,
         )
-        self._orig_note = note
-        self._btn_note_save.setEnabled(False)
+        self._orig_description = description
+        self._btn_description_save.setEnabled(False)

-    def _set_context(self, folder_id, task_name, filepath):
+    def _set_context(self, folder_id, task_id, rootless_path, filepath):
         workfile_info = None
         # Check if folder, task and file are selected
-        if bool(folder_id) and bool(task_name) and bool(filepath):
+        if folder_id and task_id and rootless_path:
             workfile_info = self._controller.get_workfile_info(
-                folder_id, task_name, filepath
+                folder_id, task_id, rootless_path
             )
         enabled = workfile_info is not None

         self._details_input.setEnabled(enabled)
-        self._note_input.setEnabled(enabled)
-        self._btn_note_save.setEnabled(enabled)
+        self._description_input.setEnabled(enabled)
+        self._btn_description_save.setEnabled(enabled)

         self._folder_id = folder_id
-        self._task_name = task_name
+        self._task_id = task_id
         self._filepath = filepath
+        self._rootless_path = rootless_path

         # Disable inputs and remove texts if any required arguments are
         # missing
         if not enabled:
-            self._orig_note = ""
+            self._orig_description = ""
             self._details_input.setPlainText("")
-            self._note_input.setPlainText("")
+            self._description_input.setPlainText("")
             return

-        note = workfile_info.note
-        size_value = file_size_to_string(workfile_info.filesize)
+        description = workfile_info.description
+        size_value = file_size_to_string(workfile_info.file_size)

         # Append html string
         datetime_format = "%b %d %Y %H:%M:%S"
-        creation_time = datetime.datetime.fromtimestamp(
-            workfile_info.creation_time)
-        modification_time = datetime.datetime.fromtimestamp(
-            workfile_info.modification_time)
+        file_created = workfile_info.file_created
+        modification_time = workfile_info.file_modified
+        if file_created:
+            file_created = datetime.datetime.fromtimestamp(file_created)

+        if modification_time:
+            modification_time = datetime.datetime.fromtimestamp(
+                modification_time)
+
         user_items_by_name = self._controller.get_user_items_by_name()

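A short sketch of the defensive timestamp handling introduced above: workfile entity timestamps may be missing, so each one is converted only when a value is present. The sample values are invented.

import datetime

datetime_format = "%b %d %Y %H:%M:%S"
file_created = 1700000000.0  # may also be None
file_modified = None

if file_created:
    file_created = datetime.datetime.fromtimestamp(file_created)
    print(file_created.strftime(datetime_format))
if file_modified:
    file_modified = datetime.datetime.fromtimestamp(file_modified)
    print(file_modified.strftime(datetime_format))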
@@ -156,33 +166,38 @@ class SidePanelWidget(QtWidgets.QWidget):
                 return user_item.full_name
             return username

-        created_lines = [
-            creation_time.strftime(datetime_format)
-        ]
+        created_lines = []
         if workfile_info.created_by:
-            created_lines.insert(
-                0, convert_username(workfile_info.created_by)
+            created_lines.append(
+                convert_username(workfile_info.created_by)
             )
+        if file_created:
+            created_lines.append(file_created.strftime(datetime_format))

-        modified_lines = [
-            modification_time.strftime(datetime_format)
-        ]
+        if created_lines:
+            created_lines.insert(0, "<b>Created:</b>")
+
+        modified_lines = []
         if workfile_info.updated_by:
-            modified_lines.insert(
-                0, convert_username(workfile_info.updated_by)
+            modified_lines.append(
+                convert_username(workfile_info.updated_by)
             )
+        if modification_time:
+            modified_lines.append(
+                modification_time.strftime(datetime_format)
+            )
+        if modified_lines:
+            modified_lines.insert(0, "<b>Modified:</b>")

         lines = (
             "<b>Size:</b>",
             size_value,
-            "<b>Created:</b>",
             "<br/>".join(created_lines),
-            "<b>Modified:</b>",
             "<br/>".join(modified_lines),
         )
-        self._orig_note = note
-        self._note_input.setPlainText(note)
+        self._orig_description = description
+        self._description_input.setPlainText(description)

         # Set as empty string
         self._details_input.setPlainText("")
-        self._details_input.appendHtml("<br>".join(lines))
+        self._details_input.appendHtml("<br/>".join(lines))
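A tiny illustration of the heading logic above: the "Created" and "Modified" headers only appear when there is something to show under them. The inputs are made up.

created_lines = ["John Doe", "Nov 14 2023 12:00:00"]
modified_lines = []  # e.g. no modification info stored yet

if created_lines:
    created_lines.insert(0, "<b>Created:</b>")
if modified_lines:
    modified_lines.insert(0, "<b>Modified:</b>")

lines = (
    "<b>Size:</b>",
    "1.2 MB",
    "<br/>".join(created_lines),
    "<br/>".join(modified_lines),
)
print("<br/>".join(lines))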
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring AYON addon 'core' version."""
-__version__ = "1.4.1+dev"
+__version__ = "1.5.2+dev"

@@ -1,6 +1,6 @@
 name = "core"
 title = "Core"
-version = "1.4.1+dev"
+version = "1.5.2+dev"

 client_dir = "ayon_core"


@@ -5,7 +5,7 @@

 [tool.poetry]
 name = "ayon-core"
-version = "1.4.1+dev"
+version = "1.5.2+dev"
 description = ""
 authors = ["Ynput Team <team@ynput.io>"]
 readme = "README.md"

@@ -19,6 +19,7 @@ python = ">=3.9.1,<3.10"
 pytest = "^8.0"
 pytest-print = "^1.0"
 ayon-python-api = "^1.0"
+arrow = "0.17.0"
 # linting dependencies
 ruff = "^0.11.7"
 pre-commit = "^4"
@@ -747,6 +747,11 @@ class ExtractReviewProfileModel(BaseSettingsModel):
     hosts: list[str] = SettingsField(
         default_factory=list, title="Host names"
     )
+    task_types: list[str] = SettingsField(
+        default_factory=list,
+        title="Task Types",
+        enum_resolver=task_types_enum,
+    )
     outputs: list[ExtractReviewOutputDefModel] = SettingsField(
         default_factory=list, title="Output Definitions"
     )

@@ -1348,6 +1353,7 @@ DEFAULT_PUBLISH_VALUES = {
         {
             "product_types": [],
             "hosts": [],
+            "task_types": [],
             "outputs": [
                 {
                     "name": "png",