Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 12:54:40 +01:00)
Commit 2fb258c0d1: Merge branch 'develop' into enhancement/remove_deprecated_stdout_broker_import

89 changed files with 1230 additions and 1045 deletions
.github/workflows/pr_linting.yml (vendored, 3 lines changed)

@@ -21,6 +21,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/ruff-action@v1
      - uses: astral-sh/ruff-action@v3
        with:
          changed-files: "true"
          version-file: "pyproject.toml"
.gitignore (vendored, 4 lines changed)

@@ -77,9 +77,13 @@ dump.sql
# Poetry
########
.poetry/
poetry.lock
.python-version
.editorconfig
.pre-commit-config.yaml
mypy.ini

.github_changelog_generator

# ignore mkdocs build
site/
@@ -24,7 +24,6 @@ from ayon_core.lib.env_tools import (
)



@click.group(invoke_without_command=True)
@click.pass_context
@click.option("--use-staging", is_flag=True,

@@ -173,7 +172,6 @@ def contextselection(
    main(output_path, project, folder, strict)



@main_cli.command(
    context_settings=dict(
        ignore_unknown_options=True,
@@ -1,12 +1,15 @@
from ayon_api import get_project, get_folder_by_path, get_task_by_name

from ayon_core.pipeline import Anatomy
from ayon_core.pipeline.anatomy import RootMissingEnv

from ayon_applications import PreLaunchHook
from ayon_applications.exceptions import ApplicationLaunchFailed
from ayon_applications.utils import (
    EnvironmentPrepData,
    prepare_app_environments,
    prepare_context_environments
)
from ayon_core.pipeline import Anatomy


class GlobalHostDataHook(PreLaunchHook):

@@ -67,9 +70,12 @@ class GlobalHostDataHook(PreLaunchHook):
        self.data["project_entity"] = project_entity

        # Anatomy
        try:
            self.data["anatomy"] = Anatomy(
                project_name, project_entity=project_entity
            )
        except RootMissingEnv as exc:
            raise ApplicationLaunchFailed(str(exc))

        folder_path = self.data.get("folder_path")
        if not folder_path:
@@ -9,6 +9,7 @@ from .local_settings import (
    AYONSettingsRegistry,
    get_launcher_local_dir,
    get_launcher_storage_dir,
    get_addons_resources_dir,
    get_local_site_id,
    get_ayon_username,
)

@@ -142,6 +143,7 @@ __all__ = [
    "AYONSettingsRegistry",
    "get_launcher_local_dir",
    "get_launcher_storage_dir",
    "get_addons_resources_dir",
    "get_local_site_id",
    "get_ayon_username",
|
|
@ -22,12 +22,10 @@ import clique
|
|||
if typing.TYPE_CHECKING:
|
||||
from typing import Self, Tuple, Union, TypedDict, Pattern
|
||||
|
||||
|
||||
class EnumItemDict(TypedDict):
|
||||
label: str
|
||||
value: Any
|
||||
|
||||
|
||||
EnumItemsInputType = Union[
|
||||
Dict[Any, str],
|
||||
List[Tuple[Any, str]],
|
||||
|
|
@ -35,7 +33,6 @@ if typing.TYPE_CHECKING:
|
|||
List[EnumItemDict]
|
||||
]
|
||||
|
||||
|
||||
class FileDefItemDict(TypedDict):
|
||||
directory: str
|
||||
filenames: List[str]
|
||||
|
|
@ -289,6 +286,7 @@ AttrDefType = TypeVar("AttrDefType", bound=AbstractAttrDef)
|
|||
# UI attribute definitions won't hold value
|
||||
# -----------------------------------------
|
||||
|
||||
|
||||
class UIDef(AbstractAttrDef):
|
||||
is_value_def = False
|
||||
|
||||
|
|
|
|||
|
|
@ -177,10 +177,12 @@ def initialize_ayon_connection(force=False):
|
|||
return _new_get_last_versions(
|
||||
con, *args, **kwargs
|
||||
)
|
||||
|
||||
def _lv_by_pi_wrapper(*args, **kwargs):
|
||||
return _new_get_last_version_by_product_id(
|
||||
con, *args, **kwargs
|
||||
)
|
||||
|
||||
def _lv_by_pn_wrapper(*args, **kwargs):
|
||||
return _new_get_last_version_by_product_name(
|
||||
con, *args, **kwargs
|
||||
|
|
|
@@ -96,6 +96,30 @@ def get_launcher_local_dir(*subdirs: str) -> str:
    return os.path.join(storage_dir, *subdirs)


def get_addons_resources_dir(addon_name: str, *args) -> str:
    """Get directory for storing resources for addons.

    Some addons might need to store ad-hoc resources that are not part of
    addon client package (e.g. because of size). Studio might define
    dedicated directory to store them with 'AYON_ADDONS_RESOURCES_DIR'
    environment variable. By default, is used 'addons_resources' in
    launcher storage (might be shared across platforms).

    Args:
        addon_name (str): Addon name.
        *args (str): Subfolders in resources directory.

    Returns:
        str: Path to resources directory.

    """
    addons_resources_dir = os.getenv("AYON_ADDONS_RESOURCES_DIR")
    if not addons_resources_dir:
        addons_resources_dir = get_launcher_storage_dir("addons_resources")

    return os.path.join(addons_resources_dir, addon_name, *args)
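Editor's note: a minimal usage sketch for the new 'get_addons_resources_dir' helper (not part of the diff); the addon name and subfolder are placeholders, and the default resolution assumes 'AYON_ADDONS_RESOURCES_DIR' is not set:

    import os

    from ayon_core.lib import get_addons_resources_dir

    # Resolve a writable per-addon resources directory, e.g. for large files
    # that are not shipped with the addon client package.
    resources_dir = get_addons_resources_dir("example_addon", "thumbnails")
    os.makedirs(resources_dir, exist_ok=True)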


class AYONSecureRegistry:
    """Store information using keyring.
@@ -1,17 +0,0 @@
# Deprecated file
# - the file container 'WeakMethod' implementation for Python 2 which is not
#   needed anymore.
import warnings
import weakref


WeakMethod = weakref.WeakMethod

warnings.warn(
    (
        "'ayon_core.lib.python_2_comp' is deprecated."
        "Please use 'weakref.WeakMethod'."
    ),
    DeprecationWarning,
    stacklevel=2
)
@@ -1,6 +1,8 @@
"""Tools for working with python modules and classes."""
import os
import sys
import types
from typing import Optional
import importlib
import inspect
import logging

@@ -8,13 +10,22 @@ import logging
log = logging.getLogger(__name__)


def import_filepath(filepath, module_name=None):
def import_filepath(
    filepath: str,
    module_name: Optional[str] = None,
    sys_module_name: Optional[str] = None) -> types.ModuleType:
    """Import python file as python module.

    Args:
        filepath (str): Path to python file.
        module_name (str): Name of loaded module. Only for Python 3. By default
            is filled with filename of filepath.
        sys_module_name (str): Name of module in `sys.modules` where to store
            loaded module. By default is None so module is not added to
            `sys.modules`.

    Todo (antirotor): We should add the module to the sys.modules always but
        we need to be careful about it and test it properly.

    """
    if module_name is None:

@@ -28,6 +39,9 @@ def import_filepath(filepath, module_name=None):
    module_loader = importlib.machinery.SourceFileLoader(
        module_name, filepath
    )
    # only add to sys.modules if requested
    if sys_module_name:
        sys.modules[sys_module_name] = module
    module_loader.exec_module(module)
    return module
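Editor's note: an illustrative sketch (not part of the diff) of the new 'sys_module_name' argument; the file path and module name are placeholders:

    import sys

    from ayon_core.lib import import_filepath

    # Load a python file as a module and also register it in 'sys.modules'
    # so its global imports are not garbage collected.
    module = import_filepath(
        "/studio/tools/example_plugin.py",
        module_name="example_plugin",
        sys_module_name="example_plugin",
    )
    assert sys.modules["example_plugin"] is module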

@@ -126,7 +140,8 @@ def classes_from_module(superclass, module):
    return classes


def import_module_from_dirpath(dirpath, folder_name, dst_module_name=None):
def import_module_from_dirpath(
    dirpath, folder_name, dst_module_name=None):
    """Import passed directory as a python module.

    Imported module can be assigned as a child attribute of already loaded

@@ -193,7 +208,7 @@ def is_func_signature_supported(func, *args, **kwargs):
    Notes:
        This does NOT check if the function would work with passed arguments
        only if they can be passed in. If function have *args, **kwargs
        in paramaters, this will always return 'True'.
        in parameters, this will always return 'True'.

    Example:
        >>> def my_function(my_number):
|
|||
|
|
@ -39,6 +39,7 @@ class Terminal:
|
|||
"""
|
||||
|
||||
from ayon_core.lib import env_value_to_bool
|
||||
|
||||
log_no_colors = env_value_to_bool(
|
||||
"AYON_LOG_NO_COLORS", default=None
|
||||
)
|
||||
|
|
|
|||
|
|
@ -162,7 +162,7 @@ def find_tool_in_custom_paths(paths, tool, validation_func=None):
|
|||
# Handle cases when path is just an executable
|
||||
# - it allows to use executable from PATH
|
||||
# - basename must match 'tool' value (without extension)
|
||||
extless_path, ext = os.path.splitext(path)
|
||||
extless_path, _ext = os.path.splitext(path)
|
||||
if extless_path == tool:
|
||||
executable_path = find_executable(tool)
|
||||
if executable_path and (
|
||||
|
|
@ -181,7 +181,7 @@ def find_tool_in_custom_paths(paths, tool, validation_func=None):
|
|||
|
||||
# If path is a file validate it
|
||||
if os.path.isfile(normalized):
|
||||
basename, ext = os.path.splitext(os.path.basename(path))
|
||||
basename, _ext = os.path.splitext(os.path.basename(path))
|
||||
# Check if the filename has actually the sane bane as 'tool'
|
||||
if basename == tool:
|
||||
executable_path = find_executable(normalized)
|
||||
|
|
|
|||
@@ -1,5 +1,6 @@
from .exceptions import (
    ProjectNotSet,
    RootMissingEnv,
    RootCombinationError,
    TemplateMissingKey,
    AnatomyTemplateUnsolved,

@@ -9,6 +10,7 @@ from .anatomy import Anatomy

__all__ = (
    "ProjectNotSet",
    "RootMissingEnv",
    "RootCombinationError",
    "TemplateMissingKey",
    "AnatomyTemplateUnsolved",

@@ -5,6 +5,11 @@ class ProjectNotSet(Exception):
    """Exception raised when is created Anatomy without project name."""


class RootMissingEnv(KeyError):
    """Raised when root requires environment variables which is not filled."""
    pass


class RootCombinationError(Exception):
    """This exception is raised when templates has combined root types."""
@@ -2,9 +2,11 @@ import os
import platform
import numbers

from ayon_core.lib import Logger
from ayon_core.lib import Logger, StringTemplate
from ayon_core.lib.path_templates import FormatObject

from .exceptions import RootMissingEnv


class RootItem(FormatObject):
    """Represents one item or roots.

@@ -21,18 +23,36 @@ class RootItem(FormatObject):
        multi root setup otherwise None value is expected.
    """
    def __init__(self, parent, root_raw_data, name):
        super(RootItem, self).__init__()
        super().__init__()
        self._log = None
        lowered_platform_keys = {}
        for key, value in root_raw_data.items():
            lowered_platform_keys[key.lower()] = value
        lowered_platform_keys = {
            key.lower(): value
            for key, value in root_raw_data.items()
        }
        self.raw_data = lowered_platform_keys
        self.cleaned_data = self._clean_roots(lowered_platform_keys)
        self.name = name
        self.parent = parent

        self.available_platforms = set(lowered_platform_keys.keys())
        self.value = lowered_platform_keys.get(platform.system().lower())

        current_platform = platform.system().lower()
        # WARNING: Using environment variables in roots is not considered
        # as production safe. Some features may not work as expected, for
        # example USD resolver or site sync.
        try:
            self.value = lowered_platform_keys[current_platform].format_map(
                os.environ
            )
        except KeyError:
            result = StringTemplate(self.value).format(os.environ.copy())
            is_are = "is" if len(result.missing_keys) == 1 else "are"
            missing_keys = ", ".join(result.missing_keys)
            raise RootMissingEnv(
                f"Root \"{name}\" requires environment variable/s"
                f" {missing_keys} which {is_are} not available."
            )

        self.clean_value = self._clean_root(self.value)
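Editor's note: a short sketch (not part of the diff) of how the new 'RootMissingEnv' error surfaces when a root template references an unset environment variable; the project name is a placeholder:

    from ayon_core.pipeline import Anatomy
    from ayon_core.pipeline.anatomy import RootMissingEnv

    # A root configured e.g. as "{STUDIO_MOUNT}/projects" is expanded from
    # 'os.environ'; if the variable is missing, 'RootMissingEnv' is raised.
    try:
        anatomy = Anatomy("example_project")
    except RootMissingEnv as exc:
        print(f"Roots could not be resolved: {exc}")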
|
||||
|
||||
def __format__(self, *args, **kwargs):
|
||||
|
|
@ -105,10 +125,10 @@ class RootItem(FormatObject):
|
|||
|
||||
def _clean_roots(self, raw_data):
|
||||
"""Clean all values of raw root item values."""
|
||||
cleaned = {}
|
||||
for key, value in raw_data.items():
|
||||
cleaned[key] = self._clean_root(value)
|
||||
return cleaned
|
||||
return {
|
||||
key: self._clean_root(value)
|
||||
for key, value in raw_data.items()
|
||||
}
|
||||
|
||||
def path_remapper(self, path, dst_platform=None, src_platform=None):
|
||||
"""Remap path for specific platform.
|
||||
|
|
|
|||
|
|
@ -27,7 +27,8 @@ from .workfile import (
|
|||
get_workdir,
|
||||
get_custom_workfile_template_by_string_context,
|
||||
get_workfile_template_key_from_context,
|
||||
get_last_workfile
|
||||
get_last_workfile,
|
||||
MissingWorkdirError,
|
||||
)
|
||||
from . import (
|
||||
register_loader_plugin_path,
|
||||
|
|
@ -251,7 +252,7 @@ def uninstall_host():
|
|||
pyblish.api.deregister_discovery_filter(filter_pyblish_plugins)
|
||||
deregister_loader_plugin_path(LOAD_PATH)
|
||||
deregister_inventory_action_path(INVENTORY_PATH)
|
||||
log.info("Global plug-ins unregistred")
|
||||
log.info("Global plug-ins unregistered")
|
||||
|
||||
deregister_host()
|
||||
|
||||
|
|
@@ -617,7 +618,18 @@ def version_up_current_workfile():
    last_workfile_path = get_last_workfile(
        work_root, file_template, data, extensions, True
    )
    new_workfile_path = version_up(last_workfile_path)
    # `get_last_workfile` will return the first expected file version
    # if no files exist yet. In that case, if they do not exist we will
    # want to save v001
    new_workfile_path = last_workfile_path
    if os.path.exists(new_workfile_path):
        new_workfile_path = version_up(new_workfile_path)

    # Raise an error if the parent folder doesn't exist as `host.save_workfile`
    # is not supposed/able to create missing folders.
    parent_folder = os.path.dirname(new_workfile_path)
    if not os.path.exists(parent_folder):
        raise MissingWorkdirError(
            f"Work area directory '{parent_folder}' does not exist.")

    host.save_workfile(new_workfile_path)
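Editor's note: the versioning decision above, restated as a standalone sketch (not part of the diff); 'version_up' is passed in as a callable to keep the example self-contained:

    import os


    def next_workfile_path(last_workfile_path, version_up):
        # 'get_last_workfile' returns the first expected version even when no
        # workfile exists yet; only bump the version for files already on disk.
        if os.path.exists(last_workfile_path):
            return version_up(last_workfile_path)
        return last_workfile_path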
|
||||
|
|
|
|||
|
|
@ -1259,50 +1259,6 @@ class CreateContext:
|
|||
with self._bulk_context("add", sender) as bulk_info:
|
||||
yield bulk_info
|
||||
|
||||
# Set publish attributes before bulk context is exited
|
||||
for instance in bulk_info.get_data():
|
||||
publish_attributes = instance.publish_attributes
|
||||
# Prepare publish plugin attributes and set it on instance
|
||||
for plugin in self.plugins_with_defs:
|
||||
try:
|
||||
if is_func_signature_supported(
|
||||
plugin.convert_attribute_values, self, instance
|
||||
):
|
||||
plugin.convert_attribute_values(self, instance)
|
||||
|
||||
elif plugin.__instanceEnabled__:
|
||||
output = plugin.convert_attribute_values(
|
||||
publish_attributes
|
||||
)
|
||||
if output:
|
||||
publish_attributes.update(output)
|
||||
|
||||
except Exception:
|
||||
self.log.error(
|
||||
"Failed to convert attribute values of"
|
||||
f" plugin '{plugin.__name__}'",
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
for plugin in self.plugins_with_defs:
|
||||
attr_defs = None
|
||||
try:
|
||||
attr_defs = plugin.get_attr_defs_for_instance(
|
||||
self, instance
|
||||
)
|
||||
except Exception:
|
||||
self.log.error(
|
||||
"Failed to get attribute definitions"
|
||||
f" from plugin '{plugin.__name__}'.",
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
if not attr_defs:
|
||||
continue
|
||||
instance.set_publish_plugin_attr_defs(
|
||||
plugin.__name__, attr_defs
|
||||
)
|
||||
|
||||
@contextmanager
|
||||
def bulk_instances_collection(self, sender=None):
|
||||
"""DEPRECATED use 'bulk_add_instances' instead."""
|
||||
|
|
@ -2251,6 +2207,50 @@ class CreateContext:
|
|||
if not instances_to_validate:
|
||||
return
|
||||
|
||||
# Set publish attributes before bulk callbacks are triggered
|
||||
for instance in instances_to_validate:
|
||||
publish_attributes = instance.publish_attributes
|
||||
# Prepare publish plugin attributes and set it on instance
|
||||
for plugin in self.plugins_with_defs:
|
||||
try:
|
||||
if is_func_signature_supported(
|
||||
plugin.convert_attribute_values, self, instance
|
||||
):
|
||||
plugin.convert_attribute_values(self, instance)
|
||||
|
||||
elif plugin.__instanceEnabled__:
|
||||
output = plugin.convert_attribute_values(
|
||||
publish_attributes
|
||||
)
|
||||
if output:
|
||||
publish_attributes.update(output)
|
||||
|
||||
except Exception:
|
||||
self.log.error(
|
||||
"Failed to convert attribute values of"
|
||||
f" plugin '{plugin.__name__}'",
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
for plugin in self.plugins_with_defs:
|
||||
attr_defs = None
|
||||
try:
|
||||
attr_defs = plugin.get_attr_defs_for_instance(
|
||||
self, instance
|
||||
)
|
||||
except Exception:
|
||||
self.log.error(
|
||||
"Failed to get attribute definitions"
|
||||
f" from plugin '{plugin.__name__}'.",
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
if not attr_defs:
|
||||
continue
|
||||
instance.set_publish_plugin_attr_defs(
|
||||
plugin.__name__, attr_defs
|
||||
)
|
||||
|
||||
# Cache folder and task entities for all instances at once
|
||||
self.get_instances_context_info(instances_to_validate)
|
||||
|
||||
|
|
@ -2303,9 +2303,15 @@ class CreateContext:
|
|||
for plugin_name, plugin_value in item_changes.pop(
|
||||
"publish_attributes"
|
||||
).items():
|
||||
if plugin_value is None:
|
||||
current_publish[plugin_name] = None
|
||||
continue
|
||||
plugin_changes = current_publish.setdefault(
|
||||
plugin_name, {}
|
||||
)
|
||||
if plugin_changes is None:
|
||||
current_publish[plugin_name] = plugin_value
|
||||
else:
|
||||
plugin_changes.update(plugin_value)
|
||||
|
||||
item_values.update(item_changes)
|
||||
|
|
|
|||
|
|
@ -160,29 +160,26 @@ class AttributeValues:
|
|||
return self._attr_defs_by_key.get(key, default)
|
||||
|
||||
def update(self, value):
|
||||
changes = {}
|
||||
for _key, _value in dict(value).items():
|
||||
if _key in self._data and self._data.get(_key) == _value:
|
||||
continue
|
||||
self._data[_key] = _value
|
||||
changes[_key] = _value
|
||||
|
||||
changes = self._update(value)
|
||||
if changes:
|
||||
self._parent.attribute_value_changed(self._key, changes)
|
||||
|
||||
def pop(self, key, default=None):
|
||||
has_key = key in self._data
|
||||
value = self._data.pop(key, default)
|
||||
# Remove attribute definition if is 'UnknownDef'
|
||||
# - gives option to get rid of unknown values
|
||||
attr_def = self._attr_defs_by_key.get(key)
|
||||
if isinstance(attr_def, UnknownDef):
|
||||
self._attr_defs_by_key.pop(key)
|
||||
self._attr_defs.remove(attr_def)
|
||||
elif has_key:
|
||||
self._parent.attribute_value_changed(self._key, {key: None})
|
||||
value, changes = self._pop(key, default)
|
||||
if changes:
|
||||
self._parent.attribute_value_changed(self._key, changes)
|
||||
return value
|
||||
|
||||
def set_value(self, value):
|
||||
pop_keys = set(value.keys()) - set(self._data.keys())
|
||||
changes = self._update(value)
|
||||
for key in pop_keys:
|
||||
_, key_changes = self._pop(key, None)
|
||||
changes.update(key_changes)
|
||||
|
||||
if changes:
|
||||
self._parent.attribute_value_changed(self._key, changes)
|
||||
|
||||
def reset_values(self):
|
||||
self._data = {}
|
||||
|
||||
|
|
@ -228,6 +225,29 @@ class AttributeValues:
|
|||
|
||||
return serialize_attr_defs(self._attr_defs)
|
||||
|
||||
def _update(self, value):
|
||||
changes = {}
|
||||
for key, value in dict(value).items():
|
||||
if key in self._data and self._data.get(key) == value:
|
||||
continue
|
||||
self._data[key] = value
|
||||
changes[key] = value
|
||||
return changes
|
||||
|
||||
def _pop(self, key, default):
|
||||
has_key = key in self._data
|
||||
value = self._data.pop(key, default)
|
||||
# Remove attribute definition if is 'UnknownDef'
|
||||
# - gives option to get rid of unknown values
|
||||
attr_def = self._attr_defs_by_key.get(key)
|
||||
changes = {}
|
||||
if isinstance(attr_def, UnknownDef):
|
||||
self._attr_defs_by_key.pop(key)
|
||||
self._attr_defs.remove(attr_def)
|
||||
elif has_key:
|
||||
changes[key] = None
|
||||
return value, changes
|
||||
|
||||
|
||||
class CreatorAttributeValues(AttributeValues):
|
||||
"""Creator specific attribute values of an instance."""
|
||||
|
|
@ -270,6 +290,23 @@ class PublishAttributes:
|
|||
def __getitem__(self, key):
|
||||
return self._data[key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
"""Set value for plugin.
|
||||
|
||||
Args:
|
||||
key (str): Plugin name.
|
||||
value (dict[str, Any]): Value to set.
|
||||
|
||||
"""
|
||||
current_value = self._data.get(key)
|
||||
if isinstance(current_value, PublishAttributeValues):
|
||||
current_value.set_value(value)
|
||||
else:
|
||||
self._data[key] = value
|
||||
|
||||
def __delitem__(self, key):
|
||||
self.pop(key)
|
||||
|
||||
def __contains__(self, key):
|
||||
return key in self._data
|
||||
|
||||
|
|
|
|||
|
|
@ -255,7 +255,7 @@ def deliver_sequence(
|
|||
report_items[""].append(msg)
|
||||
return report_items, 0
|
||||
|
||||
dir_path, file_name = os.path.split(str(src_path))
|
||||
dir_path, _file_name = os.path.split(str(src_path))
|
||||
|
||||
context = repre["context"]
|
||||
ext = context.get("ext", context.get("representation"))
|
||||
|
|
@ -270,7 +270,7 @@ def deliver_sequence(
|
|||
# context.representation could be .psd
|
||||
ext = ext.replace("..", ".")
|
||||
|
||||
src_collections, remainder = clique.assemble(os.listdir(dir_path))
|
||||
src_collections, _remainder = clique.assemble(os.listdir(dir_path))
|
||||
src_collection = None
|
||||
for col in src_collections:
|
||||
if col.tail != ext:
|
||||
|
|
|
|||
|
|
@ -660,14 +660,6 @@ def _get_legacy_product_name_and_group(
|
|||
warnings.warn("Using legacy product name for renders",
|
||||
DeprecationWarning)
|
||||
|
||||
if not source_product_name.startswith(product_type):
|
||||
resulting_group_name = '{}{}{}{}{}'.format(
|
||||
product_type,
|
||||
task_name[0].upper(), task_name[1:],
|
||||
source_product_name[0].upper(), source_product_name[1:])
|
||||
else:
|
||||
resulting_group_name = source_product_name
|
||||
|
||||
# create product name `<product type><Task><Product name>`
|
||||
if not source_product_name.startswith(product_type):
|
||||
resulting_group_name = '{}{}{}{}{}'.format(
|
||||
|
|
@ -808,14 +800,14 @@ def _create_instances_for_aov(
|
|||
frames_to_render is not None
|
||||
and isinstance(collected_files, (list, tuple)) # not single file
|
||||
):
|
||||
frames_to_render = convert_frames_str_to_list(frames_to_render)
|
||||
aov_frames_to_render = convert_frames_str_to_list(frames_to_render)
|
||||
collections, _ = clique.assemble(collected_files)
|
||||
collected_files = _get_real_files_to_render(
|
||||
collections[0], frames_to_render)
|
||||
collections[0], aov_frames_to_render)
|
||||
else:
|
||||
frame_start = int(skeleton.get("frameStartHandle"))
|
||||
frame_end = int(skeleton.get("frameEndHandle"))
|
||||
frames_to_render = list(range(frame_start, frame_end + 1))
|
||||
aov_frames_to_render = list(range(frame_start, frame_end + 1))
|
||||
|
||||
dynamic_data = {
|
||||
"aov": aov,
|
||||
|
|
@ -937,8 +929,8 @@ def _create_instances_for_aov(
|
|||
"name": ext,
|
||||
"ext": ext,
|
||||
"files": collected_files,
|
||||
"frameStart": frames_to_render[0],
|
||||
"frameEnd": frames_to_render[-1],
|
||||
"frameStart": aov_frames_to_render[0],
|
||||
"frameEnd": aov_frames_to_render[-1],
|
||||
# If expectedFile are absolute, we need only filenames
|
||||
"stagingDir": staging_dir,
|
||||
"fps": new_instance.get("fps"),
|
||||
|
|
@ -1168,7 +1160,7 @@ def prepare_cache_representations(skeleton_data, exp_files, anatomy):
|
|||
|
||||
"""
|
||||
representations = []
|
||||
collections, remainders = clique.assemble(exp_files)
|
||||
collections, _remainders = clique.assemble(exp_files)
|
||||
|
||||
log = Logger.get_logger("farm_publishing")
|
||||
|
||||
|
|
|
|||
|
|
@ -13,15 +13,7 @@ from .utils import get_representation_path_from_context
|
|||
|
||||
|
||||
class LoaderPlugin(list):
|
||||
"""Load representation into host application
|
||||
|
||||
Arguments:
|
||||
context (dict): avalon-core:context-1.0
|
||||
|
||||
.. versionadded:: 4.0
|
||||
This class was introduced
|
||||
|
||||
"""
|
||||
"""Load representation into host application"""
|
||||
|
||||
product_types = set()
|
||||
representations = set()
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
from __future__ import annotations
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
from typing import Any, Union
|
||||
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.lib import Logger
|
||||
|
|
@ -9,7 +11,7 @@ from .anatomy import Anatomy
|
|||
from .template_data import get_project_template_data
|
||||
|
||||
|
||||
def concatenate_splitted_paths(split_paths, anatomy):
|
||||
def concatenate_splitted_paths(split_paths, anatomy: Anatomy):
|
||||
log = Logger.get_logger("concatenate_splitted_paths")
|
||||
pattern_array = re.compile(r"\[.*\]")
|
||||
output = []
|
||||
|
|
@ -47,7 +49,7 @@ def concatenate_splitted_paths(split_paths, anatomy):
|
|||
return output
|
||||
|
||||
|
||||
def fill_paths(path_list, anatomy):
|
||||
def fill_paths(path_list: list[str], anatomy: Anatomy):
|
||||
format_data = get_project_template_data(project_name=anatomy.project_name)
|
||||
format_data["root"] = anatomy.roots
|
||||
filled_paths = []
|
||||
|
|
@ -59,7 +61,7 @@ def fill_paths(path_list, anatomy):
|
|||
return filled_paths
|
||||
|
||||
|
||||
def create_project_folders(project_name, basic_paths=None):
|
||||
def create_project_folders(project_name: str, basic_paths=None):
|
||||
log = Logger.get_logger("create_project_folders")
|
||||
anatomy = Anatomy(project_name)
|
||||
if basic_paths is None:
|
||||
|
|
@ -80,8 +82,19 @@ def create_project_folders(project_name, basic_paths=None):
|
|||
os.makedirs(path)
|
||||
|
||||
|
||||
def _list_path_items(folder_structure):
|
||||
def _list_path_items(
|
||||
folder_structure: Union[dict[str, Any], list[str]]):
|
||||
output = []
|
||||
|
||||
# Allow leaf folders of the `project_folder_structure` to use a list of
|
||||
# strings instead of a dictionary of keys with empty values.
|
||||
if isinstance(folder_structure, list):
|
||||
if not all(isinstance(item, str) for item in folder_structure):
|
||||
raise ValueError(
|
||||
f"List items must all be strings. Got: {folder_structure}")
|
||||
return [[path] for path in folder_structure]
|
||||
|
||||
# Process key, value as key for folder names and value its subfolders
|
||||
for key, value in folder_structure.items():
|
||||
if not value:
|
||||
output.append(key)
|
||||
|
|
@ -99,7 +112,7 @@ def _list_path_items(folder_structure):
|
|||
return output
|
||||
|
||||
|
||||
def get_project_basic_paths(project_name):
|
||||
def get_project_basic_paths(project_name: str):
|
||||
project_settings = get_project_settings(project_name)
|
||||
folder_structure = (
|
||||
project_settings["core"]["project_folder_structure"]
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
"""Library functions for publishing."""
|
||||
from __future__ import annotations
|
||||
import os
|
||||
import sys
|
||||
import inspect
|
||||
|
|
@ -12,8 +14,8 @@ import pyblish.plugin
|
|||
import pyblish.api
|
||||
|
||||
from ayon_core.lib import (
|
||||
Logger,
|
||||
import_filepath,
|
||||
Logger,
|
||||
filter_profiles,
|
||||
)
|
||||
from ayon_core.settings import get_project_settings
|
||||
|
|
@ -163,7 +165,7 @@ class HelpContent:
|
|||
|
||||
def load_help_content_from_filepath(filepath):
|
||||
"""Load help content from xml file.
|
||||
Xml file may containt errors and warnings.
|
||||
Xml file may contain errors and warnings.
|
||||
"""
|
||||
errors = {}
|
||||
warnings = {}
|
||||
|
|
@ -208,8 +210,9 @@ def load_help_content_from_plugin(plugin):
|
|||
return load_help_content_from_filepath(filepath)
|
||||
|
||||
|
||||
def publish_plugins_discover(paths=None):
|
||||
"""Find and return available pyblish plug-ins
|
||||
def publish_plugins_discover(
|
||||
paths: Optional[list[str]] = None) -> DiscoverResult:
|
||||
"""Find and return available pyblish plug-ins.
|
||||
|
||||
Overridden function from `pyblish` module to be able to collect
|
||||
crashed files and reason of their crash.
|
||||
|
|
@ -252,17 +255,14 @@ def publish_plugins_discover(paths=None):
|
|||
continue
|
||||
|
||||
try:
|
||||
module = import_filepath(abspath, mod_name)
|
||||
module = import_filepath(
|
||||
abspath, mod_name, sys_module_name=mod_name)
|
||||
|
||||
# Store reference to original module, to avoid
|
||||
# garbage collection from collecting it's global
|
||||
# imports, such as `import os`.
|
||||
sys.modules[abspath] = module
|
||||
|
||||
except Exception as err:
|
||||
except Exception as err: # noqa: BLE001
|
||||
# we need broad exception to catch all possible errors.
|
||||
result.crashed_file_paths[abspath] = sys.exc_info()
|
||||
|
||||
log.debug("Skipped: \"%s\" (%s)", mod_name, err)
|
||||
log.debug('Skipped: "%s" (%s)', mod_name, err)
|
||||
continue
|
||||
|
||||
for plugin in pyblish.plugin.plugins_from_module(module):
|
||||
|
|
@ -280,9 +280,8 @@ def publish_plugins_discover(paths=None):
|
|||
continue
|
||||
|
||||
plugin_names.append(plugin.__name__)
|
||||
|
||||
plugin.__module__ = module.__file__
|
||||
key = "{0}.{1}".format(plugin.__module__, plugin.__name__)
|
||||
plugin.__file__ = module.__file__
|
||||
key = f"{module.__file__}.{plugin.__name__}"
|
||||
plugins[key] = plugin
|
||||
|
||||
# Include plug-ins from registration.
|
||||
|
|
@ -361,7 +360,7 @@ def get_plugin_settings(plugin, project_settings, log, category=None):
|
|||
# Settings category determined from path
|
||||
# - usually path is './<category>/plugins/publish/<plugin file>'
|
||||
# - category can be host name of addon name ('maya', 'deadline', ...)
|
||||
filepath = os.path.normpath(inspect.getsourcefile(plugin))
|
||||
filepath = os.path.normpath(inspect.getfile(plugin))
|
||||
|
||||
split_path = filepath.rsplit(os.path.sep, 5)
|
||||
if len(split_path) < 4:
|
||||
|
|
@ -427,7 +426,7 @@ def filter_pyblish_plugins(plugins):
|
|||
log = Logger.get_logger("filter_pyblish_plugins")
|
||||
|
||||
# TODO: Don't use host from 'pyblish.api' but from defined host by us.
|
||||
# - kept becau on farm is probably used host 'shell' which propably
|
||||
# - kept because on farm is probably used host 'shell' which probably
|
||||
# affect how settings are applied there
|
||||
host_name = pyblish.api.current_host()
|
||||
project_name = os.environ.get("AYON_PROJECT_NAME")
|
||||
|
|
@ -529,7 +528,7 @@ def filter_instances_for_context_plugin(plugin, context):
|
|||
|
||||
Args:
|
||||
plugin (pyblish.api.Plugin): Plugin with filters.
|
||||
context (pyblish.api.Context): Pyblish context with insances.
|
||||
context (pyblish.api.Context): Pyblish context with instances.
|
||||
|
||||
Returns:
|
||||
Iterator[pyblish.lib.Instance]: Iteration of valid instances.
|
||||
|
|
|
|||
|
|
@ -41,7 +41,7 @@ def validate(data, schema=None):
|
|||
if not _CACHED:
|
||||
_precache()
|
||||
|
||||
root, schema = data["schema"].rsplit(":", 1)
|
||||
_root, schema = data["schema"].rsplit(":", 1)
|
||||
|
||||
if isinstance(schema, str):
|
||||
schema = _cache[schema + ".json"]
|
||||
|
|
|
|||
|
|
@ -226,11 +226,26 @@ class _CacheItems:
|
|||
thumbnails_cache = ThumbnailsCache()
|
||||
|
||||
|
||||
def get_thumbnail_path(project_name, thumbnail_id):
|
||||
def get_thumbnail_path(
|
||||
project_name: str,
|
||||
entity_type: str,
|
||||
entity_id: str,
|
||||
thumbnail_id: str
|
||||
):
|
||||
"""Get path to thumbnail image.
|
||||
|
||||
Thumbnail is cached by thumbnail id but is received using entity type and
|
||||
entity id.
|
||||
|
||||
Notes:
|
||||
Function 'get_thumbnail_by_id' can't be used because does not work
|
||||
for artists. The endpoint can't validate artist permissions.
|
||||
|
||||
Args:
|
||||
project_name (str): Project where thumbnail belongs to.
|
||||
entity_type (str): Entity type "folder", "task", "version"
|
||||
and "workfile".
|
||||
entity_id (str): Entity id.
|
||||
thumbnail_id (Union[str, None]): Thumbnail id.
|
||||
|
||||
Returns:
|
||||
|
|
@ -251,7 +266,7 @@ def get_thumbnail_path(project_name, thumbnail_id):
|
|||
# 'get_thumbnail_by_id' did not return output of
|
||||
# 'ServerAPI' method.
|
||||
con = ayon_api.get_server_api_connection()
|
||||
result = con.get_thumbnail_by_id(project_name, thumbnail_id)
|
||||
result = con.get_thumbnail(project_name, entity_type, entity_id)
|
||||
|
||||
if result is not None and result.is_valid:
|
||||
return _CacheItems.thumbnails_cache.store_thumbnail(
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ from .path_resolving import (
|
|||
from .utils import (
|
||||
should_use_last_workfile_on_launch,
|
||||
should_open_workfiles_tool_on_launch,
|
||||
MissingWorkdirError,
|
||||
)
|
||||
|
||||
from .build_workfile import BuildWorkfile
|
||||
|
|
@ -46,6 +47,7 @@ __all__ = (
|
|||
|
||||
"should_use_last_workfile_on_launch",
|
||||
"should_open_workfiles_tool_on_launch",
|
||||
"MissingWorkdirError",
|
||||
|
||||
"BuildWorkfile",
|
||||
|
||||
|
|
|
|||
|
|
@ -329,9 +329,9 @@ def get_last_workfile(
|
|||
|
||||
Returns:
|
||||
str: Last or first workfile as filename of full path to filename.
|
||||
"""
|
||||
|
||||
filename, version = get_last_workfile_with_version(
|
||||
"""
|
||||
filename, _version = get_last_workfile_with_version(
|
||||
workdir, file_template, fill_data, extensions
|
||||
)
|
||||
if filename is None:
|
||||
|
|
|
|||
|
|
@ -2,6 +2,11 @@ from ayon_core.lib import filter_profiles
|
|||
from ayon_core.settings import get_project_settings
|
||||
|
||||
|
||||
class MissingWorkdirError(Exception):
|
||||
"""Raised when accessing a work directory not found on disk."""
|
||||
pass
|
||||
|
||||
|
||||
def should_use_last_workfile_on_launch(
|
||||
project_name,
|
||||
host_name,
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ from ayon_core.tools.utils import show_message_dialog
|
|||
OTIO = None
|
||||
FRAME_SPLITTER = "__frame_splitter__"
|
||||
|
||||
|
||||
def _import_otio():
|
||||
global OTIO
|
||||
if OTIO is None:
|
||||
|
|
|
|||
|
|
@ -394,7 +394,6 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
|
|||
if aov:
|
||||
anatomy_data["aov"] = aov
|
||||
|
||||
|
||||
def _fill_folder_data(self, instance, project_entity, anatomy_data):
|
||||
# QUESTION: should we make sure that all folder data are popped if
|
||||
# folder data cannot be found?
|
||||
|
|
|
|||
|
|
@ -39,6 +39,7 @@ class CollectAudio(pyblish.api.ContextPlugin):
|
|||
"blender",
|
||||
"houdini",
|
||||
"max",
|
||||
"circuit",
|
||||
]
|
||||
|
||||
audio_product_name = "audioMain"
|
||||
|
|
|
|||
|
|
@ -43,4 +43,3 @@ class CollectCoreJobEnvVars(pyblish.api.ContextPlugin):
|
|||
if value:
|
||||
self.log.debug(f"Setting job env: {key}: {value}")
|
||||
env[key] = value
|
||||
|
||||
|
|
|
|||
|
|
@ -194,7 +194,6 @@ class CollectOtioSubsetResources(
|
|||
repre = self._create_representation(
|
||||
frame_start, frame_end, file=filename)
|
||||
|
||||
|
||||
else:
|
||||
_trim = False
|
||||
dirname, filename = os.path.split(media_ref.target_url)
|
||||
|
|
@ -209,7 +208,6 @@ class CollectOtioSubsetResources(
|
|||
repre = self._create_representation(
|
||||
frame_start, frame_end, file=filename, trim=_trim)
|
||||
|
||||
|
||||
instance.data["originalDirname"] = self.staging_dir
|
||||
|
||||
# add representation to instance data
|
||||
|
|
@ -221,7 +219,6 @@ class CollectOtioSubsetResources(
|
|||
|
||||
instance.data["representations"].append(repre)
|
||||
|
||||
|
||||
self.log.debug(instance.data)
|
||||
|
||||
def _create_representation(self, start, end, **kwargs):
|
||||
|
|
|
|||
|
|
@ -31,6 +31,9 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
|
|||
# Keep "filesequence" for backwards compatibility of older jobs
|
||||
targets = ["filesequence", "farm"]
|
||||
label = "Collect rendered frames"
|
||||
settings_category = "core"
|
||||
|
||||
remove_files = False
|
||||
|
||||
_context = None
|
||||
|
||||
|
|
@ -120,7 +123,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
|
|||
self._fill_staging_dir(repre_data, anatomy)
|
||||
representations.append(repre_data)
|
||||
|
||||
if not staging_dir_persistent:
|
||||
if self.remove_files and not staging_dir_persistent:
|
||||
add_repre_files_for_cleanup(instance, repre_data)
|
||||
|
||||
instance.data["representations"] = representations
|
||||
|
|
@ -170,7 +173,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
|
|||
os.environ.update(session_data)
|
||||
|
||||
staging_dir_persistent = self._process_path(data, anatomy)
|
||||
if not staging_dir_persistent:
|
||||
if self.remove_files and not staging_dir_persistent:
|
||||
context.data["cleanupFullPaths"].append(path)
|
||||
context.data["cleanupEmptyDirs"].append(
|
||||
os.path.dirname(path)
|
||||
|
|
|
|||
|
|
@ -54,7 +54,8 @@ class ExtractBurnin(publish.Extractor):
|
|||
"houdini",
|
||||
"max",
|
||||
"blender",
|
||||
"unreal"
|
||||
"unreal",
|
||||
"circuit",
|
||||
]
|
||||
|
||||
optional = True
|
||||
|
|
|
|||
|
|
@ -91,7 +91,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
"webpublisher",
|
||||
"aftereffects",
|
||||
"flame",
|
||||
"unreal"
|
||||
"unreal",
|
||||
"circuit",
|
||||
]
|
||||
|
||||
# Supported extensions
|
||||
|
|
@ -196,7 +197,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
).format(repre_name))
|
||||
continue
|
||||
|
||||
input_ext = repre["ext"]
|
||||
input_ext = repre["ext"].lower()
|
||||
if input_ext.startswith("."):
|
||||
input_ext = input_ext[1:]
|
||||
|
||||
|
|
|
|||
|
|
@ -39,7 +39,8 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
|||
"nuke",
|
||||
"aftereffects",
|
||||
"unreal",
|
||||
"houdini"
|
||||
"houdini",
|
||||
"circuit",
|
||||
]
|
||||
enabled = False
|
||||
|
||||
|
|
@ -162,9 +163,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
|||
# Store new staging to cleanup paths
|
||||
instance.context.data["cleanupFullPaths"].append(dst_staging)
|
||||
|
||||
thumbnail_created = False
|
||||
oiio_supported = is_oiio_supported()
|
||||
thumbnail_created = False
|
||||
for repre in filtered_repres:
|
||||
# Reset for each iteration to handle cases where multiple
|
||||
# reviewable thumbnails are needed
|
||||
repre_thumb_created = False
|
||||
repre_files = repre["files"]
|
||||
src_staging = os.path.normpath(repre["stagingDir"])
|
||||
if not isinstance(repre_files, (list, tuple)):
|
||||
|
|
@ -213,7 +217,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
|||
)
|
||||
# If the input can read by OIIO then use OIIO method for
|
||||
# conversion otherwise use ffmpeg
|
||||
thumbnail_created = self._create_thumbnail_oiio(
|
||||
repre_thumb_created = self._create_thumbnail_oiio(
|
||||
full_input_path,
|
||||
full_output_path,
|
||||
colorspace_data
|
||||
|
|
@ -222,21 +226,22 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
|||
# Try to use FFMPEG if OIIO is not supported or for cases when
|
||||
# oiiotool isn't available or representation is not having
|
||||
# colorspace data
|
||||
if not thumbnail_created:
|
||||
if not repre_thumb_created:
|
||||
if oiio_supported:
|
||||
self.log.debug(
|
||||
"Converting with FFMPEG because input"
|
||||
" can't be read by OIIO."
|
||||
)
|
||||
|
||||
thumbnail_created = self._create_thumbnail_ffmpeg(
|
||||
repre_thumb_created = self._create_thumbnail_ffmpeg(
|
||||
full_input_path, full_output_path
|
||||
)
|
||||
|
||||
# Skip representation and try next one if wasn't created
|
||||
if not thumbnail_created:
|
||||
if not repre_thumb_created:
|
||||
continue
|
||||
|
||||
thumbnail_created = True
|
||||
if len(explicit_repres) > 1:
|
||||
repre_name = "thumbnail_{}".format(repre["outputName"])
|
||||
else:
|
||||
|
|
@ -449,7 +454,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
|||
# output arguments from presets
|
||||
jpeg_items.extend(ffmpeg_args.get("output") or [])
|
||||
# we just want one frame from movie files
|
||||
jpeg_items.extend(["-vframes", "1"])
|
||||
jpeg_items.extend(["-frames:v", "1"])
|
||||
|
||||
if resolution_arg:
|
||||
jpeg_items.extend(resolution_arg)
|
||||
|
|
@ -497,7 +502,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
|||
"-i", video_file_path,
|
||||
"-analyzeduration", max_int,
|
||||
"-probesize", max_int,
|
||||
"-vframes", "1"
|
||||
"-frames:v", "1"
|
||||
]
|
||||
|
||||
# add output file path
|
||||
|
|
|
|||
|
|
@ -170,7 +170,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
|||
"-analyzeduration", max_int,
|
||||
"-probesize", max_int,
|
||||
"-i", src_path,
|
||||
"-vframes", "1",
|
||||
"-frames:v", "1",
|
||||
dst_path
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -619,8 +619,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
|||
# used for all represe
|
||||
# from temp to final
|
||||
original_directory = (
|
||||
instance.data.get("originalDirname") or instance_stagingdir)
|
||||
|
||||
instance.data.get("originalDirname") or stagingdir)
|
||||
_rootless = self.get_rootless_path(anatomy, original_directory)
|
||||
if _rootless == original_directory:
|
||||
raise KnownPublishError((
|
||||
|
|
@ -684,7 +683,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
|||
|
||||
elif is_sequence_representation:
|
||||
# Collection of files (sequence)
|
||||
src_collections, remainders = clique.assemble(files)
|
||||
src_collections, _remainders = clique.assemble(files)
|
||||
|
||||
src_collection = src_collections[0]
|
||||
destination_indexes = list(src_collection.indexes)
|
||||
|
|
|
|||
client/ayon_core/plugins/publish/integrate_attach_reviewable.py (new file, 138 lines)

@ -0,0 +1,138 @@
|
|||
import copy
|
||||
import pyblish.api
|
||||
from typing import List
|
||||
|
||||
from ayon_core.lib import EnumDef
|
||||
from ayon_core.pipeline import OptionalPyblishPluginMixin
|
||||
|
||||
|
||||
class AttachReviewables(
|
||||
pyblish.api.InstancePlugin, OptionalPyblishPluginMixin
|
||||
):
|
||||
"""Attach reviewable to other instances
|
||||
|
||||
This pre-integrator plugin allows instances to be 'attached to' other
|
||||
instances by moving all its representations over to the other instance.
|
||||
Even though this technically could work for any representation the current
|
||||
intent is to use for reviewables only, like e.g. `review` or `render`
|
||||
product type.
|
||||
|
||||
When the reviewable is attached to another instance, the instance itself
|
||||
will not be published as a separate entity. Instead, the representations
|
||||
will be copied/moved to the instances it is attached to.
|
||||
"""
|
||||
|
||||
families = ["render", "review"]
|
||||
order = pyblish.api.IntegratorOrder - 0.499
|
||||
label = "Attach reviewables"
|
||||
|
||||
settings_category = "core"
|
||||
|
||||
def process(self, instance):
|
||||
# TODO: Support farm.
|
||||
# If instance is being submitted to the farm we should pass through
|
||||
# the 'attached reviewables' metadata to the farm job
|
||||
# TODO: Reviewable frame range and resolutions
|
||||
# Because we are attaching the data to another instance, how do we
|
||||
# correctly propagate the resolution + frame rate to the other
|
||||
# instance? Do we even need to?
|
||||
# TODO: If this were to attach 'renders' to another instance that would
|
||||
# mean there wouldn't necessarily be a render publish separate as a
|
||||
# result. Is that correct expected behavior?
|
||||
attr_values = self.get_attr_values_from_data(instance.data)
|
||||
attach_to = attr_values.get("attach", [])
|
||||
if not attach_to:
|
||||
self.log.debug(
|
||||
"Reviewable is not set to attach to another instance."
|
||||
)
|
||||
return
|
||||
|
||||
attach_instances: List[pyblish.api.Instance] = []
|
||||
for attach_instance_id in attach_to:
|
||||
# Find the `pyblish.api.Instance` matching the `CreatedInstance.id`
|
||||
# in the `attach_to` list
|
||||
attach_instance = next(
|
||||
(
|
||||
_inst
|
||||
for _inst in instance.context
|
||||
if _inst.data.get("instance_id") == attach_instance_id
|
||||
),
|
||||
None,
|
||||
)
|
||||
if attach_instance is None:
|
||||
continue
|
||||
|
||||
# Skip inactive instances
|
||||
if not attach_instance.data.get("active", True):
|
||||
continue
|
||||
|
||||
# For now do not support attaching to 'farm' instances until we
|
||||
# can pass the 'attaching' on to the farm jobs.
|
||||
if attach_instance.data.get("farm"):
|
||||
self.log.warning(
|
||||
"Attaching to farm instances is not supported yet."
|
||||
)
|
||||
continue
|
||||
|
||||
attach_instances.append(attach_instance)
|
||||
|
||||
instances_names = ", ".join(
|
||||
instance.name for instance in attach_instances
|
||||
)
|
||||
self.log.info(
|
||||
f"Attaching reviewable to other instances: {instances_names}"
|
||||
)
|
||||
|
||||
# Copy the representations of this reviewable instance to the other
|
||||
# instance
|
||||
representations = instance.data.get("representations", [])
|
||||
for attach_instance in attach_instances:
|
||||
self.log.info(f"Attaching to {attach_instance.name}")
|
||||
attach_instance.data.setdefault("representations", []).extend(
|
||||
copy.deepcopy(representations)
|
||||
)
|
||||
|
||||
# Delete representations on the reviewable instance itself
|
||||
for repre in representations:
|
||||
self.log.debug(
|
||||
"Marking representation as deleted because it was "
|
||||
f"attached to other instances instead: {repre}"
|
||||
)
|
||||
repre.setdefault("tags", []).append("delete")
|
||||
|
||||
# Stop integrator from trying to integrate this instance
|
||||
if attach_to:
|
||||
instance.data["integrate"] = False
|
||||
|
||||
@classmethod
|
||||
def get_attr_defs_for_instance(cls, create_context, instance):
|
||||
# TODO: Check if instance is actually a 'reviewable'
|
||||
# Filtering of instance, if needed, can be customized
|
||||
if not cls.instance_matches_plugin_families(instance):
|
||||
return []
|
||||
|
||||
items = []
|
||||
for other_instance in create_context.instances:
|
||||
if other_instance == instance:
|
||||
continue
|
||||
|
||||
# Do not allow attaching to other reviewable instances
|
||||
if other_instance.data["productType"] in cls.families:
|
||||
continue
|
||||
|
||||
items.append(
|
||||
{
|
||||
"label": other_instance.label,
|
||||
"value": str(other_instance.id),
|
||||
}
|
||||
)
|
||||
|
||||
return [
|
||||
EnumDef(
|
||||
"attach",
|
||||
label="Attach reviewable",
|
||||
multiselection=True,
|
||||
items=items,
|
||||
tooltip="Attach this reviewable to another instance",
|
||||
)
|
||||
]
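Editor's note: a condensed sketch (not part of the diff) of what attaching does to the publish data, stripped of the pyblish plumbing; the instance dictionaries are illustrative only:

    import copy

    review_instance = {
        "representations": [{"name": "h264_mp4", "files": "review.mp4"}],
    }
    model_instance = {"representations": []}

    # Copy the reviewable representations onto the target instance, then mark
    # the originals for deletion and skip integration of the reviewable itself.
    model_instance["representations"].extend(
        copy.deepcopy(review_instance["representations"])
    )
    for repre in review_instance["representations"]:
        repre.setdefault("tags", []).append("delete")
    review_instance["integrate"] = False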
|
||||
|
|
@ -27,8 +27,10 @@ import collections
|
|||
|
||||
import pyblish.api
|
||||
import ayon_api
|
||||
from ayon_api import RequestTypes
|
||||
from ayon_api.operations import OperationsSession
|
||||
|
||||
|
||||
InstanceFilterResult = collections.namedtuple(
|
||||
"InstanceFilterResult",
|
||||
["instance", "thumbnail_path", "version_id"]
|
||||
|
|
@ -161,6 +163,30 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):
|
|||
return None
|
||||
return os.path.normpath(filled_path)
|
||||
|
||||
def _create_thumbnail(self, project_name: str, src_filepath: str) -> str:
|
||||
"""Upload thumbnail to AYON and return its id.
|
||||
|
||||
This is temporary fix of 'create_thumbnail' function in ayon_api to
|
||||
fix jpeg mime type.
|
||||
|
||||
"""
|
||||
mime_type = None
|
||||
with open(src_filepath, "rb") as stream:
|
||||
if b"\xff\xd8\xff" == stream.read(3):
|
||||
mime_type = "image/jpeg"
|
||||
|
||||
if mime_type is None:
|
||||
return ayon_api.create_thumbnail(project_name, src_filepath)
|
||||
|
||||
response = ayon_api.upload_file(
|
||||
f"projects/{project_name}/thumbnails",
|
||||
src_filepath,
|
||||
request_type=RequestTypes.post,
|
||||
headers={"Content-Type": mime_type},
|
||||
)
|
||||
response.raise_for_status()
|
||||
return response.json()["id"]
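Editor's note: the JPEG detection used above, shown standalone (not part of the diff); the file path argument is a placeholder:

    def is_jpeg(filepath: str) -> bool:
        # JPEG files start with the three-byte marker prefix b"\xff\xd8\xff".
        with open(filepath, "rb") as stream:
            return stream.read(3) == b"\xff\xd8\xff"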
|
||||
|
||||
def _integrate_thumbnails(
|
||||
self,
|
||||
filtered_instance_items,
|
||||
|
|
@ -179,7 +205,7 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):
|
|||
).format(instance_label))
|
||||
continue
|
||||
|
||||
thumbnail_id = ayon_api.create_thumbnail(
|
||||
thumbnail_id = self._create_thumbnail(
|
||||
project_name, thumbnail_path
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -173,7 +173,6 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
|
|||
if frame_end is not None:
|
||||
options["frame_end"] = frame_end
|
||||
|
||||
|
||||
options["label"] = align
|
||||
self._add_burnin(text, align, options, DRAWTEXT)
|
||||
|
||||
|
|
|
|||
|
|
@ -353,7 +353,6 @@ class BaseObj:
|
|||
self.items[item.id] = item
|
||||
item.fill_data_format()
|
||||
|
||||
|
||||
def reset(self):
|
||||
for item in self.items.values():
|
||||
item.reset()
|
||||
|
|
|
|||
|
|
@ -282,7 +282,7 @@ class ItemTable(BaseItem):
|
|||
value.draw(image, drawer)
|
||||
|
||||
def value_width(self):
|
||||
row_heights, col_widths = self.size_values
|
||||
_row_heights, col_widths = self.size_values
|
||||
width = 0
|
||||
for _width in col_widths:
|
||||
width += _width
|
||||
|
|
@ -292,7 +292,7 @@ class ItemTable(BaseItem):
|
|||
return width
|
||||
|
||||
def value_height(self):
|
||||
row_heights, col_widths = self.size_values
|
||||
row_heights, _col_widths = self.size_values
|
||||
height = 0
|
||||
for _height in row_heights:
|
||||
height += _height
|
||||
|
|
@ -569,21 +569,21 @@ class TableField(BaseItem):
|
|||
|
||||
@property
|
||||
def item_pos_x(self):
|
||||
pos_x, pos_y, width, height = (
|
||||
pos_x, _pos_y, _width, _height = (
|
||||
self.parent.content_pos_info_by_cord(self.row_idx, self.col_idx)
|
||||
)
|
||||
return pos_x
|
||||
|
||||
@property
|
||||
def item_pos_y(self):
|
||||
pos_x, pos_y, width, height = (
|
||||
_pos_x, pos_y, _width, _height = (
|
||||
self.parent.content_pos_info_by_cord(self.row_idx, self.col_idx)
|
||||
)
|
||||
return pos_y
|
||||
|
||||
@property
|
||||
def value_pos_x(self):
|
||||
pos_x, pos_y, width, height = (
|
||||
pos_x, _pos_y, width, _height = (
|
||||
self.parent.content_pos_info_by_cord(self.row_idx, self.col_idx)
|
||||
)
|
||||
alignment_hor = self.style["alignment-horizontal"].lower()
|
||||
|
|
@ -605,7 +605,7 @@ class TableField(BaseItem):
|
|||
|
||||
@property
|
||||
def value_pos_y(self):
|
||||
pos_x, pos_y, width, height = (
|
||||
_pos_x, pos_y, _width, height = (
|
||||
self.parent.content_pos_info_by_cord(self.row_idx, self.col_idx)
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -227,6 +227,9 @@ class HierarchyModel(object):
|
|||
self._tasks_by_id = NestedCacheItem(
|
||||
levels=2, default_factory=dict, lifetime=self.lifetime)
|
||||
|
||||
self._entity_ids_by_assignee = NestedCacheItem(
|
||||
levels=2, default_factory=dict, lifetime=self.lifetime)
|
||||
|
||||
self._folders_refreshing = set()
|
||||
self._tasks_refreshing = set()
|
||||
self._controller = controller
|
||||
|
|
@ -238,6 +241,8 @@ class HierarchyModel(object):
|
|||
self._task_items.reset()
|
||||
self._tasks_by_id.reset()
|
||||
|
||||
self._entity_ids_by_assignee.reset()
|
||||
|
||||
def refresh_project(self, project_name):
|
||||
"""Force to refresh folder items for a project.
|
||||
|
||||
|
|
@ -461,6 +466,54 @@ class HierarchyModel(object):
|
|||
output = self.get_task_entities(project_name, {task_id})
|
||||
return output[task_id]
|
||||
|
||||
def get_entity_ids_for_assignees(
|
||||
self, project_name: str, assignees: list[str]
|
||||
):
|
||||
folder_ids = set()
|
||||
task_ids = set()
|
||||
output = {
|
||||
"folder_ids": folder_ids,
|
||||
"task_ids": task_ids,
|
||||
}
|
||||
assignees = set(assignees)
|
||||
for assignee in tuple(assignees):
|
||||
cache = self._entity_ids_by_assignee[project_name][assignee]
|
||||
if cache.is_valid:
|
||||
assignees.discard(assignee)
|
||||
assignee_data = cache.get_data()
|
||||
folder_ids.update(assignee_data["folder_ids"])
|
||||
task_ids.update(assignee_data["task_ids"])
|
||||
|
||||
if not assignees:
|
||||
return output
|
||||
|
||||
tasks = ayon_api.get_tasks(
|
||||
project_name,
|
||||
assignees_all=assignees,
|
||||
fields={"id", "folderId", "assignees"},
|
||||
)
|
||||
tasks_assignee = {}
|
||||
for task in tasks:
|
||||
folder_ids.add(task["folderId"])
|
||||
task_ids.add(task["id"])
|
||||
for assignee in task["assignees"]:
|
||||
tasks_assignee.setdefault(assignee, []).append(task)
|
||||
|
||||
for assignee, tasks in tasks_assignee.items():
|
||||
cache = self._entity_ids_by_assignee[project_name][assignee]
|
||||
assignee_folder_ids = set()
|
||||
assignee_task_ids = set()
|
||||
assignee_data = {
|
||||
"folder_ids": assignee_folder_ids,
|
||||
"task_ids": assignee_task_ids,
|
||||
}
|
||||
for task in tasks:
|
||||
assignee_folder_ids.add(task["folderId"])
|
||||
assignee_task_ids.add(task["id"])
|
||||
cache.update_data(assignee_data)
|
||||
|
||||
return output
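Editor's note: an illustrative sketch (not part of the diff) of the underlying query used by 'get_entity_ids_for_assignees'; the project name and assignee are placeholders:

    import ayon_api

    # Collect folder and task ids for tasks assigned to the given users.
    folder_ids = set()
    task_ids = set()
    for task in ayon_api.get_tasks(
        "example_project",
        assignees_all={"example.artist"},
        fields={"id", "folderId", "assignees"},
    ):
        folder_ids.add(task["folderId"])
        task_ids.add(task["id"])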
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _folder_refresh_event_manager(self, project_name, sender):
|
||||
self._folders_refreshing.add(project_name)
|
||||
|
|
|
|||
|
|
@ -21,8 +21,49 @@ class ThumbnailsModel:
|
|||
self._folders_cache.reset()
|
||||
self._versions_cache.reset()
|
||||
|
||||
def get_thumbnail_path(self, project_name, thumbnail_id):
|
||||
return self._get_thumbnail_path(project_name, thumbnail_id)
|
||||
def get_thumbnail_paths(
|
||||
self,
|
||||
project_name,
|
||||
entity_type,
|
||||
entity_ids,
|
||||
):
|
||||
output = {
|
||||
entity_id: None
|
||||
for entity_id in entity_ids
|
||||
}
|
||||
if not project_name or not entity_type or not entity_ids:
|
||||
return output
|
||||
|
||||
thumbnail_id_by_entity_id = {}
|
||||
if entity_type == "folder":
|
||||
thumbnail_id_by_entity_id = self.get_folder_thumbnail_ids(
|
||||
project_name, entity_ids
|
||||
)
|
||||
|
||||
elif entity_type == "version":
|
||||
thumbnail_id_by_entity_id = self.get_version_thumbnail_ids(
|
||||
project_name, entity_ids
|
||||
)
|
||||
|
||||
if not thumbnail_id_by_entity_id:
|
||||
return output
|
||||
|
||||
entity_ids_by_thumbnail_id = collections.defaultdict(set)
|
||||
for entity_id, thumbnail_id in thumbnail_id_by_entity_id.items():
|
||||
if not thumbnail_id:
|
||||
continue
|
||||
entity_ids_by_thumbnail_id[thumbnail_id].add(entity_id)
|
||||
|
||||
for thumbnail_id, entity_ids in entity_ids_by_thumbnail_id.items():
|
||||
thumbnail_path = self._get_thumbnail_path(
|
||||
project_name, entity_type, next(iter(entity_ids)), thumbnail_id
|
||||
)
|
||||
if not thumbnail_path:
|
||||
continue
|
||||
for entity_id in entity_ids:
|
||||
output[entity_id] = thumbnail_path
|
||||
|
||||
return output
|
||||
|
||||
def get_folder_thumbnail_ids(self, project_name, folder_ids):
|
||||
project_cache = self._folders_cache[project_name]
|
||||
|
|
@ -56,7 +97,13 @@ class ThumbnailsModel:
|
|||
output[version_id] = cache.get_data()
|
||||
return output
|
||||
|
||||
def _get_thumbnail_path(self, project_name, thumbnail_id):
|
||||
def _get_thumbnail_path(
|
||||
self,
|
||||
project_name,
|
||||
entity_type,
|
||||
entity_id,
|
||||
thumbnail_id
|
||||
):
|
||||
if not thumbnail_id:
|
||||
return None
|
||||
|
||||
|
|
@ -64,7 +111,12 @@ class ThumbnailsModel:
|
|||
if thumbnail_id in project_cache:
|
||||
return project_cache[thumbnail_id]
|
||||
|
||||
filepath = get_thumbnail_path(project_name, thumbnail_id)
|
||||
filepath = get_thumbnail_path(
|
||||
project_name,
|
||||
entity_type,
|
||||
entity_id,
|
||||
thumbnail_id
|
||||
)
|
||||
project_cache[thumbnail_id] = filepath
|
||||
return filepath
|
||||
|
||||
|
|
|
|||
|
|
@ -248,4 +248,3 @@ class EnhancedTabBar(QtWidgets.QTabBar):
|
|||
|
||||
else:
|
||||
super().mouseReleaseEvent(event)
|
||||
|
||||
|
|
|
|||
|
|
@ -492,7 +492,7 @@ def show(parent=None):
|
|||
|
||||
try:
|
||||
module.window.close()
|
||||
del(module.window)
|
||||
del module.window
|
||||
except (AttributeError, RuntimeError):
|
||||
pass
|
||||
|
||||
|
|
|
|||
|
|
@ -160,8 +160,8 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
Returns:
|
||||
list[FolderItem]: Minimum possible information needed
|
||||
for visualisation of folder hierarchy.
|
||||
"""
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -180,8 +180,8 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
Returns:
|
||||
list[TaskItem]: Minimum possible information needed
|
||||
for visualisation of tasks.
|
||||
"""
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -190,8 +190,8 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
|
||||
Returns:
|
||||
Union[str, None]: Selected project name.
|
||||
"""
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -200,8 +200,8 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
|
||||
Returns:
|
||||
Union[str, None]: Selected folder id.
|
||||
"""
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -210,8 +210,8 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
|
||||
Returns:
|
||||
Union[str, None]: Selected task id.
|
||||
"""
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -220,8 +220,8 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
|
||||
Returns:
|
||||
Union[str, None]: Selected task name.
|
||||
"""
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -238,8 +238,8 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
|
||||
Returns:
|
||||
dict[str, Union[str, None]]: Selected context.
|
||||
"""
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -249,8 +249,8 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
Args:
project_name (Union[str, None]): Project name or None if no project
is selected.
"""

"""
pass

@abstractmethod
|
||||
|
|
@ -260,8 +260,8 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
Args:
|
||||
folder_id (Union[str, None]): Folder id or None if no folder
|
||||
is selected.
|
||||
"""
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -273,8 +273,8 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
is selected.
|
||||
task_name (Union[str, None]): Task name or None if no task
|
||||
is selected.
|
||||
"""
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
# Actions
|
||||
|
|
@ -290,8 +290,8 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
Returns:
|
||||
list[ActionItem]: List of action items that should be shown
|
||||
for given context.
|
||||
"""
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -303,8 +303,8 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
folder_id (Union[str, None]): Folder id.
|
||||
task_id (Union[str, None]): Task id.
|
||||
action_id (str): Action identifier.
|
||||
"""
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -317,10 +317,10 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
project_name (Union[str, None]): Project name.
|
||||
folder_id (Union[str, None]): Folder id.
|
||||
task_id (Union[str, None]): Task id.
|
||||
action_id (Iterable[str]): Action identifiers.
|
||||
action_ids (Iterable[str]): Action identifiers.
|
||||
enabled (bool): New value of force not open workfile.
|
||||
"""
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
|
|
@ -340,5 +340,17 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
|
|||
Triggers 'controller.refresh.actions.started' event at the beginning
and 'controller.refresh.actions.finished' at the end.
"""

pass

@abstractmethod
def get_my_tasks_entity_ids(self, project_name: str):
"""Get entity ids for my tasks.

Args:
project_name (str): Project name.

Returns:
dict[str, list[str]]: Folder and task ids.

"""
pass
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from ayon_core.lib import Logger
|
||||
from ayon_core.lib import Logger, get_ayon_username
|
||||
from ayon_core.lib.events import QueuedEventSystem
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.tools.common_models import ProjectsModel, HierarchyModel
|
||||
|
|
@ -6,6 +6,8 @@ from ayon_core.tools.common_models import ProjectsModel, HierarchyModel
|
|||
from .abstract import AbstractLauncherFrontEnd, AbstractLauncherBackend
|
||||
from .models import LauncherSelectionModel, ActionsModel
|
||||
|
||||
NOT_SET = object()
|
||||
|
||||
|
||||
class BaseLauncherController(
|
||||
AbstractLauncherFrontEnd, AbstractLauncherBackend
|
||||
|
|
@ -15,6 +17,8 @@ class BaseLauncherController(
|
|||
self._event_system = None
|
||||
self._log = None
|
||||
|
||||
self._username = NOT_SET
|
||||
|
||||
self._selection_model = LauncherSelectionModel(self)
|
||||
self._projects_model = ProjectsModel(self)
|
||||
self._hierarchy_model = HierarchyModel(self)
|
||||
|
|
@ -168,5 +172,19 @@ class BaseLauncherController(
|
|||
|
||||
self._emit_event("controller.refresh.actions.finished")
|
||||
|
||||
def get_my_tasks_entity_ids(self, project_name: str):
|
||||
username = self._get_my_username()
|
||||
assignees = []
|
||||
if username:
|
||||
assignees.append(username)
|
||||
return self._hierarchy_model.get_entity_ids_for_assignees(
|
||||
project_name, assignees
|
||||
)
|
||||
|
||||
def _get_my_username(self):
|
||||
if self._username is NOT_SET:
|
||||
self._username = get_ayon_username()
|
||||
return self._username
|
||||
|
||||
def _emit_event(self, topic, data=None):
|
||||
self.emit_event(topic, data, "controller")
|
||||
|
|
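Hedged sketch (not part of the commit): the shape of the mapping returned by get_my_tasks_entity_ids, based on how HierarchyPage consumes it later in this diff. The controller variable and project name are illustrative.

entity_ids = controller.get_my_tasks_entity_ids("my_project")
folder_ids = entity_ids["folder_ids"]  # folder ids containing tasks assigned to the user
task_ids = entity_ids["task_ids"]      # task ids assigned to the user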
|
|||
|
|
@ -5,17 +5,17 @@ from ayon_core.tools.utils import (
|
|||
PlaceholderLineEdit,
|
||||
SquareButton,
|
||||
RefreshButton,
|
||||
)
|
||||
from ayon_core.tools.utils import (
|
||||
ProjectsCombobox,
|
||||
FoldersWidget,
|
||||
TasksWidget,
|
||||
NiceCheckbox,
|
||||
)
|
||||
from ayon_core.tools.utils.lib import checkstate_int_to_enum
|
||||
|
||||
|
||||
class HierarchyPage(QtWidgets.QWidget):
|
||||
def __init__(self, controller, parent):
|
||||
super(HierarchyPage, self).__init__(parent)
|
||||
super().__init__(parent)
|
||||
|
||||
# Header
|
||||
header_widget = QtWidgets.QWidget(self)
|
||||
|
|
@ -43,23 +43,36 @@ class HierarchyPage(QtWidgets.QWidget):
|
|||
)
content_body.setOrientation(QtCore.Qt.Horizontal)

# - Folders widget with filter
folders_wrapper = QtWidgets.QWidget(content_body)
# - filters
filters_widget = QtWidgets.QWidget(self)

folders_filter_text = PlaceholderLineEdit(folders_wrapper)
folders_filter_text = PlaceholderLineEdit(filters_widget)
folders_filter_text.setPlaceholderText("Filter folders...")

folders_widget = FoldersWidget(controller, folders_wrapper)
my_tasks_tooltip = (
"Filter folders and tasks to only those you are assigned to."
)
my_tasks_label = QtWidgets.QLabel("My tasks", filters_widget)
my_tasks_label.setToolTip(my_tasks_tooltip)

folders_wrapper_layout = QtWidgets.QVBoxLayout(folders_wrapper)
folders_wrapper_layout.setContentsMargins(0, 0, 0, 0)
folders_wrapper_layout.addWidget(folders_filter_text, 0)
folders_wrapper_layout.addWidget(folders_widget, 1)
my_tasks_checkbox = NiceCheckbox(filters_widget)
my_tasks_checkbox.setChecked(False)
my_tasks_checkbox.setToolTip(my_tasks_tooltip)

filters_layout = QtWidgets.QHBoxLayout(filters_widget)
filters_layout.setContentsMargins(0, 0, 0, 0)
filters_layout.addWidget(folders_filter_text, 1)
filters_layout.addWidget(my_tasks_label, 0)
filters_layout.addWidget(my_tasks_checkbox, 0)

# - Folders widget
folders_widget = FoldersWidget(controller, content_body)
folders_widget.set_header_visible(True)

# - Tasks widget
tasks_widget = TasksWidget(controller, content_body)

content_body.addWidget(folders_wrapper)
content_body.addWidget(folders_widget)
content_body.addWidget(tasks_widget)
content_body.setStretchFactor(0, 100)
content_body.setStretchFactor(1, 65)
|
||||
|
|
@ -67,20 +80,27 @@ class HierarchyPage(QtWidgets.QWidget):
|
|||
main_layout = QtWidgets.QVBoxLayout(self)
|
||||
main_layout.setContentsMargins(0, 0, 0, 0)
|
||||
main_layout.addWidget(header_widget, 0)
|
||||
main_layout.addWidget(filters_widget, 0)
|
||||
main_layout.addWidget(content_body, 1)
|
||||
|
||||
btn_back.clicked.connect(self._on_back_clicked)
|
||||
refresh_btn.clicked.connect(self._on_refresh_clicked)
|
||||
folders_filter_text.textChanged.connect(self._on_filter_text_changed)
|
||||
my_tasks_checkbox.stateChanged.connect(
|
||||
self._on_my_tasks_checkbox_state_changed
|
||||
)
|
||||
|
||||
self._is_visible = False
|
||||
self._controller = controller
|
||||
|
||||
self._btn_back = btn_back
|
||||
self._projects_combobox = projects_combobox
|
||||
self._my_tasks_checkbox = my_tasks_checkbox
|
||||
self._folders_widget = folders_widget
|
||||
self._tasks_widget = tasks_widget
|
||||
|
||||
self._project_name = None
|
||||
|
||||
# Post init
|
||||
projects_combobox.set_listen_to_selection_change(self._is_visible)
|
||||
|
||||
|
|
@ -91,10 +111,14 @@ class HierarchyPage(QtWidgets.QWidget):
|
|||
self._projects_combobox.set_listen_to_selection_change(visible)
if visible and project_name:
self._projects_combobox.set_selection(project_name)
self._project_name = project_name

def refresh(self):
self._folders_widget.refresh()
self._tasks_widget.refresh()
self._on_my_tasks_checkbox_state_changed(
self._my_tasks_checkbox.checkState()
)

def _on_back_clicked(self):
self._controller.set_selected_project(None)
|
||||
|
|
@ -104,3 +128,16 @@ class HierarchyPage(QtWidgets.QWidget):
|
|||
|
||||
def _on_filter_text_changed(self, text):
self._folders_widget.set_name_filter(text)

def _on_my_tasks_checkbox_state_changed(self, state):
folder_ids = None
task_ids = None
state = checkstate_int_to_enum(state)
if state == QtCore.Qt.Checked:
entity_ids = self._controller.get_my_tasks_entity_ids(
self._project_name
)
folder_ids = entity_ids["folder_ids"]
task_ids = entity_ids["task_ids"]
self._folders_widget.set_folder_ids_filter(folder_ids)
self._tasks_widget.set_task_ids_filter(task_ids)
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ class LauncherWindow(QtWidgets.QWidget):
|
|||
page_side_anim_interval = 250
|
||||
|
||||
def __init__(self, controller=None, parent=None):
|
||||
super(LauncherWindow, self).__init__(parent)
|
||||
super().__init__(parent)
|
||||
|
||||
if controller is None:
|
||||
controller = BaseLauncherController()
|
||||
|
|
@ -153,14 +153,14 @@ class LauncherWindow(QtWidgets.QWidget):
|
|||
self.resize(520, 740)
|
||||
|
||||
def showEvent(self, event):
|
||||
super(LauncherWindow, self).showEvent(event)
|
||||
super().showEvent(event)
|
||||
self._window_is_active = True
|
||||
if not self._actions_refresh_timer.isActive():
|
||||
self._actions_refresh_timer.start()
|
||||
self._controller.refresh()
|
||||
|
||||
def closeEvent(self, event):
|
||||
super(LauncherWindow, self).closeEvent(event)
|
||||
super().closeEvent(event)
|
||||
self._window_is_active = False
|
||||
self._actions_refresh_timer.stop()
|
||||
|
||||
|
|
@ -176,7 +176,7 @@ class LauncherWindow(QtWidgets.QWidget):
|
|||
self._on_actions_refresh_timeout()
|
||||
self._actions_refresh_timer.start()
|
||||
|
||||
super(LauncherWindow, self).changeEvent(event)
|
||||
super().changeEvent(event)
|
||||
|
||||
def _on_actions_refresh_timeout(self):
|
||||
# Stop timer if widget is not visible
|
||||
|
|
|
|||
|
|
@ -733,7 +733,12 @@ class FrontendLoaderController(_BaseLoaderController):
|
|||
pass

@abstractmethod
def get_thumbnail_path(self, project_name, thumbnail_id):
def get_thumbnail_paths(
self,
project_name,
entity_type,
entity_ids
):
"""Get thumbnail path for thumbnail id.

This method should get a path to a thumbnail based on thumbnail id.
|
||||
|
|
@ -742,10 +747,11 @@ class FrontendLoaderController(_BaseLoaderController):
|
|||
|
||||
Args:
project_name (str): Project name.
thumbnail_id (str): Thumbnail id.
entity_type (str): Entity type.
entity_ids (set[str]): Entity ids.

Returns:
Union[str, None]: Thumbnail path or None if not found.
dict[str, Union[str, None]]: Thumbnail path by entity id.
"""

pass
|
||||
|
|
|
|||
|
|
@ -259,9 +259,14 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
|
|||
project_name, version_ids
)

def get_thumbnail_path(self, project_name, thumbnail_id):
return self._thumbnails_model.get_thumbnail_path(
project_name, thumbnail_id
def get_thumbnail_paths(
self,
project_name,
entity_type,
entity_ids,
):
return self._thumbnails_model.get_thumbnail_paths(
project_name, entity_type, entity_ids
)

def change_products_group(self, project_name, product_ids, group_name):
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
from __future__ import annotations
|
||||
from qtpy import QtGui, QtCore
|
||||
|
||||
from ._multicombobox import (
|
||||
|
|
|
|||
|
|
@ -501,38 +501,29 @@ class LoaderWindow(QtWidgets.QWidget):
|
|||
self._update_thumbnails()

def _update_thumbnails(self):
# TODO make this threaded and show loading animation while running
project_name = self._selected_project_name
thumbnail_ids = set()
entity_type = None
entity_ids = set()
if self._selected_version_ids:
thumbnail_id_by_entity_id = (
self._controller.get_version_thumbnail_ids(
project_name,
self._selected_version_ids
)
)
thumbnail_ids = set(thumbnail_id_by_entity_id.values())
entity_ids = set(self._selected_version_ids)
entity_type = "version"
elif self._selected_folder_ids:
thumbnail_id_by_entity_id = (
self._controller.get_folder_thumbnail_ids(
project_name,
self._selected_folder_ids
entity_ids = set(self._selected_folder_ids)
entity_type = "folder"

thumbnail_path_by_entity_id = self._controller.get_thumbnail_paths(
project_name, entity_type, entity_ids
)
)
thumbnail_ids = set(thumbnail_id_by_entity_id.values())

thumbnail_ids.discard(None)

if not thumbnail_ids:
self._thumbnails_widget.set_current_thumbnails(None)
return

thumbnail_paths = set()
for thumbnail_id in thumbnail_ids:
thumbnail_path = self._controller.get_thumbnail_path(
project_name, thumbnail_id)
thumbnail_paths.add(thumbnail_path)
thumbnail_paths = set(thumbnail_path_by_entity_id.values())
thumbnail_paths.discard(None)
self._thumbnails_widget.set_current_thumbnail_paths(thumbnail_paths)

if thumbnail_paths:
self._thumbnails_widget.set_current_thumbnail_paths(
thumbnail_paths
)
else:
self._thumbnails_widget.set_current_thumbnails(None)

def _on_projects_refresh(self):
self._refresh_handler.set_project_refreshed()
|
||||
|
|
|
|||
|
|
@ -358,7 +358,7 @@ class PublishReportMaker:
|
|||
|
||||
exception = result.get("error")
|
||||
if exception:
|
||||
fname, line_no, func, exc = exception.traceback
|
||||
fname, line_no, func, _ = exception.traceback
|
||||
|
||||
# Conversion of exception into string may crash
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -738,4 +738,3 @@ def main(force=False):
|
|||
sys.exit(1)
|
||||
|
||||
main()
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,6 @@
|
|||
from __future__ import annotations
|
||||
import collections
|
||||
from typing import Optional
|
||||
|
||||
from qtpy import QtWidgets, QtGui, QtCore
|
||||
|
||||
|
|
@ -33,7 +35,10 @@ class FoldersQtModel(QtGui.QStandardItemModel):
|
|||
refreshed = QtCore.Signal()
|
||||
|
||||
def __init__(self, controller):
|
||||
super(FoldersQtModel, self).__init__()
|
||||
super().__init__()
|
||||
|
||||
self.setColumnCount(1)
|
||||
self.setHeaderData(0, QtCore.Qt.Horizontal, "Folders")
|
||||
|
||||
self._controller = controller
|
||||
self._items_by_id = {}
|
||||
|
|
@ -334,6 +339,29 @@ class FoldersQtModel(QtGui.QStandardItemModel):
|
|||
self.refreshed.emit()


class FoldersProxyModel(RecursiveSortFilterProxyModel):
def __init__(self):
super().__init__()

self._folder_ids_filter = None

def set_folder_ids_filter(self, folder_ids: Optional[list[str]]):
if self._folder_ids_filter == folder_ids:
return
self._folder_ids_filter = folder_ids
self.invalidateFilter()

def filterAcceptsRow(self, row, parent_index):
if self._folder_ids_filter is not None:
if not self._folder_ids_filter:
return False
source_index = self.sourceModel().index(row, 0, parent_index)
folder_id = source_index.data(FOLDER_ID_ROLE)
if folder_id not in self._folder_ids_filter:
return False
return super().filterAcceptsRow(row, parent_index)


class FoldersWidget(QtWidgets.QWidget):
"""Folders widget.
|
||||
|
||||
|
|
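Hedged usage sketch (not part of the commit) for the new FoldersProxyModel, assuming a populated FoldersQtModel instance named folders_model:

proxy = FoldersProxyModel()
proxy.setSourceModel(folders_model)
# None disables the id filter, an empty list hides every row, and a
# non-empty list keeps only rows whose FOLDER_ID_ROLE value is listed
# (the inherited recursive name filtering still applies on top).
proxy.set_folder_ids_filter(["folder-id-1", "folder-id-2"])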
@ -369,13 +397,13 @@ class FoldersWidget(QtWidgets.QWidget):
|
|||
refreshed = QtCore.Signal()
|
||||
|
||||
def __init__(self, controller, parent, handle_expected_selection=False):
|
||||
super(FoldersWidget, self).__init__(parent)
|
||||
super().__init__(parent)
|
||||
|
||||
folders_view = TreeView(self)
|
||||
folders_view.setHeaderHidden(True)
|
||||
|
||||
folders_model = FoldersQtModel(controller)
|
||||
folders_proxy_model = RecursiveSortFilterProxyModel()
|
||||
folders_proxy_model = FoldersProxyModel()
|
||||
folders_proxy_model.setSourceModel(folders_model)
|
||||
folders_proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)
|
||||
|
||||
|
|
@ -446,6 +474,18 @@ class FoldersWidget(QtWidgets.QWidget):
|
|||
if name:
|
||||
self._folders_view.expandAll()
|
||||
|
||||
def set_folder_ids_filter(self, folder_ids: Optional[list[str]]):
|
||||
"""Set filter of folder ids.
|
||||
|
||||
Args:
|
||||
folder_ids (list[str]): The list of folder ids.
|
||||
|
||||
"""
|
||||
self._folders_proxy_model.set_folder_ids_filter(folder_ids)
|
||||
|
||||
def set_header_visible(self, visible: bool):
|
||||
self._folders_view.setHeaderHidden(not visible)
|
||||
|
||||
def refresh(self):
|
||||
"""Refresh folders model.
|
||||
|
||||
|
|
|
|||
|
|
@ -286,6 +286,7 @@ class ProjectSortFilterProxy(QtCore.QSortFilterProxyModel):
|
|||
self._sort_by_type = True
|
||||
# Disable case sensitivity
|
||||
self.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)
|
||||
self.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)
|
||||
|
||||
def _type_sort(self, l_index, r_index):
|
||||
if not self._sort_by_type:
|
||||
|
|
@ -349,21 +350,21 @@ class ProjectSortFilterProxy(QtCore.QSortFilterProxyModel):
|
|||
if project_name is None:
|
||||
return True
|
||||
|
||||
string_pattern = self.filterRegularExpression().pattern()
|
||||
if string_pattern:
|
||||
return string_pattern.lower() in project_name.lower()
|
||||
|
||||
# Current project keep always visible
|
||||
default = super(ProjectSortFilterProxy, self).filterAcceptsRow(
|
||||
source_row, source_parent
|
||||
)
|
||||
if not default:
|
||||
return default
|
||||
|
||||
# Make sure current project is visible
|
||||
if index.data(PROJECT_IS_CURRENT_ROLE):
|
||||
return True
|
||||
|
||||
default = super().filterAcceptsRow(source_row, source_parent)
|
||||
if not default:
|
||||
return default
|
||||
|
||||
string_pattern = self.filterRegularExpression().pattern()
|
||||
if (
|
||||
string_pattern
|
||||
and string_pattern.lower() not in project_name.lower()
|
||||
):
|
||||
return False
|
||||
|
||||
if (
|
||||
self._filter_inactive
|
||||
and not index.data(PROJECT_IS_ACTIVE_ROLE)
|
||||
|
|
|
|||
|
|
@ -1,3 +1,6 @@
|
|||
from __future__ import annotations
|
||||
from typing import Optional
|
||||
|
||||
from qtpy import QtWidgets, QtGui, QtCore
|
||||
|
||||
from ayon_core.style import (
|
||||
|
|
@ -343,6 +346,29 @@ class TasksQtModel(QtGui.QStandardItemModel):
|
|||
return self._has_content


class TasksProxyModel(QtCore.QSortFilterProxyModel):
def __init__(self):
super().__init__()

self._task_ids_filter: Optional[set[str]] = None

def set_task_ids_filter(self, task_ids: Optional[set[str]]):
if self._task_ids_filter == task_ids:
return
self._task_ids_filter = task_ids
self.invalidateFilter()

def filterAcceptsRow(self, row, parent_index):
if self._task_ids_filter is not None:
if not self._task_ids_filter:
return False
source_index = self.sourceModel().index(row, 0, parent_index)
task_id = source_index.data(ITEM_ID_ROLE)
if task_id is not None and task_id not in self._task_ids_filter:
return False
return super().filterAcceptsRow(row, parent_index)


class TasksWidget(QtWidgets.QWidget):
"""Tasks widget.
|
||||
|
||||
|
|
@ -364,7 +390,7 @@ class TasksWidget(QtWidgets.QWidget):
|
|||
tasks_view.setIndentation(0)
|
||||
|
||||
tasks_model = TasksQtModel(controller)
|
||||
tasks_proxy_model = QtCore.QSortFilterProxyModel()
|
||||
tasks_proxy_model = TasksProxyModel()
|
||||
tasks_proxy_model.setSourceModel(tasks_model)
|
||||
tasks_proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)
|
||||
|
||||
|
|
@ -490,6 +516,15 @@ class TasksWidget(QtWidgets.QWidget):
|
|||
)
return True

def set_task_ids_filter(self, task_ids: Optional[list[str]]):
"""Set filter of task ids.

Args:
task_ids (list[str]): The list of task ids.

"""
self._tasks_proxy_model.set_task_ids_filter(task_ids)

def _on_tasks_refresh_finished(self, event):
"""Tasks were refreshed in controller.
|
||||
|
||||
|
|
@ -540,7 +575,7 @@ class TasksWidget(QtWidgets.QWidget):
|
|||
if self._tasks_model.is_refreshing:
|
||||
return
|
||||
|
||||
parent_id, task_id, task_name, _ = self._get_selected_item_ids()
|
||||
_parent_id, task_id, task_name, _ = self._get_selected_item_ids()
|
||||
self._controller.set_selected_task(task_id, task_name)
|
||||
self.selection_changed.emit()
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import logging
|
||||
import math
|
||||
from typing import Optional, List, Set, Any
|
||||
|
||||
from qtpy import QtWidgets, QtCore, QtGui
|
||||
|
|
@ -410,10 +411,12 @@ class ExpandingTextEdit(QtWidgets.QTextEdit):
|
|||
document = self.document().clone()
|
||||
document.setTextWidth(document_width)
|
||||
|
||||
return margins.top() + document.size().height() + margins.bottom()
|
||||
return math.ceil(
|
||||
margins.top() + document.size().height() + margins.bottom()
|
||||
)
|
||||
|
||||
def sizeHint(self):
|
||||
width = super(ExpandingTextEdit, self).sizeHint().width()
|
||||
width = super().sizeHint().width()
|
||||
return QtCore.QSize(width, self.heightForWidth(width))
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1016,6 +1016,7 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
|
|||
workdir,
|
||||
filename,
|
||||
template_key,
|
||||
artist_note,
|
||||
):
|
||||
"""Save current state of workfile to workarea.
|
||||
|
||||
|
|
@ -1040,6 +1041,7 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
|
|||
workdir,
|
||||
filename,
|
||||
template_key,
|
||||
artist_note,
|
||||
):
|
||||
"""Action to copy published workfile representation to workarea.
|
||||
|
||||
|
|
@ -1054,12 +1056,13 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
|
|||
workdir (str): Workarea directory.
|
||||
filename (str): Workarea filename.
|
||||
template_key (str): Template key.
|
||||
artist_note (str): Artist note.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def duplicate_workfile(self, src_filepath, workdir, filename):
|
||||
def duplicate_workfile(self, src_filepath, workdir, filename, artist_note):
|
||||
"""Duplicate workfile.
|
||||
|
||||
Workfiles is not opened when done.
|
||||
|
|
@ -1068,6 +1071,7 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
|
|||
src_filepath (str): Source workfile path.
|
||||
workdir (str): Destination workdir.
|
||||
filename (str): Destination filename.
|
||||
artist_note (str): Artist note.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
|
|
|||
|
|
@ -554,6 +554,7 @@ class BaseWorkfileController(
|
|||
workdir,
|
||||
filename,
|
||||
template_key,
|
||||
artist_note,
|
||||
):
|
||||
self._emit_event("save_as.started")
|
||||
|
||||
|
|
@ -565,6 +566,7 @@ class BaseWorkfileController(
|
|||
workdir,
|
||||
filename,
|
||||
template_key,
|
||||
artist_note=artist_note,
|
||||
)
|
||||
except Exception:
|
||||
failed = True
|
||||
|
|
@ -584,6 +586,7 @@ class BaseWorkfileController(
|
|||
workdir,
|
||||
filename,
|
||||
template_key,
|
||||
artist_note,
|
||||
):
|
||||
self._emit_event("copy_representation.started")
|
||||
|
||||
|
|
@ -595,6 +598,7 @@ class BaseWorkfileController(
|
|||
workdir,
|
||||
filename,
|
||||
template_key,
|
||||
artist_note,
|
||||
src_filepath=representation_filepath
|
||||
)
|
||||
except Exception:
|
||||
|
|
@ -608,7 +612,7 @@ class BaseWorkfileController(
|
|||
{"failed": failed},
|
||||
)
|
||||
|
||||
def duplicate_workfile(self, src_filepath, workdir, filename):
|
||||
def duplicate_workfile(self, src_filepath, workdir, filename, artist_note):
|
||||
self._emit_event("workfile_duplicate.started")
|
||||
|
||||
failed = False
|
||||
|
|
@ -701,11 +705,12 @@ class BaseWorkfileController(
|
|||
|
||||
def _save_as_workfile(
|
||||
self,
|
||||
folder_id,
|
||||
task_id,
|
||||
workdir,
|
||||
filename,
|
||||
template_key,
|
||||
folder_id: str,
|
||||
task_id: str,
|
||||
workdir: str,
|
||||
filename: str,
|
||||
template_key: str,
|
||||
artist_note: str,
|
||||
src_filepath=None,
|
||||
):
|
||||
# Trigger before save event
|
||||
|
|
@ -748,7 +753,11 @@ class BaseWorkfileController(
|
|||
self._host_save_workfile(dst_filepath)
|
||||
|
||||
# Make sure workfile info exists
|
||||
self.save_workfile_info(folder_id, task_name, dst_filepath, None)
|
||||
if not artist_note:
|
||||
artist_note = None
|
||||
self.save_workfile_info(
|
||||
folder_id, task_name, dst_filepath, note=artist_note
|
||||
)
|
||||
|
||||
# Create extra folders
|
||||
create_workdir_extra_folders(
|
||||
|
|
|
|||
|
|
@ -462,7 +462,7 @@ class WorkfileEntitiesModel:
|
|||
anatomy = self._controller.project_anatomy
|
||||
|
||||
workdir, filename = os.path.split(filepath)
|
||||
success, rootless_dir = anatomy.find_root_template_from_path(workdir)
|
||||
_, rootless_dir = anatomy.find_root_template_from_path(workdir)
|
||||
return "/".join([
|
||||
os.path.normpath(rootless_dir).replace("\\", "/"),
|
||||
filename
|
||||
|
|
|
|||
|
|
@ -213,7 +213,8 @@ class FilesWidget(QtWidgets.QWidget):
|
|||
self._controller.duplicate_workfile(
|
||||
filepath,
|
||||
result["workdir"],
|
||||
result["filename"]
|
||||
result["filename"],
|
||||
artist_note=result["artist_note"]
|
||||
)
|
||||
|
||||
def _on_workarea_browse_clicked(self):
|
||||
|
|
@ -261,6 +262,7 @@ class FilesWidget(QtWidgets.QWidget):
|
|||
result["workdir"],
|
||||
result["filename"],
|
||||
result["template_key"],
|
||||
artist_note=result["artist_note"]
|
||||
)
|
||||
|
||||
def _on_workarea_path_changed(self, event):
|
||||
|
|
@ -313,6 +315,7 @@ class FilesWidget(QtWidgets.QWidget):
|
|||
result["workdir"],
|
||||
result["filename"],
|
||||
result["template_key"],
|
||||
artist_note=result["artist_note"]
|
||||
)
|
||||
|
||||
def _on_save_as_request(self):
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
from qtpy import QtWidgets, QtCore
|
||||
|
||||
from ayon_core.tools.utils import PlaceholderLineEdit
|
||||
from ayon_core.tools.utils import PlaceholderLineEdit, PlaceholderPlainTextEdit
|
||||
|
||||
|
||||
class SubversionLineEdit(QtWidgets.QWidget):
|
||||
|
|
@ -143,6 +143,11 @@ class SaveAsDialog(QtWidgets.QDialog):
|
|||
version_layout.addWidget(version_input)
|
||||
version_layout.addWidget(last_version_check)
|
||||
|
||||
# Artist note widget
|
||||
artist_note_input = PlaceholderPlainTextEdit(inputs_widget)
|
||||
artist_note_input.setPlaceholderText(
|
||||
"Provide a note about this workfile.")
|
||||
|
||||
# Preview widget
|
||||
preview_widget = QtWidgets.QLabel("Preview filename", inputs_widget)
|
||||
preview_widget.setWordWrap(True)
|
||||
|
|
@ -161,6 +166,7 @@ class SaveAsDialog(QtWidgets.QDialog):
|
|||
subversion_label = QtWidgets.QLabel("Subversion:", inputs_widget)
|
||||
extension_label = QtWidgets.QLabel("Extension:", inputs_widget)
|
||||
preview_label = QtWidgets.QLabel("Preview:", inputs_widget)
|
||||
artist_note_label = QtWidgets.QLabel("Artist Note:", inputs_widget)
|
||||
|
||||
# Build inputs
|
||||
inputs_layout = QtWidgets.QGridLayout(inputs_widget)
|
||||
|
|
@ -172,6 +178,8 @@ class SaveAsDialog(QtWidgets.QDialog):
|
|||
inputs_layout.addWidget(extension_combobox, 2, 1)
|
||||
inputs_layout.addWidget(preview_label, 3, 0)
|
||||
inputs_layout.addWidget(preview_widget, 3, 1)
|
||||
inputs_layout.addWidget(artist_note_label, 4, 0, 1, 2)
|
||||
inputs_layout.addWidget(artist_note_input, 5, 0, 1, 2)
|
||||
|
||||
# Build layout
|
||||
main_layout = QtWidgets.QVBoxLayout(self)
|
||||
|
|
@ -206,11 +214,13 @@ class SaveAsDialog(QtWidgets.QDialog):
|
|||
self._extension_combobox = extension_combobox
|
||||
self._subversion_input = subversion_input
|
||||
self._preview_widget = preview_widget
|
||||
self._artist_note_input = artist_note_input
|
||||
|
||||
self._version_label = version_label
|
||||
self._subversion_label = subversion_label
|
||||
self._extension_label = extension_label
|
||||
self._preview_label = preview_label
|
||||
self._artist_note_label = artist_note_label
|
||||
|
||||
# Post init setup
|
||||
|
||||
|
|
@ -322,6 +332,7 @@ class SaveAsDialog(QtWidgets.QDialog):
|
|||
"folder_id": self._folder_id,
|
||||
"task_id": self._task_id,
|
||||
"template_key": self._template_key,
|
||||
"artist_note": self._artist_note_input.toPlainText(),
|
||||
}
|
||||
self.close()
|
||||
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Package declaring AYON addon 'core' version."""
|
||||
__version__ = "1.1.4+dev"
|
||||
__version__ = "1.1.8+dev"
|
||||
|
|
|
|||
12
docs/css/custom.css
Normal file
12
docs/css/custom.css
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
[data-md-color-scheme="slate"] {
|
||||
/* simple slate overrides */
|
||||
--md-primary-fg-color: hsl(155, 49%, 50%);
|
||||
--md-accent-fg-color: rgb(93, 200, 156);
|
||||
--md-typeset-a-color: hsl(155, 49%, 45%) !important;
|
||||
}
|
||||
[data-md-color-scheme="default"] {
|
||||
/* simple default overrides */
|
||||
--md-primary-fg-color: hsl(155, 49%, 50%);
|
||||
--md-accent-fg-color: rgb(93, 200, 156);
|
||||
--md-typeset-a-color: hsl(155, 49%, 45%) !important;
|
||||
}
|
||||
BIN
docs/img/ay-symbol-blackw-full.png
Normal file
BIN
docs/img/ay-symbol-blackw-full.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 1.6 KiB |
BIN
docs/img/favicon.ico
Normal file
BIN
docs/img/favicon.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 490 B |
1
docs/index.md
Normal file
1
docs/index.md
Normal file
|
|
@ -0,0 +1 @@
|
|||
--8<-- "README.md"
|
||||
1
docs/license.md
Normal file
1
docs/license.md
Normal file
|
|
@ -0,0 +1 @@
|
|||
--8<-- "LICENSE"
|
||||
71
mkdocs.yml
Normal file
71
mkdocs.yml
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
site_name: ayon-core
|
||||
repo_url: https://github.com/ynput/ayon-core
|
||||
|
||||
nav:
|
||||
- Home: index.md
|
||||
- License: license.md
|
||||
|
||||
theme:
|
||||
name: material
|
||||
palette:
|
||||
- media: "(prefers-color-scheme: dark)"
|
||||
scheme: slate
|
||||
toggle:
|
||||
icon: material/toggle-switch-off-outline
|
||||
name: Switch to light mode
|
||||
- media: "(prefers-color-scheme: light)"
|
||||
scheme: default
|
||||
toggle:
|
||||
icon: material/toggle-switch
|
||||
name: Switch to dark mode
|
||||
logo: img/ay-symbol-blackw-full.png
|
||||
favicon: img/favicon.ico
|
||||
features:
|
||||
- navigation.sections
|
||||
- navigation.path
|
||||
- navigation.prune
|
||||
|
||||
extra:
|
||||
version:
|
||||
provider: mike
|
||||
|
||||
extra_css: [css/custom.css]
|
||||
|
||||
markdown_extensions:
|
||||
- mdx_gh_links
|
||||
- pymdownx.snippets
|
||||
|
||||
plugins:
|
||||
- search
|
||||
- offline
|
||||
- mkdocs-autoapi:
|
||||
autoapi_dir: ./
|
||||
autoapi_add_nav_entry: Reference
|
||||
autoapi_ignore:
|
||||
- .*
|
||||
- docs/**/*
|
||||
- tests/**/*
|
||||
- tools/**/*
|
||||
- stubs/**/* # mocha fix
|
||||
- ./**/pythonrc.py # houdini fix
|
||||
- .*/**/*
|
||||
- ./*.py
|
||||
- mkdocstrings:
|
||||
handlers:
|
||||
python:
|
||||
paths:
|
||||
- ./
|
||||
- client/*
|
||||
- server/*
|
||||
- services/*
|
||||
- minify:
|
||||
minify_html: true
|
||||
minify_js: true
|
||||
minify_css: true
|
||||
htmlmin_opts:
|
||||
remove_comments: true
|
||||
cache_safe: true
|
||||
- mike
|
||||
|
||||
hooks:
|
||||
- mkdocs_hooks.py
|
||||
191
mkdocs_hooks.py
Normal file
191
mkdocs_hooks.py
Normal file
|
|
@ -0,0 +1,191 @@
|
|||
import os
|
||||
from pathlib import Path
|
||||
from shutil import rmtree
|
||||
import json
|
||||
import glob
|
||||
import logging
|
||||
|
||||
TMP_FILE = "./missing_init_files.json"
|
||||
NFILES = []
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
class ColorFormatter(logging.Formatter):
|
||||
grey = "\x1b[38;20m"
|
||||
green = "\x1b[32;20m"
|
||||
yellow = "\x1b[33;20m"
|
||||
red = "\x1b[31;20m"
|
||||
bold_red = "\x1b[31;1m"
|
||||
reset = "\x1b[0m"
|
||||
fmt = (
|
||||
"%(asctime)s - %(name)s - %(levelname)s - %(message)s " # noqa
|
||||
"(%(filename)s:%(lineno)d)"
|
||||
)
|
||||
|
||||
FORMATS = {
|
||||
logging.DEBUG: grey + fmt + reset,
|
||||
logging.INFO: green + fmt + reset,
|
||||
logging.WARNING: yellow + fmt + reset,
|
||||
logging.ERROR: red + fmt + reset,
|
||||
logging.CRITICAL: bold_red + fmt + reset,
|
||||
}
|
||||
|
||||
def format(self, record):
|
||||
log_fmt = self.FORMATS.get(record.levelno)
|
||||
formatter = logging.Formatter(log_fmt)
|
||||
return formatter.format(record)
|
||||
|
||||
|
||||
ch = logging.StreamHandler()
|
||||
ch.setFormatter(ColorFormatter())
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
handlers=[ch],
|
||||
)
|
||||
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
def create_init_file(dirpath, msg):
|
||||
global NFILES
|
||||
ini_file = f"{dirpath}/__init__.py"
|
||||
Path(ini_file).touch()
|
||||
NFILES.append(ini_file)
|
||||
logging.info(f"{msg}: created '{ini_file}'")
|
||||
|
||||
|
||||
def create_parent_init_files(dirpath: str, rootpath: str, msg: str):
|
||||
parent_path = dirpath
|
||||
while parent_path != rootpath:
|
||||
parent_path = os.path.dirname(parent_path)
|
||||
parent_init = os.path.join(parent_path, "__init__.py")
|
||||
if not os.path.exists(parent_init):
|
||||
create_init_file(parent_path, msg)
|
||||
else:
|
||||
break
|
||||
|
||||
|
||||
def add_missing_init_files(*roots, msg=""):
|
||||
"""
|
||||
This function takes in one or more root directories as arguments and scans
|
||||
them for Python files without an `__init__.py` file. It generates a JSON
|
||||
file named `missing_init_files.json` containing the paths of these files.
|
||||
|
||||
Args:
|
||||
*roots: Variable number of root directories to scan.
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
|
||||
for root in roots:
|
||||
if not os.path.exists(root):
|
||||
continue
|
||||
rootpath = os.path.abspath(root)
|
||||
for dirpath, dirs, files in os.walk(rootpath):
|
||||
if "__init__.py" in files:
|
||||
continue
|
||||
|
||||
if "." in dirpath:
|
||||
continue
|
||||
|
||||
if not glob.glob(os.path.join(dirpath, "*.py")):
|
||||
continue
|
||||
|
||||
create_init_file(dirpath, msg)
|
||||
create_parent_init_files(dirpath, rootpath, msg)
|
||||
|
||||
with open(TMP_FILE, "w") as f:
|
||||
json.dump(NFILES, f)
|
||||
|
||||
|
||||
def remove_missing_init_files(msg=""):
|
||||
"""
|
||||
This function removes temporary `__init__.py` files created in the
|
||||
`add_missing_init_files()` function. It reads the paths of these files from
|
||||
a JSON file named `missing_init_files.json`.
|
||||
|
||||
Args:
|
||||
None
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
global NFILES
|
||||
nfiles = []
|
||||
if os.path.exists(TMP_FILE):
|
||||
with open(TMP_FILE, "r") as f:
|
||||
nfiles = json.load(f)
|
||||
else:
|
||||
nfiles = NFILES
|
||||
|
||||
for file in nfiles:
|
||||
Path(file).unlink()
|
||||
logging.info(f"{msg}: removed {file}")
|
||||
|
||||
os.remove(TMP_FILE)
|
||||
NFILES = []
|
||||
|
||||
|
||||
def remove_pychache_dirs(msg=""):
|
||||
"""
|
||||
This function walks the current directory and removes all existing
|
||||
'__pycache__' directories.
|
||||
|
||||
Args:
|
||||
msg: An optional message to display during the removal process.
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
nremoved = 0
|
||||
|
||||
for dirpath, dirs, files in os.walk("."):
|
||||
if "__pycache__" in dirs:
|
||||
pydir = Path(f"{dirpath}/__pycache__")
|
||||
rmtree(pydir)
|
||||
nremoved += 1
|
||||
logging.info(f"{msg}: removed '{pydir}'")
|
||||
|
||||
if not nremoved:
|
||||
logging.info(f"{msg}: no __pycache__ dirs found")
|
||||
|
||||
|
||||
# mkdocs hooks ----------------------------------------------------------------
|
||||
|
||||
|
||||
def on_startup(command, dirty):
|
||||
remove_pychache_dirs(msg="HOOK - on_startup")
|
||||
|
||||
|
||||
def on_pre_build(config):
|
||||
"""
|
||||
This function is called before the MkDocs build process begins. It adds
|
||||
temporary `__init__.py` files to directories that do not contain one, to
|
||||
make sure mkdocs doesn't ignore them.
|
||||
"""
|
||||
try:
|
||||
add_missing_init_files(
|
||||
"client",
|
||||
"server",
|
||||
"services",
|
||||
msg="HOOK - on_pre_build",
|
||||
)
|
||||
except BaseException as e:
|
||||
logging.error(e)
|
||||
remove_missing_init_files(
|
||||
msg="HOOK - on_post_build: cleaning up on error !"
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
def on_post_build(config):
|
||||
"""
|
||||
This function is called after the MkDocs build process ends. It removes
|
||||
temporary `__init__.py` files that were added in the `on_pre_build()`
|
||||
function.
|
||||
"""
|
||||
remove_missing_init_files(msg="HOOK - on_post_build")
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
name = "core"
|
||||
title = "Core"
|
||||
version = "1.1.4+dev"
|
||||
version = "1.1.8+dev"
|
||||
|
||||
client_dir = "ayon_core"
|
||||
|
||||
|
|
|
|||
728
poetry.lock
generated
728
poetry.lock
generated
|
|
@ -1,728 +0,0 @@
|
|||
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "appdirs"
|
||||
version = "1.4.4"
|
||||
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
|
||||
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "25.1.0"
|
||||
description = "Classes Without Boilerplate"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"},
|
||||
{file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
|
||||
cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
|
||||
dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
|
||||
docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
|
||||
tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
|
||||
tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""]
|
||||
|
||||
[[package]]
|
||||
name = "ayon-python-api"
|
||||
version = "1.0.12"
|
||||
description = "AYON Python API"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "ayon-python-api-1.0.12.tar.gz", hash = "sha256:8e4c03436df8afdda4c6ad4efce436068771995bb0153a90e003364afa0e7f55"},
|
||||
{file = "ayon_python_api-1.0.12-py3-none-any.whl", hash = "sha256:65f61c2595dd6deb26fed5e3fda7baef887f475fa4b21df12513646ddccf4a7d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
appdirs = ">=1,<2"
|
||||
requests = ">=2.27.1"
|
||||
Unidecode = ">=1.3.0"
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2025.1.31"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
|
||||
{file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfgv"
|
||||
version = "3.4.0"
|
||||
description = "Validate configuration and produce human readable error messages."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
|
||||
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.4.1"
|
||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"},
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"},
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"},
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"},
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"},
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"},
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"},
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"},
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"},
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"},
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"},
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"},
|
||||
{file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"},
|
||||
{file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"},
|
||||
{file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"},
|
||||
{file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"},
|
||||
{file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"},
|
||||
{file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"},
|
||||
{file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"},
|
||||
{file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"},
|
||||
{file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"},
|
||||
{file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"},
|
||||
{file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"},
|
||||
{file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"},
|
||||
{file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"},
|
||||
{file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"},
|
||||
{file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"},
|
||||
{file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"},
|
||||
{file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"},
|
||||
{file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"},
|
||||
{file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"},
|
||||
{file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"},
|
||||
]
|
||||
|
||||
[[package]]
name = "clique"
version = "2.0.0"
description = "Manage collections with common numerical component"
optional = false
python-versions = ">=3.0, <4.0"
groups = ["dev"]
files = [
    {file = "clique-2.0.0-py2.py3-none-any.whl", hash = "sha256:45e2a4c6078382e0b217e5e369494279cf03846d95ee601f93290bed5214c22e"},
    {file = "clique-2.0.0.tar.gz", hash = "sha256:6e1115dbf21b1726f4b3db9e9567a662d6bdf72487c4a0a1f8cb7f10cf4f4754"},
]

[package.extras]
dev = ["lowdown (>=0.2.0,<1)", "pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)", "pytest-runner (>=2.7,<3)", "sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)"]
doc = ["lowdown (>=0.2.0,<1)", "sphinx (>=2,<4)", "sphinx-rtd-theme (>=0.1.6,<1)"]
test = ["pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)", "pytest-runner (>=2.7,<3)"]

[[package]]
name = "codespell"
version = "2.4.1"
description = "Fix common misspellings in text files"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "codespell-2.4.1-py3-none-any.whl", hash = "sha256:3dadafa67df7e4a3dbf51e0d7315061b80d265f9552ebd699b3dd6834b47e425"},
    {file = "codespell-2.4.1.tar.gz", hash = "sha256:299fcdcb09d23e81e35a671bbe746d5ad7e8385972e65dbb833a2eaac33c01e5"},
]

[package.extras]
dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"]
hard-encoding-detection = ["chardet"]
toml = ["tomli ; python_version < \"3.11\""]
types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"]

[[package]]
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["dev"]
markers = "sys_platform == \"win32\""
files = [
    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]

[[package]]
name = "distlib"
version = "0.3.9"
description = "Distribution utilities"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
    {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"},
    {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"},
]

[[package]]
name = "exceptiongroup"
version = "1.2.2"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
    {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
    {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
]

[package.extras]
test = ["pytest (>=6)"]

[[package]]
name = "filelock"
version = "3.17.0"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
    {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"},
    {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"},
]

[package.extras]
docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"]
typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""]

[[package]]
name = "identify"
version = "2.6.7"
description = "File identification library for Python"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
    {file = "identify-2.6.7-py2.py3-none-any.whl", hash = "sha256:155931cb617a401807b09ecec6635d6c692d180090a1cedca8ef7d58ba5b6aa0"},
    {file = "identify-2.6.7.tar.gz", hash = "sha256:3fa266b42eba321ee0b2bb0936a6a6b9e36a1351cbb69055b3082f4193035684"},
]

[package.extras]
license = ["ukkonen"]

[[package]]
name = "idna"
version = "3.10"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
groups = ["dev"]
files = [
    {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
    {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
]

[package.extras]
all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]

[[package]]
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
    {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
    {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]

[[package]]
name = "mock"
version = "5.1.0"
description = "Rolling backport of unittest.mock for all Pythons"
optional = false
python-versions = ">=3.6"
groups = ["dev"]
files = [
    {file = "mock-5.1.0-py3-none-any.whl", hash = "sha256:18c694e5ae8a208cdb3d2c20a993ca1a7b0efa258c247a1e565150f477f83744"},
    {file = "mock-5.1.0.tar.gz", hash = "sha256:5e96aad5ccda4718e0a229ed94b2024df75cc2d55575ba5762d31f5767b8767d"},
]

[package.extras]
build = ["blurb", "twine", "wheel"]
docs = ["sphinx"]
test = ["pytest", "pytest-cov"]

[[package]]
name = "nodeenv"
version = "1.9.1"
description = "Node.js virtual environment builder"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["dev"]
files = [
    {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
    {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
]

[[package]]
name = "opentimelineio"
version = "0.17.0"
description = "Editorial interchange format and API"
optional = false
python-versions = "!=3.9.0,>=3.7"
groups = ["dev"]
files = [
    {file = "OpenTimelineIO-0.17.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:2dd31a570cabfd6227c1b1dd0cc038da10787492c26c55de058326e21fe8a313"},
    {file = "OpenTimelineIO-0.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a1da5d4803d1ba5e846b181a9e0f4a392c76b9acc5e08947772bc086f2ebfc0"},
    {file = "OpenTimelineIO-0.17.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3527977aec8202789a42d60e1e0dc11b4154f585ef72921760445f43e7967a00"},
    {file = "OpenTimelineIO-0.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3aafb4c50455832ed2627c2cac654b896473a5c1f8348ddc07c10be5cfbd59"},
    {file = "OpenTimelineIO-0.17.0-cp310-cp310-win32.whl", hash = "sha256:fee45af9f6330773893cd0858e92f8256bb5bde4229b44a76f03e59a9fb1b1b6"},
    {file = "OpenTimelineIO-0.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:d51887619689c21d67cc4b11b1088f99ae44094513315e7a144be00f1393bfa8"},
    {file = "OpenTimelineIO-0.17.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:cbf05c3e8c0187969f79e91f7495d1f0dc3609557874d8e601ba2e072c70ddb1"},
    {file = "OpenTimelineIO-0.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d3430c3f4e88c5365d7b6afbee920b0815b62ecf141abe44cd739c9eedc04284"},
    {file = "OpenTimelineIO-0.17.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1912345227b0bd1654c7153863eadbcee60362aa46340678e576e5d2aa3106a"},
    {file = "OpenTimelineIO-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51e06eb11a868d970c1534e39faf916228d5163bf3598076d408d8f393ab0bd4"},
    {file = "OpenTimelineIO-0.17.0-cp311-cp311-win32.whl", hash = "sha256:5c3a3f4780b25a8c1a80d788becba691d12b629069ad8783d0db21027639276f"},
    {file = "OpenTimelineIO-0.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:43c8726b33af30ba42928972192311ea0f986edbbd5f74651bada182d4fe805c"},
    {file = "OpenTimelineIO-0.17.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:9a9af4105a088c0ab131780e49db268db7e37871aac33db842de6b2b16f14e39"},
    {file = "OpenTimelineIO-0.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8e653ad1dd3b85f5c312a742dc24b61b330964aa391dc5bc072fe8b9c85adff1"},
    {file = "OpenTimelineIO-0.17.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a77823c27a1b93c6b87682372c3734ac5fddc10bfe53875e657d43c60fb885"},
    {file = "OpenTimelineIO-0.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4f4efcf3ddd81b62c4feb49a0bcc309b50ffeb6a8c48ab173d169a029006f4d"},
    {file = "OpenTimelineIO-0.17.0-cp312-cp312-win32.whl", hash = "sha256:9872ab74a20bb2bb3a50af04e80fe9238998d67d6be4e30e45aebe25d3eefac6"},
    {file = "OpenTimelineIO-0.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:c83b78be3312d3152d7e07ab32b0086fe220acc2a5b035b70ad69a787c0ece62"},
    {file = "OpenTimelineIO-0.17.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:0e671a6f2a1f772445bb326c7640dc977cfc3db589fe108a783a0311939cfac8"},
    {file = "OpenTimelineIO-0.17.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b931a3189b4ce064f06f15a89fe08ef4de01f7dcf0abc441fe2e02ef2a3311bb"},
    {file = "OpenTimelineIO-0.17.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:923cb54d806c981cf1e91916c3e57fba5664c22f37763dd012bad5a5a7bd4db4"},
    {file = "OpenTimelineIO-0.17.0-cp37-cp37m-win32.whl", hash = "sha256:8e16598c5084dcb21df3d83978b0e5f72300af9edd4cdcb85e3b0ba5da0df4e8"},
    {file = "OpenTimelineIO-0.17.0-cp37-cp37m-win_amd64.whl", hash = "sha256:7eed5033494888fb3f802af50e60559e279b2f398802748872903c2f54efd2c9"},
    {file = "OpenTimelineIO-0.17.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:118baa22b9227da5003bee653601a68686ae2823682dcd7d13c88178c63081c3"},
    {file = "OpenTimelineIO-0.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:43389eacdee2169de454e1c79ecfea82f54a9e73b67151427a9b621349a22b7f"},
    {file = "OpenTimelineIO-0.17.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17659b1e6aa42ed617a942f7a2bfc6ecc375d0464ec127ce9edf896278ecaee9"},
    {file = "OpenTimelineIO-0.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36d5ea8cfbebf3c9013cc680eef5be48bffb515aafa9dc31e99bf66052a4ca3d"},
    {file = "OpenTimelineIO-0.17.0-cp38-cp38-win32.whl", hash = "sha256:cc67c74eb4b73bc0f7d135d3ff3dbbd86b2d451a9b142690a8d1631ad79c46f2"},
    {file = "OpenTimelineIO-0.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:69b39079bee6fa4aff34c6ad6544df394bc7388483fa5ce958ecd16e243a53ad"},
    {file = "OpenTimelineIO-0.17.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a33554894dea17c22feec0201991e705c2c90a679ba2a012a0c558a7130df711"},
    {file = "OpenTimelineIO-0.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b1ad3b3155370245b851b2f7b60006b2ebbb5bb76dd0fdc49bb4dce73fa7d96"},
    {file = "OpenTimelineIO-0.17.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:030454a9c0e9e82e5a153119f9afb8f3f4e64a3b27f80ac0dcde44b029fd3f3f"},
    {file = "OpenTimelineIO-0.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bce64376a28919533bd4f744ff8885118abefa73f78fd408f95fa7a9489855b6"},
    {file = "OpenTimelineIO-0.17.0-cp39-cp39-win32.whl", hash = "sha256:fa8cdceb25f9003c3c0b5b32baef2c764949d88b867161ddc6f44f48f6bbfa4a"},
    {file = "OpenTimelineIO-0.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:fbcf8a000cd688633c8dc5d22e91912013c67c674329eba603358e3b54da32bf"},
    {file = "opentimelineio-0.17.0.tar.gz", hash = "sha256:10ef324e710457e9977387cd9ef91eb24a9837bfb370aec3330f9c0f146cea85"},
]

[package.extras]
dev = ["check-manifest", "coverage (>=4.5)", "flake8 (>=3.5)", "urllib3 (>=1.24.3)"]
view = ["PySide2 (>=5.11,<6.0) ; platform_machine == \"x86_64\"", "PySide6 (>=6.2,<7.0) ; platform_machine == \"aarch64\""]

[[package]]
name = "packaging"
version = "24.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
    {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
]

[[package]]
name = "platformdirs"
version = "4.3.6"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
    {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
]

[package.extras]
docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
type = ["mypy (>=1.11.2)"]

[[package]]
name = "pluggy"
version = "1.5.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
    {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
]

[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]

[[package]]
name = "pre-commit"
version = "3.8.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
    {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"},
    {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"},
]

[package.dependencies]
cfgv = ">=2.0.0"
identify = ">=1.0.0"
nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"

[[package]]
name = "pyblish-base"
version = "1.8.12"
description = "Plug-in driven automation framework for content"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
    {file = "pyblish-base-1.8.12.tar.gz", hash = "sha256:ebc184eb038864380555227a8b58055dd24ece7e6ef7f16d33416c718512871b"},
    {file = "pyblish_base-1.8.12-py2.py3-none-any.whl", hash = "sha256:2cbe956bfbd4175a2d7d22b344cd345800f4d4437153434ab658fc12646a11e8"},
]

[[package]]
name = "pytest"
version = "8.3.4"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"},
    {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"},
]

[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=1.5,<2"
tomli = {version = ">=1", markers = "python_version < \"3.11\""}

[package.extras]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]

[[package]]
name = "pytest-print"
version = "1.0.2"
description = "pytest-print adds the printer fixture you can use to print messages to the user (directly to the pytest runner, not stdout)"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "pytest_print-1.0.2-py3-none-any.whl", hash = "sha256:3ae7891085dddc3cd697bd6956787240107fe76d6b5cdcfcd782e33ca6543de9"},
    {file = "pytest_print-1.0.2.tar.gz", hash = "sha256:2780350a7bbe7117f99c5d708dc7b0431beceda021b1fd3f11200670d7f33679"},
]

[package.dependencies]
pytest = ">=8.3.2"

[package.extras]
test = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "pytest-mock (>=3.14)"]

[[package]]
name = "pyyaml"
version = "6.0.2"
description = "YAML parser and emitter for Python"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
    {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
    {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
    {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
    {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
    {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
    {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
    {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
    {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
    {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
    {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
    {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
    {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
    {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
    {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
    {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
    {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
    {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
    {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
    {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
    {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
    {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
    {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
    {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
    {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
    {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
    {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
    {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
    {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
    {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
    {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
    {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
    {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
    {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
    {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
    {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
    {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
    {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
    {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
    {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
    {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
    {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
    {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
    {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
    {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
    {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
    {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
    {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
    {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
    {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
    {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
    {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
    {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
]

[[package]]
name = "requests"
version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
    {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
]

[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<3"

[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "ruff"
version = "0.3.7"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
    {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"},
    {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"},
    {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"},
    {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"},
    {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"},
    {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"},
    {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"},
    {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"},
    {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"},
    {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"},
    {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"},
]

[[package]]
name = "semver"
version = "3.0.4"
description = "Python helper for Semantic Versioning (https://semver.org)"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
    {file = "semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746"},
    {file = "semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602"},
]

[[package]]
name = "tomli"
version = "2.2.1"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"},
    {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"},
    {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"},
    {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"},
    {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"},
    {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"},
    {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"},
    {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"},
    {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"},
    {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"},
    {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"},
    {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"},
    {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"},
    {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"},
    {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"},
    {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"},
    {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"},
    {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"},
    {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"},
    {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"},
    {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"},
    {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"},
    {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"},
    {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"},
    {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"},
    {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"},
    {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"},
    {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"},
    {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"},
    {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"},
    {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"},
    {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"},
]

[[package]]
name = "unidecode"
version = "1.3.8"
description = "ASCII transliterations of Unicode text"
optional = false
python-versions = ">=3.5"
groups = ["dev"]
files = [
    {file = "Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39"},
    {file = "Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4"},
]

[[package]]
name = "urllib3"
version = "2.3.0"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
    {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"},
    {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"},
]

[package.extras]
brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]

[[package]]
name = "virtualenv"
version = "20.29.2"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
    {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"},
    {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"},
]

[package.dependencies]
distlib = ">=0.3.7,<1"
filelock = ">=3.12.2,<4"
platformdirs = ">=3.9.1,<5"

[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""]

[metadata]
lock-version = "2.1"
python-versions = ">=3.9.1,<3.10"
content-hash = "0a399d239c49db714c1166c20286fdd5cd62faf12e45ab85833c4d6ea7a04a2a"

@@ -5,7 +5,7 @@
[tool.poetry]
name = "ayon-core"
version = "1.1.4+dev"
version = "1.1.8+dev"
description = ""
authors = ["Ynput Team <team@ynput.io>"]
readme = "README.md"

@@ -29,6 +29,17 @@ attrs = "^25.0.0"
pyblish-base = "^1.8.7"
clique = "^2.0.0"
opentimelineio = "^0.17.0"
tomlkit = "^0.13.2"
requests = "^2.32.3"
mkdocs-material = "^9.6.7"
mkdocs-autoapi = "^0.4.0"
mkdocstrings-python = "^1.16.2"
mkdocs-minify-plugin = "^0.8.0"
markdown-checklist = "^0.4.4"
mdx-gh-links = "^0.4"
pymdown-extensions = "^10.14.3"
mike = "^2.1.3"
mkdocstrings-shell = "^1.0.2"

[tool.ruff]

@@ -72,6 +83,8 @@ indent-width = 4
target-version = "py39"

[tool.ruff.lint]
preview = true
pydocstyle.convention = "google"
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
select = ["E", "F", "W"]
ignore = []

@@ -12,6 +12,10 @@ from ayon_server.settings import (
from ayon_server.types import ColorRGBA_uint8


class EnabledModel(BaseSettingsModel):
    enabled: bool = SettingsField(True)


class ValidateBaseModel(BaseSettingsModel):
    _isGroup = True
    enabled: bool = SettingsField(True)

@@ -925,6 +929,20 @@ class IntegrateHeroVersionModel(BaseSettingsModel):
        "hero versions.")


class CollectRenderedFilesModel(BaseSettingsModel):
    remove_files: bool = SettingsField(
        False,
        title="Remove rendered files",
        description=(
            "Remove rendered files and metadata json on publish.\n\n"
            "Note that when enabled but the render is to a configured "
            "persistent staging directory the files will not be removed. "
            "However with this disabled the files will **not** be removed in "
            "either case."
        )
    )


class CleanUpModel(BaseSettingsModel):
    _isGroup = True
    paterns: list[str] = SettingsField( # codespell:ignore paterns

@@ -1026,6 +1044,21 @@ class PublishPuginsModel(BaseSettingsModel):
        default_factory=IntegrateHeroVersionModel,
        title="Integrate Hero Version"
    )
    AttachReviewables: EnabledModel = SettingsField(
        default_factory=EnabledModel,
        title="Attach Reviewables",
        description=(
            "When enabled, expose an 'Attach Reviewables' attribute on review"
            " and render instances in the publisher to allow including the"
            " media to be attached to another instance.\n\n"
            "If a reviewable is attached to another instance it will not be "
            "published as a render/review product of its own."
        )
    )
    CollectRenderedFiles: CollectRenderedFilesModel = SettingsField(
        default_factory=CollectRenderedFilesModel,
        title="Clean up farm rendered files"
    )
    CleanUp: CleanUpModel = SettingsField(
        default_factory=CleanUpModel,
        title="Clean Up"

@@ -1410,6 +1443,12 @@ DEFAULT_PUBLISH_VALUES = {
        ],
        "use_hardlinks": False
    },
    "AttachReviewables": {
        "enabled": True,
    },
    "CollectRenderedFiles": {
        "remove_files": False
    },
    "CleanUp": {
        "paterns": [], # codespell:ignore paterns
        "remove_temp_renders": False

@@ -101,6 +101,7 @@ def test_image_sequence():
        expected_data,
    )


def test_media_retimed():
    """
    EXR image sequence.

@@ -215,6 +215,7 @@ def test_short_movie_tail_gap_handles():

    assert calls == expected


def test_multiple_review_clips_no_gap():
    """
    Use multiple review clips (image sequence).

@@ -298,6 +299,7 @@ def test_multiple_review_clips_no_gap():

    assert calls == expected


def test_multiple_review_clips_with_gap():
    """
    Use multiple review clips (image sequence) with gap.

@@ -257,7 +257,6 @@ def test_movie_timewarp():
    )


def test_img_sequence_no_handles():
    """
    Img sequence clip (no embedded timecode)

@@ -334,6 +333,7 @@ def test_img_sequence_relative_source_range():
        expected_data
    )


def test_img_sequence_conform_to_23_976fps():
    """
    Img sequence clip

@@ -409,6 +409,7 @@ def test_img_sequence_reverse_speed_no_tc():
        handle_end=0,
    )


def test_img_sequence_reverse_speed_from_24_to_23_976fps():
    """
    Img sequence clip