Merge branch 'develop' into bugfix/ociodisplay

Commit 960f3b0fb7, authored by timsergeeff on 2025-11-05 18:59:38 +03:00, committed by GitHub
No known key found for this signature in database (GPG key ID: B5690EEEBB952194)
30 changed files with 737 additions and 344 deletions


@@ -35,6 +35,9 @@ body:
       label: Version
       description: What version are you running? Look to AYON Tray
       options:
+        - 1.6.7
+        - 1.6.6
+        - 1.6.5
        - 1.6.4
        - 1.6.3
        - 1.6.2


@@ -2,7 +2,6 @@
 """Base class for AYON addons."""
 from __future__ import annotations
-import copy
 import os
 import sys
 import time
@@ -13,6 +12,7 @@ import collections
 import warnings
 from uuid import uuid4
 from abc import ABC, abstractmethod
+from urllib.parse import urlencode
 from types import ModuleType
 import typing
 from typing import Optional, Any, Union
@@ -136,39 +136,50 @@ def load_addons(force: bool = False) -> None:
         time.sleep(0.1)
 
 
-def _get_ayon_bundle_data() -> Optional[dict[str, Any]]:
+def _get_ayon_bundle_data() -> tuple[
+    dict[str, Any], Optional[dict[str, Any]]
+]:
     studio_bundle_name = os.environ.get("AYON_STUDIO_BUNDLE_NAME")
     project_bundle_name = os.getenv("AYON_BUNDLE_NAME")
+    # If AYON launcher <1.4.0 was used
+    if not studio_bundle_name:
+        studio_bundle_name = project_bundle_name
+
     bundles = ayon_api.get_bundles()["bundles"]
-    project_bundle = next(
+    studio_bundle = next(
        (
            bundle
            for bundle in bundles
-            if bundle["name"] == project_bundle_name
+            if bundle["name"] == studio_bundle_name
        ),
        None
    )
-    studio_bundle = None
-    if studio_bundle_name and project_bundle_name != studio_bundle_name:
-        studio_bundle = next(
+    if studio_bundle is None:
+        raise RuntimeError(f"Failed to find bundle '{studio_bundle_name}'.")
+
+    project_bundle = None
+    if project_bundle_name and project_bundle_name != studio_bundle_name:
+        project_bundle = next(
            (
                bundle
                for bundle in bundles
-                if bundle["name"] == studio_bundle_name
+                if bundle["name"] == project_bundle_name
            ),
            None
        )
-    if project_bundle and studio_bundle:
-        addons = copy.deepcopy(studio_bundle["addons"])
-        addons.update(project_bundle["addons"])
-        project_bundle["addons"] = addons
-    return project_bundle
+        if project_bundle is None:
+            raise RuntimeError(
+                f"Failed to find project bundle '{project_bundle_name}'."
+            )
+
+    return studio_bundle, project_bundle
 
 
 def _get_ayon_addons_information(
-    bundle_info: dict[str, Any]
-) -> list[dict[str, Any]]:
+    studio_bundle: dict[str, Any],
+    project_bundle: Optional[dict[str, Any]],
+) -> dict[str, str]:
     """Receive information about addons to use from server.
 
     Todos:
@@ -181,22 +192,20 @@ def _get_ayon_addons_information(
         list[dict[str, Any]]: List of addon information to use.
 
     """
-    output = []
-    bundle_addons = bundle_info["addons"]
-    addons = ayon_api.get_addons_info()["addons"]
-    for addon in addons:
-        name = addon["name"]
-        versions = addon.get("versions")
-        addon_version = bundle_addons.get(name)
-        if addon_version is None or not versions:
-            continue
-        version = versions.get(addon_version)
-        if version:
-            version = copy.deepcopy(version)
-            version["name"] = name
-            version["version"] = addon_version
-            output.append(version)
-    return output
+    key_values = {
+        "summary": "true",
+        "bundle_name": studio_bundle["name"],
+    }
+    if project_bundle:
+        key_values["project_bundle_name"] = project_bundle["name"]
+
+    query = urlencode(key_values)
+
+    response = ayon_api.get(f"settings?{query}")
+    return {
+        addon["name"]: addon["version"]
+        for addon in response.data["addons"]
+    }
 
 
 def _load_ayon_addons(log: logging.Logger) -> list[ModuleType]:
@@ -214,8 +223,8 @@ def _load_ayon_addons(log: logging.Logger) -> list[ModuleType]:
     """
     all_addon_modules = []
-    bundle_info = _get_ayon_bundle_data()
-    addons_info = _get_ayon_addons_information(bundle_info)
+    studio_bundle, project_bundle = _get_ayon_bundle_data()
+    addons_info = _get_ayon_addons_information(studio_bundle, project_bundle)
     if not addons_info:
         return all_addon_modules
@@ -227,17 +236,16 @@ def _load_ayon_addons(log: logging.Logger) -> list[ModuleType]:
     dev_addons_info = {}
     if dev_mode_enabled:
         # Get dev addons info only when dev mode is enabled
-        dev_addons_info = bundle_info.get("addonDevelopment", dev_addons_info)
+        dev_addons_info = studio_bundle.get(
+            "addonDevelopment", dev_addons_info
+        )
 
     addons_dir_exists = os.path.exists(addons_dir)
     if not addons_dir_exists:
         log.warning(
             f"Addons directory does not exists. Path \"{addons_dir}\"")
 
-    for addon_info in addons_info:
-        addon_name = addon_info["name"]
-        addon_version = addon_info["version"]
+    for addon_name, addon_version in addons_info.items():
         # core addon does not have any addon object
         if addon_name == "core":
             continue
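
For reference, a minimal sketch of the query string that the reworked _get_ayon_addons_information() builds with urlencode; only "summary" and the key names come from the diff above, the bundle names are made up for illustration:

    from urllib.parse import urlencode

    # Hypothetical bundle names, purely illustrative.
    key_values = {
        "summary": "true",
        "bundle_name": "studio-2025-11",
        "project_bundle_name": "project-2025-11",
    }
    query = urlencode(key_values)
    # Addon versions are then read from the server's 'settings' endpoint:
    # settings?summary=true&bundle_name=studio-2025-11&project_bundle_name=project-2025-11
    print(f"settings?{query}")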


@@ -11,6 +11,7 @@ from .local_settings import (
     get_launcher_storage_dir,
     get_addons_resources_dir,
     get_local_site_id,
+    get_ayon_user_entity,
     get_ayon_username,
 )
 from .ayon_connection import initialize_ayon_connection
@@ -73,6 +74,7 @@ from .log import (
 )
 from .path_templates import (
+    DefaultKeysDict,
     TemplateUnsolved,
     StringTemplate,
     FormatObject,
@@ -148,6 +150,7 @@ __all__ = [
     "get_launcher_storage_dir",
     "get_addons_resources_dir",
     "get_local_site_id",
+    "get_ayon_user_entity",
     "get_ayon_username",
 
     "initialize_ayon_connection",
@@ -228,6 +231,7 @@ __all__ = [
     "get_version_from_path",
     "get_last_version_from_path",
 
+    "DefaultKeysDict",
     "TemplateUnsolved",
     "StringTemplate",
     "FormatObject",


@@ -5,6 +5,7 @@ import json
 import platform
 import configparser
 import warnings
+import copy
 from datetime import datetime
 from abc import ABC, abstractmethod
 from functools import lru_cache
@@ -13,6 +14,8 @@ from typing import Optional, Any
 import platformdirs
 import ayon_api
 
+from .cache import NestedCacheItem, CacheItem
+
 _PLACEHOLDER = object()
@@ -23,6 +26,7 @@ class RegistryItemNotFound(ValueError):
 
 class _Cache:
     username = None
+    user_entities_by_name = NestedCacheItem()
 
 
 def _get_ayon_appdirs(*args: str) -> str:
@@ -569,6 +573,68 @@ def get_local_site_id():
     return site_id
 
 
+def _get_ayon_service_username() -> Optional[str]:
+    # TODO @iLLiCiTiT - do not use private attribute of 'ServerAPI', rather
+    #   use public method to get username from connection stack.
+    con = ayon_api.get_server_api_connection()
+    user_stack = getattr(con, "_as_user_stack", None)
+    if user_stack is None:
+        return None
+    return user_stack.username
+
+
+def get_ayon_user_entity(username: Optional[str] = None) -> dict[str, Any]:
+    """AYON user entity used for templates and publishing.
+
+    Note:
+        Usually only service and admin users can receive the full user entity.
+
+    Args:
+        username (Optional[str]): Username of the user. If not passed, then
+            the current user in 'ayon_api' is used.
+
+    Returns:
+        dict[str, Any]: User entity.
+
+    """
+    service_username = _get_ayon_service_username()
+    # Handle service user handling first
+    if service_username:
+        if username is None:
+            username = service_username
+        cache: CacheItem = _Cache.user_entities_by_name[username]
+        if not cache.is_valid:
+            if username == service_username:
+                user = ayon_api.get_user()
+            else:
+                user = ayon_api.get_user(username)
+            cache.update_data(user)
+        return copy.deepcopy(cache.get_data())
+
+    # Cache current user
+    current_user = None
+    if _Cache.username is None:
+        current_user = ayon_api.get_user()
+        _Cache.username = current_user["name"]
+
+    if username is None:
+        username = _Cache.username
+
+    cache: CacheItem = _Cache.user_entities_by_name[username]
+    if not cache.is_valid:
+        user = None
+        if username == _Cache.username:
+            if current_user is None:
+                current_user = ayon_api.get_user()
+            user = current_user
+
+        if user is None:
+            user = ayon_api.get_user(username)
+        cache.update_data(user)
+    return copy.deepcopy(cache.get_data())
+
+
 def get_ayon_username():
     """AYON username used for templates and publishing.
@@ -578,20 +644,5 @@ def get_ayon_username():
         str: Username.
 
     """
-    # Look for username in the connection stack
-    # - this is used when service is working as other user
-    #   (e.g. in background sync)
-    # TODO @iLLiCiTiT - do not use private attribute of 'ServerAPI', rather
-    #   use public method to get username from connection stack.
-    con = ayon_api.get_server_api_connection()
-    user_stack = getattr(con, "_as_user_stack", None)
-    if user_stack is not None:
-        username = user_stack.username
-        if username is not None:
-            return username
-
-    # Cache the username to avoid multiple API calls
-    # - it is not expected that user would change
-    if _Cache.username is None:
-        _Cache.username = ayon_api.get_user()["name"]
-    return _Cache.username
+    user = get_ayon_user_entity()
+    return user["name"]
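
A short usage sketch for the new get_ayon_user_entity() helper, assuming 'ayon_api' already has a configured server connection (the entity keys below mirror the ones used elsewhere in this commit):

    from ayon_core.lib import get_ayon_user_entity, get_ayon_username

    user = get_ayon_user_entity()      # current user, cached after first call
    print(user["name"], get_ayon_username())

    # The function returns a deep copy, so local changes do not leak into
    # the cache that later calls (and get_ayon_username) rely on.
    user["name"] = "changed-locally"
    print(get_ayon_user_entity()["name"])  # still the real username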


@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 import re
 import copy
@@ -5,11 +7,7 @@ import numbers
 import warnings
 import platform
 from string import Formatter
-import typing
-from typing import List, Dict, Any, Set
-
-if typing.TYPE_CHECKING:
-    from typing import Union
+from typing import Any, Union, Iterable
 
 SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)")
 OPTIONAL_PATTERN = re.compile(r"(<.*?[^{0]*>)[^0-9]*?")
@@ -44,6 +42,54 @@ class TemplateUnsolved(Exception):
        )
 
 
+class DefaultKeysDict(dict):
+    """Dictionary that supports the default key to use for str conversion.
+
+    Is helpful for changes of a key in a template from string to dictionary
+    for example '{folder}' -> '{folder[name]}'.
+
+    >>> data = DefaultKeysDict(
+    >>>     "name",
+    >>>     {"folder": {"name": "FolderName"}}
+    >>> )
+    >>> print("{folder[name]}".format_map(data))
+    FolderName
+    >>> print("{folder}".format_map(data))
+    FolderName
+
+    Args:
+        default_key (Union[str, Iterable[str]]): Default key to use for str
+            conversion. Can also expect multiple keys for more nested
+            dictionary.
+
+    """
+    def __init__(
+        self, default_keys: Union[str, Iterable[str]], *args, **kwargs
+    ) -> None:
+        if isinstance(default_keys, str):
+            default_keys = [default_keys]
+        else:
+            default_keys = list(default_keys)
+
+        if not default_keys:
+            raise ValueError(
+                "Default key must be set. Got empty default keys."
+            )
+        self._default_keys = default_keys
+        super().__init__(*args, **kwargs)
+
+    def __str__(self) -> str:
+        return str(self.get_default_value())
+
+    def get_default_keys(self) -> list[str]:
+        return list(self._default_keys)
+
+    def get_default_value(self) -> Any:
+        value = self
+        for key in self._default_keys:
+            value = value[key]
+        return value
+
+
 class StringTemplate:
     """String that can be formatted."""
     def __init__(self, template: str):
@@ -84,7 +130,7 @@ class StringTemplate:
            if substr:
                new_parts.append(substr)
 
-        self._parts: List["Union[str, OptionalPart, FormattingPart]"] = (
+        self._parts: list[Union[str, OptionalPart, FormattingPart]] = (
            self.find_optional_parts(new_parts)
        )
@@ -105,7 +151,7 @@
     def template(self) -> str:
         return self._template
 
-    def format(self, data: Dict[str, Any]) -> "TemplateResult":
+    def format(self, data: dict[str, Any]) -> "TemplateResult":
         """ Figure out with whole formatting.
 
         Separate advanced keys (*Like '{project[name]}') from string which must
@@ -145,29 +191,29 @@
            invalid_types
        )
 
-    def format_strict(self, data: Dict[str, Any]) -> "TemplateResult":
+    def format_strict(self, data: dict[str, Any]) -> "TemplateResult":
        result = self.format(data)
        result.validate()
        return result
 
     @classmethod
     def format_template(
-        cls, template: str, data: Dict[str, Any]
+        cls, template: str, data: dict[str, Any]
     ) -> "TemplateResult":
        objected_template = cls(template)
        return objected_template.format(data)
 
     @classmethod
     def format_strict_template(
-        cls, template: str, data: Dict[str, Any]
+        cls, template: str, data: dict[str, Any]
     ) -> "TemplateResult":
        objected_template = cls(template)
        return objected_template.format_strict(data)
 
     @staticmethod
     def find_optional_parts(
-        parts: List["Union[str, FormattingPart]"]
-    ) -> List["Union[str, OptionalPart, FormattingPart]"]:
+        parts: list[Union[str, FormattingPart]]
+    ) -> list[Union[str, OptionalPart, FormattingPart]]:
        new_parts = []
        tmp_parts = {}
        counted_symb = -1
@@ -192,7 +238,7 @@ class StringTemplate:
                    len(parts) == 1
                    and isinstance(parts[0], str)
                ):
-                    value = "<{}>".format(parts[0])
+                    value = f"<{parts[0]}>"
                else:
                    value = OptionalPart(parts)
@@ -223,7 +269,7 @@ class TemplateResult(str):
            only used keys.
        solved (bool): For check if all required keys were filled.
        template (str): Original template.
-        missing_keys (Iterable[str]): Missing keys that were not in the data.
+        missing_keys (list[str]): Missing keys that were not in the data.
            Include missing optional keys.
        invalid_types (dict): When key was found in data, but value had not
            allowed DataType. Allowed data types are `numbers`,
@@ -232,11 +278,11 @@
            of number.
     """
 
-    used_values: Dict[str, Any] = None
+    used_values: dict[str, Any] = None
     solved: bool = None
     template: str = None
-    missing_keys: List[str] = None
-    invalid_types: Dict[str, Any] = None
+    missing_keys: list[str] = None
+    invalid_types: dict[str, Any] = None
 
     def __new__(
        cls, filled_template, template, solved,
@@ -296,21 +342,21 @@ class TemplatePartResult:
     """Result to store result of template parts."""
     def __init__(self, optional: bool = False):
        # Missing keys or invalid value types of required keys
-        self._missing_keys: Set[str] = set()
-        self._invalid_types: Dict[str, Any] = {}
+        self._missing_keys: set[str] = set()
+        self._invalid_types: dict[str, Any] = {}
        # Missing keys or invalid value types of optional keys
-        self._missing_optional_keys: Set[str] = set()
-        self._invalid_optional_types: Dict[str, Any] = {}
+        self._missing_optional_keys: set[str] = set()
+        self._invalid_optional_types: dict[str, Any] = {}
 
        # Used values stored by key with origin type
        # - key without any padding or key modifiers
        # - value from filling data
        # Example: {"version": 1}
-        self._used_values: Dict[str, Any] = {}
+        self._used_values: dict[str, Any] = {}
        # Used values stored by key with all modifirs
        # - value is already formatted string
        # Example: {"version:0>3": "001"}
-        self._really_used_values: Dict[str, Any] = {}
+        self._really_used_values: dict[str, Any] = {}
        # Concatenated string output after formatting
        self._output: str = ""
        # Is this result from optional part
@@ -336,8 +382,9 @@
            self._really_used_values.update(other.really_used_values)
 
        else:
-            raise TypeError("Cannot add data from \"{}\" to \"{}\"".format(
-                str(type(other)), self.__class__.__name__)
+            raise TypeError(
+                f"Cannot add data from \"{type(other)}\""
+                f" to \"{self.__class__.__name__}\""
            )
 
     @property
@@ -362,40 +409,41 @@
        return self._output
 
     @property
-    def missing_keys(self) -> Set[str]:
+    def missing_keys(self) -> set[str]:
        return self._missing_keys
 
     @property
-    def missing_optional_keys(self) -> Set[str]:
+    def missing_optional_keys(self) -> set[str]:
        return self._missing_optional_keys
 
     @property
-    def invalid_types(self) -> Dict[str, Any]:
+    def invalid_types(self) -> dict[str, Any]:
        return self._invalid_types
 
     @property
-    def invalid_optional_types(self) -> Dict[str, Any]:
+    def invalid_optional_types(self) -> dict[str, Any]:
        return self._invalid_optional_types
 
     @property
-    def really_used_values(self) -> Dict[str, Any]:
+    def really_used_values(self) -> dict[str, Any]:
        return self._really_used_values
 
     @property
-    def realy_used_values(self) -> Dict[str, Any]:
+    def realy_used_values(self) -> dict[str, Any]:
        warnings.warn(
            "Property 'realy_used_values' is deprecated."
            " Use 'really_used_values' instead.",
-            DeprecationWarning
+            DeprecationWarning,
+            stacklevel=2,
        )
        return self._really_used_values
 
     @property
-    def used_values(self) -> Dict[str, Any]:
+    def used_values(self) -> dict[str, Any]:
        return self._used_values
 
     @staticmethod
-    def split_keys_to_subdicts(values: Dict[str, Any]) -> Dict[str, Any]:
+    def split_keys_to_subdicts(values: dict[str, Any]) -> dict[str, Any]:
        output = {}
        formatter = Formatter()
        for key, value in values.items():
@@ -410,7 +458,7 @@
            data[last_key] = value
        return output
 
-    def get_clean_used_values(self) -> Dict[str, Any]:
+    def get_clean_used_values(self) -> dict[str, Any]:
        new_used_values = {}
        for key, value in self.used_values.items():
            if isinstance(value, FormatObject):
@@ -426,7 +474,8 @@
        warnings.warn(
            "Method 'add_realy_used_value' is deprecated."
            " Use 'add_really_used_value' instead.",
-            DeprecationWarning
+            DeprecationWarning,
+            stacklevel=2,
        )
        self.add_really_used_value(key, value)
@@ -479,7 +528,7 @@ class FormattingPart:
        self,
        field_name: str,
        format_spec: str,
-        conversion: "Union[str, None]",
+        conversion: Union[str, None],
     ):
        format_spec_v = ""
        if format_spec:
@@ -546,7 +595,7 @@ class FormattingPart:
        return not queue
 
     @staticmethod
-    def keys_to_template_base(keys: List[str]):
+    def keys_to_template_base(keys: list[str]):
        if not keys:
            return None
        # Create copy of keys
@@ -556,7 +605,7 @@ class FormattingPart:
        return f"{template_base}{joined_keys}"
 
     def format(
-        self, data: Dict[str, Any], result: TemplatePartResult
+        self, data: dict[str, Any], result: TemplatePartResult
     ) -> TemplatePartResult:
        """Format the formattings string.
@@ -635,6 +684,12 @@ class FormattingPart:
            result.add_output(self.template)
            return result
 
+        if isinstance(value, DefaultKeysDict):
+            try:
+                value = value.get_default_value()
+            except KeyError:
+                pass
+
        if not self.validate_value_type(value):
            result.add_invalid_type(key, value)
            result.add_output(self.template)
@@ -687,23 +742,25 @@ class OptionalPart:
     def __init__(
        self,
-        parts: List["Union[str, OptionalPart, FormattingPart]"]
+        parts: list[Union[str, OptionalPart, FormattingPart]]
     ):
-        self._parts: List["Union[str, OptionalPart, FormattingPart]"] = parts
+        self._parts: list[Union[str, OptionalPart, FormattingPart]] = parts
 
     @property
-    def parts(self) -> List["Union[str, OptionalPart, FormattingPart]"]:
+    def parts(self) -> list[Union[str, OptionalPart, FormattingPart]]:
        return self._parts
 
     def __str__(self) -> str:
-        return "<{}>".format("".join([str(p) for p in self._parts]))
+        joined_parts = "".join([str(p) for p in self._parts])
+        return f"<{joined_parts}>"
 
     def __repr__(self) -> str:
-        return "<Optional:{}>".format("".join([str(p) for p in self._parts]))
+        joined_parts = "".join([str(p) for p in self._parts])
+        return f"<Optional:{joined_parts}>"
 
     def format(
        self,
-        data: Dict[str, Any],
+        data: dict[str, Any],
        result: TemplatePartResult,
     ) -> TemplatePartResult:
        new_result = TemplatePartResult(True)
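
A usage sketch for DefaultKeysDict together with StringTemplate, both part of the changes above; the data values are invented for the example:

    from ayon_core.lib import DefaultKeysDict, StringTemplate

    user = DefaultKeysDict("name", {
        "name": "jdoe",
        "attrib": {"fullName": "Jane Doe"},
    })
    data = {"folder": {"name": "sh010"}, "user": user}

    # The new-style key and the legacy short key should both resolve,
    # thanks to the DefaultKeysDict handling added to FormattingPart.format.
    print(StringTemplate.format_template("{folder[name]}_{user[name]}", data))
    print(StringTemplate.format_template("{folder[name]}_{user}", data))
    # Both are expected to print "sh010_jdoe".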


@@ -1,11 +1,9 @@
 # -*- coding: utf-8 -*-
 """AYON plugin tools."""
 import os
-import logging
 import re
 import collections
 
-log = logging.getLogger(__name__)
-
 CAPITALIZE_REGEX = re.compile(r"[a-zA-Z0-9]")


@@ -202,7 +202,8 @@ def is_clip_from_media_sequence(otio_clip):
 
 def remap_range_on_file_sequence(otio_clip, otio_range):
-    """
+    """Remap the provided range on a file sequence clip.
+
     Args:
         otio_clip (otio.schema.Clip): The OTIO clip to check.
         otio_range (otio.schema.TimeRange): The trim range to apply.
@@ -249,7 +250,11 @@ def remap_range_on_file_sequence(otio_clip, otio_range):
     if (
         is_clip_from_media_sequence(otio_clip)
         and available_range_start_frame == media_ref.start_frame
-        and conformed_src_in.to_frames() < media_ref.start_frame
+        # source range should be included in available range from media
+        # using round instead of conformed_src_in.to_frames() to avoid
+        # any precision issue with frame rate.
+        and round(conformed_src_in.value) < media_ref.start_frame
     ):
         media_in = otio.opentime.RationalTime(
             0, rate=available_range_rate
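
The rounding change above guards against floating point drift when a frame number round-trips through rate conversions. A self-contained illustration of the underlying issue (plain Python, no OTIO required; the numbers are only an example of how a nominally exact frame can end up a hair off):

    frame = 1000
    value = (frame * 1001 / 24000) * (24000 / 1001)  # round-trip through a rate
    print(value)         # may print 999.9999999999999 or 1000.0000000000002
    print(int(value))    # truncation can land on the neighbouring frame
    print(round(value))  # rounding recovers the intended frame, as used above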


@@ -249,7 +249,8 @@ def create_skeleton_instance(
         # map inputVersions `ObjectId` -> `str` so json supports it
         "inputVersions": list(map(str, data.get("inputVersions", []))),
         "colorspace": data.get("colorspace"),
-        "hasExplicitFrames": data.get("hasExplicitFrames")
+        "hasExplicitFrames": data.get("hasExplicitFrames", False),
+        "reuseLastVersion": data.get("reuseLastVersion", False),
     }
 
     if data.get("renderlayer"):


@@ -7,13 +7,20 @@ import copy
 import warnings
 import hashlib
 import xml.etree.ElementTree
-from typing import TYPE_CHECKING, Optional, Union, List
+from typing import TYPE_CHECKING, Optional, Union, List, Any
 
-import ayon_api
+import clique
+import speedcopy
+import logging
 import pyblish.util
 import pyblish.plugin
 import pyblish.api
+from ayon_api import (
+    get_server_api_connection,
+    get_representations,
+    get_last_version_by_product_name
+)
 
 from ayon_core.lib import (
     import_filepath,
     Logger,
@@ -34,6 +41,8 @@ if TYPE_CHECKING:
 
 TRAIT_INSTANCE_KEY: str = "representations_with_traits"
 
+log = logging.getLogger(__name__)
+
 
 def get_template_name_profiles(
     project_name, project_settings=None, logger=None
@@ -974,7 +983,26 @@ def get_instance_expected_output_path(
         "version": version
     })
 
-    path_template_obj = anatomy.get_template_item("publish", "default")["path"]
+    # Get instance publish template name
+    task_name = task_type = None
+    task_entity = instance.data.get("taskEntity")
+    if task_entity:
+        task_name = task_entity["name"]
+        task_type = task_entity["taskType"]
+
+    template_name = get_publish_template_name(
+        project_name=instance.context.data["projectName"],
+        host_name=instance.context.data["hostName"],
+        product_type=instance.data["productType"],
+        task_name=task_name,
+        task_type=task_type,
+        project_settings=instance.context.data["project_settings"],
+    )
+
+    path_template_obj = anatomy.get_template_item(
+        "publish",
+        template_name
+    )["path"]
     template_filled = path_template_obj.format_strict(template_data)
     return os.path.normpath(template_filled)
@@ -1030,7 +1058,7 @@ def main_cli_publish(
         # NOTE: ayon-python-api does not have public api function to find
         #   out if is used service user. So we need to have try > except
         #   block.
-        con = ayon_api.get_server_api_connection()
+        con = get_server_api_connection()
         try:
             con.set_default_service_username(username)
         except ValueError:
@@ -1143,3 +1171,90 @@ def get_trait_representations(
     """
     return instance.data.get(TRAIT_INSTANCE_KEY, [])
+
+
+def fill_sequence_gaps_with_previous_version(
+    collection: str,
+    staging_dir: str,
+    instance: pyblish.plugin.Instance,
+    current_repre_name: str,
+    start_frame: int,
+    end_frame: int
+) -> tuple[Optional[dict[str, Any]], Optional[dict[int, str]]]:
+    """Tries to replace missing frames from ones from last version"""
+    used_version_entity, repre_file_paths = _get_last_version_files(
+        instance, current_repre_name
+    )
+    if repre_file_paths is None:
+        # issues in getting last version files
+        return (None, None)
+
+    prev_collection = clique.assemble(
+        repre_file_paths,
+        patterns=[clique.PATTERNS["frames"]],
+        minimum_items=1
+    )[0][0]
+
+    prev_col_format = prev_collection.format("{head}{padding}{tail}")
+    added_files = {}
+    anatomy = instance.context.data["anatomy"]
+    col_format = collection.format("{head}{padding}{tail}")
+    for frame in range(start_frame, end_frame + 1):
+        if frame in collection.indexes:
+            continue
+
+        hole_fpath = os.path.join(staging_dir, col_format % frame)
+        previous_version_path = prev_col_format % frame
+        previous_version_path = anatomy.fill_root(previous_version_path)
+        if not os.path.exists(previous_version_path):
+            log.warning(
+                "Missing frame should be replaced from "
+                f"'{previous_version_path}' but that doesn't exist. "
+            )
+            return (None, None)
+
+        log.warning(
+            f"Replacing missing '{hole_fpath}' with "
+            f"'{previous_version_path}'"
+        )
+        speedcopy.copyfile(previous_version_path, hole_fpath)
+        added_files[frame] = hole_fpath
+
+    return (used_version_entity, added_files)
+
+
+def _get_last_version_files(
+    instance: pyblish.plugin.Instance,
+    current_repre_name: str,
+) -> tuple[Optional[dict[str, Any]], Optional[list[str]]]:
+    product_name = instance.data["productName"]
+    project_name = instance.data["projectEntity"]["name"]
+    folder_entity = instance.data["folderEntity"]
+    version_entity = get_last_version_by_product_name(
+        project_name,
+        product_name,
+        folder_entity["id"],
+        fields={"id", "attrib"}
+    )
+    if not version_entity:
+        return None, None
+
+    matching_repres = get_representations(
+        project_name,
+        version_ids=[version_entity["id"]],
+        representation_names=[current_repre_name],
+        fields={"files"}
+    )
+    matching_repre = next(matching_repres, None)
+    if not matching_repre:
+        return None, None
+
+    repre_file_paths = [
+        file_info["path"]
+        for file_info in matching_repre["files"]
+    ]
+    return (version_entity, repre_file_paths)
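
The gap filling above leans on clique's collection formatting to build both the missing staging path and the previous-version source path. A self-contained sketch of that mechanism (requires the 'clique' package; the file names are made up):

    import clique

    files = ["render.1001.exr", "render.1002.exr", "render.1005.exr"]
    collection = clique.assemble(
        files, patterns=[clique.PATTERNS["frames"]], minimum_items=1
    )[0][0]

    fmt = collection.format("{head}{padding}{tail}")   # "render.%04d.exr"
    missing = [
        frame for frame in range(1001, 1006)
        if frame not in collection.indexes
    ]
    print(fmt)      # pattern used for both the hole path and the source path
    print(missing)  # [1003, 1004] -> frames to copy from the previous version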


@@ -1,27 +1,50 @@
+from __future__ import annotations
+from typing import Optional, Any
+
 import ayon_api
 
 from ayon_core.settings import get_studio_settings
-from ayon_core.lib.local_settings import get_ayon_username
+from ayon_core.lib import DefaultKeysDict
+from ayon_core.lib.local_settings import get_ayon_user_entity
 
 
-def get_general_template_data(settings=None, username=None):
+def get_general_template_data(
+    settings: Optional[dict[str, Any]] = None,
+    username: Optional[str] = None,
+    user_entity: Optional[dict[str, Any]] = None,
+):
     """General template data based on system settings or machine.
 
     Output contains formatting keys:
     - 'studio[name]' - Studio name filled from system settings
     - 'studio[code]' - Studio code filled from system settings
-    - 'user' - User's name using 'get_ayon_username'
+    - 'user[name]' - User's name
+    - 'user[attrib][...]' - User's attributes
+    - 'user[data][...]' - User's data
 
     Args:
         settings (Dict[str, Any]): Studio or project settings.
         username (Optional[str]): AYON Username.
-    """
+        user_entity (Optional[dict[str, Any]]): User entity.
+
+    """
     if not settings:
         settings = get_studio_settings()
 
-    if username is None:
-        username = get_ayon_username()
+    if user_entity is None:
+        user_entity = get_ayon_user_entity(username)
+
+    # Use dictionary with default value for backwards compatibility
+    # - we did support '{user}' now it should be '{user[name]}'
+    user_data = DefaultKeysDict(
+        "name",
+        {
+            "name": user_entity["name"],
+            "attrib": user_entity["attrib"],
+            "data": user_entity["data"],
+        }
+    )
 
     core_settings = settings["core"]
     return {
@@ -29,7 +52,7 @@ def get_general_template_data(settings=None, username=None):
             "name": core_settings["studio_name"],
             "code": core_settings["studio_code"]
         },
-        "user": username
+        "user": user_data,
     }
@@ -150,7 +173,8 @@ def get_template_data(
     task_entity=None,
     host_name=None,
     settings=None,
-    username=None
+    username=None,
+    user_entity=None,
 ):
     """Prepare data for templates filling from entered documents and info.
@@ -173,13 +197,18 @@
         host_name (Optional[str]): Used to fill '{app}' key.
         settings (Union[Dict, None]): Prepared studio or project settings.
             They're queried if not passed (may be slower).
-        username (Optional[str]): AYON Username.
+        username (Optional[str]): DEPRECATED AYON Username.
+        user_entity (Optional[dict[str, Any]]): AYON user entity.
 
     Returns:
         Dict[str, Any]: Data prepared for filling workdir template.
 
     """
-    template_data = get_general_template_data(settings, username=username)
+    template_data = get_general_template_data(
+        settings,
+        username=username,
+        user_entity=user_entity,
+    )
     template_data.update(get_project_template_data(project_entity))
     if folder_entity:
         template_data.update(get_folder_template_data(
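
Rough shape of the data returned by get_general_template_data() after this change (the values are examples, not real settings or a real user entity):

    example_template_data = {
        "studio": {"name": "Example Studio", "code": "exs"},
        # 'user' is a DefaultKeysDict, so '{user}' still resolves to the
        # username while '{user[name]}', '{user[attrib][...]}' and
        # '{user[data][...]}' expose the full user entity fields.
        "user": {
            "name": "jdoe",
            "attrib": {"fullName": "Jane Doe"},
            "data": {},
        },
    }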


@@ -16,6 +16,7 @@ Provides:
 import json
 
 import pyblish.api
+from ayon_core.lib import get_ayon_user_entity
 from ayon_core.pipeline.template_data import get_template_data
@@ -55,17 +56,18 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin):
         if folder_entity:
             task_entity = context.data["taskEntity"]
 
+        username = context.data["user"]
+        user_entity = get_ayon_user_entity(username)
         anatomy_data = get_template_data(
             project_entity,
             folder_entity,
             task_entity,
-            host_name,
-            project_settings
+            host_name=host_name,
+            settings=project_settings,
+            user_entity=user_entity,
         )
         anatomy_data.update(context.data.get("datetimeData") or {})
 
-        username = context.data["user"]
-        anatomy_data["user"] = username
         # Backwards compatibility for 'username' key
         anatomy_data["username"] = username


@@ -32,6 +32,7 @@ class CollectCoreJobEnvVars(pyblish.api.ContextPlugin):
         for key in [
             "AYON_BUNDLE_NAME",
+            "AYON_STUDIO_BUNDLE_NAME",
             "AYON_USE_STAGING",
             "AYON_IN_TESTS",
             # NOTE Not sure why workdir is needed?


@@ -71,6 +71,12 @@ class CollectOtioRanges(pyblish.api.InstancePlugin):
         import opentimelineio as otio
 
         otio_clip = instance.data["otioClip"]
+        if isinstance(
+            otio_clip.media_reference,
+            otio.schema.MissingReference
+        ):
+            self.log.info("Clip has no media reference")
+            return
 
         # Collect timeline ranges if workfile start frame is available
         if "workfileFrameStart" in instance.data:


@@ -60,6 +60,13 @@ class CollectOtioSubsetResources(
         # get basic variables
         otio_clip = instance.data["otioClip"]
+        if isinstance(
+            otio_clip.media_reference,
+            otio.schema.MissingReference
+        ):
+            self.log.info("Clip has no media reference")
+            return
+
         otio_available_range = otio_clip.available_range()
         media_fps = otio_available_range.start_time.rate
         available_duration = otio_available_range.duration.value


@@ -13,6 +13,8 @@ import copy
 
 import pyblish.api
 
+from ayon_core.pipeline.publish import get_publish_template_name
+
 
 class CollectResourcesPath(pyblish.api.InstancePlugin):
     """Generate directory path where the files and resources will be stored.
@@ -77,16 +79,29 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
         # This is for cases of Deprecated anatomy without `folder`
         # TODO remove when all clients have solved this issue
-        template_data.update({
-            "frame": "FRAME_TEMP",
-            "representation": "TEMP"
-        })
+        template_data.update({"frame": "FRAME_TEMP", "representation": "TEMP"})
 
-        publish_templates = anatomy.get_template_item(
-            "publish", "default", "directory"
+        task_name = task_type = None
+        task_entity = instance.data.get("taskEntity")
+        if task_entity:
+            task_name = task_entity["name"]
+            task_type = task_entity["taskType"]
+
+        template_name = get_publish_template_name(
+            project_name=instance.context.data["projectName"],
+            host_name=instance.context.data["hostName"],
+            product_type=instance.data["productType"],
+            task_name=task_name,
+            task_type=task_type,
+            project_settings=instance.context.data["project_settings"],
+            logger=self.log,
         )
+        publish_template = anatomy.get_template_item(
+            "publish", template_name, "directory")
+
         publish_folder = os.path.normpath(
-            publish_templates.format_strict(template_data)
+            publish_template.format_strict(template_data)
        )
        resources_folder = os.path.join(publish_folder, "resources")


@@ -130,7 +130,7 @@
         # NOTE it looks like it is set only in hiero integration
         res_data = {"width": self.to_width, "height": self.to_height}
         for key in res_data:
-            for meta_prefix in ("ayon.source.", "openpype.source."):
+            for meta_prefix in ("ayon.source", "openpype.source"):
                 meta_key = f"{meta_prefix}.{key}"
                 value = media_metadata.get(meta_key)
                 if value is not None:


@@ -13,14 +13,15 @@ import clique
 import speedcopy
 import pyblish.api
 
-from ayon_api import get_last_version_by_product_name, get_representations
 from ayon_core.lib import (
     get_ffmpeg_tool_args,
     filter_profiles,
     path_to_subprocess_arg,
     run_subprocess,
 )
+from ayon_core.pipeline.publish.lib import (
+    fill_sequence_gaps_with_previous_version
+)
 from ayon_core.lib.transcoding import (
     IMAGE_EXTENSIONS,
     get_ffprobe_streams,
@@ -130,7 +131,7 @@ def frame_to_timecode(frame: int, fps: float) -> str:
 
 class ExtractReview(pyblish.api.InstancePlugin):
-    """Extracting Review mov file for Ftrack
+    """Extracting Reviewable medias
 
     Compulsory attribute of representation is tags list with "review",
     otherwise the representation is ignored.
@@ -508,10 +509,10 @@
                    resolution_width=temp_data.resolution_width,
                    resolution_height=temp_data.resolution_height,
                    extension=temp_data.input_ext,
-                    temp_data=temp_data
+                    temp_data=temp_data,
                )
            elif fill_missing_frames == "previous_version":
-                new_frame_files = self.fill_sequence_gaps_with_previous(
+                fill_output = fill_sequence_gaps_with_previous_version(
                    collection=collection,
                    staging_dir=new_repre["stagingDir"],
                    instance=instance,
@@ -519,8 +520,13 @@
                    start_frame=temp_data.frame_start,
                    end_frame=temp_data.frame_end,
                )
+                _, new_frame_files = fill_output
                # fallback to original workflow
                if new_frame_files is None:
+                    self.log.warning(
+                        "Falling back to filling from currently "
+                        "last rendered."
+                    )
                    new_frame_files = (
                        self.fill_sequence_gaps_from_existing(
                            collection=collection,
@@ -612,8 +618,6 @@
                "name": "{}_{}".format(output_name, output_ext),
                "outputName": output_name,
                "outputDef": output_def,
-                "frameStartFtrack": temp_data.output_frame_start,
-                "frameEndFtrack": temp_data.output_frame_end,
                "ffmpeg_cmd": subprcs_cmd
            })
@@ -1050,92 +1054,6 @@
 
         return all_args
 
-    def fill_sequence_gaps_with_previous(
-        self,
-        collection: str,
-        staging_dir: str,
-        instance: pyblish.plugin.Instance,
-        current_repre_name: str,
-        start_frame: int,
-        end_frame: int
-    ) -> Optional[dict[int, str]]:
-        """Tries to replace missing frames from ones from last version"""
-        repre_file_paths = self._get_last_version_files(
-            instance, current_repre_name)
-        if repre_file_paths is None:
-            # issues in getting last version files, falling back
-            return None
-
-        prev_collection = clique.assemble(
-            repre_file_paths,
-            patterns=[clique.PATTERNS["frames"]],
-            minimum_items=1
-        )[0][0]
-
-        prev_col_format = prev_collection.format("{head}{padding}{tail}")
-        added_files = {}
-        anatomy = instance.context.data["anatomy"]
-        col_format = collection.format("{head}{padding}{tail}")
-        for frame in range(start_frame, end_frame + 1):
-            if frame in collection.indexes:
-                continue
-
-            hole_fpath = os.path.join(staging_dir, col_format % frame)
-            previous_version_path = prev_col_format % frame
-            previous_version_path = anatomy.fill_root(previous_version_path)
-            if not os.path.exists(previous_version_path):
-                self.log.warning(
-                    "Missing frame should be replaced from "
-                    f"'{previous_version_path}' but that doesn't exist. "
-                    "Falling back to filling from currently last rendered."
-                )
-                return None
-
-            self.log.warning(
-                f"Replacing missing '{hole_fpath}' with "
-                f"'{previous_version_path}'"
-            )
-            speedcopy.copyfile(previous_version_path, hole_fpath)
-            added_files[frame] = hole_fpath
-
-        return added_files
-
-    def _get_last_version_files(
-        self,
-        instance: pyblish.plugin.Instance,
-        current_repre_name: str,
-    ):
-        product_name = instance.data["productName"]
-        project_name = instance.data["projectEntity"]["name"]
-        folder_entity = instance.data["folderEntity"]
-        version_entity = get_last_version_by_product_name(
-            project_name,
-            product_name,
-            folder_entity["id"],
-            fields={"id"}
-        )
-        if not version_entity:
-            return None
-
-        matching_repres = get_representations(
-            project_name,
-            version_ids=[version_entity["id"]],
-            representation_names=[current_repre_name],
-            fields={"files"}
-        )
-        if not matching_repres:
-            return None
-
-        matching_repre = list(matching_repres)[0]
-        repre_file_paths = [
-            file_info["path"]
-            for file_info in matching_repre["files"]
-        ]
-        return repre_file_paths
-
     def fill_sequence_gaps_with_blanks(
        self,
        collection: str,
@@ -1384,15 +1302,7 @@
             return audio_in_args, audio_filters, audio_out_args
 
         for audio in audio_inputs:
-            # NOTE modified, always was expected "frameStartFtrack" which is
-            #   STRANGE?!!! There should be different key, right?
-            # TODO use different frame start!
             offset_seconds = 0
-            frame_start_ftrack = instance.data.get("frameStartFtrack")
-            if frame_start_ftrack is not None:
-                offset_frames = frame_start_ftrack - audio["offset"]
-                offset_seconds = offset_frames / temp_data.fps
-
             if offset_seconds > 0:
                 audio_in_args.append(
                     "-ss {}".format(offset_seconds)


@@ -121,7 +121,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
         "version",
         "representation",
         "username",
-        "user",
         "output",
         # OpenPype keys - should be removed
         "asset",  # folder[name]
@@ -796,6 +795,14 @@
             if value is not None:
                 repre_context[key] = value
 
+        # Keep only username
+        # NOTE This is to avoid storing all user attributes and data
+        #   to representation
+        if "user" not in repre_context:
+            repre_context["user"] = {
+                "name": template_data["user"]["name"]
+            }
+
         # Use previous representation's id if there is a name match
         existing = existing_repres_by_name.get(repre["name"].lower())
         repre_id = None


@@ -89,7 +89,6 @@
         "family",
         "representation",
         "username",
-        "user",
         "output"
     ]
     # QUESTION/TODO this process should happen on server if crashed due to
@@ -364,6 +363,14 @@
             if value is not None:
                 repre_context[key] = value
 
+        # Keep only username
+        # NOTE This is to avoid storing all user attributes and data
+        #   to representation
+        if "user" not in repre_context:
+            repre_context["user"] = {
+                "name": anatomy_data["user"]["name"]
+            }
+
         # Prepare new repre
         repre_entity = copy.deepcopy(repre_info["representation"])
         repre_entity.pop("id", None)


@@ -41,6 +41,7 @@ class PushToContextController:
         self._process_item_id = None
         self._use_original_name = False
+        self._version_up = False
 
         self.set_source(project_name, version_ids)
@@ -212,7 +213,7 @@
                self._user_values.variant,
                comment=self._user_values.comment,
                new_folder_name=self._user_values.new_folder_name,
-                dst_version=1,
+                version_up=self._version_up,
                use_original_name=self._use_original_name,
            )
            item_ids.append(item_id)
@@ -229,6 +230,9 @@
         thread.start()
         return item_ids
 
+    def set_version_up(self, state):
+        self._version_up = state
+
     def wait_for_process_thread(self):
         if self._process_thread is None:
             return


@@ -3,9 +3,10 @@ import re
 import copy
 import itertools
 import sys
+import tempfile
 import traceback
 import uuid
-from typing import Optional, Dict
+from typing import Optional, Any
 
 import ayon_api
 from ayon_api.utils import create_entity_id
@@ -88,7 +89,7 @@ class ProjectPushItem:
         variant,
         comment,
         new_folder_name,
-        dst_version,
+        version_up,
         item_id=None,
         use_original_name=False
     ):
@@ -99,7 +100,7 @@
         self.dst_project_name = dst_project_name
         self.dst_folder_id = dst_folder_id
         self.dst_task_name = dst_task_name
-        self.dst_version = dst_version
+        self.version_up = version_up
         self.variant = variant
         self.new_folder_name = new_folder_name
         self.comment = comment or ""
@@ -117,7 +118,7 @@
             str(self.dst_folder_id),
             str(self.new_folder_name),
             str(self.dst_task_name),
-            str(self.dst_version),
+            str(self.version_up),
             self.use_original_name
         ])
         return self._repr_value
@@ -132,7 +133,7 @@
             "dst_project_name": self.dst_project_name,
             "dst_folder_id": self.dst_folder_id,
             "dst_task_name": self.dst_task_name,
-            "dst_version": self.dst_version,
+            "version_up": self.version_up,
             "variant": self.variant,
             "comment": self.comment,
             "new_folder_name": self.new_folder_name,
@@ -225,8 +226,8 @@ class ProjectPushRepreItem:
     but filenames are not template based.
 
     Args:
-        repre_entity (Dict[str, Ant]): Representation entity.
-        roots (Dict[str, str]): Project roots (based on project anatomy).
+        repre_entity (dict[str, Ant]): Representation entity.
+        roots (dict[str, str]): Project roots (based on project anatomy).
 
     """
     def __init__(self, repre_entity, roots):
@@ -482,6 +483,8 @@ class ProjectPushItemProcess:
         self._log_info("Destination project was found")
         self._fill_or_create_destination_folder()
         self._log_info("Destination folder was determined")
+        self._fill_or_create_destination_task()
+        self._log_info("Destination task was determined")
         self._determine_product_type()
         self._determine_publish_template_name()
         self._determine_product_name()
@@ -650,10 +653,10 @@
     def _create_folder(
         self,
-        src_folder_entity,
-        project_entity,
-        parent_folder_entity,
-        folder_name
+        src_folder_entity: dict[str, Any],
+        project_entity: dict[str, Any],
+        parent_folder_entity: dict[str, Any],
+        folder_name: str
     ):
         parent_id = None
         if parent_folder_entity:
@@ -702,12 +705,19 @@
         if new_folder_name != folder_name:
             folder_label = folder_name
 
-        # TODO find out how to define folder type
+        src_folder_type = src_folder_entity["folderType"]
+        dst_folder_type = self._get_dst_folder_type(
+            project_entity,
+            src_folder_type
+        )
+        new_thumbnail_id = self._create_new_folder_thumbnail(
+            project_entity, src_folder_entity)
+
         folder_entity = new_folder_entity(
             folder_name,
-            "Folder",
+            dst_folder_type,
             parent_id=parent_id,
-            attribs=new_folder_attrib
+            attribs=new_folder_attrib,
+            thumbnail_id=new_thumbnail_id
         )
         if folder_label:
             folder_entity["label"] = folder_label
@@ -727,10 +737,59 @@
         folder_entity["path"] = "/".join([parent_path, folder_name])
         return folder_entity
 
+    def _create_new_folder_thumbnail(
+        self,
+        project_entity: dict[str, Any],
+        src_folder_entity: dict[str, Any]
+    ) -> Optional[str]:
+        """Copy thumbnail possibly set on folder.
+
+        Could be different from representation thumbnails, and it is only shown
+        when folder is selected.
+        """
+        if not src_folder_entity["thumbnailId"]:
+            return None
+        thumbnail = ayon_api.get_folder_thumbnail(
+            self._item.src_project_name,
+            src_folder_entity["id"],
+            src_folder_entity["thumbnailId"]
+        )
+        if not thumbnail.id:
+            return None
+
+        with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
+            tmp_file.write(thumbnail.content)
+            temp_file_path = tmp_file.name
+
+        new_thumbnail_id = None
+        try:
+            new_thumbnail_id = ayon_api.create_thumbnail(
+                project_entity["name"], temp_file_path)
+        finally:
+            if os.path.exists(temp_file_path):
+                os.remove(temp_file_path)
+        return new_thumbnail_id
+
+    def _get_dst_folder_type(
+        self,
+        project_entity: dict[str, Any],
+        src_folder_type: str
+    ) -> str:
+        """Get new folder type."""
+        for folder_type in project_entity["folderTypes"]:
+            if folder_type["name"].lower() == src_folder_type.lower():
+                return folder_type["name"]
+
+        self._status.set_failed(
+            f"'{src_folder_type}' folder type is not configured in "
+            f"project Anatomy."
+        )
+        raise PushToProjectError(self._status.fail_reason)
+
     def _fill_or_create_destination_folder(self):
         dst_project_name = self._item.dst_project_name
         dst_folder_id = self._item.dst_folder_id
-        dst_task_name = self._item.dst_task_name
         new_folder_name = self._item.new_folder_name
         if not dst_folder_id and not new_folder_name:
             self._status.set_failed(
@@ -761,9 +820,11 @@
             new_folder_name
        )
        self._folder_entity = folder_entity
-        if not dst_task_name:
-            self._task_info = {}
-            return
+
+    def _fill_or_create_destination_task(self):
+        folder_entity = self._folder_entity
+        dst_task_name = self._item.dst_task_name
+        dst_project_name = self._item.dst_project_name
 
        folder_path = folder_entity["path"]
        folder_tasks = {
@@ -772,6 +833,20 @@
                dst_project_name, folder_ids=[folder_entity["id"]]
            )
        }
+        if not dst_task_name:
+            src_task_info = self._get_src_task_info()
+            if not src_task_info:  # really no task selected nor on source
+                self._task_info = {}
+                return
+            dst_task_name = src_task_info["name"]
+
+            if dst_task_name.lower() not in folder_tasks:
+                task_info = self._make_sure_task_exists(
+                    folder_entity, src_task_info
+                )
+                folder_tasks[dst_task_name.lower()] = task_info
+
        task_info = folder_tasks.get(dst_task_name.lower())
        if not task_info:
            self._status.set_failed(
@@ -790,7 +865,10 @@
            task_type["name"]: task_type
            for task_type in self._project_entity["taskTypes"]
        }
-        task_type_info = task_types_by_name.get(task_type_name, {})
+        task_type_info = copy.deepcopy(
+            task_types_by_name.get(task_type_name, {})
+        )
+        task_type_info.pop("name")  # do not overwrite real task name
        task_info.update(task_type_info)
        self._task_info = task_info
@@ -870,10 +948,22 @@
             self._product_entity = product_entity
             return product_entity
 
+        src_attrib = self._src_product_entity["attrib"]
+        dst_attrib = {}
+        for key in {
+            "description",
+            "productGroup",
+        }:
+            value = src_attrib.get(key)
+            if value:
+                dst_attrib[key] = value
+
         product_entity = new_product_entity(
            product_name,
            product_type,
            folder_id,
+            attribs=dst_attrib
        )
        self._operations.create_entity(
            project_name, "product", product_entity
@@ -884,7 +974,7 @@
         """Make sure version document exits in database."""
         project_name = self._item.dst_project_name
-        version = self._item.dst_version
+        version_up = self._item.version_up
         src_version_entity = self._src_version_entity
         product_entity = self._product_entity
         product_id = product_entity["id"]
@@ -912,27 +1002,29 @@
            "description",
            "intent",
        }:
-            if key in src_attrib:
-                dst_attrib[key] = src_attrib[key]
-
-        if version is None:
-            last_version_entity = ayon_api.get_last_version_by_product_id(
-                project_name, product_id
-            )
-            if last_version_entity:
-                version = int(last_version_entity["version"]) + 1
-            else:
-                version = get_versioning_start(
-                    project_name,
-                    self.host_name,
-                    task_name=self._task_info["name"],
-                    task_type=self._task_info["taskType"],
-                    product_type=product_type,
-                    product_name=product_entity["name"],
-                )
+            value = src_attrib.get(key)
+            if value:
+                dst_attrib[key] = value
+
+        last_version_entity = ayon_api.get_last_version_by_product_id(
+            project_name, product_id
+        )
+        if last_version_entity is None:
+            dst_version = get_versioning_start(
+                project_name,
+                self.host_name,
+                task_name=self._task_info.get("name"),
+                task_type=self._task_info.get("taskType"),
+                product_type=product_type,
+                product_name=product_entity["name"],
+            )
+        else:
+            dst_version = int(last_version_entity["version"])
+            if version_up:
+                dst_version += 1
 
        existing_version_entity = ayon_api.get_version_by_name(
-            project_name, version, product_id
+            project_name, dst_version, product_id
        )
 
        thumbnail_id = self._copy_version_thumbnail()
@@ -950,10 +1042,16 @@
             existing_version_entity["attrib"].update(dst_attrib)
             self._version_entity = existing_version_entity
             return
 
+        copied_tags = self._get_transferable_tags(src_version_entity)
+        copied_status = self._get_transferable_status(src_version_entity)
+
         version_entity = new_version_entity(
-            version,
+            dst_version,
            product_id,
+            author=src_version_entity["author"],
+            status=copied_status,
+            tags=copied_tags,
+            task_id=self._task_info.get("id"),
            attribs=dst_attrib,
            thumbnail_id=thumbnail_id,
        )
@@ -962,6 +1060,47 @@
        )
        self._version_entity = version_entity
 
+    def _make_sure_task_exists(
+        self,
+        folder_entity: dict[str, Any],
+        task_info: dict[str, Any],
+    ) -> dict[str, Any]:
+        """Creates destination task from source task information"""
+        project_name = self._item.dst_project_name
+        found_task_type = False
+        src_task_type = task_info["taskType"]
+        for task_type in self._project_entity["taskTypes"]:
+            if task_type["name"].lower() == src_task_type.lower():
+                found_task_type = True
+                break
+
+        if not found_task_type:
+            self._status.set_failed(
+                f"'{src_task_type}' task type is not configured in "
+                "project Anatomy."
+            )
+            raise PushToProjectError(self._status.fail_reason)
+
+        task_info = self._operations.create_task(
+            project_name,
+            task_info["name"],
+            folder_id=folder_entity["id"],
+            task_type=src_task_type,
+            attrib=task_info["attrib"],
+        )
+        self._task_info = task_info.data
+        return self._task_info
+
+    def _get_src_task_info(self):
+        src_version_entity = self._src_version_entity
+        if not src_version_entity["taskId"]:
+            return None
+        src_task = ayon_api.get_task_by_id(
+            self._item.src_project_name, src_version_entity["taskId"]
+        )
+        return src_task
+
     def _integrate_representations(self):
try: try:
self._real_integrate_representations() self._real_integrate_representations()
@ -1197,18 +1336,42 @@ class ProjectPushItemProcess:
if context_value and isinstance(context_value, dict): if context_value and isinstance(context_value, dict):
for context_sub_key in context_value.keys(): for context_sub_key in context_value.keys():
value_to_update = formatting_data.get(context_key, {}).get( value_to_update = formatting_data.get(context_key, {}).get(
context_sub_key) context_sub_key
)
if value_to_update: if value_to_update:
repre_context[context_key][ repre_context[context_key][context_sub_key] = (
context_sub_key] = value_to_update value_to_update
)
else: else:
value_to_update = formatting_data.get(context_key) value_to_update = formatting_data.get(context_key)
if value_to_update: if value_to_update:
repre_context[context_key] = value_to_update repre_context[context_key] = value_to_update
if "task" not in formatting_data: if "task" not in formatting_data:
repre_context.pop("task") repre_context.pop("task", None)
return repre_context return repre_context
def _get_transferable_tags(self, src_version_entity):
"""Copy over only tags present in destination project"""
dst_project_tags = [
tag["name"] for tag in self._project_entity["tags"]
]
copied_tags = []
for src_tag in src_version_entity["tags"]:
if src_tag in dst_project_tags:
copied_tags.append(src_tag)
return copied_tags
def _get_transferable_status(self, src_version_entity):
"""Copy over status, first status if not matching found"""
dst_project_statuses = {
status["name"]: status
for status in self._project_entity["statuses"]
}
copied_status = dst_project_statuses.get(src_version_entity["status"])
if copied_status:
return copied_status["name"]
return None
class IntegrateModel: class IntegrateModel:
def __init__(self, controller): def __init__(self, controller):
@ -1231,7 +1394,7 @@ class IntegrateModel:
variant, variant,
comment, comment,
new_folder_name, new_folder_name,
dst_version, version_up,
use_original_name use_original_name
): ):
"""Create new item for integration. """Create new item for integration.
@ -1245,7 +1408,7 @@ class IntegrateModel:
variant (str): Variant name. variant (str): Variant name.
comment (Union[str, None]): Comment. comment (Union[str, None]): Comment.
new_folder_name (Union[str, None]): New folder name. new_folder_name (Union[str, None]): New folder name.
            dst_version (int): Destination version number. version_up (bool): Should the destination product be versioned up.
use_original_name (bool): If original product names should be used use_original_name (bool): If original product names should be used
Returns: Returns:
@ -1262,7 +1425,7 @@ class IntegrateModel:
variant, variant,
comment=comment, comment=comment,
new_folder_name=new_folder_name, new_folder_name=new_folder_name,
dst_version=dst_version, version_up=version_up,
use_original_name=use_original_name use_original_name=use_original_name
) )
process_item = ProjectPushItemProcess(self, item) process_item = ProjectPushItemProcess(self, item)
@ -1281,6 +1444,6 @@ class IntegrateModel:
return return
item.integrate() item.integrate()
def get_items(self) -> Dict[str, ProjectPushItemProcess]: def get_items(self) -> dict[str, ProjectPushItemProcess]:
"""Returns dict of all ProjectPushItemProcess items """ """Returns dict of all ProjectPushItemProcess items """
return self._process_items return self._process_items
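
As an aside on the dst_version -> version_up change above: a minimal sketch of how the destination version is now resolved, mirroring the hunk in the version-creation method. The helper name is illustrative only, and the get_versioning_start import path is assumed from the current ayon-core layout.

import ayon_api

# Import path assumed from the current ayon-core layout.
from ayon_core.pipeline.version_start import get_versioning_start


def resolve_dst_version(
    project_name,
    product_id,
    product_entity,
    product_type,
    task_info,
    host_name,
    version_up,
):
    """Illustrative helper; mirrors the hunk above, not part of the addon."""
    last_version_entity = ayon_api.get_last_version_by_product_id(
        project_name, product_id
    )
    if last_version_entity is None:
        # Nothing was published yet -> use the configured first version.
        return get_versioning_start(
            project_name,
            host_name,
            task_name=task_info.get("name"),
            task_type=task_info.get("taskType"),
            product_type=product_type,
            product_name=product_entity["name"],
        )
    dst_version = int(last_version_entity["version"])
    if version_up:
        # "Version up existing Product" is checked -> create a new version.
        dst_version += 1
    # Otherwise the latest existing version is reused and updated in place.
    return dst_version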


@ -144,6 +144,8 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
variant_input.setPlaceholderText("< Variant >") variant_input.setPlaceholderText("< Variant >")
variant_input.setObjectName("ValidatedLineEdit") variant_input.setObjectName("ValidatedLineEdit")
version_up_checkbox = NiceCheckbox(True, parent=inputs_widget)
comment_input = PlaceholderLineEdit(inputs_widget) comment_input = PlaceholderLineEdit(inputs_widget)
comment_input.setPlaceholderText("< Publish comment >") comment_input.setPlaceholderText("< Publish comment >")
@ -153,7 +155,11 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
inputs_layout.addRow("New folder name", folder_name_input) inputs_layout.addRow("New folder name", folder_name_input)
inputs_layout.addRow("Variant", variant_input) inputs_layout.addRow("Variant", variant_input)
inputs_layout.addRow( inputs_layout.addRow(
"Use original product names", original_names_checkbox) "Use original product names", original_names_checkbox
)
inputs_layout.addRow(
"Version up existing Product", version_up_checkbox
)
inputs_layout.addRow("Comment", comment_input) inputs_layout.addRow("Comment", comment_input)
main_splitter.addWidget(context_widget) main_splitter.addWidget(context_widget)
@ -209,8 +215,11 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
"Show error detail dialog to copy full error." "Show error detail dialog to copy full error."
) )
original_names_checkbox.setToolTip( original_names_checkbox.setToolTip(
"Required for multi copy, doesn't allow changes " "Required for multi copy, doesn't allow changes variant values."
"variant values." )
version_up_checkbox.setToolTip(
"Version up existing product. If not selected version will be "
"updated."
) )
overlay_close_btn = QtWidgets.QPushButton( overlay_close_btn = QtWidgets.QPushButton(
@ -259,6 +268,8 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
library_only_checkbox.stateChanged.connect(self._on_library_only_change) library_only_checkbox.stateChanged.connect(self._on_library_only_change)
original_names_checkbox.stateChanged.connect( original_names_checkbox.stateChanged.connect(
self._on_original_names_change) self._on_original_names_change)
version_up_checkbox.stateChanged.connect(
self._on_version_up_checkbox_change)
publish_btn.clicked.connect(self._on_select_click) publish_btn.clicked.connect(self._on_select_click)
cancel_btn.clicked.connect(self._on_close_click) cancel_btn.clicked.connect(self._on_close_click)
@ -308,6 +319,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
self._folder_name_input = folder_name_input self._folder_name_input = folder_name_input
self._comment_input = comment_input self._comment_input = comment_input
self._use_original_names_checkbox = original_names_checkbox self._use_original_names_checkbox = original_names_checkbox
self._library_only_checkbox = library_only_checkbox
self._publish_btn = publish_btn self._publish_btn = publish_btn
@ -328,6 +340,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
self._new_folder_name_input_text = None self._new_folder_name_input_text = None
self._variant_input_text = None self._variant_input_text = None
self._comment_input_text = None self._comment_input_text = None
self._version_up_checkbox = version_up_checkbox
self._first_show = True self._first_show = True
self._show_timer = show_timer self._show_timer = show_timer
@ -344,6 +357,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
show_detail_btn.setVisible(False) show_detail_btn.setVisible(False)
overlay_close_btn.setVisible(False) overlay_close_btn.setVisible(False)
overlay_try_btn.setVisible(False) overlay_try_btn.setVisible(False)
version_up_checkbox.setChecked(False)
# Support of public api function of controller # Support of public api function of controller
def set_source(self, project_name, version_ids): def set_source(self, project_name, version_ids):
@ -376,7 +390,6 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
self._invalidate_new_folder_name( self._invalidate_new_folder_name(
new_folder_name, user_values["is_new_folder_name_valid"] new_folder_name, user_values["is_new_folder_name_valid"]
) )
self._controller._invalidate()
self._projects_combobox.refresh() self._projects_combobox.refresh()
def _on_first_show(self): def _on_first_show(self):
@ -415,14 +428,18 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
self._comment_input_text = text self._comment_input_text = text
self._user_input_changed_timer.start() self._user_input_changed_timer.start()
def _on_library_only_change(self, state: int) -> None: def _on_library_only_change(self) -> None:
"""Change toggle state, reset filter, recalculate dropdown""" """Change toggle state, reset filter, recalculate dropdown"""
state = bool(state) is_checked = self._library_only_checkbox.isChecked()
self._projects_combobox.set_standard_filter_enabled(state) self._projects_combobox.set_standard_filter_enabled(is_checked)
def _on_original_names_change(self, state: int) -> None: def _on_original_names_change(self) -> None:
use_original_name = bool(state) is_checked = self._use_original_names_checkbox.isChecked()
self._invalidate_use_original_names(use_original_name) self._invalidate_use_original_names(is_checked)
def _on_version_up_checkbox_change(self) -> None:
is_checked = self._version_up_checkbox.isChecked()
self._controller.set_version_up(is_checked)
def _on_user_input_timer(self): def _on_user_input_timer(self):
folder_name_enabled = self._new_folder_name_enabled folder_name_enabled = self._new_folder_name_enabled
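
The reworked checkbox handlers above read the widget state instead of the stateChanged payload; a minimal sketch of that pattern, using a plain QtWidgets.QCheckBox in place of the AYON-specific NiceCheckbox (an assumption made purely for illustration):

from qtpy import QtWidgets


class ExampleRow(QtWidgets.QWidget):
    """Minimal sketch; not part of the tool above."""

    def __init__(self, parent=None):
        super().__init__(parent)
        checkbox = QtWidgets.QCheckBox("Version up existing Product", self)
        # Connect without consuming the signal argument and read the widget
        # state inside the slot, so the handler does not depend on how the
        # binding represents the 'state' payload.
        checkbox.stateChanged.connect(self._on_change)
        self._checkbox = checkbox

    def _on_change(self):
        is_checked = self._checkbox.isChecked()
        print("version up enabled:", is_checked)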


@ -41,7 +41,7 @@ class ScrollMessageBox(QtWidgets.QDialog):
""" """
def __init__(self, icon, title, messages, cancelable=False): def __init__(self, icon, title, messages, cancelable=False):
super(ScrollMessageBox, self).__init__() super().__init__()
self.setWindowTitle(title) self.setWindowTitle(title)
self.icon = icon self.icon = icon
@ -49,8 +49,6 @@ class ScrollMessageBox(QtWidgets.QDialog):
self.setWindowFlags(QtCore.Qt.WindowTitleHint) self.setWindowFlags(QtCore.Qt.WindowTitleHint)
layout = QtWidgets.QVBoxLayout(self)
scroll_widget = QtWidgets.QScrollArea(self) scroll_widget = QtWidgets.QScrollArea(self)
scroll_widget.setWidgetResizable(True) scroll_widget.setWidgetResizable(True)
content_widget = QtWidgets.QWidget(self) content_widget = QtWidgets.QWidget(self)
@ -63,14 +61,8 @@ class ScrollMessageBox(QtWidgets.QDialog):
content_layout.addWidget(label_widget) content_layout.addWidget(label_widget)
message_len = max(message_len, len(message)) message_len = max(message_len, len(message))
# guess size of scrollable area # Set minimum width
# WARNING: 'desktop' method probably won't work in PySide6 scroll_widget.setMinimumWidth(360)
desktop = QtWidgets.QApplication.desktop()
max_width = desktop.availableGeometry().width()
scroll_widget.setMinimumWidth(
min(max_width, message_len * 6)
)
layout.addWidget(scroll_widget)
buttons = QtWidgets.QDialogButtonBox.Ok buttons = QtWidgets.QDialogButtonBox.Ok
if cancelable: if cancelable:
@ -86,7 +78,9 @@ class ScrollMessageBox(QtWidgets.QDialog):
btn.clicked.connect(self._on_copy_click) btn.clicked.connect(self._on_copy_click)
btn_box.addButton(btn, QtWidgets.QDialogButtonBox.NoRole) btn_box.addButton(btn, QtWidgets.QDialogButtonBox.NoRole)
layout.addWidget(btn_box) main_layout = QtWidgets.QVBoxLayout(self)
main_layout.addWidget(scroll_widget, 1)
main_layout.addWidget(btn_box, 0)
def _on_copy_click(self): def _on_copy_click(self):
clipboard = QtWidgets.QApplication.clipboard() clipboard = QtWidgets.QApplication.clipboard()
@ -104,7 +98,7 @@ class SimplePopup(QtWidgets.QDialog):
on_clicked = QtCore.Signal() on_clicked = QtCore.Signal()
def __init__(self, parent=None, *args, **kwargs): def __init__(self, parent=None, *args, **kwargs):
super(SimplePopup, self).__init__(parent=parent, *args, **kwargs) super().__init__(parent=parent, *args, **kwargs)
# Set default title # Set default title
self.setWindowTitle("Popup") self.setWindowTitle("Popup")
@ -161,7 +155,7 @@ class SimplePopup(QtWidgets.QDialog):
geo = self._calculate_window_geometry() geo = self._calculate_window_geometry()
self.setGeometry(geo) self.setGeometry(geo)
return super(SimplePopup, self).showEvent(event) return super().showEvent(event)
def _on_clicked(self): def _on_clicked(self):
"""Callback for when the 'show' button is clicked. """Callback for when the 'show' button is clicked.
@ -228,9 +222,7 @@ class PopupUpdateKeys(SimplePopup):
on_clicked_state = QtCore.Signal(bool) on_clicked_state = QtCore.Signal(bool)
def __init__(self, parent=None, *args, **kwargs): def __init__(self, parent=None, *args, **kwargs):
super(PopupUpdateKeys, self).__init__( super().__init__(parent=parent, *args, **kwargs)
parent=parent, *args, **kwargs
)
layout = self.layout() layout = self.layout()
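
The removed sizing code above depended on QtWidgets.QApplication.desktop(), which is gone in Qt 6; the replacement pins a fixed minimum width instead. If dynamic sizing were still wanted, a Qt 6-safe variant could look like this sketch (illustrative only, not part of the change):

from qtpy import QtWidgets


def guess_scroll_width(message_len, fallback=360):
    # Qt 6 removed QApplication.desktop(); query the primary QScreen instead.
    screen = QtWidgets.QApplication.primaryScreen()
    if screen is None:  # e.g. called before a QApplication exists
        return fallback
    max_width = screen.availableGeometry().width()
    return min(max_width, max(fallback, message_len * 6))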


@ -358,9 +358,8 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
if not self._host_is_valid: if not self._host_is_valid:
return return
self._folders_widget.set_project_name( self._project_name = self._controller.get_current_project_name()
self._controller.get_current_project_name() self._folders_widget.set_project_name(self._project_name)
)
def _on_save_as_finished(self, event): def _on_save_as_finished(self, event):
if event["failed"]: if event["failed"]:


@ -1,3 +1,3 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
"""Package declaring AYON addon 'core' version.""" """Package declaring AYON addon 'core' version."""
__version__ = "1.6.4+dev" __version__ = "1.6.7+dev"


@ -19,3 +19,6 @@ OpenTimelineIO = "0.16.0"
opencolorio = "^2.3.2,<2.4.0" opencolorio = "^2.3.2,<2.4.0"
Pillow = "9.5.0" Pillow = "9.5.0"
websocket-client = ">=0.40.0,<2" websocket-client = ">=0.40.0,<2"
[ayon.runtimeDependencies.darwin]
pyobjc-core = "^11.1"


@ -1,6 +1,6 @@
name = "core" name = "core"
title = "Core" title = "Core"
version = "1.6.4+dev" version = "1.6.7+dev"
client_dir = "ayon_core" client_dir = "ayon_core"


@ -5,7 +5,7 @@
[tool.poetry] [tool.poetry]
name = "ayon-core" name = "ayon-core"
version = "1.6.4+dev" version = "1.6.7+dev"
description = "" description = ""
authors = ["Ynput Team <team@ynput.io>"] authors = ["Ynput Team <team@ynput.io>"]
readme = "README.md" readme = "README.md"
@ -27,17 +27,6 @@ codespell = "^2.2.6"
semver = "^3.0.2" semver = "^3.0.2"
mypy = "^1.14.0" mypy = "^1.14.0"
mock = "^5.0.0" mock = "^5.0.0"
tomlkit = "^0.13.2"
requests = "^2.32.3"
mkdocs-material = "^9.6.7"
mkdocs-autoapi = "^0.4.0"
mkdocstrings-python = "^1.16.2"
mkdocs-minify-plugin = "^0.8.0"
markdown-checklist = "^0.4.4"
mdx-gh-links = "^0.4"
pymdown-extensions = "^10.14.3"
mike = "^2.1.3"
mkdocstrings-shell = "^1.0.2"
nxtools = "^1.6" nxtools = "^1.6"
[tool.poetry.group.test.dependencies] [tool.poetry.group.test.dependencies]


@ -454,7 +454,7 @@ DEFAULT_TOOLS_VALUES = {
"hosts": [], "hosts": [],
"task_types": [], "task_types": [],
"tasks": [], "tasks": [],
"template": "{product[type]}{Task[name]}{Variant}" "template": "{product[type]}{Task[name]}{Variant}<_{Aov}>"
}, },
{ {
"product_types": [ "product_types": [

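Regarding the template default changed just above: the angle brackets mark an optional section in AYON-style templates, so "_{Aov}" is only appended when an AOV value is available. A toy illustration of the intended result (not the ayon_core formatter, which is assumed to handle this syntax):

import re


def fill_product_template(template, data):
    """Toy illustration of '<...>' optional sections; not the ayon_core code."""
    def _optional(match):
        section = match.group(1)
        keys = re.findall(r"\{(\w+)", section)
        # Keep the section only when every key inside it has a value.
        if all(data.get(key) for key in keys):
            return section.format(**data)
        return ""

    resolved = re.sub(r"<([^<>]+)>", _optional, template)
    return resolved.format(**data)


data = {"product": {"type": "render"}, "Task": {"name": "Lighting"}, "Variant": "Main"}
print(fill_product_template("{product[type]}{Task[name]}{Variant}<_{Aov}>", data))
# renderLightingMain
print(fill_product_template(
    "{product[type]}{Task[name]}{Variant}<_{Aov}>", dict(data, Aov="beauty")
))
# renderLightingMain_beauty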

@ -246,75 +246,75 @@ def test_multiple_review_clips_no_gap():
expected = [ expected = [
# 10 head black frames generated from gap (991-1000) # 10 head black frames generated from gap (991-1000)
'/path/to/ffmpeg -t 0.4 -r 25.0 -f lavfi' '/path/to/ffmpeg -t 0.4 -r 25.0 -f lavfi'
' -i color=c=black:s=1280x720 -tune ' ' -i color=c=black:s=1920x1080 -tune '
'stillimage -start_number 991 -pix_fmt rgba C:/result/output.%04d.png', 'stillimage -start_number 991 -pix_fmt rgba C:/result/output.%04d.png',
        # Alternating 25fps tiff sequence and 24fps exr sequence # Alternating 25fps tiff sequence and 24fps exr sequence
# for 100 frames each # for 100 frames each
'/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
f'C:\\no_tc{os.sep}output.%04d.tif ' f'C:\\no_tc{os.sep}output.%04d.tif '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 1001 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 1001 -pix_fmt rgba C:/result/output.%04d.png',
'/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i '
f'C:\\with_tc{os.sep}output.%04d.exr ' f'C:\\with_tc{os.sep}output.%04d.exr '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 1102 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 1102 -pix_fmt rgba C:/result/output.%04d.png',
'/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
f'C:\\no_tc{os.sep}output.%04d.tif ' f'C:\\no_tc{os.sep}output.%04d.tif '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 1198 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 1198 -pix_fmt rgba C:/result/output.%04d.png',
'/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i '
f'C:\\with_tc{os.sep}output.%04d.exr ' f'C:\\with_tc{os.sep}output.%04d.exr '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 1299 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 1299 -pix_fmt rgba C:/result/output.%04d.png',
# Repeated 25fps tiff sequence multiple times till the end # Repeated 25fps tiff sequence multiple times till the end
'/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
f'C:\\no_tc{os.sep}output.%04d.tif ' f'C:\\no_tc{os.sep}output.%04d.tif '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 1395 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 1395 -pix_fmt rgba C:/result/output.%04d.png',
'/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
f'C:\\no_tc{os.sep}output.%04d.tif ' f'C:\\no_tc{os.sep}output.%04d.tif '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 1496 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 1496 -pix_fmt rgba C:/result/output.%04d.png',
'/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
f'C:\\no_tc{os.sep}output.%04d.tif ' f'C:\\no_tc{os.sep}output.%04d.tif '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 1597 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 1597 -pix_fmt rgba C:/result/output.%04d.png',
'/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
f'C:\\no_tc{os.sep}output.%04d.tif ' f'C:\\no_tc{os.sep}output.%04d.tif '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 1698 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 1698 -pix_fmt rgba C:/result/output.%04d.png',
'/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
f'C:\\no_tc{os.sep}output.%04d.tif ' f'C:\\no_tc{os.sep}output.%04d.tif '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 1799 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 1799 -pix_fmt rgba C:/result/output.%04d.png',
'/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
f'C:\\no_tc{os.sep}output.%04d.tif ' f'C:\\no_tc{os.sep}output.%04d.tif '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 1900 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 1900 -pix_fmt rgba C:/result/output.%04d.png',
'/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
f'C:\\no_tc{os.sep}output.%04d.tif ' f'C:\\no_tc{os.sep}output.%04d.tif '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 2001 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 2001 -pix_fmt rgba C:/result/output.%04d.png',
'/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
f'C:\\no_tc{os.sep}output.%04d.tif ' f'C:\\no_tc{os.sep}output.%04d.tif '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 2102 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 2102 -pix_fmt rgba C:/result/output.%04d.png',
'/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
f'C:\\no_tc{os.sep}output.%04d.tif ' f'C:\\no_tc{os.sep}output.%04d.tif '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 2203 -pix_fmt rgba C:/result/output.%04d.png' '-start_number 2203 -pix_fmt rgba C:/result/output.%04d.png'
] ]
@ -348,12 +348,12 @@ def test_multiple_review_clips_with_gap():
'/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i '
f'C:\\with_tc{os.sep}output.%04d.exr ' f'C:\\with_tc{os.sep}output.%04d.exr '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 1003 -pix_fmt rgba C:/result/output.%04d.png', '-start_number 1003 -pix_fmt rgba C:/result/output.%04d.png',
'/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i ' '/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i '
f'C:\\with_tc{os.sep}output.%04d.exr ' f'C:\\with_tc{os.sep}output.%04d.exr '
'-vf scale=1280:720:flags=lanczos -compression_level 5 ' '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
'-start_number 1091 -pix_fmt rgba C:/result/output.%04d.png' '-start_number 1091 -pix_fmt rgba C:/result/output.%04d.png'
] ]