mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 12:54:40 +01:00)

Merge branch 'develop' into enhancement/1416-loader-actions

commit 174807277a

21 changed files with 478 additions and 128 deletions

.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 2 changes)

@@ -35,6 +35,8 @@ body:
      label: Version
      description: What version are you running? Look to AYON Tray
      options:
        - 1.6.7
        - 1.6.6
        - 1.6.5
        - 1.6.4
        - 1.6.3

@@ -141,6 +141,9 @@ def _get_ayon_bundle_data() -> tuple[
]:
    studio_bundle_name = os.environ.get("AYON_STUDIO_BUNDLE_NAME")
    project_bundle_name = os.getenv("AYON_BUNDLE_NAME")
    # If AYON launcher <1.4.0 was used
    if not studio_bundle_name:
        studio_bundle_name = project_bundle_name
    bundles = ayon_api.get_bundles()["bundles"]
    studio_bundle = next(
        (

@@ -12,6 +12,7 @@ from .local_settings import (
    get_launcher_storage_dir,
    get_addons_resources_dir,
    get_local_site_id,
    get_ayon_user_entity,
    get_ayon_username,
)
from .ayon_connection import initialize_ayon_connection

@@ -74,6 +75,7 @@ from .log import (
)

from .path_templates import (
    DefaultKeysDict,
    TemplateUnsolved,
    StringTemplate,
    FormatObject,

@@ -151,6 +153,7 @@ __all__ = [
    "get_launcher_storage_dir",
    "get_addons_resources_dir",
    "get_local_site_id",
    "get_ayon_user_entity",
    "get_ayon_username",

    "initialize_ayon_connection",

@@ -231,6 +234,7 @@ __all__ = [
    "get_version_from_path",
    "get_last_version_from_path",

    "DefaultKeysDict",
    "TemplateUnsolved",
    "StringTemplate",
    "FormatObject",

@@ -5,6 +5,7 @@ import json
import platform
import configparser
import warnings
import copy
from datetime import datetime
from abc import ABC, abstractmethod
from functools import lru_cache

@@ -13,6 +14,8 @@ from typing import Optional, Any
import platformdirs
import ayon_api

from .cache import NestedCacheItem, CacheItem

_PLACEHOLDER = object()


@@ -23,6 +26,7 @@ class RegistryItemNotFound(ValueError):

class _Cache:
    username = None
    user_entities_by_name = NestedCacheItem()


def _get_ayon_appdirs(*args: str) -> str:

@@ -569,6 +573,68 @@ def get_local_site_id():
    return site_id


def _get_ayon_service_username() -> Optional[str]:
    # TODO @iLLiCiTiT - do not use private attribute of 'ServerAPI', rather
    #   use public method to get username from connection stack.
    con = ayon_api.get_server_api_connection()
    user_stack = getattr(con, "_as_user_stack", None)
    if user_stack is None:
        return None
    return user_stack.username


def get_ayon_user_entity(username: Optional[str] = None) -> dict[str, Any]:
    """AYON user entity used for templates and publishing.

    Note:
        Usually only service and admin users can receive the full user entity.

    Args:
        username (Optional[str]): Username of the user. If not passed, then
            the current user in 'ayon_api' is used.

    Returns:
        dict[str, Any]: User entity.

    """
    service_username = _get_ayon_service_username()
    # Handle the service user first
    if service_username:
        if username is None:
            username = service_username
        cache: CacheItem = _Cache.user_entities_by_name[username]
        if not cache.is_valid:
            if username == service_username:
                user = ayon_api.get_user()
            else:
                user = ayon_api.get_user(username)
            cache.update_data(user)
        return copy.deepcopy(cache.get_data())

    # Cache current user
    current_user = None
    if _Cache.username is None:
        current_user = ayon_api.get_user()
        _Cache.username = current_user["name"]

    if username is None:
        username = _Cache.username

    cache: CacheItem = _Cache.user_entities_by_name[username]
    if not cache.is_valid:
        user = None
        if username == _Cache.username:
            if current_user is None:
                current_user = ayon_api.get_user()
            user = current_user

        if user is None:
            user = ayon_api.get_user(username)
        cache.update_data(user)

    return copy.deepcopy(cache.get_data())


def get_ayon_username():
    """AYON username used for templates and publishing.

@@ -578,20 +644,5 @@ def get_ayon_username():
        str: Username.

    """
    # Look for username in the connection stack
    # - this is used when service is working as other user
    #   (e.g. in background sync)
    # TODO @iLLiCiTiT - do not use private attribute of 'ServerAPI', rather
    #   use public method to get username from connection stack.
    con = ayon_api.get_server_api_connection()
    user_stack = getattr(con, "_as_user_stack", None)
    if user_stack is not None:
        username = user_stack.username
        if username is not None:
            return username

    # Cache the username to avoid multiple API calls
    # - it is not expected that user would change
    if _Cache.username is None:
        _Cache.username = ayon_api.get_user()["name"]
    return _Cache.username
    user = get_ayon_user_entity()
    return user["name"]

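The two helpers above centralize user lookups: get_ayon_user_entity caches entities per username and honors the service-user stack, and get_ayon_username now simply reads the entity's name. A minimal usage sketch, assuming a configured ayon_api connection; the username "artist01" is a placeholder:

from ayon_core.lib import get_ayon_user_entity, get_ayon_username

# Entity of the current ayon_api user; cached after the first call
current_user = get_ayon_user_entity()

# Entity of an explicit user; the server may restrict this to admin/service users
other_user = get_ayon_user_entity("artist01")

# The username helper now just reads the cached entity
assert get_ayon_username() == current_user["name"]
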
@@ -1,3 +1,5 @@
from __future__ import annotations

import os
import re
import copy

@@ -5,11 +7,7 @@ import numbers
import warnings
import platform
from string import Formatter
import typing
from typing import List, Dict, Any, Set

if typing.TYPE_CHECKING:
    from typing import Union
    from typing import Any, Union, Iterable

SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)")
OPTIONAL_PATTERN = re.compile(r"(<.*?[^{0]*>)[^0-9]*?")

@@ -44,6 +42,54 @@ class TemplateUnsolved(Exception):
        )


class DefaultKeysDict(dict):
    """Dictionary that supports the default key to use for str conversion.

    Helpful for changes of a key in a template from string to dictionary,
    for example '{folder}' -> '{folder[name]}'.
    >>> data = {
    ...     "folder": DefaultKeysDict("name", {"name": "FolderName"})
    ... }
    >>> print("{folder[name]}".format_map(data))
    FolderName
    >>> print("{folder}".format_map(data))
    FolderName

    Args:
        default_keys (Union[str, Iterable[str]]): Default key to use for str
            conversion. Can also expect multiple keys for more nested
            dictionary.

    """
    def __init__(
        self, default_keys: Union[str, Iterable[str]], *args, **kwargs
    ) -> None:
        if isinstance(default_keys, str):
            default_keys = [default_keys]
        else:
            default_keys = list(default_keys)
        if not default_keys:
            raise ValueError(
                "Default key must be set. Got empty default keys."
            )

        self._default_keys = default_keys
        super().__init__(*args, **kwargs)

    def __str__(self) -> str:
        return str(self.get_default_value())

    def get_default_keys(self) -> list[str]:
        return list(self._default_keys)

    def get_default_value(self) -> Any:
        value = self
        for key in self._default_keys:
            value = value[key]
        return value


class StringTemplate:
    """String that can be formatted."""
    def __init__(self, template: str):

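A short sketch of how DefaultKeysDict behaves (the values "jdoe", "Jane Doe" and the attribute key "fullName" are placeholders): str() falls back to the configured default key or key path, while normal dict access is unchanged.

from ayon_core.lib import DefaultKeysDict

# Single default key: str() falls back to the "name" entry
user = DefaultKeysDict(
    "name",
    {"name": "jdoe", "attrib": {"fullName": "Jane Doe"}}
)
print(str(user))                             # jdoe
print("{0[attrib][fullName]}".format(user))  # Jane Doe

# Multiple default keys walk into nested dictionaries
entry = DefaultKeysDict(("attrib", "fullName"), {"attrib": {"fullName": "Jane Doe"}})
print(str(entry))                            # Jane Doe
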
@@ -84,7 +130,7 @@ class StringTemplate:
            if substr:
                new_parts.append(substr)

        self._parts: List["Union[str, OptionalPart, FormattingPart]"] = (
        self._parts: list[Union[str, OptionalPart, FormattingPart]] = (
            self.find_optional_parts(new_parts)
        )

@@ -105,7 +151,7 @@ class StringTemplate:
    def template(self) -> str:
        return self._template

    def format(self, data: Dict[str, Any]) -> "TemplateResult":
    def format(self, data: dict[str, Any]) -> "TemplateResult":
        """Figure out with whole formatting.

        Separate advanced keys (*Like '{project[name]}') from string which must

@@ -145,29 +191,29 @@ class StringTemplate:
            invalid_types
        )

    def format_strict(self, data: Dict[str, Any]) -> "TemplateResult":
    def format_strict(self, data: dict[str, Any]) -> "TemplateResult":
        result = self.format(data)
        result.validate()
        return result

    @classmethod
    def format_template(
        cls, template: str, data: Dict[str, Any]
        cls, template: str, data: dict[str, Any]
    ) -> "TemplateResult":
        objected_template = cls(template)
        return objected_template.format(data)

    @classmethod
    def format_strict_template(
        cls, template: str, data: Dict[str, Any]
        cls, template: str, data: dict[str, Any]
    ) -> "TemplateResult":
        objected_template = cls(template)
        return objected_template.format_strict(data)

    @staticmethod
    def find_optional_parts(
        parts: List["Union[str, FormattingPart]"]
    ) -> List["Union[str, OptionalPart, FormattingPart]"]:
        parts: list[Union[str, FormattingPart]]
    ) -> list[Union[str, OptionalPart, FormattingPart]]:
        new_parts = []
        tmp_parts = {}
        counted_symb = -1

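For orientation, a brief sketch of the two entry points touched above, with illustrative values only: format() returns a TemplateResult that records missing keys, and the strict variants raise TemplateUnsolved when a required key is absent.

from ayon_core.lib import StringTemplate, TemplateUnsolved

template = StringTemplate("{project[name]}_v{version:0>3}")
result = template.format({"project": {"name": "Demo"}, "version": 7})
print(result)         # Demo_v007
print(result.solved)  # True

try:
    # Strict variant raises when required keys are missing from the data
    StringTemplate.format_strict_template("{project[name]}", {})
except TemplateUnsolved:
    print("'project' key is missing")
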
@@ -192,7 +238,7 @@ class StringTemplate:
                len(parts) == 1
                and isinstance(parts[0], str)
            ):
                value = "<{}>".format(parts[0])
                value = f"<{parts[0]}>"
            else:
                value = OptionalPart(parts)

@@ -223,7 +269,7 @@ class TemplateResult(str):
            only used keys.
        solved (bool): For check if all required keys were filled.
        template (str): Original template.
        missing_keys (Iterable[str]): Missing keys that were not in the data.
        missing_keys (list[str]): Missing keys that were not in the data.
            Include missing optional keys.
        invalid_types (dict): When key was found in data, but value had not
            allowed DataType. Allowed data types are `numbers`,

@@ -232,11 +278,11 @@ class TemplateResult(str):
            of number.
    """

    used_values: Dict[str, Any] = None
    used_values: dict[str, Any] = None
    solved: bool = None
    template: str = None
    missing_keys: List[str] = None
    invalid_types: Dict[str, Any] = None
    missing_keys: list[str] = None
    invalid_types: dict[str, Any] = None

    def __new__(
        cls, filled_template, template, solved,

@@ -296,21 +342,21 @@ class TemplatePartResult:
    """Result to store result of template parts."""
    def __init__(self, optional: bool = False):
        # Missing keys or invalid value types of required keys
        self._missing_keys: Set[str] = set()
        self._invalid_types: Dict[str, Any] = {}
        self._missing_keys: set[str] = set()
        self._invalid_types: dict[str, Any] = {}
        # Missing keys or invalid value types of optional keys
        self._missing_optional_keys: Set[str] = set()
        self._invalid_optional_types: Dict[str, Any] = {}
        self._missing_optional_keys: set[str] = set()
        self._invalid_optional_types: dict[str, Any] = {}

        # Used values stored by key with origin type
        # - key without any padding or key modifiers
        # - value from filling data
        # Example: {"version": 1}
        self._used_values: Dict[str, Any] = {}
        self._used_values: dict[str, Any] = {}
        # Used values stored by key with all modifiers
        # - value is already formatted string
        # Example: {"version:0>3": "001"}
        self._really_used_values: Dict[str, Any] = {}
        self._really_used_values: dict[str, Any] = {}
        # Concatenated string output after formatting
        self._output: str = ""
        # Is this result from optional part

@@ -336,8 +382,9 @@ class TemplatePartResult:
            self._really_used_values.update(other.really_used_values)

        else:
            raise TypeError("Cannot add data from \"{}\" to \"{}\"".format(
                str(type(other)), self.__class__.__name__)
            raise TypeError(
                f"Cannot add data from \"{type(other)}\""
                f" to \"{self.__class__.__name__}\""
            )

    @property

@@ -362,40 +409,41 @@ class TemplatePartResult:
        return self._output

    @property
    def missing_keys(self) -> Set[str]:
    def missing_keys(self) -> set[str]:
        return self._missing_keys

    @property
    def missing_optional_keys(self) -> Set[str]:
    def missing_optional_keys(self) -> set[str]:
        return self._missing_optional_keys

    @property
    def invalid_types(self) -> Dict[str, Any]:
    def invalid_types(self) -> dict[str, Any]:
        return self._invalid_types

    @property
    def invalid_optional_types(self) -> Dict[str, Any]:
    def invalid_optional_types(self) -> dict[str, Any]:
        return self._invalid_optional_types

    @property
    def really_used_values(self) -> Dict[str, Any]:
    def really_used_values(self) -> dict[str, Any]:
        return self._really_used_values

    @property
    def realy_used_values(self) -> Dict[str, Any]:
    def realy_used_values(self) -> dict[str, Any]:
        warnings.warn(
            "Property 'realy_used_values' is deprecated."
            " Use 'really_used_values' instead.",
            DeprecationWarning
            DeprecationWarning,
            stacklevel=2,
        )
        return self._really_used_values

    @property
    def used_values(self) -> Dict[str, Any]:
    def used_values(self) -> dict[str, Any]:
        return self._used_values

    @staticmethod
    def split_keys_to_subdicts(values: Dict[str, Any]) -> Dict[str, Any]:
    def split_keys_to_subdicts(values: dict[str, Any]) -> dict[str, Any]:
        output = {}
        formatter = Formatter()
        for key, value in values.items():

@@ -410,7 +458,7 @@ class TemplatePartResult:
                data[last_key] = value
        return output

    def get_clean_used_values(self) -> Dict[str, Any]:
    def get_clean_used_values(self) -> dict[str, Any]:
        new_used_values = {}
        for key, value in self.used_values.items():
            if isinstance(value, FormatObject):

@@ -426,7 +474,8 @@ class TemplatePartResult:
        warnings.warn(
            "Method 'add_realy_used_value' is deprecated."
            " Use 'add_really_used_value' instead.",
            DeprecationWarning
            DeprecationWarning,
            stacklevel=2,
        )
        self.add_really_used_value(key, value)

@@ -479,7 +528,7 @@ class FormattingPart:
        self,
        field_name: str,
        format_spec: str,
        conversion: "Union[str, None]",
        conversion: Union[str, None],
    ):
        format_spec_v = ""
        if format_spec:

@@ -546,7 +595,7 @@ class FormattingPart:
        return not queue

    @staticmethod
    def keys_to_template_base(keys: List[str]):
    def keys_to_template_base(keys: list[str]):
        if not keys:
            return None
        # Create copy of keys

@@ -556,7 +605,7 @@ class FormattingPart:
        return f"{template_base}{joined_keys}"

    def format(
        self, data: Dict[str, Any], result: TemplatePartResult
        self, data: dict[str, Any], result: TemplatePartResult
    ) -> TemplatePartResult:
        """Format the formatting string.

@@ -635,6 +684,12 @@ class FormattingPart:
            result.add_output(self.template)
            return result

        if isinstance(value, DefaultKeysDict):
            try:
                value = value.get_default_value()
            except KeyError:
                pass

        if not self.validate_value_type(value):
            result.add_invalid_type(key, value)
            result.add_output(self.template)

@@ -687,23 +742,25 @@ class OptionalPart:

    def __init__(
        self,
        parts: List["Union[str, OptionalPart, FormattingPart]"]
        parts: list[Union[str, OptionalPart, FormattingPart]]
    ):
        self._parts: List["Union[str, OptionalPart, FormattingPart]"] = parts
        self._parts: list[Union[str, OptionalPart, FormattingPart]] = parts

    @property
    def parts(self) -> List["Union[str, OptionalPart, FormattingPart]"]:
    def parts(self) -> list[Union[str, OptionalPart, FormattingPart]]:
        return self._parts

    def __str__(self) -> str:
        return "<{}>".format("".join([str(p) for p in self._parts]))
        joined_parts = "".join([str(p) for p in self._parts])
        return f"<{joined_parts}>"

    def __repr__(self) -> str:
        return "<Optional:{}>".format("".join([str(p) for p in self._parts]))
        joined_parts = "".join([str(p) for p in self._parts])
        return f"<Optional:{joined_parts}>"

    def format(
        self,
        data: Dict[str, Any],
        data: dict[str, Any],
        result: TemplatePartResult,
    ) -> TemplatePartResult:
        new_result = TemplatePartResult(True)

@@ -202,7 +202,8 @@ def is_clip_from_media_sequence(otio_clip):


def remap_range_on_file_sequence(otio_clip, otio_range):
    """
    """Remap the provided range on a file sequence clip.

    Args:
        otio_clip (otio.schema.Clip): The OTIO clip to check.
        otio_range (otio.schema.TimeRange): The trim range to apply.

@@ -249,7 +250,11 @@ def remap_range_on_file_sequence(otio_clip, otio_range):
    if (
        is_clip_from_media_sequence(otio_clip)
        and available_range_start_frame == media_ref.start_frame
        and conformed_src_in.to_frames() < media_ref.start_frame

        # source range should be included in available range from media
        # using round instead of conformed_src_in.to_frames() to avoid
        # any precision issue with frame rate.
        and round(conformed_src_in.value) < media_ref.start_frame
    ):
        media_in = otio.opentime.RationalTime(
            0, rate=available_range_rate

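The intent of replacing conformed_src_in.to_frames() with round(conformed_src_in.value): rate-conformed times are floats, so a value that should sit exactly on a frame can land just below it, and an integer-truncating conversion drops one frame while round() snaps it back. A plain-Python illustration with a made-up value:

# A rate-conformed source-in that should be frame 1001
conformed_value = 1000.9999999999999

print(int(conformed_value))    # 1000 -> truncation lands one frame early
print(round(conformed_value))  # 1001 -> what the start-frame comparison needs
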
@@ -983,7 +983,26 @@ def get_instance_expected_output_path(
        "version": version
    })

    path_template_obj = anatomy.get_template_item("publish", "default")["path"]
    # Get instance publish template name
    task_name = task_type = None
    task_entity = instance.data.get("taskEntity")
    if task_entity:
        task_name = task_entity["name"]
        task_type = task_entity["taskType"]

    template_name = get_publish_template_name(
        project_name=instance.context.data["projectName"],
        host_name=instance.context.data["hostName"],
        product_type=instance.data["productType"],
        task_name=task_name,
        task_type=task_type,
        project_settings=instance.context.data["project_settings"],
    )

    path_template_obj = anatomy.get_template_item(
        "publish",
        template_name
    )["path"]
    template_filled = path_template_obj.format_strict(template_data)
    return os.path.normpath(template_filled)

@@ -1,27 +1,50 @@
from __future__ import annotations

from typing import Optional, Any

import ayon_api

from ayon_core.settings import get_studio_settings
from ayon_core.lib.local_settings import get_ayon_username
from ayon_core.lib import DefaultKeysDict
from ayon_core.lib.local_settings import get_ayon_user_entity


def get_general_template_data(settings=None, username=None):
def get_general_template_data(
    settings: Optional[dict[str, Any]] = None,
    username: Optional[str] = None,
    user_entity: Optional[dict[str, Any]] = None,
):
    """General template data based on system settings or machine.

    Output contains formatting keys:
    - 'studio[name]' - Studio name filled from system settings
    - 'studio[code]' - Studio code filled from system settings
    - 'user' - User's name using 'get_ayon_username'
    - 'studio[name]' - Studio name filled from system settings
    - 'studio[code]' - Studio code filled from system settings
    - 'user[name]' - User's name
    - 'user[attrib][...]' - User's attributes
    - 'user[data][...]' - User's data

    Args:
        settings (Dict[str, Any]): Studio or project settings.
        username (Optional[str]): AYON Username.
    """
        user_entity (Optional[dict[str, Any]]): User entity.

    """
    if not settings:
        settings = get_studio_settings()

    if username is None:
        username = get_ayon_username()
    if user_entity is None:
        user_entity = get_ayon_user_entity(username)

    # Use dictionary with default value for backwards compatibility
    # - we did support '{user}' now it should be '{user[name]}'
    user_data = DefaultKeysDict(
        "name",
        {
            "name": user_entity["name"],
            "attrib": user_entity["attrib"],
            "data": user_entity["data"],
        }
    )

    core_settings = settings["core"]
    return {

@@ -29,7 +52,7 @@ def get_general_template_data(settings=None, username=None):
            "name": core_settings["studio_name"],
            "code": core_settings["studio_code"]
        },
        "user": username
        "user": user_data,
    }


@@ -150,7 +173,8 @@ def get_template_data(
    task_entity=None,
    host_name=None,
    settings=None,
    username=None
    username=None,
    user_entity=None,
):
    """Prepare data for templates filling from entered documents and info.

@@ -173,13 +197,18 @@
        host_name (Optional[str]): Used to fill '{app}' key.
        settings (Union[Dict, None]): Prepared studio or project settings.
            They're queried if not passed (may be slower).
        username (Optional[str]): AYON Username.
        username (Optional[str]): DEPRECATED AYON Username.
        user_entity (Optional[dict[str, Any]]): AYON user entity.

    Returns:
        Dict[str, Any]: Data prepared for filling workdir template.
    """

    template_data = get_general_template_data(settings, username=username)
    template_data = get_general_template_data(
        settings,
        username=username,
        user_entity=user_entity,
    )
    template_data.update(get_project_template_data(project_entity))
    if folder_entity:
        template_data.update(get_folder_template_data(

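A sketch of what the reworked get_general_template_data() returns and how the new keys are meant to be used in templates. It assumes a configured AYON connection, and the attribute key "fullName" is an assumption rather than something guaranteed by this diff:

from ayon_core.lib import StringTemplate
from ayon_core.pipeline.template_data import get_general_template_data

data = get_general_template_data()
# Roughly:
# {
#     "studio": {"name": "...", "code": "..."},
#     "user": DefaultKeysDict("name", {"name": ..., "attrib": {...}, "data": {...}}),
# }

# Old-style and new-style user keys both resolve thanks to DefaultKeysDict
print(StringTemplate.format_template("{user}", data))
print(StringTemplate.format_template("{user[name]}", data))
print(StringTemplate.format_template("{user[attrib][fullName]}", data))
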
@@ -16,6 +16,7 @@ Provides:
import json
import pyblish.api

from ayon_core.lib import get_ayon_user_entity
from ayon_core.pipeline.template_data import get_template_data


@@ -55,17 +56,18 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin):
        if folder_entity:
            task_entity = context.data["taskEntity"]

        username = context.data["user"]
        user_entity = get_ayon_user_entity(username)
        anatomy_data = get_template_data(
            project_entity,
            folder_entity,
            task_entity,
            host_name,
            project_settings
            host_name=host_name,
            settings=project_settings,
            user_entity=user_entity,
        )
        anatomy_data.update(context.data.get("datetimeData") or {})

        username = context.data["user"]
        anatomy_data["user"] = username
        # Backwards compatibility for 'username' key
        anatomy_data["username"] = username

@@ -71,6 +71,12 @@ class CollectOtioRanges(pyblish.api.InstancePlugin):
        import opentimelineio as otio

        otio_clip = instance.data["otioClip"]
        if isinstance(
            otio_clip.media_reference,
            otio.schema.MissingReference
        ):
            self.log.info("Clip has no media reference")
            return

        # Collect timeline ranges if workfile start frame is available
        if "workfileFrameStart" in instance.data:

@@ -60,6 +60,13 @@ class CollectOtioSubsetResources(

        # get basic variables
        otio_clip = instance.data["otioClip"]
        if isinstance(
            otio_clip.media_reference,
            otio.schema.MissingReference
        ):
            self.log.info("Clip has no media reference")
            return

        otio_available_range = otio_clip.available_range()
        media_fps = otio_available_range.start_time.rate
        available_duration = otio_available_range.duration.value

@@ -130,7 +130,7 @@ class ExtractOTIOReview(
        # NOTE it looks like it is set only in hiero integration
        res_data = {"width": self.to_width, "height": self.to_height}
        for key in res_data:
            for meta_prefix in ("ayon.source.", "openpype.source."):
            for meta_prefix in ("ayon.source", "openpype.source"):
                meta_key = f"{meta_prefix}.{key}"
                value = media_metadata.get(meta_key)
                if value is not None:

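Why the trailing dots were dropped from the prefixes above: the key is joined to the prefix with an explicit dot in the f-string on the next line, so the old values produced a doubled dot and the metadata lookup never matched. A quick illustration:

key = "width"

old_key = f"{'ayon.source.'}.{key}"  # 'ayon.source..width' -> never found
new_key = f"{'ayon.source'}.{key}"   # 'ayon.source.width'  -> matches the metadata key
print(old_key, new_key)
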
@@ -121,7 +121,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        "version",
        "representation",
        "username",
        "user",
        "output",
        # OpenPype keys - should be removed
        "asset",  # folder[name]

@@ -796,6 +795,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
            if value is not None:
                repre_context[key] = value

        # Keep only username
        # NOTE This is to avoid storing all user attributes and data
        #   to representation
        if "user" not in repre_context:
            repre_context["user"] = {
                "name": template_data["user"]["name"]
            }

        # Use previous representation's id if there is a name match
        existing = existing_repres_by_name.get(repre["name"].lower())
        repre_id = None

@@ -89,7 +89,6 @@ class IntegrateHeroVersion(
        "family",
        "representation",
        "username",
        "user",
        "output"
    ]
    # QUESTION/TODO this process should happen on server if crashed due to

@@ -364,6 +363,14 @@ class IntegrateHeroVersion(
            if value is not None:
                repre_context[key] = value

        # Keep only username
        # NOTE This is to avoid storing all user attributes and data
        #   to representation
        if "user" not in repre_context:
            repre_context["user"] = {
                "name": anatomy_data["user"]["name"]
            }

        # Prepare new repre
        repre_entity = copy.deepcopy(repre_info["representation"])
        repre_entity.pop("id", None)

@@ -3,9 +3,10 @@ import re
import copy
import itertools
import sys
import tempfile
import traceback
import uuid
from typing import Optional, Dict
from typing import Optional, Any

import ayon_api
from ayon_api.utils import create_entity_id

@@ -225,8 +226,8 @@ class ProjectPushRepreItem:
    but filenames are not template based.

    Args:
        repre_entity (Dict[str, Any]): Representation entity.
        roots (Dict[str, str]): Project roots (based on project anatomy).
        repre_entity (dict[str, Any]): Representation entity.
        roots (dict[str, str]): Project roots (based on project anatomy).
    """

    def __init__(self, repre_entity, roots):

@@ -482,6 +483,8 @@ class ProjectPushItemProcess:
        self._log_info("Destination project was found")
        self._fill_or_create_destination_folder()
        self._log_info("Destination folder was determined")
        self._fill_or_create_destination_task()
        self._log_info("Destination task was determined")
        self._determine_product_type()
        self._determine_publish_template_name()
        self._determine_product_name()

@@ -650,10 +653,10 @@ class ProjectPushItemProcess:

    def _create_folder(
        self,
        src_folder_entity,
        project_entity,
        parent_folder_entity,
        folder_name
        src_folder_entity: dict[str, Any],
        project_entity: dict[str, Any],
        parent_folder_entity: dict[str, Any],
        folder_name: str
    ):
        parent_id = None
        if parent_folder_entity:

@@ -702,12 +705,19 @@ class ProjectPushItemProcess:
        if new_folder_name != folder_name:
            folder_label = folder_name

        # TODO find out how to define folder type
        src_folder_type = src_folder_entity["folderType"]
        dst_folder_type = self._get_dst_folder_type(
            project_entity,
            src_folder_type
        )
        new_thumbnail_id = self._create_new_folder_thumbnail(
            project_entity, src_folder_entity)
        folder_entity = new_folder_entity(
            folder_name,
            "Folder",
            dst_folder_type,
            parent_id=parent_id,
            attribs=new_folder_attrib
            attribs=new_folder_attrib,
            thumbnail_id=new_thumbnail_id
        )
        if folder_label:
            folder_entity["label"] = folder_label

@@ -727,10 +737,59 @@ class ProjectPushItemProcess:
            folder_entity["path"] = "/".join([parent_path, folder_name])
        return folder_entity

    def _create_new_folder_thumbnail(
        self,
        project_entity: dict[str, Any],
        src_folder_entity: dict[str, Any]
    ) -> Optional[str]:
        """Copy thumbnail possibly set on folder.

        Could be different from representation thumbnails, and it is only shown
        when folder is selected.
        """
        if not src_folder_entity["thumbnailId"]:
            return None

        thumbnail = ayon_api.get_folder_thumbnail(
            self._item.src_project_name,
            src_folder_entity["id"],
            src_folder_entity["thumbnailId"]
        )
        if not thumbnail.id:
            return None

        with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
            tmp_file.write(thumbnail.content)
            temp_file_path = tmp_file.name

        new_thumbnail_id = None
        try:
            new_thumbnail_id = ayon_api.create_thumbnail(
                project_entity["name"], temp_file_path)
        finally:
            if os.path.exists(temp_file_path):
                os.remove(temp_file_path)
        return new_thumbnail_id

    def _get_dst_folder_type(
        self,
        project_entity: dict[str, Any],
        src_folder_type: str
    ) -> str:
        """Get new folder type."""
        for folder_type in project_entity["folderTypes"]:
            if folder_type["name"].lower() == src_folder_type.lower():
                return folder_type["name"]

        self._status.set_failed(
            f"'{src_folder_type}' folder type is not configured in "
            f"project Anatomy."
        )
        raise PushToProjectError(self._status.fail_reason)

    def _fill_or_create_destination_folder(self):
        dst_project_name = self._item.dst_project_name
        dst_folder_id = self._item.dst_folder_id
        dst_task_name = self._item.dst_task_name
        new_folder_name = self._item.new_folder_name
        if not dst_folder_id and not new_folder_name:
            self._status.set_failed(

@@ -761,9 +820,11 @@ class ProjectPushItemProcess:
            new_folder_name
        )
        self._folder_entity = folder_entity
        if not dst_task_name:
            self._task_info = {}
            return

    def _fill_or_create_destination_task(self):
        folder_entity = self._folder_entity
        dst_task_name = self._item.dst_task_name
        dst_project_name = self._item.dst_project_name

        folder_path = folder_entity["path"]
        folder_tasks = {

@@ -772,6 +833,20 @@ class ProjectPushItemProcess:
                dst_project_name, folder_ids=[folder_entity["id"]]
            )
        }

        if not dst_task_name:
            src_task_info = self._get_src_task_info()
            if not src_task_info:  # really no task selected nor on source
                self._task_info = {}
                return

            dst_task_name = src_task_info["name"]
            if dst_task_name.lower() not in folder_tasks:
                task_info = self._make_sure_task_exists(
                    folder_entity, src_task_info
                )
                folder_tasks[dst_task_name.lower()] = task_info

        task_info = folder_tasks.get(dst_task_name.lower())
        if not task_info:
            self._status.set_failed(

@@ -790,7 +865,10 @@ class ProjectPushItemProcess:
            task_type["name"]: task_type
            for task_type in self._project_entity["taskTypes"]
        }
        task_type_info = task_types_by_name.get(task_type_name, {})
        task_type_info = copy.deepcopy(
            task_types_by_name.get(task_type_name, {})
        )
        task_type_info.pop("name")  # do not overwrite real task name
        task_info.update(task_type_info)
        self._task_info = task_info

@@ -925,8 +1003,8 @@ class ProjectPushItemProcess:
            version = get_versioning_start(
                project_name,
                self.host_name,
                task_name=self._task_info["name"],
                task_type=self._task_info["taskType"],
                task_name=self._task_info.get("name"),
                task_type=self._task_info.get("taskType"),
                product_type=product_type,
                product_name=product_entity["name"],
            )

@@ -950,10 +1028,16 @@ class ProjectPushItemProcess:
            existing_version_entity["attrib"].update(dst_attrib)
            self._version_entity = existing_version_entity
            return
        copied_tags = self._get_transferable_tags(src_version_entity)
        copied_status = self._get_transferable_status(src_version_entity)

        version_entity = new_version_entity(
            version,
            product_id,
            author=src_version_entity["author"],
            status=copied_status,
            tags=copied_tags,
            task_id=self._task_info.get("id"),
            attribs=dst_attrib,
            thumbnail_id=thumbnail_id,
        )

@@ -962,6 +1046,47 @@ class ProjectPushItemProcess:
        )
        self._version_entity = version_entity

    def _make_sure_task_exists(
        self,
        folder_entity: dict[str, Any],
        task_info: dict[str, Any],
    ) -> dict[str, Any]:
        """Creates destination task from source task information."""
        project_name = self._item.dst_project_name
        found_task_type = False
        src_task_type = task_info["taskType"]
        for task_type in self._project_entity["taskTypes"]:
            if task_type["name"].lower() == src_task_type.lower():
                found_task_type = True
                break

        if not found_task_type:
            self._status.set_failed(
                f"'{src_task_type}' task type is not configured in "
                "project Anatomy."
            )

            raise PushToProjectError(self._status.fail_reason)

        task_info = self._operations.create_task(
            project_name,
            task_info["name"],
            folder_id=folder_entity["id"],
            task_type=src_task_type,
            attrib=task_info["attrib"],
        )
        self._task_info = task_info.data
        return self._task_info

    def _get_src_task_info(self):
        src_version_entity = self._src_version_entity
        if not src_version_entity["taskId"]:
            return None
        src_task = ayon_api.get_task_by_id(
            self._item.src_project_name, src_version_entity["taskId"]
        )
        return src_task

    def _integrate_representations(self):
        try:
            self._real_integrate_representations()

@@ -1197,18 +1322,42 @@ class ProjectPushItemProcess:
            if context_value and isinstance(context_value, dict):
                for context_sub_key in context_value.keys():
                    value_to_update = formatting_data.get(context_key, {}).get(
                        context_sub_key)
                        context_sub_key
                    )
                    if value_to_update:
                        repre_context[context_key][
                            context_sub_key] = value_to_update
                        repre_context[context_key][context_sub_key] = (
                            value_to_update
                        )
            else:
                value_to_update = formatting_data.get(context_key)
                if value_to_update:
                    repre_context[context_key] = value_to_update
        if "task" not in formatting_data:
            repre_context.pop("task")
            repre_context.pop("task", None)
        return repre_context

    def _get_transferable_tags(self, src_version_entity):
        """Copy over only tags present in destination project."""
        dst_project_tags = [
            tag["name"] for tag in self._project_entity["tags"]
        ]
        copied_tags = []
        for src_tag in src_version_entity["tags"]:
            if src_tag in dst_project_tags:
                copied_tags.append(src_tag)
        return copied_tags

    def _get_transferable_status(self, src_version_entity):
        """Copy over status, first status if not matching found."""
        dst_project_statuses = {
            status["name"]: status
            for status in self._project_entity["statuses"]
        }
        copied_status = dst_project_statuses.get(src_version_entity["status"])
        if copied_status:
            return copied_status["name"]
        return None


class IntegrateModel:
    def __init__(self, controller):

@@ -1281,6 +1430,6 @@ class IntegrateModel:
            return
        item.integrate()

    def get_items(self) -> Dict[str, ProjectPushItemProcess]:
    def get_items(self) -> dict[str, ProjectPushItemProcess]:
        """Returns dict of all ProjectPushItemProcess items."""
        return self._process_items

@@ -358,9 +358,8 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
        if not self._host_is_valid:
            return

        self._folders_widget.set_project_name(
            self._controller.get_current_project_name()
        )
        self._project_name = self._controller.get_current_project_name()
        self._folders_widget.set_project_name(self._project_name)

    def _on_save_as_finished(self, event):
        if event["failed"]:

@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'core' version."""
__version__ = "1.6.5+dev"
__version__ = "1.6.7+dev"

@@ -19,3 +19,6 @@ OpenTimelineIO = "0.16.0"
opencolorio = "^2.3.2,<2.4.0"
Pillow = "9.5.0"
websocket-client = ">=0.40.0,<2"

[ayon.runtimeDependencies.darwin]
pyobjc-core = "^11.1"

@@ -1,6 +1,6 @@
name = "core"
title = "Core"
version = "1.6.5+dev"
version = "1.6.7+dev"

client_dir = "ayon_core"

@@ -5,7 +5,7 @@

[tool.poetry]
name = "ayon-core"
version = "1.6.5+dev"
version = "1.6.7+dev"
description = ""
authors = ["Ynput Team <team@ynput.io>"]
readme = "README.md"

@@ -246,75 +246,75 @@ def test_multiple_review_clips_no_gap():
    expected = [
        # 10 head black frames generated from gap (991-1000)
        '/path/to/ffmpeg -t 0.4 -r 25.0 -f lavfi'
        ' -i color=c=black:s=1280x720 -tune '
        ' -i color=c=black:s=1920x1080 -tune '
        'stillimage -start_number 991 -pix_fmt rgba C:/result/output.%04d.png',

        # Alternating 25fps tiff sequence and 24fps exr sequence
        # for 100 frames each
        '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
        f'C:\\no_tc{os.sep}output.%04d.tif '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 1001 -pix_fmt rgba C:/result/output.%04d.png',

        '/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i '
        f'C:\\with_tc{os.sep}output.%04d.exr '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 1102 -pix_fmt rgba C:/result/output.%04d.png',

        '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
        f'C:\\no_tc{os.sep}output.%04d.tif '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 1198 -pix_fmt rgba C:/result/output.%04d.png',

        '/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i '
        f'C:\\with_tc{os.sep}output.%04d.exr '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 1299 -pix_fmt rgba C:/result/output.%04d.png',

        # Repeated 25fps tiff sequence multiple times till the end
        '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
        f'C:\\no_tc{os.sep}output.%04d.tif '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 1395 -pix_fmt rgba C:/result/output.%04d.png',

        '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
        f'C:\\no_tc{os.sep}output.%04d.tif '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 1496 -pix_fmt rgba C:/result/output.%04d.png',

        '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
        f'C:\\no_tc{os.sep}output.%04d.tif '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 1597 -pix_fmt rgba C:/result/output.%04d.png',

        '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
        f'C:\\no_tc{os.sep}output.%04d.tif '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 1698 -pix_fmt rgba C:/result/output.%04d.png',

        '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
        f'C:\\no_tc{os.sep}output.%04d.tif '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 1799 -pix_fmt rgba C:/result/output.%04d.png',

        '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
        f'C:\\no_tc{os.sep}output.%04d.tif '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 1900 -pix_fmt rgba C:/result/output.%04d.png',

        '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
        f'C:\\no_tc{os.sep}output.%04d.tif '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 2001 -pix_fmt rgba C:/result/output.%04d.png',

        '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
        f'C:\\no_tc{os.sep}output.%04d.tif '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 2102 -pix_fmt rgba C:/result/output.%04d.png',

        '/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i '
        f'C:\\no_tc{os.sep}output.%04d.tif '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 2203 -pix_fmt rgba C:/result/output.%04d.png'
    ]

@@ -348,12 +348,12 @@ def test_multiple_review_clips_with_gap():

        '/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i '
        f'C:\\with_tc{os.sep}output.%04d.exr '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 1003 -pix_fmt rgba C:/result/output.%04d.png',

        '/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i '
        f'C:\\with_tc{os.sep}output.%04d.exr '
        '-vf scale=1280:720:flags=lanczos -compression_level 5 '
        '-vf scale=1920:1080:flags=lanczos -compression_level 5 '
        '-start_number 1091 -pix_fmt rgba C:/result/output.%04d.png'
    ]