Merge branch 'develop' into enhancement/get-repre-path-function

Ondřej Samohel 2025-11-07 10:07:00 +01:00 committed by GitHub
commit 7e13d33588
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
31 changed files with 879 additions and 240 deletions

View file

@ -141,6 +141,9 @@ def _get_ayon_bundle_data() -> tuple[
]:
studio_bundle_name = os.environ.get("AYON_STUDIO_BUNDLE_NAME")
project_bundle_name = os.getenv("AYON_BUNDLE_NAME")
# If AYON launcher <1.4.0 was used
if not studio_bundle_name:
studio_bundle_name = project_bundle_name
bundles = ayon_api.get_bundles()["bundles"]
studio_bundle = next(
(

View file

@ -11,6 +11,7 @@ from .local_settings import (
get_launcher_storage_dir,
get_addons_resources_dir,
get_local_site_id,
get_ayon_user_entity,
get_ayon_username,
)
from .ayon_connection import initialize_ayon_connection
@ -73,6 +74,7 @@ from .log import (
)
from .path_templates import (
DefaultKeysDict,
TemplateUnsolved,
StringTemplate,
FormatObject,
@ -148,6 +150,7 @@ __all__ = [
"get_launcher_storage_dir",
"get_addons_resources_dir",
"get_local_site_id",
"get_ayon_user_entity",
"get_ayon_username",
"initialize_ayon_connection",
@ -228,6 +231,7 @@ __all__ = [
"get_version_from_path",
"get_last_version_from_path",
"DefaultKeysDict",
"TemplateUnsolved",
"StringTemplate",
"FormatObject",

View file

@ -604,7 +604,11 @@ class EnumDef(AbstractAttrDef):
if value is None:
return copy.deepcopy(self.default)
return list(self._item_values.intersection(value))
return [
v
for v in value
if v in self._item_values
]
def is_value_valid(self, value: Any) -> bool:
"""Check if item is available in possible values."""

View file

@ -5,6 +5,7 @@ import json
import platform
import configparser
import warnings
import copy
from datetime import datetime
from abc import ABC, abstractmethod
from functools import lru_cache
@ -13,6 +14,8 @@ from typing import Optional, Any
import platformdirs
import ayon_api
from .cache import NestedCacheItem, CacheItem
_PLACEHOLDER = object()
@ -23,6 +26,7 @@ class RegistryItemNotFound(ValueError):
class _Cache:
username = None
user_entities_by_name = NestedCacheItem()
def _get_ayon_appdirs(*args: str) -> str:
@ -569,6 +573,68 @@ def get_local_site_id():
return site_id
def _get_ayon_service_username() -> Optional[str]:
# TODO @iLLiCiTiT - do not use private attribute of 'ServerAPI', rather
# use public method to get username from connection stack.
con = ayon_api.get_server_api_connection()
user_stack = getattr(con, "_as_user_stack", None)
if user_stack is None:
return None
return user_stack.username
def get_ayon_user_entity(username: Optional[str] = None) -> dict[str, Any]:
"""AYON user entity used for templates and publishing.
Note:
Usually only service and admin users can receive the full user entity.
Args:
username (Optional[str]): Username of the user. If not passed, then
the current user in 'ayon_api' is used.
Returns:
dict[str, Any]: User entity.
"""
service_username = _get_ayon_service_username()
# Handle the service user first
if service_username:
if username is None:
username = service_username
cache: CacheItem = _Cache.user_entities_by_name[username]
if not cache.is_valid:
if username == service_username:
user = ayon_api.get_user()
else:
user = ayon_api.get_user(username)
cache.update_data(user)
return copy.deepcopy(cache.get_data())
# Cache current user
current_user = None
if _Cache.username is None:
current_user = ayon_api.get_user()
_Cache.username = current_user["name"]
if username is None:
username = _Cache.username
cache: CacheItem = _Cache.user_entities_by_name[username]
if not cache.is_valid:
user = None
if username == _Cache.username:
if current_user is None:
current_user = ayon_api.get_user()
user = current_user
if user is None:
user = ayon_api.get_user(username)
cache.update_data(user)
return copy.deepcopy(cache.get_data())
def get_ayon_username():
"""AYON username used for templates and publishing.
@ -578,20 +644,5 @@ def get_ayon_username():
str: Username.
"""
# Look for username in the connection stack
# - this is used when service is working as other user
# (e.g. in background sync)
# TODO @iLLiCiTiT - do not use private attribute of 'ServerAPI', rather
# use public method to get username from connection stack.
con = ayon_api.get_server_api_connection()
user_stack = getattr(con, "_as_user_stack", None)
if user_stack is not None:
username = user_stack.username
if username is not None:
return username
# Cache the username to avoid multiple API calls
# - it is not expected that user would change
if _Cache.username is None:
_Cache.username = ayon_api.get_user()["name"]
return _Cache.username
user = get_ayon_user_entity()
return user["name"]

View file

@ -1,3 +1,5 @@
from __future__ import annotations
import os
import re
import copy
@ -5,11 +7,7 @@ import numbers
import warnings
import platform
from string import Formatter
import typing
from typing import List, Dict, Any, Set
if typing.TYPE_CHECKING:
from typing import Union
from typing import Any, Union, Iterable
SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)")
OPTIONAL_PATTERN = re.compile(r"(<.*?[^{0]*>)[^0-9]*?")
@ -44,6 +42,54 @@ class TemplateUnsolved(Exception):
)
class DefaultKeysDict(dict):
"""Dictionary that supports the default key to use for str conversion.
Helpful when a key in a template changes from a string to a dictionary,
for example '{folder}' -> '{folder[name]}'.
>>> data = DefaultKeysDict(
>>> "name",
>>> {"folder": {"name": "FolderName"}}
>>> )
>>> print("{folder[name]}".format_map(data))
FolderName
>>> print("{folder}".format_map(data))
FolderName
Args:
default_keys (Union[str, Iterable[str]]): Default key(s) to use for str
conversion. Multiple keys can be passed to reach a value in a nested
dictionary.
"""
def __init__(
self, default_keys: Union[str, Iterable[str]], *args, **kwargs
) -> None:
if isinstance(default_keys, str):
default_keys = [default_keys]
else:
default_keys = list(default_keys)
if not default_keys:
raise ValueError(
"Default key must be set. Got empty default keys."
)
self._default_keys = default_keys
super().__init__(*args, **kwargs)
def __str__(self) -> str:
return str(self.get_default_value())
def get_default_keys(self) -> list[str]:
return list(self._default_keys)
def get_default_value(self) -> Any:
value = self
for key in self._default_keys:
value = value[key]
return value
class StringTemplate:
"""String that can be formatted."""
def __init__(self, template: str):
@ -84,7 +130,7 @@ class StringTemplate:
if substr:
new_parts.append(substr)
self._parts: List["Union[str, OptionalPart, FormattingPart]"] = (
self._parts: list[Union[str, OptionalPart, FormattingPart]] = (
self.find_optional_parts(new_parts)
)
@ -105,7 +151,7 @@ class StringTemplate:
def template(self) -> str:
return self._template
def format(self, data: Dict[str, Any]) -> "TemplateResult":
def format(self, data: dict[str, Any]) -> "TemplateResult":
""" Figure out with whole formatting.
Separate advanced keys (*Like '{project[name]}') from string which must
@ -145,29 +191,29 @@ class StringTemplate:
invalid_types
)
def format_strict(self, data: Dict[str, Any]) -> "TemplateResult":
def format_strict(self, data: dict[str, Any]) -> "TemplateResult":
result = self.format(data)
result.validate()
return result
@classmethod
def format_template(
cls, template: str, data: Dict[str, Any]
cls, template: str, data: dict[str, Any]
) -> "TemplateResult":
objected_template = cls(template)
return objected_template.format(data)
@classmethod
def format_strict_template(
cls, template: str, data: Dict[str, Any]
cls, template: str, data: dict[str, Any]
) -> "TemplateResult":
objected_template = cls(template)
return objected_template.format_strict(data)
@staticmethod
def find_optional_parts(
parts: List["Union[str, FormattingPart]"]
) -> List["Union[str, OptionalPart, FormattingPart]"]:
parts: list[Union[str, FormattingPart]]
) -> list[Union[str, OptionalPart, FormattingPart]]:
new_parts = []
tmp_parts = {}
counted_symb = -1
@ -192,7 +238,7 @@ class StringTemplate:
len(parts) == 1
and isinstance(parts[0], str)
):
value = "<{}>".format(parts[0])
value = f"<{parts[0]}>"
else:
value = OptionalPart(parts)
@ -223,7 +269,7 @@ class TemplateResult(str):
only used keys.
solved (bool): For check if all required keys were filled.
template (str): Original template.
missing_keys (Iterable[str]): Missing keys that were not in the data.
missing_keys (list[str]): Missing keys that were not in the data.
Includes missing optional keys.
invalid_types (dict): When a key was found in data, but its value did
not have an allowed data type. Allowed data types are `numbers`,
@ -232,11 +278,11 @@ class TemplateResult(str):
of number.
"""
used_values: Dict[str, Any] = None
used_values: dict[str, Any] = None
solved: bool = None
template: str = None
missing_keys: List[str] = None
invalid_types: Dict[str, Any] = None
missing_keys: list[str] = None
invalid_types: dict[str, Any] = None
def __new__(
cls, filled_template, template, solved,
@ -296,21 +342,21 @@ class TemplatePartResult:
"""Result to store result of template parts."""
def __init__(self, optional: bool = False):
# Missing keys or invalid value types of required keys
self._missing_keys: Set[str] = set()
self._invalid_types: Dict[str, Any] = {}
self._missing_keys: set[str] = set()
self._invalid_types: dict[str, Any] = {}
# Missing keys or invalid value types of optional keys
self._missing_optional_keys: Set[str] = set()
self._invalid_optional_types: Dict[str, Any] = {}
self._missing_optional_keys: set[str] = set()
self._invalid_optional_types: dict[str, Any] = {}
# Used values stored by key with origin type
# - key without any padding or key modifiers
# - value from filling data
# Example: {"version": 1}
self._used_values: Dict[str, Any] = {}
self._used_values: dict[str, Any] = {}
# Used values stored by key with all modifiers
# - value is already formatted string
# Example: {"version:0>3": "001"}
self._really_used_values: Dict[str, Any] = {}
self._really_used_values: dict[str, Any] = {}
# Concatenated string output after formatting
self._output: str = ""
# Is this result from optional part
@ -336,8 +382,9 @@ class TemplatePartResult:
self._really_used_values.update(other.really_used_values)
else:
raise TypeError("Cannot add data from \"{}\" to \"{}\"".format(
str(type(other)), self.__class__.__name__)
raise TypeError(
f"Cannot add data from \"{type(other)}\""
f" to \"{self.__class__.__name__}\""
)
@property
@ -362,40 +409,41 @@ class TemplatePartResult:
return self._output
@property
def missing_keys(self) -> Set[str]:
def missing_keys(self) -> set[str]:
return self._missing_keys
@property
def missing_optional_keys(self) -> Set[str]:
def missing_optional_keys(self) -> set[str]:
return self._missing_optional_keys
@property
def invalid_types(self) -> Dict[str, Any]:
def invalid_types(self) -> dict[str, Any]:
return self._invalid_types
@property
def invalid_optional_types(self) -> Dict[str, Any]:
def invalid_optional_types(self) -> dict[str, Any]:
return self._invalid_optional_types
@property
def really_used_values(self) -> Dict[str, Any]:
def really_used_values(self) -> dict[str, Any]:
return self._really_used_values
@property
def realy_used_values(self) -> Dict[str, Any]:
def realy_used_values(self) -> dict[str, Any]:
warnings.warn(
"Property 'realy_used_values' is deprecated."
" Use 'really_used_values' instead.",
DeprecationWarning
DeprecationWarning,
stacklevel=2,
)
return self._really_used_values
@property
def used_values(self) -> Dict[str, Any]:
def used_values(self) -> dict[str, Any]:
return self._used_values
@staticmethod
def split_keys_to_subdicts(values: Dict[str, Any]) -> Dict[str, Any]:
def split_keys_to_subdicts(values: dict[str, Any]) -> dict[str, Any]:
output = {}
formatter = Formatter()
for key, value in values.items():
@ -410,7 +458,7 @@ class TemplatePartResult:
data[last_key] = value
return output
def get_clean_used_values(self) -> Dict[str, Any]:
def get_clean_used_values(self) -> dict[str, Any]:
new_used_values = {}
for key, value in self.used_values.items():
if isinstance(value, FormatObject):
@ -426,7 +474,8 @@ class TemplatePartResult:
warnings.warn(
"Method 'add_realy_used_value' is deprecated."
" Use 'add_really_used_value' instead.",
DeprecationWarning
DeprecationWarning,
stacklevel=2,
)
self.add_really_used_value(key, value)
@ -479,7 +528,7 @@ class FormattingPart:
self,
field_name: str,
format_spec: str,
conversion: "Union[str, None]",
conversion: Union[str, None],
):
format_spec_v = ""
if format_spec:
@ -546,7 +595,7 @@ class FormattingPart:
return not queue
@staticmethod
def keys_to_template_base(keys: List[str]):
def keys_to_template_base(keys: list[str]):
if not keys:
return None
# Create copy of keys
@ -556,7 +605,7 @@ class FormattingPart:
return f"{template_base}{joined_keys}"
def format(
self, data: Dict[str, Any], result: TemplatePartResult
self, data: dict[str, Any], result: TemplatePartResult
) -> TemplatePartResult:
"""Format the formattings string.
@ -635,6 +684,12 @@ class FormattingPart:
result.add_output(self.template)
return result
if isinstance(value, DefaultKeysDict):
try:
value = value.get_default_value()
except KeyError:
pass
if not self.validate_value_type(value):
result.add_invalid_type(key, value)
result.add_output(self.template)
@ -687,23 +742,25 @@ class OptionalPart:
def __init__(
self,
parts: List["Union[str, OptionalPart, FormattingPart]"]
parts: list[Union[str, OptionalPart, FormattingPart]]
):
self._parts: List["Union[str, OptionalPart, FormattingPart]"] = parts
self._parts: list[Union[str, OptionalPart, FormattingPart]] = parts
@property
def parts(self) -> List["Union[str, OptionalPart, FormattingPart]"]:
def parts(self) -> list[Union[str, OptionalPart, FormattingPart]]:
return self._parts
def __str__(self) -> str:
return "<{}>".format("".join([str(p) for p in self._parts]))
joined_parts = "".join([str(p) for p in self._parts])
return f"<{joined_parts}>"
def __repr__(self) -> str:
return "<Optional:{}>".format("".join([str(p) for p in self._parts]))
joined_parts = "".join([str(p) for p in self._parts])
return f"<Optional:{joined_parts}>"
def format(
self,
data: Dict[str, Any],
data: dict[str, Any],
result: TemplatePartResult,
) -> TemplatePartResult:
new_result = TemplatePartResult(True)
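A minimal sketch (assumed usage) of how the DefaultKeysDict support added to FormattingPart.format plays out with StringTemplate: both the legacy '{user}' key and the new '{user[name]}' key resolve to the same value.

    from ayon_core.lib import DefaultKeysDict, StringTemplate

    data = {"user": DefaultKeysDict("name", {"name": "john", "attrib": {}})}
    print(StringTemplate.format_template("{user[name]}", data))  # john
    print(StringTemplate.format_template("{user}", data))        # john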

View file

@ -110,6 +110,15 @@ def deprecated(new_destination):
return _decorator(func)
class MissingRGBAChannelsError(ValueError):
"""Raised when we can't find channels to use as RGBA for conversion in
input media.
This may be other channels than solely RGBA, like Z-channel. The error is
raised when no matching 'reviewable' channel was found.
"""
def get_transcode_temp_directory():
"""Creates temporary folder for transcoding.
@ -388,6 +397,10 @@ def get_review_info_by_layer_name(channel_names):
...
]
This tries to find outputs suitable for review purposes by searching for
channel names like RGBA, but also XYZ, Z, N, AR, AG and AB channels.
Args:
channel_names (list[str]): List of channel names.
@ -396,7 +409,6 @@ def get_review_info_by_layer_name(channel_names):
"""
layer_names_order = []
rgba_by_layer_name = collections.defaultdict(dict)
channels_by_layer_name = collections.defaultdict(dict)
for channel_name in channel_names:
@ -405,45 +417,95 @@ def get_review_info_by_layer_name(channel_names):
if "." in channel_name:
layer_name, last_part = channel_name.rsplit(".", 1)
channels_by_layer_name[layer_name][channel_name] = last_part
if last_part.lower() not in {
"r", "red",
"g", "green",
"b", "blue",
"a", "alpha"
# R, G, B, A or X, Y, Z, N, AR, AG, AB, RED, GREEN, BLUE, ALPHA
channel = last_part.upper()
if channel not in {
# Detect RGBA channels
"R", "G", "B", "A",
# Support fully written out rgba channel names
"RED", "GREEN", "BLUE", "ALPHA",
# Allow detecting of x, y and z channels, and normal channels
"X", "Y", "Z", "N",
# red, green and blue alpha/opacity, for colored mattes
"AR", "AG", "AB"
}:
continue
if layer_name not in layer_names_order:
layer_names_order.append(layer_name)
# R, G, B or A
channel = last_part[0].upper()
rgba_by_layer_name[layer_name][channel] = channel_name
channels_by_layer_name[layer_name][channel] = channel_name
# Put empty layer or 'rgba' to the beginning of the list
# - if input has R, G, B, A channels they should be used for review
# NOTE They are iterated in reversed order because they're inserted to
# the beginning of 'layer_names_order' -> last added will be first.
for name in reversed(["", "rgba"]):
if name in layer_names_order:
layer_names_order.remove(name)
layer_names_order.insert(0, name)
def _sort(_layer_name: str) -> int:
# Prioritize "" layer name
# Prioritize layers with RGB channels
if _layer_name == "rgba":
return 0
if _layer_name == "":
return 1
channels = channels_by_layer_name[_layer_name]
if all(channel in channels for channel in "RGB"):
return 2
return 10
layer_names_order.sort(key=_sort)
output = []
for layer_name in layer_names_order:
rgba_layer_info = rgba_by_layer_name[layer_name]
red = rgba_layer_info.get("R")
green = rgba_layer_info.get("G")
blue = rgba_layer_info.get("B")
if not red or not green or not blue:
channel_info = channels_by_layer_name[layer_name]
alpha = channel_info.get("A")
# RGB channels
if all(channel in channel_info for channel in "RGB"):
rgb = "R", "G", "B"
# RGB channels using fully written out channel names
elif all(
channel in channel_info
for channel in ("RED", "GREEN", "BLUE")
):
rgb = "RED", "GREEN", "BLUE"
alpha = channel_info.get("ALPHA")
# XYZ channels (position pass)
elif all(channel in channel_info for channel in "XYZ"):
rgb = "X", "Y", "Z"
# Colored mattes (as defined in OpenEXR Channel Name standards)
elif all(channel in channel_info for channel in ("AR", "AG", "AB")):
rgb = "AR", "AG", "AB"
# Luminance channel (as defined in OpenEXR Channel Name standards)
elif "Y" in channel_info:
rgb = "Y", "Y", "Y"
# Has only Z channel (Z-depth layer)
elif "Z" in channel_info:
rgb = "Z", "Z", "Z"
# Has only A channel (Alpha layer)
elif "A" in channel_info:
rgb = "A", "A", "A"
alpha = None
else:
# No reviewable channels found
continue
red = channel_info[rgb[0]]
green = channel_info[rgb[1]]
blue = channel_info[rgb[2]]
output.append({
"name": layer_name,
"review_channels": {
"R": red,
"G": green,
"B": blue,
"A": rgba_layer_info.get("A"),
"A": alpha,
}
})
return output
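An illustrative input/output sketch of the broadened detection (hypothetical channel names, behaviour inferred from the branches above):

    channel_names = [
        "beauty.R", "beauty.G", "beauty.B", "beauty.A",  # regular RGBA layer
        "depth.Z",                                       # Z-depth only layer
        "P.X", "P.Y", "P.Z",                             # position pass
    ]
    # get_review_info_by_layer_name(channel_names) would yield the "beauty"
    # layer first (full RGB present), while "depth" maps Z to R/G/B and "P"
    # maps X/Y/Z to R/G/B, each with alpha set to None.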
@ -1467,8 +1529,9 @@ def get_oiio_input_and_channel_args(oiio_input_info, alpha_default=None):
review_channels = get_convert_rgb_channels(channel_names)
if review_channels is None:
raise ValueError(
"Couldn't find channels that can be used for conversion."
raise MissingRGBAChannelsError(
"Couldn't find channels that can be used for conversion "
f"among channels: {channel_names}."
)
red, green, blue, alpha = review_channels

View file

@ -137,6 +137,7 @@ class AttributeValues:
if value is None:
continue
converted_value = attr_def.convert_value(value)
# QUESTION Could we just use converted value all the time?
if converted_value == value:
self._data[attr_def.key] = value
@ -245,11 +246,11 @@ class AttributeValues:
def _update(self, value):
changes = {}
for key, value in dict(value).items():
if key in self._data and self._data.get(key) == value:
for key, key_value in dict(value).items():
if key in self._data and self._data.get(key) == key_value:
continue
self._data[key] = value
changes[key] = value
self._data[key] = key_value
changes[key] = key_value
return changes
def _pop(self, key, default):

View file

@ -202,7 +202,8 @@ def is_clip_from_media_sequence(otio_clip):
def remap_range_on_file_sequence(otio_clip, otio_range):
"""
""" Remap the provided range on a file sequence clip.
Args:
otio_clip (otio.schema.Clip): The OTIO clip to check.
otio_range (otio.schema.TimeRange): The trim range to apply.
@ -249,7 +250,11 @@ def remap_range_on_file_sequence(otio_clip, otio_range):
if (
is_clip_from_media_sequence(otio_clip)
and available_range_start_frame == media_ref.start_frame
and conformed_src_in.to_frames() < media_ref.start_frame
# source range should be included in available range from media
# using round instead of conformed_src_in.to_frames() to avoid
# any precision issue with frame rate.
and round(conformed_src_in.value) < media_ref.start_frame
):
media_in = otio.opentime.RationalTime(
0, rate=available_range_rate

View file

@ -983,7 +983,26 @@ def get_instance_expected_output_path(
"version": version
})
path_template_obj = anatomy.get_template_item("publish", "default")["path"]
# Get instance publish template name
task_name = task_type = None
task_entity = instance.data.get("taskEntity")
if task_entity:
task_name = task_entity["name"]
task_type = task_entity["taskType"]
template_name = get_publish_template_name(
project_name=instance.context.data["projectName"],
host_name=instance.context.data["hostName"],
product_type=instance.data["productType"],
task_name=task_name,
task_type=task_type,
project_settings=instance.context.data["project_settings"],
)
path_template_obj = anatomy.get_template_item(
"publish",
template_name
)["path"]
template_filled = path_template_obj.format_strict(template_data)
return os.path.normpath(template_filled)

View file

@ -1,27 +1,50 @@
from __future__ import annotations
from typing import Optional, Any
import ayon_api
from ayon_core.settings import get_studio_settings
from ayon_core.lib.local_settings import get_ayon_username
from ayon_core.lib import DefaultKeysDict
from ayon_core.lib.local_settings import get_ayon_user_entity
def get_general_template_data(settings=None, username=None):
def get_general_template_data(
settings: Optional[dict[str, Any]] = None,
username: Optional[str] = None,
user_entity: Optional[dict[str, Any]] = None,
):
"""General template data based on system settings or machine.
Output contains formatting keys:
- 'studio[name]' - Studio name filled from system settings
- 'studio[code]' - Studio code filled from system settings
- 'user' - User's name using 'get_ayon_username'
- 'studio[name]' - Studio name filled from system settings
- 'studio[code]' - Studio code filled from system settings
- 'user[name]' - User's name
- 'user[attrib][...]' - User's attributes
- 'user[data][...]' - User's data
Args:
settings (Dict[str, Any]): Studio or project settings.
username (Optional[str]): AYON Username.
"""
user_entity (Optional[dict[str, Any]]): User entity.
"""
if not settings:
settings = get_studio_settings()
if username is None:
username = get_ayon_username()
if user_entity is None:
user_entity = get_ayon_user_entity(username)
# Use dictionary with default value for backwards compatibility
# - we used to support '{user}', now it should be '{user[name]}'
user_data = DefaultKeysDict(
"name",
{
"name": user_entity["name"],
"attrib": user_entity["attrib"],
"data": user_entity["data"],
}
)
core_settings = settings["core"]
return {
@ -29,7 +52,7 @@ def get_general_template_data(settings=None, username=None):
"name": core_settings["studio_name"],
"code": core_settings["studio_code"]
},
"user": username
"user": user_data,
}
@ -150,7 +173,8 @@ def get_template_data(
task_entity=None,
host_name=None,
settings=None,
username=None
username=None,
user_entity=None,
):
"""Prepare data for templates filling from entered documents and info.
@ -173,13 +197,18 @@ def get_template_data(
host_name (Optional[str]): Used to fill '{app}' key.
settings (Union[Dict, None]): Prepared studio or project settings.
They're queried if not passed (may be slower).
username (Optional[str]): AYON Username.
username (Optional[str]): DEPRECATED AYON Username.
user_entity (Optional[dict[str, Any]]): AYON user entity.
Returns:
Dict[str, Any]: Data prepared for filling workdir template.
"""
template_data = get_general_template_data(settings, username=username)
template_data = get_general_template_data(
settings,
username=username,
user_entity=user_entity,
)
template_data.update(get_project_template_data(project_entity))
if folder_entity:
template_data.update(get_folder_template_data(

View file

@ -16,6 +16,7 @@ Provides:
import json
import pyblish.api
from ayon_core.lib import get_ayon_user_entity
from ayon_core.pipeline.template_data import get_template_data
@ -55,17 +56,18 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin):
if folder_entity:
task_entity = context.data["taskEntity"]
username = context.data["user"]
user_entity = get_ayon_user_entity(username)
anatomy_data = get_template_data(
project_entity,
folder_entity,
task_entity,
host_name,
project_settings
host_name=host_name,
settings=project_settings,
user_entity=user_entity,
)
anatomy_data.update(context.data.get("datetimeData") or {})
username = context.data["user"]
anatomy_data["user"] = username
# Backwards compatibility for 'username' key
anatomy_data["username"] = username

View file

@ -71,6 +71,12 @@ class CollectOtioRanges(pyblish.api.InstancePlugin):
import opentimelineio as otio
otio_clip = instance.data["otioClip"]
if isinstance(
otio_clip.media_reference,
otio.schema.MissingReference
):
self.log.info("Clip has no media reference")
return
# Collect timeline ranges if workfile start frame is available
if "workfileFrameStart" in instance.data:

View file

@ -60,6 +60,13 @@ class CollectOtioSubsetResources(
# get basic variables
otio_clip = instance.data["otioClip"]
if isinstance(
otio_clip.media_reference,
otio.schema.MissingReference
):
self.log.info("Clip has no media reference")
return
otio_available_range = otio_clip.available_range()
media_fps = otio_available_range.start_time.rate
available_duration = otio_available_range.duration.value

View file

@ -11,6 +11,7 @@ from ayon_core.lib import (
is_oiio_supported,
)
from ayon_core.lib.transcoding import (
MissingRGBAChannelsError,
oiio_color_convert,
)
@ -111,7 +112,17 @@ class ExtractOIIOTranscode(publish.Extractor):
self.log.warning("Config file doesn't exist, skipping")
continue
# Get representation files to convert
if isinstance(repre["files"], list):
repre_files_to_convert = copy.deepcopy(repre["files"])
else:
repre_files_to_convert = [repre["files"]]
# Process each output definition
for output_def in profile_output_defs:
# Local copy to avoid accidental mutable changes
files_to_convert = list(repre_files_to_convert)
output_name = output_def["name"]
new_repre = copy.deepcopy(repre)
@ -122,11 +133,6 @@ class ExtractOIIOTranscode(publish.Extractor):
)
new_repre["stagingDir"] = new_staging_dir
if isinstance(new_repre["files"], list):
files_to_convert = copy.deepcopy(new_repre["files"])
else:
files_to_convert = [new_repre["files"]]
output_extension = output_def["extension"]
output_extension = output_extension.replace('.', '')
self._rename_in_representation(new_repre,
@ -168,30 +174,49 @@ class ExtractOIIOTranscode(publish.Extractor):
additional_command_args = (output_def["oiiotool_args"]
["additional_command_args"])
files_to_convert = self._translate_to_sequence(
files_to_convert)
self.log.debug("Files to convert: {}".format(files_to_convert))
for file_name in files_to_convert:
sequence_files = self._translate_to_sequence(files_to_convert)
self.log.debug("Files to convert: {}".format(sequence_files))
missing_rgba_review_channels = False
for file_name in sequence_files:
if isinstance(file_name, clique.Collection):
# Convert to filepath that can be directly converted
# by oiio like `frame.1001-1025%04d.exr`
file_name: str = file_name.format(
"{head}{range}{padding}{tail}"
)
self.log.debug("Transcoding file: `{}`".format(file_name))
input_path = os.path.join(original_staging_dir,
file_name)
output_path = self._get_output_file_path(input_path,
new_staging_dir,
output_extension)
try:
oiio_color_convert(
input_path=input_path,
output_path=output_path,
config_path=config_path,
source_colorspace=source_colorspace,
target_colorspace=target_colorspace,
target_display=target_display,
target_view=target_view,
source_display=source_display,
source_view=source_view,
additional_command_args=additional_command_args,
logger=self.log
)
except MissingRGBAChannelsError as exc:
missing_rgba_review_channels = True
self.log.error(exc)
self.log.error(
"Skipping OIIO Transcode. Unknown RGBA channels"
f" for colorspace conversion in file: {input_path}"
)
break
oiio_color_convert(
input_path=input_path,
output_path=output_path,
config_path=config_path,
source_colorspace=source_colorspace,
target_colorspace=target_colorspace,
target_display=target_display,
target_view=target_view,
source_display=source_display,
source_view=source_view,
additional_command_args=additional_command_args,
logger=self.log
)
if missing_rgba_review_channels:
# Stop processing this representation
break
# cleanup temporary transcoded files
for file_name in new_repre["files"]:
@ -217,11 +242,11 @@ class ExtractOIIOTranscode(publish.Extractor):
added_review = True
# If there is only 1 file outputted then convert list to
# string, cause that'll indicate that its not a sequence.
# string, because that'll indicate that it is not a sequence.
if len(new_repre["files"]) == 1:
new_repre["files"] = new_repre["files"][0]
# If the source representation has "review" tag, but its not
# If the source representation has "review" tag, but it's not
# part of the output definition tags, then both the
# representations will be transcoded in ExtractReview and
# their outputs will clash in integration.
@ -271,42 +296,34 @@ class ExtractOIIOTranscode(publish.Extractor):
new_repre["files"] = renamed_files
def _translate_to_sequence(self, files_to_convert):
"""Returns original list or list with filename formatted in single
sequence format.
"""Returns original list or a clique.Collection of a sequence.
Uses clique to find frame sequence, in this case it merges all frames
into sequence format (FRAMESTART-FRAMEEND#) and returns it.
If sequence not found, it returns original list
Uses clique to find frame sequence Collection.
If sequence not found, it returns original list.
Args:
files_to_convert (list): list of file names
Returns:
(list) of [file.1001-1010#.exr] or [fileA.exr, fileB.exr]
list[str | clique.Collection]: List of filepaths or a list
of Collections (usually one, unless there are holes)
"""
pattern = [clique.PATTERNS["frames"]]
collections, _ = clique.assemble(
files_to_convert, patterns=pattern,
assume_padded_when_ambiguous=True)
if collections:
if len(collections) > 1:
raise ValueError(
"Too many collections {}".format(collections))
collection = collections[0]
frames = list(collection.indexes)
if collection.holes().indexes:
return files_to_convert
# Get the padding from the collection
# This is the number of digits used in the frame numbers
padding = collection.padding
frame_str = "{}-{}%0{}d".format(frames[0], frames[-1], padding)
file_name = "{}{}{}".format(collection.head, frame_str,
collection.tail)
files_to_convert = [file_name]
# TODO: Technically oiiotool supports holes in the sequence as well
# using the dedicated --frames argument to specify the frames.
# We may want to use that too so conversions of sequences with
# holes will perform faster as well.
# Separate the collection so that we have no holes/gaps per
# collection.
return collection.separate()
return files_to_convert
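A standalone sketch of the clique behaviour relied on above (file names are made up): assemble() groups the frames into a Collection and separate() splits it into hole-free collections that can be formatted in the {head}{range}{padding}{tail} form used for oiiotool.

    import clique

    files = ["shot.1001.exr", "shot.1002.exr", "shot.1004.exr"]
    collections, remainder = clique.assemble(
        files,
        patterns=[clique.PATTERNS["frames"]],
        assume_padded_when_ambiguous=True,
    )
    for collection in collections[0].separate():
        # prints "shot.1001-1002%04d.exr" and "shot.1004%04d.exr"
        print(collection.format("{head}{range}{padding}{tail}"))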

View file

@ -130,7 +130,7 @@ class ExtractOTIOReview(
# NOTE it looks like it is set only in hiero integration
res_data = {"width": self.to_width, "height": self.to_height}
for key in res_data:
for meta_prefix in ("ayon.source.", "openpype.source."):
for meta_prefix in ("ayon.source", "openpype.source"):
meta_key = f"{meta_prefix}.{key}"
value = media_metadata.get(meta_key)
if value is not None:

View file

@ -361,14 +361,14 @@ class ExtractReview(pyblish.api.InstancePlugin):
if not filtered_output_defs:
self.log.debug((
"Repre: {} - All output definitions were filtered"
" out by single frame filter. Skipping"
" out by single frame filter. Skipped."
).format(repre["name"]))
continue
# Skip if file is not set
if first_input_path is None:
self.log.warning((
"Representation \"{}\" have empty files. Skipped."
"Representation \"{}\" has empty files. Skipped."
).format(repre["name"]))
continue

View file

@ -17,6 +17,7 @@ from ayon_core.lib import (
run_subprocess,
)
from ayon_core.lib.transcoding import (
MissingRGBAChannelsError,
oiio_color_convert,
get_oiio_input_and_channel_args,
get_oiio_info_for_input,
@ -477,7 +478,16 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
return False
input_info = get_oiio_info_for_input(src_path, logger=self.log)
input_arg, channels_arg = get_oiio_input_and_channel_args(input_info)
try:
input_arg, channels_arg = get_oiio_input_and_channel_args(
input_info
)
except MissingRGBAChannelsError:
self.log.debug(
"Unable to find relevant reviewable channel for thumbnail "
"creation"
)
return False
oiio_cmd = get_oiio_tool_args(
"oiiotool",
input_arg, src_path,

View file

@ -121,7 +121,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"version",
"representation",
"username",
"user",
"output",
# OpenPype keys - should be removed
"asset", # folder[name]
@ -796,6 +795,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
if value is not None:
repre_context[key] = value
# Keep only username
# NOTE This is to avoid storing all user attributes and data
# to representation
if "user" not in repre_context:
repre_context["user"] = {
"name": template_data["user"]["name"]
}
# Use previous representation's id if there is a name match
existing = existing_repres_by_name.get(repre["name"].lower())
repre_id = None

View file

@ -89,7 +89,6 @@ class IntegrateHeroVersion(
"family",
"representation",
"username",
"user",
"output"
]
# QUESTION/TODO this process should happen on server if crashed due to
@ -364,6 +363,14 @@ class IntegrateHeroVersion(
if value is not None:
repre_context[key] = value
# Keep only username
# NOTE This is to avoid storing all user attributes and data
# to representation
if "user" not in repre_context:
repre_context["user"] = {
"name": anatomy_data["user"]["name"]
}
# Prepare new repre
repre_entity = copy.deepcopy(repre_info["representation"])
repre_entity.pop("id", None)

View file

@ -1,5 +1,6 @@
import logging
import re
import copy
from typing import (
Union,
List,
@ -1098,7 +1099,7 @@ class CreateModel:
creator_attributes[key] = attr_def.default
elif attr_def.is_value_valid(value):
creator_attributes[key] = value
creator_attributes[key] = copy.deepcopy(value)
def _set_instances_publish_attr_values(
self, instance_ids, plugin_name, key, value

View file

@ -41,6 +41,7 @@ class PushToContextController:
self._process_item_id = None
self._use_original_name = False
self._version_up = False
self.set_source(project_name, version_ids)
@ -212,7 +213,7 @@ class PushToContextController:
self._user_values.variant,
comment=self._user_values.comment,
new_folder_name=self._user_values.new_folder_name,
dst_version=1,
version_up=self._version_up,
use_original_name=self._use_original_name,
)
item_ids.append(item_id)
@ -229,6 +230,9 @@ class PushToContextController:
thread.start()
return item_ids
def set_version_up(self, state):
self._version_up = state
def wait_for_process_thread(self):
if self._process_thread is None:
return

View file

@ -3,9 +3,10 @@ import re
import copy
import itertools
import sys
import tempfile
import traceback
import uuid
from typing import Optional, Dict
from typing import Optional, Any
import ayon_api
from ayon_api.utils import create_entity_id
@ -88,7 +89,7 @@ class ProjectPushItem:
variant,
comment,
new_folder_name,
dst_version,
version_up,
item_id=None,
use_original_name=False
):
@ -99,7 +100,7 @@ class ProjectPushItem:
self.dst_project_name = dst_project_name
self.dst_folder_id = dst_folder_id
self.dst_task_name = dst_task_name
self.dst_version = dst_version
self.version_up = version_up
self.variant = variant
self.new_folder_name = new_folder_name
self.comment = comment or ""
@ -117,7 +118,7 @@ class ProjectPushItem:
str(self.dst_folder_id),
str(self.new_folder_name),
str(self.dst_task_name),
str(self.dst_version),
str(self.version_up),
self.use_original_name
])
return self._repr_value
@ -132,7 +133,7 @@ class ProjectPushItem:
"dst_project_name": self.dst_project_name,
"dst_folder_id": self.dst_folder_id,
"dst_task_name": self.dst_task_name,
"dst_version": self.dst_version,
"version_up": self.version_up,
"variant": self.variant,
"comment": self.comment,
"new_folder_name": self.new_folder_name,
@ -225,8 +226,8 @@ class ProjectPushRepreItem:
but filenames are not template based.
Args:
repre_entity (Dict[str, Ant]): Representation entity.
roots (Dict[str, str]): Project roots (based on project anatomy).
repre_entity (dict[str, Any]): Representation entity.
roots (dict[str, str]): Project roots (based on project anatomy).
"""
def __init__(self, repre_entity, roots):
@ -482,6 +483,8 @@ class ProjectPushItemProcess:
self._log_info("Destination project was found")
self._fill_or_create_destination_folder()
self._log_info("Destination folder was determined")
self._fill_or_create_destination_task()
self._log_info("Destination task was determined")
self._determine_product_type()
self._determine_publish_template_name()
self._determine_product_name()
@ -650,10 +653,10 @@ class ProjectPushItemProcess:
def _create_folder(
self,
src_folder_entity,
project_entity,
parent_folder_entity,
folder_name
src_folder_entity: dict[str, Any],
project_entity: dict[str, Any],
parent_folder_entity: dict[str, Any],
folder_name: str
):
parent_id = None
if parent_folder_entity:
@ -702,12 +705,19 @@ class ProjectPushItemProcess:
if new_folder_name != folder_name:
folder_label = folder_name
# TODO find out how to define folder type
src_folder_type = src_folder_entity["folderType"]
dst_folder_type = self._get_dst_folder_type(
project_entity,
src_folder_type
)
new_thumbnail_id = self._create_new_folder_thumbnail(
project_entity, src_folder_entity)
folder_entity = new_folder_entity(
folder_name,
"Folder",
dst_folder_type,
parent_id=parent_id,
attribs=new_folder_attrib
attribs=new_folder_attrib,
thumbnail_id=new_thumbnail_id
)
if folder_label:
folder_entity["label"] = folder_label
@ -727,10 +737,59 @@ class ProjectPushItemProcess:
folder_entity["path"] = "/".join([parent_path, folder_name])
return folder_entity
def _create_new_folder_thumbnail(
self,
project_entity: dict[str, Any],
src_folder_entity: dict[str, Any]
) -> Optional[str]:
"""Copy thumbnail possibly set on folder.
It can differ from representation thumbnails and is only shown when the
folder is selected.
"""
if not src_folder_entity["thumbnailId"]:
return None
thumbnail = ayon_api.get_folder_thumbnail(
self._item.src_project_name,
src_folder_entity["id"],
src_folder_entity["thumbnailId"]
)
if not thumbnail.id:
return None
with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
tmp_file.write(thumbnail.content)
temp_file_path = tmp_file.name
new_thumbnail_id = None
try:
new_thumbnail_id = ayon_api.create_thumbnail(
project_entity["name"], temp_file_path)
finally:
if os.path.exists(temp_file_path):
os.remove(temp_file_path)
return new_thumbnail_id
def _get_dst_folder_type(
self,
project_entity: dict[str, Any],
src_folder_type: str
) -> str:
"""Get new folder type."""
for folder_type in project_entity["folderTypes"]:
if folder_type["name"].lower() == src_folder_type.lower():
return folder_type["name"]
self._status.set_failed(
f"'{src_folder_type}' folder type is not configured in "
f"project Anatomy."
)
raise PushToProjectError(self._status.fail_reason)
def _fill_or_create_destination_folder(self):
dst_project_name = self._item.dst_project_name
dst_folder_id = self._item.dst_folder_id
dst_task_name = self._item.dst_task_name
new_folder_name = self._item.new_folder_name
if not dst_folder_id and not new_folder_name:
self._status.set_failed(
@ -761,9 +820,11 @@ class ProjectPushItemProcess:
new_folder_name
)
self._folder_entity = folder_entity
if not dst_task_name:
self._task_info = {}
return
def _fill_or_create_destination_task(self):
folder_entity = self._folder_entity
dst_task_name = self._item.dst_task_name
dst_project_name = self._item.dst_project_name
folder_path = folder_entity["path"]
folder_tasks = {
@ -772,6 +833,20 @@ class ProjectPushItemProcess:
dst_project_name, folder_ids=[folder_entity["id"]]
)
}
if not dst_task_name:
src_task_info = self._get_src_task_info()
if not src_task_info: # really no task selected nor on source
self._task_info = {}
return
dst_task_name = src_task_info["name"]
if dst_task_name.lower() not in folder_tasks:
task_info = self._make_sure_task_exists(
folder_entity, src_task_info
)
folder_tasks[dst_task_name.lower()] = task_info
task_info = folder_tasks.get(dst_task_name.lower())
if not task_info:
self._status.set_failed(
@ -790,7 +865,10 @@ class ProjectPushItemProcess:
task_type["name"]: task_type
for task_type in self._project_entity["taskTypes"]
}
task_type_info = task_types_by_name.get(task_type_name, {})
task_type_info = copy.deepcopy(
task_types_by_name.get(task_type_name, {})
)
task_type_info.pop("name") # do not overwrite real task name
task_info.update(task_type_info)
self._task_info = task_info
@ -870,10 +948,22 @@ class ProjectPushItemProcess:
self._product_entity = product_entity
return product_entity
src_attrib = self._src_product_entity["attrib"]
dst_attrib = {}
for key in {
"description",
"productGroup",
}:
value = src_attrib.get(key)
if value:
dst_attrib[key] = value
product_entity = new_product_entity(
product_name,
product_type,
folder_id,
attribs=dst_attrib
)
self._operations.create_entity(
project_name, "product", product_entity
@ -884,7 +974,7 @@ class ProjectPushItemProcess:
"""Make sure version document exits in database."""
project_name = self._item.dst_project_name
version = self._item.dst_version
version_up = self._item.version_up
src_version_entity = self._src_version_entity
product_entity = self._product_entity
product_id = product_entity["id"]
@ -912,27 +1002,29 @@ class ProjectPushItemProcess:
"description",
"intent",
}:
if key in src_attrib:
dst_attrib[key] = src_attrib[key]
value = src_attrib.get(key)
if value:
dst_attrib[key] = value
if version is None:
last_version_entity = ayon_api.get_last_version_by_product_id(
project_name, product_id
last_version_entity = ayon_api.get_last_version_by_product_id(
project_name, product_id
)
if last_version_entity is None:
dst_version = get_versioning_start(
project_name,
self.host_name,
task_name=self._task_info.get("name"),
task_type=self._task_info.get("taskType"),
product_type=product_type,
product_name=product_entity["name"],
)
if last_version_entity:
version = int(last_version_entity["version"]) + 1
else:
version = get_versioning_start(
project_name,
self.host_name,
task_name=self._task_info["name"],
task_type=self._task_info["taskType"],
product_type=product_type,
product_name=product_entity["name"],
)
else:
dst_version = int(last_version_entity["version"])
if version_up:
dst_version += 1
existing_version_entity = ayon_api.get_version_by_name(
project_name, version, product_id
project_name, dst_version, product_id
)
thumbnail_id = self._copy_version_thumbnail()
@ -950,10 +1042,16 @@ class ProjectPushItemProcess:
existing_version_entity["attrib"].update(dst_attrib)
self._version_entity = existing_version_entity
return
copied_tags = self._get_transferable_tags(src_version_entity)
copied_status = self._get_transferable_status(src_version_entity)
version_entity = new_version_entity(
version,
dst_version,
product_id,
author=src_version_entity["author"],
status=copied_status,
tags=copied_tags,
task_id=self._task_info.get("id"),
attribs=dst_attrib,
thumbnail_id=thumbnail_id,
)
@ -962,6 +1060,47 @@ class ProjectPushItemProcess:
)
self._version_entity = version_entity
def _make_sure_task_exists(
self,
folder_entity: dict[str, Any],
task_info: dict[str, Any],
) -> dict[str, Any]:
"""Creates destination task from source task information"""
project_name = self._item.dst_project_name
found_task_type = False
src_task_type = task_info["taskType"]
for task_type in self._project_entity["taskTypes"]:
if task_type["name"].lower() == src_task_type.lower():
found_task_type = True
break
if not found_task_type:
self._status.set_failed(
f"'{src_task_type}' task type is not configured in "
"project Anatomy."
)
raise PushToProjectError(self._status.fail_reason)
task_info = self._operations.create_task(
project_name,
task_info["name"],
folder_id=folder_entity["id"],
task_type=src_task_type,
attrib=task_info["attrib"],
)
self._task_info = task_info.data
return self._task_info
def _get_src_task_info(self):
src_version_entity = self._src_version_entity
if not src_version_entity["taskId"]:
return None
src_task = ayon_api.get_task_by_id(
self._item.src_project_name, src_version_entity["taskId"]
)
return src_task
def _integrate_representations(self):
try:
self._real_integrate_representations()
@ -1197,18 +1336,42 @@ class ProjectPushItemProcess:
if context_value and isinstance(context_value, dict):
for context_sub_key in context_value.keys():
value_to_update = formatting_data.get(context_key, {}).get(
context_sub_key)
context_sub_key
)
if value_to_update:
repre_context[context_key][
context_sub_key] = value_to_update
repre_context[context_key][context_sub_key] = (
value_to_update
)
else:
value_to_update = formatting_data.get(context_key)
if value_to_update:
repre_context[context_key] = value_to_update
if "task" not in formatting_data:
repre_context.pop("task")
repre_context.pop("task", None)
return repre_context
def _get_transferable_tags(self, src_version_entity):
"""Copy over only tags present in destination project"""
dst_project_tags = [
tag["name"] for tag in self._project_entity["tags"]
]
copied_tags = []
for src_tag in src_version_entity["tags"]:
if src_tag in dst_project_tags:
copied_tags.append(src_tag)
return copied_tags
def _get_transferable_status(self, src_version_entity):
"""Copy over status, first status if not matching found"""
dst_project_statuses = {
status["name"]: status
for status in self._project_entity["statuses"]
}
copied_status = dst_project_statuses.get(src_version_entity["status"])
if copied_status:
return copied_status["name"]
return None
class IntegrateModel:
def __init__(self, controller):
@ -1231,7 +1394,7 @@ class IntegrateModel:
variant,
comment,
new_folder_name,
dst_version,
version_up,
use_original_name
):
"""Create new item for integration.
@ -1245,7 +1408,7 @@ class IntegrateModel:
variant (str): Variant name.
comment (Union[str, None]): Comment.
new_folder_name (Union[str, None]): New folder name.
dst_version (int): Destination version number.
version_up (bool): Should the destination product be versioned up.
use_original_name (bool): Whether original product names should be used.
Returns:
@ -1262,7 +1425,7 @@ class IntegrateModel:
variant,
comment=comment,
new_folder_name=new_folder_name,
dst_version=dst_version,
version_up=version_up,
use_original_name=use_original_name
)
process_item = ProjectPushItemProcess(self, item)
@ -1281,6 +1444,6 @@ class IntegrateModel:
return
item.integrate()
def get_items(self) -> Dict[str, ProjectPushItemProcess]:
def get_items(self) -> dict[str, ProjectPushItemProcess]:
"""Returns dict of all ProjectPushItemProcess items """
return self._process_items

View file

@ -144,6 +144,8 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
variant_input.setPlaceholderText("< Variant >")
variant_input.setObjectName("ValidatedLineEdit")
version_up_checkbox = NiceCheckbox(True, parent=inputs_widget)
comment_input = PlaceholderLineEdit(inputs_widget)
comment_input.setPlaceholderText("< Publish comment >")
@ -153,7 +155,11 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
inputs_layout.addRow("New folder name", folder_name_input)
inputs_layout.addRow("Variant", variant_input)
inputs_layout.addRow(
"Use original product names", original_names_checkbox)
"Use original product names", original_names_checkbox
)
inputs_layout.addRow(
"Version up existing Product", version_up_checkbox
)
inputs_layout.addRow("Comment", comment_input)
main_splitter.addWidget(context_widget)
@ -209,8 +215,11 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
"Show error detail dialog to copy full error."
)
original_names_checkbox.setToolTip(
"Required for multi copy, doesn't allow changes "
"variant values."
"Required for multi copy, doesn't allow changes variant values."
)
version_up_checkbox.setToolTip(
"Version up existing product. If not selected version will be "
"updated."
)
overlay_close_btn = QtWidgets.QPushButton(
@ -259,6 +268,8 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
library_only_checkbox.stateChanged.connect(self._on_library_only_change)
original_names_checkbox.stateChanged.connect(
self._on_original_names_change)
version_up_checkbox.stateChanged.connect(
self._on_version_up_checkbox_change)
publish_btn.clicked.connect(self._on_select_click)
cancel_btn.clicked.connect(self._on_close_click)
@ -308,6 +319,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
self._folder_name_input = folder_name_input
self._comment_input = comment_input
self._use_original_names_checkbox = original_names_checkbox
self._library_only_checkbox = library_only_checkbox
self._publish_btn = publish_btn
@ -328,6 +340,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
self._new_folder_name_input_text = None
self._variant_input_text = None
self._comment_input_text = None
self._version_up_checkbox = version_up_checkbox
self._first_show = True
self._show_timer = show_timer
@ -344,6 +357,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
show_detail_btn.setVisible(False)
overlay_close_btn.setVisible(False)
overlay_try_btn.setVisible(False)
version_up_checkbox.setChecked(False)
# Support of public api function of controller
def set_source(self, project_name, version_ids):
@ -376,7 +390,6 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
self._invalidate_new_folder_name(
new_folder_name, user_values["is_new_folder_name_valid"]
)
self._controller._invalidate()
self._projects_combobox.refresh()
def _on_first_show(self):
@ -415,14 +428,18 @@ class PushToContextSelectWindow(QtWidgets.QWidget):
self._comment_input_text = text
self._user_input_changed_timer.start()
def _on_library_only_change(self, state: int) -> None:
def _on_library_only_change(self) -> None:
"""Change toggle state, reset filter, recalculate dropdown"""
state = bool(state)
self._projects_combobox.set_standard_filter_enabled(state)
is_checked = self._library_only_checkbox.isChecked()
self._projects_combobox.set_standard_filter_enabled(is_checked)
def _on_original_names_change(self, state: int) -> None:
use_original_name = bool(state)
self._invalidate_use_original_names(use_original_name)
def _on_original_names_change(self) -> None:
is_checked = self._use_original_names_checkbox.isChecked()
self._invalidate_use_original_names(is_checked)
def _on_version_up_checkbox_change(self) -> None:
is_checked = self._version_up_checkbox.isChecked()
self._controller.set_version_up(is_checked)
def _on_user_input_timer(self):
folder_name_enabled = self._new_folder_name_enabled

View file

@ -358,9 +358,8 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
if not self._host_is_valid:
return
self._folders_widget.set_project_name(
self._controller.get_current_project_name()
)
self._project_name = self._controller.get_current_project_name()
self._folders_widget.set_project_name(self._project_name)
def _on_save_as_finished(self, event):
if event["failed"]:

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'core' version."""
__version__ = "1.6.5+dev"
__version__ = "1.6.7+dev"

View file

@ -19,3 +19,6 @@ OpenTimelineIO = "0.16.0"
opencolorio = "^2.3.2,<2.4.0"
Pillow = "9.5.0"
websocket-client = ">=0.40.0,<2"
[ayon.runtimeDependencies.darwin]
pyobjc-core = "^11.1"