Merge branch 'develop' into enhancement/multithreaded-oiiotools

Jakub Ježek 2025-06-05 16:59:30 +02:00 committed by GitHub
commit e3e81cbb62
19 changed files with 868 additions and 83 deletions

View file

@@ -35,6 +35,8 @@ body:
       label: Version
       description: What version are you running? Look to AYON Tray
       options:
+        - 1.3.1
+        - 1.3.0
         - 1.2.0
         - 1.1.9
         - 1.1.8

View file

@ -1,15 +1,13 @@
import concurrent.futures
import os
import logging
import sys
import errno
from concurrent.futures import ThreadPoolExecutor, Future
from typing import List, Optional
from ayon_core.lib import create_hard_link
# this is needed until speedcopy for linux is fixed
if sys.platform == "win32":
from speedcopy import copyfile
else:
from shutil import copyfile
from speedcopy import copyfile
class DuplicateDestinationError(ValueError):
@@ -109,41 +107,52 @@ class FileTransaction:
         self._transfers[dst] = (src, opts)
 
     def process(self):
-        # Backup any existing files
-        for dst, (src, _) in self._transfers.items():
-            self.log.debug("Checking file ... {} -> {}".format(src, dst))
-            path_same = self._same_paths(src, dst)
-            if path_same or not os.path.exists(dst):
-                continue
-
-            # Backup original file
-            # todo: add timestamp or uuid to ensure unique
-            backup = dst + ".bak"
-            self._backup_to_original[backup] = dst
-            self.log.debug(
-                "Backup existing file: {} -> {}".format(dst, backup))
-            os.rename(dst, backup)
-
-        # Copy the files to transfer
-        for dst, (src, opts) in self._transfers.items():
-            path_same = self._same_paths(src, dst)
-            if path_same:
-                self.log.debug(
-                    "Source and destination are same files {} -> {}".format(
-                        src, dst))
-                continue
-
-            self._create_folder_for_file(dst)
-
-            if opts["mode"] == self.MODE_COPY:
-                self.log.debug("Copying file ... {} -> {}".format(src, dst))
-                copyfile(src, dst)
-            elif opts["mode"] == self.MODE_HARDLINK:
-                self.log.debug("Hardlinking file ... {} -> {}".format(
-                    src, dst))
-                create_hard_link(src, dst)
-
-            self._transferred.append(dst)
+        with ThreadPoolExecutor(max_workers=8) as executor:
+            # Submit backup tasks
+            backup_futures = [
+                executor.submit(self._backup_file, dst, src)
+                for dst, (src, _) in self._transfers.items()
+            ]
+            wait_for_future_errors(
+                executor, backup_futures, logger=self.log)
+
+            # Submit transfer tasks
+            transfer_futures = [
+                executor.submit(self._transfer_file, dst, src, opts)
+                for dst, (src, opts) in self._transfers.items()
+            ]
+            wait_for_future_errors(
+                executor, transfer_futures, logger=self.log)
+
+    def _backup_file(self, dst, src):
+        self.log.debug(f"Checking file ... {src} -> {dst}")
+        path_same = self._same_paths(src, dst)
+        if path_same or not os.path.exists(dst):
+            return
+
+        # Backup original file
+        backup = dst + ".bak"
+        self._backup_to_original[backup] = dst
+        self.log.debug(f"Backup existing file: {dst} -> {backup}")
+        os.rename(dst, backup)
+
+    def _transfer_file(self, dst, src, opts):
+        path_same = self._same_paths(src, dst)
+        if path_same:
+            self.log.debug(
+                f"Source and destination are same files {src} -> {dst}")
+            return
+
+        self._create_folder_for_file(dst)
+
+        if opts["mode"] == self.MODE_COPY:
+            self.log.debug(f"Copying file ... {src} -> {dst}")
+            copyfile(src, dst)
+        elif opts["mode"] == self.MODE_HARDLINK:
+            self.log.debug(f"Hardlinking file ... {src} -> {dst}")
+            create_hard_link(src, dst)
+
+        self._transferred.append(dst)
 
     def finalize(self):
         # Delete any backed up files
@@ -212,3 +221,46 @@ class FileTransaction:
             return os.stat(src) == os.stat(dst)
 
         return src == dst
+
+
+def wait_for_future_errors(
+        executor: ThreadPoolExecutor,
+        futures: List[Future],
+        logger: Optional[logging.Logger] = None):
+    """Shut down the ThreadPoolExecutor and cancel futures as soon as one
+    of the workers raises an error as they complete.
+
+    The ThreadPoolExecutor only cancels pending futures on exception but
+    will still run those already in flight to completion - each of which
+    could itself also fail. We log all exceptions but re-raise the last
+    exception only.
+
+    """
+    if logger is None:
+        logger = logging.getLogger(__name__)
+
+    for future in concurrent.futures.as_completed(futures):
+        exception = future.exception()
+        if exception:
+            # As soon as an error occurs, stop executing more futures.
+            # Running workers will still run to completion, so we also want
+            # to log those errors if any occurred on them.
+            executor.shutdown(wait=True, cancel_futures=True)
+            break
+    else:
+        # Futures are completed, no exceptions occurred
+        return
+
+    # An exception occurred in at least one future. Get exceptions from
+    # all futures that are done and ended up failing until that point.
+    exceptions = []
+    for future in futures:
+        if not future.cancelled() and future.done():
+            exception = future.exception()
+            if exception:
+                exceptions.append(exception)
+
+    # Log any exceptions that occurred in all workers
+    for exception in exceptions:
+        logger.error("Error occurred in worker", exc_info=exception)
+
+    # Raise the last exception
+    raise exceptions[-1]
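
Note: a minimal usage sketch of wait_for_future_errors (the task function below is hypothetical, not part of this commit). The first future that raises makes the helper shut down the executor, cancel pending futures (cancel_futures requires Python 3.9+), wait for in-flight workers, log every collected error, and re-raise the last one:

    import logging
    from concurrent.futures import ThreadPoolExecutor

    from ayon_core.lib.file_transaction import wait_for_future_errors

    def copy_job(index):
        # Hypothetical worker; index 3 simulates a failed transfer.
        if index == 3:
            raise RuntimeError(f"copy {index} failed")
        return index

    log = logging.getLogger("transfer")
    with ThreadPoolExecutor(max_workers=8) as executor:
        futures = [executor.submit(copy_job, i) for i in range(10)]
        # Raises the RuntimeError after logging all worker errors.
        wait_for_future_errors(executor, futures, logger=log)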

View file

@@ -462,8 +462,8 @@ class Anatomy(BaseAnatomy):
             Union[Dict[str, str], None]: Local root overrides.
 
         """
         if not project_name:
-            return
-        return ayon_api.get_project_roots_for_site(
+            return None
+        return ayon_api.get_project_root_overrides_by_site_id(
             project_name, get_local_site_id()
         )

View file

@@ -1,11 +1,14 @@
 # -*- coding: utf-8 -*-
 """Cleanup leftover files from publish."""
 import os
-import shutil
-import pyblish.api
 import re
+import shutil
+import tempfile
+
+import pyblish.api
 
 from ayon_core.lib import is_in_tests
+from ayon_core.pipeline import PublishError
 
 
 class CleanUp(pyblish.api.InstancePlugin):
@@ -48,17 +51,15 @@ class CleanUp(pyblish.api.InstancePlugin):
         if is_in_tests():
             # let automatic test process clean up temporary data
             return
 
-        # Get the errored instances
-        failed = []
+        # If instance has errors, do not clean up
         for result in instance.context.data["results"]:
-            if (result["error"] is not None and result["instance"] is not None
-                    and result["instance"] not in failed):
-                failed.append(result["instance"])
-        assert instance not in failed, (
-            "Result of '{}' instance were not success".format(
-                instance.data["name"]
-            )
-        )
+            if result["error"] is not None and result["instance"] is instance:
+                raise PublishError(
+                    "Result of '{}' instance were not success".format(
+                        instance.data["name"]
+                    )
+                )
 
         _skip_cleanup_filepaths = instance.context.data.get(
             "skipCleanupFilepaths"
@@ -71,10 +72,17 @@ class CleanUp(pyblish.api.InstancePlugin):
         self.log.debug("Cleaning renders new...")
         self.clean_renders(instance, skip_cleanup_filepaths)
 
-        if [ef for ef in self.exclude_families
-                if instance.data["productType"] in ef]:
+        # TODO: Figure out whether this could be refactored to just a
+        #   product_type in self.exclude_families check.
+        product_type = instance.data["productType"]
+        if any(
+            product_type in exclude_family
+            for exclude_family in self.exclude_families
+        ):
+            self.log.debug(
+                "Skipping cleanup for instance because product "
+                f"type is excluded from cleanup: {product_type}")
             return
 
-        import tempfile
         temp_root = tempfile.gettempdir()
         staging_dir = instance.data.get("stagingDir", None)

View file

@@ -32,16 +32,16 @@ class CollectManagedStagingDir(pyblish.api.InstancePlugin):
     label = "Collect Managed Staging Directory"
     order = pyblish.api.CollectorOrder + 0.4990
 
-    def process(self, instance):
+    def process(self, instance: pyblish.api.Instance):
         """ Collect the staging data and stores it to the instance.
 
         Args:
             instance (object): The instance to inspect.
 
         """
         staging_dir_path = get_instance_staging_dir(instance)
-        persistance = instance.data.get("stagingDir_persistent", False)
+        persistence: bool = instance.data.get("stagingDir_persistent", False)
 
-        self.log.info((
+        self.log.debug(
             f"Instance staging dir was set to `{staging_dir_path}` "
-            f"and persistence is set to `{persistance}`"
-        ))
+            f"and persistence is set to `{persistence}`"
+        )

View file

@@ -283,7 +283,11 @@ class ExtractOIIOTranscode(publish.Extractor):
         if collection.holes().indexes:
             return files_to_convert
 
-        frame_str = "{}-{}#".format(frames[0], frames[-1])
+        # Get the padding from the collection
+        # This is the number of digits used in the frame numbers
+        padding = collection.padding
+        frame_str = "{}-{}%0{}d".format(frames[0], frames[-1], padding)
         file_name = "{}{}{}".format(collection.head, frame_str,
                                     collection.tail)
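
Note: the replaced "#" token expresses padding only implicitly, while the printf-style "%0<padding>d" token carries the exact digit count. A small sketch of where the values above come from, using clique with illustrative filenames:

    import clique

    collections, _remainder = clique.assemble([
        "render.0001.exr", "render.0002.exr", "render.0003.exr"
    ])
    collection = collections[0]

    frames = list(collection.indexes)  # [1, 2, 3]
    padding = collection.padding       # 4, from the zero-padded names
    frame_str = "{}-{}%0{}d".format(frames[0], frames[-1], padding)
    print(collection.head + frame_str + collection.tail)
    # render.1-3%04d.exr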

View file

@ -1,7 +1,11 @@
import os
import copy
import errno
import itertools
import shutil
from concurrent.futures import ThreadPoolExecutor
from speedcopy import copyfile
import clique
import pyblish.api
@@ -13,6 +17,7 @@ from ayon_api.operations import (
 from ayon_api.utils import create_entity_id
 
 from ayon_core.lib import create_hard_link, source_hash
+from ayon_core.lib.file_transaction import wait_for_future_errors
 from ayon_core.pipeline.publish import (
     get_publish_template_name,
     OptionalPyblishPluginMixin,
@@ -415,11 +420,14 @@ class IntegrateHeroVersion(
         # Copy(hardlink) paths of source and destination files
         # TODO should we *only* create hardlinks?
         # TODO should we keep files for deletion until this is successful?
-        for src_path, dst_path in src_to_dst_file_paths:
-            self.copy_file(src_path, dst_path)
-
-        for src_path, dst_path in other_file_paths_mapping:
-            self.copy_file(src_path, dst_path)
+        with ThreadPoolExecutor(max_workers=8) as executor:
+            futures = [
+                executor.submit(self.copy_file, src_path, dst_path)
+                for src_path, dst_path in itertools.chain(
+                    src_to_dst_file_paths, other_file_paths_mapping
+                )
+            ]
+            wait_for_future_errors(executor, futures)
 
         # Update prepared representation etity data with files
         # and integrate it to server.
@@ -648,7 +656,7 @@ class IntegrateHeroVersion(
             src_path, dst_path
         ))
 
-        shutil.copy(src_path, dst_path)
+        copyfile(src_path, dst_path)
 
     def version_from_representations(self, project_name, repres):
         for repre in repres:

View file

@ -1,4 +1,5 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass, asdict
from typing import (
Optional,
Dict,
@@ -28,6 +29,19 @@ if TYPE_CHECKING:
     from .models import CreatorItem, PublishErrorInfo, InstanceItem
 
 
+@dataclass
+class CommentDef:
+    """Comment attribute definition."""
+    minimum_chars_required: int
+
+    def to_data(self):
+        return asdict(self)
+
+    @classmethod
+    def from_data(cls, data):
+        return cls(**data)
+
+
 class CardMessageTypes:
     standard = None
     info = "info"
@@ -135,6 +149,17 @@ class AbstractPublisherCommon(ABC):
         pass
 
+    @abstractmethod
+    def get_comment_def(self) -> CommentDef:
+        """Get comment attribute definition.
+
+        This can define how the Comment field should behave, like having
+        a minimum amount of required characters before being allowed to
+        publish.
+
+        """
+        pass
+
 
 class AbstractPublisherBackend(AbstractPublisherCommon):
     @abstractmethod
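
Note: a short round-trip sketch of the new CommentDef dataclass as it crosses the controller boundary (values are illustrative):

    comment_def = CommentDef(minimum_chars_required=10)

    data = comment_def.to_data()    # {"minimum_chars_required": 10}
    restored = CommentDef.from_data(data)
    assert restored == comment_def  # dataclass equality compares fields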

View file

@@ -20,7 +20,8 @@ from .models import (
 from .abstract import (
     AbstractPublisherBackend,
     AbstractPublisherFrontend,
-    CardMessageTypes
+    CardMessageTypes,
+    CommentDef,
 )
@@ -601,3 +602,17 @@ class PublisherController(
     def _start_publish(self, up_validation):
         self._publish_model.set_publish_up_validation(up_validation)
         self._publish_model.start_publish(wait=True)
+
+    def get_comment_def(self) -> CommentDef:
+        # Take the cached settings from the Create Context
+        settings = self.get_create_context().get_current_project_settings()
+        comment_minimum_required_chars: int = (
+            settings
+            .get("core", {})
+            .get("tools", {})
+            .get("publish", {})
+            .get("comment_minimum_required_chars", 0)
+        )
+        return CommentDef(
+            minimum_chars_required=comment_minimum_required_chars
+        )
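
Note: the chained .get() calls above walk the project settings shape sketched below (value illustrative) and fall back to 0 whenever a level is missing, so settings saved before this key existed still work:

    settings = {
        "core": {
            "tools": {
                "publish": {
                    "comment_minimum_required_chars": 3
                }
            }
        }
    }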

View file

@@ -245,6 +245,13 @@ class PublisherWindow(QtWidgets.QDialog):
         show_timer.setInterval(1)
         show_timer.timeout.connect(self._on_show_timer)
 
+        comment_invalid_timer = QtCore.QTimer()
+        comment_invalid_timer.setSingleShot(True)
+        comment_invalid_timer.setInterval(2500)
+        comment_invalid_timer.timeout.connect(
+            self._on_comment_invalid_timeout
+        )
+
         errors_dialog_message_timer = QtCore.QTimer()
         errors_dialog_message_timer.setInterval(100)
         errors_dialog_message_timer.timeout.connect(
@@ -395,6 +402,7 @@ class PublisherWindow(QtWidgets.QDialog):
         self._app_event_listener_installed = False
 
         self._show_timer = show_timer
+        self._comment_invalid_timer = comment_invalid_timer
         self._show_counter = 0
         self._window_is_visible = False
@@ -823,15 +831,45 @@ class PublisherWindow(QtWidgets.QDialog):
         self._controller.set_comment(self._comment_input.text())
 
     def _on_validate_clicked(self):
-        if self._save_changes(False):
+        if self._validate_comment() and self._save_changes(False):
             self._set_publish_comment()
             self._controller.validate()
 
     def _on_publish_clicked(self):
-        if self._save_changes(False):
+        if self._validate_comment() and self._save_changes(False):
             self._set_publish_comment()
             self._controller.publish()
 
+    def _validate_comment(self) -> bool:
+        # Validate comment length
+        comment_def = self._controller.get_comment_def()
+        char_count = len(self._comment_input.text().strip())
+        if (
+            comment_def.minimum_chars_required
+            and char_count < comment_def.minimum_chars_required
+        ):
+            self._overlay_object.add_message(
+                "Please enter a comment of at least "
+                f"{comment_def.minimum_chars_required} characters",
+                message_type="error"
+            )
+            self._invalidate_comment_field()
+            return False
+        return True
+
+    def _invalidate_comment_field(self):
+        self._comment_invalid_timer.start()
+        self._comment_input.setStyleSheet("border-color: #DD2020")
+        # Set focus so user can start typing and is pointed towards the field
+        self._comment_input.setFocus()
+        self._comment_input.setCursorPosition(
+            len(self._comment_input.text())
+        )
+
+    def _on_comment_invalid_timeout(self):
+        # Reset style
+        self._comment_input.setStyleSheet("")
+
     def _set_footer_enabled(self, enabled):
         self._save_btn.setEnabled(True)
         self._reset_btn.setEnabled(True)
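
Note: the invalid-comment feedback is a single-shot QTimer pattern - apply a stylesheet override immediately, then clear it when the 2500 ms timer fires. A standalone sketch of that pattern (widget names are hypothetical; qtpy is assumed, as elsewhere in ayon_core):

    from qtpy import QtCore, QtWidgets

    app = QtWidgets.QApplication([])
    field = QtWidgets.QLineEdit()

    reset_timer = QtCore.QTimer()
    reset_timer.setSingleShot(True)
    reset_timer.setInterval(2500)
    # Restore the default style once the timer fires.
    reset_timer.timeout.connect(lambda: field.setStyleSheet(""))

    def flag_invalid():
        field.setStyleSheet("border-color: #DD2020")
        field.setFocus()
        reset_timer.start()

    flag_invalid()
    field.show()
    app.exec_()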

View file

@@ -5,12 +5,32 @@ CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
 
 
 def get_font_filepath(
-    font_name: Optional[str] = "MaterialSymbolsOutlined"
+    font_name: Optional[str] = "MaterialSymbolsOutlined-Regular"
 ) -> str:
     return os.path.join(CURRENT_DIR, f"{font_name}.ttf")
 
 
 def get_mapping_filepath(
-    font_name: Optional[str] = "MaterialSymbolsOutlined"
+    font_name: Optional[str] = "MaterialSymbolsOutlined-Regular"
 ) -> str:
     return os.path.join(CURRENT_DIR, f"{font_name}.json")
+
+
+def regenerate_mapping():
+    """Regenerate the MaterialSymbolsOutlined.json file, assuming
+    MaterialSymbolsOutlined.codepoints and the TrueType font file have been
+    updated to support the new symbols.
+    """
+    import json
+
+    jfile = get_mapping_filepath()
+    cpfile = jfile.replace(".json", ".codepoints")
+    with open(cpfile, "r") as cpf:
+        codepoints = cpf.read()
+
+    mapping = {}
+    for cp in codepoints.splitlines():
+        name, code = cp.split()
+        mapping[name] = int(f"0x{code}", 16)
+
+    with open(jfile, "w") as jf:
+        json.dump(mapping, jf, indent=4)
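
Note: each line of the ".codepoints" file pairs a symbol name with a hex codepoint (the sample values below are illustrative of the upstream Material Symbols format):

    search e8b6
    home e88a

For those two lines, regenerate_mapping() would write:

    {
        "search": 59574,
        "home": 59530
    }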

View file

@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring AYON addon 'core' version."""
-__version__ = "1.2.0+dev"
+__version__ = "1.3.1+dev"

View file

@@ -1,6 +1,6 @@
 name = "core"
 title = "Core"
-version = "1.2.0+dev"
+version = "1.3.1+dev"
 
 client_dir = "ayon_core"

View file

@@ -5,7 +5,7 @@
 [tool.poetry]
 name = "ayon-core"
-version = "1.2.0+dev"
+version = "1.3.1+dev"
 description = ""
 authors = ["Ynput Team <team@ynput.io>"]
 readme = "README.md"

View file

@@ -358,6 +358,14 @@ class PublishToolModel(BaseSettingsModel):
             title="Custom Staging Dir Profiles"
         )
     )
+    comment_minimum_required_chars: int = SettingsField(
+        0,
+        title="Publish comment minimum required characters",
+        description=(
+            "Minimum number of characters required in the comment field "
+            "before the publisher UI is allowed to continue publishing"
+        )
+    )
 
 
 class GlobalToolsModel(BaseSettingsModel):
@@ -671,6 +679,7 @@ DEFAULT_TOOLS_VALUES = {
                 "task_names": [],
                 "template_name": "simpleUnrealTextureHero"
             }
-        ]
+        ],
+        "comment_minimum_required_chars": 0,
     }
 }