Merge branch 'develop' into enhancement/improved_OIIO_subimages_handling

This commit is contained in:
Jakub Ježek 2024-10-03 12:16:05 +02:00 committed by GitHub
commit 473c4c19d9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
21 changed files with 714 additions and 330 deletions

View file

@ -1,6 +1,6 @@
name: Bug Report
description: File a bug report
title: 'Your issue title here'
title: Your issue title here
labels:
- 'type: bug'
body:
@ -36,6 +36,16 @@ body:
description: What version are you running? Look to AYON Tray
options:
- 1.0.0
- 0.4.4
- 0.4.3
- 0.4.2
- 0.4.1
- 0.4.0
- 0.3.2
- 0.3.1
- 0.3.0
- 0.2.1
- 0.2.0
validations:
required: true
- type: dropdown

12
.github/workflows/release_trigger.yml vendored Normal file
View file

@ -0,0 +1,12 @@
name: 🚀 Release Trigger
on:
workflow_dispatch:
jobs:
call-release-trigger:
uses: ynput/ops-repo-automation/.github/workflows/release_trigger.yml@main
secrets:
token: ${{ secrets.YNPUT_BOT_TOKEN }}
email: ${{ secrets.CI_EMAIL }}
user: ${{ secrets.CI_USER }}

View file

@ -19,7 +19,8 @@ class OCIOEnvHook(PreLaunchHook):
"nuke",
"hiero",
"resolve",
"openrv"
"openrv",
"cinema4d"
}
launch_types = set()

View file

@ -81,7 +81,10 @@ def collect_frames(files):
dict: {'/folder/product_v001.0001.png': '0001', ....}
"""
patterns = [clique.PATTERNS["frames"]]
# clique.PATTERNS["frames"] supports only `.1001.exr` not `_1001.exr` so
# we use a customized pattern.
pattern = "[_.](?P<index>(?P<padding>0*)\\d+)\\.\\D+\\d?$"
patterns = [pattern]
collections, remainder = clique.assemble(
files, minimum_items=1, patterns=patterns)

View file

@ -3,7 +3,6 @@ from .constants import (
AVALON_INSTANCE_ID,
AYON_CONTAINER_ID,
AYON_INSTANCE_ID,
HOST_WORKFILE_EXTENSIONS,
)
from .anatomy import Anatomy
@ -114,7 +113,6 @@ __all__ = (
"AVALON_INSTANCE_ID",
"AYON_CONTAINER_ID",
"AYON_INSTANCE_ID",
"HOST_WORKFILE_EXTENSIONS",
# --- Anatomy ---
"Anatomy",

View file

@ -699,6 +699,34 @@ def get_ocio_config_views(config_path):
)
def _get_config_path_from_profile_data(
    profile, profile_type, template_data
):
    """Resolve an OCIO config path from profile data.

    Args:
        profile (dict[str, Any]): Profile data holding a path template
            under the ``profile_type`` key.
        profile_type (str): Key in ``profile`` with the path template.
        template_data (dict[str, Any]): Data used to format the template.

    Returns:
        Optional[dict[str, str]]: Config data with resolved "path" and the
            source "template", or ``None`` when the resolved path does not
            exist on disk.
    """
    template = profile[profile_type]
    # Fill the template with the available data and normalize the result
    # into a plain filesystem path string
    result = StringTemplate.format_strict_template(
        template, template_data
    )
    normalized_path = str(result.normalized())
    if not os.path.exists(normalized_path):
        log.warning(f"Path was not found '{normalized_path}'.")
        return None
    return {
        "path": normalized_path,
        "template": template
    }
def _get_global_config_data(
project_name,
host_name,
@ -717,7 +745,7 @@ def _get_global_config_data(
2. Custom path to ocio config.
3. Path to 'ocioconfig' representation on product. Name of product can be
defined in settings. Product name can be regex but exact match is
always preferred.
always preferred. Fallback can be defined in case no product is found.
None is returned when no profile is found, when path
@ -755,30 +783,36 @@ def _get_global_config_data(
profile_type = profile["type"]
if profile_type in ("builtin_path", "custom_path"):
template = profile[profile_type]
result = StringTemplate.format_strict_template(
template, template_data
)
normalized_path = str(result.normalized())
if not os.path.exists(normalized_path):
log.warning(f"Path was not found '{normalized_path}'.")
return None
return {
"path": normalized_path,
"template": template
}
return _get_config_path_from_profile_data(
profile, profile_type, template_data)
# TODO decide if this is the right name for representation
repre_name = "ocioconfig"
published_product_data = profile["published_product"]
product_name = published_product_data["product_name"]
fallback_data = published_product_data["fallback"]
if product_name == "":
log.error(
"Colorspace OCIO config path cannot be set. "
"Profile is set to published product but `Product name` is empty."
)
return None
folder_info = template_data.get("folder")
if not folder_info:
log.warning("Folder info is missing.")
return None
log.info("Using fallback data for ocio config path.")
# in case no product was found we need to use fallback
fallback_type = fallback_data["fallback_type"]
return _get_config_path_from_profile_data(
fallback_data, fallback_type, template_data
)
folder_path = folder_info["path"]
product_name = profile["product_name"]
if folder_id is None:
folder_entity = ayon_api.get_folder_by_path(
project_name, folder_path, fields={"id"}
@ -797,12 +831,13 @@ def _get_global_config_data(
fields={"id", "name"}
)
}
if not product_entities_by_name:
log.debug(
f"No product entities were found for folder '{folder_path}' with"
f" product name filter '{product_name}'."
# in case no product was found we need to use fallback
fallback_type = fallback_data["type"]
return _get_config_path_from_profile_data(
fallback_data, fallback_type, template_data
)
return None
# Try to use exact match first, otherwise use first available product
product_entity = product_entities_by_name.get(product_name)
@ -837,6 +872,7 @@ def _get_global_config_data(
path = get_representation_path_with_anatomy(repre_entity, anatomy)
template = repre_entity["attrib"]["template"]
return {
"path": path,
"template": template,

View file

@ -4,20 +4,3 @@ AYON_INSTANCE_ID = "ayon.create.instance"
# Backwards compatibility
AVALON_CONTAINER_ID = "pyblish.avalon.container"
AVALON_INSTANCE_ID = "pyblish.avalon.instance"
# TODO get extensions from host implementations
HOST_WORKFILE_EXTENSIONS = {
"blender": [".blend"],
"celaction": [".scn"],
"tvpaint": [".tvpp"],
"fusion": [".comp"],
"harmony": [".zip"],
"houdini": [".hip", ".hiplc", ".hipnc"],
"maya": [".ma", ".mb"],
"nuke": [".nk"],
"hiero": [".hrox"],
"photoshop": [".psd", ".psb"],
"premiere": [".prproj"],
"resolve": [".drp"],
"aftereffects": [".aep"]
}

View file

@ -506,55 +506,61 @@ class AbstractTemplateBuilder(ABC):
keep_placeholders (bool): Add flag to placeholder data for
hosts to decide if they want to remove
placeholder after it is used.
create_first_version (bool): create first version of a workfile
workfile_creation_enabled (bool): If True, it might create
first version but ignore
process if version is created
create_first_version (bool): Create first version of a workfile.
When set to True, this option initiates the saving of the
workfile for an initial version. It will skip saving if
a version already exists.
workfile_creation_enabled (bool): Whether the call is part of
creating a new workfile.
When True, we only build if the current file is not
an existing saved workfile but a "new" file. Basically when
enabled we assume the user tries to load it only into a
"New File" (unsaved empty workfile).
When False, the default value, we assume we explicitly want to
build the template in our current scene regardless of current
scene state.
"""
if any(
value is None
for value in [
template_path,
keep_placeholders,
create_first_version,
]
):
template_preset = self.get_template_preset()
if template_path is None:
template_path = template_preset["path"]
if keep_placeholders is None:
keep_placeholders = template_preset["keep_placeholder"]
if create_first_version is None:
create_first_version = template_preset["create_first_version"]
# More accurate variable name
# - logic related to workfile creation should be moved out in future
explicit_build_requested = not workfile_creation_enabled
# check if first version is created
created_version_workfile = False
if create_first_version:
created_version_workfile = self.create_first_workfile_version()
# if first version is created, import template
# and populate placeholders
# Get default values if not provided
if (
create_first_version
and workfile_creation_enabled
and created_version_workfile
template_path is None
or keep_placeholders is None
or create_first_version is None
):
preset = self.get_template_preset()
template_path: str = template_path or preset["path"]
if keep_placeholders is None:
keep_placeholders: bool = preset["keep_placeholder"]
if create_first_version is None:
create_first_version: bool = preset["create_first_version"]
# Build the template if we are explicitly requesting it or if it's
# an unsaved "new file".
is_new_file = not self.host.get_current_workfile()
if is_new_file or explicit_build_requested:
self.log.info(f"Building the workfile template: {template_path}")
self.import_template(template_path)
self.populate_scene_placeholders(
level_limit, keep_placeholders)
# save workfile after template is populated
self.save_workfile(created_version_workfile)
# ignore process if first workfile is enabled
# but a version is already created
if workfile_creation_enabled:
# Do not consider saving a first workfile version, if this is not set
# to be a "workfile creation" or `create_first_version` is disabled.
if explicit_build_requested or not create_first_version:
return
self.import_template(template_path)
self.populate_scene_placeholders(
level_limit, keep_placeholders)
# If there is no existing workfile, save the first version
workfile_path = self.get_workfile_path()
if not os.path.exists(workfile_path):
self.log.info("Saving first workfile: %s", workfile_path)
self.save_workfile(workfile_path)
else:
self.log.info(
"A workfile already exists. Skipping save of workfile as "
"initial version.")
def rebuild_template(self):
"""Go through existing placeholders in scene and update them.
@ -608,29 +614,16 @@ class AbstractTemplateBuilder(ABC):
pass
def create_first_workfile_version(self):
"""
Create first version of workfile.
def get_workfile_path(self):
"""Return last known workfile path or the first workfile path create.
Should load the content of template into scene so
'populate_scene_placeholders' can be started.
Args:
template_path (str): Fullpath for current task and
host's template file.
Return:
str: Last workfile path, or first version to create if none exist.
"""
# AYON_LAST_WORKFILE will be set to the last existing workfile OR
# if none exist it will be set to the first version.
last_workfile_path = os.environ.get("AYON_LAST_WORKFILE")
self.log.info("__ last_workfile_path: {}".format(last_workfile_path))
if os.path.exists(last_workfile_path):
# ignore in case workfile existence
self.log.info("Workfile already exists, skipping creation.")
return False
# Create first version
self.log.info("Creating first version of workfile.")
self.save_workfile(last_workfile_path)
# Confirm creation of first version
return last_workfile_path
def save_workfile(self, workfile_path):

View file

@ -230,6 +230,11 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
self.log
]
# TODO: This will currently incorrectly detect 'resources'
# that are published along with the publish, because those should
# not adhere to the template directly but are ingested in a
# customized way. For example, maya look textures or any publish
# that directly adds files into `instance.data["transfers"]`
src_paths = []
for repre_file in repre["files"]:
src_path = self.anatomy.fill_root(repre_file["path"])
@ -261,7 +266,18 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
frame = dst_frame
if frame is not None:
anatomy_data["frame"] = frame
if repre["context"].get("frame"):
anatomy_data["frame"] = frame
elif repre["context"].get("udim"):
anatomy_data["udim"] = frame
else:
# Fallback
self.log.warning(
"Representation context has no frame or udim"
" data. Supplying sequence frame to '{frame}'"
" formatting data."
)
anatomy_data["frame"] = frame
new_report_items, uploaded = deliver_single_file(*args)
report_items.update(new_report_items)
self._update_progress(uploaded)

View file

@ -122,13 +122,22 @@ class ExtractOIIOTranscode(publish.Extractor):
transcoding_type = output_def["transcoding_type"]
target_colorspace = view = display = None
# NOTE: we use colorspace_data as the fallback values for
# the target colorspace.
if transcoding_type == "colorspace":
# TODO: Should we fallback to the colorspace
# (which used as source above) ?
# or should we compute the target colorspace from
# current view and display ?
target_colorspace = (output_def["colorspace"] or
colorspace_data.get("colorspace"))
else:
view = output_def["view"] or colorspace_data.get("view")
display = (output_def["display"] or
colorspace_data.get("display"))
elif transcoding_type == "display_view":
display_view = output_def["display_view"]
view = display_view["view"] or colorspace_data.get("view")
display = (
display_view["display"]
or colorspace_data.get("display")
)
# both could be already collected by DCC,
# but could be overwritten when transcoding
@ -192,7 +201,7 @@ class ExtractOIIOTranscode(publish.Extractor):
new_repre["files"] = new_repre["files"][0]
# If the source representation has "review" tag, but its not
# part of the output defintion tags, then both the
# part of the output definition tags, then both the
# representations will be transcoded in ExtractReview and
# their outputs will clash in integration.
if "review" in repre.get("tags", []):

View file

@ -509,8 +509,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
if not is_sequence_representation:
files = [files]
if any(os.path.isabs(fname) for fname in files):
raise KnownPublishError("Given file names contain full paths")
for fname in files:
if os.path.isabs(fname):
raise KnownPublishError(
f"Representation file names contains full paths: {fname}"
)
if not is_sequence_representation:
return

View file

@ -36,7 +36,8 @@ class ValidateCurrentSaveFile(pyblish.api.ContextPlugin):
label = "Validate File Saved"
order = pyblish.api.ValidatorOrder - 0.1
hosts = ["fusion", "houdini", "max", "maya", "nuke", "substancepainter"]
hosts = ["fusion", "houdini", "max", "maya", "nuke", "substancepainter",
"cinema4d"]
actions = [SaveByVersionUpAction, ShowWorkfilesAction]
def process(self, context):

View file

@ -439,10 +439,13 @@ class PublisherWindow(QtWidgets.QDialog):
def make_sure_is_visible(self):
if self._window_is_visible:
self.setWindowState(QtCore.Qt.WindowActive)
else:
self.show()
self.raise_()
self.activateWindow()
self.showNormal()
def showEvent(self, event):
self._window_is_visible = True
super().showEvent(event)

View file

@ -777,7 +777,7 @@ class ProjectPushItemProcess:
task_info = copy.deepcopy(task_info)
task_info["name"] = dst_task_name
# Fill rest of task information based on task type
task_type_name = task_info["type"]
task_type_name = task_info["taskType"]
task_types_by_name = {
task_type["name"]: task_type
for task_type in self._project_entity["taskTypes"]
@ -821,7 +821,7 @@ class ProjectPushItemProcess:
task_name = task_type = None
if task_info:
task_name = task_info["name"]
task_type = task_info["type"]
task_type = task_info["taskType"]
product_name = get_product_name(
self._item.dst_project_name,
@ -905,7 +905,7 @@ class ProjectPushItemProcess:
project_name,
self.host_name,
task_name=self._task_info["name"],
task_type=self._task_info["type"],
task_type=self._task_info["taskType"],
product_type=product_type,
product_name=product_entity["name"]
)
@ -959,7 +959,7 @@ class ProjectPushItemProcess:
formatting_data = get_template_data(
self._project_entity,
self._folder_entity,
self._task_info.get("name"),
self._task_info,
self.host_name
)
formatting_data.update({

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON core addon version."""
__version__ = "0.4.5-dev.1"
"""Package declaring AYON addon 'core' version."""
__version__ = "1.0.0+dev"

View file

@ -1,3 +1,5 @@
#!/usr/bin/env python
"""Prepares server package from addon repo to upload to server.
Requires Python 3.9. (Or at least 3.8+).
@ -22,32 +24,39 @@ client side code zipped in `private` subfolder.
import os
import sys
import re
import io
import shutil
import argparse
import platform
import argparse
import logging
import collections
import zipfile
import hashlib
import subprocess
from typing import Optional, Iterable, Pattern, Union, List, Tuple
from typing import Optional
import package
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PACKAGE_PATH = os.path.join(CURRENT_DIR, "package.py")
package_content = {}
with open(PACKAGE_PATH, "r") as stream:
exec(stream.read(), package_content)
FileMapping = Tuple[Union[str, io.BytesIO], str]
ADDON_NAME: str = package.name
ADDON_VERSION: str = package.version
ADDON_CLIENT_DIR: Union[str, None] = getattr(package, "client_dir", None)
ADDON_VERSION = package_content["version"]
ADDON_NAME = package_content["name"]
ADDON_CLIENT_DIR = package_content["client_dir"]
CLIENT_VERSION_CONTENT = '''# -*- coding: utf-8 -*-
"""Package declaring AYON core addon version."""
__version__ = "{}"
CURRENT_ROOT: str = os.path.dirname(os.path.abspath(__file__))
SERVER_ROOT: str = os.path.join(CURRENT_ROOT, "server")
FRONTEND_ROOT: str = os.path.join(CURRENT_ROOT, "frontend")
FRONTEND_DIST_ROOT: str = os.path.join(FRONTEND_ROOT, "dist")
DST_DIST_DIR: str = os.path.join("frontend", "dist")
PRIVATE_ROOT: str = os.path.join(CURRENT_ROOT, "private")
PUBLIC_ROOT: str = os.path.join(CURRENT_ROOT, "public")
CLIENT_ROOT: str = os.path.join(CURRENT_ROOT, "client")
VERSION_PY_CONTENT = f'''# -*- coding: utf-8 -*-
"""Package declaring AYON addon '{ADDON_NAME}' version."""
__version__ = "{ADDON_VERSION}"
'''
# Patterns of directories to be skipped for server part of addon
IGNORE_DIR_PATTERNS = [
IGNORE_DIR_PATTERNS: List[Pattern] = [
re.compile(pattern)
for pattern in {
# Skip directories starting with '.'
@ -58,7 +67,7 @@ IGNORE_DIR_PATTERNS = [
]
# Patterns of files to be skipped for server part of addon
IGNORE_FILE_PATTERNS = [
IGNORE_FILE_PATTERNS: List[Pattern] = [
re.compile(pattern)
for pattern in {
# Skip files starting with '.'
@ -70,15 +79,6 @@ IGNORE_FILE_PATTERNS = [
]
def calculate_file_checksum(filepath, hash_algorithm, chunk_size=10000):
    """Compute the hexadecimal digest of a file's content.

    Args:
        filepath (str): Path of the file to hash.
        hash_algorithm (str): Name of a hashlib algorithm (e.g. "sha256").
        chunk_size (int): Number of bytes read per iteration.

    Returns:
        str: Hex digest of the file content.
    """
    hasher = hashlib.new(hash_algorithm)
    with open(filepath, "rb") as stream:
        # Read in fixed-size chunks to keep memory usage bounded
        while chunk := stream.read(chunk_size):
            hasher.update(chunk)
    return hasher.hexdigest()
class ZipFileLongPaths(zipfile.ZipFile):
"""Allows longer paths in zip files.
@ -97,12 +97,28 @@ class ZipFileLongPaths(zipfile.ZipFile):
else:
tpath = "\\\\?\\" + tpath
return super(ZipFileLongPaths, self)._extract_member(
member, tpath, pwd
)
return super()._extract_member(member, tpath, pwd)
def safe_copy_file(src_path, dst_path):
def _get_yarn_executable() -> Union[str, None]:
cmd = "which"
if platform.system().lower() == "windows":
cmd = "where"
for line in subprocess.check_output(
[cmd, "yarn"], encoding="utf-8"
).splitlines():
if not line or not os.path.exists(line):
continue
try:
subprocess.call([line, "--version"])
return line
except OSError:
continue
return None
def safe_copy_file(src_path: str, dst_path: str):
"""Copy file and make sure destination directory exists.
Ignore if destination already contains directories from source.
@ -115,210 +131,335 @@ def safe_copy_file(src_path, dst_path):
if src_path == dst_path:
return
dst_dir = os.path.dirname(dst_path)
try:
os.makedirs(dst_dir)
except Exception:
pass
dst_dir: str = os.path.dirname(dst_path)
os.makedirs(dst_dir, exist_ok=True)
shutil.copy2(src_path, dst_path)
def _value_match_regexes(value, regexes):
for regex in regexes:
if regex.search(value):
return True
return False
def _value_match_regexes(value: str, regexes: Iterable[Pattern]) -> bool:
return any(
regex.search(value)
for regex in regexes
)
def find_files_in_subdir(
src_path,
ignore_file_patterns=None,
ignore_dir_patterns=None
):
src_path: str,
ignore_file_patterns: Optional[List[Pattern]] = None,
ignore_dir_patterns: Optional[List[Pattern]] = None
) -> List[Tuple[str, str]]:
"""Find all files to copy in subdirectories of given path.
All files that match any of the patterns in 'ignore_file_patterns' will
be skipped and any directories that match any of the patterns in
'ignore_dir_patterns' will be skipped with all subfiles.
Args:
src_path (str): Path to directory to search in.
ignore_file_patterns (Optional[list[Pattern]]): List of regexes
to match files to ignore.
ignore_dir_patterns (Optional[list[Pattern]]): List of regexes
to match directories to ignore.
Returns:
list[tuple[str, str]]: List of tuples with path to file and parent
directories relative to 'src_path'.
"""
if ignore_file_patterns is None:
ignore_file_patterns = IGNORE_FILE_PATTERNS
if ignore_dir_patterns is None:
ignore_dir_patterns = IGNORE_DIR_PATTERNS
output = []
output: List[Tuple[str, str]] = []
if not os.path.exists(src_path):
return output
hierarchy_queue = collections.deque()
hierarchy_queue: collections.deque = collections.deque()
hierarchy_queue.append((src_path, []))
while hierarchy_queue:
item = hierarchy_queue.popleft()
item: Tuple[str, str] = hierarchy_queue.popleft()
dirpath, parents = item
for name in os.listdir(dirpath):
path = os.path.join(dirpath, name)
path: str = os.path.join(dirpath, name)
if os.path.isfile(path):
if not _value_match_regexes(name, ignore_file_patterns):
items = list(parents)
items: List[str] = list(parents)
items.append(name)
output.append((path, os.path.sep.join(items)))
continue
if not _value_match_regexes(name, ignore_dir_patterns):
items = list(parents)
items: List[str] = list(parents)
items.append(name)
hierarchy_queue.append((path, items))
return output
def copy_server_content(addon_output_dir, current_dir, log):
def update_client_version(logger):
    """Update version in client code if version.py is present."""
    if not ADDON_CLIENT_DIR:
        return

    client_version_file: str = os.path.join(
        CLIENT_ROOT, ADDON_CLIENT_DIR, "version.py"
    )
    if os.path.exists(client_version_file):
        logger.info("Updating client version")
        with open(client_version_file, "w") as stream:
            stream.write(VERSION_PY_CONTENT)
    else:
        logger.debug("Did not find version.py in client directory")
def update_pyproject_toml(logger):
    """Rewrite the first 'version =' line of pyproject.toml.

    Args:
        logger (logging.Logger): Unused; kept for call-site compatibility.
    """
    toml_path = os.path.join(CURRENT_ROOT, "pyproject.toml")
    with open(toml_path, "r") as stream:
        source_lines = stream.readlines()

    replaced = False
    output_lines = []
    for line in source_lines:
        # Only the first matching line is replaced with the addon version
        if not replaced and line.startswith("version ="):
            line = f'version = "{ADDON_VERSION}"\n'
            replaced = True
        output_lines.append(line)

    with open(toml_path, "w") as stream:
        stream.write("".join(output_lines))
def build_frontend():
    """Build the frontend with yarn.

    Runs 'yarn install' and 'yarn build' in the frontend root directory.

    Raises:
        RuntimeError: When yarn is not available, or the build did not
            produce the expected 'dist' output directory.
    """
    yarn_executable = _get_yarn_executable()
    if yarn_executable is None:
        raise RuntimeError("Yarn executable was not found.")

    subprocess.run([yarn_executable, "install"], cwd=FRONTEND_ROOT)
    subprocess.run([yarn_executable, "build"], cwd=FRONTEND_ROOT)
    # Build success is validated by presence of the 'dist' folder
    if not os.path.exists(FRONTEND_DIST_ROOT):
        raise RuntimeError(
            "Frontend build failed. Did not find 'dist' folder."
        )
def get_client_files_mapping() -> List[Tuple[str, str]]:
    """Mapping of source client code files to destination paths.

    Example output:
        [
            (
                "C:/addons/MyAddon/version.py",
                "my_addon/version.py"
            ),
            (
                "C:/addons/MyAddon/client/my_addon/__init__.py",
                "my_addon/__init__.py"
            )
        ]

    Returns:
        list[tuple[str, str]]: List of path mappings to copy. The destination
            path is relative to expected output directory.
    """
    client_code_dir: str = os.path.join(CLIENT_ROOT, ADDON_CLIENT_DIR)
    output: List[Tuple[str, str]] = []
    for src_path, sub_path in find_files_in_subdir(client_code_dir):
        output.append((src_path, os.path.join(ADDON_CLIENT_DIR, sub_path)))

    # Bundle the repository license with the client code when present
    license_path = os.path.join(CURRENT_ROOT, "LICENSE")
    if os.path.exists(license_path):
        output.append((license_path, f"{ADDON_CLIENT_DIR}/LICENSE"))
    return output
def get_client_zip_content(log) -> io.BytesIO:
    """Create in-memory zip archive of the addon client code.

    Args:
        log (logging.Logger): Logger used for progress messages.

    Returns:
        io.BytesIO: Stream positioned at the start, containing the
            zipped client files.
    """
    log.info("Preparing client code zip")
    files_mapping: List[Tuple[str, str]] = get_client_files_mapping()
    stream = io.BytesIO()
    with ZipFileLongPaths(stream, "w", zipfile.ZIP_DEFLATED) as zipf:
        for src_path, subpath in files_mapping:
            zipf.write(src_path, subpath)
    # Rewind so callers can read the archive from the beginning
    stream.seek(0)
    return stream
def get_base_files_mapping() -> List[FileMapping]:
    """Collect files that belong to the server side of the addon package.

    Includes 'package.py', an optional LICENSE, all files under the
    server, private and public directories, built frontend files and the
    client 'pyproject.toml' (stored under 'private/').

    Returns:
        List[FileMapping]: Tuples of source path (or stream) and
            destination subpath relative to the package root.
    """
    filepaths_to_copy: List[FileMapping] = [
        (
            os.path.join(CURRENT_ROOT, "package.py"),
            "package.py"
        )
    ]
    # Add license file to package if exists
    license_path = os.path.join(CURRENT_ROOT, "LICENSE")
    if os.path.exists(license_path):
        filepaths_to_copy.append((license_path, "LICENSE"))

    # Go through server, private and public directories and find all files
    for dirpath in (SERVER_ROOT, PRIVATE_ROOT, PUBLIC_ROOT):
        if not os.path.exists(dirpath):
            continue
        dirname = os.path.basename(dirpath)
        for src_file, subpath in find_files_in_subdir(dirpath):
            dst_subpath = os.path.join(dirname, subpath)
            filepaths_to_copy.append((src_file, dst_subpath))

    # Frontend build output is stored under 'frontend/dist' when present
    if os.path.exists(FRONTEND_DIST_ROOT):
        for src_file, subpath in find_files_in_subdir(FRONTEND_DIST_ROOT):
            dst_subpath = os.path.join(DST_DIST_DIR, subpath)
            filepaths_to_copy.append((src_file, dst_subpath))

    # Client poetry config is shipped in the 'private' part of the package
    pyproject_toml = os.path.join(CLIENT_ROOT, "pyproject.toml")
    if os.path.exists(pyproject_toml):
        filepaths_to_copy.append(
            (pyproject_toml, "private/pyproject.toml")
        )
    return filepaths_to_copy
def copy_client_code(output_dir: str, log: logging.Logger):
"""Copies server side folders to 'addon_package_dir'
Args:
addon_output_dir (str): package dir in addon repo dir
current_dir (str): addon repo dir
output_dir (str): Output directory path.
log (logging.Logger)
"""
log.info(f"Copying client for {ADDON_NAME}-{ADDON_VERSION}")
log.info("Copying server content")
full_output_path = os.path.join(
output_dir, f"{ADDON_NAME}_{ADDON_VERSION}"
)
if os.path.exists(full_output_path):
shutil.rmtree(full_output_path)
os.makedirs(full_output_path, exist_ok=True)
filepaths_to_copy = []
server_dirpath = os.path.join(current_dir, "server")
for item in find_files_in_subdir(server_dirpath):
src_path, dst_subpath = item
dst_path = os.path.join(addon_output_dir, "server", dst_subpath)
filepaths_to_copy.append((src_path, dst_path))
# Copy files
for src_path, dst_path in filepaths_to_copy:
for src_path, dst_subpath in get_client_files_mapping():
dst_path = os.path.join(full_output_path, dst_subpath)
safe_copy_file(src_path, dst_path)
def _update_client_version(client_addon_dir):
    """Write version.py file to 'client' directory.

    Make sure the version in client dir is the same as in package.py.

    Args:
        client_addon_dir (str): Directory path of client addon.
    """
    dst_version_path = os.path.join(client_addon_dir, "version.py")
    # Overwrite (or create) version.py with the packaged addon version
    with open(dst_version_path, "w") as stream:
        stream.write(CLIENT_VERSION_CONTENT.format(ADDON_VERSION))
log.info("Client copy finished")
def zip_client_side(addon_package_dir, current_dir, log):
    """Copy and zip `client` content into 'addon_package_dir'.

    Args:
        addon_package_dir (str): Output package directory path.
        current_dir (str): Directory path of addon source.
        log (logging.Logger): Logger object.

    Raises:
        ValueError: When the expected client directory does not exist.
    """
    client_dir = os.path.join(current_dir, "client")
    client_addon_dir = os.path.join(client_dir, ADDON_CLIENT_DIR)
    if not os.path.isdir(client_addon_dir):
        raise ValueError(
            f"Failed to find client directory '{client_addon_dir}'"
        )

    log.info("Preparing client code zip")
    private_dir = os.path.join(addon_package_dir, "private")
    os.makedirs(private_dir, exist_ok=True)

    _update_client_version(client_addon_dir)

    # Fixed redundant 'os.path.join(os.path.join(...))' wrapper call
    zip_filepath = os.path.join(private_dir, "client.zip")
    with ZipFileLongPaths(zip_filepath, "w", zipfile.ZIP_DEFLATED) as zipf:
        # Add client code content to zip
        for path, sub_path in find_files_in_subdir(client_addon_dir):
            sub_path = os.path.join(ADDON_CLIENT_DIR, sub_path)
            zipf.write(path, sub_path)

    shutil.copy(os.path.join(client_dir, "pyproject.toml"), private_dir)
def create_server_package(
def copy_addon_package(
output_dir: str,
addon_output_dir: str,
files_mapping: List[FileMapping],
log: logging.Logger
):
"""Create server package zip file.
The zip file can be installed to a server using UI or rest api endpoints.
"""Copy client code to output directory.
Args:
output_dir (str): Directory path to output zip file.
addon_output_dir (str): Directory path to addon output directory.
output_dir (str): Directory path to output client code.
files_mapping (List[FileMapping]): List of tuples with source file
and destination subpath.
log (logging.Logger): Logger object.
"""
log.info("Creating server package")
"""
log.info(f"Copying package for {ADDON_NAME}-{ADDON_VERSION}")
# Add addon name and version to output directory
addon_output_dir: str = os.path.join(
output_dir, ADDON_NAME, ADDON_VERSION
)
if os.path.isdir(addon_output_dir):
log.info(f"Purging {addon_output_dir}")
shutil.rmtree(addon_output_dir)
os.makedirs(addon_output_dir, exist_ok=True)
# Copy server content
for src_file, dst_subpath in files_mapping:
dst_path: str = os.path.join(addon_output_dir, dst_subpath)
dst_dir: str = os.path.dirname(dst_path)
os.makedirs(dst_dir, exist_ok=True)
if isinstance(src_file, io.BytesIO):
with open(dst_path, "wb") as stream:
stream.write(src_file.getvalue())
else:
safe_copy_file(src_file, dst_path)
log.info("Package copy finished")
def create_addon_package(
output_dir: str,
files_mapping: List[FileMapping],
log: logging.Logger
):
log.info(f"Creating package for {ADDON_NAME}-{ADDON_VERSION}")
os.makedirs(output_dir, exist_ok=True)
output_path = os.path.join(
output_dir, f"{ADDON_NAME}-{ADDON_VERSION}.zip"
)
with ZipFileLongPaths(output_path, "w", zipfile.ZIP_DEFLATED) as zipf:
# Move addon content to zip into 'addon' directory
addon_output_dir_offset = len(addon_output_dir) + 1
for root, _, filenames in os.walk(addon_output_dir):
if not filenames:
continue
# Copy server content
for src_file, dst_subpath in files_mapping:
if isinstance(src_file, io.BytesIO):
zipf.writestr(dst_subpath, src_file.getvalue())
else:
zipf.write(src_file, dst_subpath)
dst_root = None
if root != addon_output_dir:
dst_root = root[addon_output_dir_offset:]
for filename in filenames:
src_path = os.path.join(root, filename)
dst_path = filename
if dst_root:
dst_path = os.path.join(dst_root, dst_path)
zipf.write(src_path, dst_path)
log.info(f"Output package can be found: {output_path}")
log.info("Package created")
def main(
output_dir: Optional[str]=None,
skip_zip: bool=False,
keep_sources: bool=False,
clear_output_dir: bool=False
output_dir: Optional[str] = None,
skip_zip: Optional[bool] = False,
only_client: Optional[bool] = False
):
log = logging.getLogger("create_package")
log.info("Start creating package")
log: logging.Logger = logging.getLogger("create_package")
log.info("Package creation started")
current_dir = os.path.dirname(os.path.abspath(__file__))
if not output_dir:
output_dir = os.path.join(current_dir, "package")
output_dir = os.path.join(CURRENT_ROOT, "package")
has_client_code = bool(ADDON_CLIENT_DIR)
if has_client_code:
client_dir: str = os.path.join(CLIENT_ROOT, ADDON_CLIENT_DIR)
if not os.path.exists(client_dir):
raise RuntimeError(
f"Client directory was not found '{client_dir}'."
" Please check 'client_dir' in 'package.py'."
)
update_client_version(log)
new_created_version_dir = os.path.join(
output_dir, ADDON_NAME, ADDON_VERSION
)
update_pyproject_toml(log)
if os.path.isdir(new_created_version_dir) and clear_output_dir:
log.info(f"Purging {new_created_version_dir}")
shutil.rmtree(output_dir)
if only_client:
if not has_client_code:
raise RuntimeError("Client code is not available. Skipping")
copy_client_code(output_dir, log)
return
log.info(f"Preparing package for {ADDON_NAME}-{ADDON_VERSION}")
addon_output_root = os.path.join(output_dir, ADDON_NAME)
addon_output_dir = os.path.join(addon_output_root, ADDON_VERSION)
if not os.path.exists(addon_output_dir):
os.makedirs(addon_output_dir)
if os.path.exists(FRONTEND_ROOT):
build_frontend()
copy_server_content(addon_output_dir, current_dir, log)
safe_copy_file(
PACKAGE_PATH,
os.path.join(addon_output_dir, os.path.basename(PACKAGE_PATH))
)
zip_client_side(addon_output_dir, current_dir, log)
files_mapping: List[FileMapping] = []
files_mapping.extend(get_base_files_mapping())
if has_client_code:
files_mapping.append(
(get_client_zip_content(log), "private/client.zip")
)
# Skip server zipping
if not skip_zip:
create_server_package(output_dir, addon_output_dir, log)
# Remove sources only if zip file is created
if not keep_sources:
log.info("Removing source files for server package")
shutil.rmtree(addon_output_root)
if skip_zip:
copy_addon_package(output_dir, files_mapping, log)
else:
create_addon_package(output_dir, files_mapping, log)
log.info("Package creation finished")
@ -333,23 +474,6 @@ if __name__ == "__main__":
" server folder structure."
)
)
parser.add_argument(
"--keep-sources",
dest="keep_sources",
action="store_true",
help=(
"Keep folder structure when server package is created."
)
)
parser.add_argument(
"-c", "--clear-output-dir",
dest="clear_output_dir",
action="store_true",
help=(
"Clear output directory before package creation."
)
)
parser.add_argument(
"-o", "--output",
dest="output_dir",
@ -359,11 +483,25 @@ if __name__ == "__main__":
" (Will be purged if already exists!)"
)
)
parser.add_argument(
"--only-client",
dest="only_client",
action="store_true",
help=(
"Extract only client code. This is useful for development."
" Requires '-o', '--output' argument to be filled."
)
)
parser.add_argument(
"--debug",
dest="debug",
action="store_true",
help="Debug log messages."
)
args = parser.parse_args(sys.argv[1:])
main(
args.output_dir,
args.skip_zip,
args.keep_sources,
args.clear_output_dir
)
level = logging.INFO
if args.debug:
level = logging.DEBUG
logging.basicConfig(level=level)
main(args.output_dir, args.skip_zip, args.only_client)

View file

@ -1,6 +1,6 @@
name = "core"
title = "Core"
version = "0.4.5-dev.1"
version = "1.0.0+dev"
client_dir = "ayon_core"

View file

@ -5,7 +5,7 @@
[tool.poetry]
name = "ayon-core"
version = "0.4.3-dev.1"
version = "1.0.0+dev"
description = ""
authors = ["Ynput Team <team@ynput.io>"]
readme = "README.md"

View file

@ -4,6 +4,29 @@ from typing import Any
from .publish_plugins import DEFAULT_PUBLISH_VALUES
def _convert_imageio_configs_0_4_5(overrides):
"""Imageio config settings did change to profiles since 0.4.5."""
imageio_overrides = overrides.get("imageio") or {}
# make sure settings are already converted to profiles
ocio_config_profiles = imageio_overrides.get("ocio_config_profiles")
if not ocio_config_profiles:
return
for profile in ocio_config_profiles:
if profile.get("type") != "product_name":
continue
profile["type"] = "published_product"
profile["published_product"] = {
"product_name": profile.pop("product_name"),
"fallback": {
"type": "builtin_path",
"builtin_path": "{BUILTIN_OCIO_ROOT}/aces_1.2/config.ocio",
},
}
def _convert_imageio_configs_0_3_1(overrides):
"""Imageio config settings did change to profiles since 0.3.1. ."""
imageio_overrides = overrides.get("imageio") or {}
@ -71,10 +94,43 @@ def _convert_validate_version_0_3_3(publish_overrides):
validate_version["plugin_state_profiles"] = [profile]
def _conver_publish_plugins(overrides):
def _convert_oiio_transcode_0_4_5(publish_overrides):
"""ExtractOIIOTranscode plugin changed in 0.4.5."""
if "ExtractOIIOTranscode" not in publish_overrides:
return
transcode_profiles = publish_overrides["ExtractOIIOTranscode"].get(
"profiles")
if not transcode_profiles:
return
for profile in transcode_profiles:
outputs = profile.get("outputs")
if outputs is None:
return
for output in outputs:
# Already new settings
if "display_view" in output:
break
# Fix 'display' -> 'display_view' in 'transcoding_type'
transcode_type = output.get("transcoding_type")
if transcode_type == "display":
output["transcoding_type"] = "display_view"
# Convert 'display' and 'view' to new values
output["display_view"] = {
"display": output.pop("display", ""),
"view": output.pop("view", ""),
}
def _convert_publish_plugins(overrides):
    """Apply all publish plugin settings conversions.

    Args:
        overrides (dict): Settings overrides, modified in-place.
    """
    if "publish" not in overrides:
        return
    publish_overrides = overrides["publish"]
    _convert_validate_version_0_3_3(publish_overrides)
    _convert_oiio_transcode_0_4_5(publish_overrides)
def convert_settings_overrides(
@ -82,5 +138,6 @@ def convert_settings_overrides(
overrides: dict[str, Any],
) -> dict[str, Any]:
_convert_imageio_configs_0_3_1(overrides)
_conver_publish_plugins(overrides)
_convert_imageio_configs_0_4_5(overrides)
_convert_publish_plugins(overrides)
return overrides

View file

@ -58,7 +58,14 @@ def _ocio_config_profile_types():
return [
{"value": "builtin_path", "label": "AYON built-in OCIO config"},
{"value": "custom_path", "label": "Path to OCIO config"},
{"value": "product_name", "label": "Published product"},
{"value": "published_product", "label": "Published product"},
]
def _fallback_ocio_config_profile_types():
return [
{"value": "builtin_path", "label": "AYON built-in OCIO config"},
{"value": "custom_path", "label": "Path to OCIO config"},
]
@ -76,6 +83,49 @@ def _ocio_built_in_paths():
]
class FallbackProductModel(BaseSettingsModel):
    """Fallback OCIO config used when no published product is found.

    Only the 'builtin_path' and 'custom_path' types are offered (see
    '_fallback_ocio_config_profile_types'); the field matching the
    selected 'fallback_type' value is the one that applies.
    """

    _layout = "expanded"
    # Conditional enum: selects which of the fields below is used
    fallback_type: str = SettingsField(
        title="Fallback config type",
        enum_resolver=_fallback_ocio_config_profile_types,
        conditionalEnum=True,
        default="builtin_path",
        description=(
            "Type of config which needs to be used in case published "
            "product is not found."
        ),
    )
    # Used when fallback_type == "builtin_path"
    builtin_path: str = SettingsField(
        "ACES 1.2",
        title="Built-in OCIO config",
        enum_resolver=_ocio_built_in_paths,
        description=(
            "AYON ocio addon distributed OCIO config. "
            "Activated addon in bundle is required: 'ayon_ocio' >= 1.1.1"
        ),
    )
    # Used when fallback_type == "custom_path"
    custom_path: str = SettingsField(
        "",
        title="OCIO config path",
        description="Path to OCIO config. Anatomy formatting is supported.",
    )
class PublishedProductModel(BaseSettingsModel):
    """OCIO config source resolved from a context published product.

    'product_name' supports partial matching via regex; 'fallback'
    defines which config to use when no matching product exists.
    """

    _layout = "expanded"
    # Regex-capable product name to look up in the publish context
    product_name: str = SettingsField(
        "",
        title="Product name",
        description=(
            "Context related published product name to get OCIO config from. "
            "Partial match is supported via use of regex expression."
        ),
    )
    # Config used when the published product cannot be found
    fallback: FallbackProductModel = SettingsField(
        default_factory=FallbackProductModel,
    )
class CoreImageIOConfigProfilesModel(BaseSettingsModel):
_layout = "expanded"
host_names: list[str] = SettingsField(
@ -102,19 +152,19 @@ class CoreImageIOConfigProfilesModel(BaseSettingsModel):
"ACES 1.2",
title="Built-in OCIO config",
enum_resolver=_ocio_built_in_paths,
description=(
"AYON ocio addon distributed OCIO config. "
"Activated addon in bundle is required: 'ayon_ocio' >= 1.1.1"
),
)
custom_path: str = SettingsField(
"",
title="OCIO config path",
description="Path to OCIO config. Anatomy formatting is supported.",
)
product_name: str = SettingsField(
"",
title="Product name",
description=(
"Published product name to get OCIO config from. "
"Partial match is supported."
),
published_product: PublishedProductModel = SettingsField(
default_factory=PublishedProductModel,
title="Published product",
)
@ -294,7 +344,14 @@ DEFAULT_VALUES = {
"type": "builtin_path",
"builtin_path": "{BUILTIN_OCIO_ROOT}/aces_1.2/config.ocio",
"custom_path": "",
"product_name": "",
"published_product": {
"product_name": "",
"fallback": {
"fallback_type": "builtin_path",
"builtin_path": "ACES 1.2",
"custom_path": ""
}
}
}
],
"file_rules": {

View file

@ -268,13 +268,36 @@ class ExtractThumbnailModel(BaseSettingsModel):
def _extract_oiio_transcoding_type():
return [
{"value": "colorspace", "label": "Use Colorspace"},
{"value": "display", "label": "Use Display&View"}
{"value": "display_view", "label": "Use Display&View"}
]
class OIIOToolArgumentsModel(BaseSettingsModel):
    """Extra command line arguments passed through to 'oiiotool'."""

    additional_command_args: list[str] = SettingsField(
        default_factory=list,
        title="Arguments",
        description="Additional command line arguments for *oiiotool*."
    )
class UseDisplayViewModel(BaseSettingsModel):
    """Target Display&View pair for 'display_view' transcoding outputs.

    Empty values mean "keep the source Display/View" per the field
    descriptions below.
    """

    _layout = "expanded"
    display: str = SettingsField(
        "",
        title="Target Display",
        description=(
            "Display of the target transform. If left empty, the"
            " source Display value will be used."
        )
    )
    view: str = SettingsField(
        "",
        title="Target View",
        description=(
            "View of the target transform. If left empty, the"
            " source View value will be used."
        )
    )
class ExtractOIIOTranscodeOutputModel(BaseSettingsModel):
@ -285,22 +308,57 @@ class ExtractOIIOTranscodeOutputModel(BaseSettingsModel):
description="Output name (no space)",
regex=r"[a-zA-Z0-9_]([a-zA-Z0-9_\.\-]*[a-zA-Z0-9_])?$",
)
extension: str = SettingsField("", title="Extension")
extension: str = SettingsField(
"",
title="Extension",
description=(
"Target extension. If left empty, original"
" extension is used."
),
)
transcoding_type: str = SettingsField(
"colorspace",
title="Transcoding type",
enum_resolver=_extract_oiio_transcoding_type
enum_resolver=_extract_oiio_transcoding_type,
conditionalEnum=True,
description=(
"Select the transcoding type for your output, choosing either "
"*Colorspace* or *Display&View* transform."
" Only one option can be applied per output definition."
),
)
colorspace: str = SettingsField("", title="Colorspace")
display: str = SettingsField("", title="Display")
view: str = SettingsField("", title="View")
colorspace: str = SettingsField(
"",
title="Target Colorspace",
description=(
"Choose the desired target colorspace, confirming its availability"
" in the active OCIO config. If left empty, the"
" source colorspace value will be used, resulting in no"
" colorspace conversion."
)
)
display_view: UseDisplayViewModel = SettingsField(
title="Use Display&View",
default_factory=UseDisplayViewModel
)
oiiotool_args: OIIOToolArgumentsModel = SettingsField(
default_factory=OIIOToolArgumentsModel,
title="OIIOtool arguments")
tags: list[str] = SettingsField(default_factory=list, title="Tags")
tags: list[str] = SettingsField(
default_factory=list,
title="Tags",
description=(
"Additional tags that will be added to the created representation."
"\nAdd *review* tag to create review from the transcoded"
" representation instead of the original."
)
)
custom_tags: list[str] = SettingsField(
default_factory=list, title="Custom Tags"
default_factory=list,
title="Custom Tags",
description="Additional custom tags that will be added to the created representation."
)
@ -328,7 +386,13 @@ class ExtractOIIOTranscodeProfileModel(BaseSettingsModel):
)
delete_original: bool = SettingsField(
True,
title="Delete Original Representation"
title="Delete Original Representation",
description=(
"Choose to preserve or remove the original representation.\n"
"Keep in mind that if the transcoded representation includes"
" a `review` tag, it will take precedence over"
" the original for creating reviews."
),
)
outputs: list[ExtractOIIOTranscodeOutputModel] = SettingsField(
default_factory=list,
@ -371,7 +435,7 @@ class ExtractReviewFFmpegModel(BaseSettingsModel):
def extract_review_filter_enum():
return [
{
"value": "everytime",
"value": "everytime", # codespell:ignore everytime
"label": "Always"
},
{
@ -393,7 +457,7 @@ class ExtractReviewFilterModel(BaseSettingsModel):
default_factory=list, title="Custom Tags"
)
single_frame_filter: str = SettingsField(
"everytime",
"everytime", # codespell:ignore everytime
description=(
"Use output <b>always</b> / only if input <b>is 1 frame</b>"
" image / only if has <b>2+ frames</b> or <b>is video</b>"
@ -791,7 +855,7 @@ class IntegrateHeroVersionModel(BaseSettingsModel):
class CleanUpModel(BaseSettingsModel):
_isGroup = True
paterns: list[str] = SettingsField(
paterns: list[str] = SettingsField( # codespell:ignore paterns
default_factory=list,
title="Patterns (regex)"
)
@ -1225,7 +1289,7 @@ DEFAULT_PUBLISH_VALUES = {
"use_hardlinks": False
},
"CleanUp": {
"paterns": [],
"paterns": [], # codespell:ignore paterns
"remove_temp_renders": False
},
"CleanUpFarm": {