Merge branch 'develop' into enhancement/OP-8208_Use-folderPath-during-publishing

# Conflicts:
#	client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py
This commit is contained in:
Jakub Trllo 2024-02-20 11:33:15 +01:00
commit fc10b8d52a
33 changed files with 213 additions and 513 deletions

View file

@ -181,6 +181,10 @@ class HostDirmap(object):
exclude_locals=False,
cached=False)
# TODO implement
# Dirmap is dependent on 'get_site_local_overrides' which
# is not implemented in AYON. The mapping should be received
# from sitesync addon.
active_overrides = get_site_local_overrides(
project_name, active_site)
remote_overrides = get_site_local_overrides(

View file

@ -194,13 +194,13 @@ class RenderCreator(Creator):
name into created subset name.
Position of composition name could be set in
`project_settings/global/tools/creator/subset_name_profiles` with some
form of '{composition}' placeholder.
`project_settings/global/tools/creator/product_name_profiles` with
some form of '{composition}' placeholder.
Composition name will be used implicitly if multiple composition should
be handled at same time.
If {composition} placeholder is not us 'subset_name_profiles'
If {composition} placeholder is not us 'product_name_profiles'
composition name will be capitalized and set at the end of subset name
if necessary.

View file

@ -493,7 +493,7 @@ def get_colorspace_from_node(node):
def get_review_presets_config():
settings = get_current_project_settings()
review_profiles = (
settings["global"]
settings["core"]
["publish"]
["ExtractReview"]
["profiles"]
@ -1348,7 +1348,9 @@ def _remove_old_knobs(node):
def exposed_write_knobs(settings, plugin_name, instance_node):
exposed_knobs = settings["nuke"]["create"][plugin_name]["exposed_knobs"]
exposed_knobs = settings["nuke"]["create"][plugin_name].get(
"exposed_knobs", []
)
if exposed_knobs:
instance_node.addKnob(nuke.Text_Knob('', 'Write Knobs'))
write_node = nuke.allNodes(group=instance_node, filter="Write")[0]

View file

@ -65,7 +65,7 @@ class ValidateExposedKnobs(
group_node = instance.data["transientData"]["node"]
nuke_settings = instance.context.data["project_settings"]["nuke"]
create_settings = nuke_settings["create"][plugin]
exposed_knobs = create_settings["exposed_knobs"]
exposed_knobs = create_settings.get("exposed_knobs", [])
unexposed_knobs = []
for knob in exposed_knobs:
if knob not in group_node.knobs():

View file

@ -209,8 +209,8 @@ class ImageCreator(Creator):
'Use layer name in subset' will explicitly add layer name into subset
name. Position of this name is configurable in
`project_settings/global/tools/creator/subset_name_profiles`.
If layer placeholder ({layer}) is not used in `subset_name_profiles`
`project_settings/global/tools/creator/product_name_profiles`.
If layer placeholder ({layer}) is not used in `product_name_profiles`
but layer name should be used (set explicitly in UI or implicitly if
multiple images should be created), it is added in capitalized form
as a suffix to subset name.

View file

@ -16,7 +16,6 @@ from ayon_core.client import get_asset_name_identifier
from ayon_core.settings import (
get_system_settings,
get_project_settings,
get_local_settings
)
from ayon_core.settings.constants import (
METADATA_KEYS,
@ -1528,16 +1527,17 @@ def prepare_app_environments(
# Use environments from local settings
filtered_local_envs = {}
system_settings = data["system_settings"]
whitelist_envs = system_settings["general"].get("local_env_white_list")
if whitelist_envs:
local_settings = get_local_settings()
local_envs = local_settings.get("environments") or {}
filtered_local_envs = {
key: value
for key, value in local_envs.items()
if key in whitelist_envs
}
# NOTE Overrides for environment variables are not implemented in AYON.
# system_settings = data["system_settings"]
# whitelist_envs = system_settings["general"].get("local_env_white_list")
# if whitelist_envs:
# local_settings = get_local_settings()
# local_envs = local_settings.get("environments") or {}
# filtered_local_envs = {
# key: value
# for key, value in local_envs.items()
# if key in whitelist_envs
# }
# Apply local environment variables for already existing values
for key, value in filtered_local_envs.items():
@ -1656,8 +1656,9 @@ def apply_project_environments_value(
if project_settings is None:
project_settings = get_project_settings(project_name)
env_value = project_settings["global"]["project_environments"]
env_value = project_settings["core"]["project_environments"]
if env_value:
env_value = json.loads(env_value)
parsed_value = parse_environments(env_value, env_group)
env.update(acre.compute(
_merge_env(parsed_value, env),
@ -1916,7 +1917,7 @@ def should_start_last_workfile(
project_settings = get_project_settings(project_name)
profiles = (
project_settings
["global"]
["core"]
["tools"]
["Workfiles"]
["last_workfile_on_startup"]
@ -1966,7 +1967,7 @@ def should_workfile_tool_start(
project_settings = get_project_settings(project_name)
profiles = (
project_settings
["global"]
["core"]
["tools"]
["Workfiles"]
["open_workfile_tool_on_startup"]

View file

@ -5,7 +5,6 @@ import platform
import getpass
import socket
from ayon_core.settings.lib import get_local_settings
from .execute import get_ayon_launcher_args
from .local_settings import get_local_site_id
@ -96,7 +95,6 @@ def get_all_current_info():
return {
"workstation": get_workstation_info(),
"env": os.environ.copy(),
"local_settings": get_local_settings(),
"ayon": get_ayon_info(),
}

View file

@ -1385,23 +1385,26 @@ def _get_image_dimensions(application, input_path, log):
def convert_color_values(application, color_value):
"""Get color mapping for ffmpeg and oiiotool.
Args:
application (str): Application for which command should be created.
color_value (list[int]): List of 8bit int values for RGBA.
color_value (tuple[int, int, int, float]): List of 8bit int values
for RGBA.
Returns:
str: ffmpeg returns hex string, oiiotool is string with floats.
"""
red, green, blue, alpha = color_value
if application == "ffmpeg":
return "{0:0>2X}{1:0>2X}{2:0>2X}@{3}".format(
red, green, blue, (alpha / 255.0)
red, green, blue, alpha
)
elif application == "oiiotool":
red = float(red / 255)
green = float(green / 255)
blue = float(blue / 255)
alpha = float(alpha / 255)
return "{0:.3f},{1:.3f},{2:.3f},{3:.3f}".format(
red, green, blue, alpha)

View file

@ -330,151 +330,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
self.log.debug("Skipping local instance.")
return
data = instance.data.copy()
context = instance.context
self.context = context
self.anatomy = instance.context.data["anatomy"]
folder_path = data.get("folderPath") or context.data["folderPath"]
subset = data.get("subset")
start = instance.data.get("frameStart")
if start is None:
start = context.data["frameStart"]
end = instance.data.get("frameEnd")
if end is None:
end = context.data["frameEnd"]
handle_start = instance.data.get("handleStart")
if handle_start is None:
handle_start = context.data["handleStart"]
handle_end = instance.data.get("handleEnd")
if handle_end is None:
handle_end = context.data["handleEnd"]
fps = instance.data.get("fps")
if fps is None:
fps = context.data["fps"]
if data.get("extendFrames", False):
start, end = self._extend_frames(
folder_path,
subset,
start,
end,
data["overrideExistingFrame"])
try:
source = data["source"]
except KeyError:
source = context.data["currentFile"]
success, rootless_path = (
self.anatomy.find_root_template_from_path(source)
)
if success:
source = rootless_path
else:
# `rootless_path` is not set to `source` if none of roots match
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues."
).format(source))
family = "render"
if ("prerender" in instance.data["families"] or
"prerender.farm" in instance.data["families"]):
family = "prerender"
families = [family]
# pass review to families if marked as review
do_not_add_review = False
if data.get("review"):
families.append("review")
elif data.get("review") is False:
self.log.debug("Instance has review explicitly disabled.")
do_not_add_review = True
instance_skeleton_data = {
"family": family,
"subset": subset,
"families": families,
"folderPath": folder_path,
"frameStart": start,
"frameEnd": end,
"handleStart": handle_start,
"handleEnd": handle_end,
"frameStartHandle": start - handle_start,
"frameEndHandle": end + handle_end,
"comment": instance.data["comment"],
"fps": fps,
"source": source,
"extendFrames": data.get("extendFrames"),
"overrideExistingFrame": data.get("overrideExistingFrame"),
"pixelAspect": data.get("pixelAspect", 1),
"resolutionWidth": data.get("resolutionWidth", 1920),
"resolutionHeight": data.get("resolutionHeight", 1080),
"multipartExr": data.get("multipartExr", False),
"jobBatchName": data.get("jobBatchName", ""),
"useSequenceForReview": data.get("useSequenceForReview", True),
# map inputVersions `ObjectId` -> `str` so json supports it
"inputVersions": list(map(str, data.get("inputVersions", []))),
"colorspace": instance.data.get("colorspace"),
"stagingDir_persistent": instance.data.get(
"stagingDir_persistent", False
)
}
# skip locking version if we are creating v01
instance_version = instance.data.get("version") # take this if exists
if instance_version != 1:
instance_skeleton_data["version"] = instance_version
# transfer specific families from original instance to new render
for item in self.families_transfer:
if item in instance.data.get("families", []):
instance_skeleton_data["families"] += [item]
# transfer specific properties from original instance based on
# mapping dictionary `instance_transfer`
for key, values in self.instance_transfer.items():
if key in instance.data.get("families", []):
for v in values:
instance_skeleton_data[v] = instance.data.get(v)
# look into instance data if representations are not having any
# which are having tag `publish_on_farm` and include them
for repre in instance.data.get("representations", []):
staging_dir = repre.get("stagingDir")
if staging_dir:
success, rootless_staging_dir = (
self.anatomy.find_root_template_from_path(
staging_dir
)
)
if success:
repre["stagingDir"] = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging_dir))
repre["stagingDir"] = staging_dir
if "publish_on_farm" in repre.get("tags"):
# create representations attribute of not there
if "representations" not in instance_skeleton_data.keys():
instance_skeleton_data["representations"] = []
instance_skeleton_data["representations"].append(repre)
instances = None
assert data.get("expectedFiles"), ("Submission from old Pype version"
" - missing expectedFiles")
anatomy = instance.context.data["anatomy"]
instance_skeleton_data = create_skeleton_instance(

View file

@ -8,9 +8,6 @@ import numbers
import six
import time
from ayon_core.settings.lib import (
get_local_settings,
)
from ayon_core.client import get_project, get_ayon_server_api_connection
from ayon_core.lib import Logger, get_local_site_id
from ayon_core.lib.path_templates import (
@ -453,7 +450,7 @@ class Anatomy(BaseAnatomy):
return cls._sync_server_addon_cache.data
@classmethod
def _get_studio_roots_overrides(cls, project_name, local_settings=None):
def _get_studio_roots_overrides(cls, project_name):
"""This would return 'studio' site override by local settings.
Notes:
@ -465,7 +462,6 @@ class Anatomy(BaseAnatomy):
Args:
project_name (str): Name of project.
local_settings (Optional[dict[str, Any]]): Prepared local settings.
Returns:
Union[Dict[str, str], None]): Local root overrides.
@ -488,11 +484,6 @@ class Anatomy(BaseAnatomy):
should be returned.
"""
# Local settings may be used more than once or may not be used at all
# - to avoid slowdowns 'get_local_settings' is not called until it's
# really needed
local_settings = None
# First check if sync server is available and enabled
sync_server = cls.get_sync_server_addon()
if sync_server is None or not sync_server.enabled:
@ -503,11 +494,8 @@ class Anatomy(BaseAnatomy):
# Use sync server to receive active site name
project_cache = cls._default_site_id_cache[project_name]
if project_cache.is_outdated:
local_settings = get_local_settings()
project_cache.update_data(
sync_server.get_active_site_type(
project_name, local_settings
)
sync_server.get_active_site_type(project_name)
)
site_name = project_cache.data
@ -517,12 +505,12 @@ class Anatomy(BaseAnatomy):
# Handle studio root overrides without sync server
# - studio root overrides can be done even without sync server
roots_overrides = cls._get_studio_roots_overrides(
project_name, local_settings
project_name
)
else:
# Ask sync server to get roots overrides
roots_overrides = sync_server.get_site_root_overrides(
project_name, site_name, local_settings
project_name, site_name
)
site_cache.update_data(roots_overrides)
return site_cache.data

View file

@ -1018,7 +1018,7 @@ def _get_imageio_settings(project_settings, host_name):
tuple[dict, dict]: image io settings for global and host
"""
# get image io from global and host_name
imageio_global = project_settings["global"]["imageio"]
imageio_global = project_settings["core"]["imageio"]
# host is optional, some might not have any settings
imageio_host = project_settings.get(host_name, {}).get("imageio", {})

View file

@ -208,8 +208,8 @@ def install_ayon_plugins(project_name=None, host_name=None):
platform_name = platform.system().lower()
project_plugins = (
project_settings
.get("global", {})
.get("project_plugins", {})
["core"]
["project_plugins"]
.get(platform_name)
) or []
for path in project_plugins:

View file

@ -54,7 +54,7 @@ class LegacyCreator(object):
)
global_type_settings = (
project_settings
.get("global", {})
.get("core", {})
.get(plugin_type, {})
)
if not global_type_settings and not plugin_type_settings:

View file

@ -47,10 +47,10 @@ def get_subset_name_template(
if project_settings is None:
project_settings = get_project_settings(project_name)
tools_settings = project_settings["global"]["tools"]
profiles = tools_settings["creator"]["subset_name_profiles"]
tools_settings = project_settings["core"]["tools"]
profiles = tools_settings["creator"]["product_name_profiles"]
filtering_criteria = {
"families": family,
"product_types": family,
"hosts": host_name,
"tasks": task_name,
"task_types": task_type
@ -59,7 +59,19 @@ def get_subset_name_template(
matching_profile = filter_profiles(profiles, filtering_criteria)
template = None
if matching_profile:
template = matching_profile["template"]
# TODO remove formatting keys replacement
template = (
matching_profile["template"]
.replace("{task[name]}", "{task}")
.replace("{Task[name]}", "{Task}")
.replace("{TASK[NAME]}", "{TASK}")
.replace("{product[type]}", "{family}")
.replace("{Product[type]}", "{Family}")
.replace("{PRODUCT[TYPE]}", "{FAMILY}")
.replace("{folder[name]}", "{asset}")
.replace("{Folder[name]}", "{Asset}")
.replace("{FOLDER[NAME]}", "{ASSET}")
)
# Make sure template is set (matching may have empty string)
if not template:
@ -82,9 +94,9 @@ def get_subset_name(
"""Calculate subset name based on passed context and OpenPype settings.
Subst name templates are defined in `project_settings/global/tools/creator
/subset_name_profiles` where are profiles with host name, family, task name
and task type filters. If context does not match any profile then
`DEFAULT_SUBSET_TEMPLATE` is used as default template.
/product_name_profiles` where are profiles with host name, family,
task name and task type filters. If context does not match any profile
then `DEFAULT_SUBSET_TEMPLATE` is used as default template.
That's main reason why so many arguments are required to calculate subset
name.

View file

@ -47,7 +47,7 @@ class LoaderPlugin(list):
)
global_type_settings = (
project_settings
.get("global", {})
.get("core", {})
.get(plugin_type, {})
)
if not global_type_settings and not plugin_type_settings:

View file

@ -104,7 +104,7 @@ def _list_path_items(folder_structure):
def get_project_basic_paths(project_name):
project_settings = get_project_settings(project_name)
folder_structure = (
project_settings["global"]["project_folder_structure"]
project_settings["core"]["project_folder_structure"]
)
if not folder_structure:
return []

View file

@ -60,7 +60,7 @@ def get_template_name_profiles(
return copy.deepcopy(
project_settings
["global"]
["core"]
["tools"]
["publish"]
["template_name_profiles"]
@ -95,7 +95,7 @@ def get_hero_template_name_profiles(
return copy.deepcopy(
project_settings
["global"]
["core"]
["tools"]
["publish"]
["hero_template_name_profiles"]
@ -138,7 +138,7 @@ def get_publish_template_name(
template = None
filter_criteria = {
"hosts": host_name,
"families": family,
"product_types": family,
"task_names": task_name,
"task_types": task_type,
}
@ -383,7 +383,7 @@ def get_plugin_settings(plugin, project_settings, log, category=None):
# TODO: change after all plugins are moved one level up
if category_from_file in ("ayon_core", "openpype"):
category_from_file = "global"
category_from_file = "core"
try:
return (
@ -744,7 +744,7 @@ def get_custom_staging_dir_info(project_name, host_name, family, task_name,
ValueError - if misconfigured template should be used
"""
settings = project_settings or get_project_settings(project_name)
custom_staging_dir_profiles = (settings["global"]
custom_staging_dir_profiles = (settings["core"]
["tools"]
["publish"]
["custom_staging_dir_profiles"])

View file

@ -16,7 +16,7 @@ def get_versioning_start(
project_settings = get_project_settings(project_name)
version_start = 1
settings = project_settings["global"]
settings = project_settings["core"]
profiles = settings.get("version_start_category", {}).get("profiles", [])
if not profiles:

View file

@ -64,7 +64,7 @@ def is_workfile_lock_enabled(host_name, project_name, project_setting=None):
project_setting = get_project_settings(project_name)
workfile_lock_profiles = (
project_setting
["global"]
["core"]
["tools"]
["Workfiles"]
["workfile_lock_profiles"])

View file

@ -72,7 +72,7 @@ def get_workfile_template_key(
try:
profiles = (
project_settings
["global"]
["core"]
["tools"]
["Workfiles"]
["workfile_template_profiles"]
@ -507,7 +507,7 @@ def create_workdir_extra_folders(
# Load extra folders profiles
extra_folders_profiles = (
project_settings["global"]["tools"]["Workfiles"]["extra_folders"]
project_settings["core"]["tools"]["Workfiles"]["extra_folders"]
)
# Skip if are empty
if not extra_folders_profiles:

View file

@ -40,7 +40,7 @@ class CleanUp(pyblish.api.InstancePlugin):
active = True
# Presets
paterns = None # list of regex paterns
patterns = None # list of regex patterns
remove_temp_renders = True
def process(self, instance):
@ -115,10 +115,10 @@ class CleanUp(pyblish.api.InstancePlugin):
src = os.path.normpath(src)
dest = os.path.normpath(dest)
# add src dir into clearing dir paths (regex paterns)
# add src dir into clearing dir paths (regex patterns)
transfers_dirs.append(os.path.dirname(src))
# add dest dir into clearing dir paths (regex paterns)
# add dest dir into clearing dir paths (regex patterns)
transfers_dirs.append(os.path.dirname(dest))
if src in skip_cleanup_filepaths:
@ -141,13 +141,13 @@ class CleanUp(pyblish.api.InstancePlugin):
# add dir for cleanup
dirnames.append(os.path.dirname(src))
# clean by regex paterns
# clean by regex patterns
# make unique set
transfers_dirs = set(transfers_dirs)
self.log.debug("__ transfers_dirs: `{}`".format(transfers_dirs))
self.log.debug("__ self.paterns: `{}`".format(self.paterns))
if self.paterns:
self.log.debug("__ self.patterns: `{}`".format(self.patterns))
if self.patterns:
files = list()
# get list of all available content of dirs
for _dir in transfers_dirs:
@ -159,14 +159,14 @@ class CleanUp(pyblish.api.InstancePlugin):
self.log.debug("__ files: `{}`".format(files))
# remove all files which match regex patern
# remove all files which match regex pattern
for f in files:
if os.path.normpath(f) in skip_cleanup_filepaths:
continue
for p in self.paterns:
patern = re.compile(p)
if not patern.findall(f):
for p in self.patterns:
pattern = re.compile(p)
if not pattern.findall(f):
continue
if not os.path.exists(f):
continue

View file

@ -43,7 +43,7 @@ class CollectInstanceCommentDef(
@classmethod
def apply_settings(cls, project_setting, _):
plugin_settings = project_setting["global"]["publish"].get(
plugin_settings = project_setting["core"]["publish"].get(
"collect_comment_per_instance"
)
if not plugin_settings:

View file

@ -65,8 +65,8 @@ class ExtractBurnin(publish.Extractor):
# Default options for burnins for cases that are not set in presets.
default_options = {
"font_size": 42,
"font_color": [255, 255, 255, 255],
"bg_color": [0, 0, 0, 127],
"font_color": [255, 255, 255, 1.0],
"bg_color": [0, 0, 0, 0.5],
"bg_padding": 5,
"x_offset": 5,
"y_offset": 5
@ -96,7 +96,20 @@ class ExtractBurnin(publish.Extractor):
instance.data["representations"].remove(repre)
def _get_burnins_per_representations(self, instance, src_burnin_defs):
self.log.debug("Filtering of representations and their burnins starts")
"""
Args:
instance (pyblish.api.Instance): Pyblish instance.
src_burnin_defs (list): Burnin definitions.
Returns:
list[tuple[dict, list]]: List of tuples containing representation
and its burnin definitions.
"""
self.log.debug(
"Filtering of representations and their burnins starts"
)
filtered_repres = []
repres = instance.data.get("representations") or []
@ -111,16 +124,13 @@ class ExtractBurnin(publish.Extractor):
)
burnin_defs = copy.deepcopy(src_burnin_defs)
self.log.debug(
"burnin_defs.keys(): {}".format(burnin_defs.keys())
)
# Filter output definition by `burnin` represetation key
repre_linked_burnins = {
name: output
for name, output in burnin_defs.items()
if name in repre_burnin_links
}
repre_linked_burnins = [
burnin_def
for burnin_def in burnin_defs
if burnin_def["name"] in repre_burnin_links
]
self.log.debug(
"repre_linked_burnins: {}".format(repre_linked_burnins)
)
@ -154,19 +164,21 @@ class ExtractBurnin(publish.Extractor):
filtering_criteria = {
"hosts": host_name,
"families": family,
"product_types": family,
"product_names": subset,
"task_names": task_name,
"task_types": task_type,
"subset": subset
}
profile = filter_profiles(self.profiles, filtering_criteria,
logger=self.log)
profile = filter_profiles(
self.profiles,
filtering_criteria,
logger=self.log
)
if not profile:
self.log.debug((
"Skipped instance. None of profiles in presets are for"
" Host: \"{}\" | Families: \"{}\" | Task \"{}\""
" | Task type \"{}\" | Subset \"{}\" "
" Host: \"{}\" | Product type: \"{}\" | Task name \"{}\""
" | Task type \"{}\" | Product name \"{}\" "
).format(host_name, family, task_name, task_type, subset))
return
@ -175,7 +187,7 @@ class ExtractBurnin(publish.Extractor):
if not burnin_defs:
self.log.debug((
"Skipped instance. Burnin definitions are not set for profile"
" Host: \"{}\" | Families: \"{}\" | Task \"{}\""
" Host: \"{}\" | Product type: \"{}\" | Task name \"{}\""
" | Profile \"{}\""
).format(host_name, family, task_name, profile))
return
@ -275,7 +287,8 @@ class ExtractBurnin(publish.Extractor):
# it in review?
# burnin_data["fps"] = fps
for filename_suffix, burnin_def in repre_burnin_defs.items():
for burnin_def in repre_burnin_defs:
filename_suffix = burnin_def["name"]
new_repre = copy.deepcopy(repre)
new_repre["stagingDir"] = src_repre_staging_dir
@ -288,16 +301,28 @@ class ExtractBurnin(publish.Extractor):
burnin_values = {}
for key in self.positions:
value = burnin_def.get(key)
if value:
burnin_values[key] = value.replace(
"{task}", "{task[name]}"
)
if not value:
continue
# TODO remove replacements
burnin_values[key] = (
value
.replace("{task}", "{task[name]}")
.replace("{product[name]}", "{subset}")
.replace("{Product[name]}", "{Subset}")
.replace("{PRODUCT[NAME]}", "{SUBSET}")
.replace("{product[type]}", "{family}")
.replace("{Product[type]}", "{Family}")
.replace("{PRODUCT[TYPE]}", "{FAMILY}")
.replace("{folder[name]}", "{asset}")
.replace("{Folder[name]}", "{Asset}")
.replace("{FOLDER[NAME]}", "{ASSET}")
)
# Remove "delete" tag from new representation
if "delete" in new_repre["tags"]:
new_repre["tags"].remove("delete")
if len(repre_burnin_defs.keys()) > 1:
if len(repre_burnin_defs) > 1:
# Update name and outputName to be
# able have multiple outputs in case of more burnin presets
# Join previous "outputName" with filename suffix
@ -401,8 +426,7 @@ class ExtractBurnin(publish.Extractor):
bg_color_hex = "#{0:0>2X}{1:0>2X}{2:0>2X}".format(
bg_red, bg_green, bg_blue
)
bg_color_alpha = float(bg_alpha) / 255
burnin_options["bg_opacity"] = bg_color_alpha
burnin_options["bg_opacity"] = bg_alpha
burnin_options["bg_color"] = bg_color_hex
# FG Color
@ -412,8 +436,7 @@ class ExtractBurnin(publish.Extractor):
fg_color_hex = "#{0:0>2X}{1:0>2X}{2:0>2X}".format(
fg_red, fg_green, fg_blue
)
fg_color_alpha = float(fg_alpha) / 255
burnin_options["opacity"] = fg_color_alpha
burnin_options["opacity"] = fg_alpha
burnin_options["font_color"] = fg_color_hex
# Define font filepath
@ -543,15 +566,16 @@ class ExtractBurnin(publish.Extractor):
Burnin definitions without tags filter are marked as valid.
Args:
outputs (list): Contain list of burnin definitions from presets.
burnin_defs (list): Burnin definitions.
tags (list): Tags of processed representation.
Returns:
list: Containg all burnin definitions matching entered tags.
"""
filtered_burnins = {}
filtered_burnins = []
repre_tags_low = set(tag.lower() for tag in tags)
for filename_suffix, burnin_def in burnin_defs.items():
for burnin_def in burnin_defs:
valid = True
tag_filters = burnin_def["filter"]["tags"]
if tag_filters:
@ -561,8 +585,7 @@ class ExtractBurnin(publish.Extractor):
valid = bool(repre_tags_low & tag_filters_low)
if valid:
filtered_burnins[filename_suffix] = burnin_def
filtered_burnins.append(burnin_def)
return filtered_burnins
def input_output_paths(
@ -724,7 +747,7 @@ class ExtractBurnin(publish.Extractor):
Returns:
list: Containg all valid output definitions.
"""
filtered_burnin_defs = {}
filtered_burnin_defs = []
burnin_defs = profile.get("burnins")
if not burnin_defs:
@ -732,13 +755,11 @@ class ExtractBurnin(publish.Extractor):
families = self.families_from_instance(instance)
for filename_suffix, orig_burnin_def in burnin_defs.items():
for orig_burnin_def in burnin_defs:
burnin_def = copy.deepcopy(orig_burnin_def)
def_filter = burnin_def.get("filter", None) or {}
for key in ("families", "tags"):
if key not in def_filter:
def_filter[key] = []
filename_suffix = burnin_def["name"]
def_filter = burnin_def["filter"]
families_filters = def_filter["families"]
if not self.families_filter_validation(
families, families_filters
@ -752,10 +773,13 @@ class ExtractBurnin(publish.Extractor):
continue
# Burnin values
new_burnin_def = {}
burnin_values = {}
for key, value in tuple(burnin_def.items()):
key_low = key.lower()
if key_low in self.positions and value:
if key_low not in self.positions:
new_burnin_def[key] = value
elif value:
burnin_values[key_low] = value
# Skip processing if burnin values are not set
@ -767,9 +791,9 @@ class ExtractBurnin(publish.Extractor):
).format(filename_suffix, str(orig_burnin_def)))
continue
burnin_values["filter"] = def_filter
new_burnin_def.update(burnin_values)
filtered_burnin_defs[filename_suffix] = burnin_values
filtered_burnin_defs.append(new_burnin_def)
self.log.debug((
"Burnin definition \"{}\" passed first filtering."

View file

@ -81,6 +81,7 @@ class ExtractOIIOTranscode(publish.Extractor):
if not profile:
return
profile_output_defs = profile["outputs"]
new_representations = []
repres = instance.data["representations"]
for idx, repre in enumerate(list(repres)):
@ -98,7 +99,8 @@ class ExtractOIIOTranscode(publish.Extractor):
self.log.warning("Config file doesn't exist, skipping")
continue
for output_name, output_def in profile.get("outputs", {}).items():
for output_def in profile_output_defs:
output_name = output_def["name"]
new_repre = copy.deepcopy(repre)
original_staging_dir = new_repre["stagingDir"]
@ -318,10 +320,10 @@ class ExtractOIIOTranscode(publish.Extractor):
subset = instance.data["subset"]
filtering_criteria = {
"hosts": host_name,
"families": family,
"product_types": family,
"product_names": subset,
"task_names": task_name,
"task_types": task_type,
"subsets": subset
}
profile = filter_profiles(self.profiles, filtering_criteria,
logger=self.log)
@ -329,8 +331,8 @@ class ExtractOIIOTranscode(publish.Extractor):
if not profile:
self.log.debug((
"Skipped instance. None of profiles in presets are for"
" Host: \"{}\" | Families: \"{}\" | Task \"{}\""
" | Task type \"{}\" | Subset \"{}\" "
" Host: \"{}\" | Product types: \"{}\" | Task \"{}\""
" | Task type \"{}\" | Product names: \"{}\" "
).format(host_name, family, task_name, task_type, subset))
return profile

View file

@ -1280,14 +1280,11 @@ class ExtractReview(pyblish.api.InstancePlugin):
"FFprobe couldn't read resolution from input file: \"{}\""
).format(full_input_path_single_file))
# NOTE Setting only one of `width` or `heigth` is not allowed
# NOTE Setting only one of `width` or `height` is not allowed
# - settings value can't have None but has value of 0
output_width = (
output_def.get("output_width") or output_width or None
)
output_height = (
output_def.get("output_height") or output_height or None
)
output_width = output_def["width"] or output_width or None
output_height = output_def["height"] or output_height or None
# Force to use input resolution if output resolution was not defined
# in settings. Resolution from instance is not used when
# 'use_input_res' is set to 'True'.

View file

@ -42,15 +42,27 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
integrate_thumbnail = False
target_size = {
"type": "resize",
"width": 1920,
"height": 1080
"type": "source",
"resize": {
"width": 1920,
"height": 1080
}
}
background_color = None
background_color = (0, 0, 0, 0.0)
duration_split = 0.5
# attribute presets from settings
oiiotool_defaults = None
ffmpeg_args = None
oiiotool_defaults = {
"type": "colorspace",
"colorspace": "color_picking",
"display_and_view": {
"display": "default",
"view": "sRGB"
}
}
ffmpeg_args = {
"input": [],
"output": []
}
product_names = []
def process(self, instance):
@ -369,7 +381,6 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
repre_display = colorspace_data.get("display")
repre_view = colorspace_data.get("view")
oiio_default_type = None
oiio_default_display = None
oiio_default_view = None
oiio_default_colorspace = None
@ -387,11 +398,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
# oiiotool_defaults
elif self.oiiotool_defaults:
oiio_default_type = self.oiiotool_defaults["type"]
if "colorspace" in oiio_default_type:
if "colorspace" == oiio_default_type:
oiio_default_colorspace = self.oiiotool_defaults["colorspace"]
else:
oiio_default_display = self.oiiotool_defaults["display"]
oiio_default_view = self.oiiotool_defaults["view"]
display_and_view = self.oiiotool_defaults["display_and_view"]
oiio_default_display = display_and_view["display"]
oiio_default_view = display_and_view["view"]
try:
convert_colorspace(
@ -507,11 +519,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
input_path,
):
# get settings
if self.target_size.get("type") == "source":
if self.target_size["type"] == "source":
return []
target_width = self.target_size["width"]
target_height = self.target_size["height"]
resize = self.target_size["resize"]
target_width = resize["width"]
target_height = resize["height"]
# form arg string per application
return get_rescaled_command_arguments(

View file

@ -17,24 +17,24 @@ from ayon_core.lib import (
)
class IntegrateSubsetGroup(pyblish.api.InstancePlugin):
class IntegrateProductGroup(pyblish.api.InstancePlugin):
"""Integrate Subset Group for publish."""
# Run after CollectAnatomyInstanceData
order = pyblish.api.IntegratorOrder - 0.1
label = "Subset Group"
label = "Product Group"
# Attributes set by settings
subset_grouping_profiles = None
product_grouping_profiles = None
def process(self, instance):
"""Look into subset group profiles set by settings.
Attribute 'subset_grouping_profiles' is defined by settings.
Attribute 'product_grouping_profiles' is defined by settings.
"""
# Skip if 'subset_grouping_profiles' is empty
if not self.subset_grouping_profiles:
# Skip if 'product_grouping_profiles' is empty
if not self.product_grouping_profiles:
return
if instance.data.get("subsetGroup"):
@ -47,7 +47,7 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin):
# Skip if there is no matching profile
filter_criteria = self.get_profile_filter_criteria(instance)
profile = filter_profiles(
self.subset_grouping_profiles,
self.product_grouping_profiles,
filter_criteria,
logger=self.log
)
@ -58,7 +58,7 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin):
template = profile["template"]
fill_pairs = prepare_template_data({
"family": filter_criteria["families"],
"family": filter_criteria["product_types"],
"task": filter_criteria["tasks"],
"host": filter_criteria["hosts"],
"subset": instance.data["subset"],
@ -91,7 +91,7 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin):
# Return filter criteria
return {
"families": anatomy_data["family"],
"product_types": anatomy_data["family"],
"tasks": task.get("name"),
"hosts": instance.context.data["hostName"],
"task_types": task.get("type")

View file

@ -7,7 +7,6 @@ from .lib import (
get_system_settings,
get_project_settings,
get_current_project_settings,
get_local_settings,
)
from .ayon_settings import get_ayon_settings
@ -20,7 +19,6 @@ __all__ = (
"get_system_settings",
"get_project_settings",
"get_current_project_settings",
"get_local_settings",
"get_ayon_settings",
)

View file

@ -381,178 +381,6 @@ def _convert_royalrender_project_settings(ayon_settings, output):
}
def _convert_global_project_settings(ayon_settings, output, default_settings):
    """Convert AYON 'core' project settings to the legacy 'global' layout.

    Mutates nested structures of ``ayon_settings["core"]`` in place to match
    the older OpenPype settings schema (key renames such as
    'product_types' -> 'families', 'product_names' -> 'subsets', template
    placeholder renames, list-of-items -> dict conversions) and stores the
    result under ``output["global"]``.

    Args:
        ayon_settings (dict): Project settings in AYON layout. If the "core"
            key is missing, nothing is converted.
        output (dict): Output settings in legacy layout; "global" key is
            filled by this function.
        default_settings (dict): Legacy default settings used as a fallback
            source (only "sync_server" is taken from them here).
    """
    if "core" not in ayon_settings:
        # Nothing to convert without the AYON core settings category
        return

    ayon_core = ayon_settings["core"]

    # Publish conversion
    ayon_publish = ayon_core["publish"]

    # ExtractThumbnail plugin
    ayon_extract_thumbnail = ayon_publish["ExtractThumbnail"]
    # fix display and view at oiio defaults
    # - AYON nests display/view under 'display_and_view'; legacy layout
    #   expects them as flat keys next to 'type' and 'colorspace'
    ayon_default_oiio = copy.deepcopy(
        ayon_extract_thumbnail["oiiotool_defaults"])
    display_and_view = ayon_default_oiio.pop("display_and_view")
    ayon_default_oiio["display"] = display_and_view["display"]
    ayon_default_oiio["view"] = display_and_view["view"]
    ayon_extract_thumbnail["oiiotool_defaults"] = ayon_default_oiio

    # fix target size
    # - AYON nests width/height under 'resize'; legacy expects flat keys
    ayon_default_resize = copy.deepcopy(ayon_extract_thumbnail["target_size"])
    resize = ayon_default_resize.pop("resize")
    ayon_default_resize["width"] = resize["width"]
    ayon_default_resize["height"] = resize["height"]
    ayon_extract_thumbnail["target_size"] = ayon_default_resize

    # fix background color
    ayon_extract_thumbnail["background_color"] = _convert_color(
        ayon_extract_thumbnail["background_color"]
    )

    # ExtractOIIOTranscode plugin
    # - convert list of output items into a dict keyed by output name,
    #   deduplicating repeated names with a numeric suffix
    extract_oiio_transcode = ayon_publish["ExtractOIIOTranscode"]
    extract_oiio_transcode_profiles = extract_oiio_transcode["profiles"]
    for profile in extract_oiio_transcode_profiles:
        new_outputs = {}
        name_counter = {}
        if "product_names" in profile:
            profile["subsets"] = profile.pop("product_names")
        for profile_output in profile["outputs"]:
            if "name" in profile_output:
                name = profile_output.pop("name")
            else:
                # Backwards compatibility for setting without 'name' in model
                name = profile_output["extension"]

            if name in new_outputs:
                # Duplicate output name -> append an increasing counter
                name_counter[name] += 1
                name = "{}_{}".format(name, name_counter[name])
            else:
                name_counter[name] = 0
            new_outputs[name] = profile_output
        profile["outputs"] = new_outputs

    # Extract Burnin plugin
    extract_burnin = ayon_publish["ExtractBurnin"]
    extract_burnin_options = extract_burnin["options"]
    for color_key in ("font_color", "bg_color"):
        extract_burnin_options[color_key] = _convert_color(
            extract_burnin_options[color_key]
        )

    for profile in extract_burnin["profiles"]:
        extract_burnin_defs = profile["burnins"]
        if "product_names" in profile:
            profile["subsets"] = profile.pop("product_names")
            profile["families"] = profile.pop("product_types")

        for burnin_def in extract_burnin_defs:
            # Translate AYON template placeholders back to legacy ones
            # in every burnin position value
            for key in (
                "TOP_LEFT",
                "TOP_CENTERED",
                "TOP_RIGHT",
                "BOTTOM_LEFT",
                "BOTTOM_CENTERED",
                "BOTTOM_RIGHT",
            ):
                burnin_def[key] = (
                    burnin_def[key]
                    .replace("{product[name]}", "{subset}")
                    .replace("{Product[name]}", "{Subset}")
                    .replace("{PRODUCT[NAME]}", "{SUBSET}")
                    .replace("{product[type]}", "{family}")
                    .replace("{Product[type]}", "{Family}")
                    .replace("{PRODUCT[TYPE]}", "{FAMILY}")
                    .replace("{folder[name]}", "{asset}")
                    .replace("{Folder[name]}", "{Asset}")
                    .replace("{FOLDER[NAME]}", "{ASSET}")
                )

        # Burnin definitions: list of items -> dict keyed by 'name'
        profile["burnins"] = {
            extract_burnin_def.pop("name"): extract_burnin_def
            for extract_burnin_def in extract_burnin_defs
        }

    # IntegrateProductGroup (AYON) -> IntegrateSubsetGroup (legacy)
    if "IntegrateProductGroup" in ayon_publish:
        subset_group = ayon_publish.pop("IntegrateProductGroup")
        subset_group_profiles = subset_group.pop("product_grouping_profiles")
        for profile in subset_group_profiles:
            profile["families"] = profile.pop("product_types")
        subset_group["subset_grouping_profiles"] = subset_group_profiles
        ayon_publish["IntegrateSubsetGroup"] = subset_group

    # Cleanup plugin
    ayon_cleanup = ayon_publish["CleanUp"]
    if "patterns" in ayon_cleanup:
        # NOTE legacy key is intentionally misspelled 'paterns'
        ayon_cleanup["paterns"] = ayon_cleanup.pop("patterns")

    # Project root settings - json string to dict
    ayon_core["project_environments"] = json.loads(
        ayon_core["project_environments"]
    )
    # Normalize json string (parse and re-dump)
    ayon_core["project_folder_structure"] = json.dumps(json.loads(
        ayon_core["project_folder_structure"]
    ))

    # Tools settings
    ayon_tools = ayon_core["tools"]
    ayon_create_tool = ayon_tools["creator"]

    if "product_name_profiles" in ayon_create_tool:
        product_name_profiles = ayon_create_tool.pop("product_name_profiles")
        for profile in product_name_profiles:
            profile["families"] = profile.pop("product_types")
        ayon_create_tool["subset_name_profiles"] = product_name_profiles

    for profile in ayon_create_tool["subset_name_profiles"]:
        # Translate AYON template placeholders back to legacy ones
        template = profile["template"]
        profile["template"] = (
            template
            .replace("{task[name]}", "{task}")
            .replace("{Task[name]}", "{Task}")
            .replace("{TASK[NAME]}", "{TASK}")
            .replace("{product[type]}", "{family}")
            .replace("{Product[type]}", "{Family}")
            .replace("{PRODUCT[TYPE]}", "{FAMILY}")
            .replace("{folder[name]}", "{asset}")
            .replace("{Folder[name]}", "{Asset}")
            .replace("{FOLDER[NAME]}", "{ASSET}")
        )

    # Smart select: support both old and new AYON key names
    product_smart_select_key = "families_smart_select"
    if "product_types_smart_select" in ayon_create_tool:
        product_smart_select_key = "product_types_smart_select"

    # List of items -> dict mapping name to task names
    new_smart_select_families = {
        item["name"]: item["task_names"]
        for item in ayon_create_tool.pop(product_smart_select_key)
    }
    ayon_create_tool["families_smart_select"] = new_smart_select_families

    ayon_loader_tool = ayon_tools["loader"]
    if "product_type_filter_profiles" in ayon_loader_tool:
        product_type_filter_profiles = (
            ayon_loader_tool.pop("product_type_filter_profiles"))
        for profile in product_type_filter_profiles:
            profile["filter_families"] = profile.pop("filter_product_types")
        ayon_loader_tool["family_filter_profiles"] = (
            product_type_filter_profiles)

    ayon_publish_tool = ayon_tools["publish"]
    for profile in ayon_publish_tool["hero_template_name_profiles"]:
        if "product_types" in profile:
            profile["families"] = profile.pop("product_types")

    for profile in ayon_publish_tool["template_name_profiles"]:
        if "product_types" in profile:
            profile["families"] = profile.pop("product_types")

    # Sync server has no AYON equivalent here; take legacy defaults
    ayon_core["sync_server"] = (
        default_settings["global"]["sync_server"]
    )
    output["global"] = ayon_core
def convert_project_settings(ayon_settings, default_settings):
default_settings = copy.deepcopy(default_settings)
output = {}
@ -562,8 +390,6 @@ def convert_project_settings(ayon_settings, default_settings):
_convert_royalrender_project_settings(ayon_settings, output)
_convert_global_project_settings(ayon_settings, output, default_settings)
for key, value in ayon_settings.items():
if key not in output:
output[key] = value

View file

@ -48,11 +48,6 @@ def clear_metadata_from_settings(values):
clear_metadata_from_settings(item)
def get_local_settings():
    """Return local settings.

    AYON does not have an implementation of local settings yet, so an
    empty dictionary is always returned.

    Returns:
        dict: Always empty.
    """

    # TODO add AYON implementation of local settings
    return {}
def load_openpype_default_settings():
"""Load openpype default settings."""
return load_jsons_from_dir(DEFAULTS_DIR)
@ -203,39 +198,17 @@ def merge_overrides(source_dict, override_dict):
def get_site_local_overrides(project_name, site_name, local_settings=None):
"""Site overrides from local settings for passed project and site name.
Deprecated:
This function is not implemented for AYON and will be removed.
Args:
project_name (str): For which project are overrides.
site_name (str): For which site are overrides needed.
local_settings (dict): Preloaded local settings. They are loaded
automatically if not passed.
"""
# Check if local settings were passed
if local_settings is None:
local_settings = get_local_settings()
output = {}
# Skip if local settings are empty
if not local_settings:
return output
local_project_settings = local_settings.get("projects") or {}
# Prepare overrides for entered project and for default project
project_locals = None
if project_name:
project_locals = local_project_settings.get(project_name)
default_project_locals = local_project_settings.get(DEFAULT_PROJECT_KEY)
# First load and use local settings from default project
if default_project_locals and site_name in default_project_locals:
output.update(default_project_locals[site_name])
# Apply project specific local settings if there are any
if project_locals and site_name in project_locals:
output.update(project_locals[site_name])
return output
return {}
def get_current_project_settings():

View file

@ -377,23 +377,25 @@ class CreatorWindow(QtWidgets.QDialog):
self._creators_model.reset()
pype_project_setting = (
product_types_smart_select = (
get_current_project_settings()
["global"]
["tools"]
["creator"]
["families_smart_select"]
["product_types_smart_select"]
)
current_index = None
family = None
task_name = get_current_task_name() or None
lowered_task_name = task_name.lower()
if task_name:
for _family, _task_names in pype_project_setting.items():
_low_task_names = {name.lower() for name in _task_names}
for smart_item in product_types_smart_select:
_low_task_names = {
name.lower() for name in smart_item["task_names"]
}
for _task_name in _low_task_names:
if _task_name in lowered_task_name:
family = _family
family = smart_item["name"]
break
if family:
break

View file

@ -1,5 +1,4 @@
import os
from ayon_core.settings import get_local_settings
# Constant key under which local settings are stored
LOCAL_EXPERIMENTAL_KEY = "experimental_tools"
@ -155,7 +154,10 @@ class ExperimentalTools:
def refresh_availability(self):
"""Reload local settings and check if any tool changed ability."""
local_settings = get_local_settings()
# NOTE AYON does not have implemented settings for experimental
# tools.
local_settings = {}
experimental_settings = (
local_settings.get(LOCAL_EXPERIMENTAL_KEY)
) or {}

View file

@ -208,7 +208,7 @@ class Controller(QtCore.QObject):
if not presets:
return {}
result = presets.get("global", {}).get("filters", {})
result = presets.get("core", {}).get("filters", {})
hosts = pyblish.api.registered_hosts()
for host in hosts:
host_presets = presets.get(host, {}).get("filters")