Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)
Merge pull request #74 from ynput/enhancement/OP-8206_Core-use-AYON-settings
Core: Use AYON settings
This commit is contained in commit 8f8358e1f7
25 changed files with 182 additions and 302 deletions
@@ -194,13 +194,13 @@ class RenderCreator(Creator):
     name into created subset name.

     Position of composition name could be set in
-    `project_settings/global/tools/creator/subset_name_profiles` with some
-    form of '{composition}' placeholder.
+    `project_settings/global/tools/creator/product_name_profiles` with
+    some form of '{composition}' placeholder.

     Composition name will be used implicitly if multiple composition should
     be handled at same time.

-    If {composition} placeholder is not used in 'subset_name_profiles'
+    If {composition} placeholder is not used in 'product_name_profiles'
     composition name will be capitalized and set at the end of subset name
     if necessary.

@@ -493,7 +493,7 @@ def get_colorspace_from_node(node):
 def get_review_presets_config():
     settings = get_current_project_settings()
     review_profiles = (
-        settings["global"]
+        settings["core"]
         ["publish"]
         ["ExtractReview"]
         ["profiles"]

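The recurring change in this pull request is the move of the settings root key from the legacy "global" to AYON's "core". A minimal sketch of a tolerant lookup follows; `get_core_settings` is a hypothetical helper written for illustration, not part of the PR, and the dict shape is assumed from the hunk above.

    def get_core_settings(project_settings):
        # Hypothetical helper: prefer AYON's "core" key and fall back to the
        # legacy OpenPype "global" key when an old settings dict is passed in.
        if "core" in project_settings:
            return project_settings["core"]
        return project_settings.get("global", {})

    # Usage with a dict shaped like the settings accessed above (assumed shape).
    project_settings = {"core": {"publish": {"ExtractReview": {"profiles": []}}}}
    profiles = (
        get_core_settings(project_settings)["publish"]["ExtractReview"]["profiles"]
    )
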
@@ -209,8 +209,8 @@ class ImageCreator(Creator):

     'Use layer name in subset' will explicitly add layer name into subset
     name. Position of this name is configurable in
-    `project_settings/global/tools/creator/subset_name_profiles`.
-    If layer placeholder ({layer}) is not used in `subset_name_profiles`
+    `project_settings/global/tools/creator/product_name_profiles`.
+    If layer placeholder ({layer}) is not used in `product_name_profiles`
     but layer name should be used (set explicitly in UI or implicitly if
     multiple images should be created), it is added in capitalized form
     as a suffix to subset name.

@@ -1656,8 +1656,9 @@ def apply_project_environments_value(
     if project_settings is None:
         project_settings = get_project_settings(project_name)

-    env_value = project_settings["global"]["project_environments"]
+    env_value = project_settings["core"]["project_environments"]
     if env_value:
+        env_value = json.loads(env_value)
         parsed_value = parse_environments(env_value, env_group)
         env.update(acre.compute(
             _merge_env(parsed_value, env),

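The added `json.loads` call reflects that AYON stores `core/project_environments` as a JSON-encoded string rather than a dict. A small sketch under that assumption (the example path is illustrative only):

    import json

    # Assumed shape of the AYON setting: a JSON string of environment variables.
    project_settings = {
        "core": {"project_environments": '{"OCIO": "/path/config.ocio"}'}
    }

    env_value = project_settings["core"]["project_environments"]
    if env_value:
        env_value = json.loads(env_value)  # -> {"OCIO": "/path/config.ocio"}
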
@@ -1916,7 +1917,7 @@ def should_start_last_workfile(
     project_settings = get_project_settings(project_name)
     profiles = (
         project_settings
-        ["global"]
+        ["core"]
         ["tools"]
         ["Workfiles"]
         ["last_workfile_on_startup"]

@@ -1966,7 +1967,7 @@ def should_workfile_tool_start(
     project_settings = get_project_settings(project_name)
     profiles = (
         project_settings
-        ["global"]
+        ["core"]
         ["tools"]
         ["Workfiles"]
         ["open_workfile_tool_on_startup"]

@@ -1385,23 +1385,26 @@ def _get_image_dimensions(application, input_path, log):

 def convert_color_values(application, color_value):
     """Get color mapping for ffmpeg and oiiotool.

     Args:
         application (str): Application for which command should be created.
-        color_value (list[int]): List of 8bit int values for RGBA.
+        color_value (tuple[int, int, int, float]): List of 8bit int values
+            for RGBA.

     Returns:
         str: ffmpeg returns hex string, oiiotool is string with floats.

     """
     red, green, blue, alpha = color_value

     if application == "ffmpeg":
         return "{0:0>2X}{1:0>2X}{2:0>2X}@{3}".format(
-            red, green, blue, (alpha / 255.0)
+            red, green, blue, alpha
         )
     elif application == "oiiotool":
         red = float(red / 255)
         green = float(green / 255)
         blue = float(blue / 255)
-        alpha = float(alpha / 255)

         return "{0:.3f},{1:.3f},{2:.3f},{3:.3f}".format(
             red, green, blue, alpha)

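The color convention in settings changes from four 8-bit integers to RGB 8-bit integers plus a float alpha in 0.0-1.0, so the ffmpeg branch no longer divides alpha by 255. A short expectation sketch, derived only from the format strings visible in the hunk above:

    # New color convention: RGB 0-255, alpha 0.0-1.0.
    red, green, blue, alpha = (255, 128, 0, 0.5)

    ffmpeg_value = "{0:0>2X}{1:0>2X}{2:0>2X}@{3}".format(red, green, blue, alpha)
    # -> "FF8000@0.5"

    oiio_value = "{0:.3f},{1:.3f},{2:.3f},{3:.3f}".format(
        red / 255, green / 255, blue / 255, alpha
    )
    # -> "1.000,0.502,0.000,0.500"
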
@@ -1018,7 +1018,7 @@ def _get_imageio_settings(project_settings, host_name):
         tuple[dict, dict]: image io settings for global and host
     """
     # get image io from global and host_name
-    imageio_global = project_settings["global"]["imageio"]
+    imageio_global = project_settings["core"]["imageio"]
     # host is optional, some might not have any settings
     imageio_host = project_settings.get(host_name, {}).get("imageio", {})

@@ -208,8 +208,8 @@ def install_ayon_plugins(project_name=None, host_name=None):
     platform_name = platform.system().lower()
     project_plugins = (
         project_settings
-        .get("global", {})
-        .get("project_plugins", {})
+        ["core"]
+        ["project_plugins"]
         .get(platform_name)
     ) or []
     for path in project_plugins:

@@ -54,7 +54,7 @@ class LegacyCreator(object):
         )
         global_type_settings = (
             project_settings
-            .get("global", {})
+            .get("core", {})
             .get(plugin_type, {})
         )
         if not global_type_settings and not plugin_type_settings:

@@ -47,10 +47,10 @@ def get_subset_name_template(

     if project_settings is None:
         project_settings = get_project_settings(project_name)
-    tools_settings = project_settings["global"]["tools"]
-    profiles = tools_settings["creator"]["subset_name_profiles"]
+    tools_settings = project_settings["core"]["tools"]
+    profiles = tools_settings["creator"]["product_name_profiles"]
     filtering_criteria = {
-        "families": family,
+        "product_types": family,
         "hosts": host_name,
         "tasks": task_name,
         "task_types": task_type

@@ -59,7 +59,19 @@ def get_subset_name_template(
     matching_profile = filter_profiles(profiles, filtering_criteria)
     template = None
     if matching_profile:
-        template = matching_profile["template"]
+        # TODO remove formatting keys replacement
+        template = (
+            matching_profile["template"]
+            .replace("{task[name]}", "{task}")
+            .replace("{Task[name]}", "{Task}")
+            .replace("{TASK[NAME]}", "{TASK}")
+            .replace("{product[type]}", "{family}")
+            .replace("{Product[type]}", "{Family}")
+            .replace("{PRODUCT[TYPE]}", "{FAMILY}")
+            .replace("{folder[name]}", "{asset}")
+            .replace("{Folder[name]}", "{Asset}")
+            .replace("{FOLDER[NAME]}", "{ASSET}")
+        )

     # Make sure template is set (matching may have empty string)
     if not template:

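The replacement chain added above maps AYON-style template keys back to the legacy placeholder names that the rest of the pipeline still expects. A minimal illustration with an assumed example template:

    template = "{folder[name]}_{product[type]}_{task[name]}"
    legacy_template = (
        template
        .replace("{task[name]}", "{task}")
        .replace("{product[type]}", "{family}")
        .replace("{folder[name]}", "{asset}")
    )
    # -> "{asset}_{family}_{task}"
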
@@ -82,9 +94,9 @@ def get_subset_name(
     """Calculate subset name based on passed context and OpenPype settings.

     Subst name templates are defined in `project_settings/global/tools/creator
-    /subset_name_profiles` where are profiles with host name, family, task name
-    and task type filters. If context does not match any profile then
-    `DEFAULT_SUBSET_TEMPLATE` is used as default template.
+    /product_name_profiles` where are profiles with host name, family,
+    task name and task type filters. If context does not match any profile
+    then `DEFAULT_SUBSET_TEMPLATE` is used as default template.

     That's main reason why so many arguments are required to calculate subset
     name.

@@ -47,7 +47,7 @@ class LoaderPlugin(list):
         )
         global_type_settings = (
             project_settings
-            .get("global", {})
+            .get("core", {})
             .get(plugin_type, {})
         )
         if not global_type_settings and not plugin_type_settings:

@@ -104,7 +104,7 @@ def _list_path_items(folder_structure):
 def get_project_basic_paths(project_name):
     project_settings = get_project_settings(project_name)
     folder_structure = (
-        project_settings["global"]["project_folder_structure"]
+        project_settings["core"]["project_folder_structure"]
     )
     if not folder_structure:
         return []

@@ -60,7 +60,7 @@ def get_template_name_profiles(

     return copy.deepcopy(
         project_settings
-        ["global"]
+        ["core"]
         ["tools"]
         ["publish"]
         ["template_name_profiles"]

@@ -95,7 +95,7 @@ def get_hero_template_name_profiles(

     return copy.deepcopy(
         project_settings
-        ["global"]
+        ["core"]
         ["tools"]
         ["publish"]
         ["hero_template_name_profiles"]

@@ -138,7 +138,7 @@ def get_publish_template_name(
     template = None
     filter_criteria = {
         "hosts": host_name,
-        "families": family,
+        "product_types": family,
         "task_names": task_name,
         "task_types": task_type,
     }

@@ -383,7 +383,7 @@ def get_plugin_settings(plugin, project_settings, log, category=None):

     # TODO: change after all plugins are moved one level up
     if category_from_file in ("ayon_core", "openpype"):
-        category_from_file = "global"
+        category_from_file = "core"

     try:
         return (

@@ -744,7 +744,7 @@ def get_custom_staging_dir_info(project_name, host_name, family, task_name,
         ValueError - if misconfigured template should be used
     """
     settings = project_settings or get_project_settings(project_name)
-    custom_staging_dir_profiles = (settings["global"]
+    custom_staging_dir_profiles = (settings["core"]
                                    ["tools"]
                                    ["publish"]
                                    ["custom_staging_dir_profiles"])

@@ -16,7 +16,7 @@ def get_versioning_start(
         project_settings = get_project_settings(project_name)

     version_start = 1
-    settings = project_settings["global"]
+    settings = project_settings["core"]
    profiles = settings.get("version_start_category", {}).get("profiles", [])

     if not profiles:

@@ -64,7 +64,7 @@ def is_workfile_lock_enabled(host_name, project_name, project_setting=None):
         project_setting = get_project_settings(project_name)
     workfile_lock_profiles = (
         project_setting
-        ["global"]
+        ["core"]
         ["tools"]
         ["Workfiles"]
         ["workfile_lock_profiles"])

@@ -72,7 +72,7 @@ def get_workfile_template_key(
     try:
         profiles = (
             project_settings
-            ["global"]
+            ["core"]
             ["tools"]
             ["Workfiles"]
             ["workfile_template_profiles"]

@@ -507,7 +507,7 @@ def create_workdir_extra_folders(

     # Load extra folders profiles
     extra_folders_profiles = (
-        project_settings["global"]["tools"]["Workfiles"]["extra_folders"]
+        project_settings["core"]["tools"]["Workfiles"]["extra_folders"]
     )
     # Skip if are empty
     if not extra_folders_profiles:

@@ -40,7 +40,7 @@ class CleanUp(pyblish.api.InstancePlugin):
     active = True

     # Presets
-    paterns = None  # list of regex paterns
+    patterns = None  # list of regex patterns
     remove_temp_renders = True

     def process(self, instance):

@@ -115,10 +115,10 @@ class CleanUp(pyblish.api.InstancePlugin):
             src = os.path.normpath(src)
             dest = os.path.normpath(dest)

-            # add src dir into clearing dir paths (regex paterns)
+            # add src dir into clearing dir paths (regex patterns)
             transfers_dirs.append(os.path.dirname(src))

-            # add dest dir into clearing dir paths (regex paterns)
+            # add dest dir into clearing dir paths (regex patterns)
             transfers_dirs.append(os.path.dirname(dest))

             if src in skip_cleanup_filepaths:

@@ -141,13 +141,13 @@ class CleanUp(pyblish.api.InstancePlugin):
                 # add dir for cleanup
                 dirnames.append(os.path.dirname(src))

-        # clean by regex paterns
+        # clean by regex patterns
         # make unique set
         transfers_dirs = set(transfers_dirs)

         self.log.debug("__ transfers_dirs: `{}`".format(transfers_dirs))
-        self.log.debug("__ self.paterns: `{}`".format(self.paterns))
-        if self.paterns:
+        self.log.debug("__ self.patterns: `{}`".format(self.patterns))
+        if self.patterns:
             files = list()
             # get list of all available content of dirs
             for _dir in transfers_dirs:

@@ -159,14 +159,14 @@ class CleanUp(pyblish.api.InstancePlugin):

             self.log.debug("__ files: `{}`".format(files))

-            # remove all files which match regex patern
+            # remove all files which match regex pattern
             for f in files:
                 if os.path.normpath(f) in skip_cleanup_filepaths:
                     continue

-                for p in self.paterns:
-                    patern = re.compile(p)
-                    if not patern.findall(f):
+                for p in self.patterns:
+                    pattern = re.compile(p)
+                    if not pattern.findall(f):
                         continue
                     if not os.path.exists(f):
                         continue

@@ -43,7 +43,7 @@ class CollectInstanceCommentDef(

     @classmethod
     def apply_settings(cls, project_setting, _):
-        plugin_settings = project_setting["global"]["publish"].get(
+        plugin_settings = project_setting["core"]["publish"].get(
             "collect_comment_per_instance"
         )
         if not plugin_settings:

@@ -65,8 +65,8 @@ class ExtractBurnin(publish.Extractor):
     # Default options for burnins for cases that are not set in presets.
     default_options = {
         "font_size": 42,
-        "font_color": [255, 255, 255, 255],
-        "bg_color": [0, 0, 0, 127],
+        "font_color": [255, 255, 255, 1.0],
+        "bg_color": [0, 0, 0, 0.5],
         "bg_padding": 5,
         "x_offset": 5,
         "y_offset": 5

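The burnin defaults follow the same color convention change: the old defaults stored alpha as an 8-bit value, while the new ones store a fraction, so the previous division by 255 now effectively happens once, in the settings. A tiny hedged check of the mapping:

    old_bg_alpha = 127
    new_bg_alpha = round(old_bg_alpha / 255, 1)  # -> 0.5, matching the new default
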
@@ -96,7 +96,20 @@ class ExtractBurnin(publish.Extractor):
             instance.data["representations"].remove(repre)

     def _get_burnins_per_representations(self, instance, src_burnin_defs):
-        self.log.debug("Filtering of representations and their burnins starts")
+        """
+
+        Args:
+            instance (pyblish.api.Instance): Pyblish instance.
+            src_burnin_defs (list): Burnin definitions.
+
+        Returns:
+            list[tuple[dict, list]]: List of tuples containing representation
+                and its burnin definitions.
+
+        """
+        self.log.debug(
+            "Filtering of representations and their burnins starts"
+        )

         filtered_repres = []
         repres = instance.data.get("representations") or []

@@ -111,16 +124,13 @@ class ExtractBurnin(publish.Extractor):
             )

             burnin_defs = copy.deepcopy(src_burnin_defs)
-            self.log.debug(
-                "burnin_defs.keys(): {}".format(burnin_defs.keys())
-            )

             # Filter output definition by `burnin` represetation key
-            repre_linked_burnins = {
-                name: output
-                for name, output in burnin_defs.items()
-                if name in repre_burnin_links
-            }
+            repre_linked_burnins = [
+                burnin_def
+                for burnin_def in burnin_defs
+                if burnin_def["name"] in repre_burnin_links
+            ]
             self.log.debug(
                 "repre_linked_burnins: {}".format(repre_linked_burnins)
             )

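Burnin definitions move from a dict keyed by filename suffix to a list of dicts that carry a "name" key, which the comprehension above filters by linked names. An assumed illustration of the shape change (values are made up):

    # old: dict keyed by filename suffix
    old_burnin_defs = {"burnin": {"TOP_LEFT": "{asset}"}}
    # new: list of dicts with a "name" key
    new_burnin_defs = [{"name": "burnin", "TOP_LEFT": "{folder[name]}"}]

    repre_burnin_links = {"burnin"}
    repre_linked_burnins = [
        burnin_def
        for burnin_def in new_burnin_defs
        if burnin_def["name"] in repre_burnin_links
    ]
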
@@ -154,19 +164,21 @@ class ExtractBurnin(publish.Extractor):

         filtering_criteria = {
             "hosts": host_name,
-            "families": family,
+            "product_types": family,
+            "product_names": subset,
             "task_names": task_name,
             "task_types": task_type,
-            "subset": subset
         }
-        profile = filter_profiles(self.profiles, filtering_criteria,
-                                  logger=self.log)
-
+        profile = filter_profiles(
+            self.profiles,
+            filtering_criteria,
+            logger=self.log
+        )
         if not profile:
             self.log.debug((
                 "Skipped instance. None of profiles in presets are for"
-                " Host: \"{}\" | Families: \"{}\" | Task \"{}\""
-                " | Task type \"{}\" | Subset \"{}\" "
+                " Host: \"{}\" | Product type: \"{}\" | Task name \"{}\""
+                " | Task type \"{}\" | Product name \"{}\" "
             ).format(host_name, family, task_name, task_type, subset))
             return

@@ -175,7 +187,7 @@ class ExtractBurnin(publish.Extractor):
         if not burnin_defs:
             self.log.debug((
                 "Skipped instance. Burnin definitions are not set for profile"
-                " Host: \"{}\" | Families: \"{}\" | Task \"{}\""
+                " Host: \"{}\" | Product type: \"{}\" | Task name \"{}\""
                 " | Profile \"{}\""
             ).format(host_name, family, task_name, profile))
             return

@@ -275,7 +287,8 @@ class ExtractBurnin(publish.Extractor):
             # it in review?
             # burnin_data["fps"] = fps

-            for filename_suffix, burnin_def in repre_burnin_defs.items():
+            for burnin_def in repre_burnin_defs:
+                filename_suffix = burnin_def["name"]
                 new_repre = copy.deepcopy(repre)
                 new_repre["stagingDir"] = src_repre_staging_dir

@@ -288,16 +301,28 @@ class ExtractBurnin(publish.Extractor):
                 burnin_values = {}
                 for key in self.positions:
                     value = burnin_def.get(key)
-                    if value:
-                        burnin_values[key] = value.replace(
-                            "{task}", "{task[name]}"
-                        )
+                    if not value:
+                        continue
+                    # TODO remove replacements
+                    burnin_values[key] = (
+                        value
+                        .replace("{task}", "{task[name]}")
+                        .replace("{product[name]}", "{subset}")
+                        .replace("{Product[name]}", "{Subset}")
+                        .replace("{PRODUCT[NAME]}", "{SUBSET}")
+                        .replace("{product[type]}", "{family}")
+                        .replace("{Product[type]}", "{Family}")
+                        .replace("{PRODUCT[TYPE]}", "{FAMILY}")
+                        .replace("{folder[name]}", "{asset}")
+                        .replace("{Folder[name]}", "{Asset}")
+                        .replace("{FOLDER[NAME]}", "{ASSET}")
+                    )

                 # Remove "delete" tag from new representation
                 if "delete" in new_repre["tags"]:
                     new_repre["tags"].remove("delete")

-                if len(repre_burnin_defs.keys()) > 1:
+                if len(repre_burnin_defs) > 1:
                     # Update name and outputName to be
                     # able have multiple outputs in case of more burnin presets
                     # Join previous "outputName" with filename suffix

@@ -401,8 +426,7 @@ class ExtractBurnin(publish.Extractor):
         bg_color_hex = "#{0:0>2X}{1:0>2X}{2:0>2X}".format(
             bg_red, bg_green, bg_blue
         )
-        bg_color_alpha = float(bg_alpha) / 255
-        burnin_options["bg_opacity"] = bg_color_alpha
+        burnin_options["bg_opacity"] = bg_alpha
         burnin_options["bg_color"] = bg_color_hex

         # FG Color

@@ -412,8 +436,7 @@ class ExtractBurnin(publish.Extractor):
         fg_color_hex = "#{0:0>2X}{1:0>2X}{2:0>2X}".format(
             fg_red, fg_green, fg_blue
         )
-        fg_color_alpha = float(fg_alpha) / 255
-        burnin_options["opacity"] = fg_color_alpha
+        burnin_options["opacity"] = fg_alpha
         burnin_options["font_color"] = fg_color_hex

         # Define font filepath

@@ -543,15 +566,16 @@ class ExtractBurnin(publish.Extractor):
         Burnin definitions without tags filter are marked as valid.

         Args:
-            outputs (list): Contain list of burnin definitions from presets.
+            burnin_defs (list): Burnin definitions.
             tags (list): Tags of processed representation.

         Returns:
             list: Containg all burnin definitions matching entered tags.
+
         """
-        filtered_burnins = {}
+        filtered_burnins = []
         repre_tags_low = set(tag.lower() for tag in tags)
-        for filename_suffix, burnin_def in burnin_defs.items():
+        for burnin_def in burnin_defs:
             valid = True
             tag_filters = burnin_def["filter"]["tags"]
             if tag_filters:

@@ -561,8 +585,7 @@ class ExtractBurnin(publish.Extractor):
                 valid = bool(repre_tags_low & tag_filters_low)

             if valid:
-                filtered_burnins[filename_suffix] = burnin_def
-
+                filtered_burnins.append(burnin_def)
         return filtered_burnins

     def input_output_paths(

@@ -724,7 +747,7 @@ class ExtractBurnin(publish.Extractor):
         Returns:
             list: Containg all valid output definitions.
         """
-        filtered_burnin_defs = {}
+        filtered_burnin_defs = []

         burnin_defs = profile.get("burnins")
         if not burnin_defs:

@@ -732,13 +755,11 @@ class ExtractBurnin(publish.Extractor):

         families = self.families_from_instance(instance)

-        for filename_suffix, orig_burnin_def in burnin_defs.items():
+        for orig_burnin_def in burnin_defs:
             burnin_def = copy.deepcopy(orig_burnin_def)
-            def_filter = burnin_def.get("filter", None) or {}
-            for key in ("families", "tags"):
-                if key not in def_filter:
-                    def_filter[key] = []
+            filename_suffix = burnin_def["name"]

+            def_filter = burnin_def["filter"]
             families_filters = def_filter["families"]
             if not self.families_filter_validation(
                 families, families_filters

@@ -752,10 +773,13 @@ class ExtractBurnin(publish.Extractor):
                 continue

             # Burnin values
+            new_burnin_def = {}
             burnin_values = {}
             for key, value in tuple(burnin_def.items()):
                 key_low = key.lower()
-                if key_low in self.positions and value:
+                if key_low not in self.positions:
+                    new_burnin_def[key] = value
+                elif value:
                     burnin_values[key_low] = value

             # Skip processing if burnin values are not set

@@ -767,9 +791,9 @@ class ExtractBurnin(publish.Extractor):
                 ).format(filename_suffix, str(orig_burnin_def)))
                 continue

-            burnin_values["filter"] = def_filter
+            new_burnin_def.update(burnin_values)

-            filtered_burnin_defs[filename_suffix] = burnin_values
+            filtered_burnin_defs.append(new_burnin_def)

             self.log.debug((
                 "Burnin definition \"{}\" passed first filtering."

@@ -81,6 +81,7 @@ class ExtractOIIOTranscode(publish.Extractor):
         if not profile:
             return

+        profile_output_defs = profile["outputs"]
         new_representations = []
         repres = instance.data["representations"]
         for idx, repre in enumerate(list(repres)):

@@ -98,7 +99,8 @@ class ExtractOIIOTranscode(publish.Extractor):
                 self.log.warning("Config file doesn't exist, skipping")
                 continue

-            for output_name, output_def in profile.get("outputs", {}).items():
+            for output_def in profile_output_defs:
+                output_name = output_def["name"]
                 new_repre = copy.deepcopy(repre)

                 original_staging_dir = new_repre["stagingDir"]

@@ -318,10 +320,10 @@ class ExtractOIIOTranscode(publish.Extractor):
         subset = instance.data["subset"]
         filtering_criteria = {
             "hosts": host_name,
-            "families": family,
+            "product_types": family,
+            "product_names": subset,
             "task_names": task_name,
             "task_types": task_type,
-            "subsets": subset
         }
         profile = filter_profiles(self.profiles, filtering_criteria,
                                   logger=self.log)

@@ -329,8 +331,8 @@ class ExtractOIIOTranscode(publish.Extractor):
         if not profile:
             self.log.debug((
                 "Skipped instance. None of profiles in presets are for"
-                " Host: \"{}\" | Families: \"{}\" | Task \"{}\""
-                " | Task type \"{}\" | Subset \"{}\" "
+                " Host: \"{}\" | Product types: \"{}\" | Task \"{}\""
+                " | Task type \"{}\" | Product names: \"{}\" "
             ).format(host_name, family, task_name, task_type, subset))

         return profile

@@ -1280,14 +1280,11 @@ class ExtractReview(pyblish.api.InstancePlugin):
                 "FFprobe couldn't read resolution from input file: \"{}\""
             ).format(full_input_path_single_file))

-        # NOTE Setting only one of `width` or `heigth` is not allowed
-        output_width = (
-            output_def.get("output_width") or output_width or None
-        )
-        output_height = (
-            output_def.get("output_height") or output_height or None
-        )
+        # NOTE Setting only one of `width` or `height` is not allowed
+        # - settings value can't have None but has value of 0
+        output_width = output_def["width"] or output_width or None
+        output_height = output_def["height"] or output_height or None

         # Force to use input resolution if output resolution was not defined
         # in settings. Resolution from instance is not used when
         # 'use_input_res' is set to 'True'.

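As the added comment says, AYON settings cannot store None, so an unset dimension arrives as 0 and the `or` chain falls back to the instance value and finally to None. A small sketch with assumed example values:

    output_def = {"width": 0, "height": 1080}  # assumed output definition
    instance_width = 1280                      # resolution known from the instance
    instance_height = 720

    output_width = output_def["width"] or instance_width or None     # -> 1280
    output_height = output_def["height"] or instance_height or None  # -> 1080
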
@@ -42,15 +42,27 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):

     integrate_thumbnail = False
     target_size = {
-        "type": "resize",
-        "width": 1920,
-        "height": 1080
+        "type": "source",
+        "resize": {
+            "width": 1920,
+            "height": 1080
+        }
     }
-    background_color = None
+    background_color = (0, 0, 0, 0.0)
     duration_split = 0.5
     # attribute presets from settings
-    oiiotool_defaults = None
-    ffmpeg_args = None
+    oiiotool_defaults = {
+        "type": "colorspace",
+        "colorspace": "color_picking",
+        "display_and_view": {
+            "display": "default",
+            "view": "sRGB"
+        }
+    }
+    ffmpeg_args = {
+        "input": [],
+        "output": []
+    }
     product_names = []

     def process(self, instance):

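The class defaults now mirror the AYON settings model, nesting resize under target_size and display/view under oiiotool_defaults; reading them follows that nesting. A sketch using only the shapes visible in the hunk above:

    target_size = {
        "type": "source",
        "resize": {"width": 1920, "height": 1080},
    }
    if target_size["type"] != "source":
        resize = target_size["resize"]
        print(resize["width"], resize["height"])

    oiiotool_defaults = {
        "type": "colorspace",
        "colorspace": "color_picking",
        "display_and_view": {"display": "default", "view": "sRGB"},
    }
    if oiiotool_defaults["type"] == "colorspace":
        print(oiiotool_defaults["colorspace"])
    else:
        display_and_view = oiiotool_defaults["display_and_view"]
        print(display_and_view["display"], display_and_view["view"])
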
@@ -369,7 +381,6 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):

         repre_display = colorspace_data.get("display")
         repre_view = colorspace_data.get("view")
-        oiio_default_type = None
         oiio_default_display = None
         oiio_default_view = None
         oiio_default_colorspace = None

@@ -387,11 +398,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
             # oiiotool_defaults
             elif self.oiiotool_defaults:
                 oiio_default_type = self.oiiotool_defaults["type"]
-                if "colorspace" in oiio_default_type:
+                if "colorspace" == oiio_default_type:
                     oiio_default_colorspace = self.oiiotool_defaults["colorspace"]
                 else:
-                    oiio_default_display = self.oiiotool_defaults["display"]
-                    oiio_default_view = self.oiiotool_defaults["view"]
+                    display_and_view = self.oiiotool_defaults["display_and_view"]
+                    oiio_default_display = display_and_view["display"]
+                    oiio_default_view = display_and_view["view"]

             try:
                 convert_colorspace(

@@ -507,11 +519,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
         input_path,
     ):
         # get settings
-        if self.target_size.get("type") == "source":
+        if self.target_size["type"] == "source":
             return []

-        target_width = self.target_size["width"]
-        target_height = self.target_size["height"]
+        resize = self.target_size["resize"]
+        target_width = resize["width"]
+        target_height = resize["height"]

         # form arg string per application
         return get_rescaled_command_arguments(

@@ -17,24 +17,24 @@ from ayon_core.lib import (
 )


-class IntegrateSubsetGroup(pyblish.api.InstancePlugin):
+class IntegrateProductGroup(pyblish.api.InstancePlugin):
     """Integrate Subset Group for publish."""

     # Run after CollectAnatomyInstanceData
     order = pyblish.api.IntegratorOrder - 0.1
-    label = "Subset Group"
+    label = "Product Group"

     # Attributes set by settings
-    subset_grouping_profiles = None
+    product_grouping_profiles = None

     def process(self, instance):
         """Look into subset group profiles set by settings.

-        Attribute 'subset_grouping_profiles' is defined by settings.
+        Attribute 'product_grouping_profiles' is defined by settings.
         """

-        # Skip if 'subset_grouping_profiles' is empty
-        if not self.subset_grouping_profiles:
+        # Skip if 'product_grouping_profiles' is empty
+        if not self.product_grouping_profiles:
             return

         if instance.data.get("subsetGroup"):

@@ -47,7 +47,7 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin):
         # Skip if there is no matching profile
         filter_criteria = self.get_profile_filter_criteria(instance)
         profile = filter_profiles(
-            self.subset_grouping_profiles,
+            self.product_grouping_profiles,
             filter_criteria,
             logger=self.log
         )

@@ -58,7 +58,7 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin):
         template = profile["template"]

         fill_pairs = prepare_template_data({
-            "family": filter_criteria["families"],
+            "family": filter_criteria["product_types"],
             "task": filter_criteria["tasks"],
             "host": filter_criteria["hosts"],
             "subset": instance.data["subset"],

@@ -91,7 +91,7 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin):

         # Return filter criteria
         return {
-            "families": anatomy_data["family"],
+            "product_types": anatomy_data["family"],
             "tasks": task.get("name"),
             "hosts": instance.context.data["hostName"],
             "task_types": task.get("type")

@@ -381,178 +381,6 @@ def _convert_royalrender_project_settings(ayon_settings, output):
     }


-def _convert_global_project_settings(ayon_settings, output, default_settings):
-    if "core" not in ayon_settings:
-        return
-
-    ayon_core = ayon_settings["core"]
-
-    # Publish conversion
-    ayon_publish = ayon_core["publish"]
-
-    # ExtractThumbnail plugin
-    ayon_extract_thumbnail = ayon_publish["ExtractThumbnail"]
-    # fix display and view at oiio defaults
-    ayon_default_oiio = copy.deepcopy(
-        ayon_extract_thumbnail["oiiotool_defaults"])
-    display_and_view = ayon_default_oiio.pop("display_and_view")
-    ayon_default_oiio["display"] = display_and_view["display"]
-    ayon_default_oiio["view"] = display_and_view["view"]
-    ayon_extract_thumbnail["oiiotool_defaults"] = ayon_default_oiio
-    # fix target size
-    ayon_default_resize = copy.deepcopy(ayon_extract_thumbnail["target_size"])
-    resize = ayon_default_resize.pop("resize")
-    ayon_default_resize["width"] = resize["width"]
-    ayon_default_resize["height"] = resize["height"]
-    ayon_extract_thumbnail["target_size"] = ayon_default_resize
-    # fix background color
-    ayon_extract_thumbnail["background_color"] = _convert_color(
-        ayon_extract_thumbnail["background_color"]
-    )
-
-    # ExtractOIIOTranscode plugin
-    extract_oiio_transcode = ayon_publish["ExtractOIIOTranscode"]
-    extract_oiio_transcode_profiles = extract_oiio_transcode["profiles"]
-    for profile in extract_oiio_transcode_profiles:
-        new_outputs = {}
-        name_counter = {}
-        if "product_names" in profile:
-            profile["subsets"] = profile.pop("product_names")
-        for profile_output in profile["outputs"]:
-            if "name" in profile_output:
-                name = profile_output.pop("name")
-            else:
-                # Backwards compatibility for setting without 'name' in model
-                name = profile_output["extension"]
-            if name in new_outputs:
-                name_counter[name] += 1
-                name = "{}_{}".format(name, name_counter[name])
-            else:
-                name_counter[name] = 0
-
-            new_outputs[name] = profile_output
-        profile["outputs"] = new_outputs
-
-    # Extract Burnin plugin
-    extract_burnin = ayon_publish["ExtractBurnin"]
-    extract_burnin_options = extract_burnin["options"]
-    for color_key in ("font_color", "bg_color"):
-        extract_burnin_options[color_key] = _convert_color(
-            extract_burnin_options[color_key]
-        )
-
-    for profile in extract_burnin["profiles"]:
-        extract_burnin_defs = profile["burnins"]
-        if "product_names" in profile:
-            profile["subsets"] = profile.pop("product_names")
-            profile["families"] = profile.pop("product_types")
-
-        for burnin_def in extract_burnin_defs:
-            for key in (
-                "TOP_LEFT",
-                "TOP_CENTERED",
-                "TOP_RIGHT",
-                "BOTTOM_LEFT",
-                "BOTTOM_CENTERED",
-                "BOTTOM_RIGHT",
-            ):
-                burnin_def[key] = (
-                    burnin_def[key]
-                    .replace("{product[name]}", "{subset}")
-                    .replace("{Product[name]}", "{Subset}")
-                    .replace("{PRODUCT[NAME]}", "{SUBSET}")
-                    .replace("{product[type]}", "{family}")
-                    .replace("{Product[type]}", "{Family}")
-                    .replace("{PRODUCT[TYPE]}", "{FAMILY}")
-                    .replace("{folder[name]}", "{asset}")
-                    .replace("{Folder[name]}", "{Asset}")
-                    .replace("{FOLDER[NAME]}", "{ASSET}")
-                )
-        profile["burnins"] = {
-            extract_burnin_def.pop("name"): extract_burnin_def
-            for extract_burnin_def in extract_burnin_defs
-        }
-
-    if "IntegrateProductGroup" in ayon_publish:
-        subset_group = ayon_publish.pop("IntegrateProductGroup")
-        subset_group_profiles = subset_group.pop("product_grouping_profiles")
-        for profile in subset_group_profiles:
-            profile["families"] = profile.pop("product_types")
-        subset_group["subset_grouping_profiles"] = subset_group_profiles
-        ayon_publish["IntegrateSubsetGroup"] = subset_group
-
-    # Cleanup plugin
-    ayon_cleanup = ayon_publish["CleanUp"]
-    if "patterns" in ayon_cleanup:
-        ayon_cleanup["paterns"] = ayon_cleanup.pop("patterns")
-
-    # Project root settings - json string to dict
-    ayon_core["project_environments"] = json.loads(
-        ayon_core["project_environments"]
-    )
-    ayon_core["project_folder_structure"] = json.dumps(json.loads(
-        ayon_core["project_folder_structure"]
-    ))
-
-    # Tools settings
-    ayon_tools = ayon_core["tools"]
-    ayon_create_tool = ayon_tools["creator"]
-    if "product_name_profiles" in ayon_create_tool:
-        product_name_profiles = ayon_create_tool.pop("product_name_profiles")
-        for profile in product_name_profiles:
-            profile["families"] = profile.pop("product_types")
-        ayon_create_tool["subset_name_profiles"] = product_name_profiles
-
-    for profile in ayon_create_tool["subset_name_profiles"]:
-        template = profile["template"]
-        profile["template"] = (
-            template
-            .replace("{task[name]}", "{task}")
-            .replace("{Task[name]}", "{Task}")
-            .replace("{TASK[NAME]}", "{TASK}")
-            .replace("{product[type]}", "{family}")
-            .replace("{Product[type]}", "{Family}")
-            .replace("{PRODUCT[TYPE]}", "{FAMILY}")
-            .replace("{folder[name]}", "{asset}")
-            .replace("{Folder[name]}", "{Asset}")
-            .replace("{FOLDER[NAME]}", "{ASSET}")
-        )
-
-    product_smart_select_key = "families_smart_select"
-    if "product_types_smart_select" in ayon_create_tool:
-        product_smart_select_key = "product_types_smart_select"
-
-    new_smart_select_families = {
-        item["name"]: item["task_names"]
-        for item in ayon_create_tool.pop(product_smart_select_key)
-    }
-    ayon_create_tool["families_smart_select"] = new_smart_select_families
-
-    ayon_loader_tool = ayon_tools["loader"]
-    if "product_type_filter_profiles" in ayon_loader_tool:
-        product_type_filter_profiles = (
-            ayon_loader_tool.pop("product_type_filter_profiles"))
-        for profile in product_type_filter_profiles:
-            profile["filter_families"] = profile.pop("filter_product_types")
-
-        ayon_loader_tool["family_filter_profiles"] = (
-            product_type_filter_profiles)
-
-    ayon_publish_tool = ayon_tools["publish"]
-    for profile in ayon_publish_tool["hero_template_name_profiles"]:
-        if "product_types" in profile:
-            profile["families"] = profile.pop("product_types")
-
-    for profile in ayon_publish_tool["template_name_profiles"]:
-        if "product_types" in profile:
-            profile["families"] = profile.pop("product_types")
-
-    ayon_core["sync_server"] = (
-        default_settings["global"]["sync_server"]
-    )
-    output["global"] = ayon_core
-
-
 def convert_project_settings(ayon_settings, default_settings):
     default_settings = copy.deepcopy(default_settings)
     output = {}

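With every consumer reading the "core" key directly, the conversion layer that rebuilt the legacy "global" structure is removed and project settings keep AYON's native shape. A before/after access sketch; the keys are taken from the hunks above and the values are placeholders:

    ayon_settings = {"core": {"tools": {"creator": {"product_name_profiles": []}}}}

    # old access (after conversion to the legacy layout):
    #     project_settings["global"]["tools"]["creator"]["subset_name_profiles"]
    # new access (no conversion step):
    profiles = ayon_settings["core"]["tools"]["creator"]["product_name_profiles"]
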
@@ -562,8 +390,6 @@ def convert_project_settings(ayon_settings, default_settings):

     _convert_royalrender_project_settings(ayon_settings, output)

-    _convert_global_project_settings(ayon_settings, output, default_settings)
-
     for key, value in ayon_settings.items():
         if key not in output:
             output[key] = value

@@ -377,23 +377,25 @@ class CreatorWindow(QtWidgets.QDialog):

         self._creators_model.reset()

-        pype_project_setting = (
+        product_types_smart_select = (
             get_current_project_settings()
             ["global"]
             ["tools"]
             ["creator"]
-            ["families_smart_select"]
+            ["product_types_smart_select"]
         )
         current_index = None
         family = None
         task_name = get_current_task_name() or None
         lowered_task_name = task_name.lower()
         if task_name:
-            for _family, _task_names in pype_project_setting.items():
-                _low_task_names = {name.lower() for name in _task_names}
+            for smart_item in product_types_smart_select:
+                _low_task_names = {
+                    name.lower() for name in smart_item["task_names"]
+                }
                 for _task_name in _low_task_names:
                     if _task_name in lowered_task_name:
-                        family = _family
+                        family = smart_item["name"]
                         break
                 if family:
                     break

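The smart-select setting becomes a list of {"name", "task_names"} items instead of a mapping, and the loop above matches the current task against each item's task names. An assumed example of consuming the new shape (names and tasks are made up):

    product_types_smart_select = [
        {"name": "render", "task_names": ["lighting", "compositing"]},
        {"name": "model", "task_names": ["modeling"]},
    ]

    task_name = "Compositing"
    family = None
    for smart_item in product_types_smart_select:
        low_task_names = {name.lower() for name in smart_item["task_names"]}
        if any(name in task_name.lower() for name in low_task_names):
            family = smart_item["name"]
            break
    # family == "render"
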
@@ -208,7 +208,7 @@ class Controller(QtCore.QObject):
         if not presets:
             return {}

-        result = presets.get("global", {}).get("filters", {})
+        result = presets.get("core", {}).get("filters", {})
         hosts = pyblish.api.registered_hosts()
         for host in hosts:
             host_presets = presets.get(host, {}).get("filters")