Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit 4d6a5d8bba: Merge branch 'develop' into enhancement/OP-8198_RoyalRender-use-AYON-settings

42 changed files with 220 additions and 1081 deletions
@@ -181,6 +181,10 @@ class HostDirmap(object):
                 exclude_locals=False,
                 cached=False)

+            # TODO implement
+            # Dirmap is dependent on 'get_site_local_overrides' which
+            # is not implemented in AYON. The mapping should be received
+            # from sitesync addon.
             active_overrides = get_site_local_overrides(
                 project_name, active_site)
             remote_overrides = get_site_local_overrides(

@@ -194,13 +194,13 @@ class RenderCreator(Creator):
     name into created subset name.

     Position of composition name could be set in
-    `project_settings/global/tools/creator/subset_name_profiles` with some
-    form of '{composition}' placeholder.
+    `project_settings/global/tools/creator/product_name_profiles` with
+    some form of '{composition}' placeholder.

     Composition name will be used implicitly if multiple composition should
     be handled at same time.

-    If {composition} placeholder is not us 'subset_name_profiles'
+    If {composition} placeholder is not us 'product_name_profiles'
     composition name will be capitalized and set at the end of subset name
     if necessary.

@@ -38,25 +38,6 @@ class ToolWindows:
         cls._windows[tool] = window


-def edit_shader_definitions():
-    from qtpy import QtWidgets
-    from ayon_core.hosts.maya.api.shader_definition_editor import (
-        ShaderDefinitionsEditor
-    )
-    from ayon_core.tools.utils import qt_app_context
-
-    top_level_widgets = QtWidgets.QApplication.topLevelWidgets()
-    main_window = next(widget for widget in top_level_widgets
-                       if widget.objectName() == "MayaWindow")
-
-    with qt_app_context():
-        window = ToolWindows.get_window("shader_definition_editor")
-        if not window:
-            window = ShaderDefinitionsEditor(parent=main_window)
-            ToolWindows.set_window("shader_definition_editor", window)
-        window.show()
-
-
 def _resolution_from_document(doc):
     if not doc or "data" not in doc:
         print("Entered document is not valid. \"{}\"".format(str(doc)))

@@ -1,176 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Editor for shader definitions.
-
-Shader names are stored as simple text file over GridFS in mongodb.
-
-"""
-import os
-from qtpy import QtWidgets, QtCore, QtGui
-from ayon_core.client.mongo import OpenPypeMongoConnection
-from ayon_core import resources
-import gridfs
-
-
-DEFINITION_FILENAME = "{}/maya/shader_definition.txt".format(
-    os.getenv("AYON_PROJECT_NAME"))
-
-
-class ShaderDefinitionsEditor(QtWidgets.QWidget):
-    """Widget serving as simple editor for shader name definitions."""
-
-    # name of the file used to store definitions
-
-    def __init__(self, parent=None):
-        super(ShaderDefinitionsEditor, self).__init__(parent)
-        self._mongo = OpenPypeMongoConnection.get_mongo_client()
-        self._gridfs = gridfs.GridFS(
-            self._mongo[os.getenv("OPENPYPE_DATABASE_NAME")])
-        self._editor = None
-
-        self._original_content = self._read_definition_file()
-
-        self.setObjectName("shaderDefinitionEditor")
-        self.setWindowTitle("OpenPype shader name definition editor")
-        icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
-        self.setWindowIcon(icon)
-        self.setWindowFlags(QtCore.Qt.Window)
-        self.setParent(parent)
-        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
-        self.resize(750, 500)
-
-        self._setup_ui()
-        self._reload()
-
-    def _setup_ui(self):
-        """Setup UI of Widget."""
-        layout = QtWidgets.QVBoxLayout(self)
-        label = QtWidgets.QLabel()
-        label.setText("Put shader names here - one name per line:")
-        layout.addWidget(label)
-        self._editor = QtWidgets.QPlainTextEdit()
-        self._editor.setStyleSheet("border: none;")
-        layout.addWidget(self._editor)
-
-        btn_layout = QtWidgets.QHBoxLayout()
-        save_btn = QtWidgets.QPushButton("Save")
-        save_btn.clicked.connect(self._save)
-
-        reload_btn = QtWidgets.QPushButton("Reload")
-        reload_btn.clicked.connect(self._reload)
-
-        exit_btn = QtWidgets.QPushButton("Exit")
-        exit_btn.clicked.connect(self._close)
-
-        btn_layout.addWidget(reload_btn)
-        btn_layout.addWidget(save_btn)
-        btn_layout.addWidget(exit_btn)
-
-        layout.addLayout(btn_layout)
-
-    def _read_definition_file(self, file=None):
-        """Read definition file from database.
-
-        Args:
-            file (gridfs.grid_file.GridOut, Optional): File to read. If not
-                set, new query will be issued to find it.
-
-        Returns:
-            str: Content of the file or empty string if file doesn't exist.
-
-        """
-        content = ""
-        if not file:
-            file = self._gridfs.find_one(
-                {"filename": DEFINITION_FILENAME})
-            if not file:
-                print(">>> [SNDE]: nothing in database yet")
-                return content
-        content = file.read()
-        file.close()
-        return content
-
-    def _write_definition_file(self, content, force=False):
-        """Write content as definition to file in database.
-
-        Before file is written, check is made if its content has not
-        changed. If is changed, warning is issued to user if he wants
-        it to overwrite. Note: GridFs doesn't allow changing file content.
-        You need to delete existing file and create new one.
-
-        Args:
-            content (str): Content to write.
-
-        Raises:
-            ContentException: If file is changed in database while
-                editor is running.
-        """
-        file = self._gridfs.find_one(
-            {"filename": DEFINITION_FILENAME})
-        if file:
-            content_check = self._read_definition_file(file)
-            if content == content_check:
-                print(">>> [SNDE]: content not changed")
-                return
-            if self._original_content != content_check:
-                if not force:
-                    raise ContentException("Content changed")
-                print(">>> [SNDE]: overwriting data")
-            file.close()
-            self._gridfs.delete(file._id)
-
-        file = self._gridfs.new_file(
-            filename=DEFINITION_FILENAME,
-            content_type='text/plain',
-            encoding='utf-8')
-        file.write(content)
-        file.close()
-        QtCore.QTimer.singleShot(200, self._reset_style)
-        self._editor.setStyleSheet("border: 1px solid #33AF65;")
-        self._original_content = content
-
-    def _reset_style(self):
-        """Reset editor style back.
-
-        Used to visually indicate save.
-
-        """
-        self._editor.setStyleSheet("border: none;")
-
-    def _close(self):
-        self.hide()
-
-    def closeEvent(self, event):
-        event.ignore()
-        self.hide()
-
-    def _reload(self):
-        print(">>> [SNDE]: reloading")
-        self._set_content(self._read_definition_file())
-
-    def _save(self):
-        try:
-            self._write_definition_file(content=self._editor.toPlainText())
-        except ContentException:
-            # content has changed meanwhile
-            print(">>> [SNDE]: content has changed")
-            self._show_overwrite_warning()
-
-    def _set_content(self, content):
-        self._editor.setPlainText(content)
-
-    def _show_overwrite_warning(self):
-        reply = QtWidgets.QMessageBox.question(
-            self,
-            "Warning",
-            ("Content you are editing was changed meanwhile in database.\n"
-             "Please, reload and solve the conflict."),
-            QtWidgets.QMessageBox.OK)
-
-        if reply == QtWidgets.QMessageBox.OK:
-            # do nothing
-            pass
-
-
-class ContentException(Exception):
-    """This is risen during save if file is changed in database."""
-    pass

@@ -1,161 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Validate model nodes names."""
-import os
-import platform
-import re
-
-import gridfs
-import pyblish.api
-from maya import cmds
-
-import ayon_core.hosts.maya.api.action
-from ayon_core.client.mongo import OpenPypeMongoConnection
-from ayon_core.hosts.maya.api.shader_definition_editor import (
-    DEFINITION_FILENAME)
-from ayon_core.pipeline.publish import (
-    OptionalPyblishPluginMixin, PublishValidationError, ValidateContentsOrder)
-
-
-class ValidateModelName(pyblish.api.InstancePlugin,
-                        OptionalPyblishPluginMixin):
-    """Validate name of model
-
-    starts with (somename)_###_(materialID)_GEO
-    materialID must be present in list
-    padding number doesn't have limit
-
-    """
-    optional = True
-    order = ValidateContentsOrder
-    hosts = ["maya"]
-    families = ["model"]
-    label = "Model Name"
-    actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
-    material_file = None
-    database_file = DEFINITION_FILENAME
-
-    @classmethod
-    def get_invalid(cls, instance):
-        """Get invalid nodes."""
-        use_db = cls.database
-
-        def is_group(group_name):
-            """Find out if supplied transform is group or not."""
-            try:
-                children = cmds.listRelatives(group_name, children=True)
-                for child in children:
-                    if not cmds.ls(child, transforms=True):
-                        return False
-                return True
-            except Exception:
-                return False
-
-        invalid = []
-        content_instance = instance.data.get("setMembers", None)
-        if not content_instance:
-            cls.log.error("Instance has no nodes!")
-            return True
-            pass
-
-        # validate top level group name
-        assemblies = cmds.ls(content_instance, assemblies=True, long=True)
-        if len(assemblies) != 1:
-            cls.log.error("Must have exactly one top group")
-            return assemblies or True
-        top_group = assemblies[0]
-        regex = cls.top_level_regex
-        r = re.compile(regex)
-        m = r.match(top_group)
-        project_name = instance.context.data["projectName"]
-        current_asset_name = instance.context.data["asset"]
-        if m is None:
-            cls.log.error("invalid name on: {}".format(top_group))
-            cls.log.error("name doesn't match regex {}".format(regex))
-            invalid.append(top_group)
-        else:
-            if "asset" in r.groupindex:
-                if m.group("asset") != current_asset_name:
-                    cls.log.error("Invalid asset name in top level group.")
-                    return top_group
-            if "subset" in r.groupindex:
-                if m.group("subset") != instance.data.get("subset"):
-                    cls.log.error("Invalid subset name in top level group.")
-                    return top_group
-            if "project" in r.groupindex:
-                if m.group("project") != project_name:
-                    cls.log.error("Invalid project name in top level group.")
-                    return top_group
-
-        descendants = cmds.listRelatives(content_instance,
-                                         allDescendents=True,
-                                         fullPath=True) or []
-
-        descendants = cmds.ls(descendants, noIntermediate=True, long=True)
-        trns = cmds.ls(descendants, long=False, type='transform')
-
-        # filter out groups
-        filtered = [node for node in trns if not is_group(node)]
-
-        # load shader list file as utf-8
-        shaders = []
-        if not use_db:
-            material_file = cls.material_file[platform.system().lower()]
-            if material_file:
-                if os.path.isfile(material_file):
-                    shader_file = open(material_file, "r")
-                    shaders = shader_file.readlines()
-                    shader_file.close()
-                else:
-                    cls.log.error("Missing shader name definition file.")
-                    return True
-        else:
-            client = OpenPypeMongoConnection.get_mongo_client()
-            fs = gridfs.GridFS(client[os.getenv("OPENPYPE_DATABASE_NAME")])
-            shader_file = fs.find_one({"filename": cls.database_file})
-            if not shader_file:
-                cls.log.error("Missing shader name definition in database.")
-                return True
-            shaders = shader_file.read().splitlines()
-            shader_file.close()
-
-        # strip line endings from list
-        shaders = [s.rstrip() for s in shaders if s.rstrip()]
-
-        # compile regex for testing names
-        regex = cls.regex
-        r = re.compile(regex)
-
-        for obj in filtered:
-            cls.log.debug("testing: {}".format(obj))
-            m = r.match(obj)
-            if m is None:
-                cls.log.error("invalid name on: {}".format(obj))
-                invalid.append(obj)
-            else:
-                # if we have shader files and shader named group is in
-                # regex, test this group against names in shader file
-                if "shader" in r.groupindex and shaders:
-                    try:
-                        if not m.group('shader') in shaders:
-                            cls.log.error(
-                                "invalid materialID on: {0} ({1})".format(
-                                    obj, m.group('shader')))
-                            invalid.append(obj)
-                    except IndexError:
-                        # shader named group doesn't match
-                        cls.log.error(
-                            "shader group doesn't match: {}".format(obj))
-                        invalid.append(obj)
-
-        return invalid
-
-    def process(self, instance):
-        """Plugin entry point."""
-        if not self.is_active(instance.data):
-            return
-
-        invalid = self.get_invalid(instance)
-
-        if invalid:
-            raise PublishValidationError(
-                "Model naming is invalid. See the log.")

@@ -493,7 +493,7 @@ def get_colorspace_from_node(node):
 def get_review_presets_config():
     settings = get_current_project_settings()
     review_profiles = (
-        settings["global"]
+        settings["core"]
         ["publish"]
         ["ExtractReview"]
         ["profiles"]

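Note: this hunk is the first of many in this commit that move project settings
from the OpenPype "global" bucket to the AYON "core" bucket. A minimal sketch
of a backward-compatible accessor (a hypothetical helper, not part of this
commit) could look like:

    def get_core_settings(project_settings):
        # AYON stores these settings under "core"; older OpenPype
        # projects used "global". Prefer the new key, fall back.
        if "core" in project_settings:
            return project_settings["core"]
        return project_settings.get("global", {})
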
@@ -1348,7 +1348,9 @@ def _remove_old_knobs(node):


 def exposed_write_knobs(settings, plugin_name, instance_node):
-    exposed_knobs = settings["nuke"]["create"][plugin_name]["exposed_knobs"]
+    exposed_knobs = settings["nuke"]["create"][plugin_name].get(
+        "exposed_knobs", []
+    )
     if exposed_knobs:
         instance_node.addKnob(nuke.Text_Knob('', 'Write Knobs'))
     write_node = nuke.allNodes(group=instance_node, filter="Write")[0]

@@ -65,7 +65,7 @@ class ValidateExposedKnobs(
         group_node = instance.data["transientData"]["node"]
         nuke_settings = instance.context.data["project_settings"]["nuke"]
         create_settings = nuke_settings["create"][plugin]
-        exposed_knobs = create_settings["exposed_knobs"]
+        exposed_knobs = create_settings.get("exposed_knobs", [])
         unexposed_knobs = []
         for knob in exposed_knobs:
             if knob not in group_node.knobs():

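Note: both Nuke hunks above swap a hard key lookup for dict.get() with an
empty-list default, so settings that predate the "exposed_knobs" field no
longer raise KeyError. A standalone illustration of the pattern (data is
made up):

    create_settings = {"some_other_field": True}  # "exposed_knobs" missing

    # Old behaviour: create_settings["exposed_knobs"] raises KeyError.
    # New behaviour: degrades gracefully to an empty list.
    exposed_knobs = create_settings.get("exposed_knobs", [])
    assert exposed_knobs == []
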
@@ -209,8 +209,8 @@ class ImageCreator(Creator):

     'Use layer name in subset' will explicitly add layer name into subset
     name. Position of this name is configurable in
-    `project_settings/global/tools/creator/subset_name_profiles`.
-    If layer placeholder ({layer}) is not used in `subset_name_profiles`
+    `project_settings/global/tools/creator/product_name_profiles`.
+    If layer placeholder ({layer}) is not used in `product_name_profiles`
     but layer name should be used (set explicitly in UI or implicitly if
     multiple images should be created), it is added in capitalized form
     as a suffix to subset name.

@@ -16,7 +16,6 @@ from ayon_core.client import get_asset_name_identifier
 from ayon_core.settings import (
     get_system_settings,
     get_project_settings,
-    get_local_settings
 )
 from ayon_core.settings.constants import (
     METADATA_KEYS,

@@ -1528,16 +1527,17 @@ def prepare_app_environments(

     # Use environments from local settings
     filtered_local_envs = {}
-    system_settings = data["system_settings"]
-    whitelist_envs = system_settings["general"].get("local_env_white_list")
-    if whitelist_envs:
-        local_settings = get_local_settings()
-        local_envs = local_settings.get("environments") or {}
-        filtered_local_envs = {
-            key: value
-            for key, value in local_envs.items()
-            if key in whitelist_envs
-        }
+    # NOTE Overrides for environment variables are not implemented in AYON.
+    # system_settings = data["system_settings"]
+    # whitelist_envs = system_settings["general"].get("local_env_white_list")
+    # if whitelist_envs:
+    #     local_settings = get_local_settings()
+    #     local_envs = local_settings.get("environments") or {}
+    #     filtered_local_envs = {
+    #         key: value
+    #         for key, value in local_envs.items()
+    #         if key in whitelist_envs
+    #     }

     # Apply local environment variables for already existing values
     for key, value in filtered_local_envs.items():

@@ -1656,8 +1656,9 @@ def apply_project_environments_value(
     if project_settings is None:
         project_settings = get_project_settings(project_name)

-    env_value = project_settings["global"]["project_environments"]
+    env_value = project_settings["core"]["project_environments"]
     if env_value:
+        env_value = json.loads(env_value)
         parsed_value = parse_environments(env_value, env_group)
         env.update(acre.compute(
             _merge_env(parsed_value, env),

@@ -1916,7 +1917,7 @@ def should_start_last_workfile(
     project_settings = get_project_settings(project_name)
     profiles = (
         project_settings
-        ["global"]
+        ["core"]
         ["tools"]
         ["Workfiles"]
         ["last_workfile_on_startup"]

@@ -1966,7 +1967,7 @@ def should_workfile_tool_start(
     project_settings = get_project_settings(project_name)
     profiles = (
         project_settings
-        ["global"]
+        ["core"]
         ["tools"]
         ["Workfiles"]
         ["open_workfile_tool_on_startup"]

@@ -5,7 +5,6 @@ import platform
 import getpass
 import socket

-from ayon_core.settings.lib import get_local_settings
 from .execute import get_ayon_launcher_args
 from .local_settings import get_local_site_id

@@ -96,7 +95,6 @@ def get_all_current_info():
     return {
         "workstation": get_workstation_info(),
         "env": os.environ.copy(),
-        "local_settings": get_local_settings(),
         "ayon": get_ayon_info(),
     }

@@ -1385,23 +1385,26 @@ def _get_image_dimensions(application, input_path, log):

 def convert_color_values(application, color_value):
     """Get color mapping for ffmpeg and oiiotool.

     Args:
         application (str): Application for which command should be created.
-        color_value (list[int]): List of 8bit int values for RGBA.
+        color_value (tuple[int, int, int, float]): List of 8bit int values
+            for RGBA.

     Returns:
         str: ffmpeg returns hex string, oiiotool is string with floats.

     """
     red, green, blue, alpha = color_value

     if application == "ffmpeg":
         return "{0:0>2X}{1:0>2X}{2:0>2X}@{3}".format(
-            red, green, blue, (alpha / 255.0)
+            red, green, blue, alpha
         )
     elif application == "oiiotool":
         red = float(red / 255)
         green = float(green / 255)
         blue = float(blue / 255)
         alpha = float(alpha / 255)

         return "{0:.3f},{1:.3f},{2:.3f},{3:.3f}".format(
             red, green, blue, alpha)

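Note: the docstring change above is the visible edge of an input-format change
in this commit: alpha now arrives as a float in the 0.0-1.0 range instead of
an 8-bit integer, so the ffmpeg branch no longer divides it by 255. A sketch
of the resulting contract (values are illustrative):

    def ffmpeg_color(red, green, blue, alpha):
        # RGB stay 8-bit ints; alpha is already a 0.0-1.0 float.
        return "{0:0>2X}{1:0>2X}{2:0>2X}@{3}".format(red, green, blue, alpha)

    assert ffmpeg_color(255, 255, 255, 0.5) == "FFFFFF@0.5"
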
@@ -1,39 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Collect instances that should be processed and published on DL.
-
-"""
-import os
-
-import pyblish.api
-from ayon_core.pipeline import PublishValidationError
-
-
-class CollectDeadlinePublishableInstances(pyblish.api.InstancePlugin):
-    """Collect instances that should be processed and published on DL.
-
-    Some long running publishes (not just renders) could be offloaded to DL,
-    this plugin compares theirs name against env variable, marks only
-    publishable by farm.
-
-    Triggered only when running only in headless mode, eg on a farm.
-    """
-
-    order = pyblish.api.CollectorOrder + 0.499
-    label = "Collect Deadline Publishable Instance"
-    targets = ["remote"]
-
-    def process(self, instance):
-        self.log.debug("CollectDeadlinePublishableInstances")
-        publish_inst = os.environ.get("OPENPYPE_PUBLISH_SUBSET", '')
-        if not publish_inst:
-            raise PublishValidationError("OPENPYPE_PUBLISH_SUBSET env var "
-                                         "required for remote publishing")
-
-        subset_name = instance.data["subset"]
-        if subset_name == publish_inst:
-            self.log.debug("Publish {}".format(subset_name))
-            instance.data["publish"] = True
-            instance.data["farm"] = False
-        else:
-            self.log.debug("Skipping {}".format(subset_name))
-            instance.data["publish"] = False

@@ -1,131 +0,0 @@
-import os
-import attr
-from datetime import datetime
-
-from ayon_core.pipeline import PublishXmlValidationError
-from ayon_core.lib import is_in_tests
-from openpype_modules.deadline import abstract_submit_deadline
-from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
-
-import pyblish.api
-
-
-@attr.s
-class MayaPluginInfo(object):
-    Build = attr.ib(default=None)  # Don't force build
-    StrictErrorChecking = attr.ib(default=True)
-
-    SceneFile = attr.ib(default=None)  # Input scene
-    Version = attr.ib(default=None)  # Mandatory for Deadline
-    ProjectPath = attr.ib(default=None)
-
-    ScriptJob = attr.ib(default=True)
-    ScriptFilename = attr.ib(default=None)
-
-
-class MayaSubmitRemotePublishDeadline(
-        abstract_submit_deadline.AbstractSubmitDeadline):
-    """Submit Maya scene to perform a local publish in Deadline.
-
-    Publishing in Deadline can be helpful for scenes that publish very slow.
-    This way it can process in the background on another machine without the
-    Artist having to wait for the publish to finish on their local machine.
-
-    Submission is done through the Deadline Web Service. DL then triggers
-    `openpype/scripts/remote_publish.py`.
-
-    Each publishable instance creates its own full publish job.
-
-    Different from `ProcessSubmittedJobOnFarm` which creates publish job
-    depending on metadata json containing context and instance data of
-    rendered files.
-    """
-
-    label = "Submit Scene to Deadline"
-    order = pyblish.api.IntegratorOrder
-    hosts = ["maya"]
-    families = ["publish.farm"]
-    targets = ["local"]
-
-    def process(self, instance):
-
-        # Ensure no errors so far
-        if not (all(result["success"]
-                for result in instance.context.data["results"])):
-            raise PublishXmlValidationError("Publish process has errors")
-
-        if not instance.data["publish"]:
-            self.log.warning("No active instances found. "
-                             "Skipping submission..")
-            return
-
-        super(MayaSubmitRemotePublishDeadline, self).process(instance)
-
-    def get_job_info(self):
-        instance = self._instance
-        context = instance.context
-
-        project_name = instance.context.data["projectName"]
-        scene = instance.context.data["currentFile"]
-        scenename = os.path.basename(scene)
-
-        job_name = "{scene} [PUBLISH]".format(scene=scenename)
-        batch_name = "{code} - {scene}".format(code=project_name,
-                                               scene=scenename)
-
-        if is_in_tests():
-            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
-
-        job_info = DeadlineJobInfo(Plugin="MayaBatch")
-        job_info.BatchName = batch_name
-        job_info.Name = job_name
-        job_info.UserName = context.data.get("user")
-        job_info.Comment = context.data.get("comment", "")
-
-        # use setting for publish job on farm, no reason to have it separately
-        project_settings = context.data["project_settings"]
-        deadline_publish_job_sett = project_settings["deadline"]["publish"]["ProcessSubmittedJobOnFarm"]  # noqa
-        job_info.Department = deadline_publish_job_sett["deadline_department"]
-        job_info.ChunkSize = deadline_publish_job_sett["deadline_chunk_size"]
-        job_info.Priority = deadline_publish_job_sett["deadline_priority"]
-        job_info.Group = deadline_publish_job_sett["deadline_group"]
-        job_info.Pool = deadline_publish_job_sett["deadline_pool"]
-
-        # Include critical environment variables with submission + Session
-        keys = [
-            "FTRACK_API_USER",
-            "FTRACK_API_KEY",
-            "FTRACK_SERVER"
-        ]
-
-        environment = {
-            key: os.environ[key]
-            for key in keys
-            if key in os.environ
-        }
-
-        environment["AYON_PROJECT_NAME"] = project_name
-        environment["AYON_FOLDER_PATH"] = instance.context.data["asset"]
-        environment["AYON_TASK_NAME"] = instance.context.data["task"]
-        environment["AYON_APP_NAME"] = os.environ.get("AYON_APP_NAME")
-        environment["OPENPYPE_PUBLISH_SUBSET"] = instance.data["subset"]
-        environment["AYON_LOG_NO_COLORS"] = "1"
-        environment["AYON_USERNAME"] = instance.context.data["user"]
-        environment["AYON_REMOTE_PUBLISH"] = "1"
-
-        for key, value in environment.items():
-            job_info.EnvironmentKeyValue[key] = value
-
-    def get_plugin_info(self):
-        # Not all hosts can import this module.
-        from maya import cmds
-        scene = self._instance.context.data["currentFile"]
-
-        plugin_info = MayaPluginInfo()
-        plugin_info.SceneFile = scene
-        plugin_info.ScriptFilename = "{OPENPYPE_REPOS_ROOT}/openpype/scripts/remote_publish.py"  # noqa
-        plugin_info.Version = cmds.about(version=True)
-        plugin_info.ProjectPath = cmds.workspace(query=True,
-                                                 rootDirectory=True)
-
-        return attr.asdict(plugin_info)

@@ -330,151 +330,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             self.log.debug("Skipping local instance.")
             return

-        data = instance.data.copy()
-        context = instance.context
-        self.context = context
-        self.anatomy = instance.context.data["anatomy"]
-
-        asset = data.get("asset") or context.data["asset"]
-        subset = data.get("subset")
-
-        start = instance.data.get("frameStart")
-        if start is None:
-            start = context.data["frameStart"]
-
-        end = instance.data.get("frameEnd")
-        if end is None:
-            end = context.data["frameEnd"]
-
-        handle_start = instance.data.get("handleStart")
-        if handle_start is None:
-            handle_start = context.data["handleStart"]
-
-        handle_end = instance.data.get("handleEnd")
-        if handle_end is None:
-            handle_end = context.data["handleEnd"]
-
-        fps = instance.data.get("fps")
-        if fps is None:
-            fps = context.data["fps"]
-
-        if data.get("extendFrames", False):
-            start, end = self._extend_frames(
-                asset,
-                subset,
-                start,
-                end,
-                data["overrideExistingFrame"])
-
-        try:
-            source = data["source"]
-        except KeyError:
-            source = context.data["currentFile"]
-
-        success, rootless_path = (
-            self.anatomy.find_root_template_from_path(source)
-        )
-        if success:
-            source = rootless_path
-
-        else:
-            # `rootless_path` is not set to `source` if none of roots match
-            self.log.warning((
-                "Could not find root path for remapping \"{}\"."
-                " This may cause issues."
-            ).format(source))
-
-        family = "render"
-        if ("prerender" in instance.data["families"] or
-                "prerender.farm" in instance.data["families"]):
-            family = "prerender"
-        families = [family]
-
-        # pass review to families if marked as review
-        do_not_add_review = False
-        if data.get("review"):
-            families.append("review")
-        elif data.get("review") is False:
-            self.log.debug("Instance has review explicitly disabled.")
-            do_not_add_review = True
-
-        instance_skeleton_data = {
-            "family": family,
-            "subset": subset,
-            "families": families,
-            "asset": asset,
-            "frameStart": start,
-            "frameEnd": end,
-            "handleStart": handle_start,
-            "handleEnd": handle_end,
-            "frameStartHandle": start - handle_start,
-            "frameEndHandle": end + handle_end,
-            "comment": instance.data["comment"],
-            "fps": fps,
-            "source": source,
-            "extendFrames": data.get("extendFrames"),
-            "overrideExistingFrame": data.get("overrideExistingFrame"),
-            "pixelAspect": data.get("pixelAspect", 1),
-            "resolutionWidth": data.get("resolutionWidth", 1920),
-            "resolutionHeight": data.get("resolutionHeight", 1080),
-            "multipartExr": data.get("multipartExr", False),
-            "jobBatchName": data.get("jobBatchName", ""),
-            "useSequenceForReview": data.get("useSequenceForReview", True),
-            # map inputVersions `ObjectId` -> `str` so json supports it
-            "inputVersions": list(map(str, data.get("inputVersions", []))),
-            "colorspace": instance.data.get("colorspace"),
-            "stagingDir_persistent": instance.data.get(
-                "stagingDir_persistent", False
-            )
-        }
-
-        # skip locking version if we are creating v01
-        instance_version = instance.data.get("version")  # take this if exists
-        if instance_version != 1:
-            instance_skeleton_data["version"] = instance_version
-
-        # transfer specific families from original instance to new render
-        for item in self.families_transfer:
-            if item in instance.data.get("families", []):
-                instance_skeleton_data["families"] += [item]
-
-        # transfer specific properties from original instance based on
-        # mapping dictionary `instance_transfer`
-        for key, values in self.instance_transfer.items():
-            if key in instance.data.get("families", []):
-                for v in values:
-                    instance_skeleton_data[v] = instance.data.get(v)
-
-        # look into instance data if representations are not having any
-        # which are having tag `publish_on_farm` and include them
-        for repre in instance.data.get("representations", []):
-            staging_dir = repre.get("stagingDir")
-            if staging_dir:
-                success, rootless_staging_dir = (
-                    self.anatomy.find_root_template_from_path(
-                        staging_dir
-                    )
-                )
-                if success:
-                    repre["stagingDir"] = rootless_staging_dir
-                else:
-                    self.log.warning((
-                        "Could not find root path for remapping \"{}\"."
-                        " This may cause issues on farm."
-                    ).format(staging_dir))
-                    repre["stagingDir"] = staging_dir
-
-            if "publish_on_farm" in repre.get("tags"):
-                # create representations attribute of not there
-                if "representations" not in instance_skeleton_data.keys():
-                    instance_skeleton_data["representations"] = []
-
-                instance_skeleton_data["representations"].append(repre)
-
         instances = None
         assert data.get("expectedFiles"), ("Submission from old Pype version"
                                            " - missing expectedFiles")

         anatomy = instance.context.data["anatomy"]

         instance_skeleton_data = create_skeleton_instance(

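Note: the removed block above assembled "instance_skeleton_data" inline; the
new code delegates to a shared create_skeleton_instance() helper, which is
visible at the call site. A minimal sketch of the idea (the sketch's signature
and fields are assumed from the call site, not verified against the repo):

    def create_skeleton_instance_sketch(instance):
        # Build the base payload for the farm publish job from an
        # existing pyblish instance, replacing the ~100-line inline
        # dict assembly that this hunk deletes.
        data = instance.data
        return {
            "family": data.get("family"),
            "subset": data.get("subset"),
            "families": list(data.get("families", [])),
            "frameStart": data.get("frameStart"),
            "frameEnd": data.get("frameEnd"),
        }
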
@@ -8,9 +8,6 @@ import numbers
 import six
 import time

-from ayon_core.settings.lib import (
-    get_local_settings,
-)
 from ayon_core.client import get_project, get_ayon_server_api_connection
 from ayon_core.lib import Logger, get_local_site_id
 from ayon_core.lib.path_templates import (

@@ -453,7 +450,7 @@ class Anatomy(BaseAnatomy):
         return cls._sync_server_addon_cache.data

     @classmethod
-    def _get_studio_roots_overrides(cls, project_name, local_settings=None):
+    def _get_studio_roots_overrides(cls, project_name):
         """This would return 'studio' site override by local settings.

         Notes:

@@ -465,7 +462,6 @@ class Anatomy(BaseAnatomy):

         Args:
             project_name (str): Name of project.
-            local_settings (Optional[dict[str, Any]]): Prepared local settings.

         Returns:
             Union[Dict[str, str], None]): Local root overrides.

@@ -488,11 +484,6 @@ class Anatomy(BaseAnatomy):
             should be returned.
         """

-        # Local settings may be used more than once or may not be used at all
-        # - to avoid slowdowns 'get_local_settings' is not called until it's
-        #   really needed
-        local_settings = None
-
         # First check if sync server is available and enabled
         sync_server = cls.get_sync_server_addon()
         if sync_server is None or not sync_server.enabled:

@@ -503,11 +494,8 @@ class Anatomy(BaseAnatomy):
             # Use sync server to receive active site name
             project_cache = cls._default_site_id_cache[project_name]
             if project_cache.is_outdated:
-                local_settings = get_local_settings()
                 project_cache.update_data(
-                    sync_server.get_active_site_type(
-                        project_name, local_settings
-                    )
+                    sync_server.get_active_site_type(project_name)
                 )
             site_name = project_cache.data

@@ -517,12 +505,12 @@ class Anatomy(BaseAnatomy):
                 # Handle studio root overrides without sync server
                 # - studio root overrides can be done even without sync server
                 roots_overrides = cls._get_studio_roots_overrides(
-                    project_name, local_settings
+                    project_name
                 )
             else:
                 # Ask sync server to get roots overrides
                 roots_overrides = sync_server.get_site_root_overrides(
-                    project_name, site_name, local_settings
+                    project_name, site_name
                 )
             site_cache.update_data(roots_overrides)
         return site_cache.data

@@ -1018,7 +1018,7 @@ def _get_imageio_settings(project_settings, host_name):
         tuple[dict, dict]: image io settings for global and host
     """
     # get image io from global and host_name
-    imageio_global = project_settings["global"]["imageio"]
+    imageio_global = project_settings["core"]["imageio"]
     # host is optional, some might not have any settings
     imageio_host = project_settings.get(host_name, {}).get("imageio", {})

@@ -208,8 +208,8 @@ def install_ayon_plugins(project_name=None, host_name=None):
     platform_name = platform.system().lower()
     project_plugins = (
         project_settings
-        .get("global", {})
-        .get("project_plugins", {})
+        ["core"]
+        ["project_plugins"]
         .get(platform_name)
     ) or []
     for path in project_plugins:

@@ -54,7 +54,7 @@ class LegacyCreator(object):
         )
         global_type_settings = (
             project_settings
-            .get("global", {})
+            .get("core", {})
             .get(plugin_type, {})
         )
         if not global_type_settings and not plugin_type_settings:

@@ -47,10 +47,10 @@ def get_subset_name_template(

     if project_settings is None:
         project_settings = get_project_settings(project_name)
-    tools_settings = project_settings["global"]["tools"]
-    profiles = tools_settings["creator"]["subset_name_profiles"]
+    tools_settings = project_settings["core"]["tools"]
+    profiles = tools_settings["creator"]["product_name_profiles"]
     filtering_criteria = {
-        "families": family,
+        "product_types": family,
         "hosts": host_name,
         "tasks": task_name,
         "task_types": task_type

@@ -59,7 +59,19 @@ def get_subset_name_template(
     matching_profile = filter_profiles(profiles, filtering_criteria)
     template = None
     if matching_profile:
-        template = matching_profile["template"]
+        # TODO remove formatting keys replacement
+        template = (
+            matching_profile["template"]
+            .replace("{task[name]}", "{task}")
+            .replace("{Task[name]}", "{Task}")
+            .replace("{TASK[NAME]}", "{TASK}")
+            .replace("{product[type]}", "{family}")
+            .replace("{Product[type]}", "{Family}")
+            .replace("{PRODUCT[TYPE]}", "{FAMILY}")
+            .replace("{folder[name]}", "{asset}")
+            .replace("{Folder[name]}", "{Asset}")
+            .replace("{FOLDER[NAME]}", "{ASSET}")
+        )

     # Make sure template is set (matching may have empty string)
     if not template:

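Note: the replace chain above maps new AYON template keys back to their legacy
OpenPype aliases while both naming schemes coexist. A small table-driven
equivalent (illustrative only, not the code in this commit):

    LEGACY_KEY_MAP = {
        "{task[name]}": "{task}",
        "{product[type]}": "{family}",
        "{folder[name]}": "{asset}",
    }

    def to_legacy_template(template):
        # Translate new-style placeholders to their legacy aliases.
        for new_key, legacy_key in LEGACY_KEY_MAP.items():
            template = template.replace(new_key, legacy_key)
        return template

    assert to_legacy_template("{folder[name]}_{product[type]}") == "{asset}_{family}"
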
@@ -82,9 +94,9 @@ def get_subset_name(
     """Calculate subset name based on passed context and OpenPype settings.

     Subst name templates are defined in `project_settings/global/tools/creator
-    /subset_name_profiles` where are profiles with host name, family, task name
-    and task type filters. If context does not match any profile then
-    `DEFAULT_SUBSET_TEMPLATE` is used as default template.
+    /product_name_profiles` where are profiles with host name, family,
+    task name and task type filters. If context does not match any profile
+    then `DEFAULT_SUBSET_TEMPLATE` is used as default template.

     That's main reason why so many arguments are required to calculate subset
     name.

@@ -47,7 +47,7 @@ class LoaderPlugin(list):
         )
         global_type_settings = (
             project_settings
-            .get("global", {})
+            .get("core", {})
             .get(plugin_type, {})
         )
         if not global_type_settings and not plugin_type_settings:

@@ -104,7 +104,7 @@ def _list_path_items(folder_structure):
 def get_project_basic_paths(project_name):
     project_settings = get_project_settings(project_name)
     folder_structure = (
-        project_settings["global"]["project_folder_structure"]
+        project_settings["core"]["project_folder_structure"]
     )
     if not folder_structure:
         return []

@@ -60,7 +60,7 @@ def get_template_name_profiles(

     return copy.deepcopy(
         project_settings
-        ["global"]
+        ["core"]
         ["tools"]
         ["publish"]
         ["template_name_profiles"]

@@ -95,7 +95,7 @@ def get_hero_template_name_profiles(

     return copy.deepcopy(
         project_settings
-        ["global"]
+        ["core"]
         ["tools"]
         ["publish"]
         ["hero_template_name_profiles"]

@@ -138,7 +138,7 @@ def get_publish_template_name(
     template = None
     filter_criteria = {
         "hosts": host_name,
-        "families": family,
+        "product_types": family,
         "task_names": task_name,
         "task_types": task_type,
     }

@@ -383,7 +383,7 @@ def get_plugin_settings(plugin, project_settings, log, category=None):

     # TODO: change after all plugins are moved one level up
     if category_from_file in ("ayon_core", "openpype"):
-        category_from_file = "global"
+        category_from_file = "core"

     try:
         return (

@@ -485,26 +485,6 @@ def filter_pyblish_plugins(plugins):
             plugins.remove(plugin)


-def remote_publish(log):
-    """Loops through all plugins, logs to console. Used for tests.
-
-    Args:
-        log (Logger)
-    """
-
-    # Error exit as soon as any error occurs.
-    error_format = "Failed {plugin.__name__}: {error}\n{error.traceback}"
-
-    for result in pyblish.util.publish_iter():
-        if not result["error"]:
-            continue
-
-        error_message = error_format.format(**result)
-        log.error(error_message)
-        # 'Fatal Error: ' is because of Deadline
-        raise RuntimeError("Fatal Error: {}".format(error_message))
-
-
 def get_errored_instances_from_context(context, plugin=None):
     """Collect failed instances from pyblish context.

@@ -744,7 +724,7 @@ def get_custom_staging_dir_info(project_name, host_name, family, task_name,
         ValueError - if misconfigured template should be used
     """
     settings = project_settings or get_project_settings(project_name)
-    custom_staging_dir_profiles = (settings["global"]
+    custom_staging_dir_profiles = (settings["core"]
                                    ["tools"]
                                    ["publish"]
                                    ["custom_staging_dir_profiles"])

@@ -16,7 +16,7 @@ def get_versioning_start(
     project_settings = get_project_settings(project_name)

     version_start = 1
-    settings = project_settings["global"]
+    settings = project_settings["core"]
     profiles = settings.get("version_start_category", {}).get("profiles", [])

     if not profiles:

@@ -64,7 +64,7 @@ def is_workfile_lock_enabled(host_name, project_name, project_setting=None):
     project_setting = get_project_settings(project_name)
     workfile_lock_profiles = (
         project_setting
-        ["global"]
+        ["core"]
         ["tools"]
         ["Workfiles"]
         ["workfile_lock_profiles"])

@@ -72,7 +72,7 @@ def get_workfile_template_key(
     try:
         profiles = (
             project_settings
-            ["global"]
+            ["core"]
            ["tools"]
            ["Workfiles"]
            ["workfile_template_profiles"]

@@ -507,7 +507,7 @@ def create_workdir_extra_folders(

     # Load extra folders profiles
     extra_folders_profiles = (
-        project_settings["global"]["tools"]["Workfiles"]["extra_folders"]
+        project_settings["core"]["tools"]["Workfiles"]["extra_folders"]
     )
     # Skip if are empty
     if not extra_folders_profiles:

@@ -40,7 +40,7 @@ class CleanUp(pyblish.api.InstancePlugin):
     active = True

     # Presets
-    paterns = None  # list of regex paterns
+    patterns = None  # list of regex patterns
     remove_temp_renders = True

     def process(self, instance):

@@ -115,10 +115,10 @@ class CleanUp(pyblish.api.InstancePlugin):
         src = os.path.normpath(src)
         dest = os.path.normpath(dest)

-        # add src dir into clearing dir paths (regex paterns)
+        # add src dir into clearing dir paths (regex patterns)
         transfers_dirs.append(os.path.dirname(src))

-        # add dest dir into clearing dir paths (regex paterns)
+        # add dest dir into clearing dir paths (regex patterns)
         transfers_dirs.append(os.path.dirname(dest))

         if src in skip_cleanup_filepaths:

@@ -141,13 +141,13 @@ class CleanUp(pyblish.api.InstancePlugin):
             # add dir for cleanup
             dirnames.append(os.path.dirname(src))

-        # clean by regex paterns
+        # clean by regex patterns
         # make unique set
         transfers_dirs = set(transfers_dirs)

         self.log.debug("__ transfers_dirs: `{}`".format(transfers_dirs))
-        self.log.debug("__ self.paterns: `{}`".format(self.paterns))
-        if self.paterns:
+        self.log.debug("__ self.patterns: `{}`".format(self.patterns))
+        if self.patterns:
             files = list()
             # get list of all available content of dirs
             for _dir in transfers_dirs:

@@ -159,14 +159,14 @@ class CleanUp(pyblish.api.InstancePlugin):

             self.log.debug("__ files: `{}`".format(files))

-            # remove all files which match regex patern
+            # remove all files which match regex pattern
             for f in files:
                 if os.path.normpath(f) in skip_cleanup_filepaths:
                     continue

-                for p in self.paterns:
-                    patern = re.compile(p)
-                    if not patern.findall(f):
+                for p in self.patterns:
+                    pattern = re.compile(p)
+                    if not pattern.findall(f):
                         continue
                     if not os.path.exists(f):
                         continue

@@ -43,7 +43,7 @@ class CollectInstanceCommentDef(

     @classmethod
     def apply_settings(cls, project_setting, _):
-        plugin_settings = project_setting["global"]["publish"].get(
+        plugin_settings = project_setting["core"]["publish"].get(
             "collect_comment_per_instance"
         )
         if not plugin_settings:

@@ -1,7 +1,5 @@
 import pyblish.api

-from bson.objectid import ObjectId
-
 from ayon_core.client import get_representations

@@ -65,8 +65,8 @@ class ExtractBurnin(publish.Extractor):
     # Default options for burnins for cases that are not set in presets.
     default_options = {
         "font_size": 42,
-        "font_color": [255, 255, 255, 255],
-        "bg_color": [0, 0, 0, 127],
+        "font_color": [255, 255, 255, 1.0],
+        "bg_color": [0, 0, 0, 0.5],
         "bg_padding": 5,
         "x_offset": 5,
         "y_offset": 5

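Note: together with the convert_color_values() change earlier in this commit,
alpha moves from an 8-bit integer (0-255) to a float fraction (0.0-1.0) while
RGB stays 8-bit. A quick sanity check of the two conventions (illustrative):

    old_bg = [0, 0, 0, 127]   # alpha as 8-bit int, roughly 50% opacity
    new_bg = [0, 0, 0, 0.5]   # alpha as float fraction

    # The two encodings describe (nearly) the same opacity:
    assert abs(old_bg[3] / 255.0 - new_bg[3]) < 0.01
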
@@ -96,7 +96,20 @@ class ExtractBurnin(publish.Extractor):
                 instance.data["representations"].remove(repre)

     def _get_burnins_per_representations(self, instance, src_burnin_defs):
-        self.log.debug("Filtering of representations and their burnins starts")
+        """
+
+        Args:
+            instance (pyblish.api.Instance): Pyblish instance.
+            src_burnin_defs (list): Burnin definitions.
+
+        Returns:
+            list[tuple[dict, list]]: List of tuples containing representation
+                and its burnin definitions.
+
+        """
+        self.log.debug(
+            "Filtering of representations and their burnins starts"
+        )

         filtered_repres = []
         repres = instance.data.get("representations") or []

@@ -111,16 +124,13 @@ class ExtractBurnin(publish.Extractor):
             )

             burnin_defs = copy.deepcopy(src_burnin_defs)
-            self.log.debug(
-                "burnin_defs.keys(): {}".format(burnin_defs.keys())
-            )

             # Filter output definition by `burnin` represetation key
-            repre_linked_burnins = {
-                name: output
-                for name, output in burnin_defs.items()
-                if name in repre_burnin_links
-            }
+            repre_linked_burnins = [
+                burnin_def
+                for burnin_def in burnin_defs
+                if burnin_def["name"] in repre_burnin_links
+            ]
             self.log.debug(
                 "repre_linked_burnins: {}".format(repre_linked_burnins)
             )

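Note: burnin definitions change shape throughout this file, from a dict keyed
by filename suffix to a list of dicts that each carry their own "name". A
minimal illustration of the migration (data is made up):

    old_defs = {"_burnin": {"TOP_LEFT": "{task[name]}"}}

    # New shape: the dict key becomes an explicit "name" field.
    new_defs = [{"name": "_burnin", "TOP_LEFT": "{task[name]}"}]

    for burnin_def in new_defs:
        filename_suffix = burnin_def["name"]
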
@@ -154,19 +164,21 @@ class ExtractBurnin(publish.Extractor):

         filtering_criteria = {
             "hosts": host_name,
-            "families": family,
+            "product_types": family,
+            "product_names": subset,
             "task_names": task_name,
             "task_types": task_type,
-            "subset": subset
         }
-        profile = filter_profiles(self.profiles, filtering_criteria,
-                                  logger=self.log)
-
+        profile = filter_profiles(
+            self.profiles,
+            filtering_criteria,
+            logger=self.log
+        )
         if not profile:
             self.log.debug((
                 "Skipped instance. None of profiles in presets are for"
-                " Host: \"{}\" | Families: \"{}\" | Task \"{}\""
-                " | Task type \"{}\" | Subset \"{}\" "
+                " Host: \"{}\" | Product type: \"{}\" | Task name \"{}\""
+                " | Task type \"{}\" | Product name \"{}\" "
             ).format(host_name, family, task_name, task_type, subset))
             return

@@ -175,7 +187,7 @@ class ExtractBurnin(publish.Extractor):
         if not burnin_defs:
             self.log.debug((
                 "Skipped instance. Burnin definitions are not set for profile"
-                " Host: \"{}\" | Families: \"{}\" | Task \"{}\""
+                " Host: \"{}\" | Product type: \"{}\" | Task name \"{}\""
                 " | Profile \"{}\""
             ).format(host_name, family, task_name, profile))
             return

@@ -275,7 +287,8 @@ class ExtractBurnin(publish.Extractor):
             # it in review?
             # burnin_data["fps"] = fps

-            for filename_suffix, burnin_def in repre_burnin_defs.items():
+            for burnin_def in repre_burnin_defs:
+                filename_suffix = burnin_def["name"]
                 new_repre = copy.deepcopy(repre)
                 new_repre["stagingDir"] = src_repre_staging_dir

@@ -288,16 +301,28 @@ class ExtractBurnin(publish.Extractor):
                 burnin_values = {}
                 for key in self.positions:
                     value = burnin_def.get(key)
-                    if value:
-                        burnin_values[key] = value.replace(
-                            "{task}", "{task[name]}"
-                        )
+                    if not value:
+                        continue
+                    # TODO remove replacements
+                    burnin_values[key] = (
+                        value
+                        .replace("{task}", "{task[name]}")
+                        .replace("{product[name]}", "{subset}")
+                        .replace("{Product[name]}", "{Subset}")
+                        .replace("{PRODUCT[NAME]}", "{SUBSET}")
+                        .replace("{product[type]}", "{family}")
+                        .replace("{Product[type]}", "{Family}")
+                        .replace("{PRODUCT[TYPE]}", "{FAMILY}")
+                        .replace("{folder[name]}", "{asset}")
+                        .replace("{Folder[name]}", "{Asset}")
+                        .replace("{FOLDER[NAME]}", "{ASSET}")
+                    )

                 # Remove "delete" tag from new representation
                 if "delete" in new_repre["tags"]:
                     new_repre["tags"].remove("delete")

-                if len(repre_burnin_defs.keys()) > 1:
+                if len(repre_burnin_defs) > 1:
                     # Update name and outputName to be
                     # able have multiple outputs in case of more burnin presets
                     # Join previous "outputName" with filename suffix

@@ -401,8 +426,7 @@ class ExtractBurnin(publish.Extractor):
         bg_color_hex = "#{0:0>2X}{1:0>2X}{2:0>2X}".format(
             bg_red, bg_green, bg_blue
         )
-        bg_color_alpha = float(bg_alpha) / 255
-        burnin_options["bg_opacity"] = bg_color_alpha
+        burnin_options["bg_opacity"] = bg_alpha
         burnin_options["bg_color"] = bg_color_hex

         # FG Color

@@ -412,8 +436,7 @@ class ExtractBurnin(publish.Extractor):
         fg_color_hex = "#{0:0>2X}{1:0>2X}{2:0>2X}".format(
             fg_red, fg_green, fg_blue
         )
-        fg_color_alpha = float(fg_alpha) / 255
-        burnin_options["opacity"] = fg_color_alpha
+        burnin_options["opacity"] = fg_alpha
         burnin_options["font_color"] = fg_color_hex

         # Define font filepath

@@ -543,15 +566,16 @@ class ExtractBurnin(publish.Extractor):
         Burnin definitions without tags filter are marked as valid.

         Args:
-            outputs (list): Contain list of burnin definitions from presets.
+            burnin_defs (list): Burnin definitions.
             tags (list): Tags of processed representation.

         Returns:
             list: Containg all burnin definitions matching entered tags.

         """
-        filtered_burnins = {}
+        filtered_burnins = []
         repre_tags_low = set(tag.lower() for tag in tags)
-        for filename_suffix, burnin_def in burnin_defs.items():
+        for burnin_def in burnin_defs:
             valid = True
             tag_filters = burnin_def["filter"]["tags"]
             if tag_filters:

@@ -561,8 +585,7 @@ class ExtractBurnin(publish.Extractor):
                 valid = bool(repre_tags_low & tag_filters_low)

             if valid:
-                filtered_burnins[filename_suffix] = burnin_def
-
+                filtered_burnins.append(burnin_def)
         return filtered_burnins

     def input_output_paths(

@@ -724,7 +747,7 @@ class ExtractBurnin(publish.Extractor):
         Returns:
             list: Containg all valid output definitions.
         """
-        filtered_burnin_defs = {}
+        filtered_burnin_defs = []

         burnin_defs = profile.get("burnins")
         if not burnin_defs:

@@ -732,13 +755,11 @@ class ExtractBurnin(publish.Extractor):

         families = self.families_from_instance(instance)

-        for filename_suffix, orig_burnin_def in burnin_defs.items():
+        for orig_burnin_def in burnin_defs:
             burnin_def = copy.deepcopy(orig_burnin_def)
-            def_filter = burnin_def.get("filter", None) or {}
-            for key in ("families", "tags"):
-                if key not in def_filter:
-                    def_filter[key] = []
+            filename_suffix = burnin_def["name"]
+
+            def_filter = burnin_def["filter"]
             families_filters = def_filter["families"]
             if not self.families_filter_validation(
                 families, families_filters

@@ -752,10 +773,13 @@ class ExtractBurnin(publish.Extractor):
                 continue

             # Burnin values
+            new_burnin_def = {}
             burnin_values = {}
             for key, value in tuple(burnin_def.items()):
                 key_low = key.lower()
-                if key_low in self.positions and value:
+                if key_low not in self.positions:
+                    new_burnin_def[key] = value
+                elif value:
                     burnin_values[key_low] = value

             # Skip processing if burnin values are not set

@@ -767,9 +791,9 @@ class ExtractBurnin(publish.Extractor):
                 ).format(filename_suffix, str(orig_burnin_def)))
                 continue

-            burnin_values["filter"] = def_filter
+            new_burnin_def.update(burnin_values)

-            filtered_burnin_defs[filename_suffix] = burnin_values
+            filtered_burnin_defs.append(new_burnin_def)

             self.log.debug((
                 "Burnin definition \"{}\" passed first filtering."

@@ -81,6 +81,7 @@ class ExtractOIIOTranscode(publish.Extractor):
         if not profile:
             return

+        profile_output_defs = profile["outputs"]
         new_representations = []
         repres = instance.data["representations"]
         for idx, repre in enumerate(list(repres)):

@@ -98,7 +99,8 @@ class ExtractOIIOTranscode(publish.Extractor):
                 self.log.warning("Config file doesn't exist, skipping")
                 continue

-            for output_name, output_def in profile.get("outputs", {}).items():
+            for output_def in profile_output_defs:
+                output_name = output_def["name"]
                 new_repre = copy.deepcopy(repre)

                 original_staging_dir = new_repre["stagingDir"]

@@ -318,10 +320,10 @@ class ExtractOIIOTranscode(publish.Extractor):
         subset = instance.data["subset"]
         filtering_criteria = {
             "hosts": host_name,
-            "families": family,
+            "product_types": family,
+            "product_names": subset,
             "task_names": task_name,
             "task_types": task_type,
-            "subsets": subset
         }
         profile = filter_profiles(self.profiles, filtering_criteria,
                                   logger=self.log)

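Note: profile filtering keys are renamed across extractors in this commit:
"families" becomes "product_types" and "subset(s)" becomes "product_names",
matching AYON's folder/product vocabulary. Roughly (the mapping helper is
shown only for illustration):

    OLD_TO_NEW_CRITERIA = {
        "families": "product_types",
        "subsets": "product_names",
    }

    def upgrade_criteria(criteria):
        # Rewrite legacy filter keys to the AYON naming.
        return {
            OLD_TO_NEW_CRITERIA.get(key, key): value
            for key, value in criteria.items()
        }
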
@@ -329,8 +331,8 @@ class ExtractOIIOTranscode(publish.Extractor):
         if not profile:
             self.log.debug((
                 "Skipped instance. None of profiles in presets are for"
-                " Host: \"{}\" | Families: \"{}\" | Task \"{}\""
-                " | Task type \"{}\" | Subset \"{}\" "
+                " Host: \"{}\" | Product types: \"{}\" | Task \"{}\""
+                " | Task type \"{}\" | Product names: \"{}\" "
             ).format(host_name, family, task_name, task_type, subset))

         return profile

@@ -1280,14 +1280,11 @@ class ExtractReview(pyblish.api.InstancePlugin):
                 "FFprobe couldn't read resolution from input file: \"{}\""
             ).format(full_input_path_single_file))

-        # NOTE Setting only one of `width` or `heigth` is not allowed
+        # NOTE Setting only one of `width` or `height` is not allowed
         # - settings value can't have None but has value of 0
-        output_width = (
-            output_def.get("output_width") or output_width or None
-        )
-        output_height = (
-            output_def.get("output_height") or output_height or None
-        )
+        output_width = output_def["width"] or output_width or None
+        output_height = output_def["height"] or output_height or None

         # Force to use input resolution if output resolution was not defined
         # in settings. Resolution from instance is not used when
         # 'use_input_res' is set to 'True'.

@@ -42,15 +42,27 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):

     integrate_thumbnail = False
     target_size = {
-        "type": "resize",
-        "width": 1920,
-        "height": 1080
+        "type": "source",
+        "resize": {
+            "width": 1920,
+            "height": 1080
+        }
     }
-    background_color = None
+    background_color = (0, 0, 0, 0.0)
     duration_split = 0.5
     # attribute presets from settings
-    oiiotool_defaults = None
-    ffmpeg_args = None
+    oiiotool_defaults = {
+        "type": "colorspace",
+        "colorspace": "color_picking",
+        "display_and_view": {
+            "display": "default",
+            "view": "sRGB"
+        }
+    }
+    ffmpeg_args = {
+        "input": [],
+        "output": []
+    }
     product_names = []

     def process(self, instance):

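The thumbnail defaults are now nested: `target_size` keeps the resize values under a "resize" key and is bypassed entirely when "type" is "source", while `oiiotool_defaults` groups display and view under "display_and_view". A small sketch of reading the new shape, mirroring the hunks below:

    target_size = {"type": "source", "resize": {"width": 1920, "height": 1080}}
    if target_size["type"] != "source":
        resize = target_size["resize"]
        width, height = resize["width"], resize["height"]
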
@@ -369,7 +381,6 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):

         repre_display = colorspace_data.get("display")
         repre_view = colorspace_data.get("view")
         oiio_default_type = None
         oiio_default_display = None
         oiio_default_view = None
         oiio_default_colorspace = None

@@ -387,11 +398,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
         # oiiotool_defaults
         elif self.oiiotool_defaults:
             oiio_default_type = self.oiiotool_defaults["type"]
-            if "colorspace" in oiio_default_type:
+            if "colorspace" == oiio_default_type:
                 oiio_default_colorspace = self.oiiotool_defaults["colorspace"]
             else:
-                oiio_default_display = self.oiiotool_defaults["display"]
-                oiio_default_view = self.oiiotool_defaults["view"]
+                display_and_view = self.oiiotool_defaults["display_and_view"]
+                oiio_default_display = display_and_view["display"]
+                oiio_default_view = display_and_view["view"]

         try:
             convert_colorspace(

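Note the comparison fix: `"colorspace" in oiio_default_type` is a substring test on the type string and would also match any longer type name containing it, while `==` requires an exact match:

    oiio_default_type = "display_and_view"
    "colorspace" in oiio_default_type    # substring test, fragile
    "colorspace" == oiio_default_type    # exact match, explicit
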
@@ -507,11 +519,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
         input_path,
     ):
         # get settings
-        if self.target_size.get("type") == "source":
+        if self.target_size["type"] == "source":
             return []

-        target_width = self.target_size["width"]
-        target_height = self.target_size["height"]
+        resize = self.target_size["resize"]
+        target_width = resize["width"]
+        target_height = resize["height"]

         # form arg string per application
         return get_rescaled_command_arguments(

@@ -6,7 +6,6 @@ import datetime

 import clique
 import six
-from bson.objectid import ObjectId
 import pyblish.api

 from ayon_core.client.operations import (

@@ -988,7 +987,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
         """

         return {
-            "_id": ObjectId(),
             "path": self.get_rootless_path(anatomy, path),
             "size": os.path.getsize(path),
             "hash": source_hash(path),

@@ -17,24 +17,24 @@ from ayon_core.lib import (
 )


-class IntegrateSubsetGroup(pyblish.api.InstancePlugin):
+class IntegrateProductGroup(pyblish.api.InstancePlugin):
     """Integrate Subset Group for publish."""

     # Run after CollectAnatomyInstanceData
     order = pyblish.api.IntegratorOrder - 0.1
-    label = "Subset Group"
+    label = "Product Group"

     # Attributes set by settings
-    subset_grouping_profiles = None
+    product_grouping_profiles = None

     def process(self, instance):
         """Look into subset group profiles set by settings.

-        Attribute 'subset_grouping_profiles' is defined by settings.
+        Attribute 'product_grouping_profiles' is defined by settings.
         """

-        # Skip if 'subset_grouping_profiles' is empty
-        if not self.subset_grouping_profiles:
+        # Skip if 'product_grouping_profiles' is empty
+        if not self.product_grouping_profiles:
             return

         if instance.data.get("subsetGroup"):

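The grouping profiles follow the same product terminology as the criteria returned further down. A hedged sketch of how such a profile could look and be matched (the profile content is hypothetical; `filter_profiles` is the helper this plugin already uses):

    product_grouping_profiles = [{
        "product_types": ["render"],
        "hosts": ["maya"],
        "task_types": [],
        "tasks": [],
        "template": "{family}{Task}",
    }]
    criteria = {
        "product_types": "render",
        "tasks": "lighting",
        "hosts": "maya",
        "task_types": "Lighting",
    }
    profile = filter_profiles(product_grouping_profiles, criteria)
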
@@ -47,7 +47,7 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin):
         # Skip if there is no matching profile
         filter_criteria = self.get_profile_filter_criteria(instance)
         profile = filter_profiles(
-            self.subset_grouping_profiles,
+            self.product_grouping_profiles,
             filter_criteria,
             logger=self.log
         )

@@ -58,7 +58,7 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin):
         template = profile["template"]

         fill_pairs = prepare_template_data({
-            "family": filter_criteria["families"],
+            "family": filter_criteria["product_types"],
             "task": filter_criteria["tasks"],
             "host": filter_criteria["hosts"],
             "subset": instance.data["subset"],

@@ -91,7 +91,7 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin):

         # Return filter criteria
         return {
-            "families": anatomy_data["family"],
+            "product_types": anatomy_data["family"],
             "tasks": task.get("name"),
             "hosts": instance.context.data["hostName"],
             "task_types": task.get("type")

@@ -1,12 +0,0 @@
-try:
-    from ayon_core.lib import Logger
-    from ayon_core.pipeline.publish.lib import remote_publish
-except ImportError as exc:
-    # Ensure Deadline fails by outputting an error that contains "Fatal Error:"
-    raise ImportError("Fatal Error: %s" % exc)
-
-
-if __name__ == "__main__":
-    # Perform remote publish with thorough error checking
-    log = Logger.get_logger(__name__)
-    remote_publish(log)

@@ -7,7 +7,6 @@ from .lib import (
     get_system_settings,
     get_project_settings,
     get_current_project_settings,
     get_local_settings,
 )
 from .ayon_settings import get_ayon_settings

@@ -20,7 +19,6 @@ __all__ = (
     "get_system_settings",
     "get_project_settings",
     "get_current_project_settings",
     "get_local_settings",

     "get_ayon_settings",
 )

@@ -360,178 +360,6 @@ def _convert_royalrender_project_settings(ayon_settings, output):
     }


-def _convert_global_project_settings(ayon_settings, output, default_settings):
-    if "core" not in ayon_settings:
-        return
-
-    ayon_core = ayon_settings["core"]
-
-    # Publish conversion
-    ayon_publish = ayon_core["publish"]
-
-    # ExtractThumbnail plugin
-    ayon_extract_thumbnail = ayon_publish["ExtractThumbnail"]
-    # fix display and view at oiio defaults
-    ayon_default_oiio = copy.deepcopy(
-        ayon_extract_thumbnail["oiiotool_defaults"])
-    display_and_view = ayon_default_oiio.pop("display_and_view")
-    ayon_default_oiio["display"] = display_and_view["display"]
-    ayon_default_oiio["view"] = display_and_view["view"]
-    ayon_extract_thumbnail["oiiotool_defaults"] = ayon_default_oiio
-    # fix target size
-    ayon_default_resize = copy.deepcopy(ayon_extract_thumbnail["target_size"])
-    resize = ayon_default_resize.pop("resize")
-    ayon_default_resize["width"] = resize["width"]
-    ayon_default_resize["height"] = resize["height"]
-    ayon_extract_thumbnail["target_size"] = ayon_default_resize
-    # fix background color
-    ayon_extract_thumbnail["background_color"] = _convert_color(
-        ayon_extract_thumbnail["background_color"]
-    )
-
-    # ExtractOIIOTranscode plugin
-    extract_oiio_transcode = ayon_publish["ExtractOIIOTranscode"]
-    extract_oiio_transcode_profiles = extract_oiio_transcode["profiles"]
-    for profile in extract_oiio_transcode_profiles:
-        new_outputs = {}
-        name_counter = {}
-        if "product_names" in profile:
-            profile["subsets"] = profile.pop("product_names")
-        for profile_output in profile["outputs"]:
-            if "name" in profile_output:
-                name = profile_output.pop("name")
-            else:
-                # Backwards compatibility for setting without 'name' in model
-                name = profile_output["extension"]
-            if name in new_outputs:
-                name_counter[name] += 1
-                name = "{}_{}".format(name, name_counter[name])
-            else:
-                name_counter[name] = 0
-
-            new_outputs[name] = profile_output
-        profile["outputs"] = new_outputs
-
-    # Extract Burnin plugin
-    extract_burnin = ayon_publish["ExtractBurnin"]
-    extract_burnin_options = extract_burnin["options"]
-    for color_key in ("font_color", "bg_color"):
-        extract_burnin_options[color_key] = _convert_color(
-            extract_burnin_options[color_key]
-        )
-
-    for profile in extract_burnin["profiles"]:
-        extract_burnin_defs = profile["burnins"]
-        if "product_names" in profile:
-            profile["subsets"] = profile.pop("product_names")
-            profile["families"] = profile.pop("product_types")
-
-        for burnin_def in extract_burnin_defs:
-            for key in (
-                "TOP_LEFT",
-                "TOP_CENTERED",
-                "TOP_RIGHT",
-                "BOTTOM_LEFT",
-                "BOTTOM_CENTERED",
-                "BOTTOM_RIGHT",
-            ):
-                burnin_def[key] = (
-                    burnin_def[key]
-                    .replace("{product[name]}", "{subset}")
-                    .replace("{Product[name]}", "{Subset}")
-                    .replace("{PRODUCT[NAME]}", "{SUBSET}")
-                    .replace("{product[type]}", "{family}")
-                    .replace("{Product[type]}", "{Family}")
-                    .replace("{PRODUCT[TYPE]}", "{FAMILY}")
-                    .replace("{folder[name]}", "{asset}")
-                    .replace("{Folder[name]}", "{Asset}")
-                    .replace("{FOLDER[NAME]}", "{ASSET}")
-                )
-        profile["burnins"] = {
-            extract_burnin_def.pop("name"): extract_burnin_def
-            for extract_burnin_def in extract_burnin_defs
-        }
-
-    if "IntegrateProductGroup" in ayon_publish:
-        subset_group = ayon_publish.pop("IntegrateProductGroup")
-        subset_group_profiles = subset_group.pop("product_grouping_profiles")
-        for profile in subset_group_profiles:
-            profile["families"] = profile.pop("product_types")
-        subset_group["subset_grouping_profiles"] = subset_group_profiles
-        ayon_publish["IntegrateSubsetGroup"] = subset_group
-
-    # Cleanup plugin
-    ayon_cleanup = ayon_publish["CleanUp"]
-    if "patterns" in ayon_cleanup:
-        ayon_cleanup["paterns"] = ayon_cleanup.pop("patterns")
-
-    # Project root settings - json string to dict
-    ayon_core["project_environments"] = json.loads(
-        ayon_core["project_environments"]
-    )
-    ayon_core["project_folder_structure"] = json.dumps(json.loads(
-        ayon_core["project_folder_structure"]
-    ))
-
-    # Tools settings
-    ayon_tools = ayon_core["tools"]
-    ayon_create_tool = ayon_tools["creator"]
-    if "product_name_profiles" in ayon_create_tool:
-        product_name_profiles = ayon_create_tool.pop("product_name_profiles")
-        for profile in product_name_profiles:
-            profile["families"] = profile.pop("product_types")
-        ayon_create_tool["subset_name_profiles"] = product_name_profiles
-
-    for profile in ayon_create_tool["subset_name_profiles"]:
-        template = profile["template"]
-        profile["template"] = (
-            template
-            .replace("{task[name]}", "{task}")
-            .replace("{Task[name]}", "{Task}")
-            .replace("{TASK[NAME]}", "{TASK}")
-            .replace("{product[type]}", "{family}")
-            .replace("{Product[type]}", "{Family}")
-            .replace("{PRODUCT[TYPE]}", "{FAMILY}")
-            .replace("{folder[name]}", "{asset}")
-            .replace("{Folder[name]}", "{Asset}")
-            .replace("{FOLDER[NAME]}", "{ASSET}")
-        )
-
-    product_smart_select_key = "families_smart_select"
-    if "product_types_smart_select" in ayon_create_tool:
-        product_smart_select_key = "product_types_smart_select"
-
-    new_smart_select_families = {
-        item["name"]: item["task_names"]
-        for item in ayon_create_tool.pop(product_smart_select_key)
-    }
-    ayon_create_tool["families_smart_select"] = new_smart_select_families
-
-    ayon_loader_tool = ayon_tools["loader"]
-    if "product_type_filter_profiles" in ayon_loader_tool:
-        product_type_filter_profiles = (
-            ayon_loader_tool.pop("product_type_filter_profiles"))
-        for profile in product_type_filter_profiles:
-            profile["filter_families"] = profile.pop("filter_product_types")
-
-        ayon_loader_tool["family_filter_profiles"] = (
-            product_type_filter_profiles)
-
-    ayon_publish_tool = ayon_tools["publish"]
-    for profile in ayon_publish_tool["hero_template_name_profiles"]:
-        if "product_types" in profile:
-            profile["families"] = profile.pop("product_types")
-
-    for profile in ayon_publish_tool["template_name_profiles"]:
-        if "product_types" in profile:
-            profile["families"] = profile.pop("product_types")
-
-    ayon_core["sync_server"] = (
-        default_settings["global"]["sync_server"]
-    )
-    output["global"] = ayon_core
-
-
 def convert_project_settings(ayon_settings, default_settings):
     default_settings = copy.deepcopy(default_settings)
     output = {}

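Most of the removed converter translated AYON's product-based settings back to the legacy OpenPype keys. One detail worth keeping in mind if the conversion is ever needed again is how duplicate output names were disambiguated with a counter suffix, shown here as a standalone sketch of the removed logic:

    new_outputs, name_counter = {}, {}
    for output in [{"name": "jpg"}, {"name": "jpg"}]:
        name = output["name"]
        if name in new_outputs:
            name_counter[name] += 1
            name = "{}_{}".format(name, name_counter[name])
        else:
            name_counter[name] = 0
        new_outputs[name] = output
    # -> keys "jpg" and "jpg_1"
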
@@ -541,8 +369,6 @@ def convert_project_settings(ayon_settings, default_settings):

     _convert_royalrender_project_settings(ayon_settings, output)

-    _convert_global_project_settings(ayon_settings, output, default_settings)
-
     for key, value in ayon_settings.items():
         if key not in output:
             output[key] = value

@@ -48,11 +48,6 @@ def clear_metadata_from_settings(values):
             clear_metadata_from_settings(item)


-def get_local_settings():
-    # TODO implement ayon implementation
-    return {}
-
-
 def load_openpype_default_settings():
     """Load openpype default settings."""
     return load_jsons_from_dir(DEFAULTS_DIR)

@@ -203,39 +198,17 @@ def merge_overrides(source_dict, override_dict):
 def get_site_local_overrides(project_name, site_name, local_settings=None):
     """Site overrides from local settings for passed project and site name.

+    Deprecated:
+        This function is not implemented for AYON and will be removed.
+
     Args:
         project_name (str): For which project are overrides.
         site_name (str): For which site are overrides needed.
         local_settings (dict): Preloaded local settings. They are loaded
             automatically if not passed.
     """
-    # Check if local settings were passed
-    if local_settings is None:
-        local_settings = get_local_settings()
-
-    output = {}
-
-    # Skip if local settings are empty
-    if not local_settings:
-        return output
-
-    local_project_settings = local_settings.get("projects") or {}
-
-    # Prepare overrides for entered project and for default project
-    project_locals = None
-    if project_name:
-        project_locals = local_project_settings.get(project_name)
-    default_project_locals = local_project_settings.get(DEFAULT_PROJECT_KEY)
-
-    # First load and use local settings from default project
-    if default_project_locals and site_name in default_project_locals:
-        output.update(default_project_locals[site_name])
-
-    # Apply project specific local settings if there are any
-    if project_locals and site_name in project_locals:
-        output.update(project_locals[site_name])
-
-    return output
+    return {}


 def get_current_project_settings():

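With the body reduced to a stub, callers receive an empty mapping until the sitesync addon supplies the overrides, so existing call sites degrade to a no-op. A small sketch of the caller-side behavior (the project and site names are hypothetical):

    overrides = get_site_local_overrides("my_project", "studio")  # -> {}
    if not overrides:
        pass  # nothing to remap, skip dirmap work
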
@@ -377,23 +377,25 @@ class CreatorWindow(QtWidgets.QDialog):

         self._creators_model.reset()

-        pype_project_setting = (
+        product_types_smart_select = (
             get_current_project_settings()
             ["global"]
             ["tools"]
             ["creator"]
-            ["families_smart_select"]
+            ["product_types_smart_select"]
         )
         current_index = None
         family = None
         task_name = get_current_task_name() or None
         lowered_task_name = task_name.lower()
         if task_name:
-            for _family, _task_names in pype_project_setting.items():
-                _low_task_names = {name.lower() for name in _task_names}
+            for smart_item in product_types_smart_select:
+                _low_task_names = {
+                    name.lower() for name in smart_item["task_names"]
+                }
                 for _task_name in _low_task_names:
                     if _task_name in lowered_task_name:
-                        family = _family
+                        family = smart_item["name"]
                         break
                 if family:
                     break

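The smart-select setting changes from a dict mapping a family to task names into a list of items with "name" and "task_names" keys; note the matching remains a substring test against the lowered task name. A compact sketch of the new lookup (the item and task values are hypothetical):

    smart_items = [{"name": "render", "task_names": ["Light", "Render"]}]
    task = "rendering"  # hypothetical current task name, lowered
    family = next(
        (item["name"] for item in smart_items
         if any(name.lower() in task for name in item["task_names"])),
        None,
    )  # -> "render"
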
@@ -1,5 +1,4 @@
 import os
-from ayon_core.settings import get_local_settings

 # Constant key under which local settings are stored
 LOCAL_EXPERIMENTAL_KEY = "experimental_tools"

@@ -89,9 +88,13 @@ class ExperimentalTools:
                 "New publisher",
                 "Combined creation and publishing into one tool.",
                 self._show_publisher,
-                hosts_filter=["blender", "maya", "nuke", "celaction", "flame",
-                              "fusion", "harmony", "hiero", "resolve",
-                              "tvpaint", "unreal"]
+                hosts_filter=[
+                    "celaction",
+                    "flame",
+                    "harmony",
+                    "hiero",
+                    "resolve",
+                ]
             )
         ]

@@ -151,7 +154,10 @@ class ExperimentalTools:

     def refresh_availability(self):
         """Reload local settings and check if any tool changed ability."""
-        local_settings = get_local_settings()
-
+        # NOTE AYON does not have implemented settings for experimental
+        # tools.
+        local_settings = {}
         experimental_settings = (
             local_settings.get(LOCAL_EXPERIMENTAL_KEY)
         ) or {}

@@ -8,8 +8,6 @@ import sys
 import traceback
 import uuid

-from bson.objectid import ObjectId
-
 from ayon_core.client import (
     get_project,
     get_assets,

@@ -1080,7 +1078,6 @@ class ProjectPushItemProcess:
         new_repre_files = []
         for (path, rootless_path) in repre_filepaths:
             new_repre_files.append({
-                "_id": ObjectId(),
                 "path": rootless_path,
                 "size": os.path.getsize(path),
                 "hash": source_hash(path),

@@ -208,7 +208,7 @@ class Controller(QtCore.QObject):
         if not presets:
             return {}

-        result = presets.get("global", {}).get("filters", {})
+        result = presets.get("core", {}).get("filters", {})
         hosts = pyblish.api.registered_hosts()
         for host in hosts:
             host_presets = presets.get(host, {}).get("filters")

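Publish filter presets are now read from the "core" settings key, matching the project settings rename from "global" to "core":

    presets = {"core": {"filters": {}}}  # hypothetical presets payload
    result = presets.get("core", {}).get("filters", {})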