Merge branch 'develop' into enhancement/OP-8205_Hiero-use-AYON-settings

This commit is contained in:
Jakub Trllo 2024-02-20 11:58:16 +01:00
commit 463b7cf250
16 changed files with 28 additions and 768 deletions

View file

@@ -181,6 +181,10 @@ class HostDirmap(object):
exclude_locals=False,
cached=False)
# TODO implement
# Dirmap is dependent on 'get_site_local_overrides' which
# is not implemented in AYON. The mapping should be received
# from sitesync addon.
active_overrides = get_site_local_overrides(
project_name, active_site)
remote_overrides = get_site_local_overrides(
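
The TODO above says the mapping should come from the sitesync addon. A possible shape for that lookup, as a sketch only: the addon name "sitesync" and the `get_site_overrides` method are assumptions, not a confirmed AYON API.

def _get_overrides_from_sitesync(addons_manager, project_name, site_name):
    # Assumed addon name; the sitesync addon would own the mapping.
    sitesync = addons_manager.get("sitesync")
    if sitesync is None or not sitesync.enabled:
        return {}
    # Assumed method returning local root overrides for the given site.
    return sitesync.get_site_overrides(project_name, site_name) or {}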

View file

@@ -38,25 +38,6 @@ class ToolWindows:
cls._windows[tool] = window
def edit_shader_definitions():
from qtpy import QtWidgets
from ayon_core.hosts.maya.api.shader_definition_editor import (
ShaderDefinitionsEditor
)
from ayon_core.tools.utils import qt_app_context
top_level_widgets = QtWidgets.QApplication.topLevelWidgets()
main_window = next(widget for widget in top_level_widgets
if widget.objectName() == "MayaWindow")
with qt_app_context():
window = ToolWindows.get_window("shader_definition_editor")
if not window:
window = ShaderDefinitionsEditor(parent=main_window)
ToolWindows.set_window("shader_definition_editor", window)
window.show()
def _resolution_from_document(doc):
if not doc or "data" not in doc:
print("Entered document is not valid. \"{}\"".format(str(doc)))

View file

@@ -1,176 +0,0 @@
# -*- coding: utf-8 -*-
"""Editor for shader definitions.
Shader names are stored as a simple text file over GridFS in MongoDB.
"""
import os
from qtpy import QtWidgets, QtCore, QtGui
from ayon_core.client.mongo import OpenPypeMongoConnection
from ayon_core import resources
import gridfs
DEFINITION_FILENAME = "{}/maya/shader_definition.txt".format(
os.getenv("AYON_PROJECT_NAME"))
class ShaderDefinitionsEditor(QtWidgets.QWidget):
"""Widget serving as simple editor for shader name definitions."""
# name of the file used to store definitions
def __init__(self, parent=None):
super(ShaderDefinitionsEditor, self).__init__(parent)
self._mongo = OpenPypeMongoConnection.get_mongo_client()
self._gridfs = gridfs.GridFS(
self._mongo[os.getenv("OPENPYPE_DATABASE_NAME")])
self._editor = None
self._original_content = self._read_definition_file()
self.setObjectName("shaderDefinitionEditor")
self.setWindowTitle("OpenPype shader name definition editor")
icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
self.setWindowIcon(icon)
self.setWindowFlags(QtCore.Qt.Window)
self.setParent(parent)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.resize(750, 500)
self._setup_ui()
self._reload()
def _setup_ui(self):
"""Setup UI of Widget."""
layout = QtWidgets.QVBoxLayout(self)
label = QtWidgets.QLabel()
label.setText("Put shader names here - one name per line:")
layout.addWidget(label)
self._editor = QtWidgets.QPlainTextEdit()
self._editor.setStyleSheet("border: none;")
layout.addWidget(self._editor)
btn_layout = QtWidgets.QHBoxLayout()
save_btn = QtWidgets.QPushButton("Save")
save_btn.clicked.connect(self._save)
reload_btn = QtWidgets.QPushButton("Reload")
reload_btn.clicked.connect(self._reload)
exit_btn = QtWidgets.QPushButton("Exit")
exit_btn.clicked.connect(self._close)
btn_layout.addWidget(reload_btn)
btn_layout.addWidget(save_btn)
btn_layout.addWidget(exit_btn)
layout.addLayout(btn_layout)
def _read_definition_file(self, file=None):
"""Read definition file from database.
Args:
file (gridfs.grid_file.GridOut, Optional): File to read. If not
set, a new query will be issued to find it.
Returns:
str: Content of the file or empty string if file doesn't exist.
"""
content = ""
if not file:
file = self._gridfs.find_one(
{"filename": DEFINITION_FILENAME})
if not file:
print(">>> [SNDE]: nothing in database yet")
return content
content = file.read()
file.close()
return content
def _write_definition_file(self, content, force=False):
"""Write content as definition to file in database.
Before the file is written, a check is made whether its content
has changed in the meantime. If it has, the user is warned before
it is overwritten. Note: GridFS doesn't allow changing file
content; you need to delete the existing file and create a new one.
Args:
content (str): Content to write.
Raises:
ContentException: If file is changed in database while
editor is running.
"""
file = self._gridfs.find_one(
{"filename": DEFINITION_FILENAME})
if file:
content_check = self._read_definition_file(file)
if content == content_check:
print(">>> [SNDE]: content not changed")
return
if self._original_content != content_check:
if not force:
raise ContentException("Content changed")
print(">>> [SNDE]: overwriting data")
file.close()
self._gridfs.delete(file._id)
file = self._gridfs.new_file(
filename=DEFINITION_FILENAME,
content_type='text/plain',
encoding='utf-8')
file.write(content)
file.close()
QtCore.QTimer.singleShot(200, self._reset_style)
self._editor.setStyleSheet("border: 1px solid #33AF65;")
self._original_content = content
def _reset_style(self):
"""Reset editor style back.
Used to visually indicate save.
"""
self._editor.setStyleSheet("border: none;")
def _close(self):
self.hide()
def closeEvent(self, event):
event.ignore()
self.hide()
def _reload(self):
print(">>> [SNDE]: reloading")
self._set_content(self._read_definition_file())
def _save(self):
try:
self._write_definition_file(content=self._editor.toPlainText())
except ContentException:
# content has changed meanwhile
print(">>> [SNDE]: content has changed")
self._show_overwrite_warning()
def _set_content(self, content):
self._editor.setPlainText(content)
def _show_overwrite_warning(self):
reply = QtWidgets.QMessageBox.question(
self,
"Warning",
("Content you are editing was changed meanwhile in database.\n"
"Please, reload and solve the conflict."),
QtWidgets.QMessageBox.OK)
if reply == QtWidgets.QMessageBox.OK:
# do nothing
pass
class ContentException(Exception):
"""This is risen during save if file is changed in database."""
pass
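
Because GridFS files are immutable, the editor above updates by deleting the old file and writing a new one. A condensed sketch of that pattern, assuming a pymongo database handle:

import gridfs

def replace_text_file(db, filename, content):
    """Replace a GridFS text file by deleting the old one first."""
    fs = gridfs.GridFS(db)
    existing = fs.find_one({"filename": filename})
    if existing is not None:
        fs.delete(existing._id)
    fs.put(content.encode("utf-8"), filename=filename,
           content_type="text/plain", encoding="utf-8")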

View file

@@ -1,161 +0,0 @@
# -*- coding: utf-8 -*-
"""Validate model nodes names."""
import os
import platform
import re
import gridfs
import pyblish.api
from maya import cmds
import ayon_core.hosts.maya.api.action
from ayon_core.client.mongo import OpenPypeMongoConnection
from ayon_core.hosts.maya.api.shader_definition_editor import (
DEFINITION_FILENAME)
from ayon_core.pipeline.publish import (
OptionalPyblishPluginMixin, PublishValidationError, ValidateContentsOrder)
class ValidateModelName(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Validate name of model
starts with (somename)_###_(materialID)_GEO
materialID must be present in list
padding number doesn't have limit
"""
optional = True
order = ValidateContentsOrder
hosts = ["maya"]
families = ["model"]
label = "Model Name"
actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
material_file = None
database_file = DEFINITION_FILENAME
@classmethod
def get_invalid(cls, instance):
"""Get invalid nodes."""
use_db = cls.database
def is_group(group_name):
"""Find out if supplied transform is group or not."""
try:
children = cmds.listRelatives(group_name, children=True)
for child in children:
if not cmds.ls(child, transforms=True):
return False
return True
except Exception:
return False
invalid = []
content_instance = instance.data.get("setMembers", None)
if not content_instance:
cls.log.error("Instance has no nodes!")
return True
# validate top level group name
assemblies = cmds.ls(content_instance, assemblies=True, long=True)
if len(assemblies) != 1:
cls.log.error("Must have exactly one top group")
return assemblies or True
top_group = assemblies[0]
regex = cls.top_level_regex
r = re.compile(regex)
m = r.match(top_group)
project_name = instance.context.data["projectName"]
current_asset_name = instance.context.data["asset"]
if m is None:
cls.log.error("invalid name on: {}".format(top_group))
cls.log.error("name doesn't match regex {}".format(regex))
invalid.append(top_group)
else:
if "asset" in r.groupindex:
if m.group("asset") != current_asset_name:
cls.log.error("Invalid asset name in top level group.")
return top_group
if "subset" in r.groupindex:
if m.group("subset") != instance.data.get("subset"):
cls.log.error("Invalid subset name in top level group.")
return top_group
if "project" in r.groupindex:
if m.group("project") != project_name:
cls.log.error("Invalid project name in top level group.")
return top_group
descendants = cmds.listRelatives(content_instance,
allDescendents=True,
fullPath=True) or []
descendants = cmds.ls(descendants, noIntermediate=True, long=True)
trns = cmds.ls(descendants, long=False, type='transform')
# filter out groups
filtered = [node for node in trns if not is_group(node)]
# load shader list file as utf-8
shaders = []
if not use_db:
material_file = cls.material_file[platform.system().lower()]
if material_file:
if os.path.isfile(material_file):
shader_file = open(material_file, "r")
shaders = shader_file.readlines()
shader_file.close()
else:
cls.log.error("Missing shader name definition file.")
return True
else:
client = OpenPypeMongoConnection.get_mongo_client()
fs = gridfs.GridFS(client[os.getenv("OPENPYPE_DATABASE_NAME")])
shader_file = fs.find_one({"filename": cls.database_file})
if not shader_file:
cls.log.error("Missing shader name definition in database.")
return True
shaders = shader_file.read().splitlines()
shader_file.close()
# strip line endings from list
shaders = [s.rstrip() for s in shaders if s.rstrip()]
# compile regex for testing names
regex = cls.regex
r = re.compile(regex)
for obj in filtered:
cls.log.debug("testing: {}".format(obj))
m = r.match(obj)
if m is None:
cls.log.error("invalid name on: {}".format(obj))
invalid.append(obj)
else:
# if we have shader files and shader named group is in
# regex, test this group against names in shader file
if "shader" in r.groupindex and shaders:
try:
if not m.group('shader') in shaders:
cls.log.error(
"invalid materialID on: {0} ({1})".format(
obj, m.group('shader')))
invalid.append(obj)
except IndexError:
# shader named group doesn't match
cls.log.error(
"shader group doesn't match: {}".format(obj))
invalid.append(obj)
return invalid
def process(self, instance):
"""Plugin entry point."""
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:
raise PublishValidationError(
"Model naming is invalid. See the log.")

View file

@@ -65,7 +65,7 @@ class ValidateExposedKnobs(
group_node = instance.data["transientData"]["node"]
nuke_settings = instance.context.data["project_settings"]["nuke"]
create_settings = nuke_settings["create"][plugin]
exposed_knobs = create_settings["exposed_knobs"]
exposed_knobs = create_settings.get("exposed_knobs", [])
unexposed_knobs = []
for knob in exposed_knobs:
if knob not in group_node.knobs():
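
The one-line change swaps direct indexing for `dict.get` with a default, so creator settings that lack the key no longer raise `KeyError`:

create_settings = {}  # hypothetical creator settings missing the key
exposed_knobs = create_settings.get("exposed_knobs", [])
assert exposed_knobs == []  # no KeyError; the knob loop is a no-op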

View file

@@ -16,7 +16,6 @@ from ayon_core.client import get_asset_name_identifier
from ayon_core.settings import (
get_system_settings,
get_project_settings,
get_local_settings
)
from ayon_core.settings.constants import (
METADATA_KEYS,
@@ -1528,16 +1527,17 @@ def prepare_app_environments(
# Use environments from local settings
filtered_local_envs = {}
system_settings = data["system_settings"]
whitelist_envs = system_settings["general"].get("local_env_white_list")
if whitelist_envs:
local_settings = get_local_settings()
local_envs = local_settings.get("environments") or {}
filtered_local_envs = {
key: value
for key, value in local_envs.items()
if key in whitelist_envs
}
# NOTE Overrides for environment variables are not implemented in AYON.
# system_settings = data["system_settings"]
# whitelist_envs = system_settings["general"].get("local_env_white_list")
# if whitelist_envs:
# local_settings = get_local_settings()
# local_envs = local_settings.get("environments") or {}
# filtered_local_envs = {
# key: value
# for key, value in local_envs.items()
# if key in whitelist_envs
# }
# Apply local environment variables for already existing values
for key, value in filtered_local_envs.items():
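
For reference, the retired whitelist filter reduced local env overrides to allowed keys only. A standalone sketch with hypothetical values:

local_envs = {"PATH": "/opt/tools/bin", "MY_VAR": "1", "SECRET": "x"}
whitelist_envs = {"PATH", "MY_VAR"}  # hypothetical whitelist
filtered_local_envs = {
    key: value
    for key, value in local_envs.items()
    if key in whitelist_envs
}
# -> {"PATH": "/opt/tools/bin", "MY_VAR": "1"}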

View file

@@ -5,7 +5,6 @@ import platform
import getpass
import socket
from ayon_core.settings.lib import get_local_settings
from .execute import get_ayon_launcher_args
from .local_settings import get_local_site_id
@@ -96,7 +95,6 @@ def get_all_current_info():
return {
"workstation": get_workstation_info(),
"env": os.environ.copy(),
"local_settings": get_local_settings(),
"ayon": get_ayon_info(),
}

View file

@@ -1,39 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect instances that should be processed and published on DL.
"""
import os
import pyblish.api
from ayon_core.pipeline import PublishValidationError
class CollectDeadlinePublishableInstances(pyblish.api.InstancePlugin):
"""Collect instances that should be processed and published on DL.
Some long-running publishes (not just renders) can be offloaded to DL.
This plugin compares instance names against an env variable and marks
only the matching instances as publishable by the farm.
Triggered only when running in headless mode, e.g. on a farm.
"""
order = pyblish.api.CollectorOrder + 0.499
label = "Collect Deadline Publishable Instance"
targets = ["remote"]
def process(self, instance):
self.log.debug("CollectDeadlinePublishableInstances")
publish_inst = os.environ.get("OPENPYPE_PUBLISH_SUBSET", '')
if not publish_inst:
raise PublishValidationError("OPENPYPE_PUBLISH_SUBSET env var "
"required for remote publishing")
subset_name = instance.data["subset"]
if subset_name == publish_inst:
self.log.debug("Publish {}".format(subset_name))
instance.data["publish"] = True
instance.data["farm"] = False
else:
self.log.debug("Skipping {}".format(subset_name))
instance.data["publish"] = False

View file

@@ -1,131 +0,0 @@
import os
import attr
from datetime import datetime
from ayon_core.pipeline import PublishXmlValidationError
from ayon_core.lib import is_in_tests
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
import pyblish.api
@attr.s
class MayaPluginInfo(object):
Build = attr.ib(default=None) # Don't force build
StrictErrorChecking = attr.ib(default=True)
SceneFile = attr.ib(default=None) # Input scene
Version = attr.ib(default=None) # Mandatory for Deadline
ProjectPath = attr.ib(default=None)
ScriptJob = attr.ib(default=True)
ScriptFilename = attr.ib(default=None)
class MayaSubmitRemotePublishDeadline(
abstract_submit_deadline.AbstractSubmitDeadline):
"""Submit Maya scene to perform a local publish in Deadline.
Publishing in Deadline can be helpful for scenes that publish very slowly.
This way they can be processed in the background on another machine without
the artist having to wait for the publish to finish on their local machine.
Submission is done through the Deadline Web Service. DL then triggers
`openpype/scripts/remote_publish.py`.
Each publishable instance creates its own full publish job.
Different from `ProcessSubmittedJobOnFarm`, which creates a publish job
based on a metadata json containing the context and instance data of
rendered files.
"""
label = "Submit Scene to Deadline"
order = pyblish.api.IntegratorOrder
hosts = ["maya"]
families = ["publish.farm"]
targets = ["local"]
def process(self, instance):
# Ensure no errors so far
if not (all(result["success"]
for result in instance.context.data["results"])):
raise PublishXmlValidationError("Publish process has errors")
if not instance.data["publish"]:
self.log.warning("No active instances found. "
"Skipping submission..")
return
super(MayaSubmitRemotePublishDeadline, self).process(instance)
def get_job_info(self):
instance = self._instance
context = instance.context
project_name = instance.context.data["projectName"]
scene = instance.context.data["currentFile"]
scenename = os.path.basename(scene)
job_name = "{scene} [PUBLISH]".format(scene=scenename)
batch_name = "{code} - {scene}".format(code=project_name,
scene=scenename)
if is_in_tests():
batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
job_info = DeadlineJobInfo(Plugin="MayaBatch")
job_info.BatchName = batch_name
job_info.Name = job_name
job_info.UserName = context.data.get("user")
job_info.Comment = context.data.get("comment", "")
# use setting for publish job on farm, no reason to have it separately
project_settings = context.data["project_settings"]
deadline_publish_job_sett = project_settings["deadline"]["publish"]["ProcessSubmittedJobOnFarm"] # noqa
job_info.Department = deadline_publish_job_sett["deadline_department"]
job_info.ChunkSize = deadline_publish_job_sett["deadline_chunk_size"]
job_info.Priority = deadline_publish_job_sett["deadline_priority"]
job_info.Group = deadline_publish_job_sett["deadline_group"]
job_info.Pool = deadline_publish_job_sett["deadline_pool"]
# Include critical environment variables with submission + Session
keys = [
"FTRACK_API_USER",
"FTRACK_API_KEY",
"FTRACK_SERVER"
]
environment = {
key: os.environ[key]
for key in keys
if key in os.environ
}
environment["AYON_PROJECT_NAME"] = project_name
environment["AYON_FOLDER_PATH"] = instance.context.data["asset"]
environment["AYON_TASK_NAME"] = instance.context.data["task"]
environment["AYON_APP_NAME"] = os.environ.get("AYON_APP_NAME")
environment["OPENPYPE_PUBLISH_SUBSET"] = instance.data["subset"]
environment["AYON_LOG_NO_COLORS"] = "1"
environment["AYON_USERNAME"] = instance.context.data["user"]
environment["AYON_REMOTE_PUBLISH"] = "1"
for key, value in environment.items():
job_info.EnvironmentKeyValue[key] = value
def get_plugin_info(self):
# Not all hosts can import this module.
from maya import cmds
scene = self._instance.context.data["currentFile"]
plugin_info = MayaPluginInfo()
plugin_info.SceneFile = scene
plugin_info.ScriptFilename = "{OPENPYPE_REPOS_ROOT}/openpype/scripts/remote_publish.py" # noqa
plugin_info.Version = cmds.about(version=True)
plugin_info.ProjectPath = cmds.workspace(query=True,
rootDirectory=True)
return attr.asdict(plugin_info)
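
The plugin-info payload uses the attrs declare-then-serialize idiom: fields become Deadline plugin-info keys via `attr.asdict`. A minimal sketch:

import attr

@attr.s
class PluginInfo(object):
    SceneFile = attr.ib(default=None)
    Version = attr.ib(default=None)

info = PluginInfo(SceneFile="/path/scene.ma", Version="2024")
assert attr.asdict(info) == {
    "SceneFile": "/path/scene.ma", "Version": "2024"}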

View file

@@ -330,151 +330,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
self.log.debug("Skipping local instance.")
return
data = instance.data.copy()
context = instance.context
self.context = context
self.anatomy = instance.context.data["anatomy"]
asset = data.get("asset") or context.data["asset"]
subset = data.get("subset")
start = instance.data.get("frameStart")
if start is None:
start = context.data["frameStart"]
end = instance.data.get("frameEnd")
if end is None:
end = context.data["frameEnd"]
handle_start = instance.data.get("handleStart")
if handle_start is None:
handle_start = context.data["handleStart"]
handle_end = instance.data.get("handleEnd")
if handle_end is None:
handle_end = context.data["handleEnd"]
fps = instance.data.get("fps")
if fps is None:
fps = context.data["fps"]
if data.get("extendFrames", False):
start, end = self._extend_frames(
asset,
subset,
start,
end,
data["overrideExistingFrame"])
try:
source = data["source"]
except KeyError:
source = context.data["currentFile"]
success, rootless_path = (
self.anatomy.find_root_template_from_path(source)
)
if success:
source = rootless_path
else:
# `rootless_path` is not set to `source` if none of the roots match
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues."
).format(source))
family = "render"
if ("prerender" in instance.data["families"] or
"prerender.farm" in instance.data["families"]):
family = "prerender"
families = [family]
# pass review to families if marked as review
do_not_add_review = False
if data.get("review"):
families.append("review")
elif data.get("review") is False:
self.log.debug("Instance has review explicitly disabled.")
do_not_add_review = True
instance_skeleton_data = {
"family": family,
"subset": subset,
"families": families,
"asset": asset,
"frameStart": start,
"frameEnd": end,
"handleStart": handle_start,
"handleEnd": handle_end,
"frameStartHandle": start - handle_start,
"frameEndHandle": end + handle_end,
"comment": instance.data["comment"],
"fps": fps,
"source": source,
"extendFrames": data.get("extendFrames"),
"overrideExistingFrame": data.get("overrideExistingFrame"),
"pixelAspect": data.get("pixelAspect", 1),
"resolutionWidth": data.get("resolutionWidth", 1920),
"resolutionHeight": data.get("resolutionHeight", 1080),
"multipartExr": data.get("multipartExr", False),
"jobBatchName": data.get("jobBatchName", ""),
"useSequenceForReview": data.get("useSequenceForReview", True),
# map inputVersions `ObjectId` -> `str` so json supports it
"inputVersions": list(map(str, data.get("inputVersions", []))),
"colorspace": instance.data.get("colorspace"),
"stagingDir_persistent": instance.data.get(
"stagingDir_persistent", False
)
}
# skip locking version if we are creating v01
instance_version = instance.data.get("version") # take this if exists
if instance_version != 1:
instance_skeleton_data["version"] = instance_version
# transfer specific families from original instance to new render
for item in self.families_transfer:
if item in instance.data.get("families", []):
instance_skeleton_data["families"] += [item]
# transfer specific properties from original instance based on
# mapping dictionary `instance_transfer`
for key, values in self.instance_transfer.items():
if key in instance.data.get("families", []):
for v in values:
instance_skeleton_data[v] = instance.data.get(v)
# look through instance data representations and include any
# that have the tag `publish_on_farm`
for repre in instance.data.get("representations", []):
staging_dir = repre.get("stagingDir")
if staging_dir:
success, rootless_staging_dir = (
self.anatomy.find_root_template_from_path(
staging_dir
)
)
if success:
repre["stagingDir"] = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging_dir))
repre["stagingDir"] = staging_dir
if "publish_on_farm" in repre.get("tags"):
# create representations attribute if not there
if "representations" not in instance_skeleton_data.keys():
instance_skeleton_data["representations"] = []
instance_skeleton_data["representations"].append(repre)
instances = None
assert data.get("expectedFiles"), ("Submission from old Pype version"
" - missing expectedFiles")
anatomy = instance.context.data["anatomy"]
instance_skeleton_data = create_skeleton_instance(
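
Much of the removed block repeats one fallback idiom: use the per-instance value unless it is None, otherwise take the context value. A helper expressing it (the name is illustrative):

def instance_or_context(instance, context, key):
    """Prefer per-instance data, fall back to context data."""
    value = instance.data.get(key)
    if value is None:
        value = context.data[key]
    return value

# e.g. start = instance_or_context(instance, context, "frameStart")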

View file

@@ -8,9 +8,6 @@ import numbers
import six
import time
from ayon_core.settings.lib import (
get_local_settings,
)
from ayon_core.client import get_project, get_ayon_server_api_connection
from ayon_core.lib import Logger, get_local_site_id
from ayon_core.lib.path_templates import (
@@ -453,7 +450,7 @@ class Anatomy(BaseAnatomy):
return cls._sync_server_addon_cache.data
@classmethod
def _get_studio_roots_overrides(cls, project_name, local_settings=None):
def _get_studio_roots_overrides(cls, project_name):
"""This would return 'studio' site override by local settings.
Notes:
@@ -465,7 +462,6 @@
Args:
project_name (str): Name of project.
local_settings (Optional[dict[str, Any]]): Prepared local settings.
Returns:
Union[Dict[str, str], None]: Local root overrides.
@@ -488,11 +484,6 @@
should be returned.
"""
# Local settings may be used more than once or may not be used at all
# - to avoid slowdowns 'get_local_settings' is not called until it's
# really needed
local_settings = None
# First check if sync server is available and enabled
sync_server = cls.get_sync_server_addon()
if sync_server is None or not sync_server.enabled:
@@ -503,11 +494,8 @@
# Use sync server to receive active site name
project_cache = cls._default_site_id_cache[project_name]
if project_cache.is_outdated:
local_settings = get_local_settings()
project_cache.update_data(
sync_server.get_active_site_type(
project_name, local_settings
)
sync_server.get_active_site_type(project_name)
)
site_name = project_cache.data
@@ -517,12 +505,12 @@
# Handle studio root overrides without sync server
# - studio root overrides can be done even without sync server
roots_overrides = cls._get_studio_roots_overrides(
project_name, local_settings
project_name
)
else:
# Ask sync server to get roots overrides
roots_overrides = sync_server.get_site_root_overrides(
project_name, site_name, local_settings
project_name, site_name
)
site_cache.update_data(roots_overrides)
return site_cache.data
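
The roots-override lookup leans on per-project caches with an `is_outdated` flag. A simplified stand-in for such a cache object (Anatomy's real caches differ in detail):

import time

class TimedCache:
    """Assumed minimal TTL cache mirroring the is_outdated pattern."""

    def __init__(self, ttl=10.0):
        self._ttl = ttl
        self._stamp = None
        self.data = None

    @property
    def is_outdated(self):
        return self._stamp is None or time.time() - self._stamp > self._ttl

    def update_data(self, data):
        self.data = data
        self._stamp = time.time()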

View file

@@ -485,26 +485,6 @@ def filter_pyblish_plugins(plugins):
plugins.remove(plugin)
def remote_publish(log):
"""Loops through all plugins, logs to console. Used for tests.
Args:
log (Logger)
"""
# Error exit as soon as any error occurs.
error_format = "Failed {plugin.__name__}: {error}\n{error.traceback}"
for result in pyblish.util.publish_iter():
if not result["error"]:
continue
error_message = error_format.format(**result)
log.error(error_message)
# 'Fatal Error: ' is because of Deadline
raise RuntimeError("Fatal Error: {}".format(error_message))
def get_errored_instances_from_context(context, plugin=None):
"""Collect failed instances from pyblish context.

View file

@@ -1,12 +0,0 @@
try:
from ayon_core.lib import Logger
from ayon_core.pipeline.publish.lib import remote_publish
except ImportError as exc:
# Ensure Deadline fails by outputting an error that contains "Fatal Error:"
raise ImportError("Fatal Error: %s" % exc)
if __name__ == "__main__":
# Perform remote publish with thorough error checking
log = Logger.get_logger(__name__)
remote_publish(log)
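
The removed comments note that the "Fatal Error:" prefix exists for Deadline, which scans task output for failure markers. The convention in isolation, as a sketch:

def deadline_fatal(message):
    # Deadline watches the output for this prefix to flag the task as failed.
    raise RuntimeError("Fatal Error: {}".format(message))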

View file

@@ -7,7 +7,6 @@ from .lib import (
get_system_settings,
get_project_settings,
get_current_project_settings,
get_local_settings,
)
from .ayon_settings import get_ayon_settings
@@ -20,7 +19,6 @@ __all__ = (
"get_system_settings",
"get_project_settings",
"get_current_project_settings",
"get_local_settings",
"get_ayon_settings",
)

View file

@@ -48,11 +48,6 @@ def clear_metadata_from_settings(values):
clear_metadata_from_settings(item)
def get_local_settings():
# TODO: implement for AYON
return {}
def load_openpype_default_settings():
"""Load openpype default settings."""
return load_jsons_from_dir(DEFAULTS_DIR)
@@ -203,39 +198,17 @@ def merge_overrides(source_dict, override_dict):
def get_site_local_overrides(project_name, site_name, local_settings=None):
"""Site overrides from local settings for passet project and site name.
Deprecated:
This function is not implemented for AYON and will be removed.
Args:
project_name (str): For which project are overrides.
site_name (str): For which site are overrides needed.
local_settings (dict): Preloaded local settings. They are loaded
automatically if not passed.
"""
# Check if local settings were passed
if local_settings is None:
local_settings = get_local_settings()
output = {}
# Skip if local settings are empty
if not local_settings:
return output
local_project_settings = local_settings.get("projects") or {}
# Prepare overrides for entered project and for default project
project_locals = None
if project_name:
project_locals = local_project_settings.get(project_name)
default_project_locals = local_project_settings.get(DEFAULT_PROJECT_KEY)
# First load and use local settings from default project
if default_project_locals and site_name in default_project_locals:
output.update(default_project_locals[site_name])
# Apply project specific local settings if there are any
if project_locals and site_name in project_locals:
output.update(project_locals[site_name])
return output
return {}
def get_current_project_settings():
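
After this change the deprecated helper is a stub; under AYON every call yields an empty mapping regardless of arguments:

overrides = get_site_local_overrides("my_project", "studio")
assert overrides == {}  # no per-site overrides under AYON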

View file

@@ -1,5 +1,4 @@
import os
from ayon_core.settings import get_local_settings
# Constant key under which local settings are stored
LOCAL_EXPERIMENTAL_KEY = "experimental_tools"
@@ -155,7 +154,10 @@ class ExperimentalTools:
def refresh_availability(self):
"""Reload local settings and check if any tool changed ability."""
local_settings = get_local_settings()
# NOTE AYON does not have implemented settings for experimental
# tools.
local_settings = {}
experimental_settings = (
local_settings.get(LOCAL_EXPERIMENTAL_KEY)
) or {}
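
The trailing `or {}` above guards against a stored `None` value, not just a missing key. A two-line illustration:

local_settings = {"experimental_tools": None}  # hypothetical stored value
experimental_settings = local_settings.get("experimental_tools") or {}
assert experimental_settings == {}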