Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 21:04:40 +01:00

Commit b3de9a7ffe: Merge branch 'develop' into bugfix/houdini_redshift_fix_default_redshift_version

130 changed files with 797 additions and 1666 deletions
@@ -73,6 +73,20 @@ class Commands:
 import pyblish.api
 import pyblish.util

+# Fix older jobs
+for src_key, dst_key in (
+("AVALON_PROJECT", "AYON_PROJECT_NAME"),
+("AVALON_ASSET", "AYON_FOLDER_PATH"),
+("AVALON_TASK", "AYON_TASK_NAME"),
+("AVALON_WORKDIR", "AYON_WORKDIR"),
+("AVALON_APP_NAME", "AYON_APP_NAME"),
+("AVALON_APP", "AYON_HOST_NAME"),
+):
+if src_key in os.environ and dst_key not in os.environ:
+os.environ[dst_key] = os.environ[src_key]
+# Remove old keys, so we're sure they're not used
+os.environ.pop(src_key, None)
+
 log = Logger.get_logger("CLI-publish")

 install_ayon_plugins()
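As context for the hunk above: the added block lets jobs submitted with legacy AVALON_* environment variables keep working by copying each value to its AYON_* counterpart and dropping the old key. A minimal standalone sketch of the same idea (the mapping pairs come from the hunk; the sample value is illustrative):

import os

LEGACY_TO_AYON = {
    "AVALON_PROJECT": "AYON_PROJECT_NAME",
    "AVALON_ASSET": "AYON_FOLDER_PATH",
    "AVALON_TASK": "AYON_TASK_NAME",
    "AVALON_WORKDIR": "AYON_WORKDIR",
    "AVALON_APP_NAME": "AYON_APP_NAME",
    "AVALON_APP": "AYON_HOST_NAME",
}

# Pretend this process was spawned by an old farm job.
os.environ["AVALON_PROJECT"] = "demo_project"

for src_key, dst_key in LEGACY_TO_AYON.items():
    if src_key in os.environ and dst_key not in os.environ:
        os.environ[dst_key] = os.environ[src_key]
    # Drop the legacy key so nothing keeps reading it.
    os.environ.pop(src_key, None)

assert os.environ["AYON_PROJECT_NAME"] == "demo_project"
assert "AVALON_PROJECT" not in os.environ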
@@ -87,7 +101,7 @@ class Commands:
 if not any(paths):
 raise RuntimeError("No publish paths specified")

-app_full_name = os.getenv("AVALON_APP_NAME")
+app_full_name = os.getenv("AYON_APP_NAME")
 if app_full_name:
 context = get_global_context()
 env = get_app_environments_for_context(

@@ -21,7 +21,7 @@ class CreateWorkdirExtraFolders(PreLaunchHook):
 return

 env = self.data.get("env") or {}
-workdir = env.get("AVALON_WORKDIR")
+workdir = env.get("AYON_WORKDIR")
 if not workdir or not os.path.exists(workdir):
 return
@@ -181,6 +181,10 @@ class HostDirmap(object):
 exclude_locals=False,
 cached=False)

+# TODO implement
+# Dirmap is dependent on 'get_site_local_overrides' which
+# is not implemented in AYON. The mapping should be received
+# from sitesync addon.
 active_overrides = get_site_local_overrides(
 project_name, active_site)
 remote_overrides = get_site_local_overrides(

@@ -106,7 +106,7 @@ class HostBase(object):
 Union[str, None]: Current project name.
 """

-return os.environ.get("AVALON_PROJECT")
+return os.environ.get("AYON_PROJECT_NAME")

 def get_current_asset_name(self):
 """

@@ -114,7 +114,7 @@ class HostBase(object):
 Union[str, None]: Current asset name.
 """

-return os.environ.get("AVALON_ASSET")
+return os.environ.get("AYON_FOLDER_PATH")

 def get_current_task_name(self):
 """

@@ -122,7 +122,7 @@ class HostBase(object):
 Union[str, None]: Current task name.
 """

-return os.environ.get("AVALON_TASK")
+return os.environ.get("AYON_TASK_NAME")

 def get_current_context(self):
 """Get current context information.

@@ -234,7 +234,7 @@ class IWorkfileHost:
 str: Path to new workdir.
 """

-return session["AVALON_WORKDIR"]
+return session["AYON_WORKDIR"]

 # --- Deprecated method names ---
 def file_extensions(self):
@@ -297,11 +297,11 @@ class AfterEffectsRoute(WebSocketRoute):
 log.info("Setting context change")
 log.info("project {} asset {} ".format(project, asset))
 if project:
-os.environ["AVALON_PROJECT"] = project
+os.environ["AYON_PROJECT_NAME"] = project
 if asset:
-os.environ["AVALON_ASSET"] = asset
+os.environ["AYON_FOLDER_PATH"] = asset
 if task:
-os.environ["AVALON_TASK"] = task
+os.environ["AYON_TASK_NAME"] = task

 async def read(self):
 log.debug("aftereffects.read client calls server server calls "

@@ -194,13 +194,13 @@ class RenderCreator(Creator):
 name into created subset name.

 Position of composition name could be set in
-`project_settings/global/tools/creator/subset_name_profiles` with some
-form of '{composition}' placeholder.
+`project_settings/global/tools/creator/product_name_profiles` with
+some form of '{composition}' placeholder.

 Composition name will be used implicitly if multiple composition should
 be handled at same time.

-If {composition} placeholder is not us 'subset_name_profiles'
+If {composition} placeholder is not us 'product_name_profiles'
 composition name will be capitalized and set at the end of subset name
 if necessary.
@@ -272,7 +272,7 @@ def set_resolution(data):


 def on_new():
-project = os.environ.get("AVALON_PROJECT")
+project = os.environ.get("AYON_PROJECT_NAME")
 settings = get_project_settings(project).get("blender")

 set_resolution_startup = settings.get("set_resolution_startup")

@@ -293,7 +293,7 @@ def on_new():


 def on_open():
-project = os.environ.get("AVALON_PROJECT")
+project = os.environ.get("AYON_PROJECT_NAME")
 settings = get_project_settings(project).get("blender")

 set_resolution_startup = settings.get("set_resolution_startup")

@@ -379,7 +379,7 @@ def _on_task_changed():
 # `directory` attribute, so it opens in that directory (does it?).
 # https://docs.blender.org/api/blender2.8/bpy.types.Operator.html#calling-a-file-selector
 # https://docs.blender.org/api/blender2.8/bpy.types.WindowManager.html#bpy.types.WindowManager.fileselect_add
-workdir = os.getenv("AVALON_WORKDIR")
+workdir = os.getenv("AYON_WORKDIR")
 log.debug("New working directory: %s", workdir)
@@ -82,7 +82,7 @@ def file_extensions() -> List[str]:
 def work_root(session: dict) -> str:
 """Return the default root to browse for work files."""

-work_dir = session["AVALON_WORKDIR"]
+work_dir = session["AYON_WORKDIR"]
 scene_dir = session.get("AVALON_SCENEDIR")
 if scene_dir:
 return str(Path(work_dir, scene_dir))

@@ -34,4 +34,4 @@ def current_file():


 def work_root(session):
-return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
+return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/")

@@ -70,7 +70,7 @@ class LoadClip(opfapi.ClipLoader):
 self.log.info("Loading with colorspace: `{}`".format(colorspace))

 # create workfile path
-workfile_dir = os.environ["AVALON_WORKDIR"]
+workfile_dir = os.environ["AYON_WORKDIR"]
 openclip_dir = os.path.join(
 workfile_dir, clip_name
 )
@@ -80,7 +80,7 @@ class LoadClipBatch(opfapi.ClipLoader):
 self.log.info("Loading with colorspace: `{}`".format(colorspace))

 # create workfile path
-workfile_dir = options.get("workdir") or os.environ["AVALON_WORKDIR"]
+workfile_dir = options.get("workdir") or os.environ["AYON_WORKDIR"]
 openclip_dir = os.path.join(
 workfile_dir, clip_name
 )

@@ -22,7 +22,7 @@ def get_fusion_version(app_name):
 The function is triggered by the prelaunch hooks to get the fusion version.

 `app_name` is obtained by prelaunch hooks from the
-`launch_context.env.get("AVALON_APP_NAME")`.
+`launch_context.env.get("AYON_APP_NAME")`.

 To get a correct Fusion version, a version number should be present
 in the `applications/fusion/variants` key

@@ -135,7 +135,7 @@ class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
 return current_filepath

 def work_root(self, session):
-work_dir = session["AVALON_WORKDIR"]
+work_dir = session["AYON_WORKDIR"]
 scene_dir = session.get("AVALON_SCENEDIR")
 if scene_dir:
 return os.path.join(work_dir, scene_dir)
@@ -135,7 +135,7 @@ class GenericCreateSaver(Creator):
 ext = data["creator_attributes"]["image_format"]

 # Subset change detected
-workdir = os.path.normpath(os.getenv("AVALON_WORKDIR"))
+workdir = os.path.normpath(os.getenv("AYON_WORKDIR"))
 formatting_data.update({
 "workdir": workdir,
 "frame": "0" * frame_padding,

@@ -131,7 +131,7 @@ class FusionCopyPrefsPrelaunch(PreLaunchHook):
 ) = self.get_copy_fusion_prefs_settings()

 # Get launched application context and return correct app version
-app_name = self.launch_context.env.get("AVALON_APP_NAME")
+app_name = self.launch_context.env.get("AYON_APP_NAME")
 app_version = get_fusion_version(app_name)
 if app_version is None:
 version_names = ", ".join(str(x) for x in FUSION_VERSIONS_DICT)

@@ -28,7 +28,7 @@ class FusionPrelaunch(PreLaunchHook):
 def execute(self):
 # making sure python 3 is installed at provided path
 # Py 3.3-3.10 for Fusion 18+ or Py 3.6 for Fu 16-17
-app_data = self.launch_context.env.get("AVALON_APP_NAME")
+app_data = self.launch_context.env.get("AYON_APP_NAME")
 app_version = get_fusion_version(app_data)
 if not app_version:
 raise ApplicationLaunchFailed(
@@ -74,4 +74,4 @@ def current_file():


 def work_root(session):
-return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
+return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/")

@@ -77,7 +77,7 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
 expected_settings.pop("resolutionWidth")
 expected_settings.pop("resolutionHeight")

-if (any(re.search(pattern, os.getenv('AVALON_TASK'))
+if (any(re.search(pattern, os.getenv('AYON_TASK_NAME'))
 for pattern in self.skip_timelines_check)):
 self.log.info("Skipping frames check because of "
 "task name and pattern {}".format(

@@ -70,4 +70,4 @@ def current_file():


 def work_root(session):
-return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
+return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/")
@@ -10,7 +10,7 @@ class SetPath(PreLaunchHook):
 launch_types = {LaunchTypes.local}

 def execute(self):
-workdir = self.launch_context.env.get("AVALON_WORKDIR", "")
+workdir = self.launch_context.env.get("AYON_WORKDIR", "")
 if not workdir:
 self.log.warning("BUG: Workdir is not filled.")
 return

@@ -2,7 +2,7 @@ import hou
 import pyblish.api
 from ayon_core.hosts.houdini.api import lib
 import ayon_core.hosts.houdini.api.usd as hou_usdlib
-import ayon_core.lib.usdlib as usdlib
+from ayon_core.pipeline import usdlib


 class CollectInstancesUsdLayered(pyblish.api.ContextPlugin):

@@ -5,7 +5,7 @@ from ayon_core.client import (
 get_asset_by_name,
 get_asset_name_identifier,
 )
-import ayon_core.lib.usdlib as usdlib
+from ayon_core.pipeline import usdlib


 class CollectUsdBootstrap(pyblish.api.InstancePlugin):
@@ -10,7 +10,7 @@ class SetPath(PreLaunchHook):
 launch_types = {LaunchTypes.local}

 def execute(self):
-workdir = self.launch_context.env.get("AVALON_WORKDIR", "")
+workdir = self.launch_context.env.get("AYON_WORKDIR", "")
 if not workdir:
 self.log.warning("BUG: Workdir is not filled.")
 return

@@ -38,25 +38,6 @@ class ToolWindows:
 cls._windows[tool] = window


-def edit_shader_definitions():
-from qtpy import QtWidgets
-from ayon_core.hosts.maya.api.shader_definition_editor import (
-ShaderDefinitionsEditor
-)
-from ayon_core.tools.utils import qt_app_context
-
-top_level_widgets = QtWidgets.QApplication.topLevelWidgets()
-main_window = next(widget for widget in top_level_widgets
-if widget.objectName() == "MayaWindow")
-
-with qt_app_context():
-window = ToolWindows.get_window("shader_definition_editor")
-if not window:
-window = ShaderDefinitionsEditor(parent=main_window)
-ToolWindows.set_window("shader_definition_editor", window)
-window.show()
-
-
 def _resolution_from_document(doc):
 if not doc or "data" not in doc:
 print("Entered document is not valid. \"{}\"".format(str(doc)))
@@ -246,7 +246,7 @@ def _set_project():
 None

 """
-workdir = os.getenv("AVALON_WORKDIR")
+workdir = os.getenv("AYON_WORKDIR")

 try:
 os.makedirs(workdir)

@@ -628,7 +628,7 @@ def on_task_changed():
 # Run
 menu.update_menu_task_label()

-workdir = os.getenv("AVALON_WORKDIR")
+workdir = os.getenv("AYON_WORKDIR")
 if os.path.exists(workdir):
 log.info("Updating Maya workspace for task change to %s", workdir)
 _set_project()

@@ -677,7 +677,7 @@ def workfile_save_before_xgen(event):

 import xgenm

-current_work_dir = os.getenv("AVALON_WORKDIR").replace("\\", "/")
+current_work_dir = os.getenv("AYON_WORKDIR").replace("\\", "/")
 expected_work_dir = event.data["workdir_path"].replace("\\", "/")
 if current_work_dir == expected_work_dir:
 return
@@ -612,7 +612,7 @@ def get_load_color_for_family(family, settings=None):
 else:
 raise ValueError("Invalid color definition {}".format(str(color)))

-if type(red, int):
+if isinstance(red, int):
 red = red / 255.0
 green = green / 255.0
 blue = blue / 255.0
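A side note on the fix above: calling type() with two arguments is not a type check at all; it raises "TypeError: type() takes 1 or 3 arguments", so the old branch could never run. isinstance is the correct form, e.g.:

red = 128
if isinstance(red, int):
    red = red / 255.0  # normalize an 8-bit channel to the 0-1 range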
@@ -1,176 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Editor for shader definitions.
-
-Shader names are stored as simple text file over GridFS in mongodb.
-
-"""
-import os
-from qtpy import QtWidgets, QtCore, QtGui
-from ayon_core.client.mongo import OpenPypeMongoConnection
-from ayon_core import resources
-import gridfs
-
-
-DEFINITION_FILENAME = "{}/maya/shader_definition.txt".format(
-os.getenv("AVALON_PROJECT"))
-
-
-class ShaderDefinitionsEditor(QtWidgets.QWidget):
-"""Widget serving as simple editor for shader name definitions."""
-
-# name of the file used to store definitions
-
-def __init__(self, parent=None):
-super(ShaderDefinitionsEditor, self).__init__(parent)
-self._mongo = OpenPypeMongoConnection.get_mongo_client()
-self._gridfs = gridfs.GridFS(
-self._mongo[os.getenv("OPENPYPE_DATABASE_NAME")])
-self._editor = None
-
-self._original_content = self._read_definition_file()
-
-self.setObjectName("shaderDefinitionEditor")
-self.setWindowTitle("OpenPype shader name definition editor")
-icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
-self.setWindowIcon(icon)
-self.setWindowFlags(QtCore.Qt.Window)
-self.setParent(parent)
-self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
-self.resize(750, 500)
-
-self._setup_ui()
-self._reload()
-
-def _setup_ui(self):
-"""Setup UI of Widget."""
-layout = QtWidgets.QVBoxLayout(self)
-label = QtWidgets.QLabel()
-label.setText("Put shader names here - one name per line:")
-layout.addWidget(label)
-self._editor = QtWidgets.QPlainTextEdit()
-self._editor.setStyleSheet("border: none;")
-layout.addWidget(self._editor)
-
-btn_layout = QtWidgets.QHBoxLayout()
-save_btn = QtWidgets.QPushButton("Save")
-save_btn.clicked.connect(self._save)
-
-reload_btn = QtWidgets.QPushButton("Reload")
-reload_btn.clicked.connect(self._reload)
-
-exit_btn = QtWidgets.QPushButton("Exit")
-exit_btn.clicked.connect(self._close)
-
-btn_layout.addWidget(reload_btn)
-btn_layout.addWidget(save_btn)
-btn_layout.addWidget(exit_btn)
-
-layout.addLayout(btn_layout)
-
-def _read_definition_file(self, file=None):
-"""Read definition file from database.
-
-Args:
-file (gridfs.grid_file.GridOut, Optional): File to read. If not
-set, new query will be issued to find it.
-
-Returns:
-str: Content of the file or empty string if file doesn't exist.
-
-"""
-content = ""
-if not file:
-file = self._gridfs.find_one(
-{"filename": DEFINITION_FILENAME})
-if not file:
-print(">>> [SNDE]: nothing in database yet")
-return content
-content = file.read()
-file.close()
-return content
-
-def _write_definition_file(self, content, force=False):
-"""Write content as definition to file in database.
-
-Before file is written, check is made if its content has not
-changed. If is changed, warning is issued to user if he wants
-it to overwrite. Note: GridFs doesn't allow changing file content.
-You need to delete existing file and create new one.
-
-Args:
-content (str): Content to write.
-
-Raises:
-ContentException: If file is changed in database while
-editor is running.
-"""
-file = self._gridfs.find_one(
-{"filename": DEFINITION_FILENAME})
-if file:
-content_check = self._read_definition_file(file)
-if content == content_check:
-print(">>> [SNDE]: content not changed")
-return
-if self._original_content != content_check:
-if not force:
-raise ContentException("Content changed")
-print(">>> [SNDE]: overwriting data")
-file.close()
-self._gridfs.delete(file._id)
-
-file = self._gridfs.new_file(
-filename=DEFINITION_FILENAME,
-content_type='text/plain',
-encoding='utf-8')
-file.write(content)
-file.close()
-QtCore.QTimer.singleShot(200, self._reset_style)
-self._editor.setStyleSheet("border: 1px solid #33AF65;")
-self._original_content = content
-
-def _reset_style(self):
-"""Reset editor style back.
-
-Used to visually indicate save.
-
-"""
-self._editor.setStyleSheet("border: none;")
-
-def _close(self):
-self.hide()
-
-def closeEvent(self, event):
-event.ignore()
-self.hide()
-
-def _reload(self):
-print(">>> [SNDE]: reloading")
-self._set_content(self._read_definition_file())
-
-def _save(self):
-try:
-self._write_definition_file(content=self._editor.toPlainText())
-except ContentException:
-# content has changed meanwhile
-print(">>> [SNDE]: content has changed")
-self._show_overwrite_warning()
-
-def _set_content(self, content):
-self._editor.setPlainText(content)
-
-def _show_overwrite_warning(self):
-reply = QtWidgets.QMessageBox.question(
-self,
-"Warning",
-("Content you are editing was changed meanwhile in database.\n"
-"Please, reload and solve the conflict."),
-QtWidgets.QMessageBox.OK)
-
-if reply == QtWidgets.QMessageBox.OK:
-# do nothing
-pass
-
-
-class ContentException(Exception):
-"""This is risen during save if file is changed in database."""
-pass
@@ -35,7 +35,7 @@ def current_file():


 def work_root(session):
-work_dir = session["AVALON_WORKDIR"]
+work_dir = session["AYON_WORKDIR"]
 scene_dir = None

 # Query scene file rule from workspace.mel if it exists in WORKDIR

@@ -12,7 +12,7 @@ class PreCopyMel(PreLaunchHook):

 def execute(self):
 project_doc = self.data["project_doc"]
-workdir = self.launch_context.env.get("AVALON_WORKDIR")
+workdir = self.launch_context.env.get("AYON_WORKDIR")
 if not workdir:
 self.log.warning("BUG: Workdir is not filled.")
 return

@@ -8,13 +8,12 @@ publishing on farm.
 Requires:
 instance -> families
 instance -> setMembers
 instance -> asset

 context -> currentFile
 context -> workspaceDir
 context -> user

-session -> AVALON_ASSET

 Optional:

 Provides:
@@ -1,161 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Validate model nodes names."""
-import os
-import platform
-import re
-
-import gridfs
-import pyblish.api
-from maya import cmds
-
-import ayon_core.hosts.maya.api.action
-from ayon_core.client.mongo import OpenPypeMongoConnection
-from ayon_core.hosts.maya.api.shader_definition_editor import (
-DEFINITION_FILENAME)
-from ayon_core.pipeline.publish import (
-OptionalPyblishPluginMixin, PublishValidationError, ValidateContentsOrder)
-
-
-class ValidateModelName(pyblish.api.InstancePlugin,
-OptionalPyblishPluginMixin):
-"""Validate name of model
-
-starts with (somename)_###_(materialID)_GEO
-materialID must be present in list
-padding number doesn't have limit
-
-"""
-optional = True
-order = ValidateContentsOrder
-hosts = ["maya"]
-families = ["model"]
-label = "Model Name"
-actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
-material_file = None
-database_file = DEFINITION_FILENAME
-
-@classmethod
-def get_invalid(cls, instance):
-"""Get invalid nodes."""
-use_db = cls.database
-
-def is_group(group_name):
-"""Find out if supplied transform is group or not."""
-try:
-children = cmds.listRelatives(group_name, children=True)
-for child in children:
-if not cmds.ls(child, transforms=True):
-return False
-return True
-except Exception:
-return False
-
-invalid = []
-content_instance = instance.data.get("setMembers", None)
-if not content_instance:
-cls.log.error("Instance has no nodes!")
-return True
-pass
-
-# validate top level group name
-assemblies = cmds.ls(content_instance, assemblies=True, long=True)
-if len(assemblies) != 1:
-cls.log.error("Must have exactly one top group")
-return assemblies or True
-top_group = assemblies[0]
-regex = cls.top_level_regex
-r = re.compile(regex)
-m = r.match(top_group)
-project_name = instance.context.data["projectName"]
-current_asset_name = instance.context.data["asset"]
-if m is None:
-cls.log.error("invalid name on: {}".format(top_group))
-cls.log.error("name doesn't match regex {}".format(regex))
-invalid.append(top_group)
-else:
-if "asset" in r.groupindex:
-if m.group("asset") != current_asset_name:
-cls.log.error("Invalid asset name in top level group.")
-return top_group
-if "subset" in r.groupindex:
-if m.group("subset") != instance.data.get("subset"):
-cls.log.error("Invalid subset name in top level group.")
-return top_group
-if "project" in r.groupindex:
-if m.group("project") != project_name:
-cls.log.error("Invalid project name in top level group.")
-return top_group
-
-descendants = cmds.listRelatives(content_instance,
-allDescendents=True,
-fullPath=True) or []
-
-descendants = cmds.ls(descendants, noIntermediate=True, long=True)
-trns = cmds.ls(descendants, long=False, type='transform')
-
-# filter out groups
-filtered = [node for node in trns if not is_group(node)]
-
-# load shader list file as utf-8
-shaders = []
-if not use_db:
-material_file = cls.material_file[platform.system().lower()]
-if material_file:
-if os.path.isfile(material_file):
-shader_file = open(material_file, "r")
-shaders = shader_file.readlines()
-shader_file.close()
-else:
-cls.log.error("Missing shader name definition file.")
-return True
-else:
-client = OpenPypeMongoConnection.get_mongo_client()
-fs = gridfs.GridFS(client[os.getenv("OPENPYPE_DATABASE_NAME")])
-shader_file = fs.find_one({"filename": cls.database_file})
-if not shader_file:
-cls.log.error("Missing shader name definition in database.")
-return True
-shaders = shader_file.read().splitlines()
-shader_file.close()
-
-# strip line endings from list
-shaders = [s.rstrip() for s in shaders if s.rstrip()]
-
-# compile regex for testing names
-regex = cls.regex
-r = re.compile(regex)
-
-for obj in filtered:
-cls.log.debug("testing: {}".format(obj))
-m = r.match(obj)
-if m is None:
-cls.log.error("invalid name on: {}".format(obj))
-invalid.append(obj)
-else:
-# if we have shader files and shader named group is in
-# regex, test this group against names in shader file
-if "shader" in r.groupindex and shaders:
-try:
-if not m.group('shader') in shaders:
-cls.log.error(
-"invalid materialID on: {0} ({1})".format(
-obj, m.group('shader')))
-invalid.append(obj)
-except IndexError:
-# shader named group doesn't match
-cls.log.error(
-"shader group doesn't match: {}".format(obj))
-invalid.append(obj)
-
-return invalid
-
-def process(self, instance):
-"""Plugin entry point."""
-if not self.is_active(instance.data):
-return
-
-invalid = self.get_invalid(instance)
-
-if invalid:
-raise PublishValidationError(
-"Model naming is invalid. See the log.")
@@ -38,7 +38,7 @@ if explicit_plugins_loading["enabled"]:
 key = "AYON_OPEN_WORKFILE_POST_INITIALIZATION"
 if bool(int(os.environ.get(key, "0"))):
 def _log_and_open():
-path = os.environ["AVALON_LAST_WORKFILE"]
+path = os.environ["AYON_LAST_WORKFILE"]
 print("Opening \"{}\"".format(path))
 cmds.file(path, open=True, force=True)
 cmds.evalDeferred(

@@ -120,7 +120,7 @@ def deprecated(new_destination):
 class Context:
 main_window = None
 context_action_item = None
-project_name = os.getenv("AVALON_PROJECT")
+project_name = os.getenv("AYON_PROJECT_NAME")
 # Workfile related code
 workfiles_launched = False
 workfiles_tool_timer = None

@@ -2605,7 +2605,7 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
 def set_favorites(self):
 from .utils import set_context_favorites

-work_dir = os.getenv("AVALON_WORKDIR")
+work_dir = os.getenv("AYON_WORKDIR")
 asset = get_current_asset_name()
 favorite_items = OrderedDict()

@@ -2953,7 +2953,7 @@ def process_workfile_builder():
 create_fv_on = workfile_builder.get("create_first_version") or None
 builder_on = workfile_builder.get("builder_on_start") or None

-last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE")
+last_workfile_path = os.environ.get("AYON_LAST_WORKFILE")

 # generate first version in file not existing and feature is enabled
 if create_fv_on and not os.path.exists(last_workfile_path):
@@ -3203,7 +3203,7 @@ class DirmapCache:
 @classmethod
 def project_name(cls):
 if cls._project_name is None:
-cls._project_name = os.getenv("AVALON_PROJECT")
+cls._project_name = os.getenv("AYON_PROJECT_NAME")
 return cls._project_name

 @classmethod

@@ -493,7 +493,7 @@ def get_colorspace_from_node(node):
 def get_review_presets_config():
 settings = get_current_project_settings()
 review_profiles = (
-settings["global"]
+settings["core"]
 ["publish"]
 ["ExtractReview"]
 ["profiles"]

@@ -1348,7 +1348,9 @@ def _remove_old_knobs(node):


 def exposed_write_knobs(settings, plugin_name, instance_node):
-exposed_knobs = settings["nuke"]["create"][plugin_name]["exposed_knobs"]
+exposed_knobs = settings["nuke"]["create"][plugin_name].get(
+"exposed_knobs", []
+)
 if exposed_knobs:
 instance_node.addKnob(nuke.Text_Knob('', 'Write Knobs'))
 write_node = nuke.allNodes(group=instance_node, filter="Write")[0]
@@ -68,7 +68,7 @@ def current_file():

 def work_root(session):

-work_dir = session["AVALON_WORKDIR"]
+work_dir = session["AYON_WORKDIR"]
 scene_dir = session.get("AVALON_SCENEDIR")
 if scene_dir:
 path = os.path.join(work_dir, scene_dir)

@@ -65,7 +65,7 @@ class ValidateExposedKnobs(
 group_node = instance.data["transientData"]["node"]
 nuke_settings = instance.context.data["project_settings"]["nuke"]
 create_settings = nuke_settings["create"][plugin]
-exposed_knobs = create_settings["exposed_knobs"]
+exposed_knobs = create_settings.get("exposed_knobs", [])
 unexposed_knobs = []
 for knob in exposed_knobs:
 if knob not in group_node.knobs():

@@ -112,7 +112,7 @@ class WriteNodeKnobSettingPanel(nukescripts.PythonPanel):
 for write_node in write_selected_nodes:
 # data for mapping the path
 data = {
-"work": os.getenv("AVALON_WORKDIR"),
+"work": os.getenv("AYON_WORKDIR"),
 "subset": write_node["name"].value(),
 "frame": "#" * frame_padding,
 "ext": ext
@@ -62,7 +62,7 @@ class PhotoshopHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
 return None

 def work_root(self, session):
-return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
+return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/")

 def open_workfile(self, filepath):
 lib.stub().open(filepath)

@@ -209,8 +209,8 @@ class ImageCreator(Creator):

 'Use layer name in subset' will explicitly add layer name into subset
 name. Position of this name is configurable in
-`project_settings/global/tools/creator/subset_name_profiles`.
-If layer placeholder ({layer}) is not used in `subset_name_profiles`
+`project_settings/global/tools/creator/product_name_profiles`.
+If layer placeholder ({layer}) is not used in `product_name_profiles`
 but layer name should be used (set explicitly in UI or implicitly if
 multiple images should be created), it is added in capitalized form
 as a suffix to subset name.

@@ -52,10 +52,10 @@ class CollectBatchData(pyblish.api.ContextPlugin):
 assert os.path.exists(batch_dir), \
 "Folder {} doesn't exist".format(batch_dir)

-project_name = os.environ.get("AVALON_PROJECT")
+project_name = os.environ.get("AYON_PROJECT_NAME")
 if project_name is None:
 raise AssertionError(
-"Environment `AVALON_PROJECT` was not found."
+"Environment `AYON_PROJECT_NAME` was not found."
 "Could not set project `root` which may cause issues."
 )
@@ -68,8 +68,8 @@ class CollectBatchData(pyblish.api.ContextPlugin):
 batch_data["context"]
 )

-os.environ["AVALON_ASSET"] = asset_name
-os.environ["AVALON_TASK"] = task_name
+os.environ["AYON_FOLDER_PATH"] = asset_name
+os.environ["AYON_TASK_NAME"] = task_name

 context.data["asset"] = asset_name
 context.data["task"] = task_name

@@ -79,7 +79,7 @@ def open_file(filepath):
 def current_file():
 pm = get_project_manager()
 file_ext = file_extensions()[0]
-workdir_path = os.getenv("AVALON_WORKDIR")
+workdir_path = os.getenv("AYON_WORKDIR")
 project = pm.GetCurrentProject()
 project_name = project.GetName()
 file_name = project_name + file_ext

@@ -93,4 +93,4 @@ def current_file():


 def work_root(session):
-return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
+return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/")
@@ -22,7 +22,7 @@ class TrayPublisherHost(HostBase, IPublishHost):
 name = "traypublisher"

 def install(self):
-os.environ["AVALON_APP"] = self.name
+os.environ["AYON_HOST_NAME"] = self.name

 pyblish.api.register_host("traypublisher")
 pyblish.api.register_plugin_path(PUBLISH_PATH)

@@ -40,7 +40,7 @@ class TrayPublisherHost(HostBase, IPublishHost):
 def set_project_name(self, project_name):
 # TODO Deregister project specific plugins and register new project
 # plugins
-os.environ["AVALON_PROJECT"] = project_name
+os.environ["AYON_PROJECT_NAME"] = project_name
 HostContext.set_project_name(project_name)

@@ -8,7 +8,7 @@ log = Logger.get_logger(__name__)
 def initialize():
 from ayon_core.hosts.traypublisher.api.plugin import SettingsCreator

-project_name = os.environ["AVALON_PROJECT"]
+project_name = os.environ["AYON_PROJECT_NAME"]
 project_settings = get_project_settings(project_name)

 simple_creators = project_settings["traypublisher"]["simple_creators"]
@@ -68,7 +68,7 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
 log.info("AYON - Installing TVPaint integration")

 # Create workdir folder if does not exist yet
-workdir = os.getenv("AVALON_WORKDIR")
+workdir = os.getenv("AYON_WORKDIR")
 if not os.path.exists(workdir):
 os.makedirs(workdir)

@@ -155,7 +155,7 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
 return execute_george(george_script)

 def work_root(self, session):
-return session["AVALON_WORKDIR"]
+return session["AYON_WORKDIR"]

 def get_current_workfile(self):
 return execute_george("tv_GetProjectName")

@@ -174,7 +174,7 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
 # Setup project settings if its the template that's launched.
 # TODO also check for template creation when it's possible to define
 # templates
-last_workfile = os.environ.get("AVALON_LAST_WORKFILE")
+last_workfile = os.environ.get("AYON_LAST_WORKFILE")
 if not last_workfile or os.path.exists(last_workfile):
 return
@@ -85,8 +85,8 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
 if workfile_context:
 # Change current context with context from workfile
 key_map = (
-("AVALON_ASSET", "asset_name"),
-("AVALON_TASK", "task_name")
+("AYON_FOLDER_PATH", "asset_name"),
+("AYON_TASK_NAME", "task_name")
 )
 for env_key, key in key_map:
 os.environ[env_key] = workfile_context[key]

@@ -31,7 +31,7 @@ class ExtractSequence(pyblish.api.Extractor):
 families = ["review", "render"]

 # Modifiable with settings
-review_bg = [255, 255, 255, 255]
+review_bg = [255, 255, 255, 1.0]

 def process(self, instance):
 self.log.info(

@@ -6,7 +6,7 @@ class ValidateWorkfileProjectName(pyblish.api.ContextPlugin):
 """Validate project name stored in workfile metadata.

 It is not possible to publish from different project than is set in
-environment variable "AVALON_PROJECT".
+environment variable "AYON_PROJECT_NAME".
 """

 label = "Validate Workfile Project Name"
@@ -60,7 +60,7 @@ def start_rendering():
 inst_data.append(data)

 try:
-project = os.environ.get("AVALON_PROJECT")
+project = os.environ.get("AYON_PROJECT_NAME")
 anatomy = Anatomy(project)
 root = anatomy.roots['renders']
 except Exception as e:

@@ -146,7 +146,7 @@ class UnrealPrelaunchHook(PreLaunchHook):

 def execute(self):
 """Hook entry method."""
-workdir = self.launch_context.env["AVALON_WORKDIR"]
+workdir = self.launch_context.env["AYON_WORKDIR"]
 executable = str(self.launch_context.executable)
 engine_version = self.app_name.split("/")[-1].replace("-", ".")
 try:

@@ -16,7 +16,6 @@ from ayon_core.client import get_asset_name_identifier
 from ayon_core.settings import (
-get_system_settings,
 get_project_settings,
 get_local_settings
 )
 from ayon_core.settings.constants import (
 METADATA_KEYS,
@@ -1528,16 +1527,17 @@ def prepare_app_environments(

 # Use environments from local settings
 filtered_local_envs = {}
-system_settings = data["system_settings"]
-whitelist_envs = system_settings["general"].get("local_env_white_list")
-if whitelist_envs:
-local_settings = get_local_settings()
-local_envs = local_settings.get("environments") or {}
-filtered_local_envs = {
-key: value
-for key, value in local_envs.items()
-if key in whitelist_envs
-}
+# NOTE Overrides for environment variables are not implemented in AYON.
+# system_settings = data["system_settings"]
+# whitelist_envs = system_settings["general"].get("local_env_white_list")
+# if whitelist_envs:
+# local_settings = get_local_settings()
+# local_envs = local_settings.get("environments") or {}
+# filtered_local_envs = {
+# key: value
+# for key, value in local_envs.items()
+# if key in whitelist_envs
+# }

 # Apply local environment variables for already existing values
 for key, value in filtered_local_envs.items():
@@ -1656,8 +1656,9 @@ def apply_project_environments_value(
 if project_settings is None:
 project_settings = get_project_settings(project_name)

-env_value = project_settings["global"]["project_environments"]
+env_value = project_settings["core"]["project_environments"]
 if env_value:
+env_value = json.loads(env_value)
 parsed_value = parse_environments(env_value, env_group)
 env.update(acre.compute(
 _merge_env(parsed_value, env),

@@ -1698,15 +1699,15 @@ def prepare_context_environments(data, env_group=None, addons_manager=None):

 app = data["app"]
 context_env = {
-"AVALON_PROJECT": project_doc["name"],
-"AVALON_APP_NAME": app.full_name
+"AYON_PROJECT_NAME": project_doc["name"],
+"AYON_APP_NAME": app.full_name
 }
 if asset_doc:
 asset_name = get_asset_name_identifier(asset_doc)
-context_env["AVALON_ASSET"] = asset_name
+context_env["AYON_FOLDER_PATH"] = asset_name

 if task_name:
-context_env["AVALON_TASK"] = task_name
+context_env["AYON_TASK_NAME"] = task_name

 log.debug(
 "Context environments set:\n{}".format(

@@ -1724,7 +1725,7 @@ def prepare_context_environments(data, env_group=None, addons_manager=None):
 if not app.is_host:
 return

-data["env"]["AVALON_APP"] = app.host_name
+data["env"]["AYON_HOST_NAME"] = app.host_name

 if not asset_doc or not task_name:
 # QUESTION replace with log.info and skip workfile discovery?

@@ -1770,7 +1771,7 @@ def prepare_context_environments(data, env_group=None, addons_manager=None):
 "Couldn't create workdir because: {}".format(str(exc))
 )

-data["env"]["AVALON_WORKDIR"] = workdir
+data["env"]["AYON_WORKDIR"] = workdir

 _prepare_last_workfile(data, workdir, addons_manager)
@@ -1887,7 +1888,7 @@ def _prepare_last_workfile(data, workdir, addons_manager):
 "Setting last workfile path: {}".format(last_workfile_path)
 )

-data["env"]["AVALON_LAST_WORKFILE"] = last_workfile_path
+data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path
 data["last_workfile_path"] = last_workfile_path

@@ -1916,7 +1917,7 @@ def should_start_last_workfile(
 project_settings = get_project_settings(project_name)
 profiles = (
 project_settings
-["global"]
+["core"]
 ["tools"]
 ["Workfiles"]
 ["last_workfile_on_startup"]

@@ -1966,7 +1967,7 @@ def should_workfile_tool_start(
 project_settings = get_project_settings(project_name)
 profiles = (
 project_settings
-["global"]
+["core"]
 ["tools"]
 ["Workfiles"]
 ["open_workfile_tool_on_startup"]

@@ -5,7 +5,6 @@ import platform
 import getpass
 import socket

-from ayon_core.settings.lib import get_local_settings
 from .execute import get_ayon_launcher_args
 from .local_settings import get_local_site_id

@@ -96,7 +95,6 @@ def get_all_current_info():
 return {
 "workstation": get_workstation_info(),
 "env": os.environ.copy(),
-"local_settings": get_local_settings(),
 "ayon": get_ayon_info(),
 }
@@ -257,7 +257,7 @@ class Logger:
 return cls._process_name

 # Get process name
-process_name = os.environ.get("AVALON_APP_NAME")
+process_name = os.environ.get("AYON_APP_NAME")
 if not process_name:
 try:
 import psutil
@@ -1385,23 +1385,26 @@ def _get_image_dimensions(application, input_path, log):

 def convert_color_values(application, color_value):
 """Get color mapping for ffmpeg and oiiotool.

 Args:
 application (str): Application for which command should be created.
-color_value (list[int]): List of 8bit int values for RGBA.
+color_value (tuple[int, int, int, float]): List of 8bit int values
+for RGBA.

 Returns:
 str: ffmpeg returns hex string, oiiotool is string with floats.

 """
 red, green, blue, alpha = color_value

 if application == "ffmpeg":
 return "{0:0>2X}{1:0>2X}{2:0>2X}@{3}".format(
-red, green, blue, (alpha / 255.0)
+red, green, blue, alpha
 )
 elif application == "oiiotool":
 red = float(red / 255)
 green = float(green / 255)
 blue = float(blue / 255)
-alpha = float(alpha / 255)

 return "{0:.3f},{1:.3f},{2:.3f},{3:.3f}".format(
 red, green, blue, alpha)
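A quick illustration of the conversion above, assuming the new calling convention in which RGB stays 8-bit and alpha is already a 0-1 float (as set by the updated review_bg default). This is a self-contained sketch that mirrors the hunk's logic, not the library function itself:

def convert_color_values_sketch(application, color_value):
    # RGB as 8-bit ints, alpha as a 0-1 float.
    red, green, blue, alpha = color_value
    if application == "ffmpeg":
        # ffmpeg takes an RRGGBB hex string plus "@<alpha>".
        return "{0:0>2X}{1:0>2X}{2:0>2X}@{3}".format(red, green, blue, alpha)
    # oiiotool takes comma-separated floats in the 0-1 range.
    return "{0:.3f},{1:.3f},{2:.3f},{3:.3f}".format(
        red / 255.0, green / 255.0, blue / 255.0, alpha)


print(convert_color_values_sketch("ffmpeg", (255, 255, 255, 1.0)))    # FFFFFF@1.0
print(convert_color_values_sketch("oiiotool", (255, 255, 255, 1.0)))  # 1.000,1.000,1.000,1.000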
@@ -12,7 +12,7 @@ class ClockifyStart(LauncherAction):

 def is_compatible(self, session):
 """Return whether the action is compatible with the session"""
-if "AVALON_TASK" in session:
+if "AYON_TASK_NAME" in session:
 return True
 return False

@@ -20,9 +20,9 @@ class ClockifyStart(LauncherAction):
 self.clockify_api.set_api()
 user_id = self.clockify_api.user_id
 workspace_id = self.clockify_api.workspace_id
-project_name = session["AVALON_PROJECT"]
-asset_name = session["AVALON_ASSET"]
-task_name = session["AVALON_TASK"]
+project_name = session["AYON_PROJECT_NAME"]
+asset_name = session["AYON_FOLDER_PATH"]
+task_name = session["AYON_TASK_NAME"]
 description = asset_name

 # fetch asset docs

@@ -36,7 +36,7 @@ class ClockifySync(LauncherAction):
 raise ClockifyPermissionsCheckFailed(
 "Current CLockify user is missing permissions for this action!"
 )
-project_name = session.get("AVALON_PROJECT") or ""
+project_name = session.get("AYON_PROJECT_NAME") or ""

 projects_to_sync = []
 if project_name.strip():

@@ -4,7 +4,7 @@ import six
 import sys

 from ayon_core.lib import requests_get, Logger
-from ayon_core.modules import OpenPypeModule, IPluginPaths
+from ayon_core.modules import AYONAddon, IPluginPaths


 class DeadlineWebserviceError(Exception):
@@ -13,28 +13,28 @@ class DeadlineWebserviceError(Exception):
 """


-class DeadlineModule(OpenPypeModule, IPluginPaths):
+class DeadlineModule(AYONAddon, IPluginPaths):
 name = "deadline"

 def __init__(self, manager, settings):
 self.deadline_urls = {}
 super(DeadlineModule, self).__init__(manager, settings)

-def initialize(self, modules_settings):
+def initialize(self, studio_settings):
 # This module is always enabled
-deadline_settings = modules_settings[self.name]
-self.enabled = deadline_settings["enabled"]
-deadline_url = deadline_settings.get("DEADLINE_REST_URL")
-if deadline_url:
-self.deadline_urls = {"default": deadline_url}
-else:
-self.deadline_urls = deadline_settings.get("deadline_urls") # noqa: E501
+deadline_urls = {}
+enabled = self.name in studio_settings
+if enabled:
+deadline_settings = studio_settings[self.name]
+deadline_urls = {
+url_item["name"]: url_item["value"]
+for url_item in deadline_settings["deadline_urls"]
+}

-if not self.deadline_urls:
-self.enabled = False
-self.log.warning(("default Deadline Webservice URL "
-"not specified. Disabling module."))
-return
+if enabled and not deadline_urls:
+enabled = False
+self.log.warning((
+"Deadline Webservice URLs are not specified. Disabling addon."
+))

+self.enabled = enabled
+self.deadline_urls = deadline_urls

 def get_plugin_paths(self):
 """Deadline plugin paths."""
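For context on the initialize() change above: the new code expects the studio settings to carry a list of name/value items under "deadline_urls" and flattens it into a dict. A small sketch of that shape and the resulting mapping (the key "deadline_urls" and the comprehension come from the hunk; the URLs and entry names are illustrative):

# Hypothetical studio settings payload.
studio_settings = {
    "deadline": {
        "deadline_urls": [
            {"name": "default", "value": "http://localhost:8082"},
            {"name": "gpu_farm", "value": "http://10.0.0.5:8082"},
        ]
    }
}

deadline_settings = studio_settings["deadline"]
deadline_urls = {
    url_item["name"]: url_item["value"]
    for url_item in deadline_settings["deadline_urls"]
}
# -> {"default": "http://localhost:8082", "gpu_farm": "http://10.0.0.5:8082"}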
@@ -47,11 +47,11 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
 deadline_settings = (
 render_instance.context.data
 ["system_settings"]
 ["modules"]
 ["deadline"]
 )

 default_server = render_instance.context.data["defaultDeadline"]
 # QUESTION How and where is this is set? Should be removed?
 instance_server = render_instance.data.get("deadlineServers")
 if not instance_server:
 self.log.debug("Using default server.")

@@ -64,7 +64,10 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
 asString=True
 )

-default_servers = deadline_settings["deadline_urls"]
+default_servers = {
+url_item["name"]: url_item["value"]
+for url_item in deadline_settings["deadline_urls"]
+}
 project_servers = (
 render_instance.context.data
 ["project_settings"]
@@ -1,39 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Collect instances that should be processed and published on DL.
-
-"""
-import os
-
-import pyblish.api
-from ayon_core.pipeline import PublishValidationError
-
-
-class CollectDeadlinePublishableInstances(pyblish.api.InstancePlugin):
-"""Collect instances that should be processed and published on DL.
-
-Some long running publishes (not just renders) could be offloaded to DL,
-this plugin compares theirs name against env variable, marks only
-publishable by farm.
-
-Triggered only when running only in headless mode, eg on a farm.
-"""
-
-order = pyblish.api.CollectorOrder + 0.499
-label = "Collect Deadline Publishable Instance"
-targets = ["remote"]
-
-def process(self, instance):
-self.log.debug("CollectDeadlinePublishableInstances")
-publish_inst = os.environ.get("OPENPYPE_PUBLISH_SUBSET", '')
-if not publish_inst:
-raise PublishValidationError("OPENPYPE_PUBLISH_SUBSET env var "
-"required for remote publishing")
-
-subset_name = instance.data["subset"]
-if subset_name == publish_inst:
-self.log.debug("Publish {}".format(subset_name))
-instance.data["publish"] = True
-instance.data["farm"] = False
-else:
-self.log.debug("Skipping {}".format(subset_name))
-instance.data["publish"] = False
@@ -80,11 +80,11 @@ class AfterEffectsSubmitDeadline(
 "FTRACK_API_KEY",
 "FTRACK_API_USER",
 "FTRACK_SERVER",
-"AVALON_PROJECT",
-"AVALON_ASSET",
-"AVALON_TASK",
-"AVALON_WORKDIR",
-"AVALON_APP_NAME",
+"AYON_PROJECT_NAME",
+"AYON_FOLDER_PATH",
+"AYON_TASK_NAME",
+"AYON_WORKDIR",
+"AYON_APP_NAME",
 "AYON_LOG_NO_COLORS",
 "IS_TEST"
 ]

@@ -102,11 +102,11 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
 "FTRACK_API_USER",
 "FTRACK_SERVER",
 "OPENPYPE_SG_USER",
-"AVALON_PROJECT",
-"AVALON_ASSET",
-"AVALON_TASK",
-"AVALON_WORKDIR",
-"AVALON_APP_NAME",
+"AYON_PROJECT_NAME",
+"AYON_FOLDER_PATH",
+"AYON_TASK_NAME",
+"AYON_WORKDIR",
+"AYON_APP_NAME",
 "IS_TEST"
 ]

@@ -220,11 +220,11 @@ class FusionSubmitDeadline(
 "FTRACK_API_KEY",
 "FTRACK_API_USER",
 "FTRACK_SERVER",
-"AVALON_PROJECT",
-"AVALON_ASSET",
-"AVALON_TASK",
-"AVALON_WORKDIR",
-"AVALON_APP_NAME",
+"AYON_PROJECT_NAME",
+"AYON_FOLDER_PATH",
+"AYON_TASK_NAME",
+"AYON_WORKDIR",
+"AYON_APP_NAME",
 "AYON_LOG_NO_COLORS",
 "IS_TEST",
 "AYON_BUNDLE_NAME",

@@ -273,11 +273,11 @@ class HarmonySubmitDeadline(
 "FTRACK_API_KEY",
 "FTRACK_API_USER",
 "FTRACK_SERVER",
-"AVALON_PROJECT",
-"AVALON_ASSET",
-"AVALON_TASK",
-"AVALON_WORKDIR",
-"AVALON_APP_NAME",
+"AYON_PROJECT_NAME",
+"AYON_FOLDER_PATH",
+"AYON_TASK_NAME",
+"AYON_WORKDIR",
+"AYON_APP_NAME",
 "AYON_LOG_NO_COLORS"
 "IS_TEST"
 ]

@@ -98,11 +98,11 @@ class HoudiniCacheSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline
 "FTRACK_API_USER",
 "FTRACK_SERVER",
 "OPENPYPE_SG_USER",
-"AVALON_PROJECT",
-"AVALON_ASSET",
-"AVALON_TASK",
-"AVALON_WORKDIR",
-"AVALON_APP_NAME",
+"AYON_PROJECT_NAME",
+"AYON_FOLDER_PATH",
+"AYON_TASK_NAME",
+"AYON_WORKDIR",
+"AYON_APP_NAME",
 "AYON_LOG_NO_COLORS",
 ]

@@ -207,11 +207,11 @@ class HoudiniSubmitDeadline(
 "FTRACK_API_USER",
 "FTRACK_SERVER",
 "OPENPYPE_SG_USER",
-"AVALON_PROJECT",
-"AVALON_ASSET",
-"AVALON_TASK",
-"AVALON_WORKDIR",
-"AVALON_APP_NAME",
+"AYON_PROJECT_NAME",
+"AYON_FOLDER_PATH",
+"AYON_TASK_NAME",
+"AYON_WORKDIR",
+"AYON_APP_NAME",
 "AYON_LOG_NO_COLORS",
 ]

@@ -106,11 +106,11 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
 "FTRACK_API_USER",
 "FTRACK_SERVER",
 "OPENPYPE_SG_USER",
-"AVALON_PROJECT",
-"AVALON_ASSET",
-"AVALON_TASK",
-"AVALON_WORKDIR",
-"AVALON_APP_NAME",
+"AYON_PROJECT_NAME",
+"AYON_FOLDER_PATH",
+"AYON_TASK_NAME",
+"AYON_WORKDIR",
+"AYON_APP_NAME",
 "IS_TEST"
 ]

@@ -207,11 +207,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
 "FTRACK_API_USER",
 "FTRACK_SERVER",
 "OPENPYPE_SG_USER",
-"AVALON_PROJECT",
-"AVALON_ASSET",
-"AVALON_TASK",
-"AVALON_WORKDIR",
-"AVALON_APP_NAME",
+"AYON_PROJECT_NAME",
+"AYON_FOLDER_PATH",
+"AYON_TASK_NAME",
+"AYON_WORKDIR",
+"AYON_APP_NAME",
 "IS_TEST"
 ]
@@ -1,131 +0,0 @@
-import os
-import attr
-from datetime import datetime
-
-from ayon_core.pipeline import PublishXmlValidationError
-from ayon_core.lib import is_in_tests
-from openpype_modules.deadline import abstract_submit_deadline
-from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
-
-import pyblish.api
-
-
-@attr.s
-class MayaPluginInfo(object):
-Build = attr.ib(default=None) # Don't force build
-StrictErrorChecking = attr.ib(default=True)
-
-SceneFile = attr.ib(default=None) # Input scene
-Version = attr.ib(default=None) # Mandatory for Deadline
-ProjectPath = attr.ib(default=None)
-
-ScriptJob = attr.ib(default=True)
-ScriptFilename = attr.ib(default=None)
-
-
-class MayaSubmitRemotePublishDeadline(
-abstract_submit_deadline.AbstractSubmitDeadline):
-"""Submit Maya scene to perform a local publish in Deadline.
-
-Publishing in Deadline can be helpful for scenes that publish very slow.
-This way it can process in the background on another machine without the
-Artist having to wait for the publish to finish on their local machine.
-
-Submission is done through the Deadline Web Service. DL then triggers
-`openpype/scripts/remote_publish.py`.
-
-Each publishable instance creates its own full publish job.
-
-Different from `ProcessSubmittedJobOnFarm` which creates publish job
-depending on metadata json containing context and instance data of
-rendered files.
-"""
-
-label = "Submit Scene to Deadline"
-order = pyblish.api.IntegratorOrder
-hosts = ["maya"]
-families = ["publish.farm"]
-targets = ["local"]
-
-def process(self, instance):
-
-# Ensure no errors so far
-if not (all(result["success"]
-for result in instance.context.data["results"])):
-raise PublishXmlValidationError("Publish process has errors")
-
-if not instance.data["publish"]:
-self.log.warning("No active instances found. "
-"Skipping submission..")
-return
-
-super(MayaSubmitRemotePublishDeadline, self).process(instance)
-
-def get_job_info(self):
-instance = self._instance
-context = instance.context
-
-project_name = instance.context.data["projectName"]
-scene = instance.context.data["currentFile"]
-scenename = os.path.basename(scene)
-
-job_name = "{scene} [PUBLISH]".format(scene=scenename)
-batch_name = "{code} - {scene}".format(code=project_name,
-scene=scenename)
-
-if is_in_tests():
-batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
-
-job_info = DeadlineJobInfo(Plugin="MayaBatch")
-job_info.BatchName = batch_name
-job_info.Name = job_name
-job_info.UserName = context.data.get("user")
-job_info.Comment = context.data.get("comment", "")
-
-# use setting for publish job on farm, no reason to have it separately
-project_settings = context.data["project_settings"]
-deadline_publish_job_sett = project_settings["deadline"]["publish"]["ProcessSubmittedJobOnFarm"] # noqa
-job_info.Department = deadline_publish_job_sett["deadline_department"]
-job_info.ChunkSize = deadline_publish_job_sett["deadline_chunk_size"]
-job_info.Priority = deadline_publish_job_sett["deadline_priority"]
-job_info.Group = deadline_publish_job_sett["deadline_group"]
-job_info.Pool = deadline_publish_job_sett["deadline_pool"]
-
-# Include critical environment variables with submission + Session
-keys = [
-"FTRACK_API_USER",
-"FTRACK_API_KEY",
-"FTRACK_SERVER"
-]
-
-environment = {
-key: os.environ[key]
-for key in keys
-if key in os.environ
-}
-
-environment["AVALON_PROJECT"] = project_name
-environment["AVALON_ASSET"] = instance.context.data["asset"]
-environment["AVALON_TASK"] = instance.context.data["task"]
-environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME")
-environment["OPENPYPE_PUBLISH_SUBSET"] = instance.data["subset"]
-environment["AYON_LOG_NO_COLORS"] = "1"
-environment["AYON_USERNAME"] = instance.context.data["user"]
-environment["AYON_REMOTE_PUBLISH"] = "1"
-
-for key, value in environment.items():
-job_info.EnvironmentKeyValue[key] = value
-
-def get_plugin_info(self):
-# Not all hosts can import this module.
-from maya import cmds
-scene = self._instance.context.data["currentFile"]
-
-plugin_info = MayaPluginInfo()
-plugin_info.SceneFile = scene
-plugin_info.ScriptFilename = "{OPENPYPE_REPOS_ROOT}/openpype/scripts/remote_publish.py" # noqa
-plugin_info.Version = cmds.about(version=True)
-plugin_info.ProjectPath = cmds.workspace(query=True,
-rootDirectory=True)
-
-return attr.asdict(plugin_info)
@@ -373,10 +373,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
        keys = [
            "PYTHONPATH",
            "PATH",
            "AVALON_PROJECT",
            "AVALON_ASSET",
            "AVALON_TASK",
            "AVALON_APP_NAME",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_APP_NAME",
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
@@ -67,7 +67,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
        "FTRACK_API_USER",
        "FTRACK_API_KEY",
        "FTRACK_SERVER",
        "AVALON_APP_NAME",
        "AYON_APP_NAME",
        "AYON_USERNAME",
        "OPENPYPE_SG_USER",
        "KITSU_LOGIN",

@@ -125,9 +125,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
            create_metadata_path(instance, anatomy)

        environment = {
            "AVALON_PROJECT": instance.context.data["projectName"],
            "AVALON_ASSET": instance.context.data["asset"],
            "AVALON_TASK": instance.context.data["task"],
            "AYON_PROJECT_NAME": instance.context.data["projectName"],
            "AYON_FOLDER_PATH": instance.context.data["asset"],
            "AYON_TASK_NAME": instance.context.data["task"],
            "AYON_USERNAME": instance.context.data["user"],
            "AYON_LOG_NO_COLORS": "1",
            "IS_TEST": str(int(is_in_tests())),
@@ -130,7 +130,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
        "FTRACK_API_USER",
        "FTRACK_API_KEY",
        "FTRACK_SERVER",
        "AVALON_APP_NAME",
        "AYON_APP_NAME",
        "AYON_USERNAME",
        "OPENPYPE_SG_USER",
        "KITSU_LOGIN",

@@ -202,9 +202,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
            create_metadata_path(instance, anatomy)

        environment = {
            "AVALON_PROJECT": instance.context.data["projectName"],
            "AVALON_ASSET": instance.context.data["asset"],
            "AVALON_TASK": instance.context.data["task"],
            "AYON_PROJECT_NAME": instance.context.data["projectName"],
            "AYON_FOLDER_PATH": instance.context.data["asset"],
            "AYON_TASK_NAME": instance.context.data["task"],
            "AYON_USERNAME": instance.context.data["user"],
            "AYON_LOG_NO_COLORS": "1",
            "IS_TEST": str(int(is_in_tests())),
@@ -330,151 +330,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
            self.log.debug("Skipping local instance.")
            return

        data = instance.data.copy()
        context = instance.context
        self.context = context
        self.anatomy = instance.context.data["anatomy"]

        asset = data.get("asset") or context.data["asset"]
        subset = data.get("subset")

        start = instance.data.get("frameStart")
        if start is None:
            start = context.data["frameStart"]

        end = instance.data.get("frameEnd")
        if end is None:
            end = context.data["frameEnd"]

        handle_start = instance.data.get("handleStart")
        if handle_start is None:
            handle_start = context.data["handleStart"]

        handle_end = instance.data.get("handleEnd")
        if handle_end is None:
            handle_end = context.data["handleEnd"]

        fps = instance.data.get("fps")
        if fps is None:
            fps = context.data["fps"]

        if data.get("extendFrames", False):
            start, end = self._extend_frames(
                asset,
                subset,
                start,
                end,
                data["overrideExistingFrame"])

        try:
            source = data["source"]
        except KeyError:
            source = context.data["currentFile"]

        success, rootless_path = (
            self.anatomy.find_root_template_from_path(source)
        )
        if success:
            source = rootless_path

        else:
            # `rootless_path` is not set to `source` if none of roots match
            self.log.warning((
                "Could not find root path for remapping \"{}\"."
                " This may cause issues."
            ).format(source))

        family = "render"
        if ("prerender" in instance.data["families"] or
                "prerender.farm" in instance.data["families"]):
            family = "prerender"
        families = [family]

        # pass review to families if marked as review
        do_not_add_review = False
        if data.get("review"):
            families.append("review")
        elif data.get("review") is False:
            self.log.debug("Instance has review explicitly disabled.")
            do_not_add_review = True

        instance_skeleton_data = {
            "family": family,
            "subset": subset,
            "families": families,
            "asset": asset,
            "frameStart": start,
            "frameEnd": end,
            "handleStart": handle_start,
            "handleEnd": handle_end,
            "frameStartHandle": start - handle_start,
            "frameEndHandle": end + handle_end,
            "comment": instance.data["comment"],
            "fps": fps,
            "source": source,
            "extendFrames": data.get("extendFrames"),
            "overrideExistingFrame": data.get("overrideExistingFrame"),
            "pixelAspect": data.get("pixelAspect", 1),
            "resolutionWidth": data.get("resolutionWidth", 1920),
            "resolutionHeight": data.get("resolutionHeight", 1080),
            "multipartExr": data.get("multipartExr", False),
            "jobBatchName": data.get("jobBatchName", ""),
            "useSequenceForReview": data.get("useSequenceForReview", True),
            # map inputVersions `ObjectId` -> `str` so json supports it
            "inputVersions": list(map(str, data.get("inputVersions", []))),
            "colorspace": instance.data.get("colorspace"),
            "stagingDir_persistent": instance.data.get(
                "stagingDir_persistent", False
            )
        }

        # skip locking version if we are creating v01
        instance_version = instance.data.get("version")  # take this if exists
        if instance_version != 1:
            instance_skeleton_data["version"] = instance_version

        # transfer specific families from original instance to new render
        for item in self.families_transfer:
            if item in instance.data.get("families", []):
                instance_skeleton_data["families"] += [item]

        # transfer specific properties from original instance based on
        # mapping dictionary `instance_transfer`
        for key, values in self.instance_transfer.items():
            if key in instance.data.get("families", []):
                for v in values:
                    instance_skeleton_data[v] = instance.data.get(v)

        # look into instance data if representations are not having any
        # which are having tag `publish_on_farm` and include them
        for repre in instance.data.get("representations", []):
            staging_dir = repre.get("stagingDir")
            if staging_dir:
                success, rootless_staging_dir = (
                    self.anatomy.find_root_template_from_path(
                        staging_dir
                    )
                )
                if success:
                    repre["stagingDir"] = rootless_staging_dir
                else:
                    self.log.warning((
                        "Could not find root path for remapping \"{}\"."
                        " This may cause issues on farm."
                    ).format(staging_dir))
                    repre["stagingDir"] = staging_dir

            if "publish_on_farm" in repre.get("tags"):
                # create representations attribute of not there
                if "representations" not in instance_skeleton_data.keys():
                    instance_skeleton_data["representations"] = []

                instance_skeleton_data["representations"].append(repre)

        instances = None
        assert data.get("expectedFiles"), ("Submission from old Pype version"
                                           " - missing expectedFiles")

        anatomy = instance.context.data["anatomy"]

        instance_skeleton_data = create_skeleton_instance(
@@ -14,7 +14,7 @@ from Deadline.Scripting import (
    DirectoryUtils,
    ProcessUtils,
)
__version__ = "1.0.0"
__version__ = "1.0.1"
VERSION_REGEX = re.compile(
    r"(?P<major>0|[1-9]\d*)"
    r"\.(?P<minor>0|[1-9]\d*)"
@@ -471,12 +471,21 @@ def inject_ayon_environment(deadlinePlugin):
    ]

    add_kwargs = {
        "project": job.GetJobEnvironmentKeyValue("AVALON_PROJECT"),
        "asset": job.GetJobEnvironmentKeyValue("AVALON_ASSET"),
        "task": job.GetJobEnvironmentKeyValue("AVALON_TASK"),
        "app": job.GetJobEnvironmentKeyValue("AVALON_APP_NAME"),
        "envgroup": "farm",
    }
    # Support backwards compatible keys
    for key, env_keys in (
        ("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]),
        ("asset", ["AYON_FOLDER_PATH", "AVALON_ASSET"]),
        ("task", ["AYON_TASK_NAME", "AVALON_TASK"]),
        ("app", ["AYON_APP_NAME", "AVALON_APP_NAME"]),
    ):
        value = ""
        for env_key in env_keys:
            value = job.GetJobEnvironmentKeyValue(env_key)
            if value:
                break
        add_kwargs[key] = value

    if job.GetJobEnvironmentKeyValue("IS_TEST"):
        args.append("--automatic-tests")

@@ -486,8 +495,8 @@ def inject_ayon_environment(deadlinePlugin):
            args.extend(["--{}".format(key), value])
    else:
        raise RuntimeError((
            "Missing required env vars: AVALON_PROJECT, AVALON_ASSET,"
            " AVALON_TASK, AVALON_APP_NAME"
            "Missing required env vars: AYON_PROJECT_NAME,"
            " AYON_FOLDER_PATH, AYON_TASK_NAME, AYON_APP_NAME"
        ))

    environment = {
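# A minimal sketch (assumption, not part of the commit above) of the
# AVALON -> AYON fallback that the hunks above repeat: prefer the new
# AYON_* key and fall back to the legacy AVALON_* key. The Deadline code
# reads values via job.GetJobEnvironmentKeyValue(); this standalone version
# uses the process environment instead, and the helper name is hypothetical.
import os


def get_first_env(keys, default=""):
    """Return the first non-empty value among the given environment keys."""
    for key in keys:
        value = os.environ.get(key)
        if value:
            return value
    return default


# Usage: prefer the new key, fall back to the legacy one.
project_name = get_first_env(["AYON_PROJECT_NAME", "AVALON_PROJECT"])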
@@ -361,8 +361,8 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin,

        if not all(add_kwargs.values()):
            raise RuntimeError((
                "Missing required env vars: AVALON_PROJECT, AVALON_ASSET,"
                " AVALON_TASK, AVALON_APP_NAME"
                "Missing required env vars: AYON_PROJECT_NAME, AYON_FOLDER_PATH,"
                " AYON_TASK_NAME, AYON_APP_NAME"
            ))

        for key, value in add_kwargs.items():
@@ -63,7 +63,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin,
        "FTRACK_API_USER",
        "FTRACK_API_KEY",
        "FTRACK_SERVER",
        "AVALON_APP_NAME",
        "AYON_APP_NAME",
        "AYON_USERNAME",
        "OPENPYPE_SG_USER",
    ]

@@ -179,9 +179,9 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin,
        anatomy_data = instance.context.data["anatomyData"]

        environment = RREnvList({
            "AVALON_PROJECT": anatomy_data["project"]["name"],
            "AVALON_ASSET": instance.context.data["asset"],
            "AVALON_TASK": anatomy_data["task"]["name"],
            "AYON_PROJECT_NAME": anatomy_data["project"]["name"],
            "AYON_FOLDER_PATH": instance.context.data["asset"],
            "AYON_TASK_NAME": anatomy_data["task"]["name"],
            "AYON_USERNAME": anatomy_data["user"]
        })
@@ -136,10 +136,10 @@ class OpenPypeContextSelector:

    def run_publish(self):
        """Run publish process."""
        env = {"AVALON_PROJECT": str(self.context.get("project")),
               "AVALON_ASSET": str(self.context.get("asset")),
               "AVALON_TASK": str(self.context.get("task")),
               # "AVALON_APP_NAME": str(self.context.get("app_name"))
        env = {"AYON_PROJECT_NAME": str(self.context.get("project")),
               "AYON_FOLDER_PATH": str(self.context.get("asset")),
               "AYON_TASK_NAME": str(self.context.get("task")),
               # "AYON_APP_NAME": str(self.context.get("app_name"))
               }

        print(">>> setting environment:")

@@ -182,10 +182,18 @@ print("running selector")
selector = OpenPypeContextSelector()

# try to set context from environment
selector.context["project"] = os.getenv("AVALON_PROJECT")
selector.context["asset"] = os.getenv("AVALON_ASSET")
selector.context["task"] = os.getenv("AVALON_TASK")
# selector.context["app_name"] = os.getenv("AVALON_APP_NAME")
for key, env_keys in (
    ("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]),
    ("asset", ["AYON_FOLDER_PATH", "AVALON_ASSET"]),
    ("task", ["AYON_TASK_NAME", "AVALON_TASK"]),
    # ("app_name", ["AYON_APP_NAME", "AVALON_APP_NAME"])
):
    value = ""
    for env_key in env_keys:
        value = os.getenv(env_key)
        if value:
            break
    selector.context[key] = value

# if anything inside is None, scratch the whole thing and
# ask user for context.
@@ -26,7 +26,7 @@ class LauncherAction(object):

        Args:
            session (dict[str, Union[str, None]]): Session data with
                AVALON_PROJECT, AVALON_ASSET and AVALON_TASK.
                AYON_PROJECT_NAME, AYON_FOLDER_PATH and AYON_TASK_NAME.
        """

        return True
@@ -8,9 +8,6 @@ import numbers
import six
import time

from ayon_core.settings.lib import (
    get_local_settings,
)
from ayon_core.client import get_project, get_ayon_server_api_connection
from ayon_core.lib import Logger, get_local_site_id
from ayon_core.lib.path_templates import (

@@ -423,7 +420,7 @@ class Anatomy(BaseAnatomy):

    def __init__(self, project_name=None, site_name=None):
        if not project_name:
            project_name = os.environ.get("AVALON_PROJECT")
            project_name = os.environ.get("AYON_PROJECT_NAME")

        if not project_name:
            raise ProjectNotSet((

@@ -453,7 +450,7 @@ class Anatomy(BaseAnatomy):
        return cls._sync_server_addon_cache.data

    @classmethod
    def _get_studio_roots_overrides(cls, project_name, local_settings=None):
    def _get_studio_roots_overrides(cls, project_name):
        """This would return 'studio' site override by local settings.

        Notes:

@@ -465,7 +462,6 @@ class Anatomy(BaseAnatomy):

        Args:
            project_name (str): Name of project.
            local_settings (Optional[dict[str, Any]]): Prepared local settings.

        Returns:
            Union[Dict[str, str], None]): Local root overrides.

@@ -488,11 +484,6 @@ class Anatomy(BaseAnatomy):
            should be returned.
        """

        # Local settings may be used more than once or may not be used at all
        # - to avoid slowdowns 'get_local_settings' is not called until it's
        #   really needed
        local_settings = None

        # First check if sync server is available and enabled
        sync_server = cls.get_sync_server_addon()
        if sync_server is None or not sync_server.enabled:

@@ -503,11 +494,8 @@ class Anatomy(BaseAnatomy):
            # Use sync server to receive active site name
            project_cache = cls._default_site_id_cache[project_name]
            if project_cache.is_outdated:
                local_settings = get_local_settings()
                project_cache.update_data(
                    sync_server.get_active_site_type(
                        project_name, local_settings
                    )
                    sync_server.get_active_site_type(project_name)
                )
            site_name = project_cache.data

@@ -517,12 +505,12 @@ class Anatomy(BaseAnatomy):
            # Handle studio root overrides without sync server
            # - studio root overrides can be done even without sync server
            roots_overrides = cls._get_studio_roots_overrides(
                project_name, local_settings
                project_name
            )
        else:
            # Ask sync server to get roots overrides
            roots_overrides = sync_server.get_site_root_overrides(
                project_name, site_name, local_settings
                project_name, site_name
            )
        site_cache.update_data(roots_overrides)
        return site_cache.data
@@ -1018,7 +1018,7 @@ def _get_imageio_settings(project_settings, host_name):
        tuple[dict, dict]: image io settings for global and host
    """
    # get image io from global and host_name
    imageio_global = project_settings["global"]["imageio"]
    imageio_global = project_settings["core"]["imageio"]
    # host is optional, some might not have any settings
    imageio_host = project_settings.get(host_name, {}).get("imageio", {})
@@ -117,12 +117,12 @@ def install_host(host):

    addons_manager = _get_addons_manager()

    project_name = os.getenv("AVALON_PROJECT")
    project_name = os.getenv("AYON_PROJECT_NAME")
    # WARNING: This might be an issue
    # - commented out because 'traypublisher' does not have set project
    # if not project_name:
    #     raise ValueError(
    #         "AVALON_PROJECT is missing in environment variables."
    #         "AYON_PROJECT_NAME is missing in environment variables."
    #     )

    log.info("Activating {}..".format(project_name))

@@ -152,7 +152,7 @@ def install_host(host):
    print("Registering pyblish target: automated")
    pyblish.api.register_target("automated")

    host_name = os.environ.get("AVALON_APP")
    host_name = os.environ.get("AYON_HOST_NAME")

    # Give option to handle host installation
    for addon in addons_manager.get_enabled_addons():

@@ -172,7 +172,7 @@ def install_ayon_plugins(project_name=None, host_name=None):
    register_inventory_action_path(INVENTORY_PATH)

    if host_name is None:
        host_name = os.environ.get("AVALON_APP")
        host_name = os.environ.get("AYON_HOST_NAME")

    addons_manager = _get_addons_manager()
    publish_plugin_dirs = addons_manager.collect_publish_plugin_paths(

@@ -196,7 +196,7 @@ def install_ayon_plugins(project_name=None, host_name=None):
        register_inventory_action_path(path)

    if project_name is None:
        project_name = os.environ.get("AVALON_PROJECT")
        project_name = os.environ.get("AYON_PROJECT_NAME")

    # Register studio specific plugins
    if project_name:
@@ -208,8 +208,8 @@ def install_ayon_plugins(project_name=None, host_name=None):
        platform_name = platform.system().lower()
        project_plugins = (
            project_settings
            .get("global", {})
            .get("project_plugins", {})
            ["core"]
            ["project_plugins"]
            .get(platform_name)
        ) or []
        for path in project_plugins:
@@ -331,7 +331,7 @@ def get_current_host_name():
    """Current host name.

    Function is based on currently registered host integration or environment
    variable 'AVALON_APP'.
    variable 'AYON_HOST_NAME'.

    Returns:
        Union[str, None]: Name of host integration in current process or None.

@@ -340,7 +340,7 @@ def get_current_host_name():
    host = registered_host()
    if isinstance(host, HostBase):
        return host.name
    return os.environ.get("AVALON_APP")
    return os.environ.get("AYON_HOST_NAME")


def get_global_context():

@@ -365,9 +365,9 @@ def get_global_context():
    """

    return {
        "project_name": os.environ.get("AVALON_PROJECT"),
        "asset_name": os.environ.get("AVALON_ASSET"),
        "task_name": os.environ.get("AVALON_TASK"),
        "project_name": os.environ.get("AYON_PROJECT_NAME"),
        "asset_name": os.environ.get("AYON_FOLDER_PATH"),
        "task_name": os.environ.get("AYON_TASK_NAME"),
    }
@@ -474,10 +474,10 @@ def get_template_data_from_session(session=None, system_settings=None):
    """

    if session is not None:
        project_name = session["AVALON_PROJECT"]
        asset_name = session["AVALON_ASSET"]
        task_name = session["AVALON_TASK"]
        host_name = session["AVALON_APP"]
        project_name = session["AYON_PROJECT_NAME"]
        asset_name = session["AYON_FOLDER_PATH"]
        task_name = session["AYON_TASK_NAME"]
        host_name = session["AYON_HOST_NAME"]
    else:
        context = get_current_context()
        project_name = context["project_name"]

@@ -525,8 +525,8 @@ def get_workdir_from_session(session=None, template_key=None):
    """

    if session is not None:
        project_name = session["AVALON_PROJECT"]
        host_name = session["AVALON_APP"]
        project_name = session["AYON_PROJECT_NAME"]
        host_name = session["AYON_HOST_NAME"]
    else:
        project_name = get_current_project_name()
        host_name = get_current_host_name()
@@ -566,10 +566,10 @@ def get_custom_workfile_template_from_session(
    """

    if session is not None:
        project_name = session["AVALON_PROJECT"]
        asset_name = session["AVALON_ASSET"]
        task_name = session["AVALON_TASK"]
        host_name = session["AVALON_APP"]
        project_name = session["AYON_PROJECT_NAME"]
        asset_name = session["AYON_FOLDER_PATH"]
        task_name = session["AYON_TASK_NAME"]
        host_name = session["AYON_HOST_NAME"]
    else:
        context = get_current_context()
        project_name = context["project_name"]

@@ -616,10 +616,10 @@ def change_current_context(asset_doc, task_name, template_key=None):

    folder_path = get_asset_name_identifier(asset_doc)
    envs = {
        "AVALON_PROJECT": project_name,
        "AVALON_ASSET": folder_path,
        "AVALON_TASK": task_name,
        "AVALON_WORKDIR": workdir,
        "AYON_PROJECT_NAME": project_name,
        "AYON_FOLDER_PATH": folder_path,
        "AYON_TASK_NAME": task_name,
        "AYON_WORKDIR": workdir,
    }

    # Update the Session and environments. Pop from environments all keys with
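# A small sketch (assumption, not traced to the diff above) of how the
# 'envs' mapping built in change_current_context could be applied to the
# process environment; the helper name '_apply_context_envs' and the exact
# clean-up of legacy keys are illustrative only.
import os


def _apply_context_envs(envs):
    # Set the new AYON_* context values on the current process.
    for key, value in envs.items():
        os.environ[key] = value
    # Drop legacy AVALON_* keys so nothing keeps reading a stale context.
    for legacy_key in (
        "AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK", "AVALON_WORKDIR"
    ):
        os.environ.pop(legacy_key, None)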
@@ -1536,7 +1536,7 @@ class CreateContext:
    def host_name(self):
        if hasattr(self.host, "name"):
            return self.host.name
        return os.environ["AVALON_APP"]
        return os.environ["AYON_HOST_NAME"]

    def get_current_project_name(self):
        """Project name which was used as current context on context reset.
@@ -45,7 +45,7 @@ class LegacyCreator(object):
    def apply_settings(cls, project_settings, system_settings):
        """Apply OpenPype settings to a plugin class."""

        host_name = os.environ.get("AVALON_APP")
        host_name = os.environ.get("AYON_HOST_NAME")
        plugin_type = "create"
        plugin_type_settings = (
            project_settings

@@ -54,7 +54,7 @@ class LegacyCreator(object):
        )
        global_type_settings = (
            project_settings
            .get("global", {})
            .get("core", {})
            .get(plugin_type, {})
        )
        if not global_type_settings and not plugin_type_settings:
@@ -47,10 +47,10 @@ def get_subset_name_template(

    if project_settings is None:
        project_settings = get_project_settings(project_name)
    tools_settings = project_settings["global"]["tools"]
    profiles = tools_settings["creator"]["subset_name_profiles"]
    tools_settings = project_settings["core"]["tools"]
    profiles = tools_settings["creator"]["product_name_profiles"]
    filtering_criteria = {
        "families": family,
        "product_types": family,
        "hosts": host_name,
        "tasks": task_name,
        "task_types": task_type

@@ -59,7 +59,19 @@ def get_subset_name_template(
    matching_profile = filter_profiles(profiles, filtering_criteria)
    template = None
    if matching_profile:
        template = matching_profile["template"]
        # TODO remove formatting keys replacement
        template = (
            matching_profile["template"]
            .replace("{task[name]}", "{task}")
            .replace("{Task[name]}", "{Task}")
            .replace("{TASK[NAME]}", "{TASK}")
            .replace("{product[type]}", "{family}")
            .replace("{Product[type]}", "{Family}")
            .replace("{PRODUCT[TYPE]}", "{FAMILY}")
            .replace("{folder[name]}", "{asset}")
            .replace("{Folder[name]}", "{Asset}")
            .replace("{FOLDER[NAME]}", "{ASSET}")
        )

    # Make sure template is set (matching may have empty string)
    if not template:
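# Sketch (assumption, not part of the diff above): the chained .replace()
# calls could also be driven by a mapping that keeps the new-style to
# legacy-style key pairs in one place. Names below are illustrative only.
_LEGACY_KEY_MAP = {
    "{task[name]}": "{task}",
    "{Task[name]}": "{Task}",
    "{TASK[NAME]}": "{TASK}",
    "{product[type]}": "{family}",
    "{Product[type]}": "{Family}",
    "{PRODUCT[TYPE]}": "{FAMILY}",
    "{folder[name]}": "{asset}",
    "{Folder[name]}": "{Asset}",
    "{FOLDER[NAME]}": "{ASSET}",
}


def remap_template_keys(template):
    """Replace new-style template keys with the legacy ones expected here."""
    for src, dst in _LEGACY_KEY_MAP.items():
        template = template.replace(src, dst)
    return template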
@@ -82,9 +94,9 @@ def get_subset_name(
    """Calculate subset name based on passed context and OpenPype settings.

    Subst name templates are defined in `project_settings/global/tools/creator
    /subset_name_profiles` where are profiles with host name, family, task name
    and task type filters. If context does not match any profile then
    `DEFAULT_SUBSET_TEMPLATE` is used as default template.
    /product_name_profiles` where are profiles with host name, family,
    task name and task type filters. If context does not match any profile
    then `DEFAULT_SUBSET_TEMPLATE` is used as default template.

    That's main reason why so many arguments are required to calculate subset
    name.

@@ -128,13 +140,13 @@ def get_subset_name(
        return ""

    if not host_name:
        host_name = os.environ.get("AVALON_APP")
        host_name = os.environ.get("AYON_HOST_NAME")

    # Use only last part of class family value split by dot (`.`)
    family = family.rsplit(".", 1)[-1]

    if project_name is None:
        project_name = os.environ.get("AVALON_PROJECT")
        project_name = os.environ.get("AYON_PROJECT_NAME")

    asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
    task_info = asset_tasks.get(task_name) or {}
@@ -321,7 +321,7 @@ def prepare_representations(skeleton_data, exp_files, anatomy, aov_filter,

    """
    representations = []
    host_name = os.environ.get("AVALON_APP", "")
    host_name = os.environ.get("AYON_HOST_NAME", "")
    collections, remainders = clique.assemble(exp_files)

    log = Logger.get_logger("farm_publishing")

@@ -541,7 +541,7 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data,

    """
    # TODO: this needs to be taking the task from context or instance
    task = os.environ["AVALON_TASK"]
    task = os.environ["AYON_TASK_NAME"]

    anatomy = instance.context.data["anatomy"]
    subset = skeleton["subset"]

@@ -611,7 +611,7 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data,

    log.info("Creating data for: {}".format(subset_name))

    app = os.environ.get("AVALON_APP", "")
    app = os.environ.get("AYON_HOST_NAME", "")

    if isinstance(col, list):
        render_file_name = os.path.basename(col[0])
@@ -38,7 +38,7 @@ class LoaderPlugin(list):

    @classmethod
    def apply_settings(cls, project_settings, system_settings):
        host_name = os.environ.get("AVALON_APP")
        host_name = os.environ.get("AYON_HOST_NAME")
        plugin_type = "load"
        plugin_type_settings = (
            project_settings

@@ -47,7 +47,7 @@ class LoaderPlugin(list):
        )
        global_type_settings = (
            project_settings
            .get("global", {})
            .get("core", {})
            .get(plugin_type, {})
        )
        if not global_type_settings and not plugin_type_settings:
@@ -104,7 +104,7 @@ def _list_path_items(folder_structure):
def get_project_basic_paths(project_name):
    project_settings = get_project_settings(project_name)
    folder_structure = (
        project_settings["global"]["project_folder_structure"]
        project_settings["core"]["project_folder_structure"]
    )
    if not folder_structure:
        return []
@@ -60,7 +60,7 @@ def get_template_name_profiles(

    return copy.deepcopy(
        project_settings
        ["global"]
        ["core"]
        ["tools"]
        ["publish"]
        ["template_name_profiles"]

@@ -95,7 +95,7 @@ def get_hero_template_name_profiles(

    return copy.deepcopy(
        project_settings
        ["global"]
        ["core"]
        ["tools"]
        ["publish"]
        ["hero_template_name_profiles"]

@@ -138,7 +138,7 @@ def get_publish_template_name(
    template = None
    filter_criteria = {
        "hosts": host_name,
        "families": family,
        "product_types": family,
        "task_names": task_name,
        "task_types": task_type,
    }
@@ -383,7 +383,7 @@ def get_plugin_settings(plugin, project_settings, log, category=None):

    # TODO: change after all plugins are moved one level up
    if category_from_file in ("ayon_core", "openpype"):
        category_from_file = "global"
        category_from_file = "core"

    try:
        return (

@@ -437,7 +437,7 @@ def filter_pyblish_plugins(plugins):
    # - kept becau on farm is probably used host 'shell' which propably
    #   affect how settings are applied there
    host_name = pyblish.api.current_host()
    project_name = os.environ.get("AVALON_PROJECT")
    project_name = os.environ.get("AYON_PROJECT_NAME")

    project_settings = get_project_settings(project_name)
    system_settings = get_system_settings()

@@ -485,26 +485,6 @@ def filter_pyblish_plugins(plugins):
        plugins.remove(plugin)


def remote_publish(log):
    """Loops through all plugins, logs to console. Used for tests.

    Args:
        log (Logger)
    """

    # Error exit as soon as any error occurs.
    error_format = "Failed {plugin.__name__}: {error}\n{error.traceback}"

    for result in pyblish.util.publish_iter():
        if not result["error"]:
            continue

        error_message = error_format.format(**result)
        log.error(error_message)
        # 'Fatal Error: ' is because of Deadline
        raise RuntimeError("Fatal Error: {}".format(error_message))


def get_errored_instances_from_context(context, plugin=None):
    """Collect failed instances from pyblish context.

@@ -744,7 +724,7 @@ def get_custom_staging_dir_info(project_name, host_name, family, task_name,
        ValueError - if misconfigured template should be used
    """
    settings = project_settings or get_project_settings(project_name)
    custom_staging_dir_profiles = (settings["global"]
    custom_staging_dir_profiles = (settings["core"]
                                   ["tools"]
                                   ["publish"]
                                   ["custom_staging_dir_profiles"])
@@ -16,7 +16,7 @@ def get_versioning_start(
    project_settings = get_project_settings(project_name)

    version_start = 1
    settings = project_settings["global"]
    settings = project_settings["core"]
    profiles = settings.get("version_start_category", {}).get("profiles", [])

    if not profiles:
@@ -229,8 +229,8 @@ class BuildWorkfile:
    def get_build_presets(self, task_name, asset_doc):
        """ Returns presets to build workfile for task name.

        Presets are loaded for current project set in
        io.Session["AVALON_PROJECT"], filtered by registered host
        Presets are loaded for current project received by
        'get_current_project_name', filtered by registered host
        and entered task name.

        Args:
@@ -64,7 +64,7 @@ def is_workfile_lock_enabled(host_name, project_name, project_setting=None):
        project_setting = get_project_settings(project_name)
    workfile_lock_profiles = (
        project_setting
        ["global"]
        ["core"]
        ["tools"]
        ["Workfiles"]
        ["workfile_lock_profiles"])
@@ -72,7 +72,7 @@ def get_workfile_template_key(
    try:
        profiles = (
            project_settings
            ["global"]
            ["core"]
            ["tools"]
            ["Workfiles"]
            ["workfile_template_profiles"]

@@ -157,7 +157,7 @@ def get_workdir(
        task_name (str): Task name for which are workdir data preapred.
        host_name (str): Host which is used to workdir. This is required
            because workdir template may contain `{app}` key. In `Session`
            is stored under `AVALON_APP` key.
            is stored under `AYON_HOST_NAME` key.
        anatomy (Anatomy): Optional argument. Anatomy object is created using
            project name from `project_doc`. It is preferred to pass this
            argument as initialization of a new Anatomy object may be time

@@ -507,7 +507,7 @@ def create_workdir_extra_folders(

    # Load extra folders profiles
    extra_folders_profiles = (
        project_settings["global"]["tools"]["Workfiles"]["extra_folders"]
        project_settings["core"]["tools"]["Workfiles"]["extra_folders"]
    )
    # Skip if are empty
    if not extra_folders_profiles:
@@ -103,7 +103,7 @@ class AbstractTemplateBuilder(object):
        if isinstance(host, HostBase):
            host_name = host.name
        else:
            host_name = os.environ.get("AVALON_APP")
            host_name = os.environ.get("AYON_HOST_NAME")

        self._host = host
        self._host_name = host_name

@@ -129,19 +129,19 @@ class AbstractTemplateBuilder(object):
    def project_name(self):
        if isinstance(self._host, HostBase):
            return self._host.get_current_project_name()
        return os.getenv("AVALON_PROJECT")
        return os.getenv("AYON_PROJECT_NAME")

    @property
    def current_asset_name(self):
        if isinstance(self._host, HostBase):
            return self._host.get_current_asset_name()
        return os.getenv("AVALON_ASSET")
        return os.getenv("AYON_FOLDER_PATH")

    @property
    def current_task_name(self):
        if isinstance(self._host, HostBase):
            return self._host.get_current_task_name()
        return os.getenv("AVALON_TASK")
        return os.getenv("AYON_TASK_NAME")

    def get_current_context(self):
        if isinstance(self._host, HostBase):

@@ -585,7 +585,7 @@ class AbstractTemplateBuilder(object):
            template_path (str): Fullpath for current task and
                host's template file.
        """
        last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE")
        last_workfile_path = os.environ.get("AYON_LAST_WORKFILE")
        self.log.info("__ last_workfile_path: {}".format(last_workfile_path))
        if os.path.exists(last_workfile_path):
            # ignore in case workfile existence
@@ -22,14 +22,14 @@ class OpenTaskPath(LauncherAction):

    def is_compatible(self, session):
        """Return whether the action is compatible with the session"""
        return bool(session.get("AVALON_ASSET"))
        return bool(session.get("AYON_FOLDER_PATH"))

    def process(self, session, **kwargs):
        from qtpy import QtCore, QtWidgets

        project_name = session["AVALON_PROJECT"]
        asset_name = session["AVALON_ASSET"]
        task_name = session.get("AVALON_TASK", None)
        project_name = session["AYON_PROJECT_NAME"]
        asset_name = session["AYON_FOLDER_PATH"]
        task_name = session.get("AYON_TASK_NAME", None)

        path = self._get_workdir(project_name, asset_name, task_name)
        if not path:
@@ -359,7 +359,7 @@
    #
    # if mongo_changes_bulk:
    #     dbcon = AvalonMongoDB()
    #     dbcon.Session["AVALON_PROJECT"] = project_name
    #     dbcon.Session["AYON_PROJECT_NAME"] = project_name
    #     dbcon.install()
    #     dbcon.bulk_write(mongo_changes_bulk)
    #     dbcon.uninstall()
@@ -40,7 +40,7 @@ class CleanUp(pyblish.api.InstancePlugin):
    active = True

    # Presets
    paterns = None  # list of regex paterns
    patterns = None  # list of regex patterns
    remove_temp_renders = True

    def process(self, instance):

@@ -115,10 +115,10 @@ class CleanUp(pyblish.api.InstancePlugin):
            src = os.path.normpath(src)
            dest = os.path.normpath(dest)

            # add src dir into clearing dir paths (regex paterns)
            # add src dir into clearing dir paths (regex patterns)
            transfers_dirs.append(os.path.dirname(src))

            # add dest dir into clearing dir paths (regex paterns)
            # add dest dir into clearing dir paths (regex patterns)
            transfers_dirs.append(os.path.dirname(dest))

            if src in skip_cleanup_filepaths:

@@ -141,13 +141,13 @@ class CleanUp(pyblish.api.InstancePlugin):
                # add dir for cleanup
                dirnames.append(os.path.dirname(src))

        # clean by regex paterns
        # clean by regex patterns
        # make unique set
        transfers_dirs = set(transfers_dirs)

        self.log.debug("__ transfers_dirs: `{}`".format(transfers_dirs))
        self.log.debug("__ self.paterns: `{}`".format(self.paterns))
        if self.paterns:
        self.log.debug("__ self.patterns: `{}`".format(self.patterns))
        if self.patterns:
            files = list()
            # get list of all available content of dirs
            for _dir in transfers_dirs:

@@ -159,14 +159,14 @@ class CleanUp(pyblish.api.InstancePlugin):

            self.log.debug("__ files: `{}`".format(files))

            # remove all files which match regex patern
            # remove all files which match regex pattern
            for f in files:
                if os.path.normpath(f) in skip_cleanup_filepaths:
                    continue

                for p in self.paterns:
                    patern = re.compile(p)
                    if not patern.findall(f):
                for p in self.patterns:
                    pattern = re.compile(p)
                    if not pattern.findall(f):
                        continue
                    if not os.path.exists(f):
                        continue
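# Sketch (assumption, not part of the diff above): what the renamed
# 'patterns' setting of the CleanUp plugin boils down to - compile each
# regex and remove existing files whose path matches any of them. The
# function name and signature are illustrative only.
import os
import re


def remove_matching_files(file_paths, patterns, skip_paths=frozenset()):
    compiled = [re.compile(pattern) for pattern in (patterns or [])]
    for path in file_paths:
        normalized = os.path.normpath(path)
        if normalized in skip_paths:
            continue
        if any(rgx.findall(normalized) for rgx in compiled):
            if os.path.exists(normalized):
                os.remove(normalized)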
@@ -4,9 +4,9 @@ Requires:
    context -> anatomy
    context -> projectEntity
    context -> assetEntity
    context -> task
    context -> username
    context -> datetimeData
    session -> AVALON_TASK

Provides:
    context -> anatomyData
@@ -43,7 +43,7 @@ class CollectInstanceCommentDef(

    @classmethod
    def apply_settings(cls, project_setting, _):
        plugin_settings = project_setting["global"]["publish"].get(
        plugin_settings = project_setting["core"]["publish"].get(
            "collect_comment_per_instance"
        )
        if not plugin_settings:
@@ -1,7 +1,6 @@
"""Collect Anatomy and global anatomy data.

Requires:
    session -> AVALON_ASSET
    context -> projectName
    context -> asset
    context -> task
@@ -57,9 +57,9 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
        asset_name = create_context.get_current_asset_name()
        task_name = create_context.get_current_task_name()
        for key, value in (
            ("AVALON_PROJECT", project_name),
            ("AVALON_ASSET", asset_name),
            ("AVALON_TASK", task_name)
            ("AYON_PROJECT_NAME", project_name),
            ("AYON_FOLDER_PATH", asset_name),
            ("AYON_TASK_NAME", task_name)
        ):
            if value is None:
                os.environ.pop(key, None)
@@ -24,13 +24,13 @@ class CollectHostName(pyblish.api.ContextPlugin):
        if host_name and app_name and app_label:
            return

        # Use AVALON_APP to get host name if available
        # Use AYON_HOST_NAME to get host name if available
        if not host_name:
            host_name = os.environ.get("AVALON_APP")
            host_name = os.environ.get("AYON_HOST_NAME")

        # Use AVALON_APP_NAME to get full app name
        # Use AYON_APP_NAME to get full app name
        if not app_name:
            app_name = os.environ.get("AVALON_APP_NAME")
            app_name = os.environ.get("AYON_APP_NAME")

        # Fill missing values based on app full name
        if (not host_name or not app_label) and app_name:
Some files were not shown because too many files have changed in this diff.