Merge branch 'develop' into enhancement/OP-7075_Validate-Camera-Attributes

This commit is contained in:
Libor Batek 2024-02-21 12:19:13 +01:00 committed by GitHub
commit 6ec4e4e95c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
155 changed files with 1462 additions and 2120 deletions

View file

@ -788,6 +788,7 @@ class AddonsManager:
addon_classes.append(modules_item)
aliased_names = []
for addon_cls in addon_classes:
name = addon_cls.__name__
if issubclass(addon_cls, OpenPypeModule):
@ -807,6 +808,13 @@ class AddonsManager:
self._addons.append(addon)
self._addons_by_id[addon.id] = addon
self._addons_by_name[addon.name] = addon
# NOTE This will be removed with release 1.0.0 of ayon-core
# please use carefully.
# Gives option to use alias name for addon for cases when
# name in OpenPype was not the same as in AYON.
name_alias = getattr(addon, "openpype_alias", None)
if name_alias:
aliased_names.append((name_alias, addon))
enabled_str = "X"
if not addon.enabled:
enabled_str = " "
@ -822,6 +830,17 @@ class AddonsManager:
exc_info=True
)
for item in aliased_names:
name_alias, addon = item
if name_alias not in self._addons_by_name:
self._addons_by_name[name_alias] = addon
continue
self.log.warning(
"Alias name '{}' of addon '{}' is already assigned.".format(
name_alias, addon.name
)
)
if self._report is not None:
report[self._report_total_key] = time.time() - time_start
self._report["Initialization"] = report

View file

@ -73,6 +73,20 @@ class Commands:
import pyblish.api
import pyblish.util
# Fix older jobs
for src_key, dst_key in (
("AVALON_PROJECT", "AYON_PROJECT_NAME"),
("AVALON_ASSET", "AYON_FOLDER_PATH"),
("AVALON_TASK", "AYON_TASK_NAME"),
("AVALON_WORKDIR", "AYON_WORKDIR"),
("AVALON_APP_NAME", "AYON_APP_NAME"),
("AVALON_APP", "AYON_HOST_NAME"),
):
if src_key in os.environ and dst_key not in os.environ:
os.environ[dst_key] = os.environ[src_key]
# Remove old keys, so we're sure they're not used
os.environ.pop(src_key, None)
log = Logger.get_logger("CLI-publish")
install_ayon_plugins()
@ -87,7 +101,7 @@ class Commands:
if not any(paths):
raise RuntimeError("No publish paths specified")
app_full_name = os.getenv("AVALON_APP_NAME")
app_full_name = os.getenv("AYON_APP_NAME")
if app_full_name:
context = get_global_context()
env = get_app_environments_for_context(

View file

@ -21,7 +21,7 @@ class CreateWorkdirExtraFolders(PreLaunchHook):
return
env = self.data.get("env") or {}
workdir = env.get("AVALON_WORKDIR")
workdir = env.get("AYON_WORKDIR")
if not workdir or not os.path.exists(workdir):
return

View file

@ -181,6 +181,10 @@ class HostDirmap(object):
exclude_locals=False,
cached=False)
# TODO implement
# Dirmap is dependent on 'get_site_local_overrides' which
# is not implemented in AYON. The mapping should be received
# from sitesync addon.
active_overrides = get_site_local_overrides(
project_name, active_site)
remote_overrides = get_site_local_overrides(

View file

@ -106,7 +106,7 @@ class HostBase(object):
Union[str, None]: Current project name.
"""
return os.environ.get("AVALON_PROJECT")
return os.environ.get("AYON_PROJECT_NAME")
def get_current_asset_name(self):
"""
@ -114,7 +114,7 @@ class HostBase(object):
Union[str, None]: Current asset name.
"""
return os.environ.get("AVALON_ASSET")
return os.environ.get("AYON_FOLDER_PATH")
def get_current_task_name(self):
"""
@ -122,7 +122,7 @@ class HostBase(object):
Union[str, None]: Current task name.
"""
return os.environ.get("AVALON_TASK")
return os.environ.get("AYON_TASK_NAME")
def get_current_context(self):
"""Get current context information.

View file

@ -234,7 +234,7 @@ class IWorkfileHost:
str: Path to new workdir.
"""
return session["AVALON_WORKDIR"]
return session["AYON_WORKDIR"]
# --- Deprecated method names ---
def file_extensions(self):

View file

@ -297,11 +297,11 @@ class AfterEffectsRoute(WebSocketRoute):
log.info("Setting context change")
log.info("project {} asset {} ".format(project, asset))
if project:
os.environ["AVALON_PROJECT"] = project
os.environ["AYON_PROJECT_NAME"] = project
if asset:
os.environ["AVALON_ASSET"] = asset
os.environ["AYON_FOLDER_PATH"] = asset
if task:
os.environ["AVALON_TASK"] = task
os.environ["AYON_TASK_NAME"] = task
async def read(self):
log.debug("aftereffects.read client calls server server calls "

View file

@ -194,13 +194,13 @@ class RenderCreator(Creator):
name into created subset name.
Position of composition name could be set in
`project_settings/global/tools/creator/subset_name_profiles` with some
form of '{composition}' placeholder.
`project_settings/global/tools/creator/product_name_profiles` with
some form of '{composition}' placeholder.
Composition name will be used implicitly if multiple composition should
be handled at same time.
If {composition} placeholder is not us 'subset_name_profiles'
If {composition} placeholder is not used in 'product_name_profiles'
composition name will be capitalized and set at the end of subset name
if necessary.

View file

@ -272,7 +272,7 @@ def set_resolution(data):
def on_new():
project = os.environ.get("AVALON_PROJECT")
project = os.environ.get("AYON_PROJECT_NAME")
settings = get_project_settings(project).get("blender")
set_resolution_startup = settings.get("set_resolution_startup")
@ -293,7 +293,7 @@ def on_new():
def on_open():
project = os.environ.get("AVALON_PROJECT")
project = os.environ.get("AYON_PROJECT_NAME")
settings = get_project_settings(project).get("blender")
set_resolution_startup = settings.get("set_resolution_startup")
@ -379,7 +379,7 @@ def _on_task_changed():
# `directory` attribute, so it opens in that directory (does it?).
# https://docs.blender.org/api/blender2.8/bpy.types.Operator.html#calling-a-file-selector
# https://docs.blender.org/api/blender2.8/bpy.types.WindowManager.html#bpy.types.WindowManager.fileselect_add
workdir = os.getenv("AVALON_WORKDIR")
workdir = os.getenv("AYON_WORKDIR")
log.debug("New working directory: %s", workdir)

View file

@ -47,6 +47,22 @@ def get_multilayer(settings):
["multilayer_exr"])
def get_renderer(settings):
    """Return the renderer name configured in the blender settings."""
    render_settings = settings["blender"]["RenderSettings"]
    return render_settings["renderer"]
def get_compositing(settings):
    """Return the compositing flag configured in the blender settings."""
    render_settings = settings["blender"]["RenderSettings"]
    return render_settings["compositing"]
def get_render_product(output_path, name, aov_sep):
"""
Generate the path to the render product. Blender interprets the `#`
@ -91,66 +107,120 @@ def set_render_format(ext, multilayer):
image_settings.file_format = "TIFF"
def set_render_passes(settings):
aov_list = (settings["blender"]
["RenderSettings"]
["aov_list"])
custom_passes = (settings["blender"]
["RenderSettings"]
["custom_passes"])
def set_render_passes(settings, renderer):
aov_list = set(settings["blender"]["RenderSettings"]["aov_list"])
custom_passes = settings["blender"]["RenderSettings"]["custom_passes"]
# Common passes for both renderers
vl = bpy.context.view_layer
# Data Passes
vl.use_pass_combined = "combined" in aov_list
vl.use_pass_z = "z" in aov_list
vl.use_pass_mist = "mist" in aov_list
vl.use_pass_normal = "normal" in aov_list
# Light Passes
vl.use_pass_diffuse_direct = "diffuse_light" in aov_list
vl.use_pass_diffuse_color = "diffuse_color" in aov_list
vl.use_pass_glossy_direct = "specular_light" in aov_list
vl.use_pass_glossy_color = "specular_color" in aov_list
vl.eevee.use_pass_volume_direct = "volume_light" in aov_list
vl.use_pass_emit = "emission" in aov_list
vl.use_pass_environment = "environment" in aov_list
vl.use_pass_shadow = "shadow" in aov_list
vl.use_pass_ambient_occlusion = "ao" in aov_list
cycles = vl.cycles
# Cryptomatte Passes
vl.use_pass_cryptomatte_object = "cryptomatte_object" in aov_list
vl.use_pass_cryptomatte_material = "cryptomatte_material" in aov_list
vl.use_pass_cryptomatte_asset = "cryptomatte_asset" in aov_list
cycles.denoising_store_passes = "denoising" in aov_list
cycles.use_pass_volume_direct = "volume_direct" in aov_list
cycles.use_pass_volume_indirect = "volume_indirect" in aov_list
if renderer == "BLENDER_EEVEE":
# Eevee exclusive passes
eevee = vl.eevee
# Light Passes
vl.use_pass_shadow = "shadow" in aov_list
eevee.use_pass_volume_direct = "volume_light" in aov_list
# Effects Passes
eevee.use_pass_bloom = "bloom" in aov_list
eevee.use_pass_transparent = "transparent" in aov_list
# Cryptomatte Passes
vl.use_pass_cryptomatte_accurate = "cryptomatte_accurate" in aov_list
elif renderer == "CYCLES":
# Cycles exclusive passes
cycles = vl.cycles
# Data Passes
vl.use_pass_position = "position" in aov_list
vl.use_pass_vector = "vector" in aov_list
vl.use_pass_uv = "uv" in aov_list
cycles.denoising_store_passes = "denoising" in aov_list
vl.use_pass_object_index = "object_index" in aov_list
vl.use_pass_material_index = "material_index" in aov_list
cycles.pass_debug_sample_count = "sample_count" in aov_list
# Light Passes
vl.use_pass_diffuse_indirect = "diffuse_indirect" in aov_list
vl.use_pass_glossy_indirect = "specular_indirect" in aov_list
vl.use_pass_transmission_direct = "transmission_direct" in aov_list
vl.use_pass_transmission_indirect = "transmission_indirect" in aov_list
vl.use_pass_transmission_color = "transmission_color" in aov_list
cycles.use_pass_volume_direct = "volume_light" in aov_list
cycles.use_pass_volume_indirect = "volume_indirect" in aov_list
cycles.use_pass_shadow_catcher = "shadow" in aov_list
aovs_names = [aov.name for aov in vl.aovs]
for cp in custom_passes:
cp_name = cp[0]
cp_name = cp["attribute"]
if cp_name not in aovs_names:
aov = vl.aovs.add()
aov.name = cp_name
else:
aov = vl.aovs[cp_name]
aov.type = cp[1].get("type", "VALUE")
aov.type = cp["value"]
return aov_list, custom_passes
return list(aov_list), custom_passes
def set_node_tree(output_path, name, aov_sep, ext, multilayer):
def _create_aov_slot(name, aov_sep, slots, rpass_name, multi_exr, output_path):
filename = f"{name}{aov_sep}{rpass_name}.####"
slot = slots.new(rpass_name if multi_exr else filename)
filepath = str(output_path / filename.lstrip("/"))
return slot, filepath
def set_node_tree(
output_path, render_product, name, aov_sep, ext, multilayer, compositing
):
# Set the scene to use the compositor node tree to render
bpy.context.scene.use_nodes = True
tree = bpy.context.scene.node_tree
# Get the Render Layers node
rl_node = None
comp_layer_type = "CompositorNodeRLayers"
output_type = "CompositorNodeOutputFile"
compositor_type = "CompositorNodeComposite"
# Get the Render Layer, Composite and the previous output nodes
render_layer_node = None
composite_node = None
old_output_node = None
for node in tree.nodes:
if node.bl_idname == "CompositorNodeRLayers":
rl_node = node
if node.bl_idname == comp_layer_type:
render_layer_node = node
elif node.bl_idname == compositor_type:
composite_node = node
elif node.bl_idname == output_type and "AYON" in node.name:
old_output_node = node
if render_layer_node and composite_node and old_output_node:
break
# If there's not a Render Layers node, we create it
if not rl_node:
rl_node = tree.nodes.new("CompositorNodeRLayers")
if not render_layer_node:
render_layer_node = tree.nodes.new(comp_layer_type)
# Get the enabled output sockets, that are the active passes for the
# render.
@ -158,48 +228,81 @@ def set_node_tree(output_path, name, aov_sep, ext, multilayer):
exclude_sockets = ["Image", "Alpha", "Noisy Image"]
passes = [
socket
for socket in rl_node.outputs
for socket in render_layer_node.outputs
if socket.enabled and socket.name not in exclude_sockets
]
# Remove all output nodes
for node in tree.nodes:
if node.bl_idname == "CompositorNodeOutputFile":
tree.nodes.remove(node)
# Create a new output node
output = tree.nodes.new("CompositorNodeOutputFile")
output = tree.nodes.new(output_type)
image_settings = bpy.context.scene.render.image_settings
output.format.file_format = image_settings.file_format
slots = None
# In case of a multilayer exr, we don't need to use the output node,
# because the blender render already outputs a multilayer exr.
if ext == "exr" and multilayer:
output.layer_slots.clear()
return []
multi_exr = ext == "exr" and multilayer
slots = output.layer_slots if multi_exr else output.file_slots
output.base_path = render_product if multi_exr else str(output_path)
output.file_slots.clear()
output.base_path = str(output_path)
slots.clear()
aov_file_products = []
old_links = {
link.from_socket.name: link for link in tree.links
if link.to_node == old_output_node}
# Create a new socket for the beauty output
pass_name = "rgba" if multi_exr else "beauty"
slot, _ = _create_aov_slot(
name, aov_sep, slots, pass_name, multi_exr, output_path)
tree.links.new(render_layer_node.outputs["Image"], slot)
if compositing:
# Create a new socket for the composite output
pass_name = "composite"
comp_socket, filepath = _create_aov_slot(
name, aov_sep, slots, pass_name, multi_exr, output_path)
aov_file_products.append(("Composite", filepath))
# For each active render pass, we add a new socket to the output node
# and link it
for render_pass in passes:
filepath = f"{name}{aov_sep}{render_pass.name}.####"
for rpass in passes:
slot, filepath = _create_aov_slot(
name, aov_sep, slots, rpass.name, multi_exr, output_path)
aov_file_products.append((rpass.name, filepath))
output.file_slots.new(filepath)
# If the rpass was not connected with the old output node, we connect
# it with the new one.
if not old_links.get(rpass.name):
tree.links.new(rpass, slot)
filename = str(output_path / filepath.lstrip("/"))
for link in list(old_links.values()):
# Check if the socket is still available in the new output node.
socket = output.inputs.get(link.to_socket.name)
# If it is, we connect it with the new output node.
if socket:
tree.links.new(link.from_socket, socket)
# Then, we remove the old link.
tree.links.remove(link)
aov_file_products.append((render_pass.name, filename))
# If there's a composite node, we connect its input with the new output
if compositing and composite_node:
for link in tree.links:
if link.to_node == composite_node:
tree.links.new(link.from_socket, comp_socket)
break
node_input = output.inputs[-1]
if old_output_node:
output.location = old_output_node.location
tree.nodes.remove(old_output_node)
tree.links.new(render_pass, node_input)
output.name = "AYON File Output"
output.label = "AYON File Output"
return aov_file_products
return [] if multi_exr else aov_file_products
def imprint_render_settings(node, data):
@ -228,17 +331,23 @@ def prepare_rendering(asset_group):
aov_sep = get_aov_separator(settings)
ext = get_image_format(settings)
multilayer = get_multilayer(settings)
renderer = get_renderer(settings)
compositing = get_compositing(settings)
set_render_format(ext, multilayer)
aov_list, custom_passes = set_render_passes(settings)
bpy.context.scene.render.engine = renderer
aov_list, custom_passes = set_render_passes(settings, renderer)
output_path = Path.joinpath(dirpath, render_folder, file_name)
render_product = get_render_product(output_path, name, aov_sep)
aov_file_product = set_node_tree(
output_path, name, aov_sep, ext, multilayer)
output_path, render_product, name, aov_sep,
ext, multilayer, compositing)
bpy.context.scene.render.filepath = render_product
# Clear the render filepath, so that the output is handled only by the
# output node in the compositor.
bpy.context.scene.render.filepath = ""
render_settings = {
"render_folder": render_folder,

View file

@ -82,7 +82,7 @@ def file_extensions() -> List[str]:
def work_root(session: dict) -> str:
"""Return the default root to browse for work files."""
work_dir = session["AVALON_WORKDIR"]
work_dir = session["AYON_WORKDIR"]
scene_dir = session.get("AVALON_SCENEDIR")
if scene_dir:
return str(Path(work_dir, scene_dir))

View file

@ -1,8 +1,10 @@
"""Create render."""
import bpy
from ayon_core.lib import version_up
from ayon_core.hosts.blender.api import plugin
from ayon_core.hosts.blender.api.render_lib import prepare_rendering
from ayon_core.hosts.blender.api.workio import save_file
class CreateRenderlayer(plugin.BaseCreator):
@ -37,6 +39,7 @@ class CreateRenderlayer(plugin.BaseCreator):
# settings. Even the validator to check that the file is saved will
# detect the file as saved, even if it isn't. The only solution for
# now it is to force the file to be saved.
bpy.ops.wm.save_as_mainfile(filepath=bpy.data.filepath)
filepath = version_up(bpy.data.filepath)
save_file(filepath, copy=False)
return collection

View file

@ -28,15 +28,27 @@ class ValidateDeadlinePublish(pyblish.api.InstancePlugin,
def process(self, instance):
if not self.is_active(instance.data):
return
tree = bpy.context.scene.node_tree
output_type = "CompositorNodeOutputFile"
output_node = None
# Remove all output nodes that include "AYON" in the name.
# There should be only one.
for node in tree.nodes:
if node.bl_idname == output_type and "AYON" in node.name:
output_node = node
break
if not output_node:
raise PublishValidationError(
"No output node found in the compositor tree."
)
filepath = bpy.data.filepath
file = os.path.basename(filepath)
filename, ext = os.path.splitext(file)
if filename not in bpy.context.scene.render.filepath:
if filename not in output_node.base_path:
raise PublishValidationError(
"Render output folder "
"doesn't match the blender scene name! "
"Use Repair action to "
"fix the folder file path."
"Render output folder doesn't match the blender scene name! "
"Use Repair action to fix the folder file path."
)
@classmethod

View file

@ -34,4 +34,4 @@ def current_file():
def work_root(session):
return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/")

View file

@ -70,7 +70,7 @@ class LoadClip(opfapi.ClipLoader):
self.log.info("Loading with colorspace: `{}`".format(colorspace))
# create workfile path
workfile_dir = os.environ["AVALON_WORKDIR"]
workfile_dir = os.environ["AYON_WORKDIR"]
openclip_dir = os.path.join(
workfile_dir, clip_name
)

View file

@ -80,7 +80,7 @@ class LoadClipBatch(opfapi.ClipLoader):
self.log.info("Loading with colorspace: `{}`".format(colorspace))
# create workfile path
workfile_dir = options.get("workdir") or os.environ["AVALON_WORKDIR"]
workfile_dir = options.get("workdir") or os.environ["AYON_WORKDIR"]
openclip_dir = os.path.join(
workfile_dir, clip_name
)

View file

@ -22,7 +22,7 @@ def get_fusion_version(app_name):
The function is triggered by the prelaunch hooks to get the fusion version.
`app_name` is obtained by prelaunch hooks from the
`launch_context.env.get("AVALON_APP_NAME")`.
`launch_context.env.get("AYON_APP_NAME")`.
To get a correct Fusion version, a version number should be present
in the `applications/fusion/variants` key

View file

@ -135,7 +135,7 @@ class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
return current_filepath
def work_root(self, session):
work_dir = session["AVALON_WORKDIR"]
work_dir = session["AYON_WORKDIR"]
scene_dir = session.get("AVALON_SCENEDIR")
if scene_dir:
return os.path.join(work_dir, scene_dir)

View file

@ -135,7 +135,7 @@ class GenericCreateSaver(Creator):
ext = data["creator_attributes"]["image_format"]
# Subset change detected
workdir = os.path.normpath(os.getenv("AVALON_WORKDIR"))
workdir = os.path.normpath(os.getenv("AYON_WORKDIR"))
formatting_data.update({
"workdir": workdir,
"frame": "0" * frame_padding,

View file

@ -131,7 +131,7 @@ class FusionCopyPrefsPrelaunch(PreLaunchHook):
) = self.get_copy_fusion_prefs_settings()
# Get launched application context and return correct app version
app_name = self.launch_context.env.get("AVALON_APP_NAME")
app_name = self.launch_context.env.get("AYON_APP_NAME")
app_version = get_fusion_version(app_name)
if app_version is None:
version_names = ", ".join(str(x) for x in FUSION_VERSIONS_DICT)

View file

@ -28,7 +28,7 @@ class FusionPrelaunch(PreLaunchHook):
def execute(self):
# making sure python 3 is installed at provided path
# Py 3.3-3.10 for Fusion 18+ or Py 3.6 for Fu 16-17
app_data = self.launch_context.env.get("AVALON_APP_NAME")
app_data = self.launch_context.env.get("AYON_APP_NAME")
app_version = get_fusion_version(app_data)
if not app_version:
raise ApplicationLaunchFailed(

View file

@ -74,4 +74,4 @@ def current_file():
def work_root(session):
return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/")

View file

@ -77,7 +77,7 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
expected_settings.pop("resolutionWidth")
expected_settings.pop("resolutionHeight")
if (any(re.search(pattern, os.getenv('AVALON_TASK'))
if (any(re.search(pattern, os.getenv('AYON_TASK_NAME'))
for pattern in self.skip_timelines_check)):
self.log.info("Skipping frames check because of "
"task name and pattern {}".format(

View file

@ -70,4 +70,4 @@ def current_file():
def work_root(session):
return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/")

View file

@ -10,7 +10,7 @@ class SetPath(PreLaunchHook):
launch_types = {LaunchTypes.local}
def execute(self):
workdir = self.launch_context.env.get("AVALON_WORKDIR", "")
workdir = self.launch_context.env.get("AYON_WORKDIR", "")
if not workdir:
self.log.warning("BUG: Workdir is not filled.")
return

View file

@ -2,7 +2,7 @@ import hou
import pyblish.api
from ayon_core.hosts.houdini.api import lib
import ayon_core.hosts.houdini.api.usd as hou_usdlib
import ayon_core.lib.usdlib as usdlib
from ayon_core.pipeline import usdlib
class CollectInstancesUsdLayered(pyblish.api.ContextPlugin):

View file

@ -5,7 +5,7 @@ from ayon_core.client import (
get_asset_by_name,
get_asset_name_identifier,
)
import ayon_core.lib.usdlib as usdlib
from ayon_core.pipeline import usdlib
class CollectUsdBootstrap(pyblish.api.InstancePlugin):

View file

@ -63,9 +63,8 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
rt.callbacks.addScript(rt.Name('postWorkspaceChange'),
self._deferred_menu_creation)
def has_unsaved_changes(self):
# TODO: how to get it from 3dsmax?
return True
def workfile_has_unsaved_changes(self):
return rt.getSaveRequired()
def get_workfile_extensions(self):
return [".max"]

View file

@ -10,7 +10,7 @@ class SetPath(PreLaunchHook):
launch_types = {LaunchTypes.local}
def execute(self):
workdir = self.launch_context.env.get("AVALON_WORKDIR", "")
workdir = self.launch_context.env.get("AYON_WORKDIR", "")
if not workdir:
self.log.warning("BUG: Workdir is not filled.")
return

View file

@ -0,0 +1,109 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating workfiles."""
from ayon_core.pipeline import CreatedInstance, AutoCreator
from ayon_core.client import get_asset_by_name, get_asset_name_identifier
from ayon_core.hosts.max.api import plugin
from ayon_core.hosts.max.api.lib import read, imprint
from pymxs import runtime as rt
class CreateWorkfile(plugin.MaxCreatorBase, AutoCreator):
    """Workfile auto-creator."""
    identifier = "io.ayon.creators.max.workfile"
    label = "Workfile"
    family = "workfile"
    icon = "fa5.file"

    default_variant = "Main"

    def create(self):
        """Create or update the single workfile instance for this context."""
        variant = self.default_variant
        # Find an already-existing workfile instance, if any.
        current_instance = next(
            (
                instance for instance in self.create_context.instances
                if instance.creator_identifier == self.identifier
            ),
            None,
        )
        project_name = self.project_name
        asset_name = self.create_context.get_current_asset_name()
        task_name = self.create_context.get_current_task_name()
        host_name = self.create_context.host_name

        if current_instance is None:
            # No workfile instance yet -> auto-create one.
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                variant, task_name, asset_doc, project_name, host_name
            )
            data = {
                "folderPath": asset_name,
                "task": task_name,
                "variant": variant,
            }
            data.update(
                self.get_dynamic_data(
                    variant, task_name, asset_doc,
                    project_name, host_name, current_instance)
            )
            self.log.info("Auto-creating workfile instance...")
            instance_node = self.create_node(subset_name)
            data["instance_node"] = instance_node.name
            current_instance = CreatedInstance(
                self.family, subset_name, data, self
            )
            self._add_instance_to_context(current_instance)
            imprint(instance_node.name, current_instance.data)
        elif (
            current_instance["folderPath"] != asset_name
            or current_instance["task"] != task_name
        ):
            # Context changed since creation -> update instance context.
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                variant, task_name, asset_doc, project_name, host_name
            )
            asset_name = get_asset_name_identifier(asset_doc)

            current_instance["folderPath"] = asset_name
            current_instance["task"] = task_name
            current_instance["subset"] = subset_name

    def collect_instances(self):
        """Recreate CreatedInstance objects from cached scene subsets."""
        self.cache_subsets(self.collection_shared_data)
        cached = self.collection_shared_data["max_cached_subsets"]
        for instance in cached.get(self.identifier, []):
            # Skip entries whose node no longer exists in the scene.
            if not rt.getNodeByName(instance):
                continue
            created_instance = CreatedInstance.from_existing(
                read(rt.GetNodeByName(instance)), self
            )
            self._add_instance_to_context(created_instance)

    def update_instances(self, update_list):
        """Persist changed instance data back onto the scene nodes."""
        for created_inst, _ in update_list:
            node_name = created_inst.get("instance_node")
            imprint(node_name, created_inst.data_to_store())

    def remove_instances(self, instances):
        """Remove specified instance from the scene.

        This is only removing `id` parameter so instance is no longer
        instance, because it might contain valuable data for artist.
        """
        for instance in instances:
            instance_node = rt.GetNodeByName(
                instance.data.get("instance_node"))
            if instance_node:
                rt.Delete(instance_node)
            self._remove_instance_from_context(instance)

    def create_node(self, subset_name):
        """Return the container node for *subset_name*, creating it if needed."""
        existing = rt.getNodeByName(subset_name)
        if existing:
            return existing
        node = rt.Container(name=subset_name)
        node.isHidden = True
        return node

View file

@ -0,0 +1,23 @@
import os
import pyblish.api
from pymxs import runtime as rt
class CollectCurrentFile(pyblish.api.ContextPlugin):
    """Inject the current working file."""

    order = pyblish.api.CollectorOrder - 0.5
    label = "Max Current File"
    hosts = ['max']

    def process(self, context):
        """Inject the current working file"""
        folder = rt.maxFilePath
        file = rt.maxFileName
        if not folder or not file:
            # Best-effort: log but still store whatever path is available.
            self.log.error("Scene is not saved.")
        current_file = os.path.join(folder, file)

        context.data["currentFile"] = current_file
        self.log.debug("Scene path: {}".format(current_file))

View file

@ -12,7 +12,9 @@ class CollectMembers(pyblish.api.InstancePlugin):
hosts = ['max']
def process(self, instance):
if instance.data["family"] == "workfile":
self.log.debug("Skipping Collecting Members for workfile family.")
return
if instance.data.get("instance_node"):
container = rt.GetNodeByName(instance.data["instance_node"])
instance.data["members"] = [

View file

@ -6,57 +6,41 @@ import pyblish.api
from pymxs import runtime as rt
class CollectWorkfile(pyblish.api.ContextPlugin):
class CollectWorkfile(pyblish.api.InstancePlugin):
"""Inject the current working file into context"""
order = pyblish.api.CollectorOrder - 0.01
label = "Collect 3dsmax Workfile"
hosts = ['max']
families = ["workfile"]
def process(self, context):
def process(self, instance):
"""Inject the current working file."""
context = instance.context
folder = rt.maxFilePath
file = rt.maxFileName
if not folder or not file:
self.log.error("Scene is not saved.")
current_file = os.path.join(folder, file)
context.data['currentFile'] = current_file
filename, ext = os.path.splitext(file)
task = context.data["task"]
ext = os.path.splitext(file)[-1].lstrip(".")
data = {}
# create instance
instance = context.create_instance(name=filename)
subset = 'workfile' + task.capitalize()
data.update({
"subset": subset,
"asset": context.data["asset"],
"label": subset,
"publish": True,
"family": 'workfile',
"families": ['workfile'],
"setMembers": [current_file],
"frameStart": context.data['frameStart'],
"frameEnd": context.data['frameEnd'],
"handleStart": context.data['handleStart'],
"handleEnd": context.data['handleEnd']
"setMembers": context.data["currentFile"],
"frameStart": context.data["frameStart"],
"frameEnd": context.data["frameEnd"],
"handleStart": context.data["handleStart"],
"handleEnd": context.data["handleEnd"]
})
data['representations'] = [{
'name': ext.lstrip("."),
'ext': ext.lstrip("."),
'files': file,
data["representations"] = [{
"name": ext,
"ext": ext,
"files": file,
"stagingDir": folder,
}]
instance.data.update(data)
self.log.info('Collected instance: {}'.format(file))
self.log.info('Scene path: {}'.format(current_file))
self.log.info('staging Dir: {}'.format(folder))
self.log.info('subset: {}'.format(subset))
self.log.debug("Collected data: {}".format(data))
self.log.debug("Collected instance: {}".format(file))
self.log.debug("staging Dir: {}".format(folder))

View file

@ -1,11 +1,9 @@
import pyblish.api
import os
from ayon_core.pipeline import registered_host
class SaveCurrentScene(pyblish.api.ContextPlugin):
"""Save current scene
"""
"""Save current scene"""
label = "Save current file"
order = pyblish.api.ExtractorOrder - 0.49
@ -13,9 +11,13 @@ class SaveCurrentScene(pyblish.api.ContextPlugin):
families = ["maxrender", "workfile"]
def process(self, context):
from pymxs import runtime as rt
folder = rt.maxFilePath
file = rt.maxFileName
current = os.path.join(folder, file)
assert context.data["currentFile"] == current
rt.saveMaxFile(current)
host = registered_host()
current_file = host.get_current_workfile()
assert context.data["currentFile"] == current_file
if host.workfile_has_unsaved_changes():
self.log.info(f"Saving current file: {current_file}")
host.save_workfile(current_file)
else:
self.log.debug("No unsaved changes, skipping file save..")

View file

@ -8,5 +8,8 @@
local pythonpath = systemTools.getEnvVariable "MAX_PYTHONPATH"
systemTools.setEnvVariable "PYTHONPATH" pythonpath
/*opens the create menu on startup to ensure users are presented with a useful default view.*/
max create mode
python.ExecuteFile startup
)

View file

@ -38,25 +38,6 @@ class ToolWindows:
cls._windows[tool] = window
def edit_shader_definitions():
from qtpy import QtWidgets
from ayon_core.hosts.maya.api.shader_definition_editor import (
ShaderDefinitionsEditor
)
from ayon_core.tools.utils import qt_app_context
top_level_widgets = QtWidgets.QApplication.topLevelWidgets()
main_window = next(widget for widget in top_level_widgets
if widget.objectName() == "MayaWindow")
with qt_app_context():
window = ToolWindows.get_window("shader_definition_editor")
if not window:
window = ShaderDefinitionsEditor(parent=main_window)
ToolWindows.set_window("shader_definition_editor", window)
window.show()
def _resolution_from_document(doc):
if not doc or "data" not in doc:
print("Entered document is not valid. \"{}\"".format(str(doc)))

View file

@ -246,7 +246,7 @@ def _set_project():
None
"""
workdir = os.getenv("AVALON_WORKDIR")
workdir = os.getenv("AYON_WORKDIR")
try:
os.makedirs(workdir)
@ -628,7 +628,7 @@ def on_task_changed():
# Run
menu.update_menu_task_label()
workdir = os.getenv("AVALON_WORKDIR")
workdir = os.getenv("AYON_WORKDIR")
if os.path.exists(workdir):
log.info("Updating Maya workspace for task change to %s", workdir)
_set_project()
@ -677,7 +677,7 @@ def workfile_save_before_xgen(event):
import xgenm
current_work_dir = os.getenv("AVALON_WORKDIR").replace("\\", "/")
current_work_dir = os.getenv("AYON_WORKDIR").replace("\\", "/")
expected_work_dir = event.data["workdir_path"].replace("\\", "/")
if current_work_dir == expected_work_dir:
return

View file

@ -612,7 +612,7 @@ def get_load_color_for_family(family, settings=None):
else:
raise ValueError("Invalid color definition {}".format(str(color)))
if type(red, int):
if isinstance(red, int):
red = red / 255.0
green = green / 255.0
blue = blue / 255.0

View file

@ -1,176 +0,0 @@
# -*- coding: utf-8 -*-
"""Editor for shader definitions.
Shader names are stored as simple text file over GridFS in mongodb.
"""
import os
from qtpy import QtWidgets, QtCore, QtGui
from ayon_core.client.mongo import OpenPypeMongoConnection
from ayon_core import resources
import gridfs
DEFINITION_FILENAME = "{}/maya/shader_definition.txt".format(
os.getenv("AVALON_PROJECT"))
class ShaderDefinitionsEditor(QtWidgets.QWidget):
"""Widget serving as simple editor for shader name definitions."""
# name of the file used to store definitions
def __init__(self, parent=None):
super(ShaderDefinitionsEditor, self).__init__(parent)
self._mongo = OpenPypeMongoConnection.get_mongo_client()
self._gridfs = gridfs.GridFS(
self._mongo[os.getenv("OPENPYPE_DATABASE_NAME")])
self._editor = None
self._original_content = self._read_definition_file()
self.setObjectName("shaderDefinitionEditor")
self.setWindowTitle("OpenPype shader name definition editor")
icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
self.setWindowIcon(icon)
self.setWindowFlags(QtCore.Qt.Window)
self.setParent(parent)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.resize(750, 500)
self._setup_ui()
self._reload()
def _setup_ui(self):
"""Setup UI of Widget."""
layout = QtWidgets.QVBoxLayout(self)
label = QtWidgets.QLabel()
label.setText("Put shader names here - one name per line:")
layout.addWidget(label)
self._editor = QtWidgets.QPlainTextEdit()
self._editor.setStyleSheet("border: none;")
layout.addWidget(self._editor)
btn_layout = QtWidgets.QHBoxLayout()
save_btn = QtWidgets.QPushButton("Save")
save_btn.clicked.connect(self._save)
reload_btn = QtWidgets.QPushButton("Reload")
reload_btn.clicked.connect(self._reload)
exit_btn = QtWidgets.QPushButton("Exit")
exit_btn.clicked.connect(self._close)
btn_layout.addWidget(reload_btn)
btn_layout.addWidget(save_btn)
btn_layout.addWidget(exit_btn)
layout.addLayout(btn_layout)
def _read_definition_file(self, file=None):
"""Read definition file from database.
Args:
file (gridfs.grid_file.GridOut, Optional): File to read. If not
set, new query will be issued to find it.
Returns:
str: Content of the file or empty string if file doesn't exist.
"""
content = ""
if not file:
file = self._gridfs.find_one(
{"filename": DEFINITION_FILENAME})
if not file:
print(">>> [SNDE]: nothing in database yet")
return content
content = file.read()
file.close()
return content
def _write_definition_file(self, content, force=False):
"""Write content as definition to file in database.
Before file is written, check is made if its content has not
changed. If is changed, warning is issued to user if he wants
it to overwrite. Note: GridFs doesn't allow changing file content.
You need to delete existing file and create new one.
Args:
content (str): Content to write.
Raises:
ContentException: If file is changed in database while
editor is running.
"""
file = self._gridfs.find_one(
{"filename": DEFINITION_FILENAME})
if file:
content_check = self._read_definition_file(file)
if content == content_check:
print(">>> [SNDE]: content not changed")
return
if self._original_content != content_check:
if not force:
raise ContentException("Content changed")
print(">>> [SNDE]: overwriting data")
file.close()
self._gridfs.delete(file._id)
file = self._gridfs.new_file(
filename=DEFINITION_FILENAME,
content_type='text/plain',
encoding='utf-8')
file.write(content)
file.close()
QtCore.QTimer.singleShot(200, self._reset_style)
self._editor.setStyleSheet("border: 1px solid #33AF65;")
self._original_content = content
def _reset_style(self):
"""Reset editor style back.
Used to visually indicate save.
"""
self._editor.setStyleSheet("border: none;")
def _close(self):
self.hide()
def closeEvent(self, event):
event.ignore()
self.hide()
def _reload(self):
print(">>> [SNDE]: reloading")
self._set_content(self._read_definition_file())
def _save(self):
try:
self._write_definition_file(content=self._editor.toPlainText())
except ContentException:
# content has changed meanwhile
print(">>> [SNDE]: content has changed")
self._show_overwrite_warning()
def _set_content(self, content):
self._editor.setPlainText(content)
def _show_overwrite_warning(self):
reply = QtWidgets.QMessageBox.question(
self,
"Warning",
("Content you are editing was changed meanwhile in database.\n"
"Please, reload and solve the conflict."),
QtWidgets.QMessageBox.OK)
if reply == QtWidgets.QMessageBox.OK:
# do nothing
pass
class ContentException(Exception):
"""This is risen during save if file is changed in database."""
pass

View file

@ -35,7 +35,7 @@ def current_file():
def work_root(session):
work_dir = session["AVALON_WORKDIR"]
work_dir = session["AYON_WORKDIR"]
scene_dir = None
# Query scene file rule from workspace.mel if it exists in WORKDIR

View file

@ -12,7 +12,7 @@ class PreCopyMel(PreLaunchHook):
def execute(self):
project_doc = self.data["project_doc"]
workdir = self.launch_context.env.get("AVALON_WORKDIR")
workdir = self.launch_context.env.get("AYON_WORKDIR")
if not workdir:
self.log.warning("BUG: Workdir is not filled.")
return

View file

@ -8,13 +8,12 @@ publishing on farm.
Requires:
instance -> families
instance -> setMembers
instance -> asset
context -> currentFile
context -> workspaceDir
context -> user
session -> AVALON_ASSET
Optional:
Provides:

View file

@ -1,161 +0,0 @@
# -*- coding: utf-8 -*-
"""Validate model nodes names."""
import os
import platform
import re
import gridfs
import pyblish.api
from maya import cmds
import ayon_core.hosts.maya.api.action
from ayon_core.client.mongo import OpenPypeMongoConnection
from ayon_core.hosts.maya.api.shader_definition_editor import (
DEFINITION_FILENAME)
from ayon_core.pipeline.publish import (
OptionalPyblishPluginMixin, PublishValidationError, ValidateContentsOrder)
class ValidateModelName(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Validate name of model
starts with (somename)_###_(materialID)_GEO
materialID must be present in list
padding number doesn't have limit
"""
optional = True
order = ValidateContentsOrder
hosts = ["maya"]
families = ["model"]
label = "Model Name"
actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
material_file = None
database_file = DEFINITION_FILENAME
@classmethod
def get_invalid(cls, instance):
"""Get invalid nodes."""
use_db = cls.database
def is_group(group_name):
"""Find out if supplied transform is group or not."""
try:
children = cmds.listRelatives(group_name, children=True)
for child in children:
if not cmds.ls(child, transforms=True):
return False
return True
except Exception:
return False
invalid = []
content_instance = instance.data.get("setMembers", None)
if not content_instance:
cls.log.error("Instance has no nodes!")
return True
pass
# validate top level group name
assemblies = cmds.ls(content_instance, assemblies=True, long=True)
if len(assemblies) != 1:
cls.log.error("Must have exactly one top group")
return assemblies or True
top_group = assemblies[0]
regex = cls.top_level_regex
r = re.compile(regex)
m = r.match(top_group)
project_name = instance.context.data["projectName"]
current_asset_name = instance.context.data["asset"]
if m is None:
cls.log.error("invalid name on: {}".format(top_group))
cls.log.error("name doesn't match regex {}".format(regex))
invalid.append(top_group)
else:
if "asset" in r.groupindex:
if m.group("asset") != current_asset_name:
cls.log.error("Invalid asset name in top level group.")
return top_group
if "subset" in r.groupindex:
if m.group("subset") != instance.data.get("subset"):
cls.log.error("Invalid subset name in top level group.")
return top_group
if "project" in r.groupindex:
if m.group("project") != project_name:
cls.log.error("Invalid project name in top level group.")
return top_group
descendants = cmds.listRelatives(content_instance,
allDescendents=True,
fullPath=True) or []
descendants = cmds.ls(descendants, noIntermediate=True, long=True)
trns = cmds.ls(descendants, long=False, type='transform')
# filter out groups
filtered = [node for node in trns if not is_group(node)]
# load shader list file as utf-8
shaders = []
if not use_db:
material_file = cls.material_file[platform.system().lower()]
if material_file:
if os.path.isfile(material_file):
shader_file = open(material_file, "r")
shaders = shader_file.readlines()
shader_file.close()
else:
cls.log.error("Missing shader name definition file.")
return True
else:
client = OpenPypeMongoConnection.get_mongo_client()
fs = gridfs.GridFS(client[os.getenv("OPENPYPE_DATABASE_NAME")])
shader_file = fs.find_one({"filename": cls.database_file})
if not shader_file:
cls.log.error("Missing shader name definition in database.")
return True
shaders = shader_file.read().splitlines()
shader_file.close()
# strip line endings from list
shaders = [s.rstrip() for s in shaders if s.rstrip()]
# compile regex for testing names
regex = cls.regex
r = re.compile(regex)
for obj in filtered:
cls.log.debug("testing: {}".format(obj))
m = r.match(obj)
if m is None:
cls.log.error("invalid name on: {}".format(obj))
invalid.append(obj)
else:
# if we have shader files and shader named group is in
# regex, test this group against names in shader file
if "shader" in r.groupindex and shaders:
try:
if not m.group('shader') in shaders:
cls.log.error(
"invalid materialID on: {0} ({1})".format(
obj, m.group('shader')))
invalid.append(obj)
except IndexError:
# shader named group doesn't match
cls.log.error(
"shader group doesn't match: {}".format(obj))
invalid.append(obj)
return invalid
def process(self, instance):
"""Plugin entry point."""
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:
raise PublishValidationError(
"Model naming is invalid. See the log.")

View file

@ -38,7 +38,7 @@ if explicit_plugins_loading["enabled"]:
key = "AYON_OPEN_WORKFILE_POST_INITIALIZATION"
if bool(int(os.environ.get(key, "0"))):
def _log_and_open():
path = os.environ["AVALON_LAST_WORKFILE"]
path = os.environ["AYON_LAST_WORKFILE"]
print("Opening \"{}\"".format(path))
cmds.file(path, open=True, force=True)
cmds.evalDeferred(

View file

@ -120,7 +120,7 @@ def deprecated(new_destination):
class Context:
main_window = None
context_action_item = None
project_name = os.getenv("AVALON_PROJECT")
project_name = os.getenv("AYON_PROJECT_NAME")
# Workfile related code
workfiles_launched = False
workfiles_tool_timer = None
@ -2605,7 +2605,7 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
def set_favorites(self):
from .utils import set_context_favorites
work_dir = os.getenv("AVALON_WORKDIR")
work_dir = os.getenv("AYON_WORKDIR")
asset = get_current_asset_name()
favorite_items = OrderedDict()
@ -2953,7 +2953,7 @@ def process_workfile_builder():
create_fv_on = workfile_builder.get("create_first_version") or None
builder_on = workfile_builder.get("builder_on_start") or None
last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE")
last_workfile_path = os.environ.get("AYON_LAST_WORKFILE")
# generate first version in file not existing and feature is enabled
if create_fv_on and not os.path.exists(last_workfile_path):
@ -3203,7 +3203,7 @@ class DirmapCache:
@classmethod
def project_name(cls):
if cls._project_name is None:
cls._project_name = os.getenv("AVALON_PROJECT")
cls._project_name = os.getenv("AYON_PROJECT_NAME")
return cls._project_name
@classmethod

View file

@ -179,7 +179,7 @@ def add_nuke_callbacks():
nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)
if nuke_settings["nuke_dirmap"]["enabled"]:
if nuke_settings["dirmap"]["enabled"]:
log.info("Added Nuke's dir-mapping callback ...")
# Add dirmap for file paths.
nuke.addFilenameFilter(dirmap_file_name_filter)

View file

@ -493,7 +493,7 @@ def get_colorspace_from_node(node):
def get_review_presets_config():
settings = get_current_project_settings()
review_profiles = (
settings["global"]
settings["core"]
["publish"]
["ExtractReview"]
["profiles"]
@ -1348,7 +1348,9 @@ def _remove_old_knobs(node):
def exposed_write_knobs(settings, plugin_name, instance_node):
exposed_knobs = settings["nuke"]["create"][plugin_name]["exposed_knobs"]
exposed_knobs = settings["nuke"]["create"][plugin_name].get(
"exposed_knobs", []
)
if exposed_knobs:
instance_node.addKnob(nuke.Text_Knob('', 'Write Knobs'))
write_node = nuke.allNodes(group=instance_node, filter="Write")[0]

View file

@ -68,7 +68,7 @@ def current_file():
def work_root(session):
work_dir = session["AVALON_WORKDIR"]
work_dir = session["AYON_WORKDIR"]
scene_dir = session.get("AVALON_SCENEDIR")
if scene_dir:
path = os.path.join(work_dir, scene_dir)

View file

@ -65,7 +65,7 @@ class ValidateExposedKnobs(
group_node = instance.data["transientData"]["node"]
nuke_settings = instance.context.data["project_settings"]["nuke"]
create_settings = nuke_settings["create"][plugin]
exposed_knobs = create_settings["exposed_knobs"]
exposed_knobs = create_settings.get("exposed_knobs", [])
unexposed_knobs = []
for knob in exposed_knobs:
if knob not in group_node.knobs():

View file

@ -112,7 +112,7 @@ class WriteNodeKnobSettingPanel(nukescripts.PythonPanel):
for write_node in write_selected_nodes:
# data for mapping the path
data = {
"work": os.getenv("AVALON_WORKDIR"),
"work": os.getenv("AYON_WORKDIR"),
"subset": write_node["name"].value(),
"frame": "#" * frame_padding,
"ext": ext

View file

@ -62,7 +62,7 @@ class PhotoshopHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
return None
def work_root(self, session):
return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/")
def open_workfile(self, filepath):
lib.stub().open(filepath)

View file

@ -209,8 +209,8 @@ class ImageCreator(Creator):
'Use layer name in subset' will explicitly add layer name into subset
name. Position of this name is configurable in
`project_settings/global/tools/creator/subset_name_profiles`.
If layer placeholder ({layer}) is not used in `subset_name_profiles`
`project_settings/global/tools/creator/product_name_profiles`.
If layer placeholder ({layer}) is not used in `product_name_profiles`
but layer name should be used (set explicitly in UI or implicitly if
multiple images should be created), it is added in capitalized form
as a suffix to subset name.

View file

@ -52,10 +52,10 @@ class CollectBatchData(pyblish.api.ContextPlugin):
assert os.path.exists(batch_dir), \
"Folder {} doesn't exist".format(batch_dir)
project_name = os.environ.get("AVALON_PROJECT")
project_name = os.environ.get("AYON_PROJECT_NAME")
if project_name is None:
raise AssertionError(
"Environment `AVALON_PROJECT` was not found."
"Environment `AYON_PROJECT_NAME` was not found."
"Could not set project `root` which may cause issues."
)
@ -68,8 +68,8 @@ class CollectBatchData(pyblish.api.ContextPlugin):
batch_data["context"]
)
os.environ["AVALON_ASSET"] = asset_name
os.environ["AVALON_TASK"] = task_name
os.environ["AYON_FOLDER_PATH"] = asset_name
os.environ["AYON_TASK_NAME"] = task_name
context.data["asset"] = asset_name
context.data["task"] = task_name

View file

@ -79,7 +79,7 @@ def open_file(filepath):
def current_file():
pm = get_project_manager()
file_ext = file_extensions()[0]
workdir_path = os.getenv("AVALON_WORKDIR")
workdir_path = os.getenv("AYON_WORKDIR")
project = pm.GetCurrentProject()
project_name = project.GetName()
file_name = project_name + file_ext
@ -93,4 +93,4 @@ def current_file():
def work_root(session):
return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/")

View file

@ -16,25 +16,31 @@ class ShotMetadataSolver:
NO_DECOR_PATERN = re.compile(r"\{([a-z]*?)\}")
# presets
clip_name_tokenizer = None
shot_rename = True
shot_hierarchy = None
shot_add_tasks = None
def __init__(self, logger):
self.clip_name_tokenizer = []
self.shot_rename = {
"enabled": False,
"shot_rename_template": "",
}
self.shot_hierarchy = {
"enabled": False,
"parents": [],
"parents_path": "",
}
self.shot_add_tasks = []
self.log = logger
def __init__(
def update_data(
self,
clip_name_tokenizer,
shot_rename,
shot_hierarchy,
shot_add_tasks,
logger
shot_add_tasks
):
self.clip_name_tokenizer = clip_name_tokenizer
self.shot_rename = shot_rename
self.shot_hierarchy = shot_hierarchy
self.shot_add_tasks = shot_add_tasks
self.log = logger
def _rename_template(self, data):
"""Shot renaming function
@ -86,7 +92,9 @@ class ShotMetadataSolver:
search_text = parent_name + clip_name
for token_key, pattern in self.clip_name_tokenizer.items():
for clip_name_item in self.clip_name_tokenizer:
token_key = clip_name_item["name"]
pattern = clip_name_item["regex"]
p = re.compile(pattern)
match = p.findall(search_text)
if not match:
@ -137,11 +145,11 @@ class ShotMetadataSolver:
))
_parent_tokens_type = {
parent_token["name"]: parent_token["type"]
parent_token["name"]: parent_token["parent_type"]
for parent_token in hierarchy_parents
}
for _index, _parent in enumerate(
shot_hierarchy["parents_path"].split("/")
shot_hierarchy["parents_path"].split("/")
):
# format parent token with value which is formatted
try:
@ -262,22 +270,22 @@ class ShotMetadataSolver:
"""
tasks_to_add = {}
project_tasks = project_doc["config"]["tasks"]
for task_name, task_data in self.shot_add_tasks.items():
_task_data = deepcopy(task_data)
project_task_types = project_doc["config"]["tasks"]
for task_item in self.shot_add_tasks:
task_name = task_item["name"]
task_type = task_item["task_type"]
# check if task type in project task types
if _task_data["type"] in project_tasks.keys():
tasks_to_add[task_name] = _task_data
else:
if task_type not in project_task_types.keys():
raise KeyError(
"Missing task type `{}` for `{}` is not"
" existing in `{}``".format(
_task_data["type"],
task_type,
task_name,
list(project_tasks.keys())
list(project_task_types.keys())
)
)
tasks_to_add[task_name] = {"type": task_type}
return tasks_to_add

View file

@ -22,7 +22,7 @@ class TrayPublisherHost(HostBase, IPublishHost):
name = "traypublisher"
def install(self):
os.environ["AVALON_APP"] = self.name
os.environ["AYON_HOST_NAME"] = self.name
pyblish.api.register_host("traypublisher")
pyblish.api.register_plugin_path(PUBLISH_PATH)
@ -40,7 +40,7 @@ class TrayPublisherHost(HostBase, IPublishHost):
def set_project_name(self, project_name):
# TODO Deregister project specific plugins and register new project
# plugins
os.environ["AVALON_PROJECT"] = project_name
os.environ["AYON_PROJECT_NAME"] = project_name
HostContext.set_project_name(project_name)

View file

@ -311,7 +311,7 @@ class SettingsCreator(TrayPublishCreator):
@classmethod
def from_settings(cls, item_data):
identifier = item_data["identifier"]
family = item_data["family"]
family = item_data["product_type"]
if not identifier:
identifier = "settings_{}".format(family)
return type(

View file

@ -174,46 +174,42 @@ Supporting publishing new shots to project
or updating already created. Publishing will create OTIO file.
"""
icon = "fa.file"
product_type_presets = []
def __init__(
self, project_settings, *args, **kwargs
):
super(EditorialSimpleCreator, self).__init__(
project_settings, *args, **kwargs
)
def __init__(self, *args, **kwargs):
self._shot_metadata_solver = ShotMetadataSolver(self.log)
super(EditorialSimpleCreator, self).__init__(*args, **kwargs)
def apply_settings(self, project_settings):
editorial_creators = deepcopy(
project_settings["traypublisher"]["editorial_creators"]
)
# get this creator settings by identifier
self._creator_settings = editorial_creators.get(self.identifier)
creator_settings = editorial_creators.get(self.identifier)
clip_name_tokenizer = self._creator_settings["clip_name_tokenizer"]
shot_rename = self._creator_settings["shot_rename"]
shot_hierarchy = self._creator_settings["shot_hierarchy"]
shot_add_tasks = self._creator_settings["shot_add_tasks"]
self._shot_metadata_solver = ShotMetadataSolver(
clip_name_tokenizer,
shot_rename,
shot_hierarchy,
shot_add_tasks,
self.log
self._shot_metadata_solver.update_data(
creator_settings["clip_name_tokenizer"],
creator_settings["shot_rename"],
creator_settings["shot_hierarchy"],
creator_settings["shot_add_tasks"]
)
# try to set main attributes from settings
if self._creator_settings.get("default_variants"):
self.default_variants = self._creator_settings["default_variants"]
self.product_type_presets = creator_settings["product_type_presets"]
default_variants = creator_settings.get("default_variants")
if default_variants:
self.default_variants = default_variants
def create(self, subset_name, instance_data, pre_create_data):
allowed_family_presets = self._get_allowed_family_presets(
allowed_product_type_presets = self._get_allowed_product_type_presets(
pre_create_data)
product_types = {
item["product_type"]
for item in self.product_type_presets
}
clip_instance_properties = {
k: v for k, v in pre_create_data.items()
k: v
for k, v in pre_create_data.items()
if k != "sequence_filepath_data"
if k not in [
i["family"] for i in self._creator_settings["family_presets"]
]
if k not in product_types
}
asset_name = instance_data["folderPath"]
@ -255,7 +251,7 @@ or updating already created. Publishing will create OTIO file.
otio_timeline,
media_path,
clip_instance_properties,
allowed_family_presets,
allowed_product_type_presets,
os.path.basename(seq_path),
first_otio_timeline
)
@ -355,7 +351,7 @@ or updating already created. Publishing will create OTIO file.
otio_timeline,
media_path,
instance_data,
family_presets,
product_type_presets,
sequence_file_name,
first_otio_timeline=None
):
@ -365,7 +361,7 @@ or updating already created. Publishing will create OTIO file.
otio_timeline (otio.Timeline): otio timeline object
media_path (str): media file path string
instance_data (dict): clip instance data
family_presets (list): list of dict settings subset presets
product_type_presets (list): list of dict settings subset presets
"""
tracks = [
@ -411,17 +407,17 @@ or updating already created. Publishing will create OTIO file.
"instance_id": None
}
for _fpreset in family_presets:
for product_type_preset in product_type_presets:
# exclude audio family if no audio stream
if (
_fpreset["family"] == "audio"
product_type_preset["product_type"] == "audio"
and not media_data.get("audio")
):
continue
instance = self._make_subset_instance(
otio_clip,
_fpreset,
product_type_preset,
deepcopy(base_instance_data),
parenting_data
)
@ -533,7 +529,7 @@ or updating already created. Publishing will create OTIO file.
def _make_subset_instance(
self,
otio_clip,
preset,
product_type_preset,
instance_data,
parenting_data
):
@ -541,16 +537,16 @@ or updating already created. Publishing will create OTIO file.
Args:
otio_clip (otio.Clip): otio clip object
preset (dict): single family preset
product_type_preset (dict): single family preset
instance_data (dict): instance data
parenting_data (dict): shot instance parent data
Returns:
CreatedInstance: creator instance object
"""
family = preset["family"]
family = product_type_preset["product_type"]
label = self._make_subset_naming(
preset,
product_type_preset,
instance_data
)
instance_data["label"] = label
@ -569,11 +565,11 @@ or updating already created. Publishing will create OTIO file.
else:
# add review family if defined
instance_data.update({
"outputFileType": preset["output_file_type"],
"outputFileType": product_type_preset["output_file_type"],
"parent_instance_id": parenting_data["instance_id"],
"creator_attributes": {
"parent_instance": parenting_data["instance_label"],
"add_review_family": preset.get("review")
"add_review_family": product_type_preset.get("review")
}
})
@ -585,15 +581,11 @@ or updating already created. Publishing will create OTIO file.
return c_instance
def _make_subset_naming(
self,
preset,
instance_data
):
def _make_subset_naming(self, product_type_preset, instance_data):
""" Subset name maker
Args:
preset (dict): single preset item
product_type_preset (dict): single preset item
instance_data (dict): instance data
Returns:
@ -602,10 +594,10 @@ or updating already created. Publishing will create OTIO file.
asset_name = instance_data["creator_attributes"]["folderPath"]
variant_name = instance_data["variant"]
family = preset["family"]
family = product_type_preset["product_type"]
# get variant name from preset or from inheritance
_variant_name = preset.get("variant") or variant_name
_variant_name = product_type_preset.get("variant") or variant_name
# subset name
subset_name = "{}{}".format(
@ -763,7 +755,7 @@ or updating already created. Publishing will create OTIO file.
"sourceOut": int(source_out)
}
def _get_allowed_family_presets(self, pre_create_data):
def _get_allowed_product_type_presets(self, pre_create_data):
""" Filter out allowed family presets.
Args:
@ -773,10 +765,11 @@ or updating already created. Publishing will create OTIO file.
list: lit of dict with preset items
"""
return [
{"family": "shot"},
{"product_type": "shot"},
*[
preset for preset in self._creator_settings["family_presets"]
if pre_create_data[preset["family"]]
preset
for preset in self.product_type_presets
if pre_create_data[preset["product_type"]]
]
]
@ -853,8 +846,8 @@ or updating already created. Publishing will create OTIO file.
]
# add variants swithers
attr_defs.extend(
BoolDef(_var["family"], label=_var["family"])
for _var in self._creator_settings["family_presets"]
BoolDef(item["product_type"], label=item["product_type"])
for item in self.product_type_presets
)
attr_defs.append(UISeparatorDef())

View file

@ -8,7 +8,7 @@ log = Logger.get_logger(__name__)
def initialize():
from ayon_core.hosts.traypublisher.api.plugin import SettingsCreator
project_name = os.environ["AVALON_PROJECT"]
project_name = os.environ["AYON_PROJECT_NAME"]
project_settings = get_project_settings(project_name)
simple_creators = project_settings["traypublisher"]["simple_creators"]

View file

@ -68,7 +68,7 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
log.info("AYON - Installing TVPaint integration")
# Create workdir folder if does not exist yet
workdir = os.getenv("AVALON_WORKDIR")
workdir = os.getenv("AYON_WORKDIR")
if not os.path.exists(workdir):
os.makedirs(workdir)
@ -155,7 +155,7 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
return execute_george(george_script)
def work_root(self, session):
return session["AVALON_WORKDIR"]
return session["AYON_WORKDIR"]
def get_current_workfile(self):
return execute_george("tv_GetProjectName")
@ -174,7 +174,7 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
# Setup project settings if its the template that's launched.
# TODO also check for template creation when it's possible to define
# templates
last_workfile = os.environ.get("AVALON_LAST_WORKFILE")
last_workfile = os.environ.get("AYON_LAST_WORKFILE")
if not last_workfile or os.path.exists(last_workfile):
return

View file

@ -85,8 +85,8 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
if workfile_context:
# Change current context with context from workfile
key_map = (
("AVALON_ASSET", "asset_name"),
("AVALON_TASK", "task_name")
("AYON_FOLDER_PATH", "asset_name"),
("AYON_TASK_NAME", "task_name")
)
for env_key, key in key_map:
os.environ[env_key] = workfile_context[key]

View file

@ -31,7 +31,7 @@ class ExtractSequence(pyblish.api.Extractor):
families = ["review", "render"]
# Modifiable with settings
review_bg = [255, 255, 255, 255]
review_bg = [255, 255, 255, 1.0]
def process(self, instance):
self.log.info(

View file

@ -6,7 +6,7 @@ class ValidateWorkfileProjectName(pyblish.api.ContextPlugin):
"""Validate project name stored in workfile metadata.
It is not possible to publish from different project than is set in
environment variable "AVALON_PROJECT".
environment variable "AYON_PROJECT_NAME".
"""
label = "Validate Workfile Project Name"

View file

@ -60,7 +60,7 @@ def start_rendering():
inst_data.append(data)
try:
project = os.environ.get("AVALON_PROJECT")
project = os.environ.get("AYON_PROJECT_NAME")
anatomy = Anatomy(project)
root = anatomy.roots['renders']
except Exception as e:

View file

@ -146,7 +146,7 @@ class UnrealPrelaunchHook(PreLaunchHook):
def execute(self):
"""Hook entry method."""
workdir = self.launch_context.env["AVALON_WORKDIR"]
workdir = self.launch_context.env["AYON_WORKDIR"]
executable = str(self.launch_context.executable)
engine_version = self.app_name.split("/")[-1].replace("-", ".")
try:

View file

@ -16,7 +16,6 @@ from ayon_core.client import get_asset_name_identifier
from ayon_core.settings import (
get_system_settings,
get_project_settings,
get_local_settings
)
from ayon_core.settings.constants import (
METADATA_KEYS,
@ -419,7 +418,14 @@ class ApplicationManager:
# Prepare known applications
app_defs = applications_addon_settings["applications"]
additional_apps = app_defs.pop("additional_apps")
app_defs.update(additional_apps)
for additional_app in additional_apps:
app_name = additional_app.pop("name")
if app_name in app_defs:
self.log.warning((
"Additional application '{}' is already"
" in built-in applications."
).format(app_name))
app_defs[app_name] = additional_app
for group_name, variant_defs in app_defs.items():
group = ApplicationGroup(group_name, variant_defs, self)
@ -1521,16 +1527,17 @@ def prepare_app_environments(
# Use environments from local settings
filtered_local_envs = {}
system_settings = data["system_settings"]
whitelist_envs = system_settings["general"].get("local_env_white_list")
if whitelist_envs:
local_settings = get_local_settings()
local_envs = local_settings.get("environments") or {}
filtered_local_envs = {
key: value
for key, value in local_envs.items()
if key in whitelist_envs
}
# NOTE Overrides for environment variables are not implemented in AYON.
# system_settings = data["system_settings"]
# whitelist_envs = system_settings["general"].get("local_env_white_list")
# if whitelist_envs:
# local_settings = get_local_settings()
# local_envs = local_settings.get("environments") or {}
# filtered_local_envs = {
# key: value
# for key, value in local_envs.items()
# if key in whitelist_envs
# }
# Apply local environment variables for already existing values
for key, value in filtered_local_envs.items():
@ -1649,8 +1656,9 @@ def apply_project_environments_value(
if project_settings is None:
project_settings = get_project_settings(project_name)
env_value = project_settings["global"]["project_environments"]
env_value = project_settings["core"]["project_environments"]
if env_value:
env_value = json.loads(env_value)
parsed_value = parse_environments(env_value, env_group)
env.update(acre.compute(
_merge_env(parsed_value, env),
@ -1691,15 +1699,15 @@ def prepare_context_environments(data, env_group=None, addons_manager=None):
app = data["app"]
context_env = {
"AVALON_PROJECT": project_doc["name"],
"AVALON_APP_NAME": app.full_name
"AYON_PROJECT_NAME": project_doc["name"],
"AYON_APP_NAME": app.full_name
}
if asset_doc:
asset_name = get_asset_name_identifier(asset_doc)
context_env["AVALON_ASSET"] = asset_name
context_env["AYON_FOLDER_PATH"] = asset_name
if task_name:
context_env["AVALON_TASK"] = task_name
context_env["AYON_TASK_NAME"] = task_name
log.debug(
"Context environments set:\n{}".format(
@ -1717,7 +1725,7 @@ def prepare_context_environments(data, env_group=None, addons_manager=None):
if not app.is_host:
return
data["env"]["AVALON_APP"] = app.host_name
data["env"]["AYON_HOST_NAME"] = app.host_name
if not asset_doc or not task_name:
# QUESTION replace with log.info and skip workfile discovery?
@ -1763,7 +1771,7 @@ def prepare_context_environments(data, env_group=None, addons_manager=None):
"Couldn't create workdir because: {}".format(str(exc))
)
data["env"]["AVALON_WORKDIR"] = workdir
data["env"]["AYON_WORKDIR"] = workdir
_prepare_last_workfile(data, workdir, addons_manager)
@ -1880,7 +1888,7 @@ def _prepare_last_workfile(data, workdir, addons_manager):
"Setting last workfile path: {}".format(last_workfile_path)
)
data["env"]["AVALON_LAST_WORKFILE"] = last_workfile_path
data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path
data["last_workfile_path"] = last_workfile_path
@ -1909,7 +1917,7 @@ def should_start_last_workfile(
project_settings = get_project_settings(project_name)
profiles = (
project_settings
["global"]
["core"]
["tools"]
["Workfiles"]
["last_workfile_on_startup"]
@ -1959,7 +1967,7 @@ def should_workfile_tool_start(
project_settings = get_project_settings(project_name)
profiles = (
project_settings
["global"]
["core"]
["tools"]
["Workfiles"]
["open_workfile_tool_on_startup"]

View file

@ -5,7 +5,6 @@ import platform
import getpass
import socket
from ayon_core.settings.lib import get_local_settings
from .execute import get_ayon_launcher_args
from .local_settings import get_local_site_id
@ -96,7 +95,6 @@ def get_all_current_info():
return {
"workstation": get_workstation_info(),
"env": os.environ.copy(),
"local_settings": get_local_settings(),
"ayon": get_ayon_info(),
}

View file

@ -257,7 +257,7 @@ class Logger:
return cls._process_name
# Get process name
process_name = os.environ.get("AVALON_APP_NAME")
process_name = os.environ.get("AYON_APP_NAME")
if not process_name:
try:
import psutil

View file

@ -1385,23 +1385,26 @@ def _get_image_dimensions(application, input_path, log):
def convert_color_values(application, color_value):
"""Get color mapping for ffmpeg and oiiotool.
Args:
application (str): Application for which command should be created.
color_value (list[int]): List of 8bit int values for RGBA.
color_value (tuple[int, int, int, float]): List of 8bit int values
for RGBA.
Returns:
str: ffmpeg returns hex string, oiiotool is string with floats.
"""
red, green, blue, alpha = color_value
if application == "ffmpeg":
return "{0:0>2X}{1:0>2X}{2:0>2X}@{3}".format(
red, green, blue, (alpha / 255.0)
red, green, blue, alpha
)
elif application == "oiiotool":
red = float(red / 255)
green = float(green / 255)
blue = float(blue / 255)
alpha = float(alpha / 255)
return "{0:.3f},{1:.3f},{2:.3f},{3:.3f}".format(
red, green, blue, alpha)

View file

@ -2,22 +2,27 @@ import os
import threading
import time
from ayon_core.modules import OpenPypeModule, ITrayModule, IPluginPaths
from ayon_core.modules import AYONAddon, ITrayModule, IPluginPaths
from ayon_core.client import get_asset_by_name
from .constants import CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH
class ClockifyModule(OpenPypeModule, ITrayModule, IPluginPaths):
class ClockifyModule(AYONAddon, ITrayModule, IPluginPaths):
name = "clockify"
def initialize(self, modules_settings):
clockify_settings = modules_settings[self.name]
self.enabled = clockify_settings["enabled"]
self.workspace_name = clockify_settings["workspace_name"]
def initialize(self, studio_settings):
enabled = self.name in studio_settings
workspace_name = None
if enabled:
clockify_settings = studio_settings[self.name]
workspace_name = clockify_settings["workspace_name"]
if self.enabled and not self.workspace_name:
raise Exception("Clockify Workspace is not set in settings.")
if enabled and workspace_name:
self.log.warning("Clockify Workspace is not set in settings.")
enabled = False
self.enabled = enabled
self.workspace_name = workspace_name
self.timer_manager = None
self.MessageWidgetClass = None

View file

@ -12,7 +12,7 @@ class ClockifyStart(LauncherAction):
def is_compatible(self, session):
"""Return whether the action is compatible with the session"""
if "AVALON_TASK" in session:
if "AYON_TASK_NAME" in session:
return True
return False
@ -20,9 +20,9 @@ class ClockifyStart(LauncherAction):
self.clockify_api.set_api()
user_id = self.clockify_api.user_id
workspace_id = self.clockify_api.workspace_id
project_name = session["AVALON_PROJECT"]
asset_name = session["AVALON_ASSET"]
task_name = session["AVALON_TASK"]
project_name = session["AYON_PROJECT_NAME"]
asset_name = session["AYON_FOLDER_PATH"]
task_name = session["AYON_TASK_NAME"]
description = asset_name
# fetch asset docs

View file

@ -36,7 +36,7 @@ class ClockifySync(LauncherAction):
raise ClockifyPermissionsCheckFailed(
"Current CLockify user is missing permissions for this action!"
)
project_name = session.get("AVALON_PROJECT") or ""
project_name = session.get("AYON_PROJECT_NAME") or ""
projects_to_sync = []
if project_name.strip():

View file

@ -4,7 +4,7 @@ import six
import sys
from ayon_core.lib import requests_get, Logger
from ayon_core.modules import OpenPypeModule, IPluginPaths
from ayon_core.modules import AYONAddon, IPluginPaths
class DeadlineWebserviceError(Exception):
@ -13,28 +13,28 @@ class DeadlineWebserviceError(Exception):
"""
class DeadlineModule(OpenPypeModule, IPluginPaths):
class DeadlineModule(AYONAddon, IPluginPaths):
name = "deadline"
def __init__(self, manager, settings):
self.deadline_urls = {}
super(DeadlineModule, self).__init__(manager, settings)
def initialize(self, modules_settings):
def initialize(self, studio_settings):
# This module is always enabled
deadline_settings = modules_settings[self.name]
self.enabled = deadline_settings["enabled"]
deadline_url = deadline_settings.get("DEADLINE_REST_URL")
if deadline_url:
self.deadline_urls = {"default": deadline_url}
else:
self.deadline_urls = deadline_settings.get("deadline_urls") # noqa: E501
deadline_urls = {}
enabled = self.name in studio_settings
if enabled:
deadline_settings = studio_settings[self.name]
deadline_urls = {
url_item["name"]: url_item["value"]
for url_item in deadline_settings["deadline_urls"]
}
if not self.deadline_urls:
self.enabled = False
self.log.warning(("default Deadline Webservice URL "
"not specified. Disabling module."))
return
if enabled and not deadline_urls:
enabled = False
self.log.warning((
"Deadline Webservice URLs are not specified. Disabling addon."
))
self.enabled = enabled
self.deadline_urls = deadline_urls
def get_plugin_paths(self):
"""Deadline plugin paths."""

View file

@ -47,11 +47,11 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
deadline_settings = (
render_instance.context.data
["system_settings"]
["modules"]
["deadline"]
)
default_server = render_instance.context.data["defaultDeadline"]
# QUESTION How and where is this is set? Should be removed?
instance_server = render_instance.data.get("deadlineServers")
if not instance_server:
self.log.debug("Using default server.")
@ -64,7 +64,10 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
asString=True
)
default_servers = deadline_settings["deadline_urls"]
default_servers = {
url_item["name"]: url_item["value"]
for url_item in deadline_settings["deadline_urls"]
}
project_servers = (
render_instance.context.data
["project_settings"]

View file

@ -1,39 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect instances that should be processed and published on DL.
"""
import os
import pyblish.api
from ayon_core.pipeline import PublishValidationError
class CollectDeadlinePublishableInstances(pyblish.api.InstancePlugin):
"""Collect instances that should be processed and published on DL.
Some long running publishes (not just renders) could be offloaded to DL,
this plugin compares theirs name against env variable, marks only
publishable by farm.
Triggered only when running only in headless mode, eg on a farm.
"""
order = pyblish.api.CollectorOrder + 0.499
label = "Collect Deadline Publishable Instance"
targets = ["remote"]
def process(self, instance):
self.log.debug("CollectDeadlinePublishableInstances")
publish_inst = os.environ.get("OPENPYPE_PUBLISH_SUBSET", '')
if not publish_inst:
raise PublishValidationError("OPENPYPE_PUBLISH_SUBSET env var "
"required for remote publishing")
subset_name = instance.data["subset"]
if subset_name == publish_inst:
self.log.debug("Publish {}".format(subset_name))
instance.data["publish"] = True
instance.data["farm"] = False
else:
self.log.debug("Skipping {}".format(subset_name))
instance.data["publish"] = False

View file

@ -80,11 +80,11 @@ class AfterEffectsSubmitDeadline(
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_WORKDIR",
"AVALON_APP_NAME",
"AYON_PROJECT_NAME",
"AYON_FOLDER_PATH",
"AYON_TASK_NAME",
"AYON_WORKDIR",
"AYON_APP_NAME",
"AYON_LOG_NO_COLORS",
"IS_TEST"
]

View file

@ -102,11 +102,11 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
"FTRACK_API_USER",
"FTRACK_SERVER",
"OPENPYPE_SG_USER",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_WORKDIR",
"AVALON_APP_NAME",
"AYON_PROJECT_NAME",
"AYON_FOLDER_PATH",
"AYON_TASK_NAME",
"AYON_WORKDIR",
"AYON_APP_NAME",
"IS_TEST"
]

View file

@ -220,11 +220,11 @@ class FusionSubmitDeadline(
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_WORKDIR",
"AVALON_APP_NAME",
"AYON_PROJECT_NAME",
"AYON_FOLDER_PATH",
"AYON_TASK_NAME",
"AYON_WORKDIR",
"AYON_APP_NAME",
"AYON_LOG_NO_COLORS",
"IS_TEST",
"AYON_BUNDLE_NAME",

View file

@ -273,11 +273,11 @@ class HarmonySubmitDeadline(
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_WORKDIR",
"AVALON_APP_NAME",
"AYON_PROJECT_NAME",
"AYON_FOLDER_PATH",
"AYON_TASK_NAME",
"AYON_WORKDIR",
"AYON_APP_NAME",
"AYON_LOG_NO_COLORS"
"IS_TEST"
]

View file

@ -98,11 +98,11 @@ class HoudiniCacheSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline
"FTRACK_API_USER",
"FTRACK_SERVER",
"OPENPYPE_SG_USER",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_WORKDIR",
"AVALON_APP_NAME",
"AYON_PROJECT_NAME",
"AYON_FOLDER_PATH",
"AYON_TASK_NAME",
"AYON_WORKDIR",
"AYON_APP_NAME",
"AYON_LOG_NO_COLORS",
]

View file

@ -44,7 +44,10 @@ class VrayRenderPluginInfo():
@attr.s
class RedshiftRenderPluginInfo():
SceneFile = attr.ib(default=None)
Version = attr.ib(default=None)
# Use "1" as the default Redshift version just because it
# default fallback version in Deadline's Redshift plugin
# if no version was specified
Version = attr.ib(default="1")
class HoudiniSubmitDeadline(
@ -204,11 +207,11 @@ class HoudiniSubmitDeadline(
"FTRACK_API_USER",
"FTRACK_SERVER",
"OPENPYPE_SG_USER",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_WORKDIR",
"AVALON_APP_NAME",
"AYON_PROJECT_NAME",
"AYON_FOLDER_PATH",
"AYON_TASK_NAME",
"AYON_WORKDIR",
"AYON_APP_NAME",
"AYON_LOG_NO_COLORS",
]

View file

@ -106,11 +106,11 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
"FTRACK_API_USER",
"FTRACK_SERVER",
"OPENPYPE_SG_USER",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_WORKDIR",
"AVALON_APP_NAME",
"AYON_PROJECT_NAME",
"AYON_FOLDER_PATH",
"AYON_TASK_NAME",
"AYON_WORKDIR",
"AYON_APP_NAME",
"IS_TEST"
]

View file

@ -207,11 +207,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
"FTRACK_API_USER",
"FTRACK_SERVER",
"OPENPYPE_SG_USER",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_WORKDIR",
"AVALON_APP_NAME",
"AYON_PROJECT_NAME",
"AYON_FOLDER_PATH",
"AYON_TASK_NAME",
"AYON_WORKDIR",
"AYON_APP_NAME",
"IS_TEST"
]

View file

@ -1,131 +0,0 @@
import os
import attr
from datetime import datetime
from ayon_core.pipeline import PublishXmlValidationError
from ayon_core.lib import is_in_tests
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
import pyblish.api
@attr.s
class MayaPluginInfo(object):
Build = attr.ib(default=None) # Don't force build
StrictErrorChecking = attr.ib(default=True)
SceneFile = attr.ib(default=None) # Input scene
Version = attr.ib(default=None) # Mandatory for Deadline
ProjectPath = attr.ib(default=None)
ScriptJob = attr.ib(default=True)
ScriptFilename = attr.ib(default=None)
class MayaSubmitRemotePublishDeadline(
abstract_submit_deadline.AbstractSubmitDeadline):
"""Submit Maya scene to perform a local publish in Deadline.
Publishing in Deadline can be helpful for scenes that publish very slow.
This way it can process in the background on another machine without the
Artist having to wait for the publish to finish on their local machine.
Submission is done through the Deadline Web Service. DL then triggers
`openpype/scripts/remote_publish.py`.
Each publishable instance creates its own full publish job.
Different from `ProcessSubmittedJobOnFarm` which creates publish job
depending on metadata json containing context and instance data of
rendered files.
"""
label = "Submit Scene to Deadline"
order = pyblish.api.IntegratorOrder
hosts = ["maya"]
families = ["publish.farm"]
targets = ["local"]
def process(self, instance):
# Ensure no errors so far
if not (all(result["success"]
for result in instance.context.data["results"])):
raise PublishXmlValidationError("Publish process has errors")
if not instance.data["publish"]:
self.log.warning("No active instances found. "
"Skipping submission..")
return
super(MayaSubmitRemotePublishDeadline, self).process(instance)
def get_job_info(self):
instance = self._instance
context = instance.context
project_name = instance.context.data["projectName"]
scene = instance.context.data["currentFile"]
scenename = os.path.basename(scene)
job_name = "{scene} [PUBLISH]".format(scene=scenename)
batch_name = "{code} - {scene}".format(code=project_name,
scene=scenename)
if is_in_tests():
batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
job_info = DeadlineJobInfo(Plugin="MayaBatch")
job_info.BatchName = batch_name
job_info.Name = job_name
job_info.UserName = context.data.get("user")
job_info.Comment = context.data.get("comment", "")
# use setting for publish job on farm, no reason to have it separately
project_settings = context.data["project_settings"]
deadline_publish_job_sett = project_settings["deadline"]["publish"]["ProcessSubmittedJobOnFarm"] # noqa
job_info.Department = deadline_publish_job_sett["deadline_department"]
job_info.ChunkSize = deadline_publish_job_sett["deadline_chunk_size"]
job_info.Priority = deadline_publish_job_sett["deadline_priority"]
job_info.Group = deadline_publish_job_sett["deadline_group"]
job_info.Pool = deadline_publish_job_sett["deadline_pool"]
# Include critical environment variables with submission + Session
keys = [
"FTRACK_API_USER",
"FTRACK_API_KEY",
"FTRACK_SERVER"
]
environment = {
key: os.environ[key]
for key in keys
if key in os.environ
}
environment["AVALON_PROJECT"] = project_name
environment["AVALON_ASSET"] = instance.context.data["asset"]
environment["AVALON_TASK"] = instance.context.data["task"]
environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME")
environment["OPENPYPE_PUBLISH_SUBSET"] = instance.data["subset"]
environment["AYON_LOG_NO_COLORS"] = "1"
environment["AYON_USERNAME"] = instance.context.data["user"]
environment["AYON_REMOTE_PUBLISH"] = "1"
for key, value in environment.items():
job_info.EnvironmentKeyValue[key] = value
def get_plugin_info(self):
# Not all hosts can import this module.
from maya import cmds
scene = self._instance.context.data["currentFile"]
plugin_info = MayaPluginInfo()
plugin_info.SceneFile = scene
plugin_info.ScriptFilename = "{OPENPYPE_REPOS_ROOT}/openpype/scripts/remote_publish.py" # noqa
plugin_info.Version = cmds.about(version=True)
plugin_info.ProjectPath = cmds.workspace(query=True,
rootDirectory=True)
return attr.asdict(plugin_info)

View file

@ -373,10 +373,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
keys = [
"PYTHONPATH",
"PATH",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_APP_NAME",
"AYON_PROJECT_NAME",
"AYON_FOLDER_PATH",
"AYON_TASK_NAME",
"AYON_APP_NAME",
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",

View file

@ -67,7 +67,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
"FTRACK_API_USER",
"FTRACK_API_KEY",
"FTRACK_SERVER",
"AVALON_APP_NAME",
"AYON_APP_NAME",
"AYON_USERNAME",
"OPENPYPE_SG_USER",
"KITSU_LOGIN",
@ -125,9 +125,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
create_metadata_path(instance, anatomy)
environment = {
"AVALON_PROJECT": instance.context.data["projectName"],
"AVALON_ASSET": instance.context.data["asset"],
"AVALON_TASK": instance.context.data["task"],
"AYON_PROJECT_NAME": instance.context.data["projectName"],
"AYON_FOLDER_PATH": instance.context.data["asset"],
"AYON_TASK_NAME": instance.context.data["task"],
"AYON_USERNAME": instance.context.data["user"],
"AYON_LOG_NO_COLORS": "1",
"IS_TEST": str(int(is_in_tests())),

View file

@ -130,7 +130,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
"FTRACK_API_USER",
"FTRACK_API_KEY",
"FTRACK_SERVER",
"AVALON_APP_NAME",
"AYON_APP_NAME",
"AYON_USERNAME",
"OPENPYPE_SG_USER",
"KITSU_LOGIN",
@ -202,9 +202,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
create_metadata_path(instance, anatomy)
environment = {
"AVALON_PROJECT": instance.context.data["projectName"],
"AVALON_ASSET": instance.context.data["asset"],
"AVALON_TASK": instance.context.data["task"],
"AYON_PROJECT_NAME": instance.context.data["projectName"],
"AYON_FOLDER_PATH": instance.context.data["asset"],
"AYON_TASK_NAME": instance.context.data["task"],
"AYON_USERNAME": instance.context.data["user"],
"AYON_LOG_NO_COLORS": "1",
"IS_TEST": str(int(is_in_tests())),
@ -330,151 +330,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
self.log.debug("Skipping local instance.")
return
data = instance.data.copy()
context = instance.context
self.context = context
self.anatomy = instance.context.data["anatomy"]
asset = data.get("asset") or context.data["asset"]
subset = data.get("subset")
start = instance.data.get("frameStart")
if start is None:
start = context.data["frameStart"]
end = instance.data.get("frameEnd")
if end is None:
end = context.data["frameEnd"]
handle_start = instance.data.get("handleStart")
if handle_start is None:
handle_start = context.data["handleStart"]
handle_end = instance.data.get("handleEnd")
if handle_end is None:
handle_end = context.data["handleEnd"]
fps = instance.data.get("fps")
if fps is None:
fps = context.data["fps"]
if data.get("extendFrames", False):
start, end = self._extend_frames(
asset,
subset,
start,
end,
data["overrideExistingFrame"])
try:
source = data["source"]
except KeyError:
source = context.data["currentFile"]
success, rootless_path = (
self.anatomy.find_root_template_from_path(source)
)
if success:
source = rootless_path
else:
# `rootless_path` is not set to `source` if none of roots match
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues."
).format(source))
family = "render"
if ("prerender" in instance.data["families"] or
"prerender.farm" in instance.data["families"]):
family = "prerender"
families = [family]
# pass review to families if marked as review
do_not_add_review = False
if data.get("review"):
families.append("review")
elif data.get("review") is False:
self.log.debug("Instance has review explicitly disabled.")
do_not_add_review = True
instance_skeleton_data = {
"family": family,
"subset": subset,
"families": families,
"asset": asset,
"frameStart": start,
"frameEnd": end,
"handleStart": handle_start,
"handleEnd": handle_end,
"frameStartHandle": start - handle_start,
"frameEndHandle": end + handle_end,
"comment": instance.data["comment"],
"fps": fps,
"source": source,
"extendFrames": data.get("extendFrames"),
"overrideExistingFrame": data.get("overrideExistingFrame"),
"pixelAspect": data.get("pixelAspect", 1),
"resolutionWidth": data.get("resolutionWidth", 1920),
"resolutionHeight": data.get("resolutionHeight", 1080),
"multipartExr": data.get("multipartExr", False),
"jobBatchName": data.get("jobBatchName", ""),
"useSequenceForReview": data.get("useSequenceForReview", True),
# map inputVersions `ObjectId` -> `str` so json supports it
"inputVersions": list(map(str, data.get("inputVersions", []))),
"colorspace": instance.data.get("colorspace"),
"stagingDir_persistent": instance.data.get(
"stagingDir_persistent", False
)
}
# skip locking version if we are creating v01
instance_version = instance.data.get("version") # take this if exists
if instance_version != 1:
instance_skeleton_data["version"] = instance_version
# transfer specific families from original instance to new render
for item in self.families_transfer:
if item in instance.data.get("families", []):
instance_skeleton_data["families"] += [item]
# transfer specific properties from original instance based on
# mapping dictionary `instance_transfer`
for key, values in self.instance_transfer.items():
if key in instance.data.get("families", []):
for v in values:
instance_skeleton_data[v] = instance.data.get(v)
# look into instance data if representations are not having any
# which are having tag `publish_on_farm` and include them
for repre in instance.data.get("representations", []):
staging_dir = repre.get("stagingDir")
if staging_dir:
success, rootless_staging_dir = (
self.anatomy.find_root_template_from_path(
staging_dir
)
)
if success:
repre["stagingDir"] = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging_dir))
repre["stagingDir"] = staging_dir
if "publish_on_farm" in repre.get("tags"):
# create representations attribute of not there
if "representations" not in instance_skeleton_data.keys():
instance_skeleton_data["representations"] = []
instance_skeleton_data["representations"].append(repre)
instances = None
assert data.get("expectedFiles"), ("Submission from old Pype version"
" - missing expectedFiles")
anatomy = instance.context.data["anatomy"]
instance_skeleton_data = create_skeleton_instance(

View file

@ -14,7 +14,7 @@ from Deadline.Scripting import (
DirectoryUtils,
ProcessUtils,
)
__version__ = "1.0.0"
__version__ = "1.0.1"
VERSION_REGEX = re.compile(
r"(?P<major>0|[1-9]\d*)"
r"\.(?P<minor>0|[1-9]\d*)"
@ -471,12 +471,21 @@ def inject_ayon_environment(deadlinePlugin):
]
add_kwargs = {
"project": job.GetJobEnvironmentKeyValue("AVALON_PROJECT"),
"asset": job.GetJobEnvironmentKeyValue("AVALON_ASSET"),
"task": job.GetJobEnvironmentKeyValue("AVALON_TASK"),
"app": job.GetJobEnvironmentKeyValue("AVALON_APP_NAME"),
"envgroup": "farm",
}
# Support backwards compatible keys
for key, env_keys in (
("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]),
("asset", ["AYON_FOLDER_PATH", "AVALON_ASSET"]),
("task", ["AYON_TASK_NAME", "AVALON_TASK"]),
("app", ["AYON_APP_NAME", "AVALON_APP_NAME"]),
):
value = ""
for env_key in env_keys:
value = job.GetJobEnvironmentKeyValue(env_key)
if value:
break
add_kwargs[key] = value
if job.GetJobEnvironmentKeyValue("IS_TEST"):
args.append("--automatic-tests")
@ -486,8 +495,8 @@ def inject_ayon_environment(deadlinePlugin):
args.extend(["--{}".format(key), value])
else:
raise RuntimeError((
"Missing required env vars: AVALON_PROJECT, AVALON_ASSET,"
" AVALON_TASK, AVALON_APP_NAME"
"Missing required env vars: AYON_PROJECT_NAME,"
" AYON_FOLDER_PATH, AYON_TASK_NAME, AYON_APP_NAME"
))
environment = {

View file

@ -361,8 +361,8 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin,
if not all(add_kwargs.values()):
raise RuntimeError((
"Missing required env vars: AVALON_PROJECT, AVALON_ASSET,"
" AVALON_TASK, AVALON_APP_NAME"
"Missing required env vars: AYON_PROJECT_NAME, AYON_FOLDER_PATH,"
" AYON_TASK_NAME, AYON_APP_NAME"
))
for key, value in add_kwargs.items():

View file

@ -63,7 +63,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin,
"FTRACK_API_USER",
"FTRACK_API_KEY",
"FTRACK_SERVER",
"AVALON_APP_NAME",
"AYON_APP_NAME",
"AYON_USERNAME",
"OPENPYPE_SG_USER",
]
@ -179,9 +179,9 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin,
anatomy_data = instance.context.data["anatomyData"]
environment = RREnvList({
"AVALON_PROJECT": anatomy_data["project"]["name"],
"AVALON_ASSET": instance.context.data["asset"],
"AVALON_TASK": anatomy_data["task"]["name"],
"AYON_PROJECT_NAME": anatomy_data["project"]["name"],
"AYON_FOLDER_PATH": instance.context.data["asset"],
"AYON_TASK_NAME": anatomy_data["task"]["name"],
"AYON_USERNAME": anatomy_data["user"]
})

View file

@ -136,10 +136,10 @@ class OpenPypeContextSelector:
def run_publish(self):
"""Run publish process."""
env = {"AVALON_PROJECT": str(self.context.get("project")),
"AVALON_ASSET": str(self.context.get("asset")),
"AVALON_TASK": str(self.context.get("task")),
# "AVALON_APP_NAME": str(self.context.get("app_name"))
env = {"AYON_PROJECT_NAME": str(self.context.get("project")),
"AYON_FOLDER_PATH": str(self.context.get("asset")),
"AYON_TASK_NAME": str(self.context.get("task")),
# "AYON_APP_NAME": str(self.context.get("app_name"))
}
print(">>> setting environment:")
@ -182,10 +182,18 @@ print("running selector")
selector = OpenPypeContextSelector()
# try to set context from environment
selector.context["project"] = os.getenv("AVALON_PROJECT")
selector.context["asset"] = os.getenv("AVALON_ASSET")
selector.context["task"] = os.getenv("AVALON_TASK")
# selector.context["app_name"] = os.getenv("AVALON_APP_NAME")
for key, env_keys in (
("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]),
("asset", ["AYON_FOLDER_PATH", "AVALON_ASSET"]),
("task", ["AYON_TASK_NAME", "AVALON_TASK"]),
# ("app_name", ["AYON_APP_NAME", "AVALON_APP_NAME"])
):
value = ""
for env_key in env_keys:
value = os.getenv(env_key)
if value:
break
selector.context[key] = value
# if anything inside is None, scratch the whole thing and
# ask user for context.

View file

@ -3,8 +3,8 @@ import platform
from ayon_core.client import get_asset_by_name
from ayon_core.modules import (
OpenPypeModule,
from ayon_core.addon import (
AYONAddon,
ITrayService,
IPluginPaths
)
@ -76,7 +76,7 @@ class ExampleTimersManagerConnector:
class TimersManager(
OpenPypeModule,
AYONAddon,
ITrayService,
IPluginPaths
):
@ -99,23 +99,27 @@ class TimersManager(
"start_timer"
)
def initialize(self, modules_settings):
timers_settings = modules_settings[self.name]
def initialize(self, studio_settings):
timers_settings = studio_settings.get(self.name)
enabled = timers_settings is not None
self.enabled = timers_settings["enabled"]
auto_stop = False
full_time = 0
message_time = 0
if enabled:
# When timer will stop if idle manager is running (minutes)
full_time = int(timers_settings["full_time"] * 60)
# How many minutes before the timer is stopped will popup the message
message_time = int(timers_settings["message_time"] * 60)
# When timer will stop if idle manager is running (minutes)
full_time = int(timers_settings["full_time"] * 60)
# How many minutes before the timer is stopped will popup the message
message_time = int(timers_settings["message_time"] * 60)
auto_stop = timers_settings["auto_stop"]
platform_name = platform.system().lower()
# Turn of auto stop on MacOs because pynput requires root permissions
# and on linux can cause thread locks on application close
if full_time <= 0 or platform_name in ("darwin", "linux"):
auto_stop = False
auto_stop = timers_settings["auto_stop"]
platform_name = platform.system().lower()
# Turn of auto stop on MacOs because pynput requires root permissions
# and on linux can cause thread locks on application close
if full_time <= 0 or platform_name in ("darwin", "linux"):
auto_stop = False
self.enabled = enabled
self.auto_stop = auto_stop
self.time_show_message = full_time - message_time
self.time_stop_timer = full_time

View file

@ -26,7 +26,7 @@ class LauncherAction(object):
Args:
session (dict[str, Union[str, None]]): Session data with
AVALON_PROJECT, AVALON_ASSET and AVALON_TASK.
AYON_PROJECT_NAME, AYON_FOLDER_PATH and AYON_TASK_NAME.
"""
return True

View file

@ -8,9 +8,6 @@ import numbers
import six
import time
from ayon_core.settings.lib import (
get_local_settings,
)
from ayon_core.client import get_project, get_ayon_server_api_connection
from ayon_core.lib import Logger, get_local_site_id
from ayon_core.lib.path_templates import (
@ -423,7 +420,7 @@ class Anatomy(BaseAnatomy):
def __init__(self, project_name=None, site_name=None):
if not project_name:
project_name = os.environ.get("AVALON_PROJECT")
project_name = os.environ.get("AYON_PROJECT_NAME")
if not project_name:
raise ProjectNotSet((
@ -453,7 +450,7 @@ class Anatomy(BaseAnatomy):
return cls._sync_server_addon_cache.data
@classmethod
def _get_studio_roots_overrides(cls, project_name, local_settings=None):
def _get_studio_roots_overrides(cls, project_name):
"""This would return 'studio' site override by local settings.
Notes:
@ -465,7 +462,6 @@ class Anatomy(BaseAnatomy):
Args:
project_name (str): Name of project.
local_settings (Optional[dict[str, Any]]): Prepared local settings.
Returns:
Union[Dict[str, str], None]): Local root overrides.
@ -488,11 +484,6 @@ class Anatomy(BaseAnatomy):
should be returned.
"""
# Local settings may be used more than once or may not be used at all
# - to avoid slowdowns 'get_local_settings' is not called until it's
# really needed
local_settings = None
# First check if sync server is available and enabled
sync_server = cls.get_sync_server_addon()
if sync_server is None or not sync_server.enabled:
@ -503,11 +494,8 @@ class Anatomy(BaseAnatomy):
# Use sync server to receive active site name
project_cache = cls._default_site_id_cache[project_name]
if project_cache.is_outdated:
local_settings = get_local_settings()
project_cache.update_data(
sync_server.get_active_site_type(
project_name, local_settings
)
sync_server.get_active_site_type(project_name)
)
site_name = project_cache.data
@ -517,12 +505,12 @@ class Anatomy(BaseAnatomy):
# Handle studio root overrides without sync server
# - studio root overrides can be done even without sync server
roots_overrides = cls._get_studio_roots_overrides(
project_name, local_settings
project_name
)
else:
# Ask sync server to get roots overrides
roots_overrides = sync_server.get_site_root_overrides(
project_name, site_name, local_settings
project_name, site_name
)
site_cache.update_data(roots_overrides)
return site_cache.data

View file

@ -254,7 +254,7 @@ def get_imageio_file_rules_colorspace_from_filepath(
# match file rule from path
colorspace_name = None
for file_rule in file_rules.values():
for file_rule in file_rules:
pattern = file_rule["pattern"]
extension = file_rule["ext"]
ext_match = re.match(
@ -281,7 +281,7 @@ def get_config_file_rules_colorspace_from_filepath(config_path, filepath):
filepath (str): path leading to a file
Returns:
Any[str, None]: matching colorspace name
Union[str, None]: matching colorspace name
"""
if not compatibility_check():
# python environment is not compatible with PyOpenColorIO
@ -918,28 +918,13 @@ def get_imageio_file_rules(project_name, host_name, project_settings=None):
Defaults to None.
Returns:
dict: file rules data
list[dict[str, Any]]: file rules data
"""
project_settings = project_settings or get_project_settings(project_name)
imageio_global, imageio_host = _get_imageio_settings(
project_settings, host_name)
# get file rules from global and host_name
frules_global = imageio_global["file_rules"]
activate_global_rules = (
frules_global.get("activate_global_file_rules", False)
# TODO: remove this in future - backward compatibility
or frules_global.get("enabled")
)
global_rules = frules_global["rules"]
if not activate_global_rules:
log.info(
"Colorspace global file rules are disabled."
)
global_rules = {}
# host is optional, some might not have any settings
frules_host = imageio_host.get("file_rules", {})
@ -949,8 +934,24 @@ def get_imageio_file_rules(project_name, host_name, project_settings=None):
# TODO: remove this in future - backward compatibility
activate_host_rules = frules_host.get("enabled", False)
# return host rules if activated or global rules
return frules_host["rules"] if activate_host_rules else global_rules
if activate_host_rules:
return frules_host["rules"]
# get file rules from global and host_name
frules_global = imageio_global["file_rules"]
activate_global_rules = (
frules_global.get("activate_global_file_rules", False)
# TODO: remove this in future - backward compatibility
or frules_global.get("enabled")
)
if not activate_global_rules:
log.info(
"Colorspace global file rules are disabled."
)
return []
return frules_global["rules"]
def get_remapped_colorspace_to_native(
@ -1017,7 +1018,7 @@ def _get_imageio_settings(project_settings, host_name):
tuple[dict, dict]: image io settings for global and host
"""
# get image io from global and host_name
imageio_global = project_settings["global"]["imageio"]
imageio_global = project_settings["core"]["imageio"]
# host is optional, some might not have any settings
imageio_host = project_settings.get(host_name, {}).get("imageio", {})

View file

@ -117,12 +117,12 @@ def install_host(host):
addons_manager = _get_addons_manager()
project_name = os.getenv("AVALON_PROJECT")
project_name = os.getenv("AYON_PROJECT_NAME")
# WARNING: This might be an issue
# - commented out because 'traypublisher' does not have set project
# if not project_name:
# raise ValueError(
# "AVALON_PROJECT is missing in environment variables."
# "AYON_PROJECT_NAME is missing in environment variables."
# )
log.info("Activating {}..".format(project_name))
@ -152,7 +152,7 @@ def install_host(host):
print("Registering pyblish target: automated")
pyblish.api.register_target("automated")
host_name = os.environ.get("AVALON_APP")
host_name = os.environ.get("AYON_HOST_NAME")
# Give option to handle host installation
for addon in addons_manager.get_enabled_addons():
@ -172,7 +172,7 @@ def install_ayon_plugins(project_name=None, host_name=None):
register_inventory_action_path(INVENTORY_PATH)
if host_name is None:
host_name = os.environ.get("AVALON_APP")
host_name = os.environ.get("AYON_HOST_NAME")
addons_manager = _get_addons_manager()
publish_plugin_dirs = addons_manager.collect_publish_plugin_paths(
@ -196,7 +196,7 @@ def install_ayon_plugins(project_name=None, host_name=None):
register_inventory_action_path(path)
if project_name is None:
project_name = os.environ.get("AVALON_PROJECT")
project_name = os.environ.get("AYON_PROJECT_NAME")
# Register studio specific plugins
if project_name:
@ -208,8 +208,8 @@ def install_ayon_plugins(project_name=None, host_name=None):
platform_name = platform.system().lower()
project_plugins = (
project_settings
.get("global", {})
.get("project_plugins", {})
["core"]
["project_plugins"]
.get(platform_name)
) or []
for path in project_plugins:
@ -331,7 +331,7 @@ def get_current_host_name():
"""Current host name.
Function is based on currently registered host integration or environment
variable 'AVALON_APP'.
variable 'AYON_HOST_NAME'.
Returns:
Union[str, None]: Name of host integration in current process or None.
@ -340,7 +340,7 @@ def get_current_host_name():
host = registered_host()
if isinstance(host, HostBase):
return host.name
return os.environ.get("AVALON_APP")
return os.environ.get("AYON_HOST_NAME")
def get_global_context():
@ -365,9 +365,9 @@ def get_global_context():
"""
return {
"project_name": os.environ.get("AVALON_PROJECT"),
"asset_name": os.environ.get("AVALON_ASSET"),
"task_name": os.environ.get("AVALON_TASK"),
"project_name": os.environ.get("AYON_PROJECT_NAME"),
"asset_name": os.environ.get("AYON_FOLDER_PATH"),
"task_name": os.environ.get("AYON_TASK_NAME"),
}
@ -474,10 +474,10 @@ def get_template_data_from_session(session=None, system_settings=None):
"""
if session is not None:
project_name = session["AVALON_PROJECT"]
asset_name = session["AVALON_ASSET"]
task_name = session["AVALON_TASK"]
host_name = session["AVALON_APP"]
project_name = session["AYON_PROJECT_NAME"]
asset_name = session["AYON_FOLDER_PATH"]
task_name = session["AYON_TASK_NAME"]
host_name = session["AYON_HOST_NAME"]
else:
context = get_current_context()
project_name = context["project_name"]
@ -525,8 +525,8 @@ def get_workdir_from_session(session=None, template_key=None):
"""
if session is not None:
project_name = session["AVALON_PROJECT"]
host_name = session["AVALON_APP"]
project_name = session["AYON_PROJECT_NAME"]
host_name = session["AYON_HOST_NAME"]
else:
project_name = get_current_project_name()
host_name = get_current_host_name()
@ -566,10 +566,10 @@ def get_custom_workfile_template_from_session(
"""
if session is not None:
project_name = session["AVALON_PROJECT"]
asset_name = session["AVALON_ASSET"]
task_name = session["AVALON_TASK"]
host_name = session["AVALON_APP"]
project_name = session["AYON_PROJECT_NAME"]
asset_name = session["AYON_FOLDER_PATH"]
task_name = session["AYON_TASK_NAME"]
host_name = session["AYON_HOST_NAME"]
else:
context = get_current_context()
project_name = context["project_name"]
@ -616,10 +616,10 @@ def change_current_context(asset_doc, task_name, template_key=None):
folder_path = get_asset_name_identifier(asset_doc)
envs = {
"AVALON_PROJECT": project_name,
"AVALON_ASSET": folder_path,
"AVALON_TASK": task_name,
"AVALON_WORKDIR": workdir,
"AYON_PROJECT_NAME": project_name,
"AYON_FOLDER_PATH": folder_path,
"AYON_TASK_NAME": task_name,
"AYON_WORKDIR": workdir,
}
# Update the Session and environments. Pop from environments all keys with

View file

@ -1536,7 +1536,7 @@ class CreateContext:
def host_name(self):
if hasattr(self.host, "name"):
return self.host.name
return os.environ["AVALON_APP"]
return os.environ["AYON_HOST_NAME"]
def get_current_project_name(self):
"""Project name which was used as current context on context reset.

View file

@ -45,7 +45,7 @@ class LegacyCreator(object):
def apply_settings(cls, project_settings, system_settings):
"""Apply OpenPype settings to a plugin class."""
host_name = os.environ.get("AVALON_APP")
host_name = os.environ.get("AYON_HOST_NAME")
plugin_type = "create"
plugin_type_settings = (
project_settings
@ -54,7 +54,7 @@ class LegacyCreator(object):
)
global_type_settings = (
project_settings
.get("global", {})
.get("core", {})
.get(plugin_type, {})
)
if not global_type_settings and not plugin_type_settings:

View file

@ -47,10 +47,10 @@ def get_subset_name_template(
if project_settings is None:
project_settings = get_project_settings(project_name)
tools_settings = project_settings["global"]["tools"]
profiles = tools_settings["creator"]["subset_name_profiles"]
tools_settings = project_settings["core"]["tools"]
profiles = tools_settings["creator"]["product_name_profiles"]
filtering_criteria = {
"families": family,
"product_types": family,
"hosts": host_name,
"tasks": task_name,
"task_types": task_type
@ -59,7 +59,19 @@ def get_subset_name_template(
matching_profile = filter_profiles(profiles, filtering_criteria)
template = None
if matching_profile:
template = matching_profile["template"]
# TODO remove formatting keys replacement
template = (
matching_profile["template"]
.replace("{task[name]}", "{task}")
.replace("{Task[name]}", "{Task}")
.replace("{TASK[NAME]}", "{TASK}")
.replace("{product[type]}", "{family}")
.replace("{Product[type]}", "{Family}")
.replace("{PRODUCT[TYPE]}", "{FAMILY}")
.replace("{folder[name]}", "{asset}")
.replace("{Folder[name]}", "{Asset}")
.replace("{FOLDER[NAME]}", "{ASSET}")
)
# Make sure template is set (matching may have empty string)
if not template:
@ -82,9 +94,9 @@ def get_subset_name(
"""Calculate subset name based on passed context and OpenPype settings.
Subst name templates are defined in `project_settings/global/tools/creator
/subset_name_profiles` where are profiles with host name, family, task name
and task type filters. If context does not match any profile then
`DEFAULT_SUBSET_TEMPLATE` is used as default template.
/product_name_profiles` where are profiles with host name, family,
task name and task type filters. If context does not match any profile
then `DEFAULT_SUBSET_TEMPLATE` is used as default template.
That's main reason why so many arguments are required to calculate subset
name.
@ -128,13 +140,13 @@ def get_subset_name(
return ""
if not host_name:
host_name = os.environ.get("AVALON_APP")
host_name = os.environ.get("AYON_HOST_NAME")
# Use only last part of class family value split by dot (`.`)
family = family.rsplit(".", 1)[-1]
if project_name is None:
project_name = os.environ.get("AVALON_PROJECT")
project_name = os.environ.get("AYON_PROJECT_NAME")
asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
task_info = asset_tasks.get(task_name) or {}

View file

@ -321,7 +321,7 @@ def prepare_representations(skeleton_data, exp_files, anatomy, aov_filter,
"""
representations = []
host_name = os.environ.get("AVALON_APP", "")
host_name = os.environ.get("AYON_HOST_NAME", "")
collections, remainders = clique.assemble(exp_files)
log = Logger.get_logger("farm_publishing")
@ -541,7 +541,7 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data,
"""
# TODO: this needs to be taking the task from context or instance
task = os.environ["AVALON_TASK"]
task = os.environ["AYON_TASK_NAME"]
anatomy = instance.context.data["anatomy"]
subset = skeleton["subset"]
@ -611,7 +611,7 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data,
log.info("Creating data for: {}".format(subset_name))
app = os.environ.get("AVALON_APP", "")
app = os.environ.get("AYON_HOST_NAME", "")
if isinstance(col, list):
render_file_name = os.path.basename(col[0])

Some files were not shown because too many files have changed in this diff Show more