Merge branch 'develop' of https://github.com/ynput/ayon-core into enhancement/OP-7075_Validate-Camera-Attributes

Kayla Man committed on 2024-02-14 00:06:23 +08:00
commit 55eef62b7e
104 changed files with 832 additions and 1763 deletions

View file

@ -92,8 +92,8 @@ class HostDirmap(object):
self.on_enable_dirmap()
for k, sp in enumerate(mapping["source-path"]):
dst = mapping["destination-path"][k]
for k, sp in enumerate(mapping["source_path"]):
dst = mapping["destination_path"][k]
try:
# add trailing slash if missing
sp = os.path.join(sp, '')
@ -116,7 +116,7 @@ class HostDirmap(object):
continue
def get_mappings(self):
"""Get translation from source-path to destination-path.
"""Get translation from source_path to destination_path.
It checks if Site Sync is enabled and the user chose to use the local
site; in that case the configuration in Local Settings takes precedence
@ -138,8 +138,8 @@ class HostDirmap(object):
if (
not mapping
or not mapping.get("destination-path")
or not mapping.get("source-path")
or not mapping.get("destination_path")
or not mapping.get("source_path")
):
return {}
self.log.info("Processing directory mapping ...")
@ -154,7 +154,7 @@ class HostDirmap(object):
in Local Settings.
Returns:
dict : { "source-path": [XXX], "destination-path": [YYYY]}
dict : { "source_path": [XXX], "destination_path": [YYYY]}
"""
project_name = self.project_name
@ -210,13 +210,13 @@ class HostDirmap(object):
continue
if os.path.isdir(active_site_dir):
if "destination-path" not in mapping:
mapping["destination-path"] = []
mapping["destination-path"].append(active_site_dir)
if "destination_path" not in mapping:
mapping["destination_path"] = []
mapping["destination_path"].append(active_site_dir)
if "source-path" not in mapping:
mapping["source-path"] = []
mapping["source-path"].append(remote_site_dir)
if "source_path" not in mapping:
mapping["source_path"] = []
mapping["source_path"].append(remote_site_dir)
self.log.debug("local sync mapping:: {}".format(mapping))
return mapping
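For reference, a minimal sketch of how a resolved mapping like the one returned above can be applied to a single path; the helper name and call site are illustrative, not part of this changeset:
import os

def remap_path(path, mapping):
    """Apply a resolved dirmap mapping to one path (sketch only)."""
    for index, source_path in enumerate(mapping["source_path"]):
        destination_path = mapping["destination_path"][index]
        # trailing slash avoids partial matches on directory names
        source_path = os.path.join(source_path, "")
        destination_path = os.path.join(destination_path, "")
        if path.startswith(source_path):
            return path.replace(source_path, destination_path, 1)
    return path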

View file

@ -15,8 +15,7 @@ from wsrpc_aiohttp import (
from qtpy import QtCore
from ayon_core.lib import Logger
from ayon_core.tests.lib import is_in_tests
from ayon_core.lib import Logger, is_in_tests
from ayon_core.pipeline import install_host
from ayon_core.addon import AddonsManager
from ayon_core.tools.utils import host_tools, get_ayon_qt_app

View file

@ -1,9 +1,11 @@
import os
import json
import clique
import pyblish.api
import bpy
import pyblish.api
from ayon_core.pipeline import publish
from ayon_core.hosts.blender.api import capture
from ayon_core.hosts.blender.api.lib import maintained_time
@ -23,6 +25,8 @@ class ExtractPlayblast(publish.Extractor, publish.OptionalPyblishPluginMixin):
optional = True
order = pyblish.api.ExtractorOrder + 0.01
presets = "{}"
def process(self, instance):
if not self.is_active(instance.data):
return
@ -59,8 +63,7 @@ class ExtractPlayblast(publish.Extractor, publish.OptionalPyblishPluginMixin):
self.log.debug(f"Outputting images to {path}")
project_settings = instance.context.data["project_settings"]["blender"]
presets = project_settings["publish"]["ExtractPlayblast"]["presets"]
presets = json.loads(self.presets)
preset = presets.get("default")
preset.update({
"camera": camera,

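The presets now arrive from settings as a raw JSON string and are parsed at run time. A minimal sketch of the pattern; the JSON content is illustrative, not the actual Blender settings schema:
import json

# the class attribute now holds a raw JSON string coming from settings
presets_setting = '{"default": {"resolution_x": 1920, "resolution_y": 1080}}'
presets = json.loads(presets_setting)
preset = presets.get("default") or {}
# runtime values are merged on top of the stored preset, as in the plugin above
preset.update({"camera": "Camera"})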
View file

@ -1,5 +1,6 @@
import os
import glob
import json
import pyblish.api
from ayon_core.pipeline import publish
@ -21,7 +22,7 @@ class ExtractThumbnail(publish.Extractor):
hosts = ["blender"]
families = ["review"]
order = pyblish.api.ExtractorOrder + 0.01
presets = {}
presets = "{}"
def process(self, instance):
self.log.debug("Extracting capture..")
@ -44,7 +45,8 @@ class ExtractThumbnail(publish.Extractor):
family = instance.data.get("family")
isolate = instance.data("isolate", None)
preset = self.presets.get(family, {})
presets = json.loads(self.presets)
preset = presets.get(family, {})
preset.update({
"camera": camera,

View file

@ -50,17 +50,28 @@ class LoadClipBatch(opfapi.ClipLoader):
version_name = version.get("name", None)
colorspace = self.get_colorspace(context)
# TODO remove '{folder[name]}' and '{product[name]}' replacement
clip_name_template = (
self.clip_name_template
.replace("{folder[name]}", "{asset}")
.replace("{product[name]}", "{subset}")
)
layer_rename_template = (
self.layer_rename_template
.replace("{folder[name]}", "{asset}")
.replace("{product[name]}", "{subset}")
)
# in case output is not in context replace key to representation
if not context["representation"]["context"].get("output"):
self.clip_name_template = self.clip_name_template.replace(
clip_name_template = clip_name_template.replace(
"output", "representation")
self.layer_rename_template = self.layer_rename_template.replace(
layer_rename_template = layer_rename_template.replace(
"output", "representation")
formatting_data = deepcopy(context["representation"]["context"])
formatting_data["batch"] = self.batch.name.get_value()
clip_name = StringTemplate(self.clip_name_template).format(
clip_name = StringTemplate(clip_name_template).format(
formatting_data)
# convert colorspace with ocio to flame mapping
@ -86,7 +97,7 @@ class LoadClipBatch(opfapi.ClipLoader):
"path": path.replace("\\", "/"),
"colorspace": colorspace,
"version": "v{:0>3}".format(version_name),
"layer_rename_template": self.layer_rename_template,
"layer_rename_template": layer_rename_template,
"layer_rename_patterns": self.layer_rename_patterns,
"context_data": formatting_data
}

View file

@ -1,6 +1,5 @@
import os
import re
import tempfile
from copy import deepcopy
import pyblish.api
@ -15,12 +14,12 @@ from ayon_core.pipeline.editorial import (
import flame
class ExtractSubsetResources(publish.Extractor):
class ExtractProductResources(publish.Extractor):
"""
Extractor for transcoding files from Flame clip
"""
label = "Extract subset resources"
label = "Extract product resources"
order = pyblish.api.ExtractorOrder
families = ["clip"]
hosts = ["flame"]
@ -47,7 +46,7 @@ class ExtractSubsetResources(publish.Extractor):
hide_ui_on_process = True
# settings
export_presets_mapping = {}
export_presets_mapping = []
def process(self, instance):
if not self.keep_original_representation:
@ -146,15 +145,21 @@ class ExtractSubsetResources(publish.Extractor):
# append staging dir for later cleanup
instance.context.data["cleanupFullPaths"].append(staging_dir)
export_presets_mapping = {}
for preset_mapping in deepcopy(self.export_presets_mapping):
name = preset_mapping.pop("name")
export_presets_mapping[name] = preset_mapping
# add default preset type for thumbnail and reviewable video
# update them with settings and override in case the same
# are found in there
_preset_keys = [k.split('_')[0] for k in self.export_presets_mapping]
_preset_keys = [k.split('_')[0] for k in export_presets_mapping]
export_presets = {
k: v for k, v in deepcopy(self.default_presets).items()
k: v
for k, v in deepcopy(self.default_presets).items()
if k not in _preset_keys
}
export_presets.update(self.export_presets_mapping)
export_presets.update(export_presets_mapping)
if not instance.data.get("versionData"):
instance.data["versionData"] = {}
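The export presets now come from settings as a list of dicts carrying a "name" key and are re-keyed into a mapping; the same "list of name/value items to dict" conversion recurs in several Maya plugins further below. A minimal sketch with illustrative data (the non-"name" keys are placeholders):
from copy import deepcopy

settings_items = [
    {"name": "thumbnail", "some_option": "value"},
]
export_presets_mapping = {}
for preset_mapping in deepcopy(settings_items):
    name = preset_mapping.pop("name")
    export_presets_mapping[name] = preset_mapping

# simpler name/value variant used e.g. for 'ext_mapping' and attribute settings
name_value_items = [{"name": "model", "value": "ma"}]
ext_mapping = {item["name"]: item["value"] for item in name_value_items}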

View file

@ -147,7 +147,16 @@ class GenericCreateSaver(Creator):
})
# build file path to render
filepath = self.temp_rendering_path_template.format(**formatting_data)
# TODO make sure the keys are available in 'formatting_data'
temp_rendering_path_template = (
self.temp_rendering_path_template
.replace("{product[name]}", "{subset}")
.replace("{product[type]}", "{family}")
.replace("{folder[name]}", "{asset}")
.replace("{task[name]}", "{task}")
)
filepath = temp_rendering_path_template.format(**formatting_data)
comp = get_current_comp()
tool["Clip"] = comp.ReverseMapPath(os.path.normpath(filepath))
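A small sketch of what the key replacement above achieves, assuming the legacy keys are the ones present in formatting_data; the template and values are illustrative:
template = "{workdir}/{product[name]}/{product[name]}_{frame}.exr"
legacy_template = (
    template
    .replace("{product[name]}", "{subset}")
    .replace("{folder[name]}", "{asset}")
    .replace("{task[name]}", "{task}")
)
# formats with the legacy keys the creator currently provides
print(legacy_template.format(workdir="/tmp/renders", subset="renderMain", frame="0001"))
# /tmp/renders/renderMain/renderMain_0001.exr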

View file

@ -52,7 +52,7 @@ Because Harmony projects are directories, this integration uses `.zip` as work f
### Show Workfiles on launch
You can show the Workfiles app when Harmony launches by setting environment variable `AVALON_HARMONY_WORKFILES_ON_LAUNCH=1`.
You can show the Workfiles app when Harmony launches by setting environment variable `AYON_HARMONY_WORKFILES_ON_LAUNCH=1`.
## Developing

View file

@ -349,7 +349,7 @@ function start() {
/** hostname or ip of server - should be localhost */
var host = '127.0.0.1';
/** port of the server */
var port = parseInt(System.getenv('AVALON_HARMONY_PORT'));
var port = parseInt(System.getenv('AYON_HARMONY_PORT'));
// Attach the client to the QApplication to preserve.
var app = QCoreApplication.instance();

View file

@ -189,14 +189,14 @@ def launch(application_path, *args):
install_host(harmony)
ProcessContext.port = random.randrange(49152, 65535)
os.environ["AVALON_HARMONY_PORT"] = str(ProcessContext.port)
os.environ["AYON_HARMONY_PORT"] = str(ProcessContext.port)
ProcessContext.application_path = application_path
# Launch Harmony.
setup_startup_scripts()
check_libs()
if not os.environ.get("AVALON_HARMONY_WORKFILES_ON_LAUNCH", False):
if not os.environ.get("AYON_HARMONY_WORKFILES_ON_LAUNCH", False):
open_empty_workfile()
return

View file

@ -155,7 +155,9 @@ class ExtractPointCloud(publish.Extractor):
custom_attr_list = []
attr_settings = self.settings["attribute"]
for key, value in attr_settings.items():
for attr in attr_settings:
key = attr["name"]
value = attr["value"]
custom_attr = "{0}.PRTChannels_{1}=True".format(operator,
value)
self.log.debug(

View file

@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
"""Validator for Attributes."""
import json
from pyblish.api import ContextPlugin, ValidatorOrder
from pymxs import runtime as rt
@ -61,9 +63,13 @@ class ValidateAttributes(OptionalPyblishPluginMixin,
@classmethod
def get_invalid(cls, context):
attributes = (
context.data["project_settings"]["max"]["publish"]
["ValidateAttributes"]["attributes"]
attributes = json.loads(
context.data
["project_settings"]
["max"]
["publish"]
["ValidateAttributes"]
["attributes"]
)
if not attributes:
return
@ -112,9 +118,13 @@ class ValidateAttributes(OptionalPyblishPluginMixin,
@classmethod
def repair(cls, context):
attributes = (
context.data["project_settings"]["max"]["publish"]
["ValidateAttributes"]["attributes"]
attributes = json.loads(
context.data
["project_settings"]
["max"]
["publish"]
["ValidateAttributes"]
["attributes"]
)
invalid_attributes = cls.get_invalid(context)
for attrs in invalid_attributes:

View file

@ -25,7 +25,7 @@ class ValidateLoadedPlugin(OptionalPyblishPluginMixin,
optional = True
actions = [RepairAction]
family_plugins_mapping = {}
family_plugins_mapping = []
@classmethod
def get_invalid(cls, instance):
@ -34,6 +34,12 @@ class ValidateLoadedPlugin(OptionalPyblishPluginMixin,
if not family_plugins_mapping:
return
# Backward compatibility - settings did have 'product_types'
if "product_types" in family_plugins_mapping:
family_plugins_mapping["families"] = family_plugins_mapping.pop(
"product_types"
)
invalid = []
# Find all plug-in requirements for current instance
instance_families = {instance.data["family"]}
@ -47,7 +53,9 @@ class ValidateLoadedPlugin(OptionalPyblishPluginMixin,
if not mapping:
return
match_families = {fam.strip() for fam in mapping["families"]}
match_families = {
fam.strip() for fam in mapping["families"]
}
has_match = "*" in match_families or match_families.intersection(
instance_families)

View file

@ -59,7 +59,9 @@ class ValidatePointCloud(pyblish.api.InstancePlugin):
event_name = sub_anim.name
opt = "${0}.{1}.export_particles".format(sel.name,
event_name)
for key, value in attr_settings.items():
for attr in attr_settings:
key = attr["name"]
value = attr["value"]
custom_attr = "{0}.PRTChannels_{1}".format(opt,
value)
try:

View file

@ -329,7 +329,7 @@ def generate_capture_preset(instance, camera, path,
# Update preset with current panel setting
# if override_viewport_options is turned off
if not capture_preset["Viewport Options"]["override_viewport_options"]:
if not capture_preset["ViewportOptions"]["override_viewport_options"]:
panel_preset = capture.parse_view(preset["panel"])
panel_preset.pop("camera")
preset.update(panel_preset)
@ -2937,14 +2937,15 @@ def load_capture_preset(data):
options.update(data["Generic"])
options.update(data["Resolution"])
camera_options.update(data['Camera Options'])
camera_options.update(data["CameraOptions"])
viewport_options.update(data["Renderer"])
# DISPLAY OPTIONS
disp_options = {}
for key, value in data['Display Options'].items():
if key.startswith('background'):
for key, value in data["DisplayOptions"].items():
if key.startswith("background"):
# Convert background, backgroundTop, backgroundBottom colors
if len(value) == 4:
# Ignore alpha + convert RGB to float
value = [
@ -2956,7 +2957,7 @@ def load_capture_preset(data):
elif key == "displayGradient":
disp_options[key] = value
options['display_options'] = disp_options
options["display_options"] = disp_options
# Viewport Options has a mixture of Viewport2 Options and Viewport Options
# to pass along to capture. So we'll need to differentiate between the two
@ -2981,7 +2982,7 @@ def load_capture_preset(data):
"motionBlurShutterOpenFraction",
"lineAAEnable"
}
for key, value in data['Viewport Options'].items():
for key, value in data["ViewportOptions"].items():
# There are some keys we want to ignore
if key in {"override_viewport_options", "high_quality"}:
@ -3140,119 +3141,6 @@ def fix_incompatible_containers():
"ReferenceLoader", type="string")
def _null(*args):
pass
class shelf():
'''A simple class to build shelves in maya. Since the build method is empty,
it should be extended by the derived class to build the necessary shelf
elements. By default it creates an empty shelf called "customShelf".'''
###########################################################################
'''This is an example shelf.'''
# class customShelf(_shelf):
# def build(self):
# self.addButon(label="button1")
# self.addButon("button2")
# self.addButon("popup")
# p = cmds.popupMenu(b=1)
# self.addMenuItem(p, "popupMenuItem1")
# self.addMenuItem(p, "popupMenuItem2")
# sub = self.addSubMenu(p, "subMenuLevel1")
# self.addMenuItem(sub, "subMenuLevel1Item1")
# sub2 = self.addSubMenu(sub, "subMenuLevel2")
# self.addMenuItem(sub2, "subMenuLevel2Item1")
# self.addMenuItem(sub2, "subMenuLevel2Item2")
# self.addMenuItem(sub, "subMenuLevel1Item2")
# self.addMenuItem(p, "popupMenuItem3")
# self.addButon("button3")
# customShelf()
###########################################################################
def __init__(self, name="customShelf", iconPath="", preset={}):
self.name = name
self.iconPath = iconPath
self.labelBackground = (0, 0, 0, 0)
self.labelColour = (.9, .9, .9)
self.preset = preset
self._cleanOldShelf()
cmds.setParent(self.name)
self.build()
def build(self):
'''This method should be overwritten in derived classes to actually
build the shelf elements. Otherwise, nothing is added to the shelf.'''
for item in self.preset['items']:
if not item.get('command'):
item['command'] = self._null
if item['type'] == 'button':
self.addButon(item['name'],
command=item['command'],
icon=item['icon'])
if item['type'] == 'menuItem':
self.addMenuItem(item['parent'],
item['name'],
command=item['command'],
icon=item['icon'])
if item['type'] == 'subMenu':
self.addMenuItem(item['parent'],
item['name'],
command=item['command'],
icon=item['icon'])
def addButon(self, label, icon="commandButton.png",
command=_null, doubleCommand=_null):
'''
Adds a shelf button with the specified label, command,
double click command and image.
'''
cmds.setParent(self.name)
if icon:
icon = os.path.join(self.iconPath, icon)
print(icon)
cmds.shelfButton(width=37, height=37, image=icon, label=label,
command=command, dcc=doubleCommand,
imageOverlayLabel=label, olb=self.labelBackground,
olc=self.labelColour)
def addMenuItem(self, parent, label, command=_null, icon=""):
'''
Adds a shelf button with the specified label, command,
double click command and image.
'''
if icon:
icon = os.path.join(self.iconPath, icon)
print(icon)
return cmds.menuItem(p=parent, label=label, c=command, i="")
def addSubMenu(self, parent, label, icon=None):
'''
Adds a sub menu item with the specified label and icon to
the specified parent popup menu.
'''
if icon:
icon = os.path.join(self.iconPath, icon)
print(icon)
return cmds.menuItem(p=parent, label=label, i=icon, subMenu=1)
def _cleanOldShelf(self):
'''
Checks if the shelf exists and empties it if it does
or creates it if it does not.
'''
if cmds.shelfLayout(self.name, ex=1):
if cmds.shelfLayout(self.name, q=1, ca=1):
for each in cmds.shelfLayout(self.name, q=1, ca=1):
cmds.deleteUI(each)
else:
cmds.shelfLayout(self.name, p="ShelfLayout")
def update_content_on_context_change():
"""
This will update scene content to match new asset on context change
@ -4059,10 +3947,10 @@ def get_capture_preset(task_name, task_type, subset, project_settings, log):
Args:
task_name (str): Task name.
take_type (str): Task type.
task_type (str): Task type.
subset (str): Subset name.
project_settings (dict): Project settings.
log (object): Logging object.
log (logging.Logger): Logging object.
"""
capture_preset = None
filtering_criteria = {
@ -4091,8 +3979,18 @@ def get_capture_preset(task_name, task_type, subset, project_settings, log):
"Falling back to deprecated Extract Playblast capture preset "
"because no new style playblast profiles are defined."
)
capture_preset = plugin_settings["capture_preset"]
capture_preset = plugin_settings.get("capture_preset")
if capture_preset:
# Create deepcopy of preset as we'll change the values
capture_preset = copy.deepcopy(capture_preset)
viewport_options = capture_preset["ViewportOptions"]
# Change 'list' to 'dict' for 'capture.py'
viewport_options["pluginObjects"] = {
item["name"]: item["value"]
for item in viewport_options["pluginObjects"]
}
return capture_preset or {}

View file

@ -46,7 +46,7 @@ class RenderSettings(object):
project_settings = get_project_settings(
get_current_project_name()
)
render_settings = project_settings["maya"]["RenderSettings"]
render_settings = project_settings["maya"]["render_settings"]
image_prefixes = {
"vray": render_settings["vray_renderer"]["image_prefix"],
"arnold": render_settings["arnold_renderer"]["image_prefix"],
@ -82,12 +82,12 @@ class RenderSettings(object):
try:
aov_separator = self._aov_chars[(
self._project_settings["maya"]
["RenderSettings"]
["render_settings"]
["aov_separator"]
)]
except KeyError:
aov_separator = "_"
reset_frame = self._project_settings["maya"]["RenderSettings"]["reset_current_frame"] # noqa
reset_frame = self._project_settings["maya"]["render_settings"]["reset_current_frame"] # noqa
if reset_frame:
start_frame = cmds.getAttr("defaultRenderGlobals.startFrame")
@ -131,7 +131,7 @@ class RenderSettings(object):
import maya.mel as mel # noqa: F401
createOptions()
render_settings = self._project_settings["maya"]["RenderSettings"]
render_settings = self._project_settings["maya"]["render_settings"]
arnold_render_presets = render_settings["arnold_renderer"] # noqa
# Force resetting settings and AOV list to avoid having to deal with
# AOV checking logic, for now.
@ -180,7 +180,7 @@ class RenderSettings(object):
from maya import cmds # noqa: F401
import maya.mel as mel # noqa: F401
render_settings = self._project_settings["maya"]["RenderSettings"]
render_settings = self._project_settings["maya"]["render_settings"]
redshift_render_presets = render_settings["redshift_renderer"]
remove_aovs = render_settings["remove_aovs"]
@ -239,7 +239,7 @@ class RenderSettings(object):
rman_render_presets = (
self._project_settings
["maya"]
["RenderSettings"]
["render_settings"]
["renderman_renderer"]
)
display_filters = rman_render_presets["display_filters"]
@ -304,7 +304,7 @@ class RenderSettings(object):
settings = cmds.ls(type="VRaySettingsNode")
node = settings[0] if settings else cmds.createNode("VRaySettingsNode")
render_settings = self._project_settings["maya"]["RenderSettings"]
render_settings = self._project_settings["maya"]["render_settings"]
vray_render_presets = render_settings["vray_renderer"]
# vrayRenderElement
remove_aovs = render_settings["remove_aovs"]
@ -390,7 +390,8 @@ class RenderSettings(object):
import maya.mel as mel # noqa: F401
for item in additional_attribs:
attribute, value = item
attribute = item["attribute"]
value = item["value"]
attribute = str(attribute) # ensure str conversion from settings
attribute_type = cmds.getAttr(attribute, type=True)
if attribute_type in {"long", "bool"}:

View file

@ -9,7 +9,8 @@ import maya.cmds as cmds
from ayon_core.pipeline import (
get_current_asset_name,
get_current_task_name
get_current_task_name,
registered_host
)
from ayon_core.pipeline.workfile import BuildWorkfile
from ayon_core.tools.utils import host_tools
@ -21,8 +22,10 @@ from .workfile_template_builder import (
create_placeholder,
update_placeholder,
build_workfile_template,
update_workfile_template,
update_workfile_template
)
from ayon_core.tools.workfile_template_build import open_template_ui
from .workfile_template_builder import MayaTemplateBuilder
log = logging.getLogger(__name__)
@ -167,16 +170,6 @@ def install(project_settings):
tearOff=True,
parent=MENU_NAME
)
cmds.menuItem(
"Create Placeholder",
parent=builder_menu,
command=create_placeholder
)
cmds.menuItem(
"Update Placeholder",
parent=builder_menu,
command=update_placeholder
)
cmds.menuItem(
"Build Workfile from template",
parent=builder_menu,
@ -187,6 +180,27 @@ def install(project_settings):
parent=builder_menu,
command=update_workfile_template
)
cmds.menuItem(
divider=True,
parent=builder_menu
)
cmds.menuItem(
"Open Template",
parent=builder_menu,
command=lambda *args: open_template_ui(
MayaTemplateBuilder(registered_host()), get_main_window()
),
)
cmds.menuItem(
"Create Placeholder",
parent=builder_menu,
command=create_placeholder
)
cmds.menuItem(
"Update Placeholder",
parent=builder_menu,
command=update_placeholder
)
cmds.setParent(MENU_NAME, menu=True)

View file

@ -22,6 +22,7 @@ from ayon_core.pipeline import (
LegacyCreator,
LoaderPlugin,
get_representation_path,
get_current_project_name,
)
from ayon_core.pipeline.load import LoadError
from ayon_core.client import get_asset_by_name
@ -585,6 +586,39 @@ class RenderlayerCreator(NewCreator, MayaCreatorBase):
project_name)
def get_load_color_for_family(family, settings=None):
"""Get color for family from settings.
Args:
family (str): Family name.
settings (Optional[dict]): Settings dictionary.
Returns:
Union[tuple[float, float, float], None]: RGB color.
"""
if settings is None:
settings = get_project_settings(get_current_project_name())
colors = settings["maya"]["load"]["colors"]
color = colors.get(family)
if not color:
return None
if len(color) == 3:
red, green, blue = color
elif len(color) == 4:
red, green, blue, _ = color
else:
raise ValueError("Invalid color definition {}".format(str(color)))
if isinstance(red, int):
red = red / 255.0
green = green / 255.0
blue = blue / 255.0
return red, green, blue
class Loader(LoaderPlugin):
hosts = ["maya"]
@ -611,33 +645,38 @@ class Loader(LoaderPlugin):
options["attach_to_root"] = True
custom_naming = self.load_settings[loader_key]
if not custom_naming['namespace']:
if not custom_naming["namespace"]:
raise LoadError("No namespace specified in "
"Maya ReferenceLoader settings")
elif not custom_naming['group_name']:
elif not custom_naming["group_name"]:
self.log.debug("No custom group_name, no group will be created.")
options["attach_to_root"] = False
asset = context['asset']
subset = context['subset']
asset = context["asset"]
subset = context["subset"]
family = (
subset["data"].get("family")
or subset["data"]["families"][0]
)
formatting_data = {
"asset_name": asset['name'],
"asset_type": asset['type'],
"asset_name": asset["name"],
"asset_type": asset["type"],
"folder": {
"name": asset["name"],
},
"subset": subset['name'],
"family": (
subset['data'].get('family') or
subset['data']['families'][0]
)
"subset": subset["name"],
"product": {
"name": subset["name"],
"type": family,
},
"family": family
}
custom_namespace = custom_naming['namespace'].format(
custom_namespace = custom_naming["namespace"].format(
**formatting_data
)
custom_group_name = custom_naming['group_name'].format(
custom_group_name = custom_naming["group_name"].format(
**formatting_data
)
@ -937,7 +976,7 @@ class ReferenceLoader(Loader):
"""
settings = get_project_settings(project_name)
use_env_var_as_root = (settings["maya"]
["maya-dirmap"]
["maya_dirmap"]
["use_env_var_as_root"])
if use_env_var_as_root:
anatomy = Anatomy(project_name)
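Usage of the new get_load_color_for_family helper, mirroring the loader changes further below; the node name is illustrative:
from maya import cmds
from ayon_core.hosts.maya.api.plugin import get_load_color_for_family

color = get_load_color_for_family("model")
if color is not None:
    red, green, blue = color
    cmds.setAttr("modelMain_GRP.useOutlinerColor", True)
    cmds.setAttr("modelMain_GRP.outlinerColor", red, green, blue)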

View file

@ -35,7 +35,7 @@ class CreateRenderlayer(plugin.RenderlayerCreator):
@classmethod
def apply_settings(cls, project_settings):
cls.render_settings = project_settings["maya"]["RenderSettings"]
cls.render_settings = project_settings["maya"]["render_settings"]
def create(self, subset_name, instance_data, pre_create_data):
# Only allow a single render instance to exist

View file

@ -75,7 +75,7 @@ class CreateReview(plugin.MayaCreator):
"review_width": preset["Resolution"]["width"],
"review_height": preset["Resolution"]["height"],
"isolate": preset["Generic"]["isolate_view"],
"imagePlane": preset["Viewport Options"]["imagePlane"],
"imagePlane": preset["ViewportOptions"]["imagePlane"],
"panZoom": preset["Generic"]["pan_zoom"]
}
for key, value in mapping.items():

View file

@ -23,7 +23,7 @@ class CreateVRayScene(plugin.RenderlayerCreator):
@classmethod
def apply_settings(cls, project_settings):
cls.render_settings = project_settings["maya"]["RenderSettings"]
cls.render_settings = project_settings["maya"]["render_settings"]
def create(self, subset_name, instance_data, pre_create_data):
# Only allow a single render instance to exist

View file

@ -15,6 +15,7 @@ from ayon_core.hosts.maya.api.lib import (
convert_to_maya_fps
)
from ayon_core.hosts.maya.api.pipeline import containerise
from ayon_core.hosts.maya.api.plugin import get_load_color_for_family
def is_sequence(files):
@ -66,11 +67,12 @@ class ArnoldStandinLoader(load.LoaderPlugin):
# Set color.
settings = get_project_settings(context["project"]["name"])
color = settings['maya']['load']['colors'].get('ass')
color = get_load_color_for_family("ass", settings)
if color is not None:
red, green, blue = color
cmds.setAttr(root + ".useOutlinerColor", True)
cmds.setAttr(
root + ".outlinerColor", color[0], color[1], color[2]
root + ".outlinerColor", red, green, blue
)
with maintained_selection():

View file

@ -9,6 +9,7 @@ from ayon_core.pipeline import (
get_representation_path
)
from ayon_core.settings import get_project_settings
from ayon_core.hosts.maya.api.plugin import get_load_color_for_family
class GpuCacheLoader(load.LoaderPlugin):
@ -39,13 +40,12 @@ class GpuCacheLoader(load.LoaderPlugin):
project_name = context["project"]["name"]
settings = get_project_settings(project_name)
colors = settings['maya']['load']['colors']
c = colors.get('model')
if c is not None:
color = get_load_color_for_family("model", settings)
if color is not None:
red, green, blue = color
cmds.setAttr(root + ".useOutlinerColor", 1)
cmds.setAttr(
root + ".outlinerColor",
(float(c[0]) / 255), (float(c[1]) / 255), (float(c[2]) / 255)
root + ".outlinerColor", red, green, blue
)
# Create transform with shape

View file

@ -16,6 +16,7 @@ from ayon_core.hosts.maya.api.lib import (
unique_namespace
)
from ayon_core.hosts.maya.api.pipeline import containerise
from ayon_core.hosts.maya.api.plugin import get_load_color_for_family
class RedshiftProxyLoader(load.LoaderPlugin):
@ -59,12 +60,13 @@ class RedshiftProxyLoader(load.LoaderPlugin):
# colour the group node
project_name = context["project"]["name"]
settings = get_project_settings(project_name)
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:
color = get_load_color_for_family(family, settings)
if color is not None:
red, green, blue = color
cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1)
cmds.setAttr("{0}.outlinerColor".format(group_node),
c[0], c[1], c[2])
cmds.setAttr(
"{0}.outlinerColor".format(group_node), red, green, blue
)
return containerise(
name=name,

View file

@ -1,4 +1,3 @@
import os
import difflib
import contextlib
@ -6,7 +5,7 @@ from maya import cmds
import qargparse
from ayon_core.settings import get_project_settings
import ayon_core.hosts.maya.api.plugin
from ayon_core.hosts.maya.api import plugin
from ayon_core.hosts.maya.api.lib import (
maintained_selection,
get_container_members,
@ -87,7 +86,7 @@ def preserve_modelpanel_cameras(container, log=None):
cmds.modelPanel(panel, edit=True, camera=new_camera)
class ReferenceLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader):
class ReferenceLoader(plugin.ReferenceLoader):
"""Reference file"""
families = ["model",
@ -185,14 +184,16 @@ class ReferenceLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader):
"{}.displayHandle".format(group_name), display_handle
)
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:
color = plugin.get_load_color_for_family(family, settings)
if color is not None:
red, green, blue = color
cmds.setAttr("{}.useOutlinerColor".format(group_name), 1)
cmds.setAttr("{}.outlinerColor".format(group_name),
(float(c[0]) / 255),
(float(c[1]) / 255),
(float(c[2]) / 255))
cmds.setAttr(
"{}.outlinerColor".format(group_name),
red,
green,
blue
)
cmds.setAttr(
"{}.displayHandle".format(group_name), display_handle

View file

@ -5,6 +5,7 @@ from ayon_core.pipeline import (
load,
get_representation_path
)
from ayon_core.hosts.maya.api.plugin import get_load_color_for_family
# TODO aiVolume doesn't automatically set velocity fps correctly, set manual?
@ -50,16 +51,11 @@ class LoadVDBtoArnold(load.LoaderPlugin):
project_name = context["project"]["name"]
settings = get_project_settings(project_name)
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:
color = get_load_color_for_family(family, settings)
if color is not None:
red, green, blue = color
cmds.setAttr(root + ".useOutlinerColor", 1)
cmds.setAttr(root + ".outlinerColor",
(float(c[0]) / 255),
(float(c[1]) / 255),
(float(c[2]) / 255)
)
cmds.setAttr(root + ".outlinerColor", red, green, blue)
# Create VRayVolumeGrid
grid_node = cmds.createNode("aiVolume",

View file

@ -5,6 +5,7 @@ from ayon_core.pipeline import (
load,
get_representation_path
)
from ayon_core.hosts.maya.api.plugin import get_load_color_for_family
class LoadVDBtoRedShift(load.LoaderPlugin):
@ -69,16 +70,11 @@ class LoadVDBtoRedShift(load.LoaderPlugin):
project_name = context["project"]["name"]
settings = get_project_settings(project_name)
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:
color = get_load_color_for_family(family, settings)
if color is not None:
red, green, blue = color
cmds.setAttr(root + ".useOutlinerColor", 1)
cmds.setAttr(root + ".outlinerColor",
(float(c[0])/255),
(float(c[1])/255),
(float(c[2])/255)
)
cmds.setAttr(root + ".outlinerColor", red, green, blue)
# Create VR
volume_node = cmds.createNode("RedshiftVolumeShape",

View file

@ -5,6 +5,7 @@ from ayon_core.pipeline import (
load,
get_representation_path
)
from ayon_core.hosts.maya.api.plugin import get_load_color_for_family
from maya import cmds
@ -129,15 +130,11 @@ class LoadVDBtoVRay(load.LoaderPlugin):
project_name = context["project"]["name"]
settings = get_project_settings(project_name)
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:
color = get_load_color_for_family(family, settings)
if color is not None:
red, green, blue = color
cmds.setAttr(root + ".useOutlinerColor", 1)
cmds.setAttr(root + ".outlinerColor",
float(c[0]) / 255,
float(c[1]) / 255,
float(c[2]) / 255)
cmds.setAttr(root + ".outlinerColor", red, green, blue)
# Create VRayVolumeGrid
grid_node = cmds.createNode("VRayVolumeGrid",

View file

@ -22,6 +22,7 @@ from ayon_core.hosts.maya.api.lib import (
unique_namespace
)
from ayon_core.hosts.maya.api.pipeline import containerise
from ayon_core.hosts.maya.api.plugin import get_load_color_for_family
class VRayProxyLoader(load.LoaderPlugin):
@ -80,15 +81,12 @@ class VRayProxyLoader(load.LoaderPlugin):
# colour the group node
project_name = context["project"]["name"]
settings = get_project_settings(project_name)
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:
color = get_load_color_for_family(family, settings)
if color is not None:
red, green, blue = color
cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1)
cmds.setAttr(
"{0}.outlinerColor".format(group_node),
(float(c[0]) / 255),
(float(c[1]) / 255),
(float(c[2]) / 255)
"{0}.outlinerColor".format(group_node), red, green, blue
)
return containerise(

View file

@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
import os
import maya.cmds as cmds # noqa
from ayon_core.settings import get_project_settings
from ayon_core.pipeline import (
@ -12,6 +11,7 @@ from ayon_core.hosts.maya.api.lib import (
unique_namespace
)
from ayon_core.hosts.maya.api.pipeline import containerise
from ayon_core.hosts.maya.api.plugin import get_load_color_for_family
class VRaySceneLoader(load.LoaderPlugin):
@ -58,14 +58,12 @@ class VRaySceneLoader(load.LoaderPlugin):
# colour the group node
project_name = context["project"]["name"]
settings = get_project_settings(project_name)
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:
color = get_load_color_for_family(family, settings)
if color is not None:
red, green, blue = color
cmds.setAttr("{0}.useOutlinerColor".format(root_node), 1)
cmds.setAttr("{0}.outlinerColor".format(root_node),
(float(c[0])/255),
(float(c[1])/255),
(float(c[2])/255)
cmds.setAttr(
"{0}.outlinerColor".format(root_node), red, green, blue
)
return containerise(

View file

@ -13,6 +13,7 @@ from ayon_core.pipeline import (
)
from ayon_core.hosts.maya.api import lib
from ayon_core.hosts.maya.api.pipeline import containerise
from ayon_core.hosts.maya.api.plugin import get_load_color_for_family
# Do not reset these values on update but only apply on first load
@ -81,16 +82,11 @@ class YetiCacheLoader(load.LoaderPlugin):
project_name = context["project"]["name"]
settings = get_project_settings(project_name)
colors = settings['maya']['load']['colors']
c = colors.get(family)
if c is not None:
color = get_load_color_for_family(family, settings)
if color is not None:
red, green, blue = color
cmds.setAttr(group_node + ".useOutlinerColor", 1)
cmds.setAttr(group_node + ".outlinerColor",
(float(c[0])/255),
(float(c[1])/255),
(float(c[2])/255)
)
cmds.setAttr(group_node + ".outlinerColor", red, green, blue)
nodes.append(group_node)

View file

@ -1,11 +1,10 @@
import maya.cmds as cmds
from ayon_core.settings import get_current_project_settings
import ayon_core.hosts.maya.api.plugin
from ayon_core.hosts.maya.api import plugin
from ayon_core.hosts.maya.api import lib
class YetiRigLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader):
class YetiRigLoader(plugin.ReferenceLoader):
"""This loader will load Yeti rig."""
families = ["yetiRig"]
@ -41,14 +40,12 @@ class YetiRigLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader):
groupName=group_name
)
settings = get_current_project_settings()
colors = settings["maya"]["load"]["colors"]
c = colors.get("yetiRig")
if c is not None:
color = plugin.get_load_color_for_family("yetiRig", settings)
if color is not None:
red, green, blue = color
cmds.setAttr(group_name + ".useOutlinerColor", 1)
cmds.setAttr(
group_name + ".outlinerColor",
(float(c[0]) / 255), (float(c[1]) / 255), (float(c[2]) / 255)
group_name + ".outlinerColor", red, green, blue
)
self[:] = nodes

View file

@ -39,7 +39,7 @@ class CollectReview(pyblish.api.InstancePlugin):
if display_lights == "project_settings":
settings = instance.context.data["project_settings"]
settings = settings["maya"]["publish"]["ExtractPlayblast"]
settings = settings["capture_preset"]["Viewport Options"]
settings = settings["capture_preset"]["ViewportOptions"]
display_lights = settings["displayLights"]
# Collect camera focal length.

View file

@ -1,4 +1,5 @@
import os
import json
from maya import cmds
@ -21,7 +22,7 @@ class ExtractCameraAlembic(publish.Extractor,
label = "Extract Camera (Alembic)"
hosts = ["maya"]
families = ["camera", "matchmove"]
bake_attributes = []
bake_attributes = "[]"
def process(self, instance):
@ -95,11 +96,12 @@ class ExtractCameraAlembic(publish.Extractor,
job_str += ' -file "{0}"'.format(path)
bake_attributes = json.loads(self.bake_attributes)
# bake specified attributes in preset
assert isinstance(self.bake_attributes, (list, tuple)), (
assert isinstance(bake_attributes, list), (
"Attributes to bake must be specified as a list"
)
for attr in self.bake_attributes:
for attr in bake_attributes:
self.log.debug("Adding {} attribute".format(attr))
job_str += " -attr {0}".format(attr)

View file

@ -112,9 +112,11 @@ class ExtractCameraMayaScene(publish.Extractor,
def process(self, instance):
"""Plugin entry point."""
# get settings
ext_mapping = (
instance.context.data["project_settings"]["maya"]["ext_mapping"]
)
maya_settings = instance.context.data["project_settings"]["maya"]
ext_mapping = {
item["name"]: item["value"]
for item in maya_settings["ext_mapping"]
}
if ext_mapping:
self.log.debug("Looking in settings for scene type ...")
# use extension mapping for first family found

View file

@ -37,9 +37,11 @@ class ExtractImportReference(publish.Extractor,
if not self.is_active(instance.data):
return
ext_mapping = (
instance.context.data["project_settings"]["maya"]["ext_mapping"]
)
maya_settings = instance.context.data["project_settings"]["maya"]
ext_mapping = {
item["name"]: item["value"]
for item in maya_settings["ext_mapping"]
}
if ext_mapping:
self.log.debug("Looking in settings for scene type ...")
# use extension mapping for first family found

View file

@ -431,9 +431,11 @@ class ExtractLook(publish.Extractor):
project settings.
"""
ext_mapping = (
instance.context.data["project_settings"]["maya"]["ext_mapping"]
)
maya_settings = instance.context.data["project_settings"]["maya"]
ext_mapping = {
item["name"]: item["value"]
for item in maya_settings["ext_mapping"]
}
if ext_mapping:
self.log.debug("Looking in settings for scene type ...")
# use extension mapping for first family found

View file

@ -43,9 +43,11 @@ class ExtractMayaSceneRaw(publish.Extractor, AYONPyblishPluginMixin):
def process(self, instance):
"""Plugin entry point."""
ext_mapping = (
instance.context.data["project_settings"]["maya"]["ext_mapping"]
)
maya_settings = instance.context.data["project_settings"]["maya"]
ext_mapping = {
item["name"]: item["value"]
for item in maya_settings["ext_mapping"]
}
if ext_mapping:
self.log.debug("Looking in settings for scene type ...")
# use extension mapping for first family found

View file

@ -35,9 +35,11 @@ class ExtractModel(publish.Extractor,
if not self.is_active(instance.data):
return
ext_mapping = (
instance.context.data["project_settings"]["maya"]["ext_mapping"]
)
maya_settings = instance.context.data["project_settings"]["maya"]
ext_mapping = {
item["name"]: item["value"]
for item in maya_settings["ext_mapping"]
}
if ext_mapping:
self.log.debug("Looking in settings for scene type ...")
# use extension mapping for first family found

View file

@ -18,9 +18,11 @@ class ExtractRig(publish.Extractor):
def process(self, instance):
"""Plugin entry point."""
ext_mapping = (
instance.context.data["project_settings"]["maya"]["ext_mapping"]
)
maya_settings = instance.context.data["project_settings"]["maya"]
ext_mapping = {
item["name"]: item["value"]
for item in maya_settings["ext_mapping"]
}
if ext_mapping:
self.log.debug("Looking in settings for scene type ...")
# use extension mapping for first family found

View file

@ -100,9 +100,11 @@ class ExtractYetiRig(publish.Extractor):
def process(self, instance):
"""Plugin entry point."""
ext_mapping = (
instance.context.data["project_settings"]["maya"]["ext_mapping"]
)
maya_settings = instance.context.data["project_settings"]["maya"]
ext_mapping = {
item["name"]: item["value"]
for item in maya_settings["ext_mapping"]
}
if ext_mapping:
self.log.debug("Looking in settings for scene type ...")
# use extension mapping for first family found

View file

@ -1,3 +1,4 @@
import json
from collections import defaultdict
import pyblish.api
@ -23,19 +24,19 @@ class ValidateAttributes(pyblish.api.InstancePlugin,
"""
order = ValidateContentsOrder
label = "Attributes"
label = "Validate Attributes"
hosts = ["maya"]
actions = [RepairAction]
optional = True
attributes = None
attributes = "{}"
def process(self, instance):
if not self.is_active(instance.data):
return
# Check for preset existence.
if not self.attributes:
if not self.get_attributes_data():
return
invalid = self.get_invalid(instance, compute=True)
@ -44,6 +45,10 @@ class ValidateAttributes(pyblish.api.InstancePlugin,
"Found attributes with invalid values: {}".format(invalid)
)
@classmethod
def get_attributes_data(cls):
return json.loads(cls.attributes)
@classmethod
def get_invalid(cls, instance, compute=False):
if compute:
@ -55,21 +60,22 @@ class ValidateAttributes(pyblish.api.InstancePlugin,
def get_invalid_attributes(cls, instance):
invalid_attributes = []
attributes_data = cls.get_attributes_data()
# Filter families.
families = [instance.data["family"]]
families += instance.data.get("families", [])
families = set(families) & set(cls.attributes.keys())
families = set(families) & set(attributes_data.keys())
if not families:
return []
# Get all attributes to validate.
attributes = defaultdict(dict)
for family in families:
if family not in cls.attributes:
if family not in attributes_data:
# No attributes to validate for family
continue
for preset_attr, preset_value in cls.attributes[family].items():
for preset_attr, preset_value in attributes_data[family].items():
node_name, attribute_name = preset_attr.split(".", 1)
attributes[node_name][attribute_name] = preset_value

View file

@ -39,7 +39,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin,
"yeticache"]
optional = True
actions = [RepairAction]
exclude_families = []
exclude_product_types = []
def process(self, instance):
if not self.is_active(instance.data):
@ -73,7 +73,9 @@ class ValidateFrameRange(pyblish.api.InstancePlugin,
# compare with data on instance
errors = []
if [ef for ef in self.exclude_families
# QUESTION shouldn't this be just:
# 'if instance.data["family"] in self.exclude_product_types:'
if [ef for ef in self.exclude_product_types
if instance.data["family"] in ef]:
return
if (inst_start != frame_start_handle):

View file

@ -30,14 +30,18 @@ class ValidatePluginPathAttributes(pyblish.api.InstancePlugin):
def get_invalid(cls, instance):
invalid = list()
file_attrs = cls.attribute
file_attrs = {
item["name"]: item["value"]
for item in cls.attribute
}
if not file_attrs:
return invalid
# Consider only valid node types to avoid "Unknown object type" warning
all_node_types = set(cmds.allNodeTypes())
node_types = [
key for key in file_attrs.keys()
key
for key in file_attrs.keys()
if key in all_node_types
]

View file

@ -55,12 +55,15 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin):
if staging_dir:
cls.log.debug(
"Staging dir found: \"{}\". Ignoring setting from "
"`project_settings/maya/RenderSettings/"
"`project_settings/maya/render_settings/"
"default_render_image_folder`.".format(staging_dir)
)
return staging_dir
return instance.context.data.get('project_settings')\
.get('maya') \
.get('RenderSettings') \
.get('default_render_image_folder')
return (
instance.context.data
["project_settings"]
["maya"]
["render_settings"]
["default_render_image_folder"]
)

View file

@ -265,7 +265,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
# load validation definitions from settings
settings_lights_flag = instance.context.data["project_settings"].get(
"maya", {}).get(
"RenderSettings", {}).get(
"render_settings", {}).get(
"enable_all_lights", False)
instance_lights_flag = instance.data.get("renderSetupIncludeLights")
@ -281,6 +281,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
# if so, compare its value from the one required.
for data in cls.get_nodes(instance, renderer):
for node in data["nodes"]:
# Why is 'PublishValidationError' caught here? How could it be
# raised by 'cmds.getAttr(...)'?
try:
render_value = cmds.getAttr(
"{}.{}".format(node, data["attribute"])
@ -310,11 +312,16 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
@classmethod
def get_nodes(cls, instance, renderer):
maya_settings = instance.context.data["project_settings"]["maya"]
renderer_key = "{}_render_attributes".format(renderer)
validation_settings = (
maya_settings["publish"]["ValidateRenderSettings"].get(
"{}_render_attributes".format(renderer)
) or []
)
renderer_key
)
) or []
validation_settings = [
(item["type"], item["value"])
for item in validation_settings
]
result = []
for attr, values in OrderedDict(validation_settings).items():
values = [convert_to_int_or_float(v) for v in values if v]

View file

@ -7,6 +7,7 @@ from ayon_core.hosts.maya.api import lib
from ayon_core.pipeline.publish import (
RepairAction,
ValidateContentsOrder,
PublishValidationError
)
@ -38,7 +39,8 @@ class ValidateRigJointsHidden(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
raise ValueError("Visible joints found: {0}".format(invalid))
raise PublishValidationError(
"Visible joints found: {0}".format(invalid))
@classmethod
def repair(cls, instance):

View file

@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
"""Plugin for validating naming conventions."""
import json
from maya import cmds
import pyblish.api
@ -35,29 +36,37 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin,
"""
order = ValidateContentsOrder
hosts = ['maya']
families = ['model']
hosts = ["maya"]
families = ["model"]
optional = True
label = 'Suffix Naming Conventions'
label = "Suffix Naming Conventions"
actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
SUFFIX_NAMING_TABLE = {"mesh": ["_GEO", "_GES", "_GEP", "_OSD"],
"nurbsCurve": ["_CRV"],
"nurbsSurface": ["_NRB"],
"locator": ["_LOC"],
"group": ["_GRP"]}
SUFFIX_NAMING_TABLE = json.dumps({
"mesh": ["_GEO", "_GES", "_GEP", "_OSD"],
"nurbsCurve": ["_CRV"],
"nurbsSurface": ["_NRB"],
"locator": ["_LOC"],
"group": ["_GRP"]
})
ALLOW_IF_NOT_IN_SUFFIX_TABLE = True
@classmethod
def get_table_for_invalid(cls):
ss = []
for k, v in cls.SUFFIX_NAMING_TABLE.items():
ss.append(" - <b>{}</b>: {}".format(k, ", ".join(v)))
suffix_naming_table = json.loads(cls.SUFFIX_NAMING_TABLE)
ss = [
" - <b>{}</b>: {}".format(k, ", ".join(v))
for k, v in suffix_naming_table.items()
]
return "<br>".join(ss)
@staticmethod
def is_valid_name(node_name, shape_type,
SUFFIX_NAMING_TABLE, ALLOW_IF_NOT_IN_SUFFIX_TABLE):
def is_valid_name(
node_name,
shape_type,
suffix_naming_table,
allow_if_not_in_suffix_table
):
"""Return whether node's name is correct.
The correctness for a transform's suffix is dependent on what
@ -70,18 +79,18 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin,
Args:
node_name (str): Node name.
shape_type (str): Type of node.
SUFFIX_NAMING_TABLE (dict): Mapping dict for suffixes.
ALLOW_IF_NOT_IN_SUFFIX_TABLE (dict): Filter dict.
suffix_naming_table (dict): Mapping dict for suffixes.
allow_if_not_in_suffix_table (bool): Default output.
"""
if shape_type not in SUFFIX_NAMING_TABLE:
return ALLOW_IF_NOT_IN_SUFFIX_TABLE
else:
suffices = SUFFIX_NAMING_TABLE[shape_type]
for suffix in suffices:
if node_name.endswith(suffix):
return True
return False
if shape_type not in suffix_naming_table:
return allow_if_not_in_suffix_table
suffices = suffix_naming_table[shape_type]
for suffix in suffices:
if node_name.endswith(suffix):
return True
return False
@classmethod
def get_invalid(cls, instance):
@ -91,9 +100,10 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin,
instance (:class:`pyblish.api.Instance`): published instance.
"""
transforms = cmds.ls(instance, type='transform', long=True)
transforms = cmds.ls(instance, type="transform", long=True)
invalid = []
suffix_naming_table = json.loads(cls.SUFFIX_NAMING_TABLE)
for transform in transforms:
shapes = cmds.listRelatives(transform,
shapes=True,
@ -101,9 +111,12 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin,
noIntermediate=True)
shape_type = cmds.nodeType(shapes[0]) if shapes else "group"
if not cls.is_valid_name(transform, shape_type,
cls.SUFFIX_NAMING_TABLE,
cls.ALLOW_IF_NOT_IN_SUFFIX_TABLE):
if not cls.is_valid_name(
transform,
shape_type,
suffix_naming_table,
cls.ALLOW_IF_NOT_IN_SUFFIX_TABLE
):
invalid.append(transform)
return invalid
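A tiny usage sketch of the refactored check, with the suffix table trimmed for brevity and illustrative node names:
# table as defined above, trimmed to two entries
table = {"mesh": ["_GEO", "_GES", "_GEP", "_OSD"], "group": ["_GRP"]}

# ValidateTransformNamingSuffix is the plugin class defined above
ValidateTransformNamingSuffix.is_valid_name("pCube1_GEO", "mesh", table, True)    # True
ValidateTransformNamingSuffix.is_valid_name("pCube1", "mesh", table, True)        # False
ValidateTransformNamingSuffix.is_valid_name("curve1", "nurbsCurve", table, True)  # True, type not in the trimmed table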

View file

@ -46,24 +46,5 @@ if bool(int(os.environ.get(key, "0"))):
lowestPriority=True
)
# Build a shelf.
shelf_preset = settings['maya'].get('project_shelf')
if shelf_preset:
icon_path = os.path.join(
os.environ['OPENPYPE_PROJECT_SCRIPTS'],
project_name,
"icons")
icon_path = os.path.abspath(icon_path)
for i in shelf_preset['imports']:
import_string = "from {} import {}".format(project_name, i)
print(import_string)
exec(import_string)
cmds.evalDeferred(
"mlib.shelf(name=shelf_preset['name'], iconPath=icon_path,"
" preset=shelf_preset)"
)
print("Finished OpenPype usersetup.")

View file

@ -21,10 +21,12 @@ from ayon_core.pipeline import (
AVALON_CONTAINER_ID,
get_current_asset_name,
get_current_task_name,
registered_host,
)
from ayon_core.pipeline.workfile import BuildWorkfile
from ayon_core.tools.utils import host_tools
from ayon_core.hosts.nuke import NUKE_ROOT_DIR
from ayon_core.tools.workfile_template_build import open_template_ui
from .command import viewer_update_and_undo_stop
from .lib import (
@ -55,6 +57,7 @@ from .workfile_template_builder import (
build_workfile_template,
create_placeholder,
update_placeholder,
NukeTemplateBuilder,
)
from .workio import (
open_file,
@ -176,7 +179,7 @@ def add_nuke_callbacks():
nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)
if nuke_settings["nuke-dirmap"]["enabled"]:
if nuke_settings["nuke_dirmap"]["enabled"]:
log.info("Added Nuke's dir-mapping callback ...")
# Add dirmap for file paths.
nuke.addFilenameFilter(dirmap_file_name_filter)
@ -313,7 +316,7 @@ def _install_menu():
lambda: BuildWorkfile().process()
)
menu_template = menu.addMenu("Template Builder") # creating template menu
menu_template = menu.addMenu("Template Builder")
menu_template.addCommand(
"Build Workfile from template",
lambda: build_workfile_template()
@ -321,6 +324,12 @@ def _install_menu():
if not ASSIST:
menu_template.addSeparator()
menu_template.addCommand(
"Open template",
lambda: open_template_ui(
NukeTemplateBuilder(registered_host()), get_main_window()
)
)
menu_template.addCommand(
"Create Place Holder",
lambda: create_placeholder()

View file

@ -7,7 +7,7 @@ from ayon_core.pipeline.workfile.workfile_template_builder import (
LoadPlaceholderItem,
CreatePlaceholderItem,
PlaceholderLoadMixin,
PlaceholderCreateMixin
PlaceholderCreateMixin,
)
from ayon_core.tools.workfile_template_build import (
WorkfileBuildPlaceholderDialog,

View file

@ -3,12 +3,11 @@ import sys
import contextlib
import traceback
from ayon_core.lib import env_value_to_bool, Logger
from ayon_core.lib import env_value_to_bool, Logger, is_in_tests
from ayon_core.addon import AddonsManager
from ayon_core.pipeline import install_host
from ayon_core.tools.utils import host_tools
from ayon_core.tools.utils import get_ayon_qt_app
from ayon_core.tests.lib import is_in_tests
from .launch_logic import ProcessLauncher, stub

View file

@ -21,7 +21,7 @@ from openpype_modules.webpublisher.lib import (
get_batch_asset_task_info,
parse_json
)
from ayon_core.tests.lib import is_in_tests
from ayon_core.lib import is_in_tests
class CollectBatchData(pyblish.api.ContextPlugin):

View file

@ -3,10 +3,9 @@ import re
import pyblish.api
from ayon_core.lib import prepare_template_data
from ayon_core.lib import prepare_template_data, is_in_tests
from ayon_core.hosts.photoshop import api as photoshop
from ayon_core.settings import get_project_settings
from ayon_core.tests.lib import is_in_tests
class CollectColorCodedInstances(pyblish.api.ContextPlugin):
@ -29,9 +28,8 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
Identifier:
id (str): "pyblish.avalon.instance"
"""
order = pyblish.api.CollectorOrder + 0.100
label = "Instances"
label = "Collect Color-coded Instances"
order = pyblish.api.CollectorOrder
hosts = ["photoshop"]
targets = ["automated"]
@ -42,7 +40,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
# flattened template cannot
subset_template_name = ""
create_flatten_image = "no"
flatten_subset_template = ""
flatten_product_name_template = ""
def process(self, context):
self.log.info("CollectColorCodedInstances")
@ -124,12 +122,12 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
if self.create_flatten_image != "no" and publishable_layers:
self.log.debug("create_flatten_image")
if not self.flatten_subset_template:
if not self.flatten_product_name_template:
self.log.warning("No template for flatten image")
return
fill_pairs.pop("layer")
subset = self.flatten_subset_template.format(
subset = self.flatten_product_name_template.format(
**prepare_template_data(fill_pairs))
first_layer = publishable_layers[0] # dummy layer

View file

@ -6,24 +6,17 @@ Provides:
instance -> family ("review")
"""
import os
import pyblish.api
from ayon_core.pipeline.create import get_subset_name
class CollectReview(pyblish.api.ContextPlugin):
"""Adds review to families for instances marked to be reviewable.
"""
label = "Collect Review"
label = "Review"
hosts = ["photoshop"]
order = pyblish.api.CollectorOrder + 0.1
publish = True
def process(self, context):
for instance in context:
creator_attributes = instance.data["creator_attributes"]

View file

@ -240,33 +240,34 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
def _install_shelves(self, project_settings):
shelves = project_settings["substancepainter"].get("shelves", {})
shelves = project_settings["substancepainter"].get("shelves", [])
if not shelves:
return
# Prepare formatting data if we detect any path which might have
# template tokens like {asset} in there.
formatting_data = {}
has_formatting_entries = any("{" in path for path in shelves.values())
has_formatting_entries = any("{" in item["value"] for item in shelves)
if has_formatting_entries:
project_name = self.get_current_project_name()
asset_name = self.get_current_asset_name()
task_name = self.get_current_task_name()
system_settings = get_system_settings()
formatting_data = get_template_data_with_names(project_name,
asset_name,
task_name,
system_settings)
formatting_data = get_template_data_with_names(
project_name, asset_name, task_name, system_settings
)
anatomy = Anatomy(project_name)
formatting_data["root"] = anatomy.roots
for name, path in shelves.items():
shelf_name = None
for shelve_item in shelves:
# Allow formatting with anatomy for the paths
path = shelve_item["value"]
if "{" in path:
path = StringTemplate.format_template(path, formatting_data)
name = shelve_item["name"]
shelf_name = None
try:
shelf_name = lib.load_shelf(path, name=name)
except ValueError as exc:

View file

@ -158,6 +158,7 @@ from .ayon_info import (
is_running_from_build,
is_staging_enabled,
is_dev_mode_enabled,
is_in_tests,
)
@ -229,6 +230,8 @@ __all__ = [
"IniSettingRegistry",
"JSONSettingRegistry",
"AYONSecureRegistry",
"AYONSettingsRegistry",
"OpenPypeSecureRegistry",
"OpenPypeSettingsRegistry",
"get_local_site_id",
@ -271,6 +274,7 @@ __all__ = [
"terminal",
"get_datetime_data",
"get_timestamp",
"get_formatted_current_time",
"Logger",
@ -278,6 +282,7 @@ __all__ = [
"is_running_from_build",
"is_staging_enabled",
"is_dev_mode_enabled",
"is_in_tests",
"requests_get",
"requests_post"

View file

@ -230,29 +230,25 @@ class ApplicationGroup:
self.manager = manager
self._data = data
self.enabled = data.get("enabled", True)
self.label = data.get("label") or None
self.icon = data.get("icon") or None
self._environment = data.get("environment") or {}
self.enabled = data["enabled"]
self.label = data["label"] or None
self.icon = data["icon"] or None
env = {}
try:
env = json.loads(data["environment"])
except Exception:
pass
self._environment = env
host_name = data.get("host_name", None)
host_name = data["host_name"] or None
self.is_host = host_name is not None
self.host_name = host_name
variants = data.get("variants") or {}
key_label_mapping = variants.pop(M_DYNAMIC_KEY_LABEL, {})
for variant_name, variant_data in variants.items():
if variant_name in METADATA_KEYS:
continue
if "variant_label" not in variant_data:
variant_label = key_label_mapping.get(variant_name)
if variant_label:
variant_data["variant_label"] = variant_label
variants[variant_name] = Application(
variant_name, variant_data, self
)
settings_variants = data["variants"]
variants = {}
for variant_data in settings_variants:
app_variant = Application(variant_data, self)
variants[app_variant.name] = app_variant
self.variants = variants
@ -274,62 +270,56 @@ class Application:
Object by itself does nothing special.
Args:
name (str): Specific version (or variant) of application.
e.g. "maya2020", "nuke11.3", etc.
data (dict): Data for the version containing information about
executables, variant label or if is enabled.
Only required key is `executables`.
group (ApplicationGroup): App group object that created the application
and under which application belongs.
"""
def __init__(self, name, data, group):
self.name = name
self.group = group
def __init__(self, data, group):
self._data = data
name = data["name"]
label = data["label"] or name
enabled = False
if group.enabled:
enabled = data.get("enabled", True)
self.enabled = enabled
self.use_python_2 = data.get("use_python_2", False)
self.label = data.get("variant_label") or name
self.full_name = "/".join((group.name, name))
if group.label:
full_label = " ".join((group.label, self.label))
full_label = " ".join((group.label, label))
else:
full_label = self.label
self.full_label = full_label
self._environment = data.get("environment") or {}
full_label = label
env = {}
try:
env = json.loads(data["environment"])
except Exception:
pass
arguments = data.get("arguments")
arguments = data["arguments"]
if isinstance(arguments, dict):
arguments = arguments.get(platform.system().lower())
if not arguments:
arguments = []
_executables = data["executables"].get(platform.system().lower(), [])
executables = [
ApplicationExecutable(executable)
for executable in _executables
]
self.group = group
self.name = name
self.label = label
self.enabled = enabled
self.use_python_2 = data.get("use_python_2", False)
self.full_name = "/".join((group.name, name))
self.full_label = full_label
self.arguments = arguments
if "executables" not in data:
self.executables = [
UndefinedApplicationExecutable()
]
return
_executables = data["executables"]
if isinstance(_executables, dict):
_executables = _executables.get(platform.system().lower())
if not _executables:
_executables = []
executables = []
for executable in _executables:
executables.append(ApplicationExecutable(executable))
self.executables = executables
self._environment = env
def __repr__(self):
return "<{}> - {}".format(self.__class__.__name__, self.full_name)
@ -384,12 +374,12 @@ class ApplicationManager:
"""Load applications and tools and store them by their full name.
Args:
system_settings (dict): Preloaded system settings. When passed manager
studio_settings (dict): Preloaded studio settings. When passed manager
will always use these values. Gives ability to create manager
using different settings.
"""
def __init__(self, system_settings=None):
def __init__(self, studio_settings=None):
self.log = Logger.get_logger(self.__class__.__name__)
self.app_groups = {}
@ -397,16 +387,16 @@ class ApplicationManager:
self.tool_groups = {}
self.tools = {}
self._system_settings = system_settings
self._studio_settings = studio_settings
self.refresh()
def set_system_settings(self, system_settings):
def set_studio_settings(self, studio_settings):
"""Ability to change init system settings.
This will trigger refresh of manager.
"""
self._system_settings = system_settings
self._studio_settings = studio_settings
self.refresh()
@ -417,72 +407,30 @@ class ApplicationManager:
self.tool_groups.clear()
self.tools.clear()
if self._system_settings is not None:
settings = copy.deepcopy(self._system_settings)
if self._studio_settings is not None:
settings = copy.deepcopy(self._studio_settings)
else:
settings = get_system_settings(
clear_metadata=False, exclude_locals=False
)
all_app_defs = {}
applications_addon_settings = settings["applications"]
# Prepare known applications
app_defs = settings["applications"]
additional_apps = {}
app_defs = applications_addon_settings["applications"]
additional_apps = app_defs.pop("additional_apps")
app_defs.update(additional_apps)
for group_name, variant_defs in app_defs.items():
if group_name in METADATA_KEYS:
continue
if group_name == "additional_apps":
additional_apps = variant_defs
else:
all_app_defs[group_name] = variant_defs
# Prepare additional applications
# - First find dynamic keys that can be used as labels of group
dynamic_keys = {}
for group_name, variant_defs in additional_apps.items():
if group_name == M_DYNAMIC_KEY_LABEL:
dynamic_keys = variant_defs
break
# Add additional apps to known applications
for group_name, variant_defs in additional_apps.items():
if group_name in METADATA_KEYS:
continue
# Determine group label
label = variant_defs.get("label")
if not label:
# Look for label set in dynamic labels
label = dynamic_keys.get(group_name)
if not label:
label = group_name
variant_defs["label"] = label
all_app_defs[group_name] = variant_defs
for group_name, variant_defs in all_app_defs.items():
if group_name in METADATA_KEYS:
continue
group = ApplicationGroup(group_name, variant_defs, self)
self.app_groups[group_name] = group
for app in group:
self.applications[app.full_name] = app
tools_definitions = settings["tools"]["tool_groups"]
tool_label_mapping = tools_definitions.pop(M_DYNAMIC_KEY_LABEL, {})
for tool_group_name, tool_group_data in tools_definitions.items():
if not tool_group_name or tool_group_name in METADATA_KEYS:
continue
tool_group_label = (
tool_label_mapping.get(tool_group_name) or tool_group_name
)
group = EnvironmentToolGroup(
tool_group_name, tool_group_label, tool_group_data, self
)
self.tool_groups[tool_group_name] = group
tools_definitions = applications_addon_settings["tool_groups"]
for tool_group_data in tools_definitions:
group = EnvironmentToolGroup(tool_group_data, self)
self.tool_groups[group.name] = group
for tool in group:
self.tools[tool.full_name] = tool
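A minimal usage sketch, assuming the addon settings layout parsed above; the application group and tool group contents are omitted, so the manager ends up empty in this example.

studio_settings = {
    "applications": {
        "applications": {"additional_apps": {}},  # app group definitions would live here
        "tool_groups": [],                        # list of tool group definitions
    },
}
manager = ApplicationManager(studio_settings=studio_settings)
print(manager.applications)   # {} - no groups defined in this sketch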
@ -571,30 +519,31 @@ class EnvironmentToolGroup:
are same.
Args:
name (str): Name of the tool group.
data (dict): Group's information with it's variants.
data (dict): Group information with variants.
manager (ApplicationManager): Manager that creates the group.
"""
def __init__(self, name, label, data, manager):
def __init__(self, data, manager):
name = data["name"]
label = data["label"]
self.name = name
self.label = label
self._data = data
self.manager = manager
self._environment = data["environment"]
variants = data.get("variants") or {}
label_by_key = variants.pop(M_DYNAMIC_KEY_LABEL, {})
environment = {}
try:
environment = json.loads(data["environment"])
except Exception:
pass
self._environment = environment
variants = data.get("variants") or []
variants_by_name = {}
for variant_name, variant_data in variants.items():
if variant_name in METADATA_KEYS:
continue
variant_label = label_by_key.get(variant_name) or variant_name
tool = EnvironmentTool(
variant_name, variant_label, variant_data, self
)
variants_by_name[variant_name] = tool
for variant_data in variants:
tool = EnvironmentTool(variant_data, self)
variants_by_name[tool.name] = tool
self.variants = variants_by_name
def __repr__(self):
@ -615,23 +564,25 @@ class EnvironmentTool:
Structure of tool information.
Args:
name (str): Name of the tool.
variant_data (dict): Variant data with environments and
host and app variant filters.
group (str): Name of group which wraps tool.
group (EnvironmentToolGroup): Group which wraps the tool.
"""
def __init__(self, name, label, variant_data, group):
def __init__(self, variant_data, group):
# Backwards compatibility 3.9.1 - 3.9.2
# - 'variant_data' contained only environments but contain also host
# and application variant filters
host_names = variant_data.get("host_names", [])
app_variants = variant_data.get("app_variants", [])
name = variant_data["name"]
label = variant_data["label"]
host_names = variant_data["host_names"]
app_variants = variant_data["app_variants"]
if "environment" in variant_data:
environment = variant_data["environment"]
else:
environment = variant_data
environment = {}
try:
environment = json.loads(variant_data["environment"])
except Exception:
pass
self.host_names = host_names
self.app_variants = app_variants

View file

@ -38,6 +38,16 @@ def is_staging_enabled():
return os.getenv("AYON_USE_STAGING") == "1"
def is_in_tests():
"""Process is running in automatic tests mode.
Returns:
bool: True if running in tests.
"""
return os.environ.get("AYON_IN_TESTS") == "1"
def is_dev_mode_enabled():
"""Dev mode is enabled in AYON.

View file

@ -7,10 +7,10 @@ from datetime import datetime
from ayon_core.lib import (
env_value_to_bool,
collect_frames,
is_in_tests,
)
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
from ayon_core.tests.lib import is_in_tests
@attr.s

View file

@ -10,10 +10,10 @@ from ayon_core.lib import (
BoolDef,
NumberDef,
TextDef,
is_in_tests,
)
from ayon_core.pipeline.publish import AYONPyblishPluginMixin
from ayon_core.pipeline.farm.tools import iter_expected_files
from ayon_core.tests.lib import is_in_tests
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo

View file

@ -12,7 +12,7 @@ import pyblish.api
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
from ayon_core.tests.lib import is_in_tests
from ayon_core.lib import is_in_tests
class _ZipFile(ZipFile):

View file

@ -7,11 +7,11 @@ import pyblish.api
from ayon_core.lib import (
TextDef,
NumberDef,
is_in_tests,
)
from ayon_core.pipeline import (
AYONPyblishPluginMixin
)
from ayon_core.tests.lib import is_in_tests
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo

View file

@ -6,10 +6,10 @@ from datetime import datetime
import pyblish.api
from ayon_core.pipeline import AYONPyblishPluginMixin
from ayon_core.tests.lib import is_in_tests
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
from ayon_core.lib import (
is_in_tests,
BoolDef,
NumberDef
)

View file

@ -18,6 +18,7 @@ Attributes:
from __future__ import print_function
import os
import json
import getpass
import copy
import re
@ -35,14 +36,14 @@ from ayon_core.lib import (
BoolDef,
NumberDef,
TextDef,
EnumDef
EnumDef,
is_in_tests,
)
from ayon_core.hosts.maya.api.lib_rendersettings import RenderSettings
from ayon_core.hosts.maya.api.lib import get_attr_in_layer
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
from ayon_core.tests.lib import is_in_tests
from ayon_core.pipeline.farm.tools import iter_expected_files
@ -130,8 +131,15 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
cls.group = settings.get("group", cls.group)
cls.strict_error_checking = settings.get("strict_error_checking",
cls.strict_error_checking)
cls.jobInfo = settings.get("jobInfo", cls.jobInfo)
cls.pluginInfo = settings.get("pluginInfo", cls.pluginInfo)
job_info = settings.get("jobInfo")
if job_info:
job_info = json.loads(job_info)
plugin_info = settings.get("pluginInfo")
if plugin_info:
plugin_info = json.loads(plugin_info)
cls.jobInfo = job_info or cls.jobInfo
cls.pluginInfo = plugin_info or cls.pluginInfo
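For illustration, jobInfo/pluginInfo now arrive as JSON strings in the settings and are decoded above; a hedged example payload (only keys referenced in this hunk, example values).

settings = {
    "group": "",                                   # read via settings.get("group")
    "strict_error_checking": True,
    "jobInfo": "{\"Priority\": 50}",               # JSON string -> dict
    "pluginInfo": "{\"StrictErrorChecking\": true}",
}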
def get_job_info(self):
job_info = DeadlineJobInfo(Plugin="MayaBatch")
@ -251,7 +259,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
default_rs_include_lights = (
instance.context.data['project_settings']
['maya']
['RenderSettings']
['render_settings']
['enable_all_lights']
)

View file

@ -3,7 +3,7 @@ import attr
from datetime import datetime
from ayon_core.pipeline import PublishXmlValidationError
from ayon_core.tests.lib import is_in_tests
from ayon_core.lib import is_in_tests
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo

View file

@ -10,8 +10,8 @@ import pyblish.api
from ayon_core.pipeline.publish import (
AYONPyblishPluginMixin
)
from ayon_core.tests.lib import is_in_tests
from ayon_core.lib import (
is_in_tests,
BoolDef,
NumberDef
)
@ -39,10 +39,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
concurrent_tasks = 1
group = ""
department = ""
limit_groups = {}
limit_groups = []
use_gpu = False
env_allowed_keys = []
env_search_replace_values = {}
env_search_replace_values = []
workfile_dependency = True
use_published_workfile = True
@ -404,8 +404,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
# finally search replace in values of any key
if self.env_search_replace_values:
for key, value in environment.items():
for _k, _v in self.env_search_replace_values.items():
environment[key] = value.replace(_k, _v)
for item in self.env_search_replace_values:
environment[key] = value.replace(
item["name"], item["value"]
)
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
@ -541,8 +543,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
import nuke
captured_groups = []
for lg_name, list_node_class in self.limit_groups.items():
for node_class in list_node_class:
for limit_group in self.limit_groups:
lg_name = limit_group["name"]
for node_class in limit_group["value"]:
for node in nuke.allNodes(recurseGroups=True):
# ignore all nodes not member of defined class
if node.Class() not in node_class:
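Illustrative values for the list-based settings handled above; the group name, node classes and replacement paths below are invented examples.

limit_groups = [
    {"name": "nuke_writes", "value": ["Write", "DeepWrite"]},
]
env_search_replace_values = [
    {"name": "P:/", "value": "//server/projects/"},
]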

View file

@ -12,8 +12,7 @@ from ayon_core.client import (
get_last_version_by_subset_name,
)
from ayon_core.pipeline import publish
from ayon_core.lib import EnumDef
from ayon_core.tests.lib import is_in_tests
from ayon_core.lib import EnumDef, is_in_tests
from ayon_core.pipeline.version_start import get_versioning_start
from ayon_core.pipeline.farm.pyblish_functions import (

View file

@ -13,8 +13,7 @@ from ayon_core.client import (
get_last_version_by_subset_name,
)
from ayon_core.pipeline import publish
from ayon_core.lib import EnumDef
from ayon_core.tests.lib import is_in_tests
from ayon_core.lib import EnumDef, is_in_tests
from ayon_core.pipeline.version_start import get_versioning_start
from ayon_core.pipeline.farm.pyblish_functions import (
@ -99,12 +98,33 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
"karma_rop", "vray_rop",
"redshift_rop"]
aov_filter = {"maya": [r".*([Bb]eauty).*"],
"blender": [r".*([Bb]eauty).*"],
"aftereffects": [r".*"], # for everything from AE
"harmony": [r".*"], # for everything from AE
"celaction": [r".*"],
"max": [r".*"]}
aov_filter = [
{
"name": "maya",
"value": [r".*([Bb]eauty).*"]
},
{
"name": "blender",
"value": [r".*([Bb]eauty).*"]
},
{
# for everything from AE
"name": "aftereffects",
"value": [r".*"]
},
{
"name": "harmony",
"value": [r".*"]
},
{
"name": "celaction",
"value": [r".*"]
},
{
"name": "max",
"value": [r".*"]
},
]
environ_keys = [
"FTRACK_API_USER",
@ -506,17 +526,23 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
self.log.debug("Instance has review explicitly disabled.")
do_not_add_review = True
aov_filter = {
item["name"]: item["value"]
for item in self.aov_filter
}
if isinstance(instance.data.get("expectedFiles")[0], dict):
instances = create_instances_for_aov(
instance, instance_skeleton_data,
self.aov_filter, self.skip_integration_repre_list,
do_not_add_review)
aov_filter,
self.skip_integration_repre_list,
do_not_add_review
)
else:
representations = prepare_representations(
instance_skeleton_data,
instance.data.get("expectedFiles"),
anatomy,
self.aov_filter,
aov_filter,
self.skip_integration_repre_list,
do_not_add_review,
instance.context,

View file

@ -15,6 +15,7 @@ import re
import os
import platform
__version__ = "1.0.0"
######################################################################
# This is the function that Deadline calls to get an instance of the
@ -52,6 +53,9 @@ class AyonDeadlinePlugin(DeadlinePlugin):
del self.RenderArgumentCallback
def InitializeProcess(self):
self.LogInfo(
"Initializing process with AYON plugin {}".format(__version__)
)
self.PluginType = PluginType.Simple
self.StdoutHandling = True

View file

@ -14,7 +14,7 @@ from Deadline.Scripting import (
DirectoryUtils,
ProcessUtils,
)
__version__ = "1.0.0"
VERSION_REGEX = re.compile(
r"(?P<major>0|[1-9]\d*)"
r"\.(?P<minor>0|[1-9]\d*)"
@ -593,7 +593,7 @@ def inject_render_job_id(deadlinePlugin):
def __main__(deadlinePlugin):
print("*** GlobalJobPreload start ...")
print("*** GlobalJobPreload {} start ...".format(__version__))
print(">>> Getting job ...")
job = deadlinePlugin.GetJob()

View file

@ -10,7 +10,12 @@ from datetime import datetime
import pyblish.api
from ayon_core.lib import BoolDef, NumberDef, is_running_from_build
from ayon_core.lib import (
BoolDef,
NumberDef,
is_running_from_build,
is_in_tests,
)
from ayon_core.lib.execute import run_ayon_launcher_process
from ayon_core.modules.royalrender.api import Api as rrApi
from ayon_core.modules.royalrender.rr_job import (
@ -22,7 +27,6 @@ from ayon_core.modules.royalrender.rr_job import (
from ayon_core.pipeline import AYONPyblishPluginMixin
from ayon_core.pipeline.publish import KnownPublishError
from ayon_core.pipeline.publish.lib import get_published_workfile_instance
from ayon_core.tests.lib import is_in_tests
class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin,

View file

@ -19,10 +19,10 @@ from ayon_core.client import (
get_asset_name_identifier,
get_ayon_server_api_connection,
)
from ayon_core.lib import is_in_tests
from ayon_core.lib.events import emit_event
from ayon_core.addon import load_addons, AddonsManager
from ayon_core.settings import get_project_settings
from ayon_core.tests.lib import is_in_tests
from .publish.lib import filter_pyblish_plugins
from .anatomy import Anatomy

View file

@ -17,12 +17,11 @@ def get_general_template_data(system_settings=None):
if not system_settings:
system_settings = get_system_settings()
studio_name = system_settings["general"]["studio_name"]
studio_code = system_settings["general"]["studio_code"]
core_settings = system_settings["core"]
return {
"studio": {
"name": studio_name,
"code": studio_code
"name": core_settings["studio_name"],
"code": core_settings["studio_code"]
},
"user": get_ayon_username()
}
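A brief usage sketch; the printed values depend on the studio's 'core' settings.

data = get_general_template_data()
# e.g. "MyStudio (mst)" - studio name/code now come from the 'core' settings
print("{studio[name]} ({studio[code]})".format(**data))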

View file

@ -321,7 +321,7 @@ class BuildWorkfile:
continue
# Check families
profile_families = profile.get("families")
profile_families = profile.get("product_types")
if not profile_families:
self.log.warning((
"Build profile is missing families configuration: {0}"
@ -338,7 +338,7 @@ class BuildWorkfile:
continue
# Prepare lowered families and representation names
profile["families_lowered"] = [
profile["product_types_lowered"] = [
fam.lower() for fam in profile_families
]
profile["repre_names_lowered"] = [
@ -375,11 +375,11 @@ class BuildWorkfile:
family_low = family.lower()
for profile in profiles:
# Skip profile if does not contain family
if family_low not in profile["families_lowered"]:
if family_low not in profile["product_types_lowered"]:
continue
# Precompile name filters as regexes
profile_regexes = profile.get("subset_name_filters")
profile_regexes = profile.get("product_name_filters")
if profile_regexes:
_profile_regexes = []
for regex in profile_regexes:
@ -538,7 +538,7 @@ class BuildWorkfile:
build_presets += self.build_presets.get("linked_assets", [])
subset_ids_ordered = []
for preset in build_presets:
for preset_family in preset["families"]:
for preset_family in preset["product_types"]:
for id, subset in subsets_by_id.items():
if preset_family not in subset["data"].get("families", []):
continue
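An illustrative build profile using the renamed keys handled above; the "repre_names" key is assumed from the repre_names_lowered derivation and every value is an example.

profile = {
    "product_types": ["model", "look"],
    "product_name_filters": ["^modelMain$"],
    "repre_names": ["ma", "abc"],   # assumed source of repre_names_lowered
}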

View file

@ -553,6 +553,12 @@ class AbstractTemplateBuilder(object):
self.clear_shared_populate_data()
def open_template(self):
"""Open template file with registered host."""
template_preset = self.get_template_preset()
template_path = template_preset["path"]
self.host.open_file(template_path)
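A hypothetical usage sketch; 'builder' stands for a concrete AbstractTemplateBuilder subclass instance created for the current host (construction elided).

builder.open_template()   # resolves the template preset and calls host.open_file(path)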
@abstractmethod
def import_template(self, template_path):
"""

View file

@ -5,7 +5,7 @@ import shutil
import pyblish.api
import re
from ayon_core.tests.lib import is_in_tests
from ayon_core.lib import is_in_tests
class CleanUp(pyblish.api.InstancePlugin):

View file

@ -43,7 +43,7 @@ class CollectAudio(pyblish.api.ContextPlugin):
"unreal"
]
audio_subset_name = "audioMain"
audio_product_name = "audioMain"
def process(self, context):
# Fake filtering by family inside context plugin
@ -71,9 +71,9 @@ class CollectAudio(pyblish.api.ContextPlugin):
asset_names = set(instances_by_asset_name.keys())
self.log.debug((
"Searching for audio subset '{subset}' in assets {assets}"
"Searching for audio product '{subset}' in assets {assets}"
).format(
subset=self.audio_subset_name,
subset=self.audio_product_name,
assets=", ".join([
'"{}"'.format(asset_name)
for asset_name in asset_names
@ -130,11 +130,11 @@ class CollectAudio(pyblish.api.ContextPlugin):
}
asset_ids = set(asset_id_by_name.values())
# Query subsets with name define by 'audio_subset_name' attr
# Query subsets with name defined by 'audio_product_name' attr
# - one or no subsets with the name should be available on an asset
subset_docs = get_subsets(
project_name,
subset_names=[self.audio_subset_name],
subset_names=[self.audio_product_name],
asset_ids=asset_ids,
fields=["_id", "parent"]
)

View file

@ -61,7 +61,10 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
("AVALON_ASSET", asset_name),
("AVALON_TASK", task_name)
):
os.environ[key] = value
if value is None:
os.environ.pop(key, None)
else:
os.environ[key] = value
def create_instance(
self,

View file

@ -1,8 +1,7 @@
import os
import pyblish.api
from ayon_core.lib import get_version_from_path
from ayon_core.tests.lib import is_in_tests
from ayon_core.lib import get_version_from_path, is_in_tests
from ayon_core.pipeline import KnownPublishError

View file

@ -74,7 +74,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
alpha_exts = ["exr", "png", "dpx"]
# Preset attributes
profiles = None
profiles = []
def process(self, instance):
self.log.debug(str(instance.data["representations"]))
@ -112,7 +112,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
self.profiles,
{
"hosts": host_name,
"families": family,
"product_types": family,
},
logger=self.log)
if not profile:
@ -719,12 +719,12 @@ class ExtractReview(pyblish.api.InstancePlugin):
lut_filters = self.lut_filters(new_repre, instance, ffmpeg_input_args)
ffmpeg_video_filters.extend(lut_filters)
bg_alpha = 0
bg_alpha = 0.0
bg_color = output_def.get("bg_color")
if bg_color:
bg_red, bg_green, bg_blue, bg_alpha = bg_color
if bg_alpha > 0:
if bg_alpha > 0.0:
if not temp_data["input_allow_bg"]:
self.log.info((
"Output definition has defined BG color input was"
@ -734,8 +734,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
bg_color_hex = "#{0:0>2X}{1:0>2X}{2:0>2X}".format(
bg_red, bg_green, bg_blue
)
bg_color_alpha = float(bg_alpha) / 255
bg_color_str = "{}@{}".format(bg_color_hex, bg_color_alpha)
bg_color_str = "{}@{}".format(bg_color_hex, bg_alpha)
self.log.info("Applying BG color {}".format(bg_color_str))
color_args = [
@ -1079,7 +1078,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
fill_color_hex = "{0:0>2X}{1:0>2X}{2:0>2X}".format(
f_red, f_green, f_blue
)
fill_color_alpha = float(f_alpha) / 255
fill_color_alpha = f_alpha
line_thickness = letter_box_def["line_thickness"]
line_color = letter_box_def["line_color"]
@ -1087,7 +1086,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
line_color_hex = "{0:0>2X}{1:0>2X}{2:0>2X}".format(
l_red, l_green, l_blue
)
line_color_alpha = float(l_alpha) / 255
line_color_alpha = l_alpha
# test ratios and define if pillar or letter boxes
output_ratio = float(output_width) / float(output_height)
@ -1283,8 +1282,12 @@ class ExtractReview(pyblish.api.InstancePlugin):
# NOTE Setting only one of `width` or `height` is not allowed
# - settings value can't be None but can have a value of 0
output_width = output_def.get("width") or output_width or None
output_height = output_def.get("height") or output_height or None
output_width = (
output_def.get("output_width") or output_width or None
)
output_height = (
output_def.get("output_height") or output_height or None
)
# Force to use input resolution if output resolution was not defined
# in settings. Resolution from instance is not used when
# 'use_input_res' is set to 'True'.
@ -1294,7 +1297,12 @@ class ExtractReview(pyblish.api.InstancePlugin):
overscan_color_value = "black"
overscan_color = output_def.get("overscan_color")
if overscan_color:
bg_red, bg_green, bg_blue, _ = overscan_color
if len(overscan_color) == 3:
bg_red, bg_green, bg_blue = overscan_color
else:
# Backwards compatibility
bg_red, bg_green, bg_blue, _ = overscan_color
overscan_color_value = "#{0:0>2X}{1:0>2X}{2:0>2X}".format(
bg_red, bg_green, bg_blue
)
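Illustrative settings values accepted by the branch above: the new three-component RGB form, with the legacy four-component RGBA form still tolerated for backwards compatibility.

overscan_color = (16, 16, 16)               # new settings shape
legacy_overscan_color = (16, 16, 16, 255)   # old shape, alpha ignored above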
@ -1503,14 +1511,16 @@ class ExtractReview(pyblish.api.InstancePlugin):
subset_name (str): name of subset
Returns:
list: Containg all output definitions matching entered families.
dict[str, Any]: Containing all output definitions matching entered
families.
"""
outputs = profile.get("outputs") or {}
if not outputs:
return outputs
filtered_outputs = {}
for filename_suffix, output_def in outputs.items():
outputs = profile.get("outputs")
if not outputs:
return filtered_outputs
for output_def in outputs:
filename_suffix = output_def["name"]
output_filters = output_def.get("filter")
# If no filter is set on the output preset, skip filtering and add output
# profile for further processing
@ -1523,16 +1533,16 @@ class ExtractReview(pyblish.api.InstancePlugin):
continue
# Subsets name filters
subset_filters = [
subset_filter
for subset_filter in output_filters.get("subsets", [])
product_name_filters = [
name_filter
for name_filter in output_filters.get("product_names", [])
# Skip empty strings
if subset_filter
if name_filter
]
if subset_name and subset_filters:
if subset_name and product_name_filters:
match = False
for subset_filter in subset_filters:
compiled = re.compile(subset_filter)
for product_name_filter in product_name_filters:
compiled = re.compile(product_name_filter)
if compiled.search(subset_name):
match = True
break
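Illustrative only - output definitions are now a list, each carrying its own "name" (the filename suffix) and product-name filters; the values below are examples.

outputs = [
    {
        "name": "h264",
        "filter": {
            "product_names": ["^renderMain.*"],
        },
    },
]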

View file

@ -4,7 +4,6 @@ from .constants import (
)
from .lib import (
get_general_environments,
get_global_settings,
get_system_settings,
get_project_settings,
get_current_project_settings,
@ -17,7 +16,6 @@ __all__ = (
"PROJECT_SETTINGS_KEY",
"get_general_environments",
"get_global_settings",
"get_system_settings",
"get_project_settings",
"get_current_project_settings",

View file

@ -72,97 +72,22 @@ def _convert_host_imageio(host_settings):
imageio_file_rules["rules"] = new_rules
def _convert_applications_groups(groups, clear_metadata):
environment_key = "environment"
if isinstance(groups, dict):
new_groups = []
for name, item in groups.items():
item["name"] = name
new_groups.append(item)
groups = new_groups
output = {}
group_dynamic_labels = {}
for group in groups:
group_name = group.pop("name")
if "label" in group:
group_dynamic_labels[group_name] = group["label"]
tool_group_envs = group[environment_key]
if isinstance(tool_group_envs, six.string_types):
group[environment_key] = json.loads(tool_group_envs)
variants = {}
variant_dynamic_labels = {}
for variant in group.pop("variants"):
variant_name = variant.pop("name")
label = variant.get("label")
if label and label != variant_name:
variant_dynamic_labels[variant_name] = label
variant_envs = variant[environment_key]
if isinstance(variant_envs, six.string_types):
variant[environment_key] = json.loads(variant_envs)
variants[variant_name] = variant
group["variants"] = variants
if not clear_metadata:
variants["__dynamic_keys_labels__"] = variant_dynamic_labels
output[group_name] = group
if not clear_metadata:
output["__dynamic_keys_labels__"] = group_dynamic_labels
return output
def _convert_applications_system_settings(
ayon_settings, output, clear_metadata
):
# Addon settings
addon_settings = ayon_settings["applications"]
# Remove project settings
addon_settings.pop("only_available", None)
# Applications settings
ayon_apps = addon_settings["applications"]
additional_apps = ayon_apps.pop("additional_apps")
applications = _convert_applications_groups(
ayon_apps, clear_metadata
)
applications["additional_apps"] = _convert_applications_groups(
additional_apps, clear_metadata
)
# Tools settings
tools = _convert_applications_groups(
addon_settings["tool_groups"], clear_metadata
)
output["applications"] = applications
output["tools"] = {"tool_groups": tools}
def _convert_general(ayon_settings, output, default_settings):
# TODO get studio name/code
core_settings = ayon_settings["core"]
environments = core_settings["environments"]
if isinstance(environments, six.string_types):
environments = json.loads(environments)
general = default_settings["general"]
general.update({
"log_to_server": False,
"studio_name": core_settings["studio_name"],
"studio_code": core_settings["studio_code"],
"environment": environments
})
output["general"] = general
output["core"] = ayon_settings["core"]
version_check_interval = (
default_settings["general"]["version_check_interval"]
)
output["general"] = {
"version_check_interval": version_check_interval
}
def _convert_kitsu_system_settings(
ayon_settings, output, addon_versions, default_settings
):
if "kitsu" in ayon_settings:
output["kitsu"] = ayon_settings["kitsu"]
enabled = addon_versions.get("kitsu") is not None
kitsu_settings = default_settings["modules"]["kitsu"]
kitsu_settings["enabled"] = enabled
@ -290,7 +215,7 @@ def convert_system_settings(ayon_settings, default_settings, addon_versions):
"modules": {}
}
if "applications" in ayon_settings:
_convert_applications_system_settings(ayon_settings, output, False)
output["applications"] = ayon_settings["applications"]
if "core" in ayon_settings:
_convert_general(ayon_settings, output, default_settings)
@ -313,27 +238,12 @@ def convert_system_settings(ayon_settings, default_settings, addon_versions):
# --------- Project settings ---------
def _convert_applications_project_settings(ayon_settings, output):
if "applications" not in ayon_settings:
return
output["applications"] = {
"only_available": ayon_settings["applications"]["only_available"]
}
def _convert_blender_project_settings(ayon_settings, output):
if "blender" not in ayon_settings:
return
ayon_blender = ayon_settings["blender"]
_convert_host_imageio(ayon_blender)
ayon_publish = ayon_blender["publish"]
for plugin in ("ExtractThumbnail", "ExtractPlayblast"):
plugin_settings = ayon_publish[plugin]
plugin_settings["presets"] = json.loads(plugin_settings["presets"])
output["blender"] = ayon_blender
@ -353,53 +263,7 @@ def _convert_flame_project_settings(ayon_settings, output):
ayon_flame = ayon_settings["flame"]
ayon_publish_flame = ayon_flame["publish"]
# Plugin 'ExtractSubsetResources' renamed to 'ExtractProductResources'
if "ExtractSubsetResources" in ayon_publish_flame:
ayon_product_resources = ayon_publish_flame["ExtractSubsetResources"]
else:
ayon_product_resources = (
ayon_publish_flame.pop("ExtractProductResources"))
ayon_publish_flame["ExtractSubsetResources"] = ayon_product_resources
# 'ExtractSubsetResources' changed model of 'export_presets_mapping'
# - some keys were moved under 'other_parameters'
new_subset_resources = {}
for item in ayon_product_resources.pop("export_presets_mapping"):
name = item.pop("name")
if "other_parameters" in item:
other_parameters = item.pop("other_parameters")
item.update(other_parameters)
new_subset_resources[name] = item
ayon_product_resources["export_presets_mapping"] = new_subset_resources
# 'imageio' changed model
# - missing subkey 'project' which is in root of 'imageio' model
_convert_host_imageio(ayon_flame)
ayon_imageio_flame = ayon_flame["imageio"]
if "project" not in ayon_imageio_flame:
profile_mapping = ayon_imageio_flame.pop("profilesMapping")
ayon_flame["imageio"] = {
"project": ayon_imageio_flame,
"profilesMapping": profile_mapping
}
ayon_load_flame = ayon_flame["load"]
for plugin_name in ("LoadClip", "LoadClipBatch"):
plugin_settings = ayon_load_flame[plugin_name]
plugin_settings["families"] = plugin_settings.pop("product_types")
plugin_settings["clip_name_template"] = (
plugin_settings["clip_name_template"]
.replace("{folder[name]}", "{asset}")
.replace("{product[name]}", "{subset}")
)
plugin_settings["layer_rename_template"] = (
plugin_settings["layer_rename_template"]
.replace("{folder[name]}", "{asset}")
.replace("{product[name]}", "{subset}")
)
output["flame"] = ayon_flame
@ -410,39 +274,6 @@ def _convert_fusion_project_settings(ayon_settings, output):
ayon_fusion = ayon_settings["fusion"]
_convert_host_imageio(ayon_fusion)
ayon_imageio_fusion = ayon_fusion["imageio"]
if "ocioSettings" in ayon_imageio_fusion:
ayon_ocio_setting = ayon_imageio_fusion.pop("ocioSettings")
paths = ayon_ocio_setting.pop("ocioPathModel")
for key, value in tuple(paths.items()):
new_value = []
if value:
new_value.append(value)
paths[key] = new_value
ayon_ocio_setting["configFilePath"] = paths
ayon_imageio_fusion["ocio"] = ayon_ocio_setting
elif "ocio" in ayon_imageio_fusion:
paths = ayon_imageio_fusion["ocio"].pop("configFilePath")
for key, value in tuple(paths.items()):
new_value = []
if value:
new_value.append(value)
paths[key] = new_value
ayon_imageio_fusion["ocio"]["configFilePath"] = paths
_convert_host_imageio(ayon_imageio_fusion)
ayon_create_saver = ayon_fusion["create"]["CreateSaver"]
ayon_create_saver["temp_rendering_path_template"] = (
ayon_create_saver["temp_rendering_path_template"]
.replace("{product[name]}", "{subset}")
.replace("{product[type]}", "{family}")
.replace("{folder[name]}", "{asset}")
.replace("{task[name]}", "{task}")
)
output["fusion"] = ayon_fusion
@ -452,173 +283,8 @@ def _convert_maya_project_settings(ayon_settings, output):
ayon_maya = ayon_settings["maya"]
# Change key of render settings
ayon_maya["RenderSettings"] = ayon_maya.pop("render_settings")
# Convert extensions mapping
ayon_maya["ext_mapping"] = {
item["name"]: item["value"]
for item in ayon_maya["ext_mapping"]
}
# Maya dirmap
ayon_maya_dirmap = ayon_maya.pop("maya_dirmap")
ayon_maya_dirmap_path = ayon_maya_dirmap["paths"]
ayon_maya_dirmap_path["source-path"] = (
ayon_maya_dirmap_path.pop("source_path")
)
ayon_maya_dirmap_path["destination-path"] = (
ayon_maya_dirmap_path.pop("destination_path")
)
ayon_maya["maya-dirmap"] = ayon_maya_dirmap
# Create plugins
ayon_create = ayon_maya["create"]
ayon_create_static_mesh = ayon_create["CreateUnrealStaticMesh"]
if "static_mesh_prefixes" in ayon_create_static_mesh:
ayon_create_static_mesh["static_mesh_prefix"] = (
ayon_create_static_mesh.pop("static_mesh_prefixes")
)
# --- Publish (START) ---
ayon_publish = ayon_maya["publish"]
try:
attributes = json.loads(
ayon_publish["ValidateAttributes"]["attributes"]
)
except ValueError:
attributes = {}
ayon_publish["ValidateAttributes"]["attributes"] = attributes
try:
SUFFIX_NAMING_TABLE = json.loads(
ayon_publish
["ValidateTransformNamingSuffix"]
["SUFFIX_NAMING_TABLE"]
)
except ValueError:
SUFFIX_NAMING_TABLE = {}
ayon_publish["ValidateTransformNamingSuffix"]["SUFFIX_NAMING_TABLE"] = (
SUFFIX_NAMING_TABLE
)
validate_frame_range = ayon_publish["ValidateFrameRange"]
if "exclude_product_types" in validate_frame_range:
validate_frame_range["exclude_families"] = (
validate_frame_range.pop("exclude_product_types"))
# Extract playblast capture settings
validate_rendern_settings = ayon_publish["ValidateRenderSettings"]
for key in (
"arnold_render_attributes",
"vray_render_attributes",
"redshift_render_attributes",
"renderman_render_attributes",
):
if key not in validate_rendern_settings:
continue
validate_rendern_settings[key] = [
[item["type"], item["value"]]
for item in validate_rendern_settings[key]
]
plugin_path_attributes = ayon_publish["ValidatePluginPathAttributes"]
plugin_path_attributes["attribute"] = {
item["name"]: item["value"]
for item in plugin_path_attributes["attribute"]
}
ayon_capture_preset = ayon_publish["ExtractPlayblast"]["capture_preset"]
display_options = ayon_capture_preset["DisplayOptions"]
for key in ("background", "backgroundBottom", "backgroundTop"):
display_options[key] = _convert_color(display_options[key])
for src_key, dst_key in (
("DisplayOptions", "Display Options"),
("ViewportOptions", "Viewport Options"),
("CameraOptions", "Camera Options"),
):
ayon_capture_preset[dst_key] = ayon_capture_preset.pop(src_key)
viewport_options = ayon_capture_preset["Viewport Options"]
viewport_options["pluginObjects"] = {
item["name"]: item["value"]
for item in viewport_options["pluginObjects"]
}
ayon_playblast_settings = ayon_publish["ExtractPlayblast"]["profiles"]
if ayon_playblast_settings:
for setting in ayon_playblast_settings:
capture_preset = setting["capture_preset"]
display_options = capture_preset["DisplayOptions"]
for key in ("background", "backgroundBottom", "backgroundTop"):
display_options[key] = _convert_color(display_options[key])
for src_key, dst_key in (
("DisplayOptions", "Display Options"),
("ViewportOptions", "Viewport Options"),
("CameraOptions", "Camera Options"),
):
capture_preset[dst_key] = capture_preset.pop(src_key)
viewport_options = capture_preset["Viewport Options"]
viewport_options["pluginObjects"] = {
item["name"]: item["value"]
for item in viewport_options["pluginObjects"]
}
# Extract Camera Alembic bake attributes
try:
bake_attributes = json.loads(
ayon_publish["ExtractCameraAlembic"]["bake_attributes"]
)
except ValueError:
bake_attributes = []
ayon_publish["ExtractCameraAlembic"]["bake_attributes"] = bake_attributes
# --- Publish (END) ---
for renderer_settings in ayon_maya["RenderSettings"].values():
if (
not isinstance(renderer_settings, dict)
or "additional_options" not in renderer_settings
):
continue
renderer_settings["additional_options"] = [
[item["attribute"], item["value"]]
for item in renderer_settings["additional_options"]
]
# Workfile build
ayon_workfile_build = ayon_maya["workfile_build"]
for item in ayon_workfile_build["profiles"]:
for key in ("current_context", "linked_assets"):
for subitem in item[key]:
if "families" in subitem:
break
subitem["families"] = subitem.pop("product_types")
subitem["subset_name_filters"] = subitem.pop(
"product_name_filters")
_convert_host_imageio(ayon_maya)
ayon_maya_load = ayon_maya["load"]
load_colors = ayon_maya_load["colors"]
for key, color in tuple(load_colors.items()):
load_colors[key] = _convert_color(color)
reference_loader = ayon_maya_load["reference_loader"]
reference_loader["namespace"] = (
reference_loader["namespace"]
.replace("{product[name]}", "{subset}")
)
if ayon_maya_load.get("import_loader"):
import_loader = ayon_maya_load["import_loader"]
import_loader["namespace"] = (
import_loader["namespace"]
.replace("{product[name]}", "{subset}")
)
output["maya"] = ayon_maya
@ -627,31 +293,8 @@ def _convert_3dsmax_project_settings(ayon_settings, output):
return
ayon_max = ayon_settings["max"]
_convert_host_imageio(ayon_max)
if "PointCloud" in ayon_max:
point_cloud_attribute = ayon_max["PointCloud"]["attribute"]
new_point_cloud_attribute = {
item["name"]: item["value"]
for item in point_cloud_attribute
}
ayon_max["PointCloud"]["attribute"] = new_point_cloud_attribute
# --- Publish (START) ---
ayon_publish = ayon_max["publish"]
if "ValidateAttributes" in ayon_publish:
try:
attributes = json.loads(
ayon_publish["ValidateAttributes"]["attributes"]
)
except ValueError:
attributes = {}
ayon_publish["ValidateAttributes"]["attributes"] = attributes
if "ValidateLoadedPlugin" in ayon_publish:
loaded_plugin = (
ayon_publish["ValidateLoadedPlugin"]["family_plugins_mapping"]
)
for item in loaded_plugin:
item["families"] = item.pop("product_types")
_convert_host_imageio(ayon_max)
output["max"] = ayon_max
@ -708,15 +351,6 @@ def _convert_nuke_project_settings(ayon_settings, output):
ayon_nuke = ayon_settings["nuke"]
# --- Dirmap ---
dirmap = ayon_nuke.pop("dirmap")
for src_key, dst_key in (
("source_path", "source-path"),
("destination_path", "destination-path"),
):
dirmap["paths"][dst_key] = dirmap["paths"].pop(src_key)
ayon_nuke["nuke-dirmap"] = dirmap
# --- Load ---
ayon_load = ayon_nuke["load"]
ayon_load["LoadClip"]["_representations"] = (
@ -888,18 +522,6 @@ def _convert_photoshop_project_settings(ayon_settings, output):
ayon_photoshop = ayon_settings["photoshop"]
_convert_host_imageio(ayon_photoshop)
ayon_publish_photoshop = ayon_photoshop["publish"]
ayon_colorcoded = ayon_publish_photoshop["CollectColorCodedInstances"]
if "flatten_product_type_template" in ayon_colorcoded:
ayon_colorcoded["flatten_subset_template"] = (
ayon_colorcoded.pop("flatten_product_type_template"))
collect_review = ayon_publish_photoshop["CollectReview"]
if "active" in collect_review:
collect_review["publish"] = collect_review.pop("active")
output["photoshop"] = ayon_photoshop
@ -909,44 +531,15 @@ def _convert_substancepainter_project_settings(ayon_settings, output):
ayon_substance_painter = ayon_settings["substancepainter"]
_convert_host_imageio(ayon_substance_painter)
if "shelves" in ayon_substance_painter:
shelves_items = ayon_substance_painter["shelves"]
new_shelves_items = {
item["name"]: item["value"]
for item in shelves_items
}
ayon_substance_painter["shelves"] = new_shelves_items
output["substancepainter"] = ayon_substance_painter
def _convert_tvpaint_project_settings(ayon_settings, output):
if "tvpaint" not in ayon_settings:
return
ayon_tvpaint = ayon_settings["tvpaint"]
_convert_host_imageio(ayon_tvpaint)
ayon_publish_settings = ayon_tvpaint["publish"]
for plugin_name in (
"ValidateProjectSettings",
"ValidateMarks",
"ValidateStartFrame",
"ValidateAssetName",
):
ayon_value = ayon_publish_settings[plugin_name]
for src_key, dst_key in (
("action_enabled", "optional"),
("action_enable", "active"),
):
if src_key in ayon_value:
ayon_value[dst_key] = ayon_value.pop(src_key)
extract_sequence_setting = ayon_publish_settings["ExtractSequence"]
extract_sequence_setting["review_bg"] = _convert_color(
extract_sequence_setting["review_bg"]
)
output["tvpaint"] = ayon_tvpaint
@ -1031,49 +624,6 @@ def _convert_webpublisher_project_settings(ayon_settings, output):
output["webpublisher"] = ayon_webpublisher
def _convert_deadline_project_settings(ayon_settings, output):
if "deadline" not in ayon_settings:
return
ayon_deadline = ayon_settings["deadline"]
for key in ("deadline_urls",):
ayon_deadline.pop(key)
ayon_deadline_publish = ayon_deadline["publish"]
limit_groups = {
item["name"]: item["value"]
for item in ayon_deadline_publish["NukeSubmitDeadline"]["limit_groups"]
}
ayon_deadline_publish["NukeSubmitDeadline"]["limit_groups"] = limit_groups
maya_submit = ayon_deadline_publish["MayaSubmitDeadline"]
for json_key in ("jobInfo", "pluginInfo"):
src_text = maya_submit.pop(json_key)
try:
value = json.loads(src_text)
except ValueError:
value = {}
maya_submit[json_key] = value
nuke_submit = ayon_deadline_publish["NukeSubmitDeadline"]
nuke_submit["env_search_replace_values"] = {
item["name"]: item["value"]
for item in nuke_submit.pop("env_search_replace_values")
}
nuke_submit["limit_groups"] = {
item["name"]: item["value"] for item in nuke_submit.pop("limit_groups")
}
process_subsetted_job = ayon_deadline_publish["ProcessSubmittedJobOnFarm"]
process_subsetted_job["aov_filter"] = {
item["name"]: item["value"]
for item in process_subsetted_job.pop("aov_filter")
}
output["deadline"] = ayon_deadline
def _convert_royalrender_project_settings(ayon_settings, output):
if "royalrender" not in ayon_settings:
return
@ -1149,50 +699,9 @@ def _convert_global_project_settings(ayon_settings, output, default_settings):
ayon_core = ayon_settings["core"]
_convert_host_imageio(ayon_core)
for key in (
"environments",
"studio_name",
"studio_code",
):
ayon_core.pop(key, None)
# Publish conversion
ayon_publish = ayon_core["publish"]
ayon_collect_audio = ayon_publish["CollectAudio"]
if "audio_product_name" in ayon_collect_audio:
ayon_collect_audio["audio_subset_name"] = (
ayon_collect_audio.pop("audio_product_name"))
for profile in ayon_publish["ExtractReview"]["profiles"]:
if "product_types" in profile:
profile["families"] = profile.pop("product_types")
new_outputs = {}
for output_def in profile.pop("outputs"):
name = output_def.pop("name")
new_outputs[name] = output_def
output_def_filter = output_def["filter"]
if "product_names" in output_def_filter:
output_def_filter["subsets"] = (
output_def_filter.pop("product_names"))
for color_key in ("overscan_color", "bg_color"):
output_def[color_key] = _convert_color(output_def[color_key])
letter_box = output_def["letter_box"]
for color_key in ("fill_color", "line_color"):
letter_box[color_key] = _convert_color(letter_box[color_key])
if "output_width" in output_def:
output_def["width"] = output_def.pop("output_width")
if "output_height" in output_def:
output_def["height"] = output_def.pop("output_height")
profile["outputs"] = new_outputs
# ExtractThumbnail plugin
ayon_extract_thumbnail = ayon_publish["ExtractThumbnail"]
# fix display and view at oiio defaults
@ -1367,13 +876,14 @@ def convert_project_settings(ayon_settings, default_settings):
"houdini",
"resolve",
"unreal",
"applications",
"deadline",
}
for key in exact_match:
if key in ayon_settings:
output[key] = ayon_settings[key]
_convert_host_imageio(output[key])
_convert_applications_project_settings(ayon_settings, output)
_convert_blender_project_settings(ayon_settings, output)
_convert_celaction_project_settings(ayon_settings, output)
_convert_flame_project_settings(ayon_settings, output)
@ -1388,7 +898,6 @@ def convert_project_settings(ayon_settings, default_settings):
_convert_traypublisher_project_settings(ayon_settings, output)
_convert_webpublisher_project_settings(ayon_settings, output)
_convert_deadline_project_settings(ayon_settings, output)
_convert_royalrender_project_settings(ayon_settings, output)
_convert_kitsu_project_settings(ayon_settings, output)
_convert_shotgrid_project_settings(ayon_settings, output)

View file

@ -1271,7 +1271,7 @@
"icon": "{}/app_icons/harmony.png",
"host_name": "harmony",
"environment": {
"AVALON_HARMONY_WORKFILES_ON_LAUNCH": "1"
"AYON_HARMONY_WORKFILES_ON_LAUNCH": "1"
},
"variants": {
"21": {

View file

@ -15,7 +15,8 @@ from .constants import (
from .ayon_settings import (
get_ayon_project_settings,
get_ayon_system_settings
get_ayon_system_settings,
get_ayon_settings,
)
log = logging.getLogger(__name__)
@ -253,14 +254,9 @@ def get_current_project_settings():
return get_project_settings(project_name)
def get_global_settings():
default_settings = load_openpype_default_settings()
return default_settings["system_settings"]["general"]
def get_general_environments():
value = get_system_settings()
return value["general"]["environment"]
settings = get_ayon_settings()
return json.loads(settings["core"]["environments"])
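A hedged usage note: the 'core' addon stores "environments" as a JSON string, so the helper returns it decoded; the content shown is an example, actual values are studio-defined.

envs = get_general_environments()
print(envs)   # e.g. {"STUDIO_TOOLS": "/pipeline/tools"}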
def get_system_settings(*args, **kwargs):

View file

@ -1,4 +0,0 @@
Tests for Pype
--------------
Trigger by:
`pype test --pype`

View file

@ -1,88 +0,0 @@
import os
import sys
import shutil
import tempfile
import contextlib
import pyblish
import pyblish.plugin
from pyblish.vendor import six
# Setup
HOST = 'python'
FAMILY = 'test.family'
REGISTERED = pyblish.plugin.registered_paths()
PACKAGEPATH = pyblish.lib.main_package_path()
ENVIRONMENT = os.environ.get("PYBLISHPLUGINPATH", "")
PLUGINPATH = os.path.join(PACKAGEPATH, '..', 'tests', 'plugins')
def setup():
pyblish.plugin.deregister_all_paths()
def setup_empty():
"""Disable all plug-ins"""
setup()
pyblish.plugin.deregister_all_plugins()
pyblish.plugin.deregister_all_paths()
pyblish.plugin.deregister_all_hosts()
pyblish.plugin.deregister_all_callbacks()
pyblish.plugin.deregister_all_targets()
pyblish.api.deregister_all_discovery_filters()
def teardown():
"""Restore previously REGISTERED paths"""
pyblish.plugin.deregister_all_paths()
for path in REGISTERED:
pyblish.plugin.register_plugin_path(path)
os.environ["PYBLISHPLUGINPATH"] = ENVIRONMENT
pyblish.api.deregister_all_plugins()
pyblish.api.deregister_all_hosts()
pyblish.api.deregister_all_discovery_filters()
pyblish.api.deregister_test()
pyblish.api.__init__()
@contextlib.contextmanager
def captured_stdout():
"""Temporarily reassign stdout to a local variable"""
try:
sys.stdout = six.StringIO()
yield sys.stdout
finally:
sys.stdout = sys.__stdout__
@contextlib.contextmanager
def captured_stderr():
"""Temporarily reassign stderr to a local variable"""
try:
sys.stderr = six.StringIO()
yield sys.stderr
finally:
sys.stderr = sys.__stderr__
@contextlib.contextmanager
def tempdir():
"""Provide path to temporary directory"""
try:
tempdir = tempfile.mkdtemp()
yield tempdir
finally:
shutil.rmtree(tempdir)
def is_in_tests():
"""Returns if process is running in automatic tests mode.
In tests mode different source DB is used, some plugins might be disabled
etc.
"""
return os.environ.get("IS_TEST") == '1'

View file

@ -1,288 +0,0 @@
import pymongo
import bson
import random
from datetime import datetime
import os
class TestPerformance():
'''
Class for testing performance of representation and their 'files'
parts.
Discussion is if embedded array:
'files' : [ {'_id': '1111', 'path':'....},
{'_id'...}]
OR documents:
'files' : {
'1111': {'path':'....'},
'2222': {'path':'...'}
}
is faster.
Current results:
without additional partial index documents is 3x faster
With index is array 50x faster then document
Partial index something like:
db.getCollection('performance_test').createIndex
({'files._id': 1},
{partialFilterExpresion: {'files': {'$exists': true}}})
!DIDNT work for me, had to create manually in Compass
'''
MONGO_URL = 'mongodb://localhost:27017'
MONGO_DB = 'performance_test'
MONGO_COLLECTION = 'performance_test'
MAX_FILE_SIZE_B = 5000
MAX_NUMBER_OF_SITES = 50
ROOT_DIR = "C:/projects"
inserted_ids = []
def __init__(self, version='array'):
'''
It creates and fills collection, based on value of 'version'.
:param version: 'array' - files as embedded array,
'doc' - as document
'''
self.client = pymongo.MongoClient(self.MONGO_URL)
self.db = self.client[self.MONGO_DB]
self.collection_name = self.MONGO_COLLECTION
self.version = version
if self.version != 'array':
self.collection_name = self.MONGO_COLLECTION + '_doc'
self.collection = self.db[self.collection_name]
self.ids = [] # for testing
self.inserted_ids = []
def prepare(self, no_of_records=100000, create_files=False):
'''
Produce 'no_of_records' of representations with 'files' segment.
It depends on 'version' value in constructor, 'arrray' or 'doc'
:return:
'''
print('Purging {} collection'.format(self.collection_name))
self.collection.delete_many({})
id = bson.objectid.ObjectId()
insert_recs = []
for i in range(no_of_records):
file_id = bson.objectid.ObjectId()
file_id2 = bson.objectid.ObjectId()
file_id3 = bson.objectid.ObjectId()
self.inserted_ids.extend([file_id, file_id2, file_id3])
version_str = "v{:03d}".format(i + 1)
file_name = "test_Cylinder_workfileLookdev_{}.mb".\
format(version_str)
document = {"files": self.get_files(self.version, i + 1,
file_id, file_id2, file_id3,
create_files)
,
"context": {
"subset": "workfileLookdev",
"username": "petrk",
"task": "lookdev",
"family": "workfile",
"hierarchy": "Assets",
"project": {"code": "test", "name": "Test"},
"version": i + 1,
"asset": "Cylinder",
"representation": "mb",
"root": self.ROOT_DIR
},
"dependencies": [],
"name": "mb",
"parent": {"oid": '{}'.format(id)},
"data": {
"path": "C:\\projects\\test_performance\\Assets\\Cylinder\\publish\\workfile\\workfileLookdev\\{}\\{}".format(version_str, file_name), # noqa: E501
"template": "{root[work]}\\{project[name]}\\{hierarchy}\\{asset}\\publish\\{family}\\{subset}\\v{version:0>3}\\{project[code]}_{asset}_{subset}_v{version:0>3}<_{output}><.{frame:0>4}>.{representation}" # noqa: E501
},
"type": "representation",
"schema": "openpype:representation-2.0"
}
insert_recs.append(document)
print('Prepared {} records in {} collection'.
format(no_of_records, self.collection_name))
self.collection.insert_many(insert_recs)
# TODO refactore to produce real array and not needeing ugly regex
self.collection.insert_one({"inserted_id": self.inserted_ids})
print('-' * 50)
def run(self, queries=1000, loops=3):
'''
Run X'queries' that are searching collection Y'loops' times
:param queries: how many times do ..find(...)
:param loops: loop of testing X queries
:return: None
'''
print('Testing version {} on {}'.format(self.version,
self.collection_name))
print('Queries rung {} in {} loops'.format(queries, loops))
inserted_ids = list(self.collection.
find({"inserted_id": {"$exists": True}}))
import re
self.ids = re.findall("'[0-9a-z]*'", str(inserted_ids))
import time
found_cnt = 0
for _ in range(loops):
print('Starting loop {}'.format(_))
start = time.time()
for _ in range(queries):
# val = random.choice(self.ids)
# val = val.replace("'", '')
val = random.randint(0, 50)
print(val)
if (self.version == 'array'):
# prepared for partial index, without 'files': exists
# wont engage
found = self.collection.\
find({'files': {"$exists": True},
'files.sites.name': "local_{}".format(val)}).\
count()
else:
key = "files.{}".format(val)
found = self.collection.find_one({key: {"$exists": True}})
print("found {} records".format(found))
# if found:
# found_cnt += len(list(found))
end = time.time()
print('duration per loop {}'.format(end - start))
print("found_cnt {}".format(found_cnt))
def get_files(self, mode, i, file_id, file_id2, file_id3,
create_files=False):
'''
Wrapper to decide if 'array' or document version should be used
:param mode: 'array'|'doc'
:param i: step number
:param file_id: ObjectId of first dummy file
:param file_id2: ..
:param file_id3: ..
:return:
'''
if mode == 'array':
return self.get_files_array(i, file_id, file_id2, file_id3,
create_files)
else:
return self.get_files_doc(i, file_id, file_id2, file_id3)
def get_files_array(self, i, file_id, file_id2, file_id3,
create_files=False):
ret = [
{
"path": "{root[work]}" + "{root[work]}/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_A_workfileLookdev_v{:03d}.dat".format(i, i), # noqa: E501
"_id": '{}'.format(file_id),
"hash": "temphash",
"sites": self.get_sites(self.MAX_NUMBER_OF_SITES),
"size": random.randint(0, self.MAX_FILE_SIZE_B)
},
{
"path": "{root[work]}" + "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_B_workfileLookdev_v{:03d}.dat".format(i, i), # noqa: E501
"_id": '{}'.format(file_id2),
"hash": "temphash",
"sites": self.get_sites(self.MAX_NUMBER_OF_SITES),
"size": random.randint(0, self.MAX_FILE_SIZE_B)
},
{
"path": "{root[work]}" + "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_C_workfileLookdev_v{:03d}.dat".format(i, i), # noqa: E501
"_id": '{}'.format(file_id3),
"hash": "temphash",
"sites": self.get_sites(self.MAX_NUMBER_OF_SITES),
"size": random.randint(0, self.MAX_FILE_SIZE_B)
}
]
if create_files:
for f in ret:
path = f.get("path").replace("{root[work]}", self.ROOT_DIR)
os.makedirs(os.path.dirname(path), exist_ok=True)
with open(path, 'wb') as fp:
fp.write(os.urandom(f.get("size")))
return ret
def get_files_doc(self, i, file_id, file_id2, file_id3):
ret = {}
ret['{}'.format(file_id)] = {
"path": "{root[work]}" +
"/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/" # noqa: E501
"v{:03d}/test_CylinderA_workfileLookdev_v{:03d}.mb".format(i, i), # noqa: E501
"hash": "temphash",
"sites": ["studio"],
"size": 87236
}
ret['{}'.format(file_id2)] = {
"path": "{root[work]}" +
"/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/" # noqa: E501
"v{:03d}/test_CylinderB_workfileLookdev_v{:03d}.mb".format(i, i), # noqa: E501
"hash": "temphash",
"sites": ["studio"],
"size": 87236
}
ret['{}'.format(file_id3)] = {
"path": "{root[work]}" +
"/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/" # noqa: E501
"v{:03d}/test_CylinderC_workfileLookdev_v{:03d}.mb".format(i, i), # noqa: E501
"hash": "temphash",
"sites": ["studio"],
"size": 87236
}
return ret
def get_sites(self, number_of_sites=50):
"""
Return array of sites declaration.
Currently on 1st site has "created_dt" fillled, which should
trigger upload to 'gdrive' site.
'gdrive' site is appended, its destination for syncing for
Sync Server
Args:
number_of_sites:
Returns:
"""
sites = []
for i in range(number_of_sites):
site = {'name': "local_{}".format(i)}
# do not create null 'created_dt' field, Mongo doesnt like it
if i == 0:
site['created_dt'] = datetime.now()
sites.append(site)
sites.append({'name': "gdrive"})
return sites
if __name__ == '__main__':
tp = TestPerformance('array')
tp.prepare(no_of_records=10000, create_files=True)
# tp.run(10, 3)
# print('-'*50)
#
# tp = TestPerformance('doc')
# tp.prepare() # enable to prepare data
# tp.run(1000, 3)

View file

@ -1,43 +0,0 @@
from ayon_core.pipeline import (
install_host,
LegacyCreator,
register_creator_plugin,
discover_creator_plugins,
)
class MyTestCreator(LegacyCreator):
my_test_property = "A"
def __init__(self, name, asset, options=None, data=None):
super(MyTestCreator, self).__init__(self, name, asset,
options=None, data=None)
# this is hack like no other - we need to inject our own avalon host
# and bypass all its validation. Avalon hosts are modules that needs
# `ls` callable as attribute. Voila:
class Test:
__name__ = "test"
ls = len
@staticmethod
def install():
register_creator_plugin(MyTestCreator)
def test_avalon_plugin_presets(monkeypatch, printer):
install_host(Test)
plugins = discover_creator_plugins()
printer("Test if we got our test plugin")
assert MyTestCreator in plugins
for p in plugins:
if p.__name__ == "MyTestCreator":
printer("Test if we have overridden existing property")
assert p.my_test_property == "B"
printer("Test if we have overridden superclass property")
assert p.active is False
printer("Test if we have added new property")
assert p.new_property == "new"

View file

@@ -1,25 +0,0 @@
# Test for backward compatibility of the restructuring of lib.py into the
# lib library. Contains simple imports that should still work.
def test_backward_compatibility(printer):
printer("Test if imports still work")
try:
from ayon_core.lib import execute_hook
from ayon_core.lib import PypeHook
from ayon_core.lib import ApplicationLaunchFailed
from ayon_core.lib import get_ffmpeg_tool_path
from ayon_core.lib import get_last_version_from_path
from ayon_core.lib import get_paths_from_environ
from ayon_core.lib import get_version_from_path
from ayon_core.lib import version_up
from ayon_core.lib import get_ffprobe_streams
from ayon_core.lib import source_hash
from ayon_core.lib import run_subprocess
except ImportError:
    raise

View file

@@ -1,60 +0,0 @@
import os
import pyblish.api
import pyblish.util
import pyblish.plugin
from ayon_core.pipeline.publish.lib import filter_pyblish_plugins
from . import lib
def test_pyblish_plugin_filter_modifier(printer, monkeypatch):
"""
Test if pyblish filter can filter and modify plugins on-the-fly.
"""
lib.setup_empty()
monkeypatch.setitem(os.environ, 'PYBLISHPLUGINPATH', '')
plugins = pyblish.api.registered_plugins()
printer("Test if we have no registered plugins")
assert len(plugins) == 0
paths = pyblish.api.registered_paths()
printer("Test if we have no registered plugin paths")
assert len(paths) == 0
class MyTestPlugin(pyblish.api.InstancePlugin):
my_test_property = 1
label = "Collect Renderable Camera(s)"
hosts = ["test"]
families = ["default"]
pyblish.api.register_host("test")
pyblish.api.register_plugin(MyTestPlugin)
pyblish.api.register_discovery_filter(filter_pyblish_plugins)
plugins = pyblish.api.discover()
printer("Test if only one plugin was discovered")
assert len(plugins) == 1
printer("Test if properties are modified correctly")
assert plugins[0].label == "loaded from preset"
assert plugins[0].families == ["changed", "by", "preset"]
assert plugins[0].optional is True
lib.teardown()
def test_pyblish_plugin_filter_removal(monkeypatch):
""" Test that plugin can be removed by filter """
lib.setup_empty()
monkeypatch.setitem(os.environ, 'PYBLISHPLUGINPATH', '')
plugins = pyblish.api.registered_plugins()
class MyTestRemovedPlugin(pyblish.api.InstancePlugin):
my_test_property = 1
label = "Collect Renderable Camera(s)"
hosts = ["test"]
families = ["default"]
pyblish.api.register_host("test")
pyblish.api.register_plugin(MyTestRemovedPlugin)
pyblish.api.register_discovery_filter(filter_pyblish_plugins)
plugins = pyblish.api.discover()
assert len(plugins) == 0
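# Illustrative only: a discovery filter registered through
# pyblish.api.register_discovery_filter() is a callable that receives the
# list of discovered plugins and may mutate it in place. A hypothetical
# filter doing what these two tests expect could look roughly like:
#     def preset_filter(plugins):
#         for plugin in list(plugins):
#             if plugin.__name__ == "MyTestPlugin":
#                 plugin.label = "loaded from preset"
#                 plugin.families = ["changed", "by", "preset"]
#                 plugin.optional = True
#             elif plugin.__name__ == "MyTestRemovedPlugin":
#                 plugins.remove(plugin)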

View file

@@ -1,5 +1,8 @@
from .window import WorkfileBuildPlaceholderDialog
from .lib import open_template_ui
__all__ = (
"WorkfileBuildPlaceholderDialog",
"open_template_ui"
)

View file

@@ -0,0 +1,28 @@
import traceback
from qtpy import QtWidgets
from ayon_core.tools.utils.dialogs import show_message_dialog
def open_template_ui(builder, main_window):
"""Open template from `builder`
Asks the user about overwriting the current scene and feeds back exceptions.
"""
result = QtWidgets.QMessageBox.question(
main_window,
"Opening template",
"Caution! You will loose unsaved changes.\nDo you want to continue?",
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No
)
if result == QtWidgets.QMessageBox.Yes:
try:
builder.open_template()
except Exception:
show_message_dialog(
title="Template Load Failed",
message="".join(traceback.format_exc()),
parent=main_window,
level="critical"
)
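# Illustrative only: a host integration would typically call this from a
# menu or tool action, passing its own template builder instance and Qt main
# window. The names below are placeholders, not real ayon_core APIs.
#     def _on_open_template_action(host_main_window):
#         builder = MyHostTemplateBuilder()  # host-specific builder exposing
#                                            # an open_template() method
#         open_template_ui(builder, host_main_window)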

View file

@@ -10,8 +10,6 @@ wsrpc_aiohttp = "^3.1.1" # websocket server
Click = "^8"
clique = "1.6.*"
jsonschema = "^2.6.0"
pymongo = "^3.11.2"
log4mongo = "^1.7"
pyblish-base = "^1.8.11"
pynput = "^1.7.2" # Timers manager - TODO remove
speedcopy = "^2.1"

View file

@@ -9,7 +9,7 @@ from ayon_server.settings import (
task_types_enum,
)
from ayon_server.types import ColorRGBA_uint8
from ayon_server.types import ColorRGB_uint8, ColorRGBA_uint8
class ValidateBaseModel(BaseSettingsModel):
@@ -387,8 +387,8 @@ class ExtractReviewOutputDefModel(BaseSettingsModel):
"Crop input overscan. See the documentation for more information."
)
)
overscan_color: ColorRGBA_uint8 = SettingsField(
(0, 0, 0, 0.0),
overscan_color: ColorRGB_uint8 = SettingsField(
(0, 0, 0),
title="Overscan color",
description=(
"Overscan color is used when input aspect ratio is not"
@@ -901,7 +901,7 @@ DEFAULT_PUBLISH_VALUES = {
"single_frame_filter": "single_frame"
},
"overscan_crop": "",
"overscan_color": [0, 0, 0, 1.0],
"overscan_color": [0, 0, 0],
"width": 1920,
"height": 1080,
"scale_pixel_aspect": True,
@@ -946,7 +946,7 @@ DEFAULT_PUBLISH_VALUES = {
"single_frame_filter": "multi_frame"
},
"overscan_crop": "",
"overscan_color": [0, 0, 0, 1.0],
"overscan_color": [0, 0, 0],
"width": 0,
"height": 0,
"scale_pixel_aspect": True,

View file

@@ -39,9 +39,9 @@ class ValidateCameraAttributesModel(BaseSettingsModel):
class FamilyMappingItemModel(BaseSettingsModel):
product_types: list[str] = SettingsField(
families: list[str] = SettingsField(
default_factory=list,
title="Product Types"
title="Families"
)
plugins: list[str] = SettingsField(
default_factory=list,

View file

@@ -1,40 +1,72 @@
from ayon_server.settings import BaseSettingsModel, SettingsField
from ayon_server.types import ColorRGBA_uint8
from ayon_server.types import ColorRGB_float
class ColorsSetting(BaseSettingsModel):
model: ColorRGBA_uint8 = SettingsField(
(209, 132, 30, 1.0), title="Model:")
rig: ColorRGBA_uint8 = SettingsField(
(59, 226, 235, 1.0), title="Rig:")
pointcache: ColorRGBA_uint8 = SettingsField(
(94, 209, 30, 1.0), title="Pointcache:")
animation: ColorRGBA_uint8 = SettingsField(
(94, 209, 30, 1.0), title="Animation:")
ass: ColorRGBA_uint8 = SettingsField(
(249, 135, 53, 1.0), title="Arnold StandIn:")
camera: ColorRGBA_uint8 = SettingsField(
(136, 114, 244, 1.0), title="Camera:")
fbx: ColorRGBA_uint8 = SettingsField(
(215, 166, 255, 1.0), title="FBX:")
mayaAscii: ColorRGBA_uint8 = SettingsField(
(67, 174, 255, 1.0), title="Maya Ascii:")
mayaScene: ColorRGBA_uint8 = SettingsField(
(67, 174, 255, 1.0), title="Maya Scene:")
setdress: ColorRGBA_uint8 = SettingsField(
(255, 250, 90, 1.0), title="Set Dress:")
layout: ColorRGBA_uint8 = SettingsField((
255, 250, 90, 1.0), title="Layout:")
vdbcache: ColorRGBA_uint8 = SettingsField(
(249, 54, 0, 1.0), title="VDB Cache:")
vrayproxy: ColorRGBA_uint8 = SettingsField(
(255, 150, 12, 1.0), title="VRay Proxy:")
vrayscene_layer: ColorRGBA_uint8 = SettingsField(
(255, 150, 12, 1.0), title="VRay Scene:")
yeticache: ColorRGBA_uint8 = SettingsField(
(99, 206, 220, 1.0), title="Yeti Cache:")
yetiRig: ColorRGBA_uint8 = SettingsField(
(0, 205, 125, 1.0), title="Yeti Rig:")
model: ColorRGB_float = SettingsField(
(0.82, 0.52, 0.12),
title="Model:"
)
rig: ColorRGB_float = SettingsField(
(0.23, 0.89, 0.92),
title="Rig:"
)
pointcache: ColorRGB_float = SettingsField(
(0.37, 0.82, 0.12),
title="Pointcache:"
)
animation: ColorRGB_float = SettingsField(
(0.37, 0.82, 0.12),
title="Animation:"
)
ass: ColorRGB_float = SettingsField(
(0.98, 0.53, 0.21),
title="Arnold StandIn:"
)
camera: ColorRGB_float = SettingsField(
(0.53, 0.45, 0.96),
title="Camera:"
)
fbx: ColorRGB_float = SettingsField(
(0.84, 0.65, 1.0),
title="FBX:"
)
mayaAscii: ColorRGB_float = SettingsField(
(0.26, 0.68, 1.0),
title="Maya Ascii:"
)
mayaScene: ColorRGB_float = SettingsField(
(0.26, 0.68, 1.0),
title="Maya Scene:"
)
setdress: ColorRGB_float = SettingsField(
(1.0, 0.98, 0.35),
title="Set Dress:"
)
layout: ColorRGB_float = SettingsField(
(1.0, 0.98, 0.35),
title="Layout:"
)
vdbcache: ColorRGB_float = SettingsField(
(0.98, 0.21, 0.0),
title="VDB Cache:"
)
vrayproxy: ColorRGB_float = SettingsField(
(1.0, 0.59, 0.05),
title="VRay Proxy:"
)
vrayscene_layer: ColorRGB_float = SettingsField(
(1.0, 0.59, 0.05),
title="VRay Scene:"
)
yeticache: ColorRGB_float = SettingsField(
(0.39, 0.81, 0.86),
title="Yeti Cache:"
)
yetiRig: ColorRGB_float = SettingsField(
(0.0, 0.80, 0.49),
title="Yeti Rig:"
)
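# Note: the new float defaults appear to be the previous 0-255 channel values
# normalized to the 0-1 range with the alpha component dropped, e.g. for
# "model":
#     >>> [round(c / 255, 2) for c in (209, 132, 30)]
#     [0.82, 0.52, 0.12]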
class ReferenceLoaderModel(BaseSettingsModel):
@@ -67,54 +99,22 @@ class LoadersModel(BaseSettingsModel):
DEFAULT_LOADERS_SETTING = {
"colors": {
"model": [
209, 132, 30, 1.0
],
"rig": [
59, 226, 235, 1.0
],
"pointcache": [
94, 209, 30, 1.0
],
"animation": [
94, 209, 30, 1.0
],
"ass": [
249, 135, 53, 1.0
],
"camera": [
136, 114, 244, 1.0
],
"fbx": [
215, 166, 255, 1.0
],
"mayaAscii": [
67, 174, 255, 1.0
],
"mayaScene": [
67, 174, 255, 1.0
],
"setdress": [
255, 250, 90, 1.0
],
"layout": [
255, 250, 90, 1.0
],
"vdbcache": [
249, 54, 0, 1.0
],
"vrayproxy": [
255, 150, 12, 1.0
],
"vrayscene_layer": [
255, 150, 12, 1.0
],
"yeticache": [
99, 206, 220, 1.0
],
"yetiRig": [
0, 205, 125, 1.0
]
"model": [0.82, 0.52, 0.12],
"rig": [0.23, 0.89, 0.92],
"pointcache": [0.37, 0.82, 0.12],
"animation": [0.37, 0.82, 0.12],
"ass": [0.98, 0.53, 0.21],
"camera":[0.53, 0.45, 0.96],
"fbx": [0.84, 0.65, 1.0],
"mayaAscii": [0.26, 0.68, 1.0],
"mayaScene": [0.26, 0.68, 1.0],
"setdress": [1.0, 0.98, 0.35],
"layout": [1.0, 0.98, 0.35],
"vdbcache": [0.98, 0.21, 0.0],
"vrayproxy": [1.0, 0.59, 0.05],
"vrayscene_layer": [1.0, 0.59, 0.05],
"yeticache": [0.39, 0.81, 0.86],
"yetiRig": [0.0, 0.80, 0.49],
},
"reference_loader": {
"namespace": "{folder[name]}_{product[name]}_##_",

View file

@@ -6,7 +6,7 @@ from ayon_server.settings import (
ensure_unique_names,
task_types_enum,
)
from ayon_server.types import ColorRGBA_uint8
from ayon_server.types import ColorRGB_float
def hardware_falloff_enum():
@@ -54,17 +54,17 @@ class DisplayOptionsSetting(BaseSettingsModel):
override_display: bool = SettingsField(
True, title="Override display options"
)
background: ColorRGBA_uint8 = SettingsField(
(125, 125, 125, 1.0), title="Background Color"
background: ColorRGB_float = SettingsField(
(0.5, 0.5, 0.5), title="Background Color"
)
displayGradient: bool = SettingsField(
True, title="Display background gradient"
)
backgroundTop: ColorRGBA_uint8 = SettingsField(
(125, 125, 125, 1.0), title="Background Top"
backgroundTop: ColorRGB_float = SettingsField(
(0.5, 0.5, 0.5), title="Background Top"
)
backgroundBottom: ColorRGBA_uint8 = SettingsField(
(125, 125, 125, 1.0), title="Background Bottom"
backgroundBottom: ColorRGB_float = SettingsField(
(0.5, 0.5, 0.5), title="Background Bottom"
)
@@ -283,22 +283,19 @@ DEFAULT_PLAYBLAST_SETTING = {
"DisplayOptions": {
"override_display": True,
"background": [
125,
125,
125,
1.0
0.5,
0.5,
0.5
],
"backgroundBottom": [
125,
125,
125,
1.0
0.5,
0.5,
0.5
],
"backgroundTop": [
125,
125,
125,
1.0
0.5,
0.5,
0.5
],
"displayGradient": True
},

View file

@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring addon version."""
__version__ = "0.1.8"
__version__ = "0.1.9"

Some files were not shown because too many files have changed in this diff.