Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)
Merge branch 'develop' into feature/houdini_allow_taking_snapshoots
Commit ee5cd1b0f6
13 changed files with 328 additions and 131 deletions

@@ -38,15 +38,15 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin,
             context_label = "{} > {}".format(*context)
             instance_label = "{} > {}".format(folderPath, task)
             message = (
-                "Instance '{}' publishes to different context than current "
-                "context: {}. Current context: {}".format(
+                "Instance '{}' publishes to different folder or task "
+                "than current context: {}. Current context: {}".format(
                     instance.name, instance_label, context_label
                 )
             )
             raise PublishValidationError(
                 message=message,
                 description=(
-                    "## Publishing to a different context data\n"
+                    "## Publishing to a different context folder or task\n"
                     "There are publish instances present which are publishing "
                     "into a different folder path or task than your current context.\n\n"
                     "Usually this is not what you want but there can be cases "

|
|
|
|||
|
|
@@ -22,15 +22,15 @@ class MayaAddon(AYONAddon, IHostAddon):
            if norm_path not in new_python_paths:
                new_python_paths.append(norm_path)

        # add vendor path
        new_python_paths.append(
            os.path.join(MAYA_ROOT_DIR, "vendor", "python")
        )
        env["PYTHONPATH"] = os.pathsep.join(new_python_paths)

        # Set default environments
        envs = {
            "AYON_LOG_NO_COLORS": "1",
            # For python module 'qtpy'
            "QT_API": "PySide2",
            # For python module 'Qt'
            "QT_PREFERRED_BINDING": "PySide2"
        }
        for key, value in envs.items():
            env[key] = value

@@ -1,4 +1,5 @@
 import os
+import json
 import logging
 from functools import partial
 

@@ -214,8 +215,18 @@ def install(project_settings):
         )
         return
 
-    config = project_settings["maya"]["scriptsmenu"]["definition"]
-    _menu = project_settings["maya"]["scriptsmenu"]["name"]
+    menu_settings = project_settings["maya"]["scriptsmenu"]
+    menu_name = menu_settings["name"]
+    config = menu_settings["definition"]
+
+    if menu_settings.get("definition_type") == "definition_json":
+        data = menu_settings["definition_json"]
+        try:
+            config = json.loads(data)
+        except json.JSONDecodeError as exc:
+            print("Skipping studio menu, error decoding JSON definition.")
+            log.error(exc)
+            return
 
     if not config:
         log.warning("Skipping studio menu, no definition found.")

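The new `definition_json` branch above feeds whatever the studio pasted into the setting through `json.loads` and skips the menu when the value is not valid JSON. A minimal sketch of a payload it would accept; the item keys are assumed from the scriptsmenu README linked in the settings, not taken from this diff:

import json

# Hypothetical menu_settings["definition_json"] value. The keys
# ("type", "command", "sourcetype", "title") follow the scriptsmenu
# configuration format and are assumptions for illustration.
data = """
[
    {
        "type": "action",
        "command": "import maya.cmds as cmds; cmds.polyCube()",
        "sourcetype": "python",
        "title": "Make Cube"
    }
]
"""

try:
    config = json.loads(data)
except json.JSONDecodeError as exc:
    # Same fallback as install() above: skip the menu on bad JSON.
    config = None
    print("Skipping studio menu, error decoding JSON definition.", exc)

print(config)
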
@@ -223,8 +234,8 @@ def install(project_settings):
 
     # run the launcher for Maya menu
     studio_menu = launchformaya.main(
-        title=_menu.title(),
-        objectName=_menu.title().lower().replace(" ", "_")
+        title=menu_name.title(),
+        objectName=menu_name.title().lower().replace(" ", "_")
     )
 
     # apply configuration

@@ -125,6 +125,11 @@ class ImportMayaLoader(ayon_core.hosts.maya.api.plugin.Loader):
         )
     ]
 
+    @classmethod
+    def apply_settings(cls, project_settings):
+        super(ImportMayaLoader, cls).apply_settings(project_settings)
+        cls.enabled = cls.load_settings["import_loader"].get("enabled", True)
+
     def load(self, context, name=None, namespace=None, data=None):
         import maya.cmds as cmds
 

@@ -8,7 +8,7 @@ from maya import cmds # noqa
 import pyblish.api
 from ayon_core.hosts.maya.api import lib
 
-SHAPE_ATTRS = ["castsShadows",
+SHAPE_ATTRS = {"castsShadows",
                "receiveShadows",
                "motionBlur",
                "primaryVisibility",

@@ -16,8 +16,7 @@ SHAPE_ATTRS = ["castsShadows",
                "visibleInReflections",
                "visibleInRefractions",
                "doubleSided",
-               "opposite"]
-SHAPE_ATTRS = set(SHAPE_ATTRS)
+               "opposite"}
 
 
 def get_pxr_multitexture_file_attrs(node):

@@ -59,9 +58,8 @@ for node_type in list(FILE_NODES.keys()):
     if node_type not in all_node_types:
         FILE_NODES.pop(node_type)
 
-for node_type in RENDER_SET_TYPES:
-    if node_type not in all_node_types:
-        RENDER_SET_TYPES.remove(node_type)
+RENDER_SET_TYPES = [node_type for node_type in RENDER_SET_TYPES
+                    if node_type in all_node_types]
 del all_node_types
 
 # Cache pixar dependency node types so we can perform a type lookup against it

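The list comprehension above replaces a loop that called `RENDER_SET_TYPES.remove()` while iterating over the same list, a pattern that skips the element following each removal. A standalone sketch of the difference; the type names are placeholders, not the real RENDER_SET_TYPES values:

all_node_types = {"objectSet", "VRaySet"}

# Old pattern: removing from a list while iterating over it skips the
# element that slides into the removed slot ("b" is never checked).
types_to_filter = ["a", "b", "VRaySet"]
for node_type in types_to_filter:
    if node_type not in all_node_types:
        types_to_filter.remove(node_type)
print(types_to_filter)  # ['b', 'VRaySet'] -- 'b' incorrectly survived

# New pattern: build a new, filtered list instead.
types_to_filter = ["a", "b", "VRaySet"]
types_to_filter = [node_type for node_type in types_to_filter
                   if node_type in all_node_types]
print(types_to_filter)  # ['VRaySet']
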
@@ -109,8 +107,7 @@ def get_look_attrs(node):
     if cmds.objectType(node, isAType="shape"):
         attrs = cmds.listAttr(node, changedSinceFileOpen=True) or []
         for attr in attrs:
-            if attr in SHAPE_ATTRS or \
-               attr not in SHAPE_ATTRS and attr.startswith('ai'):
+            if attr in SHAPE_ATTRS or attr.startswith('ai'):
                 result.append(attr)
     return result
 

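Between the set literal a few hunks up and the simplified condition above, the shape-attribute filter now reads as a plain membership test plus an "ai" prefix check (the old `attr not in SHAPE_ATTRS and attr.startswith('ai')` arm was logically redundant). A small self-contained sketch of the resulting filter:

# Set literal: O(1) membership checks, no separate set(SHAPE_ATTRS) step.
SHAPE_ATTRS = {
    "castsShadows",
    "receiveShadows",
    "motionBlur",
    "primaryVisibility",
}

changed_attrs = ["castsShadows", "translateX", "aiSubdivType"]
result = [
    attr for attr in changed_attrs
    if attr in SHAPE_ATTRS or attr.startswith("ai")
]
print(result)  # ['castsShadows', 'aiSubdivType']
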
@@ -290,7 +287,6 @@ class CollectLook(pyblish.api.InstancePlugin):
     families = ["look"]
     label = "Collect Look"
     hosts = ["maya"]
-    maketx = True
 
     def process(self, instance):
         """Collect the Look in the instance with the correct layer settings"""

@@ -302,15 +298,12 @@ class CollectLook(pyblish.api.InstancePlugin):
         """Collect looks.
 
         Args:
-            instance: Instance to collect.
+            instance (pyblish.api.Instance): Instance to collect.
 
         """
         self.log.debug("Looking for look associations "
                        "for %s" % instance.data['name'])
 
-        # Lookup set (optimization)
-        instance_lookup = set(cmds.ls(instance, long=True))
-
         # Discover related object sets
         self.log.debug("Gathering sets ...")
         sets = self.collect_sets(instance)

@@ -351,75 +344,15 @@ class CollectLook(pyblish.api.InstancePlugin):
         # Collect file nodes used by shading engines (if we have any)
         files = []
         look_sets = list(sets.keys())
-        shader_attrs = [
-            "surfaceShader",
-            "volumeShader",
-            "displacementShader",
-            "aiSurfaceShader",
-            "aiVolumeShader",
-            "rman__surface",
-            "rman__displacement"
-        ]
         if look_sets:
             self.log.debug("Found look sets: {}".format(look_sets))
-
-            # Get all material attrs for all look sets to retrieve their inputs
-            existing_attrs = []
-            for look in look_sets:
-                for attr in shader_attrs:
-                    if cmds.attributeQuery(attr, node=look, exists=True):
-                        existing_attrs.append("{}.{}".format(look, attr))
-
-            materials = cmds.listConnections(existing_attrs,
-                                             source=True,
-                                             destination=False) or []
-
-            self.log.debug("Found materials:\n{}".format(materials))
-
-            self.log.debug("Found the following sets:\n{}".format(look_sets))
-            # Get the entire node chain of the look sets
-            # history = cmds.listHistory(look_sets, allConnections=True)
-            # if materials list is empty, listHistory() will crash with
-            # RuntimeError
-            history = set()
-            if materials:
-                history = set(
-                    cmds.listHistory(materials, allConnections=True))
-
-            # Since we retrieved history only of the connected materials
-            # connected to the look sets above we now add direct history
-            # for some of the look sets directly
-            # handling render attribute sets
-
-            # Maya (at least 2024) crashes with Warning when render set type
-            # isn't available. cmds.ls() will return empty list
-            if RENDER_SET_TYPES:
-                render_sets = cmds.ls(look_sets, type=RENDER_SET_TYPES)
-                if render_sets:
-                    history.update(
-                        cmds.listHistory(render_sets,
-                                         future=False,
-                                         pruneDagObjects=True)
-                        or []
-                    )
-
-            # Ensure unique entries only
-            history = list(history)
-
-            files = cmds.ls(history,
-                            # It's important only node types are passed that
-                            # exist (e.g. for loaded plugins) because otherwise
-                            # the result will turn back empty
-                            type=list(FILE_NODES.keys()),
-                            long=True)
-
-            # Sort for log readability
-            files.sort()
+            files = self.collect_file_nodes(look_sets)
+
         self.log.debug("Collected file nodes:\n{}".format(files))
-        # Collect textures if any file nodes are found
 
+        # Collect texture resources if any file nodes are found
         resources = []
-        for node in files: # sort for log readability
+        for node in files:
             resources.extend(self.collect_resources(node))
         instance.data["resources"] = resources
         self.log.debug("Collected resources: {}".format(resources))

@@ -439,6 +372,78 @@ class CollectLook(pyblish.api.InstancePlugin):
 
         self.log.debug("Collected look for %s" % instance)
 
+    def collect_file_nodes(self, look_sets):
+        """Get the entire node chain of the look sets and return file nodes
+
+        Arguments:
+            look_sets (List[str]): List of sets and shading engines relevant
+                to the look.
+
+        Returns:
+            List[str]: List of file node names.
+
+        """
+
+        shader_attrs = [
+            "surfaceShader",
+            "volumeShader",
+            "displacementShader",
+            "aiSurfaceShader",
+            "aiVolumeShader",
+            "rman__surface",
+            "rman__displacement"
+        ]
+
+        # Get all material attrs for all look sets to retrieve their inputs
+        existing_attrs = []
+        for look_set in look_sets:
+            for attr in shader_attrs:
+                if cmds.attributeQuery(attr, node=look_set, exists=True):
+                    existing_attrs.append("{}.{}".format(look_set, attr))
+
+        materials = cmds.listConnections(existing_attrs,
+                                         source=True,
+                                         destination=False) or []
+
+        self.log.debug("Found materials:\n{}".format(materials))
+
+        # Get the entire node chain of the look sets
+        # history = cmds.listHistory(look_sets, allConnections=True)
+        # if materials list is empty, listHistory() will crash with
+        # RuntimeError
+        history = set()
+        if materials:
+            history.update(cmds.listHistory(materials, allConnections=True))
+
+        # Since we retrieved history only of the connected materials connected
+        # to the look sets above we now add direct history for some of the
+        # look sets directly handling render attribute sets
+
+        # Maya (at least 2024) crashes with Warning when render set type
+        # isn't available. cmds.ls() will return empty list
+        if RENDER_SET_TYPES:
+            render_sets = cmds.ls(look_sets, type=RENDER_SET_TYPES)
+            if render_sets:
+                history.update(
+                    cmds.listHistory(render_sets,
+                                     future=False,
+                                     pruneDagObjects=True)
+                    or []
+                )
+
+        # Get file nodes in the material history
+        files = cmds.ls(list(history),
+                        # It's important only node types are passed that
+                        # exist (e.g. for loaded plugins) because otherwise
+                        # the result will turn back empty
+                        type=list(FILE_NODES.keys()),
+                        long=True)
+
+        # Sort for log readability
+        files.sort()
+
+        return files
+
     def collect_sets(self, instance):
         """Collect all objectSets which are of importance for publishing
 

@@ -446,7 +451,8 @@ class CollectLook(pyblish.api.InstancePlugin):
         which need to be
 
         Args:
-            instance (list): all nodes to be published
+            instance (pyblish.api.Instance): publish instance containing all
+                nodes to be published.
 
         Returns:
             dict

@@ -624,7 +630,7 @@ class CollectLook(pyblish.api.InstancePlugin):
             "source": source, # required for resources
             "files": files,
             "color_space": color_space
-        } # required for resources
+        }
 
 
 class CollectModelRenderSets(CollectLook):

@@ -639,13 +645,13 @@ class CollectModelRenderSets(CollectLook):
     families = ["model"]
     label = "Collect Model Render Sets"
     hosts = ["maya"]
-    maketx = True
 
     def collect_sets(self, instance):
         """Collect all related objectSets except shadingEngines
 
         Args:
-            instance (list): all nodes to be published
+            instance (pyblish.api.Instance): publish instance containing all
+                nodes to be published.
 
         Returns:
             dict

@@ -661,7 +667,7 @@ class CollectModelRenderSets(CollectLook):
             if objset in sets:
                 continue
 
-            if "shadingEngine" in cmds.nodeType(objset, inherited=True):
+            if cmds.objectType(objset, isAType="shadingEngine"):
                 continue
 
             sets[objset] = {"uuid": lib.get_id(objset), "members": list()}

@@ -11,8 +11,6 @@ from ayon_core.pipeline.publish import (
     OptionalPyblishPluginMixin
 )
 
-from maya import cmds
-
 
 class ValidateInstanceInContext(pyblish.api.InstancePlugin,
                                 OptionalPyblishPluginMixin):

@@ -38,17 +36,20 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin,
             return
 
         folder_path = instance.data.get("folderPath")
-        context_folder_path = self.get_context_folder_path(instance)
-        if folder_path != context_folder_path:
+        task = instance.data.get("task")
+        context = self.get_context(instance)
+        if (folder_path, task) != context:
+            context_label = "{} > {}".format(*context)
+            instance_label = "{} > {}".format(folder_path, task)
             raise PublishValidationError(
                 message=(
-                    "Instance '{}' publishes to different folder than current"
+                    "Instance '{}' publishes to different context than current"
                     " context: {}. Current context: {}".format(
-                        instance.name, folder_path, context_folder_path
+                        instance.name, instance_label, context_label
                     )
                 ),
                 description=(
-                    "## Publishing to a different folder\n"
+                    "## Publishing to a different context data\n"
                     "There are publish instances present which are publishing "
                     "into a different folder than your current context.\n\n"
                     "Usually this is not what you want but there can be cases "

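The validator above now compares a (folderPath, task) pair against the publishing context instead of the folder path alone, and builds "folder > task" labels for the error. A stripped-down, Maya-free sketch of that check; the paths and task names below are made up:

def check_instance_context(instance_name, folder_path, task,
                           context_folder_path, context_task):
    """Return an error message when the instance targets another context."""
    if (folder_path, task) == (context_folder_path, context_task):
        return None

    context_label = "{} > {}".format(context_folder_path, context_task)
    instance_label = "{} > {}".format(folder_path, task)
    return (
        "Instance '{}' publishes to different context than current"
        " context: {}. Current context: {}".format(
            instance_name, instance_label, context_label
        )
    )


print(check_instance_context(
    "modelMain", "/assets/hero", "modeling", "/assets/hero", "rigging"))
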
@@ -64,14 +65,20 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin,
 
     @classmethod
     def repair(cls, instance):
-        context_folder_path = cls.get_context_folder_path(instance)
-        instance_node = instance.data["instance_node"]
-        cmds.setAttr(
-            "{}.folderPath".format(instance_node),
-            context_folder_path,
-            type="string"
+        context_folder_path, context_task = cls.get_context(
+            instance)
+
+        create_context = instance.context.data["create_context"]
+        instance_id = instance.data["instance_id"]
+        created_instance = create_context.get_instance_by_id(
+            instance_id
         )
+        created_instance["folderPath"] = context_folder_path
+        created_instance["task"] = context_task
+        create_context.save_changes()
 
     @staticmethod
-    def get_context_folder_path(instance):
-        return instance.context.data["folderPath"]
+    def get_context(instance):
+        """Return asset, task from publishing context data"""
+        context = instance.context
+        return context.data["folderPath"], context.data["task"]

@@ -84,19 +84,11 @@ class ValidateResolution(pyblish.api.InstancePlugin,
 
     @classmethod
     def get_folder_resolution(cls, instance):
-        folder_attributes = instance.data["folderEntity"]["attrib"]
-        if (
-            "resolutionWidth" in folder_attributes
-            and "resolutionHeight" in folder_attributes
-            and "pixelAspect" in folder_attributes
-        ):
-            width = folder_attributes["resolutionWidth"]
-            height = folder_attributes["resolutionHeight"]
-            pixelAspect = folder_attributes["pixelAspect"]
-            return int(width), int(height), float(pixelAspect)
-
-        # Defaults if not found in asset document or project document
-        return 1920, 1080, 1.0
+        task_attributes = instance.data["taskEntity"]["attrib"]
+        width = task_attributes["resolutionWidth"]
+        height = task_attributes["resolutionHeight"]
+        pixel_aspect = task_attributes["pixelAspect"]
+        return int(width), int(height), float(pixel_aspect)
 
     @classmethod
     def repair(cls, instance):

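The replacement above stops probing folder attributes key by key (with a 1920x1080 fallback) and reads the resolution straight from the task entity. A small standalone sketch of the new lookup; the attrib dict below only mirrors the keys the code indexes, the values are invented:

def get_resolution(instance_data):
    # Mirrors the new lookup above: the task entity attributes are
    # expected to carry the resolution values.
    task_attributes = instance_data["taskEntity"]["attrib"]
    width = task_attributes["resolutionWidth"]
    height = task_attributes["resolutionHeight"]
    pixel_aspect = task_attributes["pixelAspect"]
    return int(width), int(height), float(pixel_aspect)


# Example data shaped after the keys used above; values are made up.
instance_data = {
    "taskEntity": {
        "attrib": {
            "resolutionWidth": 3840,
            "resolutionHeight": 2160,
            "pixelAspect": 1.0,
        }
    }
}
print(get_resolution(instance_data))  # (3840, 2160, 1.0)
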
@@ -12,11 +12,7 @@ import logging
 from maya import cmds
 from maya import mel
 
-try:
-    from PySide2 import QtGui, QtWidgets
-except ImportError:
-    from PySide import QtGui
-    QtWidgets = QtGui
+from qtpy import QtGui, QtWidgets
 
 version_info = (2, 3, 0)
 

@@ -873,7 +869,11 @@ def _get_screen_size():
     if _in_standalone():
         return [0, 0]
 
-    rect = QtWidgets.QDesktopWidget().screenGeometry(-1)
+    try:
+        rect = QtWidgets.QDesktopWidget().screenGeometry(-1)
+    except AttributeError:
+        # in Qt6 it is a different call
+        rect = QtWidgets.QApplication.primaryScreen().availableGeometry()
     return [rect.width(), rect.height()]
 
 

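The try/except above is a Qt 5/Qt 6 guard: QDesktopWidget was removed in Qt 6, so the same attribute access raises AttributeError under a Qt 6 binding and the code falls back to the primary screen. A minimal sketch of the same pattern outside Maya (it needs a Qt binding installed to actually run):

from qtpy import QtWidgets


def get_screen_size():
    # A QApplication must exist before screen queries in either Qt version.
    _app = QtWidgets.QApplication.instance() or QtWidgets.QApplication([])
    try:
        # Qt 5: QDesktopWidget is still available.
        rect = QtWidgets.QDesktopWidget().screenGeometry(-1)
    except AttributeError:
        # Qt 6: QDesktopWidget is gone; query the primary screen instead.
        rect = QtWidgets.QApplication.primaryScreen().availableGeometry()
    return [rect.width(), rect.height()]


if __name__ == "__main__":
    print(get_screen_size())
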
@@ -92,7 +92,8 @@ line-ending = "auto"
 
 [tool.codespell]
 # Ignore words that are not in the dictionary.
-ignore-words-list = "ayon,ynput,parms,parm,hda"
+ignore-words-list = "ayon,ynput,parms,parm,hda,developpement"
+
 skip = "./.*,./package/*,*/vendor/*,*/unreal/integration/*,*/aftereffects/api/extension/js/libs/*"
 count = true
 quiet-level = 3

@@ -7,6 +7,26 @@
     "host_name": "maya",
     "environment": "{\n \"MAYA_DISABLE_CLIC_IPM\": \"Yes\",\n \"MAYA_DISABLE_CIP\": \"Yes\",\n \"MAYA_DISABLE_CER\": \"Yes\",\n \"PYMEL_SKIP_MEL_INIT\": \"Yes\",\n \"LC_ALL\": \"C\"\n}\n",
     "variants": [
         {
+            "name": "2025",
+            "label": "2025",
+            "executables": {
+                "windows": [
+                    "C:\\Program Files\\Autodesk\\Maya2025\\bin\\maya.exe"
+                ],
+                "darwin": ["/Applications/Autodesk/maya2025/Maya.app"],
+                "linux": [
+                    "/usr/autodesk/maya2025/bin/maya"
+                ]
+            },
+            "arguments": {
+                "windows": [],
+                "darwin": [],
+                "linux": []
+            },
+            "environment": "{\n \"MAYA_VERSION\": \"2025\"\n}",
+            "use_python_2": false
+        },
+        {
             "name": "2024",
             "label": "2024",

@@ -2,6 +2,10 @@ from ayon_server.settings import BaseSettingsModel, SettingsField
 from ayon_server.types import ColorRGB_float, ColorRGBA_uint8
 
 
+class LoaderEnabledModel(BaseSettingsModel):
+    enabled: bool = SettingsField(title="Enabled")
+
+
 class ColorsSetting(BaseSettingsModel):
     model: ColorRGBA_uint8 = SettingsField(
         (209, 132, 30, 1.0), title="Model:")

@@ -94,6 +98,7 @@ class ReferenceLoaderModel(BaseSettingsModel):
 
 
 class ImportLoaderModel(BaseSettingsModel):
+    enabled: bool = SettingsField(title="Enabled")
     namespace: str = SettingsField(title="Namespace")
     group_name: str = SettingsField(title="Group name")
 

@@ -113,6 +118,89 @@ class LoadersModel(BaseSettingsModel):
         title="Import Loader"
     )
 
+    # Enable/disable loaders
+    ArnoldStandinLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Arnold Standin Loader"
+    )
+    AssemblyLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Assembly Loader"
+    )
+    AudioLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Audio Loader"
+    )
+    GpuCacheLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="GPU Cache Loader"
+    )
+    FileNodeLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="File Node (Image) Loader"
+    )
+    ImagePlaneLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Image Plane Loader"
+    )
+    LookLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Look Loader"
+    )
+    MatchmoveLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Matchmove Loader"
+    )
+    MultiverseUsdLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Multiverse USD Loader"
+    )
+    MultiverseUsdOverLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Multiverse USD Override Loader"
+    )
+    RedshiftProxyLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Redshift Proxy Loader"
+    )
+    RenderSetupLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Render Setup Loader"
+    )
+    LoadVDBtoArnold: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="VDB to Arnold Loader"
+    )
+    LoadVDBtoRedShift: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="VDB to Redshift Loader"
+    )
+    LoadVDBtoVRay: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="VDB to V-Ray Loader"
+    )
+    VRayProxyLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Vray Proxy Loader"
+    )
+    VRaySceneLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="VrayScene Loader"
+    )
+    XgenLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Xgen Loader"
+    )
+    YetiCacheLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Yeti Cache Loader"
+    )
+    YetiRigLoader: LoaderEnabledModel = SettingsField(
+        default_factory=LoaderEnabledModel,
+        title="Yeti Rig Loader"
+    )
+
+
 DEFAULT_LOADERS_SETTING = {
     "colors": {
         "model": [209, 132, 30, 1.0],

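The per-loader LoaderEnabledModel fields above are keyed by plugin class name, which suggests the host side toggles each load plugin from its own settings entry, the way the ImportMayaLoader hunk earlier does for `import_loader`. A hedged sketch of that pattern; the `maya`/`load` settings path and the stand-in class are assumptions for illustration, not taken from this diff:

class ArnoldStandinLoader:
    """Stand-in for a load plugin class of the same name."""

    enabled = True

    @classmethod
    def apply_settings(cls, project_settings):
        # Assumed settings location; this diff only shows the server model.
        load_settings = project_settings.get("maya", {}).get("load", {})
        cls.enabled = load_settings.get(cls.__name__, {}).get("enabled", True)


project_settings = {
    "maya": {"load": {"ArnoldStandinLoader": {"enabled": False}}}
}
ArnoldStandinLoader.apply_settings(project_settings)
print(ArnoldStandinLoader.enabled)  # False
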
@@ -154,8 +242,29 @@ DEFAULT_LOADERS_SETTING = {
         "display_handle": True
     },
     "import_loader": {
+        "enabled": True,
         "namespace": "{folder[name]}_{product[name]}_##_",
         "group_name": "_GRP",
         "display_handle": True
-    }
+    },
+    "ArnoldStandinLoader": {"enabled": True},
+    "AssemblyLoader": {"enabled": True},
+    "AudioLoader": {"enabled": True},
+    "FileNodeLoader": {"enabled": True},
+    "GpuCacheLoader": {"enabled": True},
+    "ImagePlaneLoader": {"enabled": True},
+    "LookLoader": {"enabled": True},
+    "MatchmoveLoader": {"enabled": True},
+    "MultiverseUsdLoader": {"enabled": True},
+    "MultiverseUsdOverLoader": {"enabled": True},
+    "RedshiftProxyLoader": {"enabled": True},
+    "RenderSetupLoader": {"enabled": True},
+    "LoadVDBtoArnold": {"enabled": True},
+    "LoadVDBtoRedShift": {"enabled": True},
+    "LoadVDBtoVRay": {"enabled": True},
+    "VRayProxyLoader": {"enabled": True},
+    "VRaySceneLoader": {"enabled": True},
+    "XgenLoader": {"enabled": True},
+    "YetiCacheLoader": {"enabled": True},
+    "YetiRigLoader": {"enabled": True},
 }

@@ -1,3 +1,7 @@
+import json
+
+from pydantic import validator
+from ayon_server.exceptions import BadRequestException
 from ayon_server.settings import BaseSettingsModel, SettingsField
 
 

@@ -14,19 +18,60 @@ class ScriptsmenuSubmodel(BaseSettingsModel):
     )
 
 
+_definition_mode_type = [
+    {"value": "definition", "label": "Menu Builder"},
+    {"value": "definition_json", "label": "Raw JSON (advanced)"}
+]
+
+
 class ScriptsmenuModel(BaseSettingsModel):
     """Add a custom scripts menu to Maya"""
     _isGroup = True
 
     name: str = SettingsField(title="Menu Name")
+
+    definition_type: str = SettingsField(
+        title="Define menu using",
+        description="Choose the way to define the custom scripts menu "
+                    "via settings",
+        enum_resolver=lambda: _definition_mode_type,
+        conditionalEnum=True,
+        default="definition"
+    )
     definition: list[ScriptsmenuSubmodel] = SettingsField(
         default_factory=list,
         title="Menu Definition",
         description="Scriptmenu Items Definition"
     )
+    definition_json: str = SettingsField(
+        "[]", title="Menu Definition JSON", widget="textarea",
+        description=(
+            "Define the custom tools menu using a JSON list. "
+            "For more details on the JSON format, see "
+            "[here](https://github.com/Colorbleed/scriptsmenu?tab=readme-ov-file#configuration)." # noqa: E501
+        )
+    )
+
+    @validator("definition_json")
+    def validate_json(cls, value):
+        if not value.strip():
+            return "[]"
+        try:
+            converted_value = json.loads(value)
+            success = isinstance(converted_value, list)
+        except json.JSONDecodeError:
+            success = False
+
+        if not success:
+            raise BadRequestException(
+                "The definition can't be parsed as json list object"
+            )
+        return value
 
 
 DEFAULT_SCRIPTSMENU_SETTINGS = {
     "name": "Custom Tools",
+    "definition_type": "definition",
     "definition": [
         {
             "type": "action",

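The validator above only lets a string through when it parses to a JSON list (an empty string is normalised to "[]"); any other value raises BadRequestException when the settings are saved. A quick standalone approximation of that rule:

import json


def is_valid_definition_json(value):
    """Approximation of validate_json() above, without ayon_server."""
    if not value.strip():
        return True  # normalised to "[]" by the validator
    try:
        return isinstance(json.loads(value), list)
    except json.JSONDecodeError:
        return False


print(is_valid_definition_json("[]"))                    # True
print(is_valid_definition_json('[{"type": "action"}]'))  # True
print(is_valid_definition_json('{"type": "action"}'))    # False (dict, not list)
print(is_valid_definition_json("not json"))              # False
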
@@ -39,5 +84,6 @@ DEFAULT_SCRIPTSMENU_SETTINGS = {
                 "shader"
             ]
         }
-    ]
+    ],
+    "definition_json": "[]"
 }

@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring addon version."""
-__version__ = "0.1.13"
+__version__ = "0.1.15"