Repository: https://github.com/ynput/ayon-core.git

Merge branch 'develop' into enhancement/update_houdini_node_parms_on_creator_attribs_change

commit 030b983dab
76 changed files with 1563 additions and 889 deletions
@@ -60,7 +60,7 @@ def main(*subprocess_args):
)
)
elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True):
elif os.environ.get("AVALON_AFTEREFFECTS_WORKFILES_ON_LAUNCH", True):
save = False
if os.getenv("WORKFILES_SAVE_AS"):
save = True
@@ -24,7 +24,7 @@ class AERenderInstance(RenderInstance):
class CollectAERender(publish.AbstractCollectRender):
order = pyblish.api.CollectorOrder + 0.405
order = pyblish.api.CollectorOrder + 0.100
label = "Collect After Effects Render Layers"
hosts = ["aftereffects"]
@@ -145,6 +145,7 @@ class CollectAERender(publish.AbstractCollectRender):
if "review" in instance.families:
# to skip ExtractReview locally
instance.families.remove("review")
instance.deadline = inst.data.get("deadline")
instances.append(instance)
@@ -33,7 +33,7 @@ def load_scripts(paths):
if register:
try:
register()
except:
except: # noqa E722
traceback.print_exc()
else:
print("\nWarning! '%s' has no register function, "
@@ -45,7 +45,7 @@ def load_scripts(paths):
if unregister:
try:
unregister()
except:
except: # noqa E722
traceback.print_exc()
def test_reload(mod):
@@ -57,7 +57,7 @@ def load_scripts(paths):
try:
return importlib.reload(mod)
except:
except: # noqa E722
traceback.print_exc()
def test_register(mod):
@@ -143,13 +143,19 @@ def deselect_all():
if obj.mode != 'OBJECT':
modes.append((obj, obj.mode))
bpy.context.view_layer.objects.active = obj
bpy.ops.object.mode_set(mode='OBJECT')
context_override = create_blender_context(active=obj)
with bpy.context.temp_override(**context_override):
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.object.select_all(action='DESELECT')
context_override = create_blender_context()
with bpy.context.temp_override(**context_override):
bpy.ops.object.select_all(action='DESELECT')
for p in modes:
bpy.context.view_layer.objects.active = p[0]
bpy.ops.object.mode_set(mode=p[1])
context_override = create_blender_context(active=p[0])
with bpy.context.temp_override(**context_override):
bpy.ops.object.mode_set(mode=p[1])
bpy.context.view_layer.objects.active = active
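Note (not part of the changeset): the edits above wrap operator calls in an explicit context override, the Blender 3.2+ temp_override API. A minimal sketch of the same pattern, using a hand-built override dict instead of the repo's create_blender_context helper:

import bpy

def set_object_mode(obj, mode="OBJECT"):
    # Any dict of context members works as an override for this operator;
    # the repo helper builds a richer one (window, area, selected objects).
    context_override = {"active_object": obj, "object": obj}
    with bpy.context.temp_override(**context_override):
        bpy.ops.object.mode_set(mode=mode)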
@@ -169,7 +169,7 @@ def validate_comp_prefs(comp=None, force_repair=False):
def _on_repair():
attributes = dict()
for key, comp_key, _label in validations:
value = folder_value[key]
value = folder_attributes[key]
comp_key_full = "Comp.FrameFormat.{}".format(comp_key)
attributes[comp_key_full] = value
comp.SetPrefs(attributes)
@@ -115,6 +115,7 @@ class CollectFusionRender(
if "review" in instance.families:
# to skip ExtractReview locally
instance.families.remove("review")
instance.deadline = inst.data.get("deadline")
instances.append(instance)
@@ -177,7 +177,10 @@ class CollectFarmRender(publish.AbstractCollectRender):
outputFormat=info[1],
outputStartFrame=info[3],
leadingZeros=info[2],
ignoreFrameHandleCheck=True
ignoreFrameHandleCheck=True,
#todo: inst is not available, must be determined, fix when
#reworking to Publisher
# deadline=inst.data.get("deadline")
)
render_instance.context = context
@@ -51,13 +51,12 @@ def open_file(filepath):
project = hiero.core.projects()[-1]
# open project file
hiero.core.openProject(filepath.replace(os.path.sep, "/"))
# close previous project
project.close()
# Close previous project if its different to the current project.
filepath = filepath.replace(os.path.sep, "/")
if project.path().replace(os.path.sep, "/") != filepath:
# open project file
hiero.core.openProject(filepath)
project.close()
return True
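Note (not part of the changeset): read as a whole, the new open_file only closes the previously open project when the requested path differs from it, so re-opening the current workfile no longer closes and reopens the project. A hedged reconstruction of the resulting function:

import os
import hiero.core

def open_file(filepath):
    project = hiero.core.projects()[-1]
    filepath = filepath.replace(os.path.sep, "/")
    # Close the previous project only if it differs from the requested one
    if project.path().replace(os.path.sep, "/") != filepath:
        hiero.core.openProject(filepath)
        project.close()
    return True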
@@ -7,7 +7,8 @@ from ayon_core.hosts.houdini.api import lib
class CollectDataforCache(pyblish.api.InstancePlugin):
"""Collect data for caching to Deadline."""
order = pyblish.api.CollectorOrder + 0.04
# Run after Collect Frames
order = pyblish.api.CollectorOrder + 0.11
families = ["ass", "pointcache",
"mantraifd", "redshiftproxy",
"vdbcache"]
@@ -17,7 +17,7 @@ class CollectFrames(pyblish.api.InstancePlugin):
label = "Collect Frames"
families = ["vdbcache", "imagesequence", "ass",
"mantraifd", "redshiftproxy", "review",
"bgeo"]
"pointcache"]
def process(self, instance):
@@ -28,10 +28,15 @@ class ExtractAlembic(publish.Extractor):
staging_dir = os.path.dirname(output)
instance.data["stagingDir"] = staging_dir
file_name = os.path.basename(output)
if instance.data.get("frames"):
# list of files
files = instance.data["frames"]
else:
# single file
files = os.path.basename(output)
# We run the render
self.log.info("Writing alembic '%s' to '%s'" % (file_name,
self.log.info("Writing alembic '%s' to '%s'" % (files,
staging_dir))
render_rop(ropnode)
@@ -42,7 +47,7 @@ class ExtractAlembic(publish.Extractor):
representation = {
'name': 'abc',
'ext': 'abc',
'files': file_name,
'files': files,
"stagingDir": staging_dir,
}
instance.data["representations"].append(representation)
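Note (not part of the changeset): with this change the representation's "files" value can be either a single file name or a list of per-frame file names, presumably populated by the Collect Frames plugin shown later. A reduced sketch of the branching that feeds it:

import os

def build_representation(instance_data, output, staging_dir):
    # Frame sequences live under "frames"; single-file exports
    # fall back to the output's basename.
    files = instance_data.get("frames") or os.path.basename(output)
    return {
        "name": "abc",
        "ext": "abc",
        "files": files,  # str for one file, list for a sequence
        "stagingDir": staging_dir,
    }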
@@ -6,12 +6,9 @@ import json
from typing import Any, Dict, Union
import six
import ayon_api
from ayon_core.pipeline import (
get_current_project_name,
get_current_folder_path,
get_current_task_name,
colorspace
)
from ayon_core.settings import get_project_settings
@@ -22,7 +22,6 @@ ALEMBIC_ARGS = {
"melPostJobCallback": str,
"noNormals": bool,
"preRoll": bool,
"preRollStartFrame": int,
"pythonPerFrameCallback": str,
"pythonPostJobCallback": str,
"renderableOnly": bool,
@@ -54,15 +53,22 @@ def extract_alembic(
endFrame=None,
eulerFilter=True,
frameRange="",
melPerFrameCallback=None,
melPostJobCallback=None,
noNormals=False,
preRoll=False,
preRollStartFrame=0,
pythonPerFrameCallback=None,
pythonPostJobCallback=None,
renderableOnly=False,
root=None,
selection=True,
startFrame=None,
step=1.0,
stripNamespaces=True,
userAttr=None,
userAttrPrefix=None,
uvsOnly=False,
uvWrite=True,
verbose=False,
wholeFrameGeo=False,
@@ -102,6 +108,11 @@ def extract_alembic(
string formatted as: "startFrame endFrame". This argument
overrides `startFrame` and `endFrame` arguments.
melPerFrameCallback (Optional[str]): MEL callback run per frame.
melPostJobCallback (Optional[str]): MEL callback after last frame is
written.
noNormals (bool): When on, normal data from the original polygon
objects is not included in the exported Alembic cache file.
@@ -113,6 +124,11 @@ def extract_alembic(
dependent translations and can be used to evaluate run-up that
isn't actually translated. Defaults to 0.
pythonPerFrameCallback (Optional[str]): Python callback run per frame.
pythonPostJobCallback (Optional[str]): Python callback after last frame
is written.
renderableOnly (bool): When on, any non-renderable nodes or hierarchy,
such as hidden objects, are not included in the Alembic file.
Defaults to False.
@@ -137,6 +153,15 @@ def extract_alembic(
object with the namespace taco:foo:bar appears as bar in the
Alembic file.
userAttr (list of str, optional): A specific user defined attribute to
write out. Defaults to [].
userAttrPrefix (list of str, optional): Prefix filter for determining
which user defined attributes to write out. Defaults to [].
uvsOnly (bool): When on, only uv data for PolyMesh and SubD shapes
will be written to the Alembic file.
uvWrite (bool): When on, UV data from polygon meshes and subdivision
objects are written to the Alembic file. Only the current UV map is
included.
@@ -183,6 +208,8 @@ def extract_alembic(
# Ensure list arguments are valid.
attr = attr or []
attrPrefix = attrPrefix or []
userAttr = userAttr or []
userAttrPrefix = userAttrPrefix or []
root = root or []
# Pass the start and end frame on as `frameRange` so that it
@@ -213,8 +240,10 @@ def extract_alembic(
"eulerFilter": eulerFilter,
"noNormals": noNormals,
"preRoll": preRoll,
"root": root,
"renderableOnly": renderableOnly,
"uvWrite": uvWrite,
"uvsOnly": uvsOnly,
"writeColorSets": writeColorSets,
"writeFaceSets": writeFaceSets,
"wholeFrameGeo": wholeFrameGeo,
@@ -226,9 +255,10 @@ def extract_alembic(
"step": step,
"attr": attr,
"attrPrefix": attrPrefix,
"userAttr": userAttr,
"userAttrPrefix": userAttrPrefix,
"stripNamespaces": stripNamespaces,
"verbose": verbose,
"preRollStartFrame": preRollStartFrame
"verbose": verbose
}
# Validate options
@@ -264,6 +294,17 @@ def extract_alembic(
if maya_version >= 2018:
options['autoSubd'] = options.pop('writeCreases', False)
# Only add callbacks if they are set so that we're not passing `None`
callbacks = {
"melPerFrameCallback": melPerFrameCallback,
"melPostJobCallback": melPostJobCallback,
"pythonPerFrameCallback": pythonPerFrameCallback,
"pythonPostJobCallback": pythonPostJobCallback,
}
for key, callback in callbacks.items():
if callback:
options[key] = str(callback)
# Format the job string from options
job_args = list()
for key, value in options.items():
@@ -297,7 +338,11 @@ def extract_alembic(
# exports are made. (PLN-31)
# TODO: Make sure this actually fixes the issues
with evaluation("off"):
cmds.AbcExport(j=job_str, verbose=verbose)
cmds.AbcExport(
j=job_str,
verbose=verbose,
preRollStartFrame=preRollStartFrame
)
if verbose:
log.debug("Extracted Alembic to: %s", file)
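Note (not part of the changeset): after this change callbacks are only forwarded to AbcExport when set, while preRollStartFrame is passed as an explicit flag to cmds.AbcExport. A hedged usage sketch of the updated extract_alembic; keyword names are taken from the signature above, the destination argument is assumed to be named file as in the log call, and the path and node are made up:

extract_alembic(
    file="/tmp/cache.abc",
    root=["|char_GRP"],
    startFrame=1001.0,
    endFrame=1050.0,
    preRoll=True,
    preRollStartFrame=990,
    pythonPerFrameCallback="print('frame written')",
    uvWrite=True,
)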
@@ -1299,7 +1299,7 @@ def is_visible(node,
override_enabled = cmds.getAttr('{}.overrideEnabled'.format(node))
override_visibility = cmds.getAttr('{}.overrideVisibility'.format(
node))
if override_enabled and override_visibility:
if override_enabled and not override_visibility:
return False
if parentHidden:
@@ -4212,3 +4212,23 @@ def create_rig_animation_instance(
variant=namespace,
pre_create_data={"use_selection": True}
)
def get_node_index_under_parent(node: str) -> int:
"""Return the index of a DAG node under its parent.
Arguments:
node (str): A DAG Node path.
Returns:
int: The DAG node's index under its parents or world
"""
node = cmds.ls(node, long=True)[0]  # enforce long names
parent = node.rsplit("|", 1)[0]
if not parent:
return cmds.ls(assemblies=True, long=True).index(node)
else:
return cmds.listRelatives(parent,
children=True,
fullPath=True).index(node)
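Note (not part of the changeset): a short usage sketch of the new helper; node names are hypothetical.

from maya import cmds

cmds.ls(assemblies=True, long=True)
# e.g. ['|persp', '|top', '|front', '|side', '|char_GRP']
get_node_index_under_parent("|char_GRP")       # -> 4, index among world assemblies
get_node_index_under_parent("|char_GRP|geo")   # -> index among char_GRP's children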
@@ -1,3 +1,5 @@
import json
from maya import cmds
from ayon_core.pipeline import (
@@ -8,13 +10,15 @@ from ayon_core.pipeline import (
)
from ayon_core.pipeline.workfile.workfile_template_builder import (
TemplateAlreadyImported,
AbstractTemplateBuilder
AbstractTemplateBuilder,
PlaceholderPlugin,
PlaceholderItem,
)
from ayon_core.tools.workfile_template_build import (
WorkfileBuildPlaceholderDialog,
)
from .lib import get_main_window
from .lib import read, imprint, get_main_window
PLACEHOLDER_SET = "PLACEHOLDERS_SET"
@@ -86,6 +90,162 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):
return True
class MayaPlaceholderPlugin(PlaceholderPlugin):
"""Base Placeholder Plugin for Maya with one unified cache.
Creates a locator as placeholder node, which during populate provide
all of its attributes defined on the locator's transform in
`placeholder.data` and where `placeholder.scene_identifier` is the
full path to the node.
Inherited classes must still implement `populate_placeholder`
"""
use_selection_as_parent = True
item_class = PlaceholderItem
def _create_placeholder_name(self, placeholder_data):
return self.identifier.replace(".", "_")
def _collect_scene_placeholders(self):
nodes_by_identifier = self.builder.get_shared_populate_data(
"placeholder_nodes"
)
if nodes_by_identifier is None:
# Cache placeholder data to shared data
nodes = cmds.ls("*.plugin_identifier", long=True, objectsOnly=True)
nodes_by_identifier = {}
for node in nodes:
identifier = cmds.getAttr("{}.plugin_identifier".format(node))
nodes_by_identifier.setdefault(identifier, []).append(node)
# Set the cache
self.builder.set_shared_populate_data(
"placeholder_nodes", nodes_by_identifier
)
return nodes_by_identifier
def create_placeholder(self, placeholder_data):
parent = None
if self.use_selection_as_parent:
selection = cmds.ls(selection=True)
if len(selection) > 1:
raise ValueError(
"More than one node is selected. "
"Please select only one to define the parent."
)
parent = selection[0] if selection else None
placeholder_data["plugin_identifier"] = self.identifier
placeholder_name = self._create_placeholder_name(placeholder_data)
placeholder = cmds.spaceLocator(name=placeholder_name)[0]
if parent:
placeholder = cmds.parent(placeholder, selection[0])[0]
self.imprint(placeholder, placeholder_data)
def update_placeholder(self, placeholder_item, placeholder_data):
node_name = placeholder_item.scene_identifier
changed_values = {}
for key, value in placeholder_data.items():
if value != placeholder_item.data.get(key):
changed_values[key] = value
# Delete attributes to ensure we imprint new data with correct type
for key in changed_values.keys():
placeholder_item.data[key] = value
if cmds.attributeQuery(key, node=node_name, exists=True):
attribute = "{}.{}".format(node_name, key)
cmds.deleteAttr(attribute)
self.imprint(node_name, changed_values)
def collect_placeholders(self):
placeholders = []
nodes_by_identifier = self._collect_scene_placeholders()
for node in nodes_by_identifier.get(self.identifier, []):
# TODO do data validations and maybe upgrades if they are invalid
placeholder_data = self.read(node)
placeholders.append(
self.item_class(scene_identifier=node,
data=placeholder_data,
plugin=self)
)
return placeholders
def post_placeholder_process(self, placeholder, failed):
"""Cleanup placeholder after load of its corresponding representations.
Hide placeholder, add them to placeholder set.
Used only by PlaceholderCreateMixin and PlaceholderLoadMixin
Args:
placeholder (PlaceholderItem): Item which was just used to load
representation.
failed (bool): Loading of representation failed.
"""
# Hide placeholder and add them to placeholder set
node = placeholder.scene_identifier
# If we just populate the placeholders from current scene, the
# placeholder set will not be created so account for that.
if not cmds.objExists(PLACEHOLDER_SET):
cmds.sets(name=PLACEHOLDER_SET, empty=True)
cmds.sets(node, addElement=PLACEHOLDER_SET)
cmds.hide(node)
cmds.setAttr("{}.hiddenInOutliner".format(node), True)
def delete_placeholder(self, placeholder):
"""Remove placeholder if building was successful
Used only by PlaceholderCreateMixin and PlaceholderLoadMixin.
"""
node = placeholder.scene_identifier
# To avoid that deleting a placeholder node will have Maya delete
# any objectSets the node was a member of we will first remove it
# from any sets it was a member of. This way the `PLACEHOLDERS_SET`
# will survive long enough
sets = cmds.listSets(o=node) or []
for object_set in sets:
cmds.sets(node, remove=object_set)
cmds.delete(node)
def imprint(self, node, data):
"""Imprint call for placeholder node"""
# Complicated data that can't be represented as flat maya attributes
# we write to json strings, e.g. multiselection EnumDef
for key, value in data.items():
if isinstance(value, (list, tuple, dict)):
data[key] = "JSON::{}".format(json.dumps(value))
imprint(node, data)
def read(self, node):
"""Read call for placeholder node"""
data = read(node)
# Complicated data that can't be represented as flat maya attributes
# we read from json strings, e.g. multiselection EnumDef
for key, value in data.items():
if isinstance(value, str) and value.startswith("JSON::"):
value = value[len("JSON::"):]  # strip of JSON:: prefix
data[key] = json.loads(value)
return data
def build_workfile_template(*args):
builder = MayaTemplateBuilder(registered_host())
builder.build_template()
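Note (not part of the changeset): the imprint/read pair above stores values that cannot live in flat Maya attributes (lists, tuples, dicts) as string attributes prefixed with "JSON::" and decodes them again on read. A standalone sketch of that round trip, without the Maya attribute layer:

import json

def encode(data):
    # Serialize non-flat values with the "JSON::" prefix convention
    return {
        key: "JSON::{}".format(json.dumps(value))
        if isinstance(value, (list, tuple, dict)) else value
        for key, value in data.items()
    }

def decode(data):
    # Restore any "JSON::"-prefixed strings back to Python values
    return {
        key: json.loads(value[len("JSON::"):])
        if isinstance(value, str) and value.startswith("JSON::") else value
        for key, value in data.items()
    }

assert decode(encode({"families": ["model", "rig"], "variant": "Main"})) == \
    {"families": ["model", "rig"], "variant": "Main"}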
@@ -6,7 +6,6 @@ from ayon_core.lib import (
BoolDef,
NumberDef,
)
from ayon_core.pipeline import CreatedInstance
def _get_animation_attr_defs(cls):
@@ -1,3 +1,5 @@
from maya import cmds
from ayon_core.hosts.maya.api import (
lib,
plugin
@@ -87,16 +89,24 @@ class CreateArnoldSceneSource(plugin.MayaCreator):
return defs
class CreateArnoldSceneSourceProxy(CreateArnoldSceneSource):
"""Arnold Scene Source Proxy
This product type facilitates working with proxy geometry in the viewport.
"""
identifier = "io.openpype.creators.maya.assproxy"
label = "Arnold Scene Source Proxy"
product_type = "assProxy"
icon = "cube"
def create(self, product_name, instance_data, pre_create_data):
from maya import cmds
instance = super(CreateArnoldSceneSource, self).create(
product_name, instance_data, pre_create_data
)
instance_node = instance.get("instance_node")
content = cmds.sets(name=instance_node + "_content_SET", empty=True)
proxy = cmds.sets(name=instance_node + "_proxy_SET", empty=True)
cmds.sets([content, proxy], forceElement=instance_node)
cmds.sets([proxy], forceElement=instance_node)
@@ -12,6 +12,7 @@ from ayon_core.hosts.maya.api.lib import (
unique_namespace,
get_attribute_input,
maintained_selection,
get_fps_for_current_context
)
from ayon_core.hosts.maya.api.pipeline import containerise
from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type
@@ -29,7 +30,13 @@ class ArnoldStandinLoader(load.LoaderPlugin):
"""Load as Arnold standin"""
product_types = {
"ass", "animation", "model", "proxyAbc", "pointcache", "usd"
"ass",
"assProxy",
"animation",
"model",
"proxyAbc",
"pointcache",
"usd"
}
representations = {"ass", "abc", "usda", "usdc", "usd"}
@@ -95,8 +102,10 @@ class ArnoldStandinLoader(load.LoaderPlugin):
sequence = is_sequence(os.listdir(os.path.dirname(repre_path)))
cmds.setAttr(standin_shape + ".useFrameExtension", sequence)
fps = float(version_attributes.get("fps")) or 25
cmds.setAttr(standin_shape + ".abcFPS", fps)
fps = (
version_attributes.get("fps") or get_fps_for_current_context()
)
cmds.setAttr(standin_shape + ".abcFPS", float(fps))
nodes = [root, standin, standin_shape]
if operator is not None:
@@ -128,6 +137,18 @@ class ArnoldStandinLoader(load.LoaderPlugin):
proxy_path = "/".join([os.path.dirname(path), proxy_basename])
return proxy_basename, proxy_path
def _update_operators(self, string_replace_operator, proxy_basename, path):
cmds.setAttr(
string_replace_operator + ".match",
proxy_basename.split(".")[0],
type="string"
)
cmds.setAttr(
string_replace_operator + ".replace",
os.path.basename(path).split(".")[0],
type="string"
)
def _setup_proxy(self, shape, path, namespace):
proxy_basename, proxy_path = self._get_proxy_path(path)
@@ -150,16 +171,7 @@ class ArnoldStandinLoader(load.LoaderPlugin):
"*.(@node=='{}')".format(node_type),
type="string"
)
cmds.setAttr(
string_replace_operator + ".match",
proxy_basename,
type="string"
)
cmds.setAttr(
string_replace_operator + ".replace",
os.path.basename(path),
type="string"
)
self._update_operators(string_replace_operator, proxy_basename, path)
cmds.connectAttr(
string_replace_operator + ".out",
@@ -194,18 +206,9 @@ class ArnoldStandinLoader(load.LoaderPlugin):
path = get_representation_path(repre_entity)
proxy_basename, proxy_path = self._get_proxy_path(path)
# Whether there is proxy or so, we still update the string operator.
# Whether there is proxy or not, we still update the string operator.
# If no proxy exists, the string operator won't replace anything.
cmds.setAttr(
string_replace_operator + ".match",
proxy_basename,
type="string"
)
cmds.setAttr(
string_replace_operator + ".replace",
os.path.basename(path),
type="string"
)
self._update_operators(string_replace_operator, proxy_basename, path)
dso_path = path
if os.path.exists(proxy_path):
@@ -10,21 +10,23 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
# Offset to be after renderable camera collection.
order = pyblish.api.CollectorOrder + 0.2
label = "Collect Arnold Scene Source"
families = ["ass"]
families = ["ass", "assProxy"]
def process(self, instance):
objsets = instance.data["setMembers"]
instance.data["members"] = []
for set_member in instance.data["setMembers"]:
if cmds.nodeType(set_member) != "objectSet":
instance.data["members"].extend(self.get_hierarchy(set_member))
continue
for objset in objsets:
objset = str(objset)
members = cmds.sets(objset, query=True)
members = cmds.sets(set_member, query=True)
members = cmds.ls(members, long=True)
if members is None:
self.log.warning("Skipped empty instance: \"%s\" " % objset)
self.log.warning(
"Skipped empty instance: \"%s\" " % set_member
)
continue
if objset.endswith("content_SET"):
instance.data["contentMembers"] = self.get_hierarchy(members)
if objset.endswith("proxy_SET"):
if set_member.endswith("proxy_SET"):
instance.data["proxy"] = self.get_hierarchy(members)
# Use camera in object set if present else default to render globals
@@ -33,7 +35,7 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
renderable = [c for c in cameras if cmds.getAttr("%s.renderable" % c)]
if renderable:
camera = renderable[0]
for node in instance.data["contentMembers"]:
for node in instance.data["members"]:
camera_shapes = cmds.listRelatives(
node, shapes=True, type="camera"
)
@@ -46,18 +48,11 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
self.log.debug("data: {}".format(instance.data))
def get_hierarchy(self, nodes):
"""Return nodes with all their children.
Arguments:
nodes (List[str]): List of nodes to collect children hierarchy for
Returns:
list: Input nodes with their children hierarchy
"""
"""Return nodes with all their children"""
nodes = cmds.ls(nodes, long=True)
if not nodes:
return []
children = get_all_children(nodes, ignore_intermediate_objects=True)
return list(children.union(nodes))
children = get_all_children(nodes)
# Make sure nodes merged with children only
# contains unique entries
return list(set(nodes + list(children)))
@@ -17,8 +17,7 @@ class ExtractArnoldSceneSource(publish.Extractor):
families = ["ass"]
asciiAss = False
def process(self, instance):
staging_dir = self.staging_dir(instance)
def _pre_process(self, instance, staging_dir):
file_path = os.path.join(staging_dir, "{}.ass".format(instance.name))
# Mask
@@ -70,24 +69,38 @@ class ExtractArnoldSceneSource(publish.Extractor):
"mask": mask
}
filenames, nodes_by_id = self._extract(
instance.data["contentMembers"], attribute_data, kwargs
)
if "representations" not in instance.data:
instance.data["representations"] = []
return attribute_data, kwargs
def process(self, instance):
staging_dir = self.staging_dir(instance)
attribute_data, kwargs = self._pre_process(instance, staging_dir)
filenames = self._extract(
instance.data["members"], attribute_data, kwargs
)
self._post_process(
instance, filenames, staging_dir, kwargs["startFrame"]
)
def _post_process(self, instance, filenames, staging_dir, frame_start):
nodes_by_id = self._nodes_by_id(instance[:])
representation = {
"name": "ass",
"ext": "ass",
"files": filenames if len(filenames) > 1 else filenames[0],
"stagingDir": staging_dir,
"frameStart": kwargs["startFrame"]
"frameStart": frame_start
}
instance.data["representations"].append(representation)
json_path = os.path.join(staging_dir, "{}.json".format(instance.name))
json_path = os.path.join(
staging_dir, "{}.json".format(instance.name)
)
with open(json_path, "w") as f:
json.dump(nodes_by_id, f)
@@ -104,13 +117,68 @@ class ExtractArnoldSceneSource(publish.Extractor):
"Extracted instance {} to: {}".format(instance.name, staging_dir)
)
# Extract proxy.
if not instance.data.get("proxy", []):
return
def _nodes_by_id(self, nodes):
nodes_by_id = defaultdict(list)
kwargs["filename"] = file_path.replace(".ass", "_proxy.ass")
for node in nodes:
id = lib.get_id(node)
filenames, _ = self._extract(
if id is None:
continue
# Converting Maya hierarchy separator "|" to Arnold separator "/".
nodes_by_id[id].append(node.replace("|", "/"))
return nodes_by_id
def _extract(self, nodes, attribute_data, kwargs):
filenames = []
with lib.attribute_values(attribute_data):
with lib.maintained_selection():
self.log.debug(
"Writing: {}".format(nodes)
)
cmds.select(nodes, noExpand=True)
self.log.debug(
"Extracting ass sequence with: {}".format(kwargs)
)
exported_files = cmds.arnoldExportAss(**kwargs)
for file in exported_files:
filenames.append(os.path.split(file)[1])
self.log.debug("Exported: {}".format(filenames))
return filenames
class ExtractArnoldSceneSourceProxy(ExtractArnoldSceneSource):
"""Extract the content of the instance to an Arnold Scene Source file."""
label = "Extract Arnold Scene Source Proxy"
hosts = ["maya"]
families = ["assProxy"]
asciiAss = True
def process(self, instance):
staging_dir = self.staging_dir(instance)
attribute_data, kwargs = self._pre_process(instance, staging_dir)
filenames, _ = self._duplicate_extract(
instance.data["members"], attribute_data, kwargs
)
self._post_process(
instance, filenames, staging_dir, kwargs["startFrame"]
)
kwargs["filename"] = os.path.join(
staging_dir, "{}_proxy.ass".format(instance.name)
)
filenames, _ = self._duplicate_extract(
instance.data["proxy"], attribute_data, kwargs
)
@@ -125,12 +193,11 @@ class ExtractArnoldSceneSource(publish.Extractor):
instance.data["representations"].append(representation)
def _extract(self, nodes, attribute_data, kwargs):
def _duplicate_extract(self, nodes, attribute_data, kwargs):
self.log.debug(
"Writing {} with:\n{}".format(kwargs["filename"], kwargs)
)
filenames = []
nodes_by_id = defaultdict(list)
# Duplicating nodes so they are direct children of the world. This
# makes the hierarchy of any exported ass file the same.
with lib.delete_after() as delete_bin:
@@ -147,7 +214,9 @@ class ExtractArnoldSceneSource(publish.Extractor):
if not shapes:
continue
duplicate_transform = cmds.duplicate(node)[0]
basename = cmds.duplicate(node)[0]
parents = cmds.ls(node, long=True)[0].split("|")[:-1]
duplicate_transform = "|".join(parents + [basename])
if cmds.listRelatives(duplicate_transform, parent=True):
duplicate_transform = cmds.parent(
@@ -172,28 +241,7 @@ class ExtractArnoldSceneSource(publish.Extractor):
duplicate_nodes.extend(shapes)
delete_bin.append(duplicate_transform)
# Copy cbId to mtoa_constant.
for node in duplicate_nodes:
# Converting Maya hierarchy separator "|" to Arnold
# separator "/".
nodes_by_id[lib.get_id(node)].append(node.replace("|", "/"))
with lib.attribute_values(attribute_data):
with lib.maintained_selection():
self.log.debug(
"Writing: {}".format(duplicate_nodes)
)
cmds.select(duplicate_nodes, noExpand=True)
self.log.debug(
"Extracting ass sequence with: {}".format(kwargs)
)
exported_files = cmds.arnoldExportAss(**kwargs)
for file in exported_files:
filenames.append(os.path.split(file)[1])
self.log.debug("Exported: {}".format(filenames))
nodes_by_id = self._nodes_by_id(duplicate_nodes)
filenames = self._extract(duplicate_nodes, attribute_data, kwargs)
return filenames, nodes_by_id
@@ -6,6 +6,7 @@ from maya import cmds
from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.alembic import extract_alembic
from ayon_core.hosts.maya.api.lib import (
get_all_children,
suspended_refresh,
maintained_selection,
iter_visible_nodes_in_range
@@ -40,7 +41,6 @@ class ExtractAlembic(publish.Extractor, AYONPyblishPluginMixin):
# From settings
attr = []
attrPrefix = []
autoSubd = False
bake_attributes = []
bake_attribute_prefixes = []
dataFormat = "ogawa"
@@ -63,6 +63,7 @@ class ExtractAlembic(publish.Extractor, AYONPyblishPluginMixin):
wholeFrameGeo = False
worldSpace = True
writeColorSets = False
writeCreases = False
writeFaceSets = False
writeNormals = True
writeUVSets = False
@@ -173,15 +174,9 @@ class ExtractAlembic(publish.Extractor, AYONPyblishPluginMixin):
"writeVisibility": attribute_values.get(
"writeVisibility", self.writeVisibility
),
"autoSubd": attribute_values.get(
"autoSubd", self.autoSubd
),
"uvsOnly": attribute_values.get(
"uvsOnly", self.uvsOnly
),
"writeNormals": attribute_values.get(
"writeNormals", self.writeNormals
),
"melPerFrameCallback": attribute_values.get(
"melPerFrameCallback", self.melPerFrameCallback
),
@@ -193,7 +188,12 @@ class ExtractAlembic(publish.Extractor, AYONPyblishPluginMixin):
),
"pythonPostJobCallback": attribute_values.get(
"pythonPostJobCallback", self.pythonPostJobCallback
)
),
# Note that this converts `writeNormals` to `noNormals` for the
# `AbcExport` equivalent in `extract_alembic`
"noNormals": not attribute_values.get(
"writeNormals", self.writeNormals
),
}
if instance.data.get("visibleOnly", False):
@@ -249,7 +249,6 @@ class ExtractAlembic(publish.Extractor, AYONPyblishPluginMixin):
with maintained_selection():
cmds.select(instance.data["proxy"])
extract_alembic(**kwargs)
representation = {
"name": "proxy",
"ext": "abc",
@@ -268,20 +267,6 @@ class ExtractAlembic(publish.Extractor, AYONPyblishPluginMixin):
return []
override_defs = OrderedDict({
"autoSubd": BoolDef(
"autoSubd",
label="Auto Subd",
default=cls.autoSubd,
tooltip=(
"If this flag is present and the mesh has crease edges, "
"crease vertices or holes, the mesh (OPolyMesh) would now "
"be written out as an OSubD and crease info will be stored"
" in the Alembic file. Otherwise, creases info won't be "
"preserved in Alembic file unless a custom Boolean "
"attribute SubDivisionMesh has been added to mesh node and"
" its value is true."
)
),
"eulerFilter": BoolDef(
"eulerFilter",
label="Euler Filter",
@@ -354,6 +339,13 @@ class ExtractAlembic(publish.Extractor, AYONPyblishPluginMixin):
default=cls.writeColorSets,
tooltip="Write vertex colors with the geometry."
),
"writeCreases": BoolDef(
"writeCreases",
label="Write Creases",
default=cls.writeCreases,
tooltip="Write the geometry's edge and vertex crease "
"information."
),
"writeFaceSets": BoolDef(
"writeFaceSets",
label="Write Face Sets",
@@ -527,9 +519,7 @@ class ExtractAnimation(ExtractAlembic):
roots = cmds.sets(out_set, query=True) or []
# Include all descendants
nodes = roots
nodes += cmds.listRelatives(
roots, allDescendents=True, fullPath=True
) or []
nodes = roots.copy()
nodes.extend(get_all_children(roots, ignore_intermediate_objects=True))
return nodes, roots
@@ -1,3 +1,4 @@
import inspect
import pyblish.api
from ayon_core.pipeline import OptionalPyblishPluginMixin
@@ -29,29 +30,28 @@ class ValidateAlembicDefaultsPointcache(
@classmethod
def _get_publish_attributes(cls, instance):
attributes = instance.data["publish_attributes"][
cls.plugin_name(
instance.data["publish_attributes"]
)
]
return attributes
return instance.data["publish_attributes"][cls.plugin_name]
def process(self, instance):
if not self.is_active(instance.data):
return
settings = self._get_settings(instance.context)
attributes = self._get_publish_attributes(instance)
msg = (
"Alembic Extract setting \"{}\" is not the default value:"
"\nCurrent: {}"
"\nDefault Value: {}\n"
)
errors = []
invalid = {}
for key, value in attributes.items():
if key not in settings:
# This may occur if attributes have changed over time and an
# existing instance has older legacy attributes that do not
# match the current settings definition.
self.log.warning(
"Publish attribute %s not found in Alembic Export "
"default settings. Ignoring validation for attribute.",
key
)
continue
default_value = settings[key]
# Lists are best to compared sorted since we cant rely on the order
@@ -61,10 +61,35 @@ class ValidateAlembicDefaultsPointcache(
default_value = sorted(default_value)
if value != default_value:
errors.append(msg.format(key, value, default_value))
invalid[key] = value, default_value
if errors:
raise PublishValidationError("\n".join(errors))
if invalid:
non_defaults = "\n".join(
f"- {key}: {value} \t(default: {default_value})"
for key, (value, default_value) in invalid.items()
)
raise PublishValidationError(
"Alembic extract options differ from default values:\n"
f"{non_defaults}",
description=self.get_description()
)
@staticmethod
def get_description():
return inspect.cleandoc(
"""### Alembic Extract settings differ from defaults
The alembic export options differ from the project default values.
If this is intentional you can disable this validation by
disabling **Validate Alembic Options Default**.
If not you may use the "Repair" action to revert all the options to
their default values.
"""
)
@classmethod
def repair(cls, instance):
@@ -75,13 +100,20 @@ class ValidateAlembicDefaultsPointcache(
)
# Set the settings values on the create context then save to workfile.
publish_attributes = instance.data["publish_attributes"]
plugin_name = cls.plugin_name(publish_attributes)
attributes = cls._get_publish_attributes(instance)
settings = cls._get_settings(instance.context)
create_publish_attributes = create_instance.data["publish_attributes"]
attributes = cls._get_publish_attributes(create_instance)
for key in attributes:
create_publish_attributes[plugin_name][key] = settings[key]
if key not in settings:
# This may occur if attributes have changed over time and an
# existing instance has older legacy attributes that do not
# match the current settings definition.
cls.log.warning(
"Publish attribute %s not found in Alembic Export "
"default settings. Ignoring repair for attribute.",
key
)
continue
attributes[key] = settings[key]
create_context.save_changes()
@@ -93,6 +125,6 @@ class ValidateAlembicDefaultsAnimation(
The defaults are defined in the project settings.
"""
label = "Validate Alembic Options Defaults"
label = "Validate Alembic Options Defaults"
families = ["animation"]
plugin_name = "ExtractAnimation"
@@ -1,30 +1,56 @@
from maya import cmds
import pyblish.api
from ayon_core.pipeline.publish import (
ValidateContentsOrder, PublishValidationError
)
from ayon_core.hosts.maya.api.lib import is_visible
class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
"""Validate Arnold Scene Source.
We require at least 1 root node/parent for the meshes. This is to ensure we
can duplicate the nodes and preserve the names.
Ensure no nodes are hidden.
"""
If using proxies we need the nodes to share the same names and not be
order = ValidateContentsOrder
hosts = ["maya"]
families = ["ass", "assProxy"]
label = "Validate Arnold Scene Source"
def process(self, instance):
# Validate against having nodes hidden, which will result in the
# extraction to ignore the node.
nodes = instance.data["members"] + instance.data.get("proxy", [])
nodes = [x for x in nodes if cmds.objectType(x, isAType='dagNode')]
hidden_nodes = [
x for x in nodes if not is_visible(x, intermediateObject=False)
]
if hidden_nodes:
raise PublishValidationError(
"Found hidden nodes:\n\n{}\n\nPlease unhide for"
" publishing.".format("\n".join(hidden_nodes))
)
class ValidateArnoldSceneSourceProxy(pyblish.api.InstancePlugin):
"""Validate Arnold Scene Source Proxy.
When using proxies we need the nodes to share the same names and not be
parent to the world. This ends up needing at least two groups with content
nodes and proxy nodes in another.
"""
order = ValidateContentsOrder
hosts = ["maya"]
families = ["ass"]
label = "Validate Arnold Scene Source"
families = ["assProxy"]
label = "Validate Arnold Scene Source Proxy"
def _get_nodes_by_name(self, nodes):
ungrouped_nodes = []
nodes_by_name = {}
parents = []
same_named_nodes = {}
for node in nodes:
node_split = node.split("|")
if len(node_split) == 2:
@@ -35,33 +61,16 @@ class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
parents.append(parent)
node_name = node.rsplit("|", 1)[-1].rsplit(":", 1)[-1]
# Check for same same nodes, which can happen in different
# hierarchies.
if node_name in nodes_by_name:
try:
same_named_nodes[node_name].append(node)
except KeyError:
same_named_nodes[node_name] = [
nodes_by_name[node_name], node
]
nodes_by_name[node_name] = node
if same_named_nodes:
message = "Found nodes with the same name:"
for name, nodes in same_named_nodes.items():
message += "\n\n\"{}\":\n{}".format(name, "\n".join(nodes))
raise PublishValidationError(message)
return ungrouped_nodes, nodes_by_name, parents
def process(self, instance):
# Validate against nodes directly parented to world.
ungrouped_nodes = []
nodes, content_nodes_by_name, content_parents = (
self._get_nodes_by_name(instance.data["contentMembers"])
self._get_nodes_by_name(instance.data["members"])
)
ungrouped_nodes.extend(nodes)
@@ -70,24 +79,21 @@ class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
)
ungrouped_nodes.extend(nodes)
# Validate against nodes directly parented to world.
if ungrouped_nodes:
raise PublishValidationError(
"Found nodes parented to the world: {}\n"
"All nodes need to be grouped.".format(ungrouped_nodes)
)
# Proxy validation.
if not instance.data.get("proxy", []):
return
# Validate for content and proxy nodes amount being the same.
if len(instance.data["contentMembers"]) != len(instance.data["proxy"]):
if len(instance.data["members"]) != len(instance.data["proxy"]):
raise PublishValidationError(
"Amount of content nodes ({}) and proxy nodes ({}) needs to "
"be the same.".format(
len(instance.data["contentMembers"]),
len(instance.data["proxy"])
"be the same.\nContent nodes: {}\nProxy nodes:{}".format(
len(instance.data["members"]),
len(instance.data["proxy"]),
instance.data["members"],
instance.data["proxy"]
)
)
@@ -17,7 +17,7 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin,
order = ValidateContentsOrder
hosts = ["maya"]
families = ["ass"]
families = ["assProxy"]
label = "Validate Arnold Scene Source CBID"
actions = [RepairAction]
optional = False
@@ -40,15 +40,11 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin,
@classmethod
def get_invalid_couples(cls, instance):
content_nodes_by_name = cls._get_nodes_by_name(
instance.data["contentMembers"]
)
proxy_nodes_by_name = cls._get_nodes_by_name(
instance.data.get("proxy", [])
)
nodes_by_name = cls._get_nodes_by_name(instance.data["members"])
proxy_nodes_by_name = cls._get_nodes_by_name(instance.data["proxy"])
invalid_couples = []
for content_name, content_node in content_nodes_by_name.items():
for content_name, content_node in nodes_by_name.items():
proxy_node = proxy_nodes_by_name.get(content_name, None)
if not proxy_node:
@@ -70,7 +66,7 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin,
if not self.is_active(instance.data):
return
# Proxy validation.
if not instance.data.get("proxy", []):
if not instance.data["proxy"]:
return
# Validate for proxy nodes sharing the same cbId as content nodes.
@@ -10,6 +10,7 @@ from ayon_core.pipeline.publish import (
RepairAction,
ValidateContentsOrder,
PublishValidationError,
OptionalPyblishPluginMixin
)
from ayon_core.hosts.maya.api import lib
from ayon_core.hosts.maya.api.lib_rendersettings import RenderSettings
@@ -37,7 +38,8 @@ def get_redshift_image_format_labels():
return mel.eval("{0}={0}".format(var))
class ValidateRenderSettings(pyblish.api.InstancePlugin):
class ValidateRenderSettings(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Validates the global render settings
* File Name Prefix must start with: `<Scene>`
@@ -55,7 +57,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
* Frame Padding must be:
* default: 4
* Animation must be toggle on, in Render Settings - Common tab:
* Animation must be toggled on, in Render Settings - Common tab:
* vray: Animation on standard of specific
* arnold: Frame / Animation ext: Any choice without "(Single Frame)"
* redshift: Animation toggled on
@@ -67,10 +69,11 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
"""
order = ValidateContentsOrder
label = "Render Settings"
label = "Validate Render Settings"
hosts = ["maya"]
families = ["renderlayer"]
actions = [RepairAction]
optional = True
ImagePrefixes = {
'mentalray': 'defaultRenderGlobals.imageFilePrefix',
@@ -112,6 +115,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
DEFAULT_PREFIX = "<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>"
def process(self, instance):
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:
@@ -1,87 +1,48 @@
import json
from maya import cmds
from ayon_core.pipeline.workfile.workfile_template_builder import (
PlaceholderPlugin,
LoadPlaceholderItem,
PlaceholderLoadMixin,
LoadPlaceholderItem
)
from ayon_core.hosts.maya.api.lib import (
read,
imprint,
get_reference_node
get_container_transforms,
get_node_parent,
get_node_index_under_parent
)
from ayon_core.hosts.maya.api.workfile_template_builder import (
MayaPlaceholderPlugin,
)
from ayon_core.hosts.maya.api.workfile_template_builder import PLACEHOLDER_SET
class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
class MayaPlaceholderLoadPlugin(MayaPlaceholderPlugin, PlaceholderLoadMixin):
identifier = "maya.load"
label = "Maya load"
def _collect_scene_placeholders(self):
# Cache placeholder data to shared data
placeholder_nodes = self.builder.get_shared_populate_data(
"placeholder_nodes"
)
if placeholder_nodes is None:
attributes = cmds.ls("*.plugin_identifier", long=True)
placeholder_nodes = {}
for attribute in attributes:
node_name = attribute.rpartition(".")[0]
placeholder_nodes[node_name] = (
self._parse_placeholder_node_data(node_name)
)
self.builder.set_shared_populate_data(
"placeholder_nodes", placeholder_nodes
)
return placeholder_nodes
def _parse_placeholder_node_data(self, node_name):
placeholder_data = read(node_name)
parent_name = (
cmds.getAttr(node_name + ".parent", asString=True)
or node_name.rpartition("|")[0]
or ""
)
if parent_name:
siblings = cmds.listRelatives(parent_name, children=True)
else:
siblings = cmds.ls(assemblies=True)
node_shortname = node_name.rpartition("|")[2]
current_index = cmds.getAttr(node_name + ".index", asString=True)
if current_index < 0:
current_index = siblings.index(node_shortname)
placeholder_data.update({
"parent": parent_name,
"index": current_index
})
return placeholder_data
item_class = LoadPlaceholderItem
def _create_placeholder_name(self, placeholder_data):
placeholder_name_parts = placeholder_data["builder_type"].split("_")
pos = 1
# Split builder type: context_assets, linked_assets, all_assets
prefix, suffix = placeholder_data["builder_type"].split("_", 1)
parts = [prefix]
# add family if any
placeholder_product_type = placeholder_data.get("product_type")
if placeholder_product_type is None:
placeholder_product_type = placeholder_data.get("family")
if placeholder_product_type:
placeholder_name_parts.insert(pos, placeholder_product_type)
pos += 1
parts.append(placeholder_product_type)
# add loader arguments if any
loader_args = placeholder_data["loader_args"]
if loader_args:
loader_args = json.loads(loader_args.replace('\'', '\"'))
values = [v for v in loader_args.values()]
for value in values:
placeholder_name_parts.insert(pos, value)
pos += 1
loader_args = eval(loader_args)
for value in loader_args.values():
parts.append(str(value))
placeholder_name = "_".join(placeholder_name_parts)
parts.append(suffix)
placeholder_name = "_".join(parts)
return placeholder_name.capitalize()
@@ -104,68 +65,6 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
)
return loaded_representation_ids
def create_placeholder(self, placeholder_data):
selection = cmds.ls(selection=True)
if len(selection) > 1:
raise ValueError("More then one item are selected")
parent = selection[0] if selection else None
placeholder_data["plugin_identifier"] = self.identifier
placeholder_name = self._create_placeholder_name(placeholder_data)
placeholder = cmds.spaceLocator(name=placeholder_name)[0]
if parent:
placeholder = cmds.parent(placeholder, selection[0])[0]
imprint(placeholder, placeholder_data)
# Add helper attributes to keep placeholder info
cmds.addAttr(
placeholder,
longName="parent",
hidden=True,
dataType="string"
)
cmds.addAttr(
placeholder,
longName="index",
hidden=True,
attributeType="short",
defaultValue=-1
)
cmds.setAttr(placeholder + ".parent", "", type="string")
def update_placeholder(self, placeholder_item, placeholder_data):
node_name = placeholder_item.scene_identifier
new_values = {}
for key, value in placeholder_data.items():
placeholder_value = placeholder_item.data.get(key)
if value != placeholder_value:
new_values[key] = value
placeholder_item.data[key] = value
for key in new_values.keys():
cmds.deleteAttr(node_name + "." + key)
imprint(node_name, new_values)
def collect_placeholders(self):
output = []
scene_placeholders = self._collect_scene_placeholders()
for node_name, placeholder_data in scene_placeholders.items():
if placeholder_data.get("plugin_identifier") != self.identifier:
continue
# TODO do data validations and maybe upgrades if they are invalid
output.append(
LoadPlaceholderItem(node_name, placeholder_data, self)
)
return output
def populate_placeholder(self, placeholder):
self.populate_load_placeholder(placeholder)
@@ -176,30 +75,6 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
def get_placeholder_options(self, options=None):
return self.get_load_plugin_options(options)
def post_placeholder_process(self, placeholder, failed):
"""Cleanup placeholder after load of its corresponding representations.
Args:
placeholder (PlaceholderItem): Item which was just used to load
representation.
failed (bool): Loading of representation failed.
"""
# Hide placeholder and add them to placeholder set
node = placeholder.scene_identifier
# If we just populate the placeholders from current scene, the
# placeholder set will not be created so account for that.
if not cmds.objExists(PLACEHOLDER_SET):
cmds.sets(name=PLACEHOLDER_SET, empty=True)
cmds.sets(node, addElement=PLACEHOLDER_SET)
cmds.hide(node)
cmds.setAttr(node + ".hiddenInOutliner", True)
def delete_placeholder(self, placeholder):
"""Remove placeholder if building was successful"""
cmds.delete(placeholder.scene_identifier)
def load_succeed(self, placeholder, container):
self._parent_in_hierarchy(placeholder, container)
@@ -215,56 +90,43 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
if not container:
return
roots = cmds.sets(container, q=True) or []
ref_node = None
try:
ref_node = get_reference_node(roots)
except AssertionError as e:
self.log.info(e.args[0])
# TODO: This currently returns only a single root but a loaded scene
# could technically load more than a single root
container_root = get_container_transforms(container, root=True)
nodes_to_parent = []
for root in roots:
if ref_node:
ref_root = cmds.referenceQuery(root, nodes=True)[0]
ref_root = (
cmds.listRelatives(ref_root, parent=True, path=True) or
[ref_root]
)
nodes_to_parent.extend(ref_root)
continue
if root.endswith("_RN"):
# Backwards compatibility for hardcoded reference names.
refRoot = cmds.referenceQuery(root, n=True)[0]
refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot]
nodes_to_parent.extend(refRoot)
elif root not in cmds.listSets(allSets=True):
nodes_to_parent.append(root)
# Bugfix: The get_container_transforms does not recognize the load
# reference group currently
# TODO: Remove this when it does
parent = get_node_parent(container_root)
if parent:
container_root = parent
roots = [container_root]
elif not cmds.sets(root, q=True):
return
# Add the loaded roots to the holding sets if they exist
holding_sets = cmds.listSets(object=placeholder.scene_identifier) or []
for holding_set in holding_sets:
cmds.sets(roots, forceElement=holding_set)
# Move loaded nodes to correct index in outliner hierarchy
# Parent the roots to the place of the placeholder locator and match
# its matrix
placeholder_form = cmds.xform(
placeholder.scene_identifier,
q=True,
query=True,
matrix=True,
worldSpace=True
)
scene_parent = cmds.listRelatives(
placeholder.scene_identifier, parent=True, fullPath=True
)
for node in set(nodes_to_parent):
cmds.reorder(node, front=True)
cmds.reorder(node, relative=placeholder.data["index"])
cmds.xform(node, matrix=placeholder_form, ws=True)
if scene_parent:
cmds.parent(node, scene_parent)
else:
if cmds.listRelatives(node, parent=True):
cmds.parent(node, world=True)
scene_parent = get_node_parent(placeholder.scene_identifier)
for node in set(roots):
cmds.xform(node, matrix=placeholder_form, worldSpace=True)
holding_sets = cmds.listSets(object=placeholder.scene_identifier)
if not holding_sets:
return
for holding_set in holding_sets:
cmds.sets(roots, forceElement=holding_set)
if scene_parent != get_node_parent(node):
if scene_parent:
node = cmds.parent(node, scene_parent)[0]
else:
node = cmds.parent(node, world=True)[0]
# Move loaded nodes in index order next to their placeholder node
cmds.reorder(node, back=True)
index = get_node_index_under_parent(placeholder.scene_identifier)
cmds.reorder(node, front=True)
cmds.reorder(node, relative=index + 1)
@ -0,0 +1,201 @@
|
|||
from maya import cmds
|
||||
|
||||
from ayon_core.hosts.maya.api.workfile_template_builder import (
|
||||
MayaPlaceholderPlugin
|
||||
)
|
||||
from ayon_core.lib import NumberDef, TextDef, EnumDef
|
||||
from ayon_core.lib.events import weakref_partial
|
||||
|
||||
|
||||
EXAMPLE_SCRIPT = """
|
||||
# Access maya commands
|
||||
from maya import cmds
|
||||
|
||||
# Access the placeholder node
|
||||
placeholder_node = placeholder.scene_identifier
|
||||
|
||||
# Access the event callback
|
||||
if event is None:
|
||||
print(f"Populating {placeholder}")
|
||||
else:
|
||||
if event.topic == "template.depth_processed":
|
||||
print(f"Processed depth: {event.get('depth')}")
|
||||
elif event.topic == "template.finished":
|
||||
print("Build finished.")
|
||||
""".strip()
|
||||
|
||||
|
||||
class MayaPlaceholderScriptPlugin(MayaPlaceholderPlugin):
|
||||
"""Execute a script at the given `order` during workfile build.
|
||||
|
||||
This is a very low-level placeholder to run Python scripts at a given
|
||||
point in time during the workfile template build.
|
||||
|
||||
It can create either a locator or an objectSet as placeholder node.
|
||||
It defaults to an objectSet, since being able to run on e.g. other
|
||||
placeholder node members can be useful, e.g. using:
|
||||
|
||||
>>> members = cmds.sets(placeholder.scene_identifier, query=True)
|
||||
|
||||
"""
|
||||
|
||||
identifier = "maya.runscript"
|
||||
label = "Run Python Script"
|
||||
|
||||
use_selection_as_parent = False
|
||||
|
||||
def get_placeholder_options(self, options=None):
|
||||
options = options or {}
|
||||
return [
|
||||
NumberDef(
|
||||
"order",
|
||||
label="Order",
|
||||
default=options.get("order") or 0,
|
||||
decimals=0,
|
||||
minimum=0,
|
||||
maximum=999,
|
||||
tooltip=(
|
||||
"Order"
|
||||
"\nOrder defines asset loading priority (0 to 999)"
|
||||
"\nPriority rule is : \"lowest is first to load\"."
|
||||
)
|
||||
),
|
||||
TextDef(
|
||||
"prepare_script",
|
||||
label="Run at\nprepare",
|
||||
tooltip="Run before populate at prepare order",
|
||||
multiline=True,
|
||||
default=options.get("prepare_script", "")
|
||||
),
|
||||
TextDef(
|
||||
"populate_script",
|
||||
label="Run at\npopulate",
|
||||
tooltip="Run script at populate node order<br>"
|
||||
"This is the <b>default</b> behavior",
|
||||
multiline=True,
|
||||
default=options.get("populate_script", EXAMPLE_SCRIPT)
|
||||
),
|
||||
TextDef(
|
||||
"depth_processed_script",
|
||||
label="Run after\ndepth\niteration",
|
||||
tooltip="Run script after every build depth iteration",
|
||||
multiline=True,
|
||||
default=options.get("depth_processed_script", "")
|
||||
),
|
||||
TextDef(
|
||||
"finished_script",
|
||||
label="Run after\nbuild",
|
||||
tooltip=(
|
||||
"Run script at build finished.<br>"
|
||||
"<b>Note</b>: this even runs if other placeholders had "
|
||||
"errors during the build"
|
||||
),
|
||||
multiline=True,
|
||||
default=options.get("finished_script", "")
|
||||
),
|
||||
EnumDef(
|
||||
"create_nodetype",
|
||||
label="Nodetype",
|
||||
items={
|
||||
"spaceLocator": "Locator",
|
||||
"objectSet": "ObjectSet"
|
||||
},
|
||||
tooltip=(
|
||||
"The placeholder's node type to be created.<br>"
|
||||
"<b>Note</b> this only works on create, not on update"
|
||||
),
|
||||
default=options.get("create_nodetype", "objectSet")
|
||||
),
|
||||
]
|
||||
|
||||
def create_placeholder(self, placeholder_data):
|
||||
nodetype = placeholder_data.get("create_nodetype", "objectSet")
|
||||
|
||||
if nodetype == "spaceLocator":
|
||||
super(MayaPlaceholderScriptPlugin, self).create_placeholder(
|
||||
placeholder_data
|
||||
)
|
||||
elif nodetype == "objectSet":
|
||||
placeholder_data["plugin_identifier"] = self.identifier
|
||||
|
||||
# Create maya objectSet on selection
|
||||
selection = cmds.ls(selection=True, long=True)
|
||||
name = self._create_placeholder_name(placeholder_data)
|
||||
node = cmds.sets(selection, name=name)
|
||||
|
||||
self.imprint(node, placeholder_data)
|
||||
|
||||
def prepare_placeholders(self, placeholders):
|
||||
super(MayaPlaceholderScriptPlugin, self).prepare_placeholders(
|
||||
placeholders
|
||||
)
|
||||
for placeholder in placeholders:
|
||||
prepare_script = placeholder.data.get("prepare_script")
|
||||
if not prepare_script:
|
||||
continue
|
||||
|
||||
self.run_script(placeholder, prepare_script)
|
||||
|
||||
def populate_placeholder(self, placeholder):
|
||||
|
||||
populate_script = placeholder.data.get("populate_script")
|
||||
depth_script = placeholder.data.get("depth_processed_script")
|
||||
finished_script = placeholder.data.get("finished_script")
|
||||
|
||||
# Run now
|
||||
if populate_script:
|
||||
self.run_script(placeholder, populate_script)
|
||||
|
||||
if not any([depth_script, finished_script]):
|
||||
# No callback scripts to run
|
||||
if not placeholder.data.get("keep_placeholder", True):
|
||||
self.delete_placeholder(placeholder)
|
||||
return
|
||||
|
||||
# Run at each depth processed
|
||||
if depth_script:
|
||||
callback = weakref_partial(
|
||||
self.run_script, placeholder, depth_script)
|
||||
self.builder.add_on_depth_processed_callback(
|
||||
callback, order=placeholder.order)
|
||||
|
||||
# Run at build finish
|
||||
if finished_script:
|
||||
callback = weakref_partial(
|
||||
self.run_script, placeholder, finished_script)
|
||||
self.builder.add_on_finished_callback(
|
||||
callback, order=placeholder.order)
|
||||
|
||||
# If placeholder should be deleted, delete it after finish so
|
||||
# the scripts have access to it up to the last run
|
||||
if not placeholder.data.get("keep_placeholder", True):
|
||||
delete_callback = weakref_partial(
|
||||
self.delete_placeholder, placeholder)
|
||||
self.builder.add_on_finished_callback(
|
||||
delete_callback, order=placeholder.order + 1)
|
||||
|
||||
def run_script(self, placeholder, script, event=None):
|
||||
"""Run script
|
||||
|
||||
Even though `placeholder` is an unused argument, exposing it as
|
||||
an input argument makes it available through
|
||||
globals()/locals() in the `exec` call, giving the script access
|
||||
to the placeholder.
|
||||
|
||||
For example:
|
||||
>>> node = placeholder.scene_identifier
|
||||
|
||||
In the case the script is running at a callback level (not during
|
||||
populate) then it has access to the `event` as well, otherwise the
|
||||
value is None if it runs during `populate_placeholder` directly.
|
||||
|
||||
For example adding this as the callback script:
|
||||
>>> if event is not None:
|
||||
>>> if event.topic == "on_depth_processed":
|
||||
>>> print(f"Processed depth: {event.get('depth')}")
|
||||
>>> elif event.topic == "on_finished":
|
||||
>>> print("Build finished.")
|
||||
|
||||
"""
|
||||
self.log.debug(f"Running script at event: {event}")
|
||||
exec(script, locals())
|
||||
|
|
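A minimal sketch of the mechanism used by run_script above, assuming only standard Python: passing placeholder and event as function arguments puts them into locals(), so the executed source can read them as plain names.

def run_user_script(script, placeholder, event=None):
    # Arguments live in locals(), so the executed script can read
    # 'placeholder' and 'event' directly.
    exec(script, locals())

# Hypothetical placeholder identifier; any object works here.
run_user_script("print(placeholder, event)", placeholder="|locator1", event=None)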
@ -35,8 +35,12 @@ class ImageCreator(Creator):
|
|||
create_empty_group = False
|
||||
|
||||
stub = api.stub() # only after PS is up
|
||||
top_level_selected_items = stub.get_selected_layers()
|
||||
if pre_create_data.get("use_selection"):
|
||||
try:
|
||||
top_level_selected_items = stub.get_selected_layers()
|
||||
except ValueError:
|
||||
raise CreatorError("Cannot group locked Background layer!")
|
||||
|
||||
only_single_item_selected = len(top_level_selected_items) == 1
|
||||
if (
|
||||
only_single_item_selected or
|
||||
|
|
@ -50,11 +54,12 @@ class ImageCreator(Creator):
|
|||
group = stub.group_selected_layers(product_name_from_ui)
|
||||
groups_to_create.append(group)
|
||||
else:
|
||||
stub.select_layers(stub.get_layers())
|
||||
try:
|
||||
stub.select_layers(stub.get_layers())
|
||||
group = stub.group_selected_layers(product_name_from_ui)
|
||||
except:
|
||||
except ValueError:
|
||||
raise CreatorError("Cannot group locked Background layer!")
|
||||
|
||||
groups_to_create.append(group)
|
||||
|
||||
# create empty group if nothing selected
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
import pyblish.util
|
||||
|
||||
|
|
|
|||
|
|
@ -139,6 +139,7 @@ from .path_tools import (
|
|||
)
|
||||
|
||||
from .ayon_info import (
|
||||
is_in_ayon_launcher_process,
|
||||
is_running_from_build,
|
||||
is_using_ayon_console,
|
||||
is_staging_enabled,
|
||||
|
|
@ -248,6 +249,7 @@ __all__ = [
|
|||
|
||||
"Logger",
|
||||
|
||||
"is_in_ayon_launcher_process",
|
||||
"is_running_from_build",
|
||||
"is_using_ayon_console",
|
||||
"is_staging_enabled",
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import os
|
||||
import sys
|
||||
import json
|
||||
import datetime
|
||||
import platform
|
||||
|
|
@ -25,6 +26,18 @@ def get_ayon_launcher_version():
|
|||
return content["__version__"]
|
||||
|
||||
|
||||
def is_in_ayon_launcher_process():
|
||||
"""Determine if current process is running from AYON launcher.
|
||||
|
||||
Returns:
|
||||
bool: True if running from AYON launcher.
|
||||
|
||||
"""
|
||||
ayon_executable_path = os.path.normpath(os.environ["AYON_EXECUTABLE"])
|
||||
executable_path = os.path.normpath(sys.executable)
|
||||
return ayon_executable_path == executable_path
|
||||
|
||||
|
||||
def is_running_from_build():
|
||||
"""Determine if current process is running from build or code.
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
from .deadline_module import DeadlineModule
|
||||
from .version import __version__
|
||||
|
||||
|
||||
__all__ = (
|
||||
"DeadlineModule",
|
||||
"__version__"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -49,6 +49,10 @@ def requests_post(*args, **kwargs):
|
|||
if 'verify' not in kwargs:
|
||||
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL",
|
||||
True) else True # noqa
|
||||
|
||||
auth = kwargs.get("auth")
|
||||
if auth:
|
||||
kwargs["auth"] = tuple(auth) # explicit cast to tuple
|
||||
# add 10sec timeout before bailing out
|
||||
kwargs['timeout'] = 10
|
||||
return requests.post(*args, **kwargs)
|
||||
|
|
@ -70,6 +74,9 @@ def requests_get(*args, **kwargs):
|
|||
if 'verify' not in kwargs:
|
||||
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL",
|
||||
True) else True # noqa
|
||||
auth = kwargs.get("auth")
|
||||
if auth:
|
||||
kwargs["auth"] = tuple(auth)
|
||||
# add 10sec timeout before bailing out
|
||||
kwargs['timeout'] = 10
|
||||
return requests.get(*args, **kwargs)
|
||||
|
|
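A hedged usage sketch of the request wrappers above; the URL, credentials and payload are placeholders, not values from this repository.

auth = ("artist", "secret")  # assumed (username, password) pair
payload = {"JobInfo": {}, "PluginInfo": {}, "AuxFiles": []}
response = requests_post(
    "http://deadline.example:8082/api/jobs",  # assumed webservice URL
    json=payload,
    auth=auth,
)
if not response.ok:
    print(response.status_code, response.text)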
@ -434,9 +441,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
"""Plugin entry point."""
|
||||
self._instance = instance
|
||||
context = instance.context
|
||||
self._deadline_url = context.data.get("defaultDeadline")
|
||||
self._deadline_url = instance.data.get(
|
||||
"deadlineUrl", self._deadline_url)
|
||||
self._deadline_url = instance.data["deadline"]["url"]
|
||||
|
||||
assert self._deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
|
|
@ -460,7 +465,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
self.plugin_info = self.get_plugin_info()
|
||||
self.aux_files = self.get_aux_files()
|
||||
|
||||
job_id = self.process_submission()
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
job_id = self.process_submission(auth)
|
||||
self.log.info("Submitted job to Deadline: {}.".format(job_id))
|
||||
|
||||
# TODO: Find a way that's more generic and not render type specific
|
||||
|
|
@ -473,10 +479,10 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
job_info=render_job_info,
|
||||
plugin_info=render_plugin_info
|
||||
)
|
||||
render_job_id = self.submit(payload)
|
||||
render_job_id = self.submit(payload, auth)
|
||||
self.log.info("Render job id: %s", render_job_id)
|
||||
|
||||
def process_submission(self):
|
||||
def process_submission(self, auth=None):
|
||||
"""Process data for submission.
|
||||
|
||||
This takes Deadline JobInfo, PluginInfo, AuxFile, creates payload
|
||||
|
|
@ -487,7 +493,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
|
||||
"""
|
||||
payload = self.assemble_payload()
|
||||
return self.submit(payload)
|
||||
return self.submit(payload, auth)
|
||||
|
||||
@abstractmethod
|
||||
def get_job_info(self):
|
||||
|
|
@ -577,7 +583,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
"AuxFiles": aux_files or self.aux_files
|
||||
}
|
||||
|
||||
def submit(self, payload):
|
||||
def submit(self, payload, auth):
|
||||
"""Submit payload to Deadline API end-point.
|
||||
|
||||
This takes the payload in the form of a JSON file and POSTs it to
|
||||
|
|
@ -585,6 +591,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
|
||||
Args:
|
||||
payload (dict): dict to become json in deadline submission.
|
||||
auth (tuple): (username, password)
|
||||
|
||||
Returns:
|
||||
str: resulting Deadline job id.
|
||||
|
|
@ -594,7 +601,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
|
||||
"""
|
||||
url = "{}/api/jobs".format(self._deadline_url)
|
||||
response = requests_post(url, json=payload)
|
||||
response = requests_post(url, json=payload,
|
||||
auth=auth)
|
||||
if not response.ok:
|
||||
self.log.error("Submission failed!")
|
||||
self.log.error(response.status_code)
|
||||
|
|
|
|||
|
|
@ -19,23 +19,23 @@ class DeadlineModule(AYONAddon, IPluginPaths):
|
|||
|
||||
def initialize(self, studio_settings):
|
||||
# This module is always enabled
|
||||
deadline_urls = {}
|
||||
deadline_servers_info = {}
|
||||
enabled = self.name in studio_settings
|
||||
if enabled:
|
||||
deadline_settings = studio_settings[self.name]
|
||||
deadline_urls = {
|
||||
url_item["name"]: url_item["value"]
|
||||
deadline_servers_info = {
|
||||
url_item["name"]: url_item
|
||||
for url_item in deadline_settings["deadline_urls"]
|
||||
}
|
||||
|
||||
if enabled and not deadline_urls:
|
||||
if enabled and not deadline_servers_info:
|
||||
enabled = False
|
||||
self.log.warning((
|
||||
"Deadline Webservice URLs are not specified. Disabling addon."
|
||||
))
|
||||
|
||||
self.enabled = enabled
|
||||
self.deadline_urls = deadline_urls
|
||||
self.deadline_servers_info = deadline_servers_info
|
||||
|
||||
def get_plugin_paths(self):
|
||||
"""Deadline plugin paths."""
|
||||
|
|
@ -45,13 +45,15 @@ class DeadlineModule(AYONAddon, IPluginPaths):
|
|||
}
|
||||
|
||||
@staticmethod
|
||||
def get_deadline_pools(webservice, log=None):
|
||||
def get_deadline_pools(webservice, auth=None, log=None):
|
||||
"""Get pools from Deadline.
|
||||
Args:
|
||||
webservice (str): Server url.
|
||||
log (Logger)
|
||||
auth (Optional[Tuple[str, str]]): Tuple containing username,
|
||||
password
|
||||
log (Optional[Logger]): Logger to log errors to, if provided.
|
||||
Returns:
|
||||
list: Pools.
|
||||
List[str]: Pools.
|
||||
Throws:
|
||||
RuntimeError: If deadline webservice is unreachable.
|
||||
|
||||
|
|
@ -63,7 +65,10 @@ class DeadlineModule(AYONAddon, IPluginPaths):
|
|||
|
||||
argument = "{}/api/pools?NamesOnly=true".format(webservice)
|
||||
try:
|
||||
response = requests_get(argument)
|
||||
kwargs = {}
|
||||
if auth:
|
||||
kwargs["auth"] = auth
|
||||
response = requests_get(argument, **kwargs)
|
||||
except requests.exceptions.ConnectionError as exc:
|
||||
msg = 'Cannot connect to DL web service {}'.format(webservice)
|
||||
log.error(msg)
|
||||
|
|
|
|||
|
|
@ -13,17 +13,45 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
|
|||
"""Collect Deadline Webservice URL from instance."""
|
||||
|
||||
# Run before collect_render.
|
||||
order = pyblish.api.CollectorOrder + 0.005
|
||||
order = pyblish.api.CollectorOrder + 0.225
|
||||
label = "Deadline Webservice from the Instance"
|
||||
families = ["rendering", "renderlayer"]
|
||||
hosts = ["maya"]
|
||||
targets = ["local"]
|
||||
families = ["render",
|
||||
"rendering",
|
||||
"render.farm",
|
||||
"renderFarm",
|
||||
"renderlayer",
|
||||
"maxrender",
|
||||
"usdrender",
|
||||
"redshift_rop",
|
||||
"arnold_rop",
|
||||
"mantra_rop",
|
||||
"karma_rop",
|
||||
"vray_rop",
|
||||
"publish.hou",
|
||||
"image"] # for Fusion
|
||||
|
||||
def process(self, instance):
|
||||
instance.data["deadlineUrl"] = self._collect_deadline_url(instance)
|
||||
instance.data["deadlineUrl"] = \
|
||||
instance.data["deadlineUrl"].strip().rstrip("/")
|
||||
if not instance.data.get("farm"):
|
||||
self.log.debug("Should not be processed on farm, skipping.")
|
||||
return
|
||||
|
||||
if not instance.data.get("deadline"):
|
||||
instance.data["deadline"] = {}
|
||||
|
||||
# todo: separate logic should be removed, all hosts should have same
|
||||
host_name = instance.context.data["hostName"]
|
||||
if host_name == "maya":
|
||||
deadline_url = self._collect_deadline_url(instance)
|
||||
else:
|
||||
deadline_url = (instance.data.get("deadlineUrl") or # backwards
|
||||
instance.data.get("deadline", {}).get("url"))
|
||||
if deadline_url:
|
||||
instance.data["deadline"]["url"] = deadline_url.strip().rstrip("/")
|
||||
else:
|
||||
instance.data["deadline"]["url"] = instance.context.data["deadline"]["defaultUrl"] # noqa
|
||||
self.log.debug(
|
||||
"Using {} for submission.".format(instance.data["deadlineUrl"]))
|
||||
"Using {} for submission".format(instance.data["deadline"]["url"]))
|
||||
|
||||
def _collect_deadline_url(self, render_instance):
|
||||
# type: (pyblish.api.Instance) -> str
|
||||
|
|
@ -49,13 +77,13 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
|
|||
["project_settings"]
|
||||
["deadline"]
|
||||
)
|
||||
|
||||
default_server = render_instance.context.data["defaultDeadline"]
|
||||
default_server_url = (render_instance.context.data["deadline"]
|
||||
["defaultUrl"])
|
||||
# QUESTION How and where is this set? Should be removed?
|
||||
instance_server = render_instance.data.get("deadlineServers")
|
||||
if not instance_server:
|
||||
self.log.debug("Using default server.")
|
||||
return default_server
|
||||
return default_server_url
|
||||
|
||||
# Get instance server as string.
|
||||
if isinstance(instance_server, int):
|
||||
|
|
@ -66,7 +94,7 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
|
|||
|
||||
default_servers = {
|
||||
url_item["name"]: url_item["value"]
|
||||
for url_item in deadline_settings["deadline_urls"]
|
||||
for url_item in deadline_settings["deadline_servers_info"]
|
||||
}
|
||||
project_servers = (
|
||||
render_instance.context.data
|
||||
|
|
|
|||
|
|
@ -18,10 +18,9 @@ class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
|
|||
"""
|
||||
|
||||
# Run before collect_deadline_server_instance.
|
||||
order = pyblish.api.CollectorOrder + 0.0025
|
||||
order = pyblish.api.CollectorOrder + 0.200
|
||||
label = "Default Deadline Webservice"
|
||||
|
||||
pass_mongo_url = False
|
||||
targets = ["local"]
|
||||
|
||||
def process(self, context):
|
||||
try:
|
||||
|
|
@ -33,15 +32,17 @@ class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
|
|||
deadline_settings = context.data["project_settings"]["deadline"]
|
||||
deadline_server_name = deadline_settings["deadline_server"]
|
||||
|
||||
deadline_webservice = None
|
||||
dl_server_info = None
|
||||
if deadline_server_name:
|
||||
deadline_webservice = deadline_module.deadline_urls.get(
|
||||
dl_server_info = deadline_module.deadline_servers_info.get(
|
||||
deadline_server_name)
|
||||
|
||||
default_deadline_webservice = deadline_module.deadline_urls["default"]
|
||||
deadline_webservice = (
|
||||
deadline_webservice
|
||||
or default_deadline_webservice
|
||||
)
|
||||
if dl_server_info:
|
||||
deadline_url = dl_server_info["value"]
|
||||
else:
|
||||
default_dl_server_info = deadline_module.deadline_servers_info[0]
|
||||
deadline_url = default_dl_server_info["value"]
|
||||
|
||||
context.data["defaultDeadline"] = deadline_webservice.strip().rstrip("/") # noqa
|
||||
context.data["deadline"] = {}
|
||||
context.data["deadline"]["defaultUrl"] = (
|
||||
deadline_url.strip().rstrip("/"))
|
||||
|
|
|
|||
|
|
@ -0,0 +1,89 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Collect user credentials
|
||||
|
||||
Requires:
|
||||
context -> project_settings
|
||||
instance.data["deadline"]["url"]
|
||||
|
||||
Provides:
|
||||
instance.data["deadline"] -> require_authentication (bool)
|
||||
instance.data["deadline"] -> auth (tuple (str, str)) -
|
||||
(username, password) or None
|
||||
"""
|
||||
import pyblish.api
|
||||
|
||||
from ayon_api import get_server_api_connection
|
||||
from ayon_core.modules.deadline.deadline_module import DeadlineModule
|
||||
from ayon_core.modules.deadline import __version__
|
||||
|
||||
|
||||
class CollectDeadlineUserCredentials(pyblish.api.InstancePlugin):
|
||||
"""Collects user name and password for artist if DL requires authentication
|
||||
"""
|
||||
order = pyblish.api.CollectorOrder + 0.250
|
||||
label = "Collect Deadline User Credentials"
|
||||
|
||||
targets = ["local"]
|
||||
hosts = ["aftereffects",
|
||||
"blender",
|
||||
"fusion",
|
||||
"harmony",
|
||||
"nuke",
|
||||
"maya",
|
||||
"max",
|
||||
"houdini"]
|
||||
|
||||
families = ["render",
|
||||
"rendering",
|
||||
"render.farm",
|
||||
"renderFarm",
|
||||
"renderlayer",
|
||||
"maxrender",
|
||||
"usdrender",
|
||||
"redshift_rop",
|
||||
"arnold_rop",
|
||||
"mantra_rop",
|
||||
"karma_rop",
|
||||
"vray_rop",
|
||||
"publish.hou"]
|
||||
|
||||
def process(self, instance):
|
||||
if not instance.data.get("farm"):
|
||||
self.log.debug("Should not be processed on farm, skipping.")
|
||||
return
|
||||
|
||||
collected_deadline_url = instance.data["deadline"]["url"]
|
||||
if not collected_deadline_url:
|
||||
raise ValueError("Instance doesn't have '[deadline][url]'.")
|
||||
context_data = instance.context.data
|
||||
deadline_settings = context_data["project_settings"]["deadline"]
|
||||
|
||||
deadline_server_name = None
|
||||
# deadline url might be set directly from instance, need to find
|
||||
# metadata for it
|
||||
for deadline_info in deadline_settings["deadline_urls"]:
|
||||
dl_settings_url = deadline_info["value"].strip().rstrip("/")
|
||||
if dl_settings_url == collected_deadline_url:
|
||||
deadline_server_name = deadline_info["name"]
|
||||
break
|
||||
|
||||
if not deadline_server_name:
|
||||
raise ValueError(f"Collected {collected_deadline_url} doesn't "
|
||||
"match any site configured in Studio Settings")
|
||||
|
||||
instance.data["deadline"]["require_authentication"] = (
|
||||
deadline_info["require_authentication"]
|
||||
)
|
||||
instance.data["deadline"]["auth"] = None
|
||||
|
||||
if not deadline_info["require_authentication"]:
|
||||
return
|
||||
# TODO import 'get_addon_site_settings' when available
|
||||
# in public 'ayon_api'
|
||||
local_settings = get_server_api_connection().get_addon_site_settings(
|
||||
DeadlineModule.name, __version__)
|
||||
local_settings = local_settings["local_settings"]
|
||||
for server_info in local_settings:
|
||||
if deadline_server_name == server_info["server_name"]:
|
||||
instance.data["deadline"]["auth"] = (server_info["username"],
|
||||
server_info["password"])
|
||||
|
|
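A small sketch of the URL matching done above: both the settings value and the collected URL are normalized by stripping whitespace and a trailing slash before comparison (settings shape assumed for illustration).

deadline_urls = [
    {"name": "default", "value": "http://deadline.example:8082/"},
    {"name": "render2", "value": "http://render.example:8082"},
]
collected_url = "http://deadline.example:8082"
server_name = next(
    (item["name"] for item in deadline_urls
     if item["value"].strip().rstrip("/") == collected_url),
    None,
)
print(server_name)  # "default"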
@ -0,0 +1,17 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<root>
|
||||
<error id="main">
|
||||
<title>Deadline Authentication</title>
|
||||
<description>
|
||||
## Deadline authentication is required
|
||||
|
||||
This project's Settings specify that Deadline requires authentication.
|
||||
|
||||
### How to repair?
|
||||
|
||||
Please go to AYON Server > Site Settings and provide your Deadline username and password.
|
||||
In some cases the password may be empty if Deadline is configured to allow that. Ask your administrator.
|
||||
|
||||
</description>
|
||||
</error>
|
||||
</root>
|
||||
|
|
@ -174,7 +174,8 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
instance.data["toBeRenderedOn"] = "deadline"
|
||||
|
||||
payload = self.assemble_payload()
|
||||
return self.submit(payload)
|
||||
return self.submit(payload,
|
||||
auth=instance.data["deadline"]["auth"])
|
||||
|
||||
def from_published_scene(self):
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -2,9 +2,10 @@ import os
|
|||
import re
|
||||
import json
|
||||
import getpass
|
||||
import requests
|
||||
import pyblish.api
|
||||
|
||||
from openpype_modules.deadline.abstract_submit_deadline import requests_post
|
||||
|
||||
|
||||
class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
|
||||
"""Submit CelAction2D scene to Deadline
|
||||
|
|
@ -30,11 +31,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
|
||||
context = instance.context
|
||||
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
deadline_url = instance.data["deadline"]["url"]
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
self.deadline_url = "{}/api/jobs".format(deadline_url)
|
||||
|
|
@ -197,7 +194,8 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
self.log.debug("__ expectedFiles: `{}`".format(
|
||||
instance.data["expectedFiles"]))
|
||||
|
||||
response = requests.post(self.deadline_url, json=payload)
|
||||
response = requests_post(self.deadline_url, json=payload,
|
||||
auth=instance.data["deadline"]["require_authentication"])
|
||||
|
||||
if not response.ok:
|
||||
self.log.error(
|
||||
|
|
|
|||
|
|
@ -2,17 +2,13 @@ import os
|
|||
import json
|
||||
import getpass
|
||||
|
||||
import requests
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from openpype_modules.deadline.abstract_submit_deadline import requests_post
|
||||
from ayon_core.pipeline.publish import (
|
||||
AYONPyblishPluginMixin
|
||||
)
|
||||
from ayon_core.lib import (
|
||||
BoolDef,
|
||||
NumberDef,
|
||||
)
|
||||
from ayon_core.lib import NumberDef
|
||||
|
||||
|
||||
class FusionSubmitDeadline(
|
||||
|
|
@ -64,11 +60,6 @@ class FusionSubmitDeadline(
|
|||
decimals=0,
|
||||
minimum=1,
|
||||
maximum=10
|
||||
),
|
||||
BoolDef(
|
||||
"suspend_publish",
|
||||
default=False,
|
||||
label="Suspend publish"
|
||||
)
|
||||
]
|
||||
|
||||
|
|
@ -80,10 +71,6 @@ class FusionSubmitDeadline(
|
|||
attribute_values = self.get_attr_values_from_data(
|
||||
instance.data)
|
||||
|
||||
# add suspend_publish attributeValue to instance data
|
||||
instance.data["suspend_publish"] = attribute_values[
|
||||
"suspend_publish"]
|
||||
|
||||
context = instance.context
|
||||
|
||||
key = "__hasRun{}".format(self.__class__.__name__)
|
||||
|
|
@ -94,11 +81,7 @@ class FusionSubmitDeadline(
|
|||
|
||||
from ayon_core.hosts.fusion.api.lib import get_frame_path
|
||||
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
deadline_url = instance.data["deadline"]["url"]
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
# Collect all saver instances in context that are to be rendered
|
||||
|
|
@ -258,7 +241,8 @@ class FusionSubmitDeadline(
|
|||
|
||||
# E.g. http://192.168.0.1:8082/api/jobs
|
||||
url = "{}/api/jobs".format(deadline_url)
|
||||
response = requests.post(url, json=payload)
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
response = requests_post(url, json=payload, auth=auth)
|
||||
if not response.ok:
|
||||
raise Exception(response.text)
|
||||
|
||||
|
|
|
|||
|
|
@ -10,7 +10,6 @@ from openpype_modules.deadline import abstract_submit_deadline
|
|||
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
|
||||
from ayon_core.lib import (
|
||||
is_in_tests,
|
||||
BoolDef,
|
||||
TextDef,
|
||||
NumberDef
|
||||
)
|
||||
|
|
@ -90,11 +89,6 @@ class HoudiniSubmitDeadline(
|
|||
@classmethod
|
||||
def get_attribute_defs(cls):
|
||||
return [
|
||||
BoolDef(
|
||||
"suspend_publish",
|
||||
default=False,
|
||||
label="Suspend publish"
|
||||
),
|
||||
NumberDef(
|
||||
"priority",
|
||||
label="Priority",
|
||||
|
|
|
|||
|
|
@ -187,11 +187,13 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
payload_data, project_settings)
|
||||
job_infos, plugin_infos = payload
|
||||
for job_info, plugin_info in zip(job_infos, plugin_infos):
|
||||
self.submit(self.assemble_payload(job_info, plugin_info))
|
||||
self.submit(self.assemble_payload(job_info, plugin_info),
|
||||
instance.data["deadline"]["auth"])
|
||||
else:
|
||||
payload = self._use_published_name(payload_data, project_settings)
|
||||
job_info, plugin_info = payload
|
||||
self.submit(self.assemble_payload(job_info, plugin_info))
|
||||
self.submit(self.assemble_payload(job_info, plugin_info),
|
||||
instance.data["deadline"]["auth"])
|
||||
|
||||
def _use_published_name(self, data, project_settings):
|
||||
# Not all hosts can import these modules.
|
||||
|
|
|
|||
|
|
@ -292,7 +292,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
|
||||
return plugin_payload
|
||||
|
||||
def process_submission(self):
|
||||
def process_submission(self, auth=None):
|
||||
from maya import cmds
|
||||
instance = self._instance
|
||||
|
||||
|
|
@ -332,7 +332,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
if "vrayscene" in instance.data["families"]:
|
||||
self.log.debug("Submitting V-Ray scene render..")
|
||||
vray_export_payload = self._get_vray_export_payload(payload_data)
|
||||
export_job = self.submit(vray_export_payload)
|
||||
export_job = self.submit(vray_export_payload,
|
||||
instance.data["deadline"]["auth"])
|
||||
|
||||
payload = self._get_vray_render_payload(payload_data)
|
||||
|
||||
|
|
@ -351,7 +352,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
else:
|
||||
# Submit main render job
|
||||
job_info, plugin_info = payload
|
||||
self.submit(self.assemble_payload(job_info, plugin_info))
|
||||
self.submit(self.assemble_payload(job_info, plugin_info),
|
||||
instance.data["deadline"]["auth"])
|
||||
|
||||
def _tile_render(self, payload):
|
||||
"""Submit as tile render per frame with dependent assembly jobs."""
|
||||
|
|
@ -451,7 +453,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
# Submit frame tile jobs
|
||||
frame_tile_job_id = {}
|
||||
for frame, tile_job_payload in frame_payloads.items():
|
||||
job_id = self.submit(tile_job_payload)
|
||||
job_id = self.submit(tile_job_payload,
|
||||
instance.data["deadline"]["auth"])
|
||||
frame_tile_job_id[frame] = job_id
|
||||
|
||||
# Define assembly payloads
|
||||
|
|
@ -559,7 +562,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
"submitting assembly job {} of {}".format(i + 1,
|
||||
num_assemblies)
|
||||
)
|
||||
assembly_job_id = self.submit(payload)
|
||||
assembly_job_id = self.submit(payload,
|
||||
instance.data["deadline"]["auth"])
|
||||
assembly_job_ids.append(assembly_job_id)
|
||||
|
||||
instance.data["assemblySubmissionJobs"] = assembly_job_ids
|
||||
|
|
|
|||
|
|
@ -4,9 +4,9 @@ import json
|
|||
import getpass
|
||||
from datetime import datetime
|
||||
|
||||
import requests
|
||||
import pyblish.api
|
||||
|
||||
from openpype_modules.deadline.abstract_submit_deadline import requests_post
|
||||
from ayon_core.pipeline.publish import (
|
||||
AYONPyblishPluginMixin
|
||||
)
|
||||
|
|
@ -76,11 +76,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
default=cls.use_gpu,
|
||||
label="Use GPU"
|
||||
),
|
||||
BoolDef(
|
||||
"suspend_publish",
|
||||
default=False,
|
||||
label="Suspend publish"
|
||||
),
|
||||
BoolDef(
|
||||
"workfile_dependency",
|
||||
default=cls.workfile_dependency,
|
||||
|
|
@ -100,20 +95,12 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
instance.data["attributeValues"] = self.get_attr_values_from_data(
|
||||
instance.data)
|
||||
|
||||
# add suspend_publish attributeValue to instance data
|
||||
instance.data["suspend_publish"] = instance.data["attributeValues"][
|
||||
"suspend_publish"]
|
||||
|
||||
families = instance.data["families"]
|
||||
|
||||
node = instance.data["transientData"]["node"]
|
||||
context = instance.context
|
||||
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
deadline_url = instance.data["deadline"]["url"]
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
self.deadline_url = "{}/api/jobs".format(deadline_url)
|
||||
|
|
@ -436,7 +423,9 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
|
|||
|
||||
self.log.debug("__ expectedFiles: `{}`".format(
|
||||
instance.data["expectedFiles"]))
|
||||
response = requests.post(self.deadline_url, json=payload, timeout=10)
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
response = requests_post(self.deadline_url, json=payload, timeout=10,
|
||||
auth=auth)
|
||||
|
||||
if not response.ok:
|
||||
raise Exception(response.text)
|
||||
|
|
|
|||
|
|
@ -5,10 +5,10 @@ import json
|
|||
import re
|
||||
from copy import deepcopy
|
||||
|
||||
import requests
|
||||
import ayon_api
|
||||
import pyblish.api
|
||||
|
||||
from openpype_modules.deadline.abstract_submit_deadline import requests_post
|
||||
from ayon_core.pipeline import publish
|
||||
from ayon_core.lib import EnumDef, is_in_tests
|
||||
from ayon_core.pipeline.version_start import get_versioning_start
|
||||
|
|
@ -147,9 +147,6 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
|
|||
|
||||
instance_settings = self.get_attr_values_from_data(instance.data)
|
||||
initial_status = instance_settings.get("publishJobState", "Active")
|
||||
# TODO: Remove this backwards compatibility of `suspend_publish`
|
||||
if instance.data.get("suspend_publish"):
|
||||
initial_status = "Suspended"
|
||||
|
||||
args = [
|
||||
"--headless",
|
||||
|
|
@ -212,7 +209,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
|
|||
self.log.debug("Submitting Deadline publish job ...")
|
||||
|
||||
url = "{}/api/jobs".format(self.deadline_url)
|
||||
response = requests.post(url, json=payload, timeout=10)
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
response = requests_post(url, json=payload, timeout=10,
|
||||
auth=auth)
|
||||
if not response.ok:
|
||||
raise Exception(response.text)
|
||||
|
||||
|
|
@ -344,11 +343,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
|
|||
|
||||
deadline_publish_job_id = None
|
||||
if submission_type == "deadline":
|
||||
# get default deadline webservice url from deadline module
|
||||
self.deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
self.deadline_url = instance.data.get("deadlineUrl")
|
||||
self.deadline_url = instance.data["deadline"]["url"]
|
||||
assert self.deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
deadline_publish_job_id = \
|
||||
|
|
@ -356,7 +351,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
|
|||
|
||||
# Inject deadline url to instances.
|
||||
for inst in instances:
|
||||
inst["deadlineUrl"] = self.deadline_url
|
||||
if "deadline" not in inst:
|
||||
inst["deadline"] = {}
|
||||
inst["deadline"] = instance.data["deadline"]
|
||||
|
||||
# publish job file
|
||||
publish_job = {
|
||||
|
|
|
|||
|
|
@ -5,11 +5,11 @@ import json
|
|||
import re
|
||||
from copy import deepcopy
|
||||
|
||||
import requests
|
||||
import clique
|
||||
import ayon_api
|
||||
import pyblish.api
|
||||
|
||||
from openpype_modules.deadline.abstract_submit_deadline import requests_post
|
||||
from ayon_core.pipeline import publish
|
||||
from ayon_core.lib import EnumDef, is_in_tests
|
||||
from ayon_core.pipeline.version_start import get_versioning_start
|
||||
|
|
@ -88,9 +88,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
hosts = ["fusion", "max", "maya", "nuke", "houdini",
|
||||
"celaction", "aftereffects", "harmony", "blender"]
|
||||
|
||||
families = ["render.farm", "render.frames_farm",
|
||||
"prerender.farm", "prerender.frames_farm",
|
||||
"renderlayer", "imagesequence",
|
||||
families = ["render", "render.farm", "render.frames_farm",
|
||||
"prerender", "prerender.farm", "prerender.frames_farm",
|
||||
"renderlayer", "imagesequence", "image",
|
||||
"vrayscene", "maxrender",
|
||||
"arnold_rop", "mantra_rop",
|
||||
"karma_rop", "vray_rop",
|
||||
|
|
@ -224,9 +224,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
|
||||
instance_settings = self.get_attr_values_from_data(instance.data)
|
||||
initial_status = instance_settings.get("publishJobState", "Active")
|
||||
# TODO: Remove this backwards compatibility of `suspend_publish`
|
||||
if instance.data.get("suspend_publish"):
|
||||
initial_status = "Suspended"
|
||||
|
||||
args = [
|
||||
"--headless",
|
||||
|
|
@ -306,7 +303,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
self.log.debug("Submitting Deadline publish job ...")
|
||||
|
||||
url = "{}/api/jobs".format(self.deadline_url)
|
||||
response = requests.post(url, json=payload, timeout=10)
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
response = requests_post(url, json=payload, timeout=10,
|
||||
auth=auth)
|
||||
if not response.ok:
|
||||
raise Exception(response.text)
|
||||
|
||||
|
|
@ -314,7 +313,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
|
||||
return deadline_publish_job_id
|
||||
|
||||
|
||||
def process(self, instance):
|
||||
# type: (pyblish.api.Instance) -> None
|
||||
"""Process plugin.
|
||||
|
|
@ -461,18 +459,15 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
|
|||
}
|
||||
|
||||
# get default deadline webservice url from deadline module
|
||||
self.deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
self.deadline_url = instance.data.get("deadlineUrl")
|
||||
self.deadline_url = instance.data["deadline"]["url"]
|
||||
assert self.deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
deadline_publish_job_id = \
|
||||
self._submit_deadline_post_job(instance, render_job, instances)
|
||||
|
||||
# Inject deadline url to instances.
|
||||
# Inject deadline url to instances to query DL for job id for overrides
|
||||
for inst in instances:
|
||||
inst["deadlineUrl"] = self.deadline_url
|
||||
inst["deadline"] = instance.data["deadline"]
|
||||
|
||||
# publish job file
|
||||
publish_job = {
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
import pyblish.api
|
||||
|
||||
from ayon_core.pipeline import PublishXmlValidationError
|
||||
|
||||
from openpype_modules.deadline.abstract_submit_deadline import requests_get
|
||||
|
||||
|
||||
|
|
@ -8,27 +10,42 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
|
|||
|
||||
label = "Validate Deadline Web Service"
|
||||
order = pyblish.api.ValidatorOrder
|
||||
hosts = ["maya", "nuke"]
|
||||
families = ["renderlayer", "render"]
|
||||
hosts = ["maya", "nuke", "aftereffects", "harmony", "fusion"]
|
||||
families = ["renderlayer", "render", "render.farm"]
|
||||
|
||||
# cache
|
||||
responses = {}
|
||||
|
||||
def process(self, instance):
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
self.log.debug(
|
||||
"We have deadline URL on instance {}".format(deadline_url)
|
||||
)
|
||||
if not instance.data.get("farm"):
|
||||
self.log.debug("Should not be processed on farm, skipping.")
|
||||
return
|
||||
|
||||
deadline_url = instance.data["deadline"]["url"]
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
kwargs = {}
|
||||
if instance.data["deadline"]["require_authentication"]:
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
kwargs["auth"] = auth
|
||||
|
||||
if not auth[0]:
|
||||
raise PublishXmlValidationError(
|
||||
self,
|
||||
"Deadline requires authentication. "
|
||||
"At least username is required to be set in "
|
||||
"Site Settings.")
|
||||
|
||||
if deadline_url not in self.responses:
|
||||
self.responses[deadline_url] = requests_get(deadline_url)
|
||||
self.responses[deadline_url] = requests_get(deadline_url, **kwargs)
|
||||
|
||||
response = self.responses[deadline_url]
|
||||
if response.status_code == 401:
|
||||
raise PublishXmlValidationError(
|
||||
self,
|
||||
"Deadline requires authentication. "
|
||||
"Provided credentials are not working. "
|
||||
"Please change them in Site Settings")
|
||||
assert response.ok, "Response must be ok"
|
||||
assert response.text.startswith("Deadline Web Service "), (
|
||||
"Web service did not respond with 'Deadline Web Service'"
|
||||
|
|
|
|||
|
|
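The validator above keeps one root response per webservice URL; a simplified version of that caching, assuming the requests_get helper from the Deadline addon:

_responses = {}

def get_service_response(url, auth=None):
    # One GET per URL per publish session; later instances reuse it.
    if url not in _responses:
        kwargs = {"auth": auth} if auth else {}
        _responses[url] = requests_get(url, **kwargs)
    return _responses[url]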
@ -37,8 +37,9 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
|
|||
self.log.debug("Skipping local instance.")
|
||||
return
|
||||
|
||||
deadline_url = self.get_deadline_url(instance)
|
||||
pools = self.get_pools(deadline_url)
|
||||
deadline_url = instance.data["deadline"]["url"]
|
||||
pools = self.get_pools(deadline_url,
|
||||
instance.data["deadline"].get("auth"))
|
||||
|
||||
invalid_pools = {}
|
||||
primary_pool = instance.data.get("primaryPool")
|
||||
|
|
@ -61,22 +62,18 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
|
|||
formatting_data={"pools_str": ", ".join(pools)}
|
||||
)
|
||||
|
||||
def get_deadline_url(self, instance):
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
if instance.data.get("deadlineUrl"):
|
||||
# if custom one is set in instance, use that
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
return deadline_url
|
||||
|
||||
def get_pools(self, deadline_url):
|
||||
def get_pools(self, deadline_url, auth):
|
||||
if deadline_url not in self.pools_per_url:
|
||||
self.log.debug(
|
||||
"Querying available pools for Deadline url: {}".format(
|
||||
deadline_url)
|
||||
)
|
||||
pools = DeadlineModule.get_deadline_pools(deadline_url,
|
||||
auth=auth,
|
||||
log=self.log)
|
||||
# some DL return "none" as a pool name
|
||||
if "none" not in pools:
|
||||
pools.append("none")
|
||||
self.log.info("Available pools: {}".format(pools))
|
||||
self.pools_per_url[deadline_url] = pools
|
||||
|
||||
|
|
|
|||
|
|
@ -199,16 +199,16 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
|
|||
(dict): Job info from Deadline
|
||||
|
||||
"""
|
||||
# get default deadline webservice url from deadline module
|
||||
deadline_url = instance.context.data["defaultDeadline"]
|
||||
# if custom one is set in instance, use that
|
||||
if instance.data.get("deadlineUrl"):
|
||||
deadline_url = instance.data.get("deadlineUrl")
|
||||
deadline_url = instance.data["deadline"]["url"]
|
||||
assert deadline_url, "Requires Deadline Webservice URL"
|
||||
|
||||
url = "{}/api/jobs?JobID={}".format(deadline_url, job_id)
|
||||
try:
|
||||
response = requests_get(url)
|
||||
kwargs = {}
|
||||
auth = instance.data["deadline"]["auth"]
|
||||
if auth:
|
||||
kwargs["auth"] = auth
|
||||
response = requests_get(url, **kwargs)
|
||||
except requests.exceptions.ConnectionError:
|
||||
self.log.error("Deadline is not accessible at "
|
||||
"{}".format(deadline_url))
|
||||
|
|
|
|||
1
client/ayon_core/modules/deadline/version.py
Normal file
|
|
@ -0,0 +1 @@
|
|||
__version__ = "0.1.10"
|
||||
|
|
@ -1987,12 +1987,12 @@ class CreateContext:
|
|||
"Folder '{}' was not found".format(folder_path)
|
||||
)
|
||||
|
||||
task_name = None
|
||||
if task_entity is None:
|
||||
task_name = self.get_current_task_name()
|
||||
task_entity = ayon_api.get_task_by_name(
|
||||
project_name, folder_entity["id"], task_name
|
||||
)
|
||||
current_task_name = self.get_current_task_name()
|
||||
if current_task_name:
|
||||
task_entity = ayon_api.get_task_by_name(
|
||||
project_name, folder_entity["id"], current_task_name
|
||||
)
|
||||
|
||||
if pre_create_data is None:
|
||||
pre_create_data = {}
|
||||
|
|
@ -2018,7 +2018,7 @@ class CreateContext:
|
|||
|
||||
instance_data = {
|
||||
"folderPath": folder_entity["path"],
|
||||
"task": task_name,
|
||||
"task": task_entity["name"] if task_entity else None,
|
||||
"productType": creator.product_type,
|
||||
"variant": variant
|
||||
}
|
||||
|
|
@ -2053,7 +2053,7 @@ class CreateContext:
|
|||
exc_info = sys.exc_info()
|
||||
self.log.warning(error_message.format(identifier, exc_info[1]))
|
||||
|
||||
except:
|
||||
except: # noqa: E722
|
||||
add_traceback = True
|
||||
exc_info = sys.exc_info()
|
||||
self.log.warning(
|
||||
|
|
@ -2163,7 +2163,7 @@ class CreateContext:
|
|||
exc_info = sys.exc_info()
|
||||
self.log.warning(error_message.format(identifier, exc_info[1]))
|
||||
|
||||
except:
|
||||
except: # noqa: E722
|
||||
failed = True
|
||||
add_traceback = True
|
||||
exc_info = sys.exc_info()
|
||||
|
|
@ -2197,7 +2197,7 @@ class CreateContext:
|
|||
try:
|
||||
convertor.find_instances()
|
||||
|
||||
except:
|
||||
except: # noqa: E722
|
||||
failed_info.append(
|
||||
prepare_failed_convertor_operation_info(
|
||||
convertor.identifier, sys.exc_info()
|
||||
|
|
@ -2373,7 +2373,7 @@ class CreateContext:
|
|||
exc_info = sys.exc_info()
|
||||
self.log.warning(error_message.format(identifier, exc_info[1]))
|
||||
|
||||
except:
|
||||
except: # noqa: E722
|
||||
failed = True
|
||||
add_traceback = True
|
||||
exc_info = sys.exc_info()
|
||||
|
|
@ -2440,7 +2440,7 @@ class CreateContext:
|
|||
error_message.format(identifier, exc_info[1])
|
||||
)
|
||||
|
||||
except:
|
||||
except: # noqa: E722
|
||||
failed = True
|
||||
add_traceback = True
|
||||
exc_info = sys.exc_info()
|
||||
|
|
@ -2546,7 +2546,7 @@ class CreateContext:
|
|||
try:
|
||||
self.run_convertor(convertor_identifier)
|
||||
|
||||
except:
|
||||
except: # noqa: E722
|
||||
failed_info.append(
|
||||
prepare_failed_convertor_operation_info(
|
||||
convertor_identifier, sys.exc_info()
|
||||
|
|
|
|||
|
|
@ -80,6 +80,7 @@ class RenderInstance(object):
|
|||
anatomyData = attr.ib(default=None)
|
||||
outputDir = attr.ib(default=None)
|
||||
context = attr.ib(default=None)
|
||||
deadline = attr.ib(default=None)
|
||||
|
||||
# The source instance the data of this render instance should merge into
|
||||
source_instance = attr.ib(default=None, type=pyblish.api.Instance)
|
||||
|
|
@ -215,13 +216,12 @@ class AbstractCollectRender(pyblish.api.ContextPlugin):
|
|||
|
||||
# add additional data
|
||||
data = self.add_additional_data(data)
|
||||
render_instance_dict = attr.asdict(render_instance)
|
||||
|
||||
# Merge into source instance if provided, otherwise create instance
|
||||
instance = render_instance_dict.pop("source_instance", None)
|
||||
instance = render_instance.source_instance
|
||||
if instance is None:
|
||||
instance = context.create_instance(render_instance.name)
|
||||
|
||||
render_instance_dict = attr.asdict(render_instance)
|
||||
instance.data.update(render_instance_dict)
|
||||
instance.data.update(data)
|
||||
|
||||
|
|
|
|||
|
|
@ -73,8 +73,8 @@ def get_folder_template_data(folder_entity, project_name):
|
|||
- 'parent' - direct parent name, project name used if directly under
|
||||
project
|
||||
|
||||
Required document fields:
|
||||
Folder: 'path' -> Plan to require: 'folderType'
|
||||
Required entity fields:
|
||||
Folder: 'path', 'folderType'
|
||||
|
||||
Args:
|
||||
folder_entity (Dict[str, Any]): Folder entity.
|
||||
|
|
@ -101,6 +101,8 @@ def get_folder_template_data(folder_entity, project_name):
|
|||
return {
|
||||
"folder": {
|
||||
"name": folder_name,
|
||||
"type": folder_entity["folderType"],
|
||||
"path": path,
|
||||
},
|
||||
"asset": folder_name,
|
||||
"hierarchy": hierarchy,
|
||||
|
|
|
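For illustration, the documented keys could look like this for a hypothetical shot folder (values are made up):

example_template_data = {
    "folder": {"name": "sh010", "type": "Shot", "path": "/shots/sq01/sh010"},
    "asset": "sh010",
    "hierarchy": "shots/sq01",
    "parent": "sq01",
}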
|||
263
client/ayon_core/pipeline/thumbnails.py
Normal file
|
|
@ -0,0 +1,263 @@
|
|||
import os
|
||||
import time
|
||||
import collections
|
||||
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.lib.local_settings import get_ayon_appdirs
|
||||
|
||||
|
||||
FileInfo = collections.namedtuple(
|
||||
"FileInfo",
|
||||
("path", "size", "modification_time")
|
||||
)
|
||||
|
||||
|
||||
class ThumbnailsCache:
|
||||
"""Cache of thumbnails on local storage.
|
||||
|
||||
Thumbnails are cached in a predefined directory under appdirs. Each project has
|
||||
its own subfolder with thumbnails -> that's because each project has its own
|
||||
thumbnail id validation and file names are thumbnail ids with matching
|
||||
extension. Extensions are predefined (.png and .jpeg).
|
||||
|
||||
The cache has a cleanup mechanism which is triggered on initialization by default.
|
||||
|
||||
The cleanup has 2 levels:
|
||||
1. soft cleanup which removes all files that are older than 'days_alive'
|
||||
2. max size cleanup which removes files until the thumbnails folder
|
||||
contains less than 'max_filesize'
|
||||
- this is time-consuming so it's not triggered automatically
|
||||
|
||||
Args:
|
||||
cleanup (bool): Trigger soft cleanup (Cleanup expired thumbnails).
|
||||
"""
|
||||
|
||||
# Lifetime of thumbnails (in seconds)
|
||||
# - default 3 days
|
||||
days_alive = 3
|
||||
# Max size of thumbnail directory (in bytes)
|
||||
# - default 2 Gb
|
||||
max_filesize = 2 * 1024 * 1024 * 1024
|
||||
|
||||
def __init__(self, cleanup=True):
|
||||
self._thumbnails_dir = None
|
||||
self._days_alive_secs = self.days_alive * 24 * 60 * 60
|
||||
if cleanup:
|
||||
self.cleanup()
|
||||
|
||||
def get_thumbnails_dir(self):
|
||||
"""Root directory where thumbnails are stored.
|
||||
|
||||
Returns:
|
||||
str: Path to thumbnails root.
|
||||
"""
|
||||
|
||||
if self._thumbnails_dir is None:
|
||||
self._thumbnails_dir = get_ayon_appdirs("thumbnails")
|
||||
return self._thumbnails_dir
|
||||
|
||||
thumbnails_dir = property(get_thumbnails_dir)
|
||||
|
||||
def get_thumbnails_dir_file_info(self):
|
||||
"""Get information about all files in thumbnails directory.
|
||||
|
||||
Returns:
|
||||
List[FileInfo]: List of file information about all files.
|
||||
"""
|
||||
|
||||
thumbnails_dir = self.thumbnails_dir
|
||||
files_info = []
|
||||
if not os.path.exists(thumbnails_dir):
|
||||
return files_info
|
||||
|
||||
for root, _, filenames in os.walk(thumbnails_dir):
|
||||
for filename in filenames:
|
||||
path = os.path.join(root, filename)
|
||||
files_info.append(FileInfo(
|
||||
path, os.path.getsize(path), os.path.getmtime(path)
|
||||
))
|
||||
return files_info
|
||||
|
||||
def get_thumbnails_dir_size(self, files_info=None):
|
||||
"""Got full size of thumbnail directory.
|
||||
|
||||
Args:
|
||||
files_info (List[FileInfo]): Prepared file information about
|
||||
files in thumbnail directory.
|
||||
|
||||
Returns:
|
||||
int: File size of all files in thumbnail directory.
|
||||
"""
|
||||
|
||||
if files_info is None:
|
||||
files_info = self.get_thumbnails_dir_file_info()
|
||||
|
||||
if not files_info:
|
||||
return 0
|
||||
|
||||
return sum(
|
||||
file_info.size
|
||||
for file_info in files_info
|
||||
)
|
||||
|
||||
def cleanup(self, check_max_size=False):
|
||||
"""Cleanup thumbnails directory.
|
||||
|
||||
Args:
|
||||
check_max_size (bool): Also cleanup files to match max size of
|
||||
thumbnails directory.
|
||||
"""
|
||||
|
||||
thumbnails_dir = self.get_thumbnails_dir()
|
||||
# Skip if thumbnails dir does not exist yet
|
||||
if not os.path.exists(thumbnails_dir):
|
||||
return
|
||||
|
||||
self._soft_cleanup(thumbnails_dir)
|
||||
if check_max_size:
|
||||
self._max_size_cleanup(thumbnails_dir)
|
||||
|
||||
def _soft_cleanup(self, thumbnails_dir):
|
||||
current_time = time.time()
|
||||
for root, _, filenames in os.walk(thumbnails_dir):
|
||||
for filename in filenames:
|
||||
path = os.path.join(root, filename)
|
||||
modification_time = os.path.getmtime(path)
|
||||
if current_time - modification_time > self._days_alive_secs:
|
||||
os.remove(path)
|
||||
|
||||
def _max_size_cleanup(self, thumbnails_dir):
|
||||
files_info = self.get_thumbnails_dir_file_info()
|
||||
size = self.get_thumbnails_dir_size(files_info)
|
||||
if size < self.max_filesize:
|
||||
return
|
||||
|
||||
sorted_file_info = collections.deque(
|
||||
sorted(files_info, key=lambda item: item.modification_time)
|
||||
)
|
||||
diff = size - self.max_filesize
|
||||
while diff > 0:
|
||||
if not sorted_file_info:
|
||||
break
|
||||
|
||||
file_info = sorted_file_info.popleft()
|
||||
diff -= file_info.size
|
||||
os.remove(file_info.path)
|
||||
|
||||
def get_thumbnail_filepath(self, project_name, thumbnail_id):
|
||||
"""Get thumbnail by thumbnail id.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project.
|
||||
thumbnail_id (str): Thumbnail id.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Path to thumbnail image or None if thumbnail
|
||||
is not cached yet.
|
||||
"""
|
||||
|
||||
if not thumbnail_id:
|
||||
return None
|
||||
|
||||
for ext in (
|
||||
".png",
|
||||
".jpeg",
|
||||
):
|
||||
filepath = os.path.join(
|
||||
self.thumbnails_dir, project_name, thumbnail_id + ext
|
||||
)
|
||||
if os.path.exists(filepath):
|
||||
return filepath
|
||||
return None
|
||||
|
||||
def get_project_dir(self, project_name):
|
||||
"""Path to root directory for specific project.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project for which root directory path
|
||||
should be returned.
|
||||
|
||||
Returns:
|
||||
str: Path to root of project's thumbnails.
|
||||
"""
|
||||
|
||||
return os.path.join(self.thumbnails_dir, project_name)
|
||||
|
||||
def make_sure_project_dir_exists(self, project_name):
|
||||
project_dir = self.get_project_dir(project_name)
|
||||
if not os.path.exists(project_dir):
|
||||
os.makedirs(project_dir)
|
||||
return project_dir
|
||||
|
||||
def store_thumbnail(self, project_name, thumbnail_id, content, mime_type):
|
||||
"""Store thumbnail to cache folder.
|
||||
|
||||
Args:
|
||||
project_name (str): Project where the thumbnail belong to.
|
||||
thumbnail_id (str): Thumbnail id.
|
||||
content (bytes): Byte content of thumbnail file.
|
||||
mime_type (str): Type of content.
|
||||
|
||||
Returns:
|
||||
str: Path to cached thumbnail image file.
|
||||
"""
|
||||
|
||||
if mime_type == "image/png":
|
||||
ext = ".png"
|
||||
elif mime_type == "image/jpeg":
|
||||
ext = ".jpeg"
|
||||
else:
|
||||
raise ValueError(
|
||||
"Unknown mime type for thumbnail \"{}\"".format(mime_type))
|
||||
|
||||
project_dir = self.make_sure_project_dir_exists(project_name)
|
||||
thumbnail_path = os.path.join(project_dir, thumbnail_id + ext)
|
||||
with open(thumbnail_path, "wb") as stream:
|
||||
stream.write(content)
|
||||
|
||||
current_time = time.time()
|
||||
os.utime(thumbnail_path, (current_time, current_time))
|
||||
|
||||
return thumbnail_path
|
||||
|
||||
|
||||
class _CacheItems:
|
||||
thumbnails_cache = ThumbnailsCache()
|
||||
|
||||
|
||||
def get_thumbnail_path(project_name, thumbnail_id):
|
||||
"""Get path to thumbnail image.
|
||||
|
||||
Args:
|
||||
project_name (str): Project where thumbnail belongs to.
|
||||
thumbnail_id (Union[str, None]): Thumbnail id.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Path to thumbnail image or None if thumbnail
|
||||
id is not valid or thumbnail was not possible to receive.
|
||||
|
||||
"""
|
||||
if not thumbnail_id:
|
||||
return None
|
||||
|
||||
filepath = _CacheItems.thumbnails_cache.get_thumbnail_filepath(
|
||||
project_name, thumbnail_id
|
||||
)
|
||||
if filepath is not None:
|
||||
return filepath
|
||||
|
||||
# 'ayon_api' had a bug, public function
|
||||
# 'get_thumbnail_by_id' did not return output of
|
||||
# 'ServerAPI' method.
|
||||
con = ayon_api.get_server_api_connection()
|
||||
result = con.get_thumbnail_by_id(project_name, thumbnail_id)
|
||||
|
||||
if result is not None and result.is_valid:
|
||||
return _CacheItems.thumbnails_cache.store_thumbnail(
|
||||
project_name,
|
||||
thumbnail_id,
|
||||
result.content,
|
||||
result.content_type
|
||||
)
|
||||
return None
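
A minimal usage sketch of the cached lookup above; the project name and thumbnail id below are placeholder values, and the import path follows the one used elsewhere in this change:

# Illustrative only: resolve a thumbnail through the local cache, falling
# back to the server when the file is not cached yet.
from ayon_core.pipeline.thumbnails import get_thumbnail_path

project_name = "demo_project"          # placeholder project name
thumbnail_id = "0123456789abcdef0123"  # placeholder thumbnail id

path = get_thumbnail_path(project_name, thumbnail_id)
if path is None:
    print("Thumbnail is not available")
else:
    print("Thumbnail cached at:", path)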

@@ -33,6 +33,7 @@ import collections

import pyblish.api
import ayon_api

from ayon_core.pipeline.template_data import get_folder_template_data
from ayon_core.pipeline.version_start import get_versioning_start
@@ -383,24 +384,11 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
# - 'folder', 'hierarchy', 'parent', 'folder'
folder_entity = instance.data.get("folderEntity")
if folder_entity:
folder_name = folder_entity["name"]
folder_path = folder_entity["path"]
hierarchy_parts = folder_path.split("/")
hierarchy_parts.pop(0)
hierarchy_parts.pop(-1)
parent_name = project_entity["name"]
if hierarchy_parts:
parent_name = hierarchy_parts[-1]

hierarchy = "/".join(hierarchy_parts)
anatomy_data.update({
"asset": folder_name,
"hierarchy": hierarchy,
"parent": parent_name,
"folder": {
"name": folder_name,
},
})
folder_data = get_folder_template_data(
folder_entity,
project_entity["name"]
)
anatomy_data.update(folder_data)
return

if instance.data.get("newAssetPublishing"):
@@ -418,6 +406,11 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
"parent": parent_name,
"folder": {
"name": folder_name,
"path": instance.data["folderPath"],
# TODO get folder type from hierarchy
# Using 'Shot' is current default behavior of editorial
# (or 'newAssetPublishing') publishing.
"type": "Shot",
},
})
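
For orientation, the folder data merged into anatomy_data by get_folder_template_data covers the keys that were previously filled by hand; the mapping below is only a sketch with made-up values, not the exact return value of the helper:

# Rough shape of the data merged into anatomy_data (illustrative values only).
anatomy_data = {}
folder_data = {
    "asset": "sh010",
    "hierarchy": "episodes/ep01",
    "parent": "ep01",
    "folder": {
        "name": "sh010",
        "path": "/episodes/ep01/sh010",
        "type": "Shot",
    },
}
anatomy_data.update(folder_data)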

@@ -42,7 +42,7 @@ def prepare_changes(old_entity, new_entity):

Returns:
dict[str, Any]: Changes that have new entity.

"""
changes = {}
for key in set(new_entity.keys()):
@@ -121,6 +121,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"setdress",
"layout",
"ass",
"assProxy",
"vdbcache",
"scene",
"vrayproxy",

BIN client/ayon_core/resources/app_icons/3de4.png (new file, 16 KiB)
Binary file not shown.
@@ -104,14 +104,11 @@ class WebServerTool:
again. In that case, use existing running webserver.
Check here is easier than capturing exception from thread.
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
result = True
try:
sock.bind((host_name, port))
result = False
except:
print("Port is in use")
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as con:
result = con.connect_ex((host_name, port)) == 0

if result:
print(f"Port {port} is already in use")
return result

def call(self, func):
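
The rewritten check above probes the port with a TCP connection attempt instead of binding a socket; the same idea as a standalone sketch (the helper name, host and port are placeholder values):

import socket

def is_port_in_use(host_name="localhost", port=8079):
    # connect_ex returns 0 when something is already listening on the port.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as con:
        return con.connect_ex((host_name, port)) == 0

print(is_port_in_use())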
@@ -1,234 +1,15 @@
import os
import time
import collections

import ayon_api
import appdirs

from ayon_core.lib import NestedCacheItem

FileInfo = collections.namedtuple(
"FileInfo",
("path", "size", "modification_time")
)


class ThumbnailsCache:
"""Cache of thumbnails on local storage.

Thumbnails are cached to appdirs to predefined directory. Each project has
own subfolder with thumbnails -> that's because each project has own
thumbnail id validation and file names are thumbnail ids with matching
extension. Extensions are predefined (.png and .jpeg).

Cache has cleanup mechanism which is triggered on initialized by default.

The cleanup has 2 levels:
1. soft cleanup which remove all files that are older then 'days_alive'
2. max size cleanup which remove all files until the thumbnails folder
contains less then 'max_filesize'
- this is time consuming so it's not triggered automatically

Args:
cleanup (bool): Trigger soft cleanup (Cleanup expired thumbnails).
"""

# Lifetime of thumbnails (in seconds)
# - default 3 days
days_alive = 3
# Max size of thumbnail directory (in bytes)
# - default 2 Gb
max_filesize = 2 * 1024 * 1024 * 1024

def __init__(self, cleanup=True):
self._thumbnails_dir = None
self._days_alive_secs = self.days_alive * 24 * 60 * 60
if cleanup:
self.cleanup()

def get_thumbnails_dir(self):
"""Root directory where thumbnails are stored.

Returns:
str: Path to thumbnails root.
"""

if self._thumbnails_dir is None:
# TODO use generic function
directory = appdirs.user_data_dir("AYON", "Ynput")
self._thumbnails_dir = os.path.join(directory, "thumbnails")
return self._thumbnails_dir

thumbnails_dir = property(get_thumbnails_dir)

def get_thumbnails_dir_file_info(self):
"""Get information about all files in thumbnails directory.

Returns:
List[FileInfo]: List of file information about all files.
"""

thumbnails_dir = self.thumbnails_dir
files_info = []
if not os.path.exists(thumbnails_dir):
return files_info

for root, _, filenames in os.walk(thumbnails_dir):
for filename in filenames:
path = os.path.join(root, filename)
files_info.append(FileInfo(
path, os.path.getsize(path), os.path.getmtime(path)
))
return files_info

def get_thumbnails_dir_size(self, files_info=None):
"""Got full size of thumbnail directory.

Args:
files_info (List[FileInfo]): Prepared file information about
files in thumbnail directory.

Returns:
int: File size of all files in thumbnail directory.
"""

if files_info is None:
files_info = self.get_thumbnails_dir_file_info()

if not files_info:
return 0

return sum(
file_info.size
for file_info in files_info
)

def cleanup(self, check_max_size=False):
"""Cleanup thumbnails directory.

Args:
check_max_size (bool): Also cleanup files to match max size of
thumbnails directory.
"""

thumbnails_dir = self.get_thumbnails_dir()
# Skip if thumbnails dir does not exist yet
if not os.path.exists(thumbnails_dir):
return

self._soft_cleanup(thumbnails_dir)
if check_max_size:
self._max_size_cleanup(thumbnails_dir)

def _soft_cleanup(self, thumbnails_dir):
current_time = time.time()
for root, _, filenames in os.walk(thumbnails_dir):
for filename in filenames:
path = os.path.join(root, filename)
modification_time = os.path.getmtime(path)
if current_time - modification_time > self._days_alive_secs:
os.remove(path)

def _max_size_cleanup(self, thumbnails_dir):
files_info = self.get_thumbnails_dir_file_info()
size = self.get_thumbnails_dir_size(files_info)
if size < self.max_filesize:
return

sorted_file_info = collections.deque(
sorted(files_info, key=lambda item: item.modification_time)
)
diff = size - self.max_filesize
while diff > 0:
if not sorted_file_info:
break

file_info = sorted_file_info.popleft()
diff -= file_info.size
os.remove(file_info.path)

def get_thumbnail_filepath(self, project_name, thumbnail_id):
"""Get thumbnail by thumbnail id.

Args:
project_name (str): Name of project.
thumbnail_id (str): Thumbnail id.

Returns:
Union[str, None]: Path to thumbnail image or None if thumbnail
is not cached yet.
"""

if not thumbnail_id:
return None

for ext in (
".png",
".jpeg",
):
filepath = os.path.join(
self.thumbnails_dir, project_name, thumbnail_id + ext
)
if os.path.exists(filepath):
return filepath
return None

def get_project_dir(self, project_name):
"""Path to root directory for specific project.

Args:
project_name (str): Name of project for which root directory path
should be returned.

Returns:
str: Path to root of project's thumbnails.
"""

return os.path.join(self.thumbnails_dir, project_name)

def make_sure_project_dir_exists(self, project_name):
project_dir = self.get_project_dir(project_name)
if not os.path.exists(project_dir):
os.makedirs(project_dir)
return project_dir

def store_thumbnail(self, project_name, thumbnail_id, content, mime_type):
"""Store thumbnail to cache folder.

Args:
project_name (str): Project where the thumbnail belong to.
thumbnail_id (str): Id of thumbnail.
content (bytes): Byte content of thumbnail file.
mime_data (str): Type of content.

Returns:
str: Path to cached thumbnail image file.
"""

if mime_type == "image/png":
ext = ".png"
elif mime_type == "image/jpeg":
ext = ".jpeg"
else:
raise ValueError(
"Unknown mime type for thumbnail \"{}\"".format(mime_type))

project_dir = self.make_sure_project_dir_exists(project_name)
thumbnail_path = os.path.join(project_dir, thumbnail_id + ext)
with open(thumbnail_path, "wb") as stream:
stream.write(content)

current_time = time.time()
os.utime(thumbnail_path, (current_time, current_time))

return thumbnail_path
from ayon_core.pipeline.thumbnails import get_thumbnail_path


class ThumbnailsModel:
entity_cache_lifetime = 240 # In seconds

def __init__(self):
self._thumbnail_cache = ThumbnailsCache()
self._paths_cache = collections.defaultdict(dict)
self._folders_cache = NestedCacheItem(
levels=2, lifetime=self.entity_cache_lifetime)
@@ -283,28 +64,7 @@ class ThumbnailsModel:
if thumbnail_id in project_cache:
return project_cache[thumbnail_id]

filepath = self._thumbnail_cache.get_thumbnail_filepath(
project_name, thumbnail_id
)
if filepath is not None:
project_cache[thumbnail_id] = filepath
return filepath

# 'ayon_api' had a bug, public function
# 'get_thumbnail_by_id' did not return output of
# 'ServerAPI' method.
con = ayon_api.get_server_api_connection()
result = con.get_thumbnail_by_id(project_name, thumbnail_id)
if result is None:
pass

elif result.is_valid:
filepath = self._thumbnail_cache.store_thumbnail(
project_name,
thumbnail_id,
result.content,
result.content_type
)
filepath = get_thumbnail_path(project_name, thumbnail_id)
project_cache[thumbnail_id] = filepath
return filepath
@@ -52,6 +52,7 @@ class SelectionTypes:
class BaseGroupWidget(QtWidgets.QWidget):
selected = QtCore.Signal(str, str, str)
removed_selected = QtCore.Signal()
double_clicked = QtCore.Signal()

def __init__(self, group_name, parent):
super(BaseGroupWidget, self).__init__(parent)

@@ -192,6 +193,7 @@ class ConvertorItemsGroupWidget(BaseGroupWidget):
else:
widget = ConvertorItemCardWidget(item, self)
widget.selected.connect(self._on_widget_selection)
widget.double_clicked(self.double_clicked)
self._widgets_by_id[item.id] = widget
self._content_layout.insertWidget(widget_idx, widget)
widget_idx += 1

@@ -254,6 +256,7 @@ class InstanceGroupWidget(BaseGroupWidget):
)
widget.selected.connect(self._on_widget_selection)
widget.active_changed.connect(self._on_active_changed)
widget.double_clicked.connect(self.double_clicked)
self._widgets_by_id[instance.id] = widget
self._content_layout.insertWidget(widget_idx, widget)
widget_idx += 1

@@ -271,6 +274,7 @@ class CardWidget(BaseClickableFrame):
# Group identifier of card
# - this must be set because if send when mouse is released with card id
_group_identifier = None
double_clicked = QtCore.Signal()

def __init__(self, parent):
super(CardWidget, self).__init__(parent)

@@ -279,6 +283,11 @@ class CardWidget(BaseClickableFrame):
self._selected = False
self._id = None

def mouseDoubleClickEvent(self, event):
super(CardWidget, self).mouseDoubleClickEvent(event)
if self._is_valid_double_click(event):
self.double_clicked.emit()

@property
def id(self):
"""Id of card."""

@@ -312,6 +321,9 @@ class CardWidget(BaseClickableFrame):

self.selected.emit(self._id, self._group_identifier, selection_type)

def _is_valid_double_click(self, event):
return True


class ContextCardWidget(CardWidget):
"""Card for global context.

@@ -527,6 +539,15 @@ class InstanceCardWidget(CardWidget):
def _on_expend_clicked(self):
self._set_expanded()

def _is_valid_double_click(self, event):
widget = self.childAt(event.pos())
if (
widget is self._active_checkbox
or widget is self._expand_btn
):
return False
return True


class InstanceCardView(AbstractInstanceView):
"""Publish access to card view.

@@ -534,6 +555,8 @@ class InstanceCardView(AbstractInstanceView):
Wrapper of all widgets in card view.
"""

double_clicked = QtCore.Signal()

def __init__(self, controller, parent):
super(InstanceCardView, self).__init__(parent)

@@ -715,6 +738,7 @@ class InstanceCardView(AbstractInstanceView):
)
group_widget.active_changed.connect(self._on_active_changed)
group_widget.selected.connect(self._on_widget_selection)
group_widget.double_clicked.connect(self.double_clicked)
self._content_layout.insertWidget(widget_idx, group_widget)
self._widgets_by_group[group_name] = group_widget

@@ -755,6 +779,7 @@ class InstanceCardView(AbstractInstanceView):

widget = ContextCardWidget(self._content_widget)
widget.selected.connect(self._on_widget_selection)
widget.double_clicked.connect(self.double_clicked)

self._context_widget = widget

@@ -778,6 +803,7 @@ class InstanceCardView(AbstractInstanceView):
CONVERTOR_ITEM_GROUP, self._content_widget
)
group_widget.selected.connect(self._on_widget_selection)
group_widget.double_clicked.connect(self.double_clicked)
self._content_layout.insertWidget(1, group_widget)
self._convertor_items_group = group_widget
@@ -110,6 +110,7 @@ class InstanceListItemWidget(QtWidgets.QWidget):
This is required to be able use custom checkbox on custom place.
"""
active_changed = QtCore.Signal(str, bool)
double_clicked = QtCore.Signal()

def __init__(self, instance, parent):
super(InstanceListItemWidget, self).__init__(parent)

@@ -149,6 +150,12 @@ class InstanceListItemWidget(QtWidgets.QWidget):

self._set_valid_property(instance.has_valid_context)

def mouseDoubleClickEvent(self, event):
widget = self.childAt(event.pos())
super(InstanceListItemWidget, self).mouseDoubleClickEvent(event)
if widget is not self._active_checkbox:
self.double_clicked.emit()

def _set_valid_property(self, valid):
if self._has_valid_context == valid:
return

@@ -209,6 +216,8 @@ class InstanceListItemWidget(QtWidgets.QWidget):

class ListContextWidget(QtWidgets.QFrame):
"""Context (or global attributes) widget."""
double_clicked = QtCore.Signal()

def __init__(self, parent):
super(ListContextWidget, self).__init__(parent)

@@ -225,6 +234,10 @@ class ListContextWidget(QtWidgets.QFrame):

self.label_widget = label_widget

def mouseDoubleClickEvent(self, event):
super(ListContextWidget, self).mouseDoubleClickEvent(event)
self.double_clicked.emit()


class InstanceListGroupWidget(QtWidgets.QFrame):
"""Widget representing group of instances.

@@ -317,6 +330,7 @@ class InstanceListGroupWidget(QtWidgets.QFrame):
class InstanceTreeView(QtWidgets.QTreeView):
"""View showing instances and their groups."""
toggle_requested = QtCore.Signal(int)
double_clicked = QtCore.Signal()

def __init__(self, *args, **kwargs):
super(InstanceTreeView, self).__init__(*args, **kwargs)

@@ -425,6 +439,9 @@ class InstanceListView(AbstractInstanceView):

This is public access to and from list view.
"""

double_clicked = QtCore.Signal()

def __init__(self, controller, parent):
super(InstanceListView, self).__init__(parent)

@@ -454,6 +471,7 @@ class InstanceListView(AbstractInstanceView):
instance_view.collapsed.connect(self._on_collapse)
instance_view.expanded.connect(self._on_expand)
instance_view.toggle_requested.connect(self._on_toggle_request)
instance_view.double_clicked.connect(self.double_clicked)

self._group_items = {}
self._group_widgets = {}

@@ -687,6 +705,7 @@ class InstanceListView(AbstractInstanceView):
self._active_toggle_enabled
)
widget.active_changed.connect(self._on_active_changed)
widget.double_clicked.connect(self.double_clicked)
self._instance_view.setIndexWidget(proxy_index, widget)
self._widgets_by_id[instance.id] = widget

@@ -717,6 +736,7 @@ class InstanceListView(AbstractInstanceView):
)
proxy_index = self._proxy_model.mapFromSource(index)
widget = ListContextWidget(self._instance_view)
widget.double_clicked.connect(self.double_clicked)
self._instance_view.setIndexWidget(proxy_index, widget)

self._context_widget = widget
@@ -18,6 +18,7 @@ class OverviewWidget(QtWidgets.QFrame):
instance_context_changed = QtCore.Signal()
create_requested = QtCore.Signal()
convert_requested = QtCore.Signal()
publish_tab_requested = QtCore.Signal()

anim_end_value = 200
anim_duration = 200

@@ -113,9 +114,15 @@ class OverviewWidget(QtWidgets.QFrame):
product_list_view.selection_changed.connect(
self._on_product_change
)
product_list_view.double_clicked.connect(
self.publish_tab_requested
)
product_view_cards.selection_changed.connect(
self._on_product_change
)
product_view_cards.double_clicked.connect(
self.publish_tab_requested
)
# Active instances changed
product_list_view.active_changed.connect(
self._on_active_changed

@@ -258,6 +258,9 @@ class PublisherWindow(QtWidgets.QDialog):
overview_widget.convert_requested.connect(
self._on_convert_requested
)
overview_widget.publish_tab_requested.connect(
self._go_to_publish_tab
)

save_btn.clicked.connect(self._on_save_clicked)
reset_btn.clicked.connect(self._on_reset_clicked)
@@ -281,13 +281,20 @@ def prepare_app_environments(
app.environment
]

task_entity = data.get("task_entity")
folder_entity = data.get("folder_entity")
# Add tools environments
groups_by_name = {}
tool_by_group_name = collections.defaultdict(dict)
if folder_entity:
# Make sure each tool group can be added only once
for key in folder_entity["attrib"].get("tools") or []:
tools = None
if task_entity:
tools = task_entity["attrib"].get("tools")

if tools is None and folder_entity:
tools = folder_entity["attrib"].get("tools")

if tools:
for key in tools:
tool = app.manager.tools.get(key)
if not tool or not tool.is_valid_for_app(app):
continue
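
The change above makes task-level tools take precedence, with folder-level tools as the fallback; a condensed sketch of that resolution order (the entities below are placeholder dicts with made-up tool names, not real AYON entities):

# Illustrative fallback: the task "tools" attribute wins, folder is the fallback.
task_entity = {"attrib": {"tools": None}}
folder_entity = {"attrib": {"tools": ["mtoa_5", "ornatrix"]}}

tools = None
if task_entity:
    tools = task_entity["attrib"].get("tools")
if tools is None and folder_entity:
    tools = folder_entity["attrib"].get("tools")

print(tools)  # ['mtoa_5', 'ornatrix'] because the task defines no tools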

@@ -1,6 +1,6 @@
name = "applications"
title = "Applications"
version = "0.2.1"
version = "0.2.2"

ayon_server_version = ">=1.0.7"
ayon_launcher_version = ">=1.0.2"

@@ -1271,6 +1271,28 @@
}
]
},
"equalizer": {
"enabled": true,
"label": "3DEqualizer",
"icon": "{}/app_icons/3de4.png",
"host_name": "equalizer",
"environment": "{}",
"variants": [
{
"name": "7-1v2",
"label": "7.1v2",
"use_python_2": false,
"executables": {
"windows": [
"C:\\Program Files\\3DE4_win64_r7.1v2\\bin\\3DE4.exe"
],
"darwin": [],
"linux": []
},
"environment": "{}"
}
]
},
"additional_apps": []
}
}

@@ -190,6 +190,8 @@ class ApplicationsSettings(BaseSettingsModel):
default_factory=AppGroupWithPython, title="OpenRV")
zbrush: AppGroup = SettingsField(
default_factory=AppGroupWithPython, title="Zbrush")
equalizer: AppGroup = SettingsField(
default_factory=AppGroupWithPython, title="3DEqualizer")
additional_apps: list[AdditionalAppGroup] = SettingsField(
default_factory=list, title="Additional Applications")
@@ -1,3 +1,3 @@
name = "deadline"
title = "Deadline"
version = "0.1.10"
version = "0.1.11"

@@ -2,11 +2,13 @@ from typing import Type

from ayon_server.addons import BaseServerAddon

from .settings import DeadlineSettings, DEFAULT_VALUES
from .settings import DeadlineSettings, DEFAULT_VALUES, DeadlineSiteSettings


class Deadline(BaseServerAddon):
settings_model: Type[DeadlineSettings] = DeadlineSettings
site_settings_model: Type[DeadlineSiteSettings] = DeadlineSiteSettings

async def get_default_settings(self):
settings_model_cls = self.get_settings_model()

@@ -2,9 +2,11 @@ from .main import (
DeadlineSettings,
DEFAULT_VALUES,
)
from .site_settings import DeadlineSiteSettings


__all__ = (
"DeadlineSettings",
"DeadlineSiteSettings",
"DEFAULT_VALUES",
)

@@ -15,12 +15,6 @@ from .publish_plugins import (
)


class ServerListSubmodel(BaseSettingsModel):
_layout = "compact"
name: str = SettingsField(title="Name")
value: str = SettingsField(title="Value")


async def defined_deadline_ws_name_enum_resolver(
addon: "BaseServerAddon",
settings_variant: str = "production",

@@ -32,25 +26,40 @@ async def defined_deadline_ws_name_enum_resolver(

settings = await addon.get_studio_settings(variant=settings_variant)

ws_urls = []
ws_server_name = []
for deadline_url_item in settings.deadline_urls:
ws_urls.append(deadline_url_item.name)
ws_server_name.append(deadline_url_item.name)

return ws_urls
return ws_server_name

class ServerItemSubmodel(BaseSettingsModel):
"""Connection info about configured DL servers."""
_layout = "compact"
name: str = SettingsField(title="Name")
value: str = SettingsField(title="Url")
require_authentication: bool = SettingsField(
False,
title="Require authentication")
ssl: bool = SettingsField(False,
title="SSL")


class DeadlineSettings(BaseSettingsModel):
deadline_urls: list[ServerListSubmodel] = SettingsField(
# configured DL servers
deadline_urls: list[ServerItemSubmodel] = SettingsField(
default_factory=list,
title="System Deadline Webservice URLs",
title="System Deadline Webservice Info",
scope=["studio"],
)

# name(key) of selected server for project
deadline_server: str = SettingsField(
title="Project deadline server",
title="Project Deadline server name",
section="---",
scope=["project"],
enum_resolver=defined_deadline_ws_name_enum_resolver
)

publish: PublishPluginsModel = SettingsField(
default_factory=PublishPluginsModel,
title="Publish Plugins",

@@ -62,11 +71,14 @@ class DeadlineSettings(BaseSettingsModel):
return value


DEFAULT_VALUES = {
"deadline_urls": [
{
"name": "default",
"value": "http://127.0.0.1:8082"
"value": "http://127.0.0.1:8082",
"require_authentication": False,
"ssl": False
}
],
"deadline_server": "default",
@@ -191,7 +191,6 @@ class NukeSubmitDeadlineModel(BaseSettingsModel):

@validator(
"limit_groups",
"env_allowed_keys",
"env_search_replace_values")
def validate_unique_names(cls, value):
ensure_unique_names(value)
server_addon/deadline/server/settings/site_settings.py (new file, 26 lines)

@@ -0,0 +1,26 @@
from ayon_server.settings import (
BaseSettingsModel,
SettingsField,
)
from .main import defined_deadline_ws_name_enum_resolver


class CredentialPerServerModel(BaseSettingsModel):
"""Provide credentials for configured DL servers"""
_layout = "expanded"
server_name: str = SettingsField("",
title="DL server name",
enum_resolver=defined_deadline_ws_name_enum_resolver)
username: str = SettingsField("",
title="Username")
password: str = SettingsField("",
title="Password")


class DeadlineSiteSettings(BaseSettingsModel):
local_settings: list[CredentialPerServerModel] = SettingsField(
default_factory=list,
title="Local setting",
description="Please provide credentials for configured Deadline servers",
)
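
For illustration only, credentials entered in these site settings can be matched to the studio-level server entry by name; the lookup below uses hypothetical values and plain dicts, not the actual addon API:

# Hypothetical data mirroring the settings models above.
deadline_urls = [
    {"name": "default", "value": "http://127.0.0.1:8082",
     "require_authentication": True, "ssl": False},
]
local_settings = [
    {"server_name": "default", "username": "artist01", "password": "secret"},
]

def get_credentials(server_name):
    # Return (username, password) for the configured server, or None.
    for item in local_settings:
        if item["server_name"] == server_name:
            return item["username"], item["password"]
    return None

print(get_credentials("default"))  # ('artist01', 'secret')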

@@ -1,3 +1,3 @@
name = "maya"
title = "Maya"
version = "0.1.17"
version = "0.1.18"
@@ -46,7 +46,6 @@ def extract_alembic_overrides_enum():
return [
{"label": "Custom Attributes", "value": "attr"},
{"label": "Custom Attributes Prefix", "value": "attrPrefix"},
{"label": "Auto Subd", "value": "autoSubd"},
{"label": "Data Format", "value": "dataFormat"},
{"label": "Euler Filter", "value": "eulerFilter"},
{"label": "Mel Per Frame Callback", "value": "melPerFrameCallback"},

@@ -229,7 +228,7 @@ class ValidateAttributesModel(BaseSettingsModel):

if not success:
raise BadRequestException(
"The attibutes can't be parsed as json object"
"The attributes can't be parsed as json object"
)
return value

@@ -265,7 +264,7 @@ class ValidateUnrealStaticMeshNameModel(BaseSettingsModel):
enabled: bool = SettingsField(title="ValidateUnrealStaticMeshName")
optional: bool = SettingsField(title="Optional")
validate_mesh: bool = SettingsField(title="Validate mesh names")
validate_collision: bool = SettingsField(title="Validate collison names")
validate_collision: bool = SettingsField(title="Validate collision names")


class ValidateCycleErrorModel(BaseSettingsModel):

@@ -288,7 +287,7 @@ class ValidatePluginPathAttributesModel(BaseSettingsModel):
and the node attribute is <b>abc_file</b>
"""

enabled: bool = True
enabled: bool = SettingsField(title="Enabled")
optional: bool = SettingsField(title="Optional")
active: bool = SettingsField(title="Active")
attribute: list[ValidatePluginPathAttributesAttrModel] = SettingsField(

@@ -310,6 +309,9 @@ class RendererAttributesModel(BaseSettingsModel):


class ValidateRenderSettingsModel(BaseSettingsModel):
enabled: bool = SettingsField(title="Enabled")
optional: bool = SettingsField(title="Optional")
active: bool = SettingsField(title="Active")
arnold_render_attributes: list[RendererAttributesModel] = SettingsField(
default_factory=list, title="Arnold Render Attributes")
vray_render_attributes: list[RendererAttributesModel] = SettingsField(

@@ -344,17 +346,6 @@ class ExtractAlembicModel(BaseSettingsModel):
families: list[str] = SettingsField(
default_factory=list,
title="Families")
autoSubd: bool = SettingsField(
title="Auto Subd",
description=(
"If this flag is present and the mesh has crease edges, crease "
"vertices or holes, the mesh (OPolyMesh) would now be written out "
"as an OSubD and crease info will be stored in the Alembic file. "
"Otherwise, creases info won't be preserved in Alembic file unless"
" a custom Boolean attribute SubDivisionMesh has been added to "
"mesh node and its value is true."
)
)
eulerFilter: bool = SettingsField(
title="Euler Filter",
description="Apply Euler filter while sampling rotations."

@@ -406,6 +397,10 @@ class ExtractAlembicModel(BaseSettingsModel):
title="Write Color Sets",
description="Write vertex colors with the geometry."
)
writeCreases: bool = SettingsField(
title="Write Creases",
description="Write the geometry's edge and vertex crease information."
)
writeFaceSets: bool = SettingsField(
title="Write Face Sets",
description="Write face sets with the geometry."

@@ -613,7 +608,7 @@ class ExtractGPUCacheModel(BaseSettingsModel):
title="Optimize Animations For Motion Blur"
)
writeMaterials: bool = SettingsField(title="Write Materials")
useBaseTessellation: bool = SettingsField(title="User Base Tesselation")
useBaseTessellation: bool = SettingsField(title="User Based Tessellation")


class PublishersModel(BaseSettingsModel):

@@ -1171,6 +1166,9 @@ DEFAULT_PUBLISH_SETTINGS = {
]
},
"ValidateRenderSettings": {
"enabled": True,
"active": True,
"optional": False,
"arnold_render_attributes": [],
"vray_render_attributes": [],
"redshift_render_attributes": [],

@@ -1611,7 +1609,6 @@ DEFAULT_PUBLISH_SETTINGS = {
],
"attr": "",
"attrPrefix": "",
"autoSubd": False,
"bake_attributes": [],
"bake_attribute_prefixes": [],
"dataFormat": "ogawa",

@@ -1635,7 +1632,7 @@ DEFAULT_PUBLISH_SETTINGS = {
"renderableOnly": False,
"stripNamespaces": True,
"uvsOnly": False,
"uvWrite": False,
"uvWrite": True,
"userAttr": "",
"userAttrPrefix": "",
"verbose": False,

@@ -1643,6 +1640,7 @@ DEFAULT_PUBLISH_SETTINGS = {
"wholeFrameGeo": False,
"worldSpace": True,
"writeColorSets": False,
"writeCreases": False,
"writeFaceSets": False,
"writeNormals": True,
"writeUVSets": False,