Merge remote-tracking branch 'origin/develop' into bugfix/OP-6851_use-colorspace-for-rstexbin

This commit is contained in:
Ondrej Samohel 2023-10-13 16:01:55 +02:00
commit aba4642e98
No known key found for this signature in database
GPG key ID: 02376E18990A97C6
224 changed files with 17058 additions and 1161 deletions

View file

@ -0,0 +1,32 @@
from openpype.hosts.maya.api import (
lib,
plugin
)
from openpype.lib import BoolDef
class CreateMatchmove(plugin.MayaCreator):
"""Instance for more complex setup of cameras.
Might contain multiple cameras, geometries, etc.
It is expected to be extracted into .abc or .ma.
"""
identifier = "io.openpype.creators.maya.matchmove"
label = "Matchmove"
family = "matchmove"
icon = "video-camera"
def get_instance_attr_defs(self):
defs = lib.collect_animation_defs()
defs.extend([
BoolDef("bakeToWorldSpace",
label="Bake Cameras to World-Space",
tooltip="Bake Cameras to World-Space",
default=True),
])
return defs

View file

@ -20,6 +20,13 @@ class CreateRig(plugin.MayaCreator):
instance_node = instance.get("instance_node")
self.log.info("Creating Rig instance set up ...")
# TODO: change name (_controls_SET -> _rigs_SET)
controls = cmds.sets(name=subset_name + "_controls_SET", empty=True)
# TODO: change name (_out_SET -> _geo_SET)
pointcache = cmds.sets(name=subset_name + "_out_SET", empty=True)
cmds.sets([controls, pointcache], forceElement=instance_node)
skeleton = cmds.sets(
name=subset_name + "_skeletonAnim_SET", empty=True)
skeleton_mesh = cmds.sets(
name=subset_name + "_skeletonMesh_SET", empty=True)
cmds.sets([controls, pointcache,
skeleton, skeleton_mesh], forceElement=instance_node)

View file

@ -0,0 +1,39 @@
from openpype.hosts.maya.api import (
lib,
plugin
)
from openpype.lib import NumberDef
class CreateYetiCache(plugin.MayaCreator):
"""Output for procedural plugin nodes of Yeti """
identifier = "io.openpype.creators.maya.unrealyeticache"
label = "Unreal - Yeti Cache"
family = "yeticacheUE"
icon = "pagelines"
def get_instance_attr_defs(self):
defs = [
NumberDef("preroll",
label="Preroll",
minimum=0,
default=0,
decimals=0)
]
# Add animation data without step and handles
defs.extend(lib.collect_animation_defs())
remove = {"step", "handleStart", "handleEnd"}
defs = [attr_def for attr_def in defs if attr_def.key not in remove]
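# Assuming collect_animation_defs() yields the frame range plus the
# step and handle attributes, the filter above keeps only frameStart
# and frameEnd alongside the preroll defined earlier.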
# Add samples after frame range
defs.append(
NumberDef("samples",
label="Samples",
default=3,
decimals=0)
)
return defs

View file

@ -1,4 +1,46 @@
import openpype.hosts.maya.api.plugin
import maya.cmds as cmds
def _process_reference(file_url, name, namespace, options):
"""Load files by referencing scene in Maya.
Args:
file_url (str): filepath of the objects to be loaded
name (str): subset name
namespace (str): namespace
options (dict): dict storing the loader parameters
Returns:
list: list of object nodes
"""
from openpype.hosts.maya.api.lib import unique_namespace
# Get name from asset being loaded
# Assuming name is subset name from the animation, we split the number
# suffix from the name to ensure the namespace is unique
name = name.split("_")[0]
ext = file_url.split(".")[-1]
namespace = unique_namespace(
"{}_".format(name),
format="%03d",
suffix="_{}".format(ext)
)
attach_to_root = options.get("attach_to_root", True)
group_name = options["group_name"]
# no group shall be created
if not attach_to_root:
group_name = namespace
nodes = cmds.file(file_url,
namespace=namespace,
sharedReferenceFile=False,
groupReference=attach_to_root,
groupName=group_name,
reference=True,
returnNewNodes=True)
return nodes
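# Illustration (not part of the commit) of the naming scheme above for a
# hypothetical subset "cameraMain_01" loaded from an .abc file:
#   name = "cameraMain_01".split("_")[0]      # -> "cameraMain"
#   ext = "cameraMain_01.abc".split(".")[-1]  # -> "abc"
#   unique_namespace("cameraMain_", format="%03d", suffix="_abc")
#   # -> e.g. "cameraMain_001_abc"; the counter guarantees uniqueness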
class AbcLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
@ -16,44 +58,42 @@ class AbcLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
def process_reference(self, context, name, namespace, options):
import maya.cmds as cmds
from openpype.hosts.maya.api.lib import unique_namespace
cmds.loadPlugin("AbcImport.mll", quiet=True)
# Prevent identical alembic nodes from being shared
# Create unique namespace for the cameras
# Get name from asset being loaded
# Assuming name is subset name from the animation, we split the number
# suffix from the name to ensure the namespace is unique
name = name.split("_")[0]
namespace = unique_namespace(
"{}_".format(name),
format="%03d",
suffix="_abc"
)
attach_to_root = options.get("attach_to_root", True)
group_name = options["group_name"]
# no group shall be created
if not attach_to_root:
group_name = namespace
# hero_001 (abc)
# asset_counter{optional}
path = self.filepath_from_context(context)
file_url = self.prepare_root_value(path,
context["project"]["name"])
nodes = cmds.file(file_url,
namespace=namespace,
sharedReferenceFile=False,
groupReference=attach_to_root,
groupName=group_name,
reference=True,
returnNewNodes=True)
nodes = _process_reference(file_url, name, namespace, options)
# load colorbleed ID attribute
self[:] = nodes
return nodes
class FbxLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
"""Loader to reference an Fbx files"""
families = ["animation",
"camera"]
representations = ["fbx"]
label = "Reference animation"
order = -10
icon = "code-fork"
color = "orange"
def process_reference(self, context, name, namespace, options):
cmds.loadPlugin("fbx4maya.mll", quiet=True)
path = self.filepath_from_context(context)
file_url = self.prepare_root_value(path,
context["project"]["name"])
nodes = _process_reference(file_url, name, namespace, options)
self[:] = nodes
return nodes

View file

@ -1,12 +1,6 @@
from maya import cmds, mel
from openpype.client import (
get_asset_by_id,
get_subset_by_id,
get_version_by_id,
)
from openpype.pipeline import (
get_current_project_name,
load,
get_representation_path,
)
@ -18,7 +12,7 @@ class AudioLoader(load.LoaderPlugin):
"""Specific loader of audio."""
families = ["audio"]
label = "Import audio"
label = "Load audio"
representations = ["wav"]
icon = "volume-up"
color = "orange"
@ -27,10 +21,10 @@ class AudioLoader(load.LoaderPlugin):
start_frame = cmds.playbackOptions(query=True, min=True)
sound_node = cmds.sound(
file=context["representation"]["data"]["path"], offset=start_frame
file=self.filepath_from_context(context), offset=start_frame
)
cmds.timeControl(
mel.eval("$tmpVar=$gPlayBackSlider"),
mel.eval("$gPlayBackSlider=$gPlayBackSlider"),
edit=True,
sound=sound_node,
displaySound=True
@ -59,32 +53,50 @@ class AudioLoader(load.LoaderPlugin):
assert audio_nodes is not None, "Audio node not found."
audio_node = audio_nodes[0]
current_sound = cmds.timeControl(
mel.eval("$gPlayBackSlider=$gPlayBackSlider"),
query=True,
sound=True
)
activate_sound = current_sound == audio_node
path = get_representation_path(representation)
cmds.setAttr("{}.filename".format(audio_node), path, type="string")
cmds.sound(
audio_node,
edit=True,
file=path
)
# The source start + end do not automatically update to the length of
# the new audio file, even though Maya does do that when creating a new
# audio node. So we compute the update manually. This would, however,
# override any source start and source end a user might have set on the
# original audio node after load.
audio_frame_count = cmds.getAttr("{}.frameCount".format(audio_node))
audio_sample_rate = cmds.getAttr("{}.sampleRate".format(audio_node))
duration_in_seconds = audio_frame_count / audio_sample_rate
fps = mel.eval('currentTimeUnitToFPS()') # workfile FPS
source_start = 0
source_end = (duration_in_seconds * fps)
cmds.setAttr("{}.sourceStart".format(audio_node), source_start)
cmds.setAttr("{}.sourceEnd".format(audio_node), source_end)
if activate_sound:
# maya by default deactivates it from timeline on file change
cmds.timeControl(
mel.eval("$gPlayBackSlider=$gPlayBackSlider"),
edit=True,
sound=audio_node,
displaySound=True
)
cmds.setAttr(
container["objectName"] + ".representation",
str(representation["_id"]),
type="string"
)
# Set frame range.
project_name = get_current_project_name()
version = get_version_by_id(
project_name, representation["parent"], fields=["parent"]
)
subset = get_subset_by_id(
project_name, version["parent"], fields=["parent"]
)
asset = get_asset_by_id(
project_name, subset["parent"], fields=["parent"]
)
source_start = 1 - asset["data"]["frameStart"]
source_end = asset["data"]["frameEnd"]
cmds.setAttr("{}.sourceStart".format(audio_node), source_start)
cmds.setAttr("{}.sourceEnd".format(audio_node), source_end)
def switch(self, container, representation):
self.update(container, representation)

View file

@ -101,7 +101,8 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
"camerarig",
"staticMesh",
"skeletalMesh",
"mvLook"]
"mvLook",
"matchmove"]
representations = ["ma", "abc", "fbx", "mb"]

View file

@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
from maya import cmds # noqa
import pyblish.api
from openpype.pipeline import OptionalPyblishPluginMixin
class CollectFbxAnimation(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Collect Animated Rig Data for FBX Extractor."""
order = pyblish.api.CollectorOrder + 0.2
label = "Collect Fbx Animation"
hosts = ["maya"]
families = ["animation"]
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
skeleton_sets = [
i for i in instance
if i.endswith("skeletonAnim_SET")
]
if not skeleton_sets:
return
instance.data["families"].append("animation.fbx")
instance.data["animated_skeleton"] = []
for skeleton_set in skeleton_sets:
skeleton_content = cmds.sets(skeleton_set, query=True)
self.log.debug(
"Collected animated skeleton data: {}".format(
skeleton_content
))
if skeleton_content:
instance.data["animated_skeleton"].extend(skeleton_content)

View file

@ -22,7 +22,8 @@ class CollectRigSets(pyblish.api.InstancePlugin):
def process(self, instance):
# Find required sets by suffix
searching = {"controls_SET", "out_SET"}
searching = {"controls_SET", "out_SET",
"skeletonAnim_SET", "skeletonMesh_SET"}
found = {}
for node in cmds.ls(instance, exactType="objectSet"):
for suffix in searching:

View file

@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-
from maya import cmds # noqa
import pyblish.api
class CollectSkeletonMesh(pyblish.api.InstancePlugin):
"""Collect Static Rig Data for FBX Extractor."""
order = pyblish.api.CollectorOrder + 0.2
label = "Collect Skeleton Mesh"
hosts = ["maya"]
families = ["rig"]
def process(self, instance):
skeleton_mesh_set = instance.data["rig_sets"].get(
"skeletonMesh_SET")
if not skeleton_mesh_set:
self.log.debug(
"No skeletonMesh_SET found. "
"Skipping collecting of skeleton mesh..."
)
return
# Store current frame to ensure single frame export
frame = cmds.currentTime(query=True)
instance.data["frameStart"] = frame
instance.data["frameEnd"] = frame
instance.data["skeleton_mesh"] = []
skeleton_mesh_content = cmds.sets(
skeleton_mesh_set, query=True) or []
if not skeleton_mesh_content:
self.log.debug(
"No object nodes in skeletonMesh_SET. "
"Skipping collecting of skeleton mesh..."
)
return
instance.data["families"] += ["rig.fbx"]
instance.data["skeleton_mesh"] = skeleton_mesh_content
self.log.debug(
"Collected skeletonMesh_SET members: {}".format(
skeleton_mesh_content
))

View file

@ -39,7 +39,7 @@ class CollectYetiCache(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.45
label = "Collect Yeti Cache"
families = ["yetiRig", "yeticache"]
families = ["yetiRig", "yeticache", "yeticacheUE"]
hosts = ["maya"]
def process(self, instance):

View file

@ -6,17 +6,21 @@ from openpype.pipeline import publish
from openpype.hosts.maya.api import lib
class ExtractCameraAlembic(publish.Extractor):
class ExtractCameraAlembic(publish.Extractor,
publish.OptionalPyblishPluginMixin):
"""Extract a Camera as Alembic.
The cameras gets baked to world space by default. Only when the instance's
The camera gets baked to world space by default. Only when the instance's
`bakeToWorldSpace` is set to False will it include its full hierarchy.
The 'camera' family expects only a single camera; if multiple cameras
are needed, 'matchmove' is the better choice.
"""
label = "Camera (Alembic)"
label = "Extract Camera (Alembic)"
hosts = ["maya"]
families = ["camera"]
families = ["camera", "matchmove"]
bake_attributes = []
def process(self, instance):
@ -35,10 +39,11 @@ class ExtractCameraAlembic(publish.Extractor):
# validate required settings
assert isinstance(step, float), "Step must be a float value"
camera = cameras[0]
# Define extract output file path
dir_path = self.staging_dir(instance)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
filename = "{0}.abc".format(instance.name)
path = os.path.join(dir_path, filename)
@ -64,9 +69,10 @@ class ExtractCameraAlembic(publish.Extractor):
# if baked, drop the camera hierarchy to maintain
# clean output and backwards compatibility
camera_root = cmds.listRelatives(
camera, parent=True, fullPath=True)[0]
job_str += ' -root {0}'.format(camera_root)
camera_roots = cmds.listRelatives(
cameras, parent=True, fullPath=True)
for camera_root in camera_roots:
job_str += ' -root {0}'.format(camera_root)
for member in members:
descendants = cmds.listRelatives(member,

View file

@ -2,11 +2,15 @@
"""Extract camera as Maya Scene."""
import os
import itertools
import contextlib
from maya import cmds
from openpype.pipeline import publish
from openpype.hosts.maya.api import lib
from openpype.lib import (
BoolDef
)
def massage_ma_file(path):
@ -78,7 +82,8 @@ def unlock(plug):
cmds.disconnectAttr(source, destination)
class ExtractCameraMayaScene(publish.Extractor):
class ExtractCameraMayaScene(publish.Extractor,
publish.OptionalPyblishPluginMixin):
"""Extract a Camera as Maya Scene.
This will create a duplicate of the camera that will be baked *with*
@ -88,17 +93,22 @@ class ExtractCameraMayaScene(publish.Extractor):
The camera gets baked to world space by default. Only when the instance's
`bakeToWorldSpace` is set to False will it include its full hierarchy.
The 'camera' family expects only a single camera; if multiple cameras
are needed, 'matchmove' is the better choice.
Note:
The extracted Maya ascii file gets "massaged" removing the uuid values
so they are valid for older versions of Fusion (e.g. 6.4)
"""
label = "Camera (Maya Scene)"
label = "Extract Camera (Maya Scene)"
hosts = ["maya"]
families = ["camera"]
families = ["camera", "matchmove"]
scene_type = "ma"
keep_image_planes = True
def process(self, instance):
"""Plugin entry point."""
# get settings
@ -131,15 +141,15 @@ class ExtractCameraMayaScene(publish.Extractor):
"bake to world space is ignored...")
# get cameras
members = cmds.ls(instance.data['setMembers'], leaf=True, shapes=True,
long=True, dag=True)
cameras = cmds.ls(members, leaf=True, shapes=True, long=True,
dag=True, type="camera")
members = set(cmds.ls(instance.data['setMembers'], leaf=True,
shapes=True, long=True, dag=True))
cameras = set(cmds.ls(members, leaf=True, shapes=True, long=True,
dag=True, type="camera"))
# validate required settings
assert isinstance(step, float), "Step must be a float value"
camera = cameras[0]
transform = cmds.listRelatives(camera, parent=True, fullPath=True)
transforms = cmds.listRelatives(list(cameras),
parent=True, fullPath=True)
# Define extract output file path
dir_path = self.staging_dir(instance)
@ -151,23 +161,21 @@ class ExtractCameraMayaScene(publish.Extractor):
with lib.evaluation("off"):
with lib.suspended_refresh():
if bake_to_worldspace:
self.log.debug(
"Performing camera bakes: {}".format(transform))
baked = lib.bake_to_world_space(
transform,
transforms,
frame_range=[start, end],
step=step
)
baked_camera_shapes = cmds.ls(baked,
type="camera",
dag=True,
shapes=True,
long=True)
baked_camera_shapes = set(cmds.ls(baked,
type="camera",
dag=True,
shapes=True,
long=True))
members = members + baked_camera_shapes
members.remove(camera)
members.update(baked_camera_shapes)
members.difference_update(cameras)
else:
baked_camera_shapes = cmds.ls(cameras,
baked_camera_shapes = cmds.ls(list(cameras),
type="camera",
dag=True,
shapes=True,
@ -186,19 +194,28 @@ class ExtractCameraMayaScene(publish.Extractor):
unlock(plug)
cmds.setAttr(plug, value)
self.log.debug("Performing extraction..")
cmds.select(cmds.ls(members, dag=True,
shapes=True, long=True), noExpand=True)
cmds.file(path,
force=True,
typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary", # noqa: E501
exportSelected=True,
preserveReferences=False,
constructionHistory=False,
channels=True, # allow animation
constraints=False,
shader=False,
expressions=False)
attr_values = self.get_attr_values_from_data(
instance.data)
keep_image_planes = attr_values.get("keep_image_planes")
with transfer_image_planes(sorted(cameras),
sorted(baked_camera_shapes),
keep_image_planes):
self.log.info("Performing extraction..")
cmds.select(cmds.ls(list(members), dag=True,
shapes=True, long=True),
noExpand=True)
cmds.file(path,
force=True,
typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary", # noqa: E501
exportSelected=True,
preserveReferences=False,
constructionHistory=False,
channels=True, # allow animation
constraints=False,
shader=False,
expressions=False)
# Delete the baked hierarchy
if bake_to_worldspace:
@ -219,3 +236,62 @@ class ExtractCameraMayaScene(publish.Extractor):
self.log.debug("Extracted instance '{0}' to: {1}".format(
instance.name, path))
@classmethod
def get_attribute_defs(cls):
defs = super(ExtractCameraMayaScene, cls).get_attribute_defs()
defs.extend([
BoolDef("keep_image_planes",
label="Keep Image Planes",
tooltip="Preserving connected image planes on camera",
default=cls.keep_image_planes),
])
return defs
@contextlib.contextmanager
def transfer_image_planes(source_cameras, target_cameras,
keep_input_connections):
"""Reattaches image planes to baked or original cameras.
Baked cameras are duplicates of original ones.
This attaches it to duplicated camera properly and after
export it reattaches it back to original to keep image plane in workfile.
"""
originals = {}
try:
for source_camera, target_camera in zip(source_cameras,
target_cameras):
image_planes = cmds.listConnections(source_camera,
type="imagePlane") or []
# Split off the parent path they are attached to - we want
# the image plane node name.
# TODO: Does this still mean the image plane name is unique?
image_planes = [x.split("->", 1)[1] for x in image_planes]
if not image_planes:
continue
originals[source_camera] = []
for image_plane in image_planes:
if keep_input_connections:
if source_camera == target_camera:
continue
_attach_image_plane(target_camera, image_plane)
else:  # explicitly detaching image planes
cmds.imagePlane(image_plane, edit=True, detach=True)
originals[source_camera].append(image_plane)
yield
finally:
for camera, image_planes in originals.items():
for image_plane in image_planes:
_attach_image_plane(camera, image_plane)
def _attach_image_plane(camera, image_plane):
cmds.imagePlane(image_plane, edit=True, detach=True)
cmds.imagePlane(image_plane, edit=True, camera=camera)
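A minimal usage sketch of the context manager above (hypothetical camera
names, assuming both shapes exist in the scene):

with transfer_image_planes(["|cam|camShape"],        # original camera
                           ["|cam_baked|camShape"],  # baked duplicate
                           keep_input_connections=True):
    cmds.select("|cam_baked|camShape")  # export would happen here
# on exit, image planes are reattached to the original camera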

View file

@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
import os
from maya import cmds # noqa
import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.maya.api import fbx
from openpype.hosts.maya.api.lib import (
namespaced, get_namespace, strip_namespace
)
class ExtractFBXAnimation(publish.Extractor):
"""Extract Rig in FBX format from Maya.
This extracts the rig in fbx with the constraints
and referenced asset content included.
This also optionally extract animated rig in fbx with
geometries included.
"""
order = pyblish.api.ExtractorOrder
label = "Extract Animation (FBX)"
hosts = ["maya"]
families = ["animation.fbx"]
def process(self, instance):
# Define output path
staging_dir = self.staging_dir(instance)
filename = "{0}.fbx".format(instance.name)
path = os.path.join(staging_dir, filename)
path = path.replace("\\", "/")
fbx_exporter = fbx.FBXExtractor(log=self.log)
out_members = instance.data.get("animated_skeleton", [])
# Export
instance.data["constraints"] = True
instance.data["skeletonDefinitions"] = True
instance.data["referencedAssetsContent"] = True
fbx_exporter.set_options_from_instance(instance)
# Export from the rig's namespace so that the exported
# FBX does not include the namespace but preserves the node
# names as existing in the rig workfile
namespace = get_namespace(out_members[0])
relative_out_members = [
strip_namespace(node, namespace) for node in out_members
]
with namespaced(
":" + namespace,
new=False,
relative_names=True
) as namespace:
fbx_exporter.export(relative_out_members, path)
representations = instance.data.setdefault("representations", [])
representations.append({
'name': 'fbx',
'ext': 'fbx',
'files': filename,
"stagingDir": staging_dir
})
self.log.debug(
"Extracted FBX animation to: {0}".format(path))

View file

@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
import os
from maya import cmds # noqa
import pyblish.api
from openpype.pipeline import publish
from openpype.pipeline.publish import OptionalPyblishPluginMixin
from openpype.hosts.maya.api import fbx
class ExtractSkeletonMesh(publish.Extractor,
OptionalPyblishPluginMixin):
"""Extract Rig in FBX format from Maya.
This extracts the rig in fbx with the constraints
and referenced asset content included.
This also optionally extract animated rig in fbx with
geometries included.
"""
order = pyblish.api.ExtractorOrder
label = "Extract Skeleton Mesh"
hosts = ["maya"]
families = ["rig.fbx"]
def process(self, instance):
if not self.is_active(instance.data):
return
# Define output path
staging_dir = self.staging_dir(instance)
filename = "{0}.fbx".format(instance.name)
path = os.path.join(staging_dir, filename)
fbx_exporter = fbx.FBXExtractor(log=self.log)
out_set = instance.data.get("skeleton_mesh", [])
instance.data["constraints"] = True
instance.data["skeletonDefinitions"] = True
fbx_exporter.set_options_from_instance(instance)
# Export
fbx_exporter.export(out_set, path)
representations = instance.data.setdefault("representations", [])
representations.append({
'name': 'fbx',
'ext': 'fbx',
'files': filename,
"stagingDir": staging_dir
})
self.log.debug("Extract FBX to: {0}".format(path))

View file

@ -0,0 +1,61 @@
import os
from maya import cmds
from openpype.pipeline import publish
class ExtractYetiCache(publish.Extractor):
"""Producing Yeti cache files using scene time range.
This will extract Yeti cache file sequence and fur settings.
"""
label = "Extract Yeti Cache"
hosts = ["maya"]
families = ["yeticacheUE"]
def process(self, instance):
yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
if not yeti_nodes:
raise RuntimeError("No pgYetiMaya nodes found in the instance")
# Define extract output file path
dirname = self.staging_dir(instance)
# Collect information for writing cache
start_frame = instance.data["frameStartHandle"]
end_frame = instance.data["frameEndHandle"]
preroll = instance.data["preroll"]
if preroll > 0:
start_frame -= preroll
kwargs = {}
samples = instance.data.get("samples", 0)
if samples == 0:
kwargs.update({"sampleTimes": "0.0 1.0"})
else:
kwargs.update({"samples": samples})
self.log.debug(f"Writing out cache {start_frame} - {end_frame}")
filename = f"{instance.name}.abc"
path = os.path.join(dirname, filename)
cmds.pgYetiCommand(yeti_nodes,
writeAlembic=path,
range=(start_frame, end_frame),
asUnrealAbc=True,
**kwargs)
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'abc',
'ext': 'abc',
'files': filename,
'stagingDir': dirname
}
instance.data["representations"].append(representation)
self.log.debug(f"Extracted {instance} to {dirname}")

View file

@ -1,555 +0,0 @@
import os
import json
import getpass
import platform
import appdirs
from maya import cmds
import pyblish.api
from openpype.lib import requests_post
from openpype.hosts.maya.api import lib
from openpype.pipeline import legacy_io
from openpype.settings import get_system_settings
# mapping between Maya renderer names and Muster template ids
def _get_template_id(renderer):
"""
Return muster template ID based on renderer name.
:param renderer: renderer name
:type renderer: str
:returns: muster template id
:rtype: int
"""
templates = get_system_settings()["modules"]["muster"]["templates_mapping"]
if not templates:
raise RuntimeError(("Muster template mapping missing in "
"pype-settings"))
try:
template_id = templates[renderer]
except KeyError:
raise RuntimeError("Unmapped renderer - missing template id")
return template_id
def _get_script():
"""Get path to the image sequence script"""
try:
from openpype.scripts import publish_filesequence
except Exception:
raise RuntimeError("Expected module 'publish_deadline'"
"to be available")
module_path = publish_filesequence.__file__
if module_path.endswith(".pyc"):
module_path = module_path[:-len(".pyc")] + ".py"
return module_path
def get_renderer_variables(renderlayer=None):
"""Retrieve the extension which has been set in the VRay settings
Will return None if the current renderer is not VRay
For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which
start with `rs`. Use the actual node name, do NOT use the `nice name`
Args:
renderlayer (str): the node name of the renderlayer.
Returns:
dict
"""
renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer())
render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"])
padding = cmds.getAttr("{}.{}".format(render_attrs["node"],
render_attrs["padding"]))
filename_0 = cmds.renderSettings(fullPath=True, firstImageName=True)[0]
if renderer == "vray":
# Maya's renderSettings function does not return V-Ray file extension
# so we get the extension from vraySettings
extension = cmds.getAttr("vraySettings.imageFormatStr")
# When V-Ray image format has not been switched once from default .png
# the getAttr command above returns None. As such we explicitly set
# it to `.png`
if extension is None:
extension = "png"
filename_prefix = "<Scene>/<Scene>_<Layer>/<Layer>"
else:
# Get the extension, getAttr defaultRenderGlobals.imageFormat
# returns an index number.
filename_base = os.path.basename(filename_0)
extension = os.path.splitext(filename_base)[-1].strip(".")
filename_prefix = "<Scene>/<RenderLayer>/<RenderLayer>"
return {"ext": extension,
"filename_prefix": filename_prefix,
"padding": padding,
"filename_0": filename_0}
def preview_fname(folder, scene, layer, padding, ext):
"""Return output file path with #### for padding.
Deadline requires the path to be formatted with # in place of numbers.
For example `/path/to/render.####.png`
Args:
folder (str): The root output folder (image path)
scene (str): The scene name
layer (str): The layer name to be rendered
padding (int): The padding length
ext(str): The output file extension
Returns:
str
"""
# Following hardcoded "<Scene>/<Scene>_<Layer>/<Layer>"
output = "{scene}/{layer}/{layer}.{number}.{ext}".format(
scene=scene,
layer=layer,
number="#" * padding,
ext=ext
)
return os.path.join(folder, output)
class MayaSubmitMuster(pyblish.api.InstancePlugin):
"""Submit available render layers to Muster
Renders are submitted to a Muster via HTTP API as
supplied via the environment variable ``MUSTER_REST_URL``.
Also needed is ``MUSTER_USER`` and ``MUSTER_PASSWORD``.
"""
label = "Submit to Muster"
order = pyblish.api.IntegratorOrder + 0.1
hosts = ["maya"]
families = ["renderlayer"]
icon = "satellite-dish"
if not os.environ.get("MUSTER_REST_URL"):
optional = False
active = False
else:
optional = True
_token = None
def _load_credentials(self):
"""
Load Muster credentials from file and set `MUSTER_USER` and
`MUSTER_PASSWORD`. `MUSTER_REST_URL` is loaded from settings.
.. todo::
Show login dialog if access token is invalid or missing.
"""
app_dir = os.path.normpath(
appdirs.user_data_dir('pype-app', 'pype')
)
file_name = 'muster_cred.json'
fpath = os.path.join(app_dir, file_name)
file = open(fpath, 'r')
muster_json = json.load(file)
self._token = muster_json.get('token', None)
if not self._token:
raise RuntimeError("Invalid access token for Muster")
file.close()
self.MUSTER_REST_URL = os.environ.get("MUSTER_REST_URL")
if not self.MUSTER_REST_URL:
raise AttributeError("Muster REST API url not set")
def _get_templates(self):
"""
Get Muster templates from server.
"""
params = {
"authToken": self._token,
"select": "name"
}
api_entry = '/api/templates/list'
response = requests_post(
self.MUSTER_REST_URL + api_entry, params=params)
if response.status_code != 200:
self.log.error(
'Cannot get templates from Muster: {}'.format(
response.status_code))
raise Exception('Cannot get templates from Muster.')
try:
response_templates = response.json()["ResponseData"]["templates"]
except ValueError as e:
self.log.error(
'Muster server returned unexpected data {}'.format(e)
)
raise Exception('Muster server returned unexpected data')
templates = {}
for t in response_templates:
templates[t.get("name")] = t.get("id")
self._templates = templates
def _resolve_template(self, renderer):
"""
Returns template ID based on renderer string.
:param renderer: Name of renderer to match against template names
:type renderer: str
:returns: ID of template
:rtype: int
:raises: Exception if template ID isn't found
"""
self.log.debug("Trying to find template for [{}]".format(renderer))
mapped = _get_template_id(renderer)
self.log.debug("got id [{}]".format(mapped))
return self._templates.get(mapped)
def _submit(self, payload):
"""
Submit job to Muster
:param payload: json with job to submit
:type payload: str
:returns: response
:raises: Exception if status is wrong
"""
params = {
"authToken": self._token,
"name": "submit"
}
api_entry = '/api/queue/actions'
response = requests_post(
self.MUSTER_REST_URL + api_entry, params=params, json=payload)
if response.status_code != 200:
self.log.error(
'Cannot submit job to Muster: {}'.format(response.text))
raise Exception('Cannot submit job to Muster.')
return response
def process(self, instance):
"""
Authenticate with Muster, collect all data, prepare path for post
render publish job and submit job to farm.
"""
# setup muster environment
self.MUSTER_REST_URL = os.environ.get("MUSTER_REST_URL")
if self.MUSTER_REST_URL is None:
self.log.error(
"\"MUSTER_REST_URL\" is not found. Skipping "
"[{}]".format(instance)
)
raise RuntimeError("MUSTER_REST_URL not set")
self._load_credentials()
# self._get_templates()
context = instance.context
workspace = context.data["workspaceDir"]
project_name = context.data["projectName"]
asset_name = context.data["asset"]
filepath = None
allInstances = []
for result in context.data["results"]:
if ((result["instance"] is not None) and
(result["instance"] not in allInstances)):
allInstances.append(result["instance"])
for inst in allInstances:
print(inst)
if inst.data['family'] == 'scene':
filepath = inst.data['destination_list'][0]
if not filepath:
filepath = context.data["currentFile"]
self.log.debug(filepath)
filename = os.path.basename(filepath)
comment = context.data.get("comment", "")
scene = os.path.splitext(filename)[0]
dirname = os.path.join(workspace, "renders")
renderlayer = instance.data['renderlayer'] # rs_beauty
renderlayer_name = instance.data['subset'] # beauty
renderglobals = instance.data["renderGlobals"]
# legacy_layers = renderlayer_globals["UseLegacyRenderLayers"]
# deadline_user = context.data.get("deadlineUser", getpass.getuser())
jobname = "%s - %s" % (filename, instance.name)
# Get the variables depending on the renderer
render_variables = get_renderer_variables(renderlayer)
output_filename_0 = preview_fname(folder=dirname,
scene=scene,
layer=renderlayer_name,
padding=render_variables["padding"],
ext=render_variables["ext"])
instance.data["outputDir"] = os.path.dirname(output_filename_0)
self.log.debug("output: {}".format(filepath))
# build path for metadata file
metadata_filename = "{}_metadata.json".format(instance.data["subset"])
output_dir = instance.data["outputDir"]
metadata_path = os.path.join(output_dir, metadata_filename)
pype_root = os.environ["OPENPYPE_SETUP_PATH"]
# we must provide either full path to executable or use musters own
# python named MPython.exe, residing directly in muster bin
# directory.
if platform.system().lower() == "windows":
# for muster, those backslashes must be escaped twice
muster_python = ("\"C:\\\\Program Files\\\\Virtual Vertex\\\\"
"Muster 9\\\\MPython.exe\"")
else:
# we need to run pype as a different user than the one the Muster
# dispatcher service runs as (usually root).
muster_python = ("/usr/sbin/runuser -u {}"
" -- /usr/bin/python3".format(getpass.getuser()))
# build the path and argument. We are providing a separate --pype
# argument with a network path to pype, because post job actions run
# on the dispatcher (server) and not on the render clients. Render
# clients inherit the environment from the publisher including PATH,
# so there's no problem finding PYPE, but there is no way (as far as
# I know) to set the environment dynamically for the dispatcher.
# Therefore this hack.
args = [muster_python,
_get_script().replace('\\', '\\\\'),
"--paths",
metadata_path.replace('\\', '\\\\'),
"--pype",
pype_root.replace('\\', '\\\\')]
postjob_command = " ".join(args)
try:
# Ensure render folder exists
os.makedirs(dirname)
except OSError:
pass
env = self.clean_environment()
payload = {
"RequestData": {
"platform": 0,
"job": {
"jobName": jobname,
"templateId": _get_template_id(
instance.data["renderer"]),
"chunksInterleave": 2,
"chunksPriority": "0",
"chunksTimeoutValue": 320,
"department": "",
"dependIds": [""],
"dependLinkMode": 0,
"dependMode": 0,
"emergencyQueue": False,
"excludedPools": [""],
"includedPools": [renderglobals["Pool"]],
"packetSize": 4,
"packetType": 1,
"priority": 1,
"jobId": -1,
"startOn": 0,
"parentId": -1,
"project": project_name or scene,
"shot": asset_name or scene,
"camera": instance.data.get("cameras")[0],
"dependMode": 0,
"packetSize": 4,
"packetType": 1,
"priority": 1,
"maximumInstances": 0,
"assignedInstances": 0,
"attributes": {
"environmental_variables": {
"value": ", ".join("{!s}={!r}".format(k, v)
for (k, v) in env.items()),
"state": True,
"subst": False
},
"memo": {
"value": comment,
"state": True,
"subst": False
},
"frames_range": {
"value": "{start}-{end}".format(
start=int(instance.data["frameStart"]),
end=int(instance.data["frameEnd"])),
"state": True,
"subst": False
},
"job_file": {
"value": filepath,
"state": True,
"subst": True
},
"job_project": {
"value": workspace,
"state": True,
"subst": True
},
"output_folder": {
"value": dirname.replace("\\", "/"),
"state": True,
"subst": True
},
"post_job_action": {
"value": postjob_command,
"state": True,
"subst": True
},
"MAYADIGITS": {
"value": 1,
"state": True,
"subst": False
},
"ARNOLDMODE": {
"value": "0",
"state": True,
"subst": False
},
"ABORTRENDER": {
"value": "0",
"state": True,
"subst": True
},
"ARNOLDLICENSE": {
"value": "0",
"state": False,
"subst": False
},
"ADD_FLAGS": {
"value": "-rl {}".format(renderlayer),
"state": True,
"subst": True
}
}
}
}
}
self.preflight_check(instance)
self.log.debug("Submitting ...")
self.log.debug(json.dumps(payload, indent=4, sort_keys=True))
response = self._submit(payload)
# response = requests.post(url, json=payload)
if not response.ok:
raise Exception(response.text)
# Store output dir for unified publisher (filesequence)
instance.data["musterSubmissionJob"] = response.json()
def clean_environment(self):
"""
Clean and set environment variables for render job so render clients
work in more or less same environment as publishing machine.
.. warning:: This is not usable for **post job action** as this is
executed on dispatcher machine (server) and not render clients.
"""
keys = [
# This will trigger `userSetup.py` on the slave
# such that proper initialisation happens the same
# way as it does on a local machine.
# TODO(marcus): This won't work if the slaves don't
# have access to these paths, such as if slaves are
# running Linux and the submitter is on Windows.
"PYTHONPATH",
"PATH",
"MTOA_EXTENSIONS_PATH",
"MTOA_EXTENSIONS",
"DYLD_LIBRARY_PATH",
"MAYA_RENDER_DESC_PATH",
"MAYA_MODULE_PATH",
"ARNOLD_PLUGIN_PATH",
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",
"PYBLISHPLUGINPATH",
# todo: This is a temporary fix for yeti variables
"PEREGRINEL_LICENSE",
"SOLIDANGLE_LICENSE",
"ARNOLD_LICENSE"
"MAYA_MODULE_PATH",
"TOOL_ENV"
]
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **legacy_io.Session)
# self.log.debug("enviro: {}".format(pprint(environment)))
for path in os.environ:
if path.lower().startswith('pype_'):
environment[path] = os.environ[path]
environment["PATH"] = os.environ["PATH"]
# self.log.debug("enviro: {}".format(environment['OPENPYPE_SCRIPTS']))
clean_environment = {}
for key, value in environment.items():
clean_path = ""
self.log.debug("key: {}".format(key))
if "://" in value:
clean_path = value
else:
valid_paths = []
for path in value.split(os.pathsep):
if not path:
continue
try:
path.decode('UTF-8', 'strict')
valid_paths.append(os.path.normpath(path))
except UnicodeDecodeError:
print('path contains non UTF characters')
if valid_paths:
clean_path = os.pathsep.join(valid_paths)
clean_environment[key] = clean_path
return clean_environment
def preflight_check(self, instance):
"""Ensure the startFrame, endFrame and byFrameStep are integers"""
for key in ("frameStart", "frameEnd", "byFrameStep"):
value = instance.data[key]
if int(value) == value:
continue
self.log.warning(
"%f=%d was rounded off to nearest integer"
% (value, int(value))
)
# TODO: Remove hack to avoid this plug-in in new publisher
# This plug-in should actually be in dedicated module
if not os.environ.get("MUSTER_REST_URL"):
del MayaSubmitMuster

View file

@ -0,0 +1,66 @@
import pyblish.api
import openpype.hosts.maya.api.action
from openpype.pipeline.publish import (
PublishValidationError,
ValidateContentsOrder
)
from maya import cmds
class ValidateAnimatedReferenceRig(pyblish.api.InstancePlugin):
"""Validate all nodes in skeletonAnim_SET are referenced"""
order = ValidateContentsOrder
hosts = ["maya"]
families = ["animation.fbx"]
label = "Animated Reference Rig"
accepted_controllers = ["transform", "locator"]
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
def process(self, instance):
animated_sets = instance.data.get("animated_skeleton", [])
if not animated_sets:
self.log.debug(
"No nodes found in skeletonAnim_SET. "
"Skipping validation of animated reference rig..."
)
return
for animated_reference in animated_sets:
is_referenced = cmds.referenceQuery(
animated_reference, isNodeReferenced=True)
if not is_referenced:
raise PublishValidationError(
"All the content in skeletonAnim_SET"
" should be referenced nodes"
)
invalid_controls = self.validate_controls(animated_sets)
if invalid_controls:
raise PublishValidationError(
"All the content in skeletonAnim_SET"
" should be transforms"
)
@classmethod
def validate_controls(cls, set_members):
"""Check if the set contains only accepted node types.
Args:
set_members: list of nodes of the skeletonAnim_SET
Returns:
list: invalid nodes
"""
# Validate control types
invalid = []
set_members = cmds.ls(set_members, long=True)
for node in set_members:
if cmds.nodeType(node) not in cls.accepted_controllers:
invalid.append(node)
return invalid
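# Example (hypothetical node names): a locally created locator inside
# skeletonAnim_SET fails the referenceQuery check above, while a
# referenced transform such as "rigMain:root_ctrl" passes both the
# reference check and the node-type check.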

View file

@ -30,18 +30,21 @@ class ValidatePluginPathAttributes(pyblish.api.InstancePlugin):
def get_invalid(cls, instance):
invalid = list()
file_attr = cls.attribute
if not file_attr:
file_attrs = cls.attribute
if not file_attrs:
return invalid
# Consider only valid node types to avoid "Unknown object type" warning
all_node_types = set(cmds.allNodeTypes())
node_types = [key for key in file_attr.keys() if key in all_node_types]
node_types = [
key for key in file_attrs.keys()
if key in all_node_types
]
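# Note: cmds.ls(..., showType=True) returns a flat list of alternating
# node name and node type, e.g. ["file1", "file", "aiImage1", "aiImage"];
# pairwise() regroups it into (node, type) tuples.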
for node, node_type in pairwise(cmds.ls(type=node_types,
showType=True)):
# get the filepath
file_attr = "{}.{}".format(node, file_attr[node_type])
file_attr = "{}.{}".format(node, file_attrs[node_type])
filepath = cmds.getAttr(file_attr)
if filepath and not os.path.exists(filepath):

View file

@ -0,0 +1,117 @@
import pyblish.api
from openpype.pipeline import (
PublishValidationError,
OptionalPyblishPluginMixin
)
from maya import cmds
from openpype.pipeline.publish import RepairAction
from openpype.hosts.maya.api import lib
from openpype.hosts.maya.api.lib import reset_scene_resolution
class ValidateResolution(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Validate the render resolution setting aligned with DB"""
order = pyblish.api.ValidatorOrder
families = ["renderlayer"]
hosts = ["maya"]
label = "Validate Resolution"
actions = [RepairAction]
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
invalid = self.get_invalid_resolution(instance)
if invalid:
raise PublishValidationError(
"Render resolution is invalid. See log for details.",
description=(
"Wrong render resolution setting. "
"Please use repair button to fix it.\n\n"
"If current renderer is V-Ray, "
"make sure vraySettings node has been created."
)
)
@classmethod
def get_invalid_resolution(cls, instance):
width, height, pixelAspect = cls.get_db_resolution(instance)
current_renderer = instance.data["renderer"]
layer = instance.data["renderlayer"]
invalid = False
if current_renderer == "vray":
vray_node = "vraySettings"
if cmds.objExists(vray_node):
current_width = lib.get_attr_in_layer(
"{}.width".format(vray_node), layer=layer)
current_height = lib.get_attr_in_layer(
"{}.height".format(vray_node), layer=layer)
current_pixelAspect = lib.get_attr_in_layer(
"{}.pixelAspect".format(vray_node), layer=layer
)
else:
cls.log.error(
"Can't detect VRay resolution because there is no node "
"named: `{}`".format(vray_node)
)
return True
else:
current_width = lib.get_attr_in_layer(
"defaultResolution.width", layer=layer)
current_height = lib.get_attr_in_layer(
"defaultResolution.height", layer=layer)
current_pixelAspect = lib.get_attr_in_layer(
"defaultResolution.pixelAspect", layer=layer
)
if current_width != width or current_height != height:
cls.log.error(
"Render resolution {}x{} does not match "
"asset resolution {}x{}".format(
current_width, current_height,
width, height
))
invalid = True
if current_pixelAspect != pixelAspect:
cls.log.error(
"Render pixel aspect {} does not match "
"asset pixel aspect {}".format(
current_pixelAspect, pixelAspect
))
invalid = True
return invalid
@classmethod
def get_db_resolution(cls, instance):
asset_doc = instance.data["assetEntity"]
project_doc = instance.context.data["projectEntity"]
for data in [asset_doc["data"], project_doc["data"]]:
if (
"resolutionWidth" in data and
"resolutionHeight" in data and
"pixelAspect" in data
):
width = data["resolutionWidth"]
height = data["resolutionHeight"]
pixelAspect = data["pixelAspect"]
return int(width), int(height), float(pixelAspect)
# Defaults if not found in asset document or project document
return 1920, 1080, 1.0
@classmethod
def repair(cls, instance):
# Usually without renderlayer overrides the renderlayers
# all share the same resolution value - so fixing the first
# will have fixed all the others too. It's much faster to
# check whether it's invalid first instead of switching
# into all layers individually
if not cls.get_invalid_resolution(instance):
cls.log.debug(
"Nothing to repair on instance: {}".format(instance)
)
return
layer_node = instance.data['setMembers']
with lib.renderlayer(layer_node):
reset_scene_resolution()

View file

@ -1,6 +1,6 @@
import pyblish.api
from maya import cmds
import openpype.hosts.maya.api.action
from openpype.pipeline.publish import (
PublishValidationError,
ValidateContentsOrder
@ -20,33 +20,27 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
label = "Rig Contents"
hosts = ["maya"]
families = ["rig"]
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
accepted_output = ["mesh", "transform"]
accepted_controllers = ["transform"]
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise PublishValidationError(
"Invalid rig content. See log for details.")
@classmethod
def get_invalid(cls, instance):
# Find required sets by suffix
required = ["controls_SET", "out_SET"]
missing = [
key for key in required if key not in instance.data["rig_sets"]
]
if missing:
raise PublishValidationError(
"%s is missing sets: %s" % (instance, ", ".join(missing))
)
required, rig_sets = cls.get_nodes(instance)
controls_set = instance.data["rig_sets"]["controls_SET"]
out_set = instance.data["rig_sets"]["out_SET"]
cls.validate_missing_objectsets(instance, required, rig_sets)
# Ensure there are at least some transforms or dag nodes
# in the rig instance
set_members = instance.data['setMembers']
if not cmds.ls(set_members, type="dagNode", long=True):
raise PublishValidationError(
"No dag nodes in the pointcache instance. "
"(Empty instance?)"
)
controls_set = rig_sets["controls_SET"]
out_set = rig_sets["out_SET"]
# Ensure contents in sets and retrieve long path for all objects
output_content = cmds.sets(out_set, query=True) or []
@ -61,49 +55,92 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
)
controls_content = cmds.ls(controls_content, long=True)
# Validate members are inside the hierarchy from root node
root_nodes = cmds.ls(set_members, assemblies=True, long=True)
hierarchy = cmds.listRelatives(root_nodes, allDescendents=True,
fullPath=True) + root_nodes
hierarchy = set(hierarchy)
invalid_hierarchy = []
for node in output_content:
if node not in hierarchy:
invalid_hierarchy.append(node)
for node in controls_content:
if node not in hierarchy:
invalid_hierarchy.append(node)
rig_content = output_content + controls_content
invalid_hierarchy = cls.invalid_hierarchy(instance, rig_content)
# Additional validations
invalid_geometry = self.validate_geometry(output_content)
invalid_controls = self.validate_controls(controls_content)
invalid_geometry = cls.validate_geometry(output_content)
invalid_controls = cls.validate_controls(controls_content)
error = False
if invalid_hierarchy:
self.log.error("Found nodes which reside outside of root group "
cls.log.error("Found nodes which reside outside of root group "
"while they are set up for publishing."
"\n%s" % invalid_hierarchy)
error = True
if invalid_controls:
self.log.error("Only transforms can be part of the controls_SET."
cls.log.error("Only transforms can be part of the controls_SET."
"\n%s" % invalid_controls)
error = True
if invalid_geometry:
self.log.error("Only meshes can be part of the out_SET\n%s"
cls.log.error("Only meshes can be part of the out_SET\n%s"
% invalid_geometry)
error = True
if error:
return invalid_hierarchy + invalid_controls + invalid_geometry
@classmethod
def validate_missing_objectsets(cls, instance,
required_objsets, rig_sets):
"""Validate missing objectsets in rig sets
Args:
instance (str): instance
required_objsets (list): list of objectset names
rig_sets (list): list of rig sets
Raises:
PublishValidationError: When the error is raised, it will show
which instance has the missing object sets
"""
missing = [
key for key in required_objsets if key not in rig_sets
]
if missing:
raise PublishValidationError(
"Invalid rig content. See log for details.")
"%s is missing sets: %s" % (instance, ", ".join(missing))
)
def validate_geometry(self, set_members):
"""Check if the out set passes the validations
@classmethod
def invalid_hierarchy(cls, instance, content):
"""
Check if all rig set members are within the hierarchy of the rig root
Checks if all its set members are within the hierarchy of the root
Args:
instance (str): instance
content (list): list of content from rig sets
Raises:
PublishValidationError: It means no dag nodes in
the rig instance
Returns:
list: invalid hierarchy
"""
# Ensure there are at least some transforms or dag nodes
# in the rig instance
set_members = instance.data['setMembers']
if not cmds.ls(set_members, type="dagNode", long=True):
raise PublishValidationError(
"No dag nodes in the rig instance. "
"(Empty instance?)"
)
# Validate members are inside the hierarchy from root node
root_nodes = cmds.ls(set_members, assemblies=True, long=True)
hierarchy = cmds.listRelatives(root_nodes, allDescendents=True,
fullPath=True) + root_nodes
hierarchy = set(hierarchy)
invalid_hierarchy = []
for node in content:
if node not in hierarchy:
invalid_hierarchy.append(node)
return invalid_hierarchy
@classmethod
def validate_geometry(cls, set_members):
"""
Checks if the node types of the set members are valid
Args:
@ -122,15 +159,13 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
fullPath=True) or []
all_shapes = cmds.ls(set_members + shapes, long=True, shapes=True)
for shape in all_shapes:
if cmds.nodeType(shape) not in self.accepted_output:
if cmds.nodeType(shape) not in cls.accepted_output:
invalid.append(shape)
return invalid
def validate_controls(self, set_members):
"""Check if the controller set passes the validations
Checks if all its set members are within the hierarchy of the root
@classmethod
def validate_controls(cls, set_members):
"""
Checks if the control set members are allowed node types.
Checks if the node types of the set members are valid
Args:
@ -144,7 +179,80 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
# Validate control types
invalid = []
for node in set_members:
if cmds.nodeType(node) not in self.accepted_controllers:
if cmds.nodeType(node) not in cls.accepted_controllers:
invalid.append(node)
return invalid
@classmethod
def get_nodes(cls, instance):
"""Get the target objectsets and rig sets nodes
Args:
instance (str): instance
Returns:
tuple: 2-tuple of list of objectsets,
list of rig sets nodes
"""
objectsets = ["controls_SET", "out_SET"]
rig_sets_nodes = instance.data.get("rig_sets", [])
return objectsets, rig_sets_nodes
class ValidateSkeletonRigContents(ValidateRigContents):
"""Ensure skeleton rigs contains pipeline-critical content
The rigs optionally contain at least two object sets:
"skeletonMesh_SET" - Set of the skinned meshes
with bone hierarchies
"""
order = ValidateContentsOrder
label = "Skeleton Rig Contents"
hosts = ["maya"]
families = ["rig.fbx"]
@classmethod
def get_invalid(cls, instance):
objectsets, skeleton_mesh_nodes = cls.get_nodes(instance)
cls.validate_missing_objectsets(
instance, objectsets, instance.data["rig_sets"])
# Ensure contents in sets and retrieve long path for all objects
output_content = cmds.ls(skeleton_mesh_nodes, long=True)
invalid_hierarchy = cls.invalid_hierarchy(
instance, output_content)
invalid_geometry = cls.validate_geometry(output_content)
error = False
if invalid_hierarchy:
cls.log.error("Found nodes which reside outside of root group "
"while they are set up for publishing."
"\n%s" % invalid_hierarchy)
error = True
if invalid_geometry:
cls.log.error("Found nodes which reside outside of root group "
"while they are set up for publishing."
"\n%s" % invalid_hierarchy)
error = True
if error:
return invalid_hierarchy + invalid_geometry
@classmethod
def get_nodes(cls, instance):
"""Get the target objectsets and rig sets nodes
Args:
instance (str): instance
Returns:
tuple: 2-tuple of list of objectsets,
list of rig sets nodes
"""
objectsets = ["skeletonMesh_SET"]
skeleton_mesh_nodes = instance.data.get("skeleton_mesh", [])
return objectsets, skeleton_mesh_nodes

View file

@ -59,7 +59,7 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
@classmethod
def get_invalid(cls, instance):
controls_set = instance.data["rig_sets"].get("controls_SET")
controls_set = cls.get_node(instance)
if not controls_set:
cls.log.error(
"Must have 'controls_SET' in rig instance"
@ -189,7 +189,7 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
@classmethod
def repair(cls, instance):
controls_set = instance.data["rig_sets"].get("controls_SET")
controls_set = cls.get_node(instance)
if not controls_set:
cls.log.error(
"Unable to repair because no 'controls_SET' found in rig "
@ -228,3 +228,64 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
default = cls.CONTROLLER_DEFAULTS[attr]
cls.log.info("Setting %s to %s" % (plug, default))
cmds.setAttr(plug, default)
@classmethod
def get_node(cls, instance):
"""Get target object nodes from controls_SET
Args:
instance (str): instance
Returns:
list: list of object nodes from controls_SET
"""
return instance.data["rig_sets"].get("controls_SET")
class ValidateSkeletonRigControllers(ValidateRigControllers):
"""Validate rig controller for skeletonAnim_SET
Controls must have the transformation attributes on their default
values of translate zero, rotate zero and scale one when they are
unlocked attributes.
Unlocked keyable attributes may not have any incoming connections. If
these connections are required for the rig then lock the attributes.
The visibility attribute must be locked.
Note that `repair` will:
- Lock all visibility attributes
- Reset all default values for translate, rotate, scale
- Break all incoming connections to keyable attributes
"""
order = ValidateContentsOrder + 0.05
label = "Skeleton Rig Controllers"
hosts = ["maya"]
families = ["rig.fbx"]
# Default controller values
CONTROLLER_DEFAULTS = {
"translateX": 0,
"translateY": 0,
"translateZ": 0,
"rotateX": 0,
"rotateY": 0,
"rotateZ": 0,
"scaleX": 1,
"scaleY": 1,
"scaleZ": 1
}
@classmethod
def get_node(cls, instance):
"""Get target object nodes from skeletonMesh_SET
Args:
instance (str): instance
Returns:
list: list of object nodes from skeletonMesh_SET
"""
return instance.data["rig_sets"].get("skeletonMesh_SET")

View file

@ -46,7 +46,7 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin):
def get_invalid(cls, instance):
"""Get all nodes which do not match the criteria"""
out_set = instance.data["rig_sets"].get("out_SET")
out_set = cls.get_node(instance)
if not out_set:
return []
@ -85,3 +85,45 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin):
continue
lib.set_id(node, sibling_id, overwrite=True)
@classmethod
def get_node(cls, instance):
"""Get target object nodes from out_SET
Args:
instance (str): instance
Returns:
list: list of object nodes from out_SET
"""
return instance.data["rig_sets"].get("out_SET")
class ValidateSkeletonRigOutSetNodeIds(ValidateRigOutSetNodeIds):
"""Validate if deformed shapes have related IDs to the original shapes
from skeleton set.
When a deformer is applied in the scene on a referenced mesh that already
had deformers then Maya will create a new shape node for the mesh that
does not have the original id. This validator checks whether the ids are
valid on all the shape nodes in the instance.
"""
order = ValidateContentsOrder
families = ["rig.fbx"]
hosts = ['maya']
label = 'Skeleton Rig Out Set Node Ids'
@classmethod
def get_node(cls, instance):
"""Get target object nodes from skeletonMesh_SET
Args:
instance (str): instance
Returns:
list: list of object nodes from skeletonMesh_SET
"""
return instance.data["rig_sets"].get(
"skeletonMesh_SET")

View file

@ -47,7 +47,7 @@ class ValidateRigOutputIds(pyblish.api.InstancePlugin):
invalid = {}
if compute:
out_set = instance.data["rig_sets"].get("out_SET")
out_set = cls.get_node(instance)
if not out_set:
instance.data["mismatched_output_ids"] = invalid
return invalid
@ -115,3 +115,40 @@ class ValidateRigOutputIds(pyblish.api.InstancePlugin):
"Multiple matched ids found. Please repair manually: "
"{}".format(multiple_ids_match)
)
@classmethod
def get_node(cls, instance):
"""Get target object nodes from out_SET
Args:
instance (str): instance
Returns:
list: list of object nodes from out_SET
"""
return instance.data["rig_sets"].get("out_SET")
class ValidateSkeletonRigOutputIds(ValidateRigOutputIds):
"""Validate rig output ids from the skeleton sets.
Ids must share the same id as similarly named nodes in the scene. This is
to ensure the id from the model is preserved through animation.
"""
order = ValidateContentsOrder + 0.05
label = "Skeleton Rig Output Ids"
hosts = ["maya"]
families = ["rig.fbx"]
@classmethod
def get_node(cls, instance):
"""Get target object nodes from skeletonMesh_SET
Args:
instance (str): instance
Returns:
list: list of object nodes from skeletonMesh_SET
"""
return instance.data["rig_sets"].get("skeletonMesh_SET")

View file

@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-
"""Plugin for validating naming conventions."""
from maya import cmds
import pyblish.api
from openpype.pipeline.publish import (
ValidateContentsOrder,
OptionalPyblishPluginMixin,
PublishValidationError
)
class ValidateSkeletonTopGroupHierarchy(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Validates top group hierarchy in the SETs
Make sure the object inside the SETs are always top
group of the hierarchy
"""
order = ValidateContentsOrder + 0.05
label = "Skeleton Rig Top Group Hierarchy"
families = ["rig.fbx"]
def process(self, instance):
invalid = []
skeleton_mesh_data = instance.data.get("skeleton_mesh", [])
if skeleton_mesh_data:
invalid = self.get_top_hierarchy(skeleton_mesh_data)
if invalid:
raise PublishValidationError(
"The skeletonMesh_SET includes the object which "
"is not at the top hierarchy: {}".format(invalid))
def get_top_hierarchy(self, targets):
targets = cmds.ls(targets, long=True) # ensure long names
non_top_hierarchy_list = [
target for target in targets if target.count("|") > 2
]
return non_top_hierarchy_list
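# Worked example (hypothetical long names):
#   "|root"           -> count("|") == 1, passes
#   "|root|grp"       -> count("|") == 2, passes
#   "|root|grp|mesh"  -> count("|") == 3, flagged as not top-level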

View file

@ -69,11 +69,8 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin,
invalid = []
project_settings = get_project_settings(
legacy_io.Session["AVALON_PROJECT"]
)
collision_prefixes = (
project_settings
instance.context.data["project_settings"]
["maya"]
["create"]
["CreateUnrealStaticMesh"]