mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-24 12:54:40 +01:00
Merged in Aardschok/config/0002 (pull request #3)
0002 - complexity lowered and cosmetics
This commit is contained in:
commit
ce7a5b507b
21 changed files with 661 additions and 559 deletions
|
|
@ -20,6 +20,27 @@ def get_errored_instances_from_context(context):
|
|||
return instances
|
||||
|
||||
|
||||
def get_errored_plugins_from_data(context):
    """Get all plug-ins that failed during validation.

    Walks the publish results stored on the context and collects the
    plug-in of every result that did not succeed.

    Args:
        context (object): pyblish context holding ``data["results"]``,
            a list of result dicts with "success" and "plugin" keys.

    Returns:
        list: plugins which failed during validation

    """
    plugins = list()
    results = context.data.get("results", [])
    for result in results:
        # Idiomatic truthiness check instead of `== True`
        if result["success"]:
            continue
        plugins.append(result["plugin"])

    return plugins
|
||||
|
||||
|
||||
class RepairAction(pyblish.api.Action):
|
||||
"""Repairs the action
|
||||
|
||||
|
|
@ -42,11 +63,35 @@ class RepairAction(pyblish.api.Action):
|
|||
|
||||
# Apply pyblish.logic to get the instances for the plug-in
|
||||
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
|
||||
|
||||
for instance in instances:
|
||||
plugin.repair(instance)
|
||||
|
||||
|
||||
class RepairContextAction(pyblish.api.Action):
    """Attempt to repair a failed plug-in on the context.

    To retrieve the invalid nodes this assumes a static `repair(instance)`
    method is available on the plugin.

    """
    label = "Repair Context"
    on = "failed"  # This action is only available on a failed plug-in

    def process(self, context, plugin):
        # Guard: a plug-in without a repair method cannot be repaired.
        if not hasattr(plugin, "repair"):
            raise RuntimeError("Plug-in does not have repair method.")

        # Gather the plug-ins that errored during publishing
        self.log.info("Finding failed instances..")
        failed = get_errored_plugins_from_data(context)

        # Only trigger a repair when this plug-in actually failed
        if plugin not in failed:
            return

        self.log.info("Attempting fix ...")
        plugin.repair()
|
||||
|
||||
|
||||
class SelectInvalidAction(pyblish.api.Action):
|
||||
"""Select invalid nodes in Maya when plug-in failed.
|
||||
|
||||
|
|
|
|||
|
|
@ -12,29 +12,22 @@ from .plugin import (
|
|||
|
||||
# temporary fix, might
|
||||
from .action import (
|
||||
|
||||
get_errored_instances_from_context,
|
||||
SelectInvalidAction,
|
||||
GenerateUUIDsOnInvalidAction,
|
||||
RepairAction
|
||||
RepairAction,
|
||||
RepairContextAction
|
||||
)
|
||||
|
||||
|
||||
def merge(*args):
    """Merge any number of OrderedDict instances into a new one.

    Later arguments win on key collisions, and a re-occurring key is
    moved to the *end* of the result: popping before re-assigning makes
    OrderedDict append the key again instead of updating in place.
    """
    merged = OrderedDict()
    for mapping in args:
        for key, value in mapping.items():
            # Drop any earlier occurrence so the key is re-appended last
            merged.pop(key, None)
            merged[key] = value
    return merged
|
||||
|
||||
|
||||
all = [
|
||||
"Extractor",
|
||||
"ValidatePipelineOrder",
|
||||
"ValidateContentsOrder",
|
||||
"ValidateSceneOrder",
|
||||
"ValidateMeshOrder",
|
||||
|
||||
# action
|
||||
"get_errored_instances_from_context",
|
||||
"SelectInvalidAction",
|
||||
"GenerateUUIDsOnInvalidAction",
|
||||
"RepairAction"
|
||||
|
|
|
|||
|
|
@ -241,4 +241,64 @@ def collect_animation_data():
|
|||
|
||||
|
||||
def get_current_renderlayer():
|
||||
return cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
|
||||
return cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
|
||||
|
||||
|
||||
def is_visible(node,
               displayLayer=True,
               intermediateObject=True,
               parentHidden=True,
               visibility=True):
    """Is `node` visible?

    Returns whether a node is hidden by one of the following methods:
    - The node exists (always checked)
    - The node must be a dagNode (always checked)
    - The node's visibility is off.
    - The node is set as intermediate Object.
    - The node is in a disabled displayLayer.
    - Whether any of its parent nodes is hidden.

    Roughly based on: http://ewertb.soundlinker.com/mel/mel.098.php

    Args:
        node (str): Name of the node to check.
        displayLayer (bool): Check display layer overrides.
        intermediateObject (bool): Check the intermediateObject state.
        parentHidden (bool): Recursively check parent visibility.
        visibility (bool): Check the visibility attribute itself.

    Returns:
        bool: Whether the node is visible in the scene

    """
    # Only existing objects can be visible
    if not cmds.objExists(node):
        return False

    # Only dagNodes can be visible
    if not cmds.objectType(node, isAType='dagNode'):
        return False

    if visibility:
        if not cmds.getAttr('{0}.visibility'.format(node)):
            return False

    if intermediateObject and cmds.objectType(node, isAType='shape'):
        if cmds.getAttr('{0}.intermediateObject'.format(node)):
            return False

    if displayLayer:
        # Display layers set overrideEnabled and overrideVisibility on
        # members. A member is hidden when the override is enabled and
        # the override visibility is turned OFF.
        if cmds.attributeQuery('overrideEnabled', node=node, exists=True):
            override_enabled = cmds.getAttr(
                '{}.overrideEnabled'.format(node))
            override_visibility = cmds.getAttr(
                '{}.overrideVisibility'.format(node))
            # BUGFIX: previously this returned False (hidden) when the
            # override visibility was ON, inverting the check.
            if override_enabled and not override_visibility:
                return False

    if parentHidden:
        parents = cmds.listRelatives(node, parent=True, fullPath=True)
        if parents:
            parent = parents[0]
            # Recurse upwards; intermediateObject only applies to the
            # shape itself, so it is disabled for parents.
            if not is_visible(parent,
                              displayLayer=displayLayer,
                              intermediateObject=False,
                              parentHidden=parentHidden,
                              visibility=visibility):
                return False

    return True
|
||||
|
|
@ -1,6 +1,7 @@
|
|||
import avalon.maya
|
||||
from maya import cmds
|
||||
|
||||
import avalon.maya
|
||||
|
||||
|
||||
class CreateRig(avalon.maya.Creator):
|
||||
"""Skeleton and controls for manipulation of the geometry"""
|
||||
|
|
@ -12,6 +13,8 @@ class CreateRig(avalon.maya.Creator):
|
|||
def process(self):
|
||||
instance = super(CreateRig, self).process()
|
||||
|
||||
self.log.info("Creating Rig instance set up ...")
|
||||
|
||||
controls = cmds.sets(name="controls_SET", empty=True)
|
||||
pointcache = cmds.sets(name="out_SET", empty=True)
|
||||
cmds.sets([controls, pointcache], forceElement=instance)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
from maya import cmds
|
||||
|
||||
from avalon import api
|
||||
from avalon import maya
|
||||
|
||||
|
||||
class ModelLoader(api.Loader):
|
||||
|
|
@ -14,7 +16,7 @@ class ModelLoader(api.Loader):
|
|||
color = "orange"
|
||||
|
||||
def process(self, name, namespace, context):
|
||||
from avalon import maya
|
||||
|
||||
with maya.maintained_selection():
|
||||
nodes = cmds.file(
|
||||
self.fname,
|
||||
|
|
@ -22,7 +24,7 @@ class ModelLoader(api.Loader):
|
|||
reference=True,
|
||||
returnNewNodes=True,
|
||||
groupReference=True,
|
||||
groupName=namespace + ":" + name
|
||||
groupName="{}:{}".format(namespace, name)
|
||||
)
|
||||
|
||||
# Assign default shader to meshes
|
||||
|
|
|
|||
|
|
@ -1,20 +0,0 @@
|
|||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
|
||||
class ValidateUnitsAngular(pyblish.api.ContextPlugin):
|
||||
"""Scene angular units must be in degrees"""
|
||||
|
||||
order = colorbleed.api.ValidateSceneOrder
|
||||
label = "Units (angular)"
|
||||
families = ["colorbleed.rig",
|
||||
"colorbleed.model",
|
||||
"colorbleed.pointcache",
|
||||
"colorbleed.curves"]
|
||||
|
||||
def process(self, context):
|
||||
units = context.data('angularUnits')
|
||||
|
||||
self.log.info('Units (angular): {0}'.format(units))
|
||||
assert units and units == 'deg', (
|
||||
"Scene angular units must be degrees")
|
||||
|
|
@ -1,28 +0,0 @@
|
|||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
|
||||
class ValidateUnitsFps(pyblish.api.ContextPlugin):
|
||||
"""Validate the scene linear, angular and time units."""
|
||||
|
||||
order = colorbleed.api.ValidateSceneOrder
|
||||
label = "Units (fps)"
|
||||
families = ["colorbleed.rig",
|
||||
"colorbleed.pointcache",
|
||||
"colorbleed.curves"]
|
||||
actions = [colorbleed.api.RepairAction]
|
||||
optional = True
|
||||
|
||||
def process(self, context):
|
||||
|
||||
fps = context.data['fps']
|
||||
|
||||
self.log.info('Units (time): {0} FPS'.format(fps))
|
||||
assert fps and fps == 25.0, "Scene must be 25 FPS"
|
||||
|
||||
@classmethod
|
||||
def repair(cls):
|
||||
"""Fix the current FPS setting of the scene, set to PAL(25.0 fps)
|
||||
"""
|
||||
import maya.cmds as cmds
|
||||
cmds.currentUnit(time="pal")
|
||||
|
|
@ -1,20 +0,0 @@
|
|||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
|
||||
class ValidateUnitsLinear(pyblish.api.ContextPlugin):
|
||||
"""Scene must be in linear units"""
|
||||
|
||||
order = colorbleed.api.ValidateSceneOrder
|
||||
label = "Units (linear)"
|
||||
families = ["colorbleed.rig",
|
||||
"colorbleed.model",
|
||||
"colorbleed.pointcache",
|
||||
"colorbleed.curves"]
|
||||
|
||||
def process(self, context):
|
||||
units = context.data('linearUnits')
|
||||
|
||||
self.log.info('Units (linear): {0}'.format(units))
|
||||
assert units and units == 'cm', ("Scene linear units must "
|
||||
"be centimeters")
|
||||
|
|
@ -1,5 +1,3 @@
|
|||
import os
|
||||
|
||||
from maya import cmds
|
||||
|
||||
import pyblish.api
|
||||
|
|
|
|||
|
|
@ -69,69 +69,25 @@ class CollectLook(pyblish.api.InstancePlugin):
|
|||
# Ignore specifically named sets (check with endswith)
|
||||
IGNORE = ["out_SET", "controls_SET", "_INST"]
|
||||
|
||||
def process(self, instance):
|
||||
"""Collect the Look in the instance with the correct layer settings"""
|
||||
|
||||
layer = instance.data.get("renderlayer", "defaultRenderLayer")
|
||||
with context.renderlayer(layer):
|
||||
self.log.info("Checking out layer: {0}".format(layer))
|
||||
self.collect(instance)
|
||||
|
||||
def collect(self, instance):
|
||||
|
||||
# Whether to log information verbosely
|
||||
verbose = instance.data.get("verbose", False)
|
||||
|
||||
self.log.info("Looking for look associations "
|
||||
"for {0}..".format(instance.data['label']))
|
||||
|
||||
# Get view sets (so we can ignore those sets later)
|
||||
model_panels = cmds.getPanel(type="modelPanel")
|
||||
view_sets = set()
|
||||
|
||||
for panel in model_panels:
|
||||
view_set = cmds.modelEditor(panel, query=True, viewObjects=True)
|
||||
if view_set:
|
||||
view_sets.add(view_set)
|
||||
"for %s" % instance.data['label'])
|
||||
|
||||
# Discover related object sets
|
||||
self.log.info("Gathering sets..")
|
||||
sets = dict()
|
||||
for node in instance:
|
||||
|
||||
node_sets = cmds.listSets(object=node, extendToShape=False) or []
|
||||
if verbose:
|
||||
self.log.info("Found raw sets "
|
||||
"{0} for {1}".format(node_sets, node))
|
||||
|
||||
if not node_sets:
|
||||
continue
|
||||
|
||||
# Exclude deformer sets
|
||||
deformer_sets = cmds.listSets(object=node,
|
||||
extendToShape=False,
|
||||
type=2) or []
|
||||
deformer_sets = set(deformer_sets) # optimize lookup
|
||||
node_sets = [s for s in node_sets if s not in deformer_sets]
|
||||
|
||||
if verbose:
|
||||
self.log.debug("After filtering deformer sets "
|
||||
"{0}".format(node_sets))
|
||||
|
||||
# Ignore specifically named sets
|
||||
node_sets = [s for s in node_sets if
|
||||
not any(s.endswith(x) for x in self.IGNORE)]
|
||||
|
||||
if verbose:
|
||||
self.log.debug("After filtering ignored sets "
|
||||
"{0}".format(node_sets))
|
||||
|
||||
# Ignore viewport filter view sets (from isolate select and
|
||||
# viewports)
|
||||
node_sets = [s for s in node_sets if s not in view_sets]
|
||||
|
||||
if verbose:
|
||||
self.log.debug("After filtering view sets {0}".format(node_sets))
|
||||
|
||||
self.log.info("Found sets {0} for {1}".format(node_sets, node))
|
||||
|
||||
for objset in node_sets:
|
||||
if objset not in sets:
|
||||
sets[objset] = {"name": objset,
|
||||
"uuid": id_utils.get_id(objset),
|
||||
"members": list()}
|
||||
self.gather_sets(instance)
|
||||
|
||||
# Lookup with absolute names (from root namespace)
|
||||
instance_lookup = set([str(x) for x in cmds.ls(instance,
|
||||
|
|
@ -139,44 +95,141 @@ class CollectLook(pyblish.api.InstancePlugin):
|
|||
absoluteName=True)])
|
||||
|
||||
self.log.info("Gathering set relations..")
|
||||
sets = self.gather_sets(instance)
|
||||
for objset in sets:
|
||||
|
||||
self.log.debug("From %s.." % objset)
|
||||
content = cmds.sets(objset, query=True)
|
||||
objset_members = sets[objset]["members"]
|
||||
for member in cmds.ls(content, long=True, absoluteName=True):
|
||||
|
||||
node, components = (member.rsplit(".", 1) + [None])[:2]
|
||||
|
||||
# Only include valid members of the instance
|
||||
if node not in instance_lookup:
|
||||
if verbose:
|
||||
self.log.info("Skipping member %s" % member)
|
||||
member_data = self.collect_member_data(member,
|
||||
objset_members,
|
||||
instance_lookup,
|
||||
verbose)
|
||||
if not member_data:
|
||||
continue
|
||||
|
||||
if member in [m["name"] for m in sets[objset]["members"]]:
|
||||
continue
|
||||
|
||||
if verbose:
|
||||
self.log.debug("Such as %s.." % member)
|
||||
|
||||
member_data = {"name": node, "uuid": id_utils.get_id(node)}
|
||||
|
||||
# Include components information when components are assigned
|
||||
if components:
|
||||
member_data["components"] = components
|
||||
|
||||
sets[objset]["members"].append(member_data)
|
||||
|
||||
# Remove sets that didn't have any members assigned in the end
|
||||
for objset, data in sets.items():
|
||||
if not data['members']:
|
||||
self.log.debug("Removing redundant set "
|
||||
"information: {0}".format(objset))
|
||||
sets.pop(objset)
|
||||
|
||||
sets = self.clean_sets(sets)
|
||||
# Member attributes (shapes + transforms)
|
||||
|
||||
self.log.info("Gathering attribute changes to instance members..")
|
||||
attrs = []
|
||||
|
||||
attributes = self.collect_attributes_changes(instance)
|
||||
|
||||
# Store data on the instance
|
||||
instance.data["lookAttributes"] = attributes
|
||||
instance.data["lookSetRelations"] = sets.values()
|
||||
instance.data["lookSets"] = cmds.ls(sets.keys(),
|
||||
absoluteName=True,
|
||||
long=True)
|
||||
|
||||
# Log a warning when no relevant sets were retrieved for the look.
|
||||
if not instance.data['lookSets']:
|
||||
self.log.warning("No sets found for the nodes in the instance: "
|
||||
"%s" % instance[:])
|
||||
|
||||
self.log.info("Collected look for %s" % instance)
|
||||
|
||||
def gather_sets(self, instance):
|
||||
|
||||
# Get view sets (so we can ignore those sets later)
|
||||
sets = dict()
|
||||
view_sets = set()
|
||||
model_panels = cmds.getPanel(type="modelPanel")
|
||||
for panel in model_panels:
|
||||
view_set = cmds.modelEditor(panel, query=True, viewObjects=True)
|
||||
if view_set:
|
||||
view_sets.add(view_set)
|
||||
|
||||
for node in instance:
|
||||
node_sets = self.filter_sets(node, view_sets)
|
||||
if not node_sets:
|
||||
continue
|
||||
|
||||
for objset in node_sets:
|
||||
if objset in sets:
|
||||
continue
|
||||
sets[objset] = {"name": objset,
|
||||
"uuid": id_utils.get_id(objset),
|
||||
"members": list()}
|
||||
return sets
|
||||
|
||||
def filter_sets(self, node, view_sets):
|
||||
|
||||
node_sets = cmds.listSets(object=node, extendToShape=False) or []
|
||||
if not node_sets:
|
||||
return
|
||||
|
||||
# Exclude deformer sets
|
||||
deformer_sets = cmds.listSets(object=node,
|
||||
extendToShape=False,
|
||||
type=2) or []
|
||||
deformer_sets = set(deformer_sets) # optimize lookup
|
||||
sets = [s for s in node_sets if s not in deformer_sets]
|
||||
|
||||
# Ignore specifically named sets
|
||||
sets = [s for s in sets if not any(s.endswith(x) for x in self.IGNORE)]
|
||||
|
||||
# Ignore viewport filter view sets (from isolate select and
|
||||
# viewports)
|
||||
sets = [s for s in sets if s not in view_sets]
|
||||
|
||||
self.log.info("Found sets {0} for {1}".format(node_sets, node))
|
||||
|
||||
return sets
|
||||
|
||||
def clean_sets(self, sets):
|
||||
|
||||
for objset, data in sets.items():
|
||||
if not data['members']:
|
||||
self.log.debug("Removing redundant set "
|
||||
"information: %s" % objset)
|
||||
sets.pop(objset)
|
||||
|
||||
return sets
|
||||
|
||||
def collect_member_data(self, member, objset_members, instance_members,
|
||||
verbose=False):
|
||||
"""Get all information of the node
|
||||
Args:
|
||||
member (str): the name of the node to check
|
||||
objset_members (list): the objectSet members
|
||||
instance_members (set): the collected instance members
|
||||
verbose (bool): get debug information
|
||||
|
||||
Returns:
|
||||
dict
|
||||
|
||||
"""
|
||||
|
||||
node, components = (member.rsplit(".", 1) + [None])[:2]
|
||||
|
||||
# Only include valid members of the instance
|
||||
if node not in instance_members:
|
||||
if verbose:
|
||||
self.log.info("Skipping member %s" % member)
|
||||
return
|
||||
|
||||
if member in [m["name"] for m in objset_members]:
|
||||
return
|
||||
|
||||
if verbose:
|
||||
self.log.debug("Such as %s.." % member)
|
||||
|
||||
member_data = {"name": node, "uuid": id_utils.get_id(node)}
|
||||
|
||||
# Include components information when components are assigned
|
||||
if components:
|
||||
member_data["components"] = components
|
||||
|
||||
return member_data
|
||||
|
||||
def collect_attributes_changes(self, instance):
|
||||
|
||||
attributes = []
|
||||
for node in instance:
|
||||
|
||||
# Collect changes to "custom" attributes
|
||||
|
|
@ -186,36 +239,15 @@ class CollectLook(pyblish.api.InstancePlugin):
|
|||
if not node_attrs:
|
||||
continue
|
||||
|
||||
attributes = {}
|
||||
node_attributes = {}
|
||||
for attr in node_attrs:
|
||||
attribute = "{}.{}".format(node, attr)
|
||||
attributes[attr] = cmds.getAttr(attribute)
|
||||
node_attributes[attr] = cmds.getAttr(attribute)
|
||||
|
||||
# attributes = dict((attr, pm.getAttr("{}.{}".format(node, attr))
|
||||
# for attr in node_attrs))
|
||||
data = {"name": node,
|
||||
"uuid": id_utils.get_id(node),
|
||||
"attributes": attributes}
|
||||
"attributes": node_attributes}
|
||||
|
||||
attrs.append(data)
|
||||
attributes.append(data)
|
||||
|
||||
# Store data on the instance
|
||||
instance.data["lookAttributes"] = attrs
|
||||
instance.data["lookSetRelations"] = sets.values()
|
||||
instance.data["lookSets"] = cmds.ls(sets.keys(),
|
||||
absoluteName=True,
|
||||
long=True)
|
||||
|
||||
# Log a warning when no relevant sets were retrieved for the look.
|
||||
if not instance.data['lookSets']:
|
||||
self.log.warning("No sets found for the nodes in the instance: {0}".format(instance[:]))
|
||||
|
||||
self.log.info("Collected look for %s" % instance)
|
||||
|
||||
def process(self, instance):
|
||||
"""Collect the Look in the instance with the correct layer settings"""
|
||||
|
||||
layer = instance.data.get("renderlayer", "defaultRenderLayer")
|
||||
with context.renderlayer(layer):
|
||||
self.log.info("Checking out layer: {0}".format(layer))
|
||||
self.collect(instance)
|
||||
return attributes
|
||||
|
|
|
|||
|
|
@ -31,26 +31,11 @@ class SelectTextureNodesAction(pyblish.api.Action):
|
|||
# Apply pyblish.logic to get the instances for the plug-in
|
||||
instances = pyblish.api.instances_by_plugin(instances, plugin)
|
||||
|
||||
def is_texture_resource(resource):
|
||||
"""Return whether the resource is a texture"""
|
||||
|
||||
tags = resource.get("tags", [])
|
||||
if not TAGS_LOOKUP.issubset(tags):
|
||||
return False
|
||||
|
||||
if resource.get("subfolder", None) != "textures":
|
||||
return False
|
||||
|
||||
if "node" not in resource:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
# Get the texture nodes from the instances
|
||||
nodes = []
|
||||
for instance in instances:
|
||||
for resource in instance.data.get("resources", []):
|
||||
if is_texture_resource(resource):
|
||||
if self.is_texture_resource(resource):
|
||||
node = resource['node']
|
||||
nodes.append(node)
|
||||
|
||||
|
|
@ -64,6 +49,21 @@ class SelectTextureNodesAction(pyblish.api.Action):
|
|||
self.log.info("No texture nodes found.")
|
||||
cmds.select(deselect=True)
|
||||
|
||||
def is_texture_resource(self, resource):
|
||||
"""Return whether the resource is a texture"""
|
||||
|
||||
tags = resource.get("tags", [])
|
||||
if not TAGS_LOOKUP.issubset(tags):
|
||||
return False
|
||||
|
||||
if resource.get("subfolder", None) != "textures":
|
||||
return False
|
||||
|
||||
if "node" not in resource:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class CollectLookTextures(pyblish.api.InstancePlugin):
|
||||
"""Collect look textures
|
||||
|
|
@ -84,7 +84,8 @@ class CollectLookTextures(pyblish.api.InstancePlugin):
|
|||
# Get textures from sets
|
||||
sets = instance.data["lookSets"]
|
||||
if not sets:
|
||||
raise RuntimeError("No look sets found for the nodes in the instance. {0}".format(sets))
|
||||
raise RuntimeError("No look sets found for the nodes in the "
|
||||
"instance. %s" % sets)
|
||||
|
||||
# Get the file nodes
|
||||
history = cmds.listHistory(sets) or []
|
||||
|
|
@ -93,43 +94,61 @@ class CollectLookTextures(pyblish.api.InstancePlugin):
|
|||
|
||||
resources = instance.data.get("resources", [])
|
||||
for node in files:
|
||||
|
||||
attribute = "%s.fileTextureName" % node
|
||||
source = cmds.getAttr(attribute)
|
||||
|
||||
# Get the computed file path (e.g. the one with the <UDIM> pattern
|
||||
# in it) So we can reassign it this computed file path whenever
|
||||
# we need to.
|
||||
computed_attribute = "%s.computedFileTextureNamePattern" % node
|
||||
computed_source = cmds.getAttr(computed_attribute)
|
||||
if source != computed_source:
|
||||
if verbose:
|
||||
self.log.debug("File node computed pattern differs from "
|
||||
"original pattern: {0} "
|
||||
"({1} -> {2})".format(node,
|
||||
source,
|
||||
computed_source))
|
||||
|
||||
# We replace backslashes with forward slashes because V-Ray
|
||||
# can't handle the UDIM files with the backslashes in the
|
||||
# paths as the computed patterns
|
||||
source = computed_source.replace("\\", "/")
|
||||
|
||||
files = shader.get_file_node_files(node)
|
||||
if not files:
|
||||
self.log.error("File node does not have a texture set: "
|
||||
"{0}".format(node))
|
||||
|
||||
# Define the resource
|
||||
resource = {"tags": TAGS[:],
|
||||
"node": node,
|
||||
"attribute": attribute,
|
||||
"source": source, # required for resources
|
||||
"files": files, # required for resources
|
||||
"subfolder": "textures" # optional for resources
|
||||
}
|
||||
|
||||
resource = self.collect_resources(node, verbose)
|
||||
if not resource:
|
||||
continue
|
||||
resources.append(resource)
|
||||
|
||||
# Store resources
|
||||
instance.data['resources'] = resources
|
||||
|
||||
def collect_resources(self, node, verbose=False):
|
||||
"""Collect the link to the file(s) used (resource)
|
||||
Args:
|
||||
node (str): name of the node
|
||||
verbose (bool): enable debug information
|
||||
|
||||
Returns:
|
||||
dict
|
||||
"""
|
||||
|
||||
attribute = "{}.fileTextureName".format(node)
|
||||
source = cmds.getAttr(attribute)
|
||||
|
||||
# Get the computed file path (e.g. the one with the <UDIM> pattern
|
||||
# in it) So we can reassign it this computed file path whenever
|
||||
# we need to.
|
||||
|
||||
computed_attribute = "{}.computedFileTextureNamePattern".format(node)
|
||||
computed_source = cmds.getAttr(computed_attribute)
|
||||
if source != computed_source:
|
||||
if verbose:
|
||||
self.log.debug("File node computed pattern differs from "
|
||||
"original pattern: {0} "
|
||||
"({1} -> {2})".format(node,
|
||||
source,
|
||||
computed_source))
|
||||
|
||||
# We replace backslashes with forward slashes because V-Ray
|
||||
# can't handle the UDIM files with the backslashes in the
|
||||
# paths as the computed patterns
|
||||
source = computed_source.replace("\\", "/")
|
||||
|
||||
files = shader.get_file_node_files(node)
|
||||
if not files:
|
||||
self.log.error("File node does not have a texture set: "
|
||||
"{0}".format(node))
|
||||
return
|
||||
|
||||
# Define the resource
|
||||
resource = {"tags": TAGS[:],
|
||||
"node": node,
|
||||
"attribute": attribute,
|
||||
"source": source, # required for resources
|
||||
"files": files, # required for resources
|
||||
"subfolder": "textures" # optional for resources
|
||||
}
|
||||
|
||||
return resource
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,39 +0,0 @@
|
|||
import pyblish.api
|
||||
import copy
|
||||
|
||||
|
||||
class CollectMetadata(pyblish.api.ContextPlugin):
|
||||
"""Transfer context metadata to the instance.
|
||||
|
||||
This applies a copy of the `context.data['metadata']` to the
|
||||
`instance.data['metadata']` for the following metadata:
|
||||
|
||||
Provides:
|
||||
{
|
||||
"topic": "topic",
|
||||
"author": "user",
|
||||
"date": "date",
|
||||
"filename": "currentFile"
|
||||
}
|
||||
|
||||
|
||||
"""
|
||||
order = pyblish.api.CollectorOrder + 0.2
|
||||
label = "Metadata"
|
||||
|
||||
mapping = {"topic": "topic",
|
||||
"author": "user",
|
||||
"date": "date",
|
||||
"filename": "currentFile"}
|
||||
|
||||
def process(self, context):
|
||||
|
||||
metadata = {}
|
||||
for key, source in self.mapping.iteritems():
|
||||
if source in context.data:
|
||||
metadata[key] = context.data.get(source)
|
||||
|
||||
for instance in context:
|
||||
instance.data["metadata"] = copy.deepcopy(metadata)
|
||||
|
||||
self.log.info("Collected {0}".format(metadata))
|
||||
|
|
@ -1,6 +1,7 @@
|
|||
import pyblish.api
|
||||
import maya.cmds as cmds
|
||||
import cb.utils.maya.dag as dag
|
||||
|
||||
import pyblish.api
|
||||
import colorbleed.maya.lib as lib
|
||||
|
||||
|
||||
class ValidateInstancerContent(pyblish.api.InstancePlugin):
|
||||
|
|
@ -15,7 +16,7 @@ class ValidateInstancerContent(pyblish.api.InstancePlugin):
|
|||
|
||||
def process(self, instance):
|
||||
|
||||
invalid = False
|
||||
error = False
|
||||
members = instance.data['setMembers']
|
||||
export_members = instance.data['exactExportMembers']
|
||||
|
||||
|
|
@ -23,42 +24,22 @@ class ValidateInstancerContent(pyblish.api.InstancePlugin):
|
|||
|
||||
if not len(members) == len(cmds.ls(members, type="instancer")):
|
||||
self.log.error("Instancer can only contain instancers")
|
||||
invalid = True
|
||||
error = True
|
||||
|
||||
# TODO: Implement better check for particles are cached
|
||||
if not cmds.ls(export_members, type="nucleus"):
|
||||
self.log.error("Instancer must have a connected nucleus")
|
||||
invalid = True
|
||||
error = True
|
||||
|
||||
if not cmds.ls(export_members, type="cacheFile"):
|
||||
self.log.error("Instancer must be cached")
|
||||
invalid = True
|
||||
error = True
|
||||
|
||||
# Ensure all instanced geometry is hidden
|
||||
shapes = cmds.ls(export_members,
|
||||
dag=True, shapes=True,
|
||||
noIntermediate=True)
|
||||
meshes = cmds.ls(shapes, type="mesh")
|
||||
|
||||
def invalidate(node):
|
||||
"""Whether mesh is in a valid state
|
||||
|
||||
Arguments:
|
||||
node (str): The node to check
|
||||
|
||||
Returns:
|
||||
bool: Whether it is in a valid state.
|
||||
|
||||
"""
|
||||
return dag.is_visible(node,
|
||||
displayLayer=False,
|
||||
intermediateObject=False)
|
||||
|
||||
visible = [node for node in meshes if invalidate(node)]
|
||||
if visible:
|
||||
hidden = self.check_geometry_hidden(export_members)
|
||||
if not hidden:
|
||||
error = True
|
||||
self.log.error("Instancer input geometry must be hidden "
|
||||
"the scene. Invalid: {0}".format(visible))
|
||||
invalid = True
|
||||
"the scene. Invalid: {0}".format(hidden))
|
||||
|
||||
# Ensure all in one group
|
||||
parents = cmds.listRelatives(members,
|
||||
|
|
@ -68,7 +49,26 @@ class ValidateInstancerContent(pyblish.api.InstancePlugin):
|
|||
if len(roots) > 1:
|
||||
self.log.error("Instancer should all be contained in a single "
|
||||
"group. Current roots: {0}".format(roots))
|
||||
invalid = True
|
||||
error = True
|
||||
|
||||
if invalid:
|
||||
if error:
|
||||
raise RuntimeError("Instancer Content is invalid. See log.")
|
||||
|
||||
def check_geometry_hidden(self, export_members):
|
||||
|
||||
# Ensure all instanced geometry is hidden
|
||||
shapes = cmds.ls(export_members,
|
||||
dag=True,
|
||||
shapes=True,
|
||||
noIntermediate=True)
|
||||
meshes = cmds.ls(shapes, type="mesh")
|
||||
|
||||
visible = [node for node in meshes
|
||||
if lib.is_visible(node,
|
||||
displayLayer=False,
|
||||
intermediateObject=False)]
|
||||
if visible:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import os
|
||||
import re
|
||||
import pyblish.api
|
||||
|
||||
VERBOSE = False
|
||||
|
|
@ -10,6 +12,27 @@ def is_cache_resource(resource):
|
|||
return required.issubset(tags)
|
||||
|
||||
|
||||
def valdidate_files(files):
    """Validate that every cache file exists and is a .mcx/.mcc file.

    NOTE(review): the name carries a typo ("valdidate") but is kept
    unchanged for backward compatibility with existing callers.

    Args:
        files (list): file paths to check

    Returns:
        bool: True when all files pass validation

    Raises:
        AssertionError: when a path does not exist or has an invalid
            extension (messages added so failures are diagnosable).

    """
    for path in files:
        assert os.path.exists(path), "Missing cache file: %s" % path
        # `endswith` accepts a tuple of allowed suffixes in one call
        assert path.endswith((".mcx", ".mcc")), (
            "Invalid cache file extension: %s" % path)

    return True
|
||||
|
||||
|
||||
def filter_ticks(files):
    """Split out Maya substep ("tick") cache files from `files`.

    Maya particle caches save substeps as extra files suffixed like
    ``Tick60.mcx`` or ``Tick120.mcx``.

    Args:
        files (list): cache file paths to inspect

    Returns:
        tuple: (set of tick file paths, set of integer tick numbers)

    """
    tick_files = set()
    ticks = set()
    pattern = re.compile(".+Tick([0-9]+).mcx$")
    for filepath in files:
        found = pattern.match(os.path.basename(filepath))
        if found is None:
            continue
        tick_files.add(filepath)
        ticks.add(int(found.group(1)))

    return tick_files, ticks
|
||||
|
||||
|
||||
class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
|
||||
"""Validates all instancer particle systems are cached correctly.
|
||||
|
||||
|
|
@ -26,7 +49,6 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
|
|||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
||||
import os
|
||||
import pyseq
|
||||
|
||||
start_frame = instance.data.get("startFrame", 0)
|
||||
|
|
@ -42,7 +64,6 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
|
|||
|
||||
node = resource['node']
|
||||
all_files = resource['files'][:]
|
||||
|
||||
all_lookup = set(all_files)
|
||||
|
||||
# The first file is usually the .xml description file.
|
||||
|
|
@ -54,28 +75,21 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
|
|||
|
||||
# Ensure all files exist (including ticks)
|
||||
# The remainder file paths should be the .mcx or .mcc files
|
||||
for f in all_files:
|
||||
assert os.path.exists(f)
|
||||
assert f.endswith(".mcx") or f.endswith(".mcc")
|
||||
valdidate_files(all_files)
|
||||
|
||||
# Maya particle caches support substeps by saving out additional files
|
||||
# that end with a Tick60.mcx, Tick120.mcx, etc. suffix. To avoid `pyseq`
|
||||
# getting confused we filter those out and then for each file (except
|
||||
# the last frame) check that at least all ticks exist.
|
||||
tick_files = set()
|
||||
ticks = set()
|
||||
for path in all_files:
|
||||
import re
|
||||
match = re.match(".+Tick([0-9]+).mcx$", os.path.basename(path))
|
||||
# Maya particle caches support substeps by saving out additional
|
||||
# files that end with a Tick60.mcx, Tick120.mcx, etc. suffix.
|
||||
# To avoid `pyseq` getting confused we filter those out and then
|
||||
# for each file (except the last frame) check that at least all
|
||||
# ticks exist.
|
||||
|
||||
if match:
|
||||
tick_files.add(path)
|
||||
num = match.group(1)
|
||||
ticks.add(int(num))
|
||||
tick_files, ticks = filter_ticks(all_files)
|
||||
if tick_files:
|
||||
files = [f for f in all_files if f not in tick_files]
|
||||
else:
|
||||
files = all_files
|
||||
|
||||
files = [f for f in all_files if f not in tick_files] if tick_files else all_files
|
||||
sequences = pyseq.get_sequences(files)
|
||||
|
||||
if len(sequences) != 1:
|
||||
invalid.append(node)
|
||||
cls.log.warning("More than one sequence found? "
|
||||
|
|
@ -112,7 +126,8 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
|
|||
# for the frames required by the time range.
|
||||
if ticks:
|
||||
ticks = list(sorted(ticks))
|
||||
cls.log.info("Found ticks: {0} (substeps: {1})".format(ticks, len(ticks)))
|
||||
cls.log.info("Found ticks: {0} "
|
||||
"(substeps: {1})".format(ticks, len(ticks)))
|
||||
|
||||
# Check all frames except the last since we don't
|
||||
# require subframes after our time range.
|
||||
|
|
@ -123,7 +138,8 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
|
|||
frame = item.frame
|
||||
if not frame:
|
||||
invalid.append(node)
|
||||
cls.log.error("Path is not a frame in sequence: {0}".format(item))
|
||||
cls.log.error("Path is not a frame in sequence: "
|
||||
"{0}".format(item))
|
||||
continue
|
||||
|
||||
# Not required for our time range
|
||||
|
|
@ -137,7 +153,8 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
|
|||
if tick_file not in all_lookup:
|
||||
invalid.append(node)
|
||||
cls.log.warning("Tick file found that is not "
|
||||
"in cache query filenames: {0}".format(tick_file))
|
||||
"in cache query filenames: "
|
||||
"{0}".format(tick_file))
|
||||
|
||||
return invalid
|
||||
|
||||
|
|
@ -148,4 +165,4 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
|
|||
if invalid:
|
||||
self.log.error("Invalid nodes: {0}".format(invalid))
|
||||
raise RuntimeError("Invalid particle caches in instance. "
|
||||
"See logs for details.")
|
||||
"See logs for details.")
|
||||
|
|
@ -2,66 +2,7 @@ from maya import cmds
|
|||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
|
||||
def is_visible(node,
|
||||
displayLayer=True,
|
||||
intermediateObject=True,
|
||||
parentHidden=True,
|
||||
visibility=True):
|
||||
"""Is `node` visible?
|
||||
|
||||
Returns whether a node is hidden by one of the following methods:
|
||||
- The node exists (always checked)
|
||||
- The node must be a dagNode (always checked)
|
||||
- The node's visibility is off.
|
||||
- The node is set as intermediate Object.
|
||||
- The node is in a disabled displayLayer.
|
||||
- Whether any of its parent nodes is hidden.
|
||||
|
||||
Roughly based on: http://ewertb.soundlinker.com/mel/mel.098.php
|
||||
|
||||
Returns:
|
||||
bool: Whether the node is visible in the scene
|
||||
|
||||
"""
|
||||
|
||||
# Only existing objects can be visible
|
||||
if not cmds.objExists(node):
|
||||
return False
|
||||
|
||||
# Only dagNodes can be visible
|
||||
if not cmds.objectType(node, isAType='dagNode'):
|
||||
return False
|
||||
|
||||
if visibility:
|
||||
if not cmds.getAttr('{0}.visibility'.format(node)):
|
||||
return False
|
||||
|
||||
if intermediateObject and cmds.objectType(node, isAType='shape'):
|
||||
if cmds.getAttr('{0}.intermediateObject'.format(node)):
|
||||
return False
|
||||
|
||||
if displayLayer:
|
||||
# Display layers set overrideEnabled and overrideVisibility on members
|
||||
if cmds.attributeQuery('overrideEnabled', node=node, exists=True):
|
||||
override_enabled = cmds.getAttr('{}.overrideEnabled'.format(node))
|
||||
override_visibility = cmds.getAttr('{}.overrideVisibility'.format(node))
|
||||
if override_enabled and override_visibility:
|
||||
return False
|
||||
|
||||
if parentHidden:
|
||||
parents = cmds.listRelatives(node, parent=True, fullPath=True)
|
||||
if parents:
|
||||
parent = parents[0]
|
||||
if not is_visible(parent,
|
||||
displayLayer=displayLayer,
|
||||
intermediateObject=False,
|
||||
parentHidden=parentHidden,
|
||||
visibility=visibility):
|
||||
return False
|
||||
|
||||
return True
|
||||
import colorbleed.maya.lib as lib
|
||||
|
||||
|
||||
class ValidateJointsHidden(pyblish.api.InstancePlugin):
|
||||
|
|
@ -81,17 +22,22 @@ class ValidateJointsHidden(pyblish.api.InstancePlugin):
|
|||
category = 'rig'
|
||||
version = (0, 1, 0)
|
||||
label = "Joints Hidden"
|
||||
actions = [colorbleed.api.SelectInvalidAction]
|
||||
actions = [colorbleed.api.SelectInvalidAction,
|
||||
colorbleed.api.RepairAction]
|
||||
|
||||
@staticmethod
|
||||
def get_invalid(instance):
|
||||
joints = cmds.ls(instance, type='joint', long=True)
|
||||
return [j for j in joints if is_visible(j, displayLayer=True)]
|
||||
return [j for j in joints if lib.is_visible(j, displayLayer=True)]
|
||||
|
||||
def process(self, instance):
|
||||
"""Process all the nodes in the instance 'objectSet'"""
|
||||
invalid = self.get_invalid(instance)
|
||||
|
||||
if invalid:
|
||||
raise ValueError("Visible joints found: "
|
||||
"{0}".format(invalid))
|
||||
raise ValueError("Visible joints found: {0}".format(invalid))
|
||||
|
||||
@classmethod
|
||||
def repair(cls, instance):
|
||||
import maya.mel as mel
|
||||
mel.eval("HideJoints")
|
||||
|
|
|
|||
|
|
@ -1,108 +0,0 @@
|
|||
import os
|
||||
|
||||
from maya import cmds
|
||||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
import cbra.lib
|
||||
from cb.utils.python.decorators import memorize
|
||||
|
||||
|
||||
def is_latest_version(path):
|
||||
"""Return whether path is the latest version.
|
||||
|
||||
Args:
|
||||
path (str): Full path to published file.
|
||||
|
||||
Returns:
|
||||
bool: Whether the path belongs to the latest version.
|
||||
|
||||
"""
|
||||
|
||||
ctx = cbra.lib.parse_context(path)
|
||||
versions = cbra.lib.list_versions(ctx)
|
||||
highest = cbra.lib.find_highest_version(versions)
|
||||
|
||||
if ctx.get('version', None) != highest:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
@memorize
|
||||
def is_latest_version_cached(path):
|
||||
"""Memorized cached wrapper to `is_latest_version`"""
|
||||
return is_latest_version(path)
|
||||
|
||||
|
||||
class ValidateLatestVersions(pyblish.api.InstancePlugin):
|
||||
"""Validates content included is using latest published versions.
|
||||
|
||||
If published contents are out of date they can be easily updated to the
|
||||
latest version using the scripts > pyblish > utilities > update_xxx for
|
||||
the corresponding node type.
|
||||
|
||||
"""
|
||||
|
||||
order = colorbleed.api.ValidateContentsOrder
|
||||
families = ['colorbleed.layout']
|
||||
label = "Latest Versions"
|
||||
actions = [colorbleed.api.SelectInvalidAction]
|
||||
optional = True
|
||||
|
||||
# (node_type, attribute) that are non-referenced to check paths for
|
||||
LOCAL_CHECKS = {
|
||||
"gpuCache": "cacheFileName",
|
||||
"VRayMesh": "fileName2"
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
||||
all_nodes = instance[:]
|
||||
invalid = list()
|
||||
|
||||
# check non-referenced nodes
|
||||
for node_type, attr in cls.LOCAL_CHECKS.iteritems():
|
||||
|
||||
nodes = cmds.ls(all_nodes, type=node_type, long=True)
|
||||
referenced = cmds.ls(nodes, referencedNodes=True, long=True)
|
||||
non_referenced = [n for n in nodes if n not in referenced]
|
||||
|
||||
for node in non_referenced:
|
||||
|
||||
path = cmds.getAttr("{0}.{1}".format(node, attr))
|
||||
path = os.path.normpath(path)
|
||||
if not is_latest_version_cached(path):
|
||||
invalid.append(node)
|
||||
|
||||
# reference nodes related to this isntance
|
||||
referenced = cmds.ls(all_nodes, long=True, referencedNodes=True)
|
||||
referenced_nodes = set(cmds.referenceQuery(reference, referenceNode=True)
|
||||
for reference in referenced)
|
||||
|
||||
for reference in referenced_nodes:
|
||||
path = cmds.referenceQuery(reference,
|
||||
filename=True,
|
||||
withoutCopyNumber=True)
|
||||
path = os.path.normpath(path)
|
||||
if not is_latest_version_cached(path):
|
||||
invalid.append(reference)
|
||||
|
||||
return invalid
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
# Clear cache only once per publish. So we store a value on
|
||||
# the context on the first instance so we clear only once.
|
||||
name = self.__class__.__name__
|
||||
key = "_plugin_{0}_processed".format(name)
|
||||
if not instance.context.data.get(key, False):
|
||||
is_latest_version_cached.cache.clear()
|
||||
instance.context.data[key] = True
|
||||
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise RuntimeError("Used Items are not updated to latest versions:"
|
||||
"{0}".format(invalid))
|
||||
|
|
@ -18,10 +18,20 @@ class ValidateLookContents(pyblish.api.InstancePlugin):
|
|||
def process(self, instance):
|
||||
"""Process all the nodes in the instance"""
|
||||
|
||||
error = False
|
||||
|
||||
attributes = ["lookSets",
|
||||
"lookSetRelations",
|
||||
"lookAttributes"]
|
||||
|
||||
if not instance[:]:
|
||||
raise RuntimeError("Instance is empty")
|
||||
|
||||
# Required look data
|
||||
assert "lookSets" in instance.data
|
||||
assert "lookSetRelations" in instance.data
|
||||
assert "lookAttributes" in instance.data
|
||||
for attr in attributes:
|
||||
if attr not in instance.data:
|
||||
self.log.error("No %s found in data" % attr)
|
||||
error = True
|
||||
|
||||
if error:
|
||||
raise RuntimeError("Invalid look content. See log for details.")
|
||||
|
|
|
|||
|
|
@ -2,7 +2,6 @@ from maya import cmds
|
|||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
from colorbleed.api import get_errored_instances_from_context
|
||||
|
||||
from cbra.utils.maya.node_uuid import get_id, add_ids
|
||||
|
||||
|
|
@ -71,10 +70,10 @@ class CopyUUIDsFromHistory(pyblish.api.Action):
|
|||
|
||||
# Get the errored instances
|
||||
self.log.info("Finding failed instances..")
|
||||
errored_instances = get_errored_instances_from_context(context)
|
||||
errored = colorbleed.api.get_errored_instances_from_context(context)
|
||||
|
||||
# Apply pyblish.logic to get the instances for the plug-in
|
||||
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
|
||||
instances = pyblish.api.instances_by_plugin(errored, plugin)
|
||||
|
||||
ids_map = dict()
|
||||
for instance in instances:
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import maya.cmds as cmds
|
||||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
|
|
@ -11,7 +13,7 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):
|
|||
"colorbleed.model",
|
||||
"colorbleed.pointcache",
|
||||
"colorbleed.curves"]
|
||||
actions = [colorbleed.api.RepairAction]
|
||||
actions = [colorbleed.api.RepairContextAction]
|
||||
|
||||
def process(self, context):
|
||||
|
||||
|
|
@ -29,11 +31,23 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):
|
|||
|
||||
assert angularunits and angularunits == 'deg', ("Scene angular units "
|
||||
"must be degrees")
|
||||
|
||||
assert fps and fps == 25.0, "Scene must be 25 FP"
|
||||
assert fps and fps == 25.0, "Scene must be 25 FPS"
|
||||
|
||||
@classmethod
|
||||
def repair(cls):
|
||||
"""Fix the current FPS setting of the scene, set to PAL(25.0 fps)
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
"""Fix the current FPS setting of the scene, set to PAL(25.0 fps)"""
|
||||
|
||||
cls.log.info("Setting angular unit to 'degrees'")
|
||||
cmds.currentUnit(angle="degree")
|
||||
current_angle = cmds.currentUnit(query=True, angle=True)
|
||||
cls.log.debug(current_angle)
|
||||
|
||||
cls.log.info("Setting linear unit to 'centimeter'")
|
||||
cmds.currentUnit(linear="centimeter")
|
||||
current_linear = cmds.currentUnit(query=True, linear=True)
|
||||
cls.log.debug(current_linear)
|
||||
|
||||
cls.log.info("Setting time unit to 'PAL'")
|
||||
cmds.currentUnit(time="pal")
|
||||
current_time = cmds.currentUnit(query=True, time=True)
|
||||
cls.log.debug(current_time)
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
from maya import cmds
|
||||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
|
|
@ -14,58 +16,155 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
|
|||
order = colorbleed.api.ValidateContentsOrder
|
||||
label = "Rig Contents"
|
||||
hosts = ["maya"]
|
||||
families = ["colorbleed.rig", "colorbleed.rigcontrols",
|
||||
"colorbleed.rigpointcache"]
|
||||
families = ["colorbleed.rig"]
|
||||
|
||||
accepted_output = ["mesh", "transform"]
|
||||
accepted_controllers = ["transform"]
|
||||
ignore_nodes = []
|
||||
|
||||
invalid_hierarchy = []
|
||||
invalid_controls = []
|
||||
invalid_geometry = []
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
from maya import cmds
|
||||
error = False
|
||||
|
||||
objsets = ("controls_SET", "out_SET")
|
||||
|
||||
missing = list()
|
||||
for objset in objsets:
|
||||
if objset not in instance:
|
||||
missing.append(objset)
|
||||
|
||||
assert not missing, ("%s is missing %s"
|
||||
% (instance, missing))
|
||||
objectsets = ("controls_SET", "out_SET")
|
||||
missing = [obj for obj in objectsets if obj not in instance]
|
||||
assert not missing, ("%s is missing %s" % (instance, missing))
|
||||
|
||||
# Ensure there are at least some transforms or dag nodes
|
||||
# in the rig instance
|
||||
set_members = self.check_set_members(instance)
|
||||
|
||||
self.log.info("Evaluating contents of object sets..")
|
||||
|
||||
# Ensure contents in sets and retrieve long path for all objects
|
||||
output_content = cmds.sets("out_SET", query=True) or []
|
||||
assert output_content, "Must have members in rig out_SET"
|
||||
|
||||
controls_content = cmds.set("controls_SET", query=True) or []
|
||||
assert controls_content, "Must have members in rig controls_SET"
|
||||
|
||||
root_node = cmds.ls(set_members, assemblies=True)
|
||||
hierarchy = cmds.listRelatives(root_node, allDescendents=True,
|
||||
fullPath=True)
|
||||
|
||||
self.invalid_geometry = self.validate_geometry(output_content,
|
||||
hierarchy)
|
||||
self.invalid_controls = self.validate_controls(controls_content,
|
||||
hierarchy)
|
||||
|
||||
if self.invalid_hierachy:
|
||||
self.log.error("Found nodes which reside outside of root group "
|
||||
"while they are set up for publishing."
|
||||
"\n%s" % self.invalid_hierachy)
|
||||
error = True
|
||||
|
||||
if self.not_transforms:
|
||||
self.log.error("Only transforms can be part of the controls_SET."
|
||||
"\n%s" % self.not_transforms)
|
||||
error = True
|
||||
|
||||
if self.invalid_geometry:
|
||||
self.log.error("Only meshes can be part of the out_SET\n%s"
|
||||
% self.invalid_geometry)
|
||||
error = True
|
||||
|
||||
if error:
|
||||
raise RuntimeError("Invalid rig content. See log for details.")
|
||||
|
||||
def check_set_members(self, instance):
|
||||
"""Check if the instance has any dagNodes
|
||||
Args:
|
||||
instance: the instance which needs to be published
|
||||
Returns:
|
||||
set_members (list): all dagNodes from instance
|
||||
"""
|
||||
|
||||
set_members = instance.data['setMembers']
|
||||
if not cmds.ls(set_members, type="dagNode", long=True):
|
||||
raise RuntimeError("No dag nodes in the pointcache instance. "
|
||||
"(Empty instance?)")
|
||||
return set_members
|
||||
|
||||
self.log.info("Evaluating contents of object sets..")
|
||||
not_meshes = list()
|
||||
def validate_hierarchy(self, hierarchy, nodes):
|
||||
"""Collect all nodes which are NOT within the hierarchy
|
||||
Args:
|
||||
hierarchy (list): nodes within the root node
|
||||
nodes (list): nodes to check
|
||||
|
||||
# Ensure contents in sets
|
||||
members = cmds.sets("out_SET", query=True) or []
|
||||
assert members, "Must have members in rig out_SET"
|
||||
Returns:
|
||||
errors (list): list of nodes
|
||||
"""
|
||||
errors = []
|
||||
for node in nodes:
|
||||
if node not in hierarchy:
|
||||
errors.append(node)
|
||||
return errors
|
||||
|
||||
controls = cmds.sets("controls_SET", query=True) or []
|
||||
assert controls, "Must have controls in rig control_SET"
|
||||
def validate_geometry(self, set_members, hierarchy):
|
||||
"""Check if the out set passes the validations
|
||||
|
||||
Checks if all its set members are within the hierarchy of the root
|
||||
Checks if the node types of the set members valid
|
||||
|
||||
Args:
|
||||
set_members: list of nodes of the controls_set
|
||||
hierarchy: list of nodes which reside under the root node
|
||||
|
||||
Returns:
|
||||
errors (list)
|
||||
"""
|
||||
|
||||
errors = []
|
||||
# Validate the contents further
|
||||
shapes = cmds.listRelatives(members,
|
||||
shapes = cmds.listRelatives(set_members,
|
||||
allDescendents=True,
|
||||
shapes=True,
|
||||
fullPath=True) or []
|
||||
for shape in shapes:
|
||||
if cmds.nodeType(shape) != "mesh":
|
||||
not_meshes.append(shape)
|
||||
|
||||
not_transforms = list()
|
||||
for node in cmds.sets("controls_SET", query=True) or []:
|
||||
if cmds.nodeType(node) != "transform":
|
||||
not_meshes.append(node)
|
||||
# The user can add the shape node to the out_set, this will result
|
||||
# in none when querying allDescendents
|
||||
all_shapes = set_members + shapes
|
||||
|
||||
assert not_transforms == [], (
|
||||
"Only transforms can be part of the controls_SET: %s"
|
||||
% not_transforms)
|
||||
# geometry
|
||||
invalid_shapes = self.validate_hierarchy(hierarchy, all_shapes)
|
||||
self.invalid_hierachy.extend(invalid_shapes)
|
||||
for shape in all_shapes:
|
||||
nodetype = cmds.nodeType(shape)
|
||||
if nodetype in self.ignore_nodes:
|
||||
continue
|
||||
|
||||
assert not_meshes == [], (
|
||||
"Only meshes can be part of the out_SET: %s"
|
||||
% not_meshes)
|
||||
if nodetype not in self.accepted_output:
|
||||
errors.append(shape)
|
||||
|
||||
return errors
|
||||
|
||||
def validate_controls(self, set_members, hierarchy):
|
||||
"""Check if the controller set passes the validations
|
||||
|
||||
Checks if all its set members are within the hierarchy of the root
|
||||
Checks if the node types of the set members valid
|
||||
|
||||
Args:
|
||||
set_members: list of nodes of the controls_set
|
||||
hierarchy: list of nodes which reside under the root node
|
||||
|
||||
Returns:
|
||||
errors (list)
|
||||
"""
|
||||
|
||||
errors = []
|
||||
invalid_controllers = self.validate_hierarchy(hierarchy, set_members)
|
||||
self.invalid_hierachy.extend(invalid_controllers)
|
||||
for node in set_members:
|
||||
nodetype = cmds.nodeType(node)
|
||||
if nodetype in self.ignore_nodes:
|
||||
continue
|
||||
|
||||
if nodetype not in self.accepted_controllers:
|
||||
errors.append(node)
|
||||
|
||||
return errors
|
||||
|
|
|
|||
80
colorbleed/plugins/maya/publish/validate_rig_controllers.py
Normal file
80
colorbleed/plugins/maya/publish/validate_rig_controllers.py
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
from maya import cmds
|
||||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
|
||||
class ValidateRigControllers(pyblish.api.InstancePlugin):
|
||||
"""Check if the controllers have the transformation attributes set to
|
||||
default values, locked vibisility attributes and are not keyed
|
||||
"""
|
||||
order = colorbleed.api.ValidateContentsOrder + 0.05
|
||||
label = "Rig Controllers"
|
||||
hosts = ["maya"]
|
||||
families = ["colorbleed.rig"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
error = False
|
||||
is_keyed = list()
|
||||
not_locked = list()
|
||||
is_offset = list()
|
||||
|
||||
controls = cmds.sets("controls_SET", query=True)
|
||||
assert controls, "Must have controls in rig control_SET"
|
||||
|
||||
for control in controls:
|
||||
valid_keyed = self.validate_keyed_state(control)
|
||||
if not valid_keyed:
|
||||
is_keyed.append(control)
|
||||
|
||||
# check if visibility is locked
|
||||
attribute = "{}.visibility".format(control)
|
||||
locked = cmds.getAttr(attribute, lock=True)
|
||||
if not locked:
|
||||
not_locked.append(control)
|
||||
|
||||
valid_transforms = self.validate_transforms(control)
|
||||
if not valid_transforms:
|
||||
is_offset.append(control)
|
||||
|
||||
if is_keyed:
|
||||
self.log.error("No controls can be keyes. Failed :\n"
|
||||
"%s" % is_keyed)
|
||||
|
||||
if is_offset:
|
||||
self.log.error("All controls default transformation values. "
|
||||
"Failed :\n%s" % is_offset)
|
||||
|
||||
if not_locked:
|
||||
self.log.error("All controls must have visibility "
|
||||
"attribute locked. Failed :\n"
|
||||
"%s" % not_locked)
|
||||
|
||||
if error:
|
||||
raise RuntimeError("Invalid rig controllers. See log for details.")
|
||||
|
||||
def validate_transforms(self, control):
|
||||
tolerance = 1e-30
|
||||
identity = [1.0, 0.0, 0.0, 0.0,
|
||||
0.0, 1.0, 0.0, 0.0,
|
||||
0.0, 0.0, 1.0, 0.0,
|
||||
0.0, 0.0, 0.0, 1.0]
|
||||
|
||||
matrix = cmds.xform(control, query=True, matrix=True, objectSpace=True)
|
||||
if not all(abs(x - y) < tolerance for x, y in zip(identity, matrix)):
|
||||
return False
|
||||
return True
|
||||
|
||||
def validate_keyed_state(self, control):
|
||||
"""Check if the control has an animation curve attached
|
||||
Args:
|
||||
control:
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
animation_curves = cmds.keyframe(control, query=True, name=True)
|
||||
if animation_curves:
|
||||
return False
|
||||
return True
|
||||
Loading…
Add table
Add a link
Reference in a new issue