Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 08:24:53 +01:00)

Commit 690db8681c
Merge branch 'master' of https://bitbucket.org/colorbleed/config

15 changed files with 580 additions and 462 deletions

@@ -12,6 +12,8 @@ from .plugin import (
     # temporary fix, might
 
 from .action import (
+    get_errored_instances_from_context,
     SelectInvalidAction,
     GenerateUUIDsOnInvalidAction,
     RepairAction,
@@ -24,7 +26,8 @@ __all__ = [
     "ValidateContentsOrder",
     "ValidateSceneOrder",
     "ValidateMeshOrder",
+    # action
+    "get_errored_instances_from_context",
     "SelectInvalidAction",
     "GenerateUUIDsOnInvalidAction",
     "RepairAction"

@@ -242,3 +242,63 @@ def collect_animation_data():
 
 def get_current_renderlayer():
     return cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
+
+
+def is_visible(node,
+               displayLayer=True,
+               intermediateObject=True,
+               parentHidden=True,
+               visibility=True):
+    """Is `node` visible?
+
+    Returns whether a node is hidden by one of the following methods:
+    - The node exists (always checked)
+    - The node must be a dagNode (always checked)
+    - The node's visibility is off.
+    - The node is set as intermediate Object.
+    - The node is in a disabled displayLayer.
+    - Whether any of its parent nodes is hidden.
+
+    Roughly based on: http://ewertb.soundlinker.com/mel/mel.098.php
+
+    Returns:
+        bool: Whether the node is visible in the scene
+
+    """
+
+    # Only existing objects can be visible
+    if not cmds.objExists(node):
+        return False
+
+    # Only dagNodes can be visible
+    if not cmds.objectType(node, isAType='dagNode'):
+        return False
+
+    if visibility:
+        if not cmds.getAttr('{0}.visibility'.format(node)):
+            return False
+
+    if intermediateObject and cmds.objectType(node, isAType='shape'):
+        if cmds.getAttr('{0}.intermediateObject'.format(node)):
+            return False
+
+    if displayLayer:
+        # Display layers set overrideEnabled and overrideVisibility on members
+        if cmds.attributeQuery('overrideEnabled', node=node, exists=True):
+            override_enabled = cmds.getAttr('{}.overrideEnabled'.format(node))
+            override_visibility = cmds.getAttr('{}.overrideVisibility'.format(node))
+            if override_enabled and override_visibility:
+                return False
+
+    if parentHidden:
+        parents = cmds.listRelatives(node, parent=True, fullPath=True)
+        if parents:
+            parent = parents[0]
+            if not is_visible(parent,
+                              displayLayer=displayLayer,
+                              intermediateObject=False,
+                              parentHidden=parentHidden,
+                              visibility=visibility):
+                return False
+
+    return True

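Note (editor's illustration, not part of the commit): a minimal sketch of how the is_visible helper above might be called from a Maya session. The module path colorbleed.maya.lib is taken from the later hunks in this commit; the node name pCube1 is an invented example.

    from maya import cmds

    import colorbleed.maya.lib as lib

    # Build a throwaway cube and hide it via its visibility attribute.
    cmds.polyCube(name="pCube1")
    cmds.setAttr("pCube1.visibility", 0)

    print(lib.is_visible("pCube1"))                    # False - hidden by visibility
    print(lib.is_visible("pCube1", visibility=False))  # True - visibility check skipped
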
@@ -1,6 +1,7 @@
-import avalon.maya
 from maya import cmds
 
+import avalon.maya
 
 
 class CreateRig(avalon.maya.Creator):
     """Skeleton and controls for manipulation of the geometry"""
@@ -12,6 +13,8 @@ class CreateRig(avalon.maya.Creator):
     def process(self):
         instance = super(CreateRig, self).process()
 
+        self.log.info("Creating Rig instance set up ...")
+
         controls = cmds.sets(name="controls_SET", empty=True)
         pointcache = cmds.sets(name="out_SET", empty=True)
         cmds.sets([controls, pointcache], forceElement=instance)

@@ -1,6 +1,7 @@
 from maya import cmds
 
 from avalon import api
+from avalon import maya
 
 
 class ModelLoader(api.Loader):
@@ -15,7 +16,7 @@ class ModelLoader(api.Loader):
     color = "orange"
 
     def process(self, name, namespace, context):
-        from avalon import maya
         with maya.maintained_selection():
             nodes = cmds.file(
                 self.fname,
@@ -23,7 +24,7 @@ class ModelLoader(api.Loader):
                 reference=True,
                 returnNewNodes=True,
                 groupReference=True,
-                groupName=namespace + ":" + name
+                groupName="{}:{}".format(namespace, name)
             )
 
         # Assign default shader to meshes

@@ -69,69 +69,25 @@ class CollectLook(pyblish.api.InstancePlugin):
     # Ignore specifically named sets (check with endswith)
     IGNORE = ["out_SET", "controls_SET", "_INST"]
 
-    def process(self, instance):
-        """Collect the Look in the instance with the correct layer settings"""
-
-        layer = instance.data.get("renderlayer", "defaultRenderLayer")
-        with context.renderlayer(layer):
-            self.log.info("Checking out layer: {0}".format(layer))
-            self.collect(instance)
-
     def collect(self, instance):
 
         # Whether to log information verbosely
         verbose = instance.data.get("verbose", False)
 
         self.log.info("Looking for look associations "
-                      "for {0}..".format(instance.data['label']))
-
-        # Get view sets (so we can ignore those sets later)
-        model_panels = cmds.getPanel(type="modelPanel")
-        view_sets = set()
-
-        for panel in model_panels:
-            view_set = cmds.modelEditor(panel, query=True, viewObjects=True)
-            if view_set:
-                view_sets.add(view_set)
+                      "for %s" % instance.data['label'])
 
         # Discover related object sets
         self.log.info("Gathering sets..")
-        sets = dict()
-        for node in instance:
-
-            node_sets = cmds.listSets(object=node, extendToShape=False) or []
-            if verbose:
-                self.log.info("Found raw sets "
-                              "{0} for {1}".format(node_sets, node))
-
-            if not node_sets:
-                continue
-
-            # Exclude deformer sets
-            deformer_sets = cmds.listSets(object=node,
-                                          extendToShape=False,
-                                          type=2) or []
-            deformer_sets = set(deformer_sets)  # optimize lookup
-            node_sets = [s for s in node_sets if s not in deformer_sets]
-
-            if verbose:
-                self.log.debug("After filtering deformer sets "
-                               "{0}".format(node_sets))
-
-            # Ignore specifically named sets
-            node_sets = [s for s in node_sets if
-                         not any(s.endswith(x) for x in self.IGNORE)]
-
-            if verbose:
-                self.log.debug("After filtering ignored sets "
-                               "{0}".format(node_sets))
-
-            # Ignore viewport filter view sets (from isolate select and
-            # viewports)
-            node_sets = [s for s in node_sets if s not in view_sets]
-
-            if verbose:
-                self.log.debug("After filtering view sets {0}".format(node_sets))
-
-            self.log.info("Found sets {0} for {1}".format(node_sets, node))
-
-            for objset in node_sets:
-                if objset not in sets:
-                    sets[objset] = {"name": objset,
-                                    "uuid": id_utils.get_id(objset),
-                                    "members": list()}
+        self.gather_sets(instance)
 
         # Lookup with absolute names (from root namespace)
         instance_lookup = set([str(x) for x in cmds.ls(instance,
@@ -139,44 +95,141 @@ class CollectLook(pyblish.api.InstancePlugin):
                                                        absoluteName=True)])
 
         self.log.info("Gathering set relations..")
+        sets = self.gather_sets(instance)
         for objset in sets:
 
             self.log.debug("From %s.." % objset)
             content = cmds.sets(objset, query=True)
+            objset_members = sets[objset]["members"]
             for member in cmds.ls(content, long=True, absoluteName=True):
-                node, components = (member.rsplit(".", 1) + [None])[:2]
-
-                # Only include valid members of the instance
-                if node not in instance_lookup:
-                    if verbose:
-                        self.log.info("Skipping member %s" % member)
+                member_data = self.collect_member_data(member,
+                                                       objset_members,
+                                                       instance_lookup,
+                                                       verbose)
+                if not member_data:
                     continue
 
-                if member in [m["name"] for m in sets[objset]["members"]]:
-                    continue
-
-                if verbose:
-                    self.log.debug("Such as %s.." % member)
-
-                member_data = {"name": node, "uuid": id_utils.get_id(node)}
-
-                # Include components information when components are assigned
-                if components:
-                    member_data["components"] = components
-
                 sets[objset]["members"].append(member_data)
 
         # Remove sets that didn't have any members assigned in the end
-        for objset, data in sets.items():
-            if not data['members']:
-                self.log.debug("Removing redundant set "
-                               "information: {0}".format(objset))
-                sets.pop(objset)
+        sets = self.clean_sets(sets)
 
         # Member attributes (shapes + transforms)
         self.log.info("Gathering attribute changes to instance members..")
-        attrs = []
+        attributes = self.collect_attributes_changes(instance)
+
+        # Store data on the instance
+        instance.data["lookAttributes"] = attributes
+        instance.data["lookSetRelations"] = sets.values()
+        instance.data["lookSets"] = cmds.ls(sets.keys(),
+                                            absoluteName=True,
+                                            long=True)
+
+        # Log a warning when no relevant sets were retrieved for the look.
+        if not instance.data['lookSets']:
+            self.log.warning("No sets found for the nodes in the instance: "
+                             "%s" % instance[:])
+
+        self.log.info("Collected look for %s" % instance)
+
+    def gather_sets(self, instance):
+
+        # Get view sets (so we can ignore those sets later)
+        sets = dict()
+        view_sets = set()
+        model_panels = cmds.getPanel(type="modelPanel")
+        for panel in model_panels:
+            view_set = cmds.modelEditor(panel, query=True, viewObjects=True)
+            if view_set:
+                view_sets.add(view_set)
+
+        for node in instance:
+            node_sets = self.filter_sets(node, view_sets)
+            if not node_sets:
+                continue
+
+            for objset in node_sets:
+                if objset in sets:
+                    continue
+                sets[objset] = {"name": objset,
+                                "uuid": id_utils.get_id(objset),
+                                "members": list()}
+        return sets
+
+    def filter_sets(self, node, view_sets):
+
+        node_sets = cmds.listSets(object=node, extendToShape=False) or []
+        if not node_sets:
+            return
+
+        # Exclude deformer sets
+        deformer_sets = cmds.listSets(object=node,
+                                      extendToShape=False,
+                                      type=2) or []
+        deformer_sets = set(deformer_sets)  # optimize lookup
+        sets = [s for s in node_sets if s not in deformer_sets]
+
+        # Ignore specifically named sets
+        sets = [s for s in sets if not any(s.endswith(x) for x in self.IGNORE)]
+
+        # Ignore viewport filter view sets (from isolate select and
+        # viewports)
+        sets = [s for s in sets if s not in view_sets]
+
+        self.log.info("Found sets {0} for {1}".format(node_sets, node))
+
+        return sets
+
+    def clean_sets(self, sets):
+
+        for objset, data in sets.items():
+            if not data['members']:
+                self.log.debug("Removing redundant set "
+                               "information: %s" % objset)
+                sets.pop(objset)
+
+        return sets
+
+    def collect_member_data(self, member, objset_members, instance_members,
+                            verbose=False):
+        """Get all information of the node
+        Args:
+            member (str): the name of the node to check
+            objset_members (list): the objectSet members
+            instance_members (set): the collected instance members
+            verbose (bool): get debug information
+
+        Returns:
+            dict
+
+        """
+
+        node, components = (member.rsplit(".", 1) + [None])[:2]
+
+        # Only include valid members of the instance
+        if node not in instance_members:
+            if verbose:
+                self.log.info("Skipping member %s" % member)
+            return
+
+        if member in [m["name"] for m in objset_members]:
+            return
+
+        if verbose:
+            self.log.debug("Such as %s.." % member)
+
+        member_data = {"name": node, "uuid": id_utils.get_id(node)}
+
+        # Include components information when components are assigned
+        if components:
+            member_data["components"] = components
+
+        return member_data
+
+    def collect_attributes_changes(self, instance):
+
+        attributes = []
         for node in instance:
 
             # Collect changes to "custom" attributes
@@ -186,36 +239,15 @@ class CollectLook(pyblish.api.InstancePlugin):
             if not node_attrs:
                 continue
 
-            attributes = {}
+            node_attributes = {}
             for attr in node_attrs:
                 attribute = "{}.{}".format(node, attr)
-                attributes[attr] = cmds.getAttr(attribute)
+                node_attributes[attr] = cmds.getAttr(attribute)
 
-            # attributes = dict((attr, pm.getAttr("{}.{}".format(node, attr))
-            #                   for attr in node_attrs))
             data = {"name": node,
                     "uuid": id_utils.get_id(node),
-                    "attributes": attributes}
+                    "attributes": node_attributes}
 
-            attrs.append(data)
+            attributes.append(data)
 
-        # Store data on the instance
-        instance.data["lookAttributes"] = attrs
-        instance.data["lookSetRelations"] = sets.values()
-        instance.data["lookSets"] = cmds.ls(sets.keys(),
-                                            absoluteName=True,
-                                            long=True)
-
-        # Log a warning when no relevant sets were retrieved for the look.
-        if not instance.data['lookSets']:
-            self.log.warning("No sets found for the nodes in the instance: {0}".format(instance[:]))
-
-        self.log.info("Collected look for %s" % instance)
+        return attributes
+
+    def process(self, instance):
+        """Collect the Look in the instance with the correct layer settings"""
+
+        layer = instance.data.get("renderlayer", "defaultRenderLayer")
+        with context.renderlayer(layer):
+            self.log.info("Checking out layer: {0}".format(layer))
+            self.collect(instance)

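Note (editor's illustration, not part of the commit): the member/component split that collect_member_data relies on can be tried in plain Python, without Maya. The node names below are invented.

    # Splits an objectSet member such as "|grp|pCube1.f[0:3]" into the node
    # name and an optional component selection, exactly as the collector does.
    def split_member(member):
        node, components = (member.rsplit(".", 1) + [None])[:2]
        return node, components

    print(split_member("|grp|pCube1.f[0:3]"))  # ('|grp|pCube1', 'f[0:3]')
    print(split_member("|grp|pSphere1"))       # ('|grp|pSphere1', None)
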
@@ -31,26 +31,11 @@ class SelectTextureNodesAction(pyblish.api.Action):
         # Apply pyblish.logic to get the instances for the plug-in
         instances = pyblish.api.instances_by_plugin(instances, plugin)
 
-        def is_texture_resource(resource):
-            """Return whether the resource is a texture"""
-
-            tags = resource.get("tags", [])
-            if not TAGS_LOOKUP.issubset(tags):
-                return False
-
-            if resource.get("subfolder", None) != "textures":
-                return False
-
-            if "node" not in resource:
-                return False
-
-            return True
-
         # Get the texture nodes from the instances
         nodes = []
         for instance in instances:
             for resource in instance.data.get("resources", []):
-                if is_texture_resource(resource):
+                if self.is_texture_resource(resource):
                     node = resource['node']
                     nodes.append(node)
 
@@ -64,6 +49,21 @@ class SelectTextureNodesAction(pyblish.api.Action):
             self.log.info("No texture nodes found.")
             cmds.select(deselect=True)
 
+    def is_texture_resource(self, resource):
+        """Return whether the resource is a texture"""
+
+        tags = resource.get("tags", [])
+        if not TAGS_LOOKUP.issubset(tags):
+            return False
+
+        if resource.get("subfolder", None) != "textures":
+            return False
+
+        if "node" not in resource:
+            return False
+
+        return True
+
 
 class CollectLookTextures(pyblish.api.InstancePlugin):
     """Collect look textures
@@ -84,7 +84,8 @@ class CollectLookTextures(pyblish.api.InstancePlugin):
         # Get textures from sets
         sets = instance.data["lookSets"]
         if not sets:
-            raise RuntimeError("No look sets found for the nodes in the instance. {0}".format(sets))
+            raise RuntimeError("No look sets found for the nodes in the "
+                               "instance. %s" % sets)
 
         # Get the file nodes
         history = cmds.listHistory(sets) or []
@@ -93,43 +94,61 @@ class CollectLookTextures(pyblish.api.InstancePlugin):
 
         resources = instance.data.get("resources", [])
         for node in files:
-            attribute = "%s.fileTextureName" % node
-            source = cmds.getAttr(attribute)
-
-            # Get the computed file path (e.g. the one with the <UDIM> pattern
-            # in it) So we can reassign it this computed file path whenever
-            # we need to.
-            computed_attribute = "%s.computedFileTextureNamePattern" % node
-            computed_source = cmds.getAttr(computed_attribute)
-            if source != computed_source:
-                if verbose:
-                    self.log.debug("File node computed pattern differs from "
-                                   "original pattern: {0} "
-                                   "({1} -> {2})".format(node,
-                                                         source,
-                                                         computed_source))
-
-                # We replace backslashes with forward slashes because V-Ray
-                # can't handle the UDIM files with the backslashes in the
-                # paths as the computed patterns
-                source = computed_source.replace("\\", "/")
-
-            files = shader.get_file_node_files(node)
-            if not files:
-                self.log.error("File node does not have a texture set: "
-                               "{0}".format(node))
-
-            # Define the resource
-            resource = {"tags": TAGS[:],
-                        "node": node,
-                        "attribute": attribute,
-                        "source": source,  # required for resources
-                        "files": files,  # required for resources
-                        "subfolder": "textures"  # optional for resources
-                        }
-
+            resource = self.collect_resources(node, verbose)
+            if not resource:
+                continue
+
             resources.append(resource)
 
         # Store resources
         instance.data['resources'] = resources
 
+    def collect_resources(self, node, verbose=False):
+        """Collect the link to the file(s) used (resource)
+        Args:
+            node (str): name of the node
+            verbose (bool): enable debug information
+
+        Returns:
+            dict
+        """
+
+        attribute = "{}.fileTextureName".format(node)
+        source = cmds.getAttr(attribute)
+
+        # Get the computed file path (e.g. the one with the <UDIM> pattern
+        # in it) So we can reassign it this computed file path whenever
+        # we need to.
+
+        computed_attribute = "{}.computedFileTextureNamePattern".format(node)
+        computed_source = cmds.getAttr(computed_attribute)
+        if source != computed_source:
+            if verbose:
+                self.log.debug("File node computed pattern differs from "
+                               "original pattern: {0} "
+                               "({1} -> {2})".format(node,
+                                                     source,
+                                                     computed_source))
+
+            # We replace backslashes with forward slashes because V-Ray
+            # can't handle the UDIM files with the backslashes in the
+            # paths as the computed patterns
+            source = computed_source.replace("\\", "/")
+
+        files = shader.get_file_node_files(node)
+        if not files:
+            self.log.error("File node does not have a texture set: "
+                           "{0}".format(node))
+            return
+
+        # Define the resource
+        resource = {"tags": TAGS[:],
+                    "node": node,
+                    "attribute": attribute,
+                    "source": source,  # required for resources
+                    "files": files,  # required for resources
+                    "subfolder": "textures"  # optional for resources
+                    }
+
+        return resource

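Note (editor's illustration, not part of the commit): a plain-Python sketch of the path normalisation that collect_resources applies to <UDIM> patterns. The path is invented; the only point is the backslash-to-forward-slash replacement mentioned in the V-Ray comment above.

    # Example computed pattern as Maya might report it on Windows (made-up path).
    computed_source = "C:\\projects\\textures\\diffuse.<UDIM>.exr"

    # V-Ray chokes on backslashes in UDIM patterns, so normalise to forward slashes.
    source = computed_source.replace("\\", "/")
    print(source)  # C:/projects/textures/diffuse.<UDIM>.exr
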
@@ -1,39 +0,0 @@
-import pyblish.api
-import copy
-
-
-class CollectMetadata(pyblish.api.ContextPlugin):
-    """Transfer context metadata to the instance.
-
-    This applies a copy of the `context.data['metadata']` to the
-    `instance.data['metadata']` for the following metadata:
-
-    Provides:
-        {
-            "topic": "topic",
-            "author": "user",
-            "date": "date",
-            "filename": "currentFile"
-        }
-
-    """
-    order = pyblish.api.CollectorOrder + 0.2
-    label = "Metadata"
-
-    mapping = {"topic": "topic",
-               "author": "user",
-               "date": "date",
-               "filename": "currentFile"}
-
-    def process(self, context):
-
-        metadata = {}
-        for key, source in self.mapping.iteritems():
-            if source in context.data:
-                metadata[key] = context.data.get(source)
-
-        for instance in context:
-            instance.data["metadata"] = copy.deepcopy(metadata)
-
-        self.log.info("Collected {0}".format(metadata))

@@ -1,6 +1,7 @@
-import pyblish.api
 import maya.cmds as cmds
-import cb.utils.maya.dag as dag
+
+import pyblish.api
+import colorbleed.maya.lib as lib
 
 
 class ValidateInstancerContent(pyblish.api.InstancePlugin):
@@ -15,7 +16,7 @@ class ValidateInstancerContent(pyblish.api.InstancePlugin):
 
     def process(self, instance):
 
-        invalid = False
+        error = False
         members = instance.data['setMembers']
         export_members = instance.data['exactExportMembers']
 
@@ -23,42 +24,22 @@ class ValidateInstancerContent(pyblish.api.InstancePlugin):
 
         if not len(members) == len(cmds.ls(members, type="instancer")):
             self.log.error("Instancer can only contain instancers")
-            invalid = True
+            error = True
 
         # TODO: Implement better check for particles are cached
         if not cmds.ls(export_members, type="nucleus"):
             self.log.error("Instancer must have a connected nucleus")
-            invalid = True
+            error = True
 
         if not cmds.ls(export_members, type="cacheFile"):
             self.log.error("Instancer must be cached")
-            invalid = True
+            error = True
 
-        # Ensure all instanced geometry is hidden
-        shapes = cmds.ls(export_members,
-                         dag=True, shapes=True,
-                         noIntermediate=True)
-        meshes = cmds.ls(shapes, type="mesh")
-
-        def invalidate(node):
-            """Whether mesh is in a valid state
-
-            Arguments:
-                node (str): The node to check
-
-            Returns:
-                bool: Whether it is in a valid state.
-
-            """
-            return dag.is_visible(node,
-                                  displayLayer=False,
-                                  intermediateObject=False)
-
-        visible = [node for node in meshes if invalidate(node)]
-        if visible:
+        hidden = self.check_geometry_hidden(export_members)
+        if not hidden:
+            error = True
             self.log.error("Instancer input geometry must be hidden "
-                           "the scene. Invalid: {0}".format(visible))
-            invalid = True
+                           "the scene. Invalid: {0}".format(hidden))
 
         # Ensure all in one group
         parents = cmds.listRelatives(members,
@@ -68,7 +49,26 @@ class ValidateInstancerContent(pyblish.api.InstancePlugin):
         if len(roots) > 1:
             self.log.error("Instancer should all be contained in a single "
                            "group. Current roots: {0}".format(roots))
-            invalid = True
+            error = True
 
-        if invalid:
+        if error:
             raise RuntimeError("Instancer Content is invalid. See log.")
 
+    def check_geometry_hidden(self, export_members):
+
+        # Ensure all instanced geometry is hidden
+        shapes = cmds.ls(export_members,
+                         dag=True,
+                         shapes=True,
+                         noIntermediate=True)
+        meshes = cmds.ls(shapes, type="mesh")
+
+        visible = [node for node in meshes
+                   if lib.is_visible(node,
+                                     displayLayer=False,
+                                     intermediateObject=False)]
+        if visible:
+            return False
+
+        return True

@@ -1,3 +1,5 @@
+import os
+import re
 import pyblish.api
 
 VERBOSE = False
@@ -10,6 +12,27 @@ def is_cache_resource(resource):
     return required.issubset(tags)
 
 
+def valdidate_files(files):
+    for f in files:
+        assert os.path.exists(f)
+        assert f.endswith(".mcx") or f.endswith(".mcc")
+
+    return True
+
+
+def filter_ticks(files):
+    tick_files = set()
+    ticks = set()
+    for path in files:
+        match = re.match(".+Tick([0-9]+).mcx$", os.path.basename(path))
+        if match:
+            tick_files.add(path)
+            num = match.group(1)
+            ticks.add(int(num))
+
+    return tick_files, ticks
+
+
 class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
     """Validates all instancer particle systems are cached correctly.
 
@@ -26,7 +49,6 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
     @classmethod
     def get_invalid(cls, instance):
 
-        import os
         import pyseq
 
         start_frame = instance.data.get("startFrame", 0)
@@ -42,7 +64,6 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
 
             node = resource['node']
             all_files = resource['files'][:]
-
             all_lookup = set(all_files)
 
             # The first file is usually the .xml description file.
@@ -54,28 +75,21 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
 
             # Ensure all files exist (including ticks)
             # The remainder file paths should be the .mcx or .mcc files
-            for f in all_files:
-                assert os.path.exists(f)
-                assert f.endswith(".mcx") or f.endswith(".mcc")
+            valdidate_files(all_files)
 
-            # Maya particle caches support substeps by saving out additional files
-            # that end with a Tick60.mcx, Tick120.mcx, etc. suffix. To avoid `pyseq`
-            # getting confused we filter those out and then for each file (except
-            # the last frame) check that at least all ticks exist.
-            tick_files = set()
-            ticks = set()
-            for path in all_files:
-                import re
-                match = re.match(".+Tick([0-9]+).mcx$", os.path.basename(path))
-
-                if match:
-                    tick_files.add(path)
-                    num = match.group(1)
-                    ticks.add(int(num))
-
-            files = [f for f in all_files if f not in tick_files] if tick_files else all_files
+            # Maya particle caches support substeps by saving out additional
+            # files that end with a Tick60.mcx, Tick120.mcx, etc. suffix.
+            # To avoid `pyseq` getting confused we filter those out and then
+            # for each file (except the last frame) check that at least all
+            # ticks exist.
+            tick_files, ticks = filter_ticks(all_files)
+            if tick_files:
+                files = [f for f in all_files if f not in tick_files]
+            else:
+                files = all_files
 
             sequences = pyseq.get_sequences(files)
 
             if len(sequences) != 1:
                 invalid.append(node)
                 cls.log.warning("More than one sequence found? "
@@ -112,7 +126,8 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
             # for the frames required by the time range.
             if ticks:
                 ticks = list(sorted(ticks))
-                cls.log.info("Found ticks: {0} (substeps: {1})".format(ticks, len(ticks)))
+                cls.log.info("Found ticks: {0} "
+                             "(substeps: {1})".format(ticks, len(ticks)))
 
                 # Check all frames except the last since we don't
                 # require subframes after our time range.
@@ -123,7 +138,8 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
                 frame = item.frame
                 if not frame:
                     invalid.append(node)
-                    cls.log.error("Path is not a frame in sequence: {0}".format(item))
+                    cls.log.error("Path is not a frame in sequence: "
+                                  "{0}".format(item))
                     continue
 
                 # Not required for our time range
@@ -137,7 +153,8 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
                     if tick_file not in all_lookup:
                         invalid.append(node)
                         cls.log.warning("Tick file found that is not "
-                                        "in cache query filenames: {0}".format(tick_file))
+                                        "in cache query filenames: "
+                                        "{0}".format(tick_file))
 
         return invalid
 
@@ -148,4 +165,4 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
         if invalid:
             self.log.error("Invalid nodes: {0}".format(invalid))
             raise RuntimeError("Invalid particle caches in instance. "
                                "See logs for details.")

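Note (editor's illustration, not part of the commit): how the new filter_ticks helper separates substep files from whole-frame caches. The file names are invented; the helper body is repeated here only so the snippet runs on its own.

    import os
    import re


    def filter_ticks(files):
        # Same logic as the helper added in the diff: collect paths whose base
        # name ends in "Tick<number>.mcx" and remember the substep numbers.
        tick_files = set()
        ticks = set()
        for path in files:
            match = re.match(".+Tick([0-9]+).mcx$", os.path.basename(path))
            if match:
                tick_files.add(path)
                ticks.add(int(match.group(1)))
        return tick_files, ticks


    caches = ["cacheFrame100.mcx",
              "cacheFrame100Tick60.mcx",
              "cacheFrame100Tick120.mcx",
              "cacheFrame101.mcx"]
    tick_files, ticks = filter_ticks(caches)
    print(sorted(tick_files))  # ['cacheFrame100Tick120.mcx', 'cacheFrame100Tick60.mcx']
    print(sorted(ticks))       # [60, 120]
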
@@ -2,66 +2,7 @@ from maya import cmds
 
 import pyblish.api
 import colorbleed.api
+import colorbleed.maya.lib as lib
-
-
-def is_visible(node,
-               displayLayer=True,
-               intermediateObject=True,
-               parentHidden=True,
-               visibility=True):
-    """Is `node` visible?
-
-    Returns whether a node is hidden by one of the following methods:
-    - The node exists (always checked)
-    - The node must be a dagNode (always checked)
-    - The node's visibility is off.
-    - The node is set as intermediate Object.
-    - The node is in a disabled displayLayer.
-    - Whether any of its parent nodes is hidden.
-
-    Roughly based on: http://ewertb.soundlinker.com/mel/mel.098.php
-
-    Returns:
-        bool: Whether the node is visible in the scene
-
-    """
-
-    # Only existing objects can be visible
-    if not cmds.objExists(node):
-        return False
-
-    # Only dagNodes can be visible
-    if not cmds.objectType(node, isAType='dagNode'):
-        return False
-
-    if visibility:
-        if not cmds.getAttr('{0}.visibility'.format(node)):
-            return False
-
-    if intermediateObject and cmds.objectType(node, isAType='shape'):
-        if cmds.getAttr('{0}.intermediateObject'.format(node)):
-            return False
-
-    if displayLayer:
-        # Display layers set overrideEnabled and overrideVisibility on members
-        if cmds.attributeQuery('overrideEnabled', node=node, exists=True):
-            override_enabled = cmds.getAttr('{}.overrideEnabled'.format(node))
-            override_visibility = cmds.getAttr('{}.overrideVisibility'.format(node))
-            if override_enabled and override_visibility:
-                return False
-
-    if parentHidden:
-        parents = cmds.listRelatives(node, parent=True, fullPath=True)
-        if parents:
-            parent = parents[0]
-            if not is_visible(parent,
-                              displayLayer=displayLayer,
-                              intermediateObject=False,
-                              parentHidden=parentHidden,
-                              visibility=visibility):
-                return False
-
-    return True
 
 
 class ValidateJointsHidden(pyblish.api.InstancePlugin):
@@ -87,7 +28,7 @@ class ValidateJointsHidden(pyblish.api.InstancePlugin):
     @staticmethod
    def get_invalid(instance):
        joints = cmds.ls(instance, type='joint', long=True)
-        return [j for j in joints if is_visible(j, displayLayer=True)]
+        return [j for j in joints if lib.is_visible(j, displayLayer=True)]
 
    def process(self, instance):
        """Process all the nodes in the instance 'objectSet'"""

@@ -1,108 +0,0 @@
-import os
-
-from maya import cmds
-
-import pyblish.api
-import colorbleed.api
-
-import cbra.lib
-from cb.utils.python.decorators import memorize
-
-
-def is_latest_version(path):
-    """Return whether path is the latest version.
-
-    Args:
-        path (str): Full path to published file.
-
-    Returns:
-        bool: Whether the path belongs to the latest version.
-
-    """
-
-    ctx = cbra.lib.parse_context(path)
-    versions = cbra.lib.list_versions(ctx)
-    highest = cbra.lib.find_highest_version(versions)
-
-    if ctx.get('version', None) != highest:
-        return False
-    else:
-        return True
-
-
-@memorize
-def is_latest_version_cached(path):
-    """Memorized cached wrapper to `is_latest_version`"""
-    return is_latest_version(path)
-
-
-class ValidateLatestVersions(pyblish.api.InstancePlugin):
-    """Validates content included is using latest published versions.
-
-    If published contents are out of date they can be easily updated to the
-    latest version using the scripts > pyblish > utilities > update_xxx for
-    the corresponding node type.
-
-    """
-
-    order = colorbleed.api.ValidateContentsOrder
-    families = ['colorbleed.layout']
-    label = "Latest Versions"
-    actions = [colorbleed.api.SelectInvalidAction]
-    optional = True
-
-    # (node_type, attribute) that are non-referenced to check paths for
-    LOCAL_CHECKS = {
-        "gpuCache": "cacheFileName",
-        "VRayMesh": "fileName2"
-    }
-
-    @classmethod
-    def get_invalid(cls, instance):
-
-        all_nodes = instance[:]
-        invalid = list()
-
-        # check non-referenced nodes
-        for node_type, attr in cls.LOCAL_CHECKS.iteritems():
-
-            nodes = cmds.ls(all_nodes, type=node_type, long=True)
-            referenced = cmds.ls(nodes, referencedNodes=True, long=True)
-            non_referenced = [n for n in nodes if n not in referenced]
-
-            for node in non_referenced:
-
-                path = cmds.getAttr("{0}.{1}".format(node, attr))
-                path = os.path.normpath(path)
-                if not is_latest_version_cached(path):
-                    invalid.append(node)
-
-        # reference nodes related to this isntance
-        referenced = cmds.ls(all_nodes, long=True, referencedNodes=True)
-        referenced_nodes = set(cmds.referenceQuery(reference, referenceNode=True)
-                               for reference in referenced)
-
-        for reference in referenced_nodes:
-            path = cmds.referenceQuery(reference,
-                                       filename=True,
-                                       withoutCopyNumber=True)
-            path = os.path.normpath(path)
-            if not is_latest_version_cached(path):
-                invalid.append(reference)
-
-        return invalid
-
-    def process(self, instance):
-
-        # Clear cache only once per publish. So we store a value on
-        # the context on the first instance so we clear only once.
-        name = self.__class__.__name__
-        key = "_plugin_{0}_processed".format(name)
-        if not instance.context.data.get(key, False):
-            is_latest_version_cached.cache.clear()
-            instance.context.data[key] = True
-
-        invalid = self.get_invalid(instance)
-        if invalid:
-            raise RuntimeError("Used Items are not updated to latest versions:"
-                               "{0}".format(invalid))

@@ -18,10 +18,20 @@ class ValidateLookContents(pyblish.api.InstancePlugin):
     def process(self, instance):
         """Process all the nodes in the instance"""
 
+        error = False
+
+        attributes = ["lookSets",
+                      "lookSetRelations",
+                      "lookAttributes"]
+
         if not instance[:]:
             raise RuntimeError("Instance is empty")
 
         # Required look data
-        assert "lookSets" in instance.data
-        assert "lookSetRelations" in instance.data
-        assert "lookAttributes" in instance.data
+        for attr in attributes:
+            if attr not in instance.data:
+                self.log.error("No %s found in data" % attr)
+                error = True
+
+        if error:
+            raise RuntimeError("Invalid look content. See log for details.")

@@ -2,7 +2,6 @@ from maya import cmds
 
 import pyblish.api
 import colorbleed.api
-from colorbleed.api import get_errored_instances_from_context
 
 from cbra.utils.maya.node_uuid import get_id, add_ids
 
@@ -71,10 +70,10 @@ class CopyUUIDsFromHistory(pyblish.api.Action):
 
         # Get the errored instances
         self.log.info("Finding failed instances..")
-        errored_instances = get_errored_instances_from_context(context)
+        errored = colorbleed.api.get_errored_instances_from_context(context)
 
         # Apply pyblish.logic to get the instances for the plug-in
-        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
+        instances = pyblish.api.instances_by_plugin(errored, plugin)
 
         ids_map = dict()
         for instance in instances:

@@ -16,55 +16,155 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
     order = colorbleed.api.ValidateContentsOrder
     label = "Rig Contents"
     hosts = ["maya"]
-    families = ["colorbleed.rig", "colorbleed.rigcontrols",
-                "colorbleed.rigpointcache"]
+    families = ["colorbleed.rig"]
+
+    accepted_output = ["mesh", "transform"]
+    accepted_controllers = ["transform"]
+    ignore_nodes = []
+
+    invalid_hierarchy = []
+    invalid_controls = []
+    invalid_geometry = []
 
     def process(self, instance):
 
+        error = False
+
         objectsets = ("controls_SET", "out_SET")
-        missing = list()
-        for objectset in objectsets:
-            if objectset not in instance:
-                missing.append(objectset)
-
+        missing = [obj for obj in objectsets if obj not in instance]
         assert not missing, ("%s is missing %s" % (instance, missing))
 
         # Ensure there are at least some transforms or dag nodes
         # in the rig instance
+        set_members = self.check_set_members(instance)
+
+        self.log.info("Evaluating contents of object sets..")
+
+        # Ensure contents in sets and retrieve long path for all objects
+        output_content = cmds.sets("out_SET", query=True) or []
+        assert output_content, "Must have members in rig out_SET"
+
+        controls_content = cmds.set("controls_SET", query=True) or []
+        assert controls_content, "Must have members in rig controls_SET"
+
+        root_node = cmds.ls(set_members, assemblies=True)
+        hierarchy = cmds.listRelatives(root_node, allDescendents=True,
+                                       fullPath=True)
+
+        self.invalid_geometry = self.validate_geometry(output_content,
+                                                       hierarchy)
+        self.invalid_controls = self.validate_controls(controls_content,
+                                                       hierarchy)
+
+        if self.invalid_hierachy:
+            self.log.error("Found nodes which reside outside of root group "
+                           "while they are set up for publishing."
+                           "\n%s" % self.invalid_hierachy)
+            error = True
+
+        if self.not_transforms:
+            self.log.error("Only transforms can be part of the controls_SET."
+                           "\n%s" % self.not_transforms)
+            error = True
+
+        if self.invalid_geometry:
+            self.log.error("Only meshes can be part of the out_SET\n%s"
+                           % self.invalid_geometry)
+            error = True
+
+        if error:
+            raise RuntimeError("Invalid rig content. See log for details.")
+
+    def check_set_members(self, instance):
+        """Check if the instance has any dagNodes
+        Args:
+            instance: the instance which needs to be published
+        Returns:
+            set_members (list): all dagNodes from instance
+        """
+
         set_members = instance.data['setMembers']
         if not cmds.ls(set_members, type="dagNode", long=True):
             raise RuntimeError("No dag nodes in the pointcache instance. "
                                "(Empty instance?)")
+        return set_members
 
-        self.log.info("Evaluating contents of object sets..")
-        not_meshes = list()
-
-        # Ensure contents in sets
-        members = cmds.sets("out_SET", query=True) or []
-        assert members, "Must have members in rig out_SET"
-
-        controls = cmds.sets("controls_SET", query=True) or []
-        assert controls, "Must have controls in rig control_SET"
+    def validate_hierarchy(self, hierarchy, nodes):
+        """Collect all nodes which are NOT within the hierarchy
+        Args:
+            hierarchy (list): nodes within the root node
+            nodes (list): nodes to check
+
+        Returns:
+            errors (list): list of nodes
+        """
+        errors = []
+        for node in nodes:
+            if node not in hierarchy:
+                errors.append(node)
+        return errors
 
+    def validate_geometry(self, set_members, hierarchy):
+        """Check if the out set passes the validations
+
+        Checks if all its set members are within the hierarchy of the root
+        Checks if the node types of the set members valid
+
+        Args:
+            set_members: list of nodes of the controls_set
+            hierarchy: list of nodes which reside under the root node
+
+        Returns:
+            errors (list)
+        """
+
+        errors = []
         # Validate the contents further
-        shapes = cmds.listRelatives(members,
+        shapes = cmds.listRelatives(set_members,
                                     allDescendents=True,
                                     shapes=True,
                                     fullPath=True) or []
-        for shape in shapes:
-            if cmds.nodeType(shape) != "mesh":
-                not_meshes.append(shape)
-
-        not_transforms = list()
-        for node in cmds.sets("controls_SET", query=True) or []:
-            if cmds.nodeType(node) != "transform":
-                not_meshes.append(node)
-
-        assert not_transforms == [], (
-            "Only transforms can be part of the controls_SET: %s"
-            % not_transforms)
-
-        assert not_meshes == [], (
-            "Only meshes can be part of the out_SET: %s"
-            % not_meshes)
+
+        # The user can add the shape node to the out_set, this will result
+        # in none when querying allDescendents
+        all_shapes = set_members + shapes
+
+        # geometry
+        invalid_shapes = self.validate_hierarchy(hierarchy, all_shapes)
+        self.invalid_hierachy.extend(invalid_shapes)
+        for shape in all_shapes:
+            nodetype = cmds.nodeType(shape)
+            if nodetype in self.ignore_nodes:
+                continue
+
+            if nodetype not in self.accepted_output:
+                errors.append(shape)
+
+        return errors
+
+    def validate_controls(self, set_members, hierarchy):
+        """Check if the controller set passes the validations
+
+        Checks if all its set members are within the hierarchy of the root
+        Checks if the node types of the set members valid
+
+        Args:
+            set_members: list of nodes of the controls_set
+            hierarchy: list of nodes which reside under the root node
+
+        Returns:
+            errors (list)
+        """
+
+        errors = []
+        invalid_controllers = self.validate_hierarchy(hierarchy, set_members)
+        self.invalid_hierachy.extend(invalid_controllers)
+        for node in set_members:
+            nodetype = cmds.nodeType(node)
+            if nodetype in self.ignore_nodes:
+                continue
+
+            if nodetype not in self.accepted_controllers:
+                errors.append(node)
+
+        return errors

colorbleed/plugins/maya/publish/validate_rig_controllers.py (new file, 80 lines)
@@ -0,0 +1,80 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateRigControllers(pyblish.api.InstancePlugin):
+    """Check if the controllers have the transformation attributes set to
+    default values, locked vibisility attributes and are not keyed
+    """
+    order = colorbleed.api.ValidateContentsOrder + 0.05
+    label = "Rig Controllers"
+    hosts = ["maya"]
+    families = ["colorbleed.rig"]
+
+    def process(self, instance):
+
+        error = False
+        is_keyed = list()
+        not_locked = list()
+        is_offset = list()
+
+        controls = cmds.sets("controls_SET", query=True)
+        assert controls, "Must have controls in rig control_SET"
+
+        for control in controls:
+            valid_keyed = self.validate_keyed_state(control)
+            if not valid_keyed:
+                is_keyed.append(control)
+
+            # check if visibility is locked
+            attribute = "{}.visibility".format(control)
+            locked = cmds.getAttr(attribute, lock=True)
+            if not locked:
+                not_locked.append(control)
+
+            valid_transforms = self.validate_transforms(control)
+            if not valid_transforms:
+                is_offset.append(control)
+
+        if is_keyed:
+            self.log.error("No controls can be keyes. Failed :\n"
+                           "%s" % is_keyed)
+
+        if is_offset:
+            self.log.error("All controls default transformation values. "
+                           "Failed :\n%s" % is_offset)
+
+        if not_locked:
+            self.log.error("All controls must have visibility "
+                           "attribute locked. Failed :\n"
+                           "%s" % not_locked)
+
+        if error:
+            raise RuntimeError("Invalid rig controllers. See log for details.")
+
+    def validate_transforms(self, control):
+        tolerance = 1e-30
+        identity = [1.0, 0.0, 0.0, 0.0,
+                    0.0, 1.0, 0.0, 0.0,
+                    0.0, 0.0, 1.0, 0.0,
+                    0.0, 0.0, 0.0, 1.0]
+
+        matrix = cmds.xform(control, query=True, matrix=True, objectSpace=True)
+        if not all(abs(x - y) < tolerance for x, y in zip(identity, matrix)):
+            return False
+        return True
+
+    def validate_keyed_state(self, control):
+        """Check if the control has an animation curve attached
+        Args:
+            control:
+
+        Returns:
+
+        """
+        animation_curves = cmds.keyframe(control, query=True, name=True)
+        if animation_curves:
+            return False
+        return True

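Note (editor's illustration, not part of the commit): the identity check used by validate_transforms can be exercised without Maya. The 16-float row-major list below mirrors what cmds.xform(..., matrix=True) returns; the "moved" matrix is an invented example.

    tolerance = 1e-30
    identity = [1.0, 0.0, 0.0, 0.0,
                0.0, 1.0, 0.0, 0.0,
                0.0, 0.0, 1.0, 0.0,
                0.0, 0.0, 0.0, 1.0]

    # A control that has been moved 0.5 units in X (translation occupies
    # elements 12-14 of the flattened matrix).
    moved = list(identity)
    moved[12] = 0.5


    def is_identity(matrix):
        return all(abs(x - y) < tolerance for x, y in zip(identity, matrix))


    print(is_identity(identity))  # True
    print(is_identity(moved))     # False
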