merging Roy's branch

aardschok 2018-02-05 16:25:01 +01:00
commit 55610af68d
10 changed files with 505 additions and 170 deletions

View file

@@ -0,0 +1,26 @@
import os
from avalon import api as avalon
from pyblish import api as pyblish
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "fusion", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "fusion", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "fusion", "create")
def install():
print("Registering Fusion plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
def uninstall():
print("Deregistering Fusion plug-ins..")
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
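# This module's install() is typically invoked by avalon itself when the
# Fusion host is installed, e.g. (hypothetical session setup):
#   from avalon import api, fusion
#   api.install(fusion)  # avalon then calls the active config's install()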

View file

@@ -1126,8 +1126,111 @@ def get_container_transforms(container, members=None, root=False):
results = cmds.ls(members, type="transform", long=True)
if root:
root = core.getHighestInHierarchy(results)
root = get_highest_in_hierarchy(results)
if root:
results = root[0]
return results
def get_highest_in_hierarchy(nodes):
"""Return highest nodes in the hierarchy that are in the `nodes` list.
The "highest in hierarchy" are the nodes closest to world: top-most level.
Args:
    nodes (list): The nodes among which to find the highest nodes in
        the hierarchy.
Returns:
    list: The highest nodes from the input nodes.
"""
# Ensure we use long names
nodes = cmds.ls(nodes, long=True)
lookup = set(nodes)
highest = []
for node in nodes:
# If no parents are within the nodes input list
# then this is a highest node
if not any(n in lookup for n in iter_parents(node)):
highest.append(node)
return highest
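# A minimal usage sketch (hypothetical node names): only nodes whose
# parents are absent from the input list are returned.
#
#   get_highest_in_hierarchy(["|char", "|char|body", "|prop|handle"])
#   # -> ["|char", "|prop|handle"]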
def iter_parents(node):
"""Iter parents of node from its long name.
Note: The `node` *must* be the long node name.
Args:
node (str): Node long name.
Yields:
str: All parent node names (long names)
"""
while True:
split = node.rsplit("|", 1)
if len(split) == 1:
return
node = split[0]
yield node
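# For example (hypothetical long name):
#   list(iter_parents("|grp|char|body"))
#   # -> ["|grp|char", "|grp", ""]
# Note the final yield is an empty string once the top level is reached.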
def remove_other_uv_sets(mesh):
"""Remove all other UV sets than the current UV set.
Keep only the current UV set and ensure it is renamed to the default 'map1'.
"""
uvSets = cmds.polyUVSet(mesh, query=True, allUVSets=True)
current = cmds.polyUVSet(mesh, query=True, currentUVSet=True)[0]
# Copy over to map1
if current != 'map1':
cmds.polyUVSet(mesh, uvSet=current, newUVSet='map1', copy=True)
cmds.polyUVSet(mesh, currentUVSet=True, uvSet='map1')
current = 'map1'
# Delete all non-current UV sets
deleteUVSets = [uvSet for uvSet in uvSets if uvSet != current]
uvSet = None
# Maya Bug (tested in 2015/2016):
# In some cases the API's MFnMesh will report less UV sets than
# maya.cmds.polyUVSet. This seems to happen when the deletion of UV sets
# has not triggered a cleanup of the UVSet array attribute on the mesh
# node. It will still have extra entries in the attribute, though it will
# not show up in API or UI. Nevertheless it does show up in
# maya.cmds.polyUVSet. To ensure we clean up the array we'll force delete
# the extra remaining 'indices' that we don't want.
# TODO: Implement a better fix
# The best way to fix would be to get the UVSet indices from api with
# MFnMesh (to ensure we keep correct ones) and then only force delete the
# other entries in the array attribute on the node. But for now we're
# deleting all entries except first one. Note that the first entry could
# never be removed (the default 'map1' always exists and is supposed to
# be undeletable.)
try:
for uvSet in deleteUVSets:
cmds.polyUVSet(mesh, delete=True, uvSet=uvSet)
except RuntimeError as exc:
log.warning('Error uvSet: %s - %s', uvSet, exc)
indices = cmds.getAttr('{0}.uvSet'.format(mesh),
multiIndices=True)
if not indices:
log.warning("No uv set found indices for: %s", mesh)
return
# Delete from end to avoid shifting indices
# and remove the indices in the attribute
indices = reversed(indices[1:])
for i in indices:
attr = '{0}.uvSet[{1}]'.format(mesh, i)
cmds.removeMultiInstance(attr, b=True)
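# Hypothetical sketch of the "better fix" from the TODO above: match the
# authoritative UV set names from the API against the uvSet[] array
# entries and only remove entries whose name MFnMesh does not report.
# Untested, assumption-level code:
#
#   import maya.api.OpenMaya as om
#   sel = om.MSelectionList()
#   sel.add(mesh)
#   api_names = set(om.MFnMesh(sel.getDagPath(0)).getUVSetNames())
#   for i in cmds.getAttr(mesh + ".uvSet", multiIndices=True) or []:
#       name = cmds.getAttr("{0}.uvSet[{1}].uvSetName".format(mesh, i))
#       if name not in api_names:
#           cmds.removeMultiInstance("{0}.uvSet[{1}]".format(mesh, i), b=True)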

View file

@@ -0,0 +1,103 @@
"""A module containing generic loader actions that will display in the Loader.
"""
from avalon import api
def _set_frame_range(start, end, set_render_range=True):
"""Set Fusion comp's start and end frame range
Args:
    start (int): The comp's global start frame.
    end (int): The comp's global end frame.
    set_render_range (bool, Optional): When True this will also set the
        composition's render start and end frame.
Returns:
None
"""
from avalon.fusion import get_current_comp, comp_lock_and_undo_chunk
comp = get_current_comp()
attrs = {
"COMPN_GlobalStart": start,
"COMPN_GlobalEnd": end
}
if set_render_range:
attrs.update({
"COMPN_RenderStart": start,
"COMPN_RenderEnd": end
})
with comp_lock_and_undo_chunk(comp):
comp.SetAttrs(attrs)
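# For example (hypothetical frame values), to move only the global range
# and leave the render range untouched:
#   _set_frame_range(1001, 1100, set_render_range=False)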
class FusionSetFrameRangeLoader(api.Loader):
"""Specific loader of Alembic for the avalon.animation family"""
families = ["colorbleed.animation",
"colorbleed.camera",
"colorbleed.imagesequence",
"colorbleed.yeticache",
"colorbleed.pointcache"]
representations = ["*"]
label = "Set frame range"
order = 11
icon = "clock-o"
color = "white"
def load(self, context, name, namespace, data):
version = context['version']
version_data = version.get("data", {})
start = version_data.get("startFrame", None)
end = version_data.get("endFrame", None)
if start is None or end is None:
print("Skipping setting frame range because start or "
"end frame data is missing..")
return
_set_frame_range(start, end)
class FusionSetFrameRangeWithHandlesLoader(api.Loader):
"""Specific loader of Alembic for the avalon.animation family"""
families = ["colorbleed.animation",
"colorbleed.camera",
"colorbleed.imagesequence",
"colorbleed.yeticache",
"colorbleed.pointcache"]
representations = ["*"]
label = "Set frame range (with handles)"
order = 12
icon = "clock-o"
color = "white"
def load(self, context, name, namespace, data):
version = context['version']
version_data = version.get("data", {})
start = version_data.get("startFrame", None)
end = version_data.get("endFrame", None)
if start is None or end is None:
print("Skipping setting frame range because start or "
"end frame data is missing..")
return
# Include handles
handles = version_data.get("handles", 0)
start -= handles
end += handles
_set_frame_range(start, end)
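# Worked example (hypothetical version data): startFrame=1001,
# endFrame=1100 and handles=10 result in a comp range of 991-1110.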

View file

@@ -0,0 +1,93 @@
from avalon import api
import os
class FusionLoadSequence(api.Loader):
"""Load image sequence into Fusion"""
families = ["colorbleed.imagesequence"]
representations = ["*"]
label = "Load sequence"
order = -10
icon = "play-circle"
color = "orange"
def load(self, context, name, namespace, data):
from avalon.fusion import (
imprint_container,
get_current_comp,
comp_lock_and_undo_chunk
)
# Fallback to asset name when namespace is None
if namespace is None:
namespace = context['asset']['name']
# Use the first file for now
path = self._get_first_image(self.fname)
# Create the Loader with the filename path set
comp = get_current_comp()
with comp_lock_and_undo_chunk(comp, "Create Loader"):
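# Assumption: (-32768, -32768) is the conventional "default position"
# sentinel in Fusion scripting, letting Fusion auto-place the tool.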
args = (-32768, -32768)
tool = comp.AddTool("Loader", *args)
tool["Clip"] = path
imprint_container(tool,
name=name,
namespace=namespace,
context=context,
loader=self.__class__.__name__)
def _get_first_image(self, root):
"""Get first file in representation root"""
files = sorted(os.listdir(root))
return os.path.join(root, files[0])
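# Note this relies on lexicographic sort order, e.g. (hypothetical)
# "seq.1001.exr" sorts before "seq.1002.exr" so the first frame wins.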
def update(self, container, representation):
"""Update the Loader's path
Fusion automatically tries to reset some variables when changing
the loader's path to a new file. These automatic changes are to its
inputs:
- ClipTimeStart (if duration changes)
- ClipTimeEnd (if duration changes)
- GlobalIn (if duration changes)
- GlobalEnd (if duration changes)
- Reverse (sometimes?)
- Loop (sometimes?)
- Depth (always resets to "Format")
- KeyCode (always resets to "")
- TimeCodeOffset (always resets to 0)
"""
from avalon.fusion import comp_lock_and_undo_chunk
root = api.get_representation_path(representation)
path = self._get_first_image(root)
print(representation)
print(path)
tool = container["_tool"]
assert tool.ID == "Loader", "Must be Loader"
comp = tool.Comp()
with comp_lock_and_undo_chunk(comp, "Update Loader"):
tool["Clip"] = path
# Update the imprinted representation
tool.SetData("avalon.representation", str(representation["_id"]))
def remove(self, container):
from avalon.fusion import comp_lock_and_undo_chunk
tool = container["_tool"]
assert tool.ID == "Loader", "Must be Loader"
comp = tool.Comp()
with comp_lock_and_undo_chunk(comp, "Remove Loader"):
tool.Delete()

View file

@@ -10,7 +10,7 @@ from colorbleed.maya.lib import extract_alembic
class ExtractColorbleedAnimation(colorbleed.api.Extractor):
"""Produce an alembic of just point positions and normals.
Positions and normals are preserved, but nothing more,
Positions, normals, UVs and creases are preserved, but nothing more,
for plain and predictable point caches.
"""
@@ -49,18 +49,26 @@ class ExtractColorbleedAnimation(colorbleed.api.Extractor):
filename = "{name}.abc".format(**instance.data)
path = os.path.join(parent_dir, filename)
options = {
"step": instance.data.get("step", 1.0),
"attr": ["cbId"],
"writeVisibility": True,
"writeCreases": True,
"uvWrite": True,
"selection": True
}
if int(cmds.about(version=True)) >= 2017:
# Since Maya 2017 alembic supports multiple uv sets - write them.
options["writeUVSets"] = True
with avalon.maya.suspended_refresh():
with avalon.maya.maintained_selection():
cmds.select(nodes, noExpand=True)
extract_alembic(file=path,
startFrame=start,
endFrame=end,
**{"step": instance.data.get("step", 1.0),
"attr": ["cbId"],
"writeVisibility": True,
"writeCreases": True,
"uvWrite": True,
"selection": True})
**options)
if "files" not in instance.data:
instance.data["files"] = list()

View file

@@ -10,7 +10,7 @@ from colorbleed.maya.lib import extract_alembic
class ExtractColorbleedAlembic(colorbleed.api.Extractor):
"""Produce an alembic of just point positions and normals.
Positions and normals are preserved, but nothing more,
Positions, normals, UVs and creases are preserved, but nothing more,
for plain and predictable point caches.
"""
@@ -44,6 +44,20 @@ class ExtractColorbleedAlembic(colorbleed.api.Extractor):
filename = "{name}.abc".format(**instance.data)
path = os.path.join(parent_dir, filename)
options = {
"step": instance.data.get("step", 1.0),
"attr": ["cbId"],
"writeVisibility": True,
"writeCreases": True,
"writeColorSets": writeColorSets,
"uvWrite": True,
"selection": True
}
if int(cmds.about(version=True)) >= 2017:
# Since Maya 2017 alembic supports multiple uv sets - write them.
options["writeUVSets"] = True
with avalon.maya.suspended_refresh():
with avalon.maya.maintained_selection():
cmds.select(nodes, noExpand=True)

View file

@@ -2,10 +2,17 @@ from maya import cmds
import pyblish.api
import colorbleed.api
import colorbleed.maya.lib as lib
class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin):
"""Ensure no multiple UV sets exist for each polygon mesh"""
"""Warn on multiple UV sets existing for each polygon mesh.
On versions prior to Maya 2017 this will force no multiple uv sets because
the Alembic exports in Maya prior to 2017 don't support writing multiple
UV sets.
"""
order = colorbleed.api.ValidateMeshOrder
hosts = ['maya']
@@ -42,83 +49,20 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
raise ValueError("Nodes found with multiple "
"UV sets: {0}".format(invalid))
message = "Nodes found with multiple UV sets: {0}".format(invalid)
# Maya 2017 and up allows multiple UV sets in Alembic exports
# so we allow it, yet just warn the user to ensure they know about
# the other UV sets.
allowed = int(cmds.about(version=True)) >= 2017
if allowed:
self.log.warning(message)
else:
raise ValueError(message)
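# Worked example of the gate above (assuming cmds.about(version=True)
# returns a plain year string): "2016" -> 2016 < 2017 raises ValueError,
# while "2018" -> 2018 >= 2017 only logs the warning.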
@classmethod
def repair(cls, instance):
for mesh in cls.get_invalid(instance):
cls._repair_mesh(mesh)
@classmethod
def _repair_mesh(cls, mesh):
"""Process a single mesh, deleting other UV sets than the active one.
Keep only current UV set and ensure it's the default 'map1'
"""
from maya import cmds
uvSets = cmds.polyUVSet(mesh,
query=True,
allUVSets=True)
current = cmds.polyUVSet(mesh,
query=True,
currentUVSet=True)[0]
# Copy over to map1
if current != 'map1':
cmds.polyUVSet(mesh,
uvSet=current,
newUVSet='map1',
copy=True)
cmds.polyUVSet(mesh,
currentUVSet=True,
uvSet='map1')
current = 'map1'
# Delete all non-current UV sets
deleteUVSets = [uvSet for uvSet in uvSets if uvSet != current]
uvSet = None
# Maya Bug (tested in 2015/2016):
# In some cases the API's MFnMesh will report less UV sets
# than maya.cmds.polyUVSet.
# This seems to happen when the deletion of UV sets has not
# triggered a cleanup of the UVSet array
# attribute on the mesh node. It will still have extra
# entries in the attribute, though it will not
# show up in API or UI. Nevertheless it does show up in
# maya.cmds.polyUVSet.
# To ensure we clean up the array we'll force delete the
# extra remaining 'indices' that we don't want.
# TODO: Implement a better fix
# The best way to fix would be to get the UVSet
# indices from api with MFnMesh (to ensure we keep
# correct ones) and then only force delete the other
# entries in the array attribute on the node.
# But for now we're deleting all entries except first
# one. Note that the first entry could never
# be removed (the default 'map1' always exists and is
# supposed to be undeletable.)
try:
for uvSet in deleteUVSets:
cmds.polyUVSet(mesh, delete=True, uvSet=uvSet)
except RuntimeError, e:
cls.log.warning('uvSet: {0} - '
'Error: {1}'.format(uvSet, e))
indices = cmds.getAttr('{0}.uvSet'.format(mesh),
multiIndices=True)
if not indices:
cls.log.warning(
"No uv set found indices for: {0}".format(mesh))
return
# Delete from end to avoid shifting indices
# and remove the indices in the attribute
indices = reversed(indices[1:])
for i in indices:
attr = '{0}.uvSet[{1}]'.format(mesh, i)
cmds.removeMultiInstance(attr, b=True)
lib.remove_other_uv_sets(mesh)

View file

@@ -20,91 +20,68 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
accepted_output = ["mesh", "transform"]
accepted_controllers = ["transform"]
ignore_nodes = []
invalid_hierarchy = []
invalid_controls = []
invalid_geometry = []
def process(self, instance):
error = False
objectsets = ("controls_SET", "out_SET")
missing = [obj for obj in objectsets if obj not in instance]
assert not missing, ("%s is missing %s" % (instance, missing))
# Ensure there are at least some transforms or dag nodes
# in the rig instance
set_members = self.check_set_members(instance)
self.log.info("Evaluating contents of object sets..")
set_members = instance.data['setMembers']
if not cmds.ls(set_members, type="dagNode", long=True):
raise RuntimeError("No dag nodes in the pointcache instance. "
"(Empty instance?)")
# Ensure contents in sets and retrieve long path for all objects
output_content = cmds.sets("out_SET", query=True) or []
assert output_content, "Must have members in rig out_SET"
output_content = cmds.ls(output_content, long=True)
controls_content = cmds.sets("controls_SET", query=True) or []
assert controls_content, "Must have members in rig controls_SET"
controls_content = cmds.ls(controls_content, long=True)
# Validate members are inside the hierarchy from root node
root_node = cmds.ls(set_members, assemblies=True)
hierarchy = cmds.listRelatives(root_node, allDescendents=True,
fullPath=True)
hierarchy = set(hierarchy)
self.invalid_geometry = self.validate_geometry(output_content,
hierarchy)
self.invalid_controls = self.validate_controls(controls_content,
hierarchy)
invalid_hierarchy = []
for node in output_content:
if node not in hierarchy:
invalid_hierarchy.append(node)
for node in controls_content:
if node not in hierarchy:
invalid_hierarchy.append(node)
if self.invalid_hierarchy:
# Additional validations
invalid_geometry = self.validate_geometry(output_content)
invalid_controls = self.validate_controls(controls_content)
error = False
if invalid_hierarchy:
self.log.error("Found nodes which reside outside of root group "
"while they are set up for publishing."
"\n%s" % self.invalid_hierarchy)
"\n%s" % invalid_hierarchy)
error = True
if self.invalid_controls:
if invalid_controls:
self.log.error("Only transforms can be part of the controls_SET."
"\n%s" % self.invalid_controls)
"\n%s" % invalid_controls)
error = True
if self.invalid_geometry:
if invalid_geometry:
self.log.error("Only meshes can be part of the out_SET\n%s"
% self.invalid_geometry)
% invalid_geometry)
error = True
if error:
raise RuntimeError("Invalid rig content. See log for details.")
def check_set_members(self, instance):
"""Check if the instance has any dagNodes
Args:
instance: the instance which needs to be published
Returns:
set_members (list): all dagNodes from instance
"""
set_members = instance.data['setMembers']
if not cmds.ls(set_members, type="dagNode", long=True):
raise RuntimeError("No dag nodes in the pointcache instance. "
"(Empty instance?)")
return set_members
def validate_hierarchy(self, hierarchy, nodes):
"""Collect all nodes which are NOT within the hierarchy
Args:
hierarchy (list): nodes within the root node
nodes (list): nodes to check
Returns:
errors (list): list of nodes
"""
errors = []
for node in nodes:
if node not in hierarchy:
errors.append(node)
return errors
def validate_geometry(self, set_members, hierarchy):
def validate_geometry(self, set_members):
"""Check if the out set passes the validations
Checks if all its set members are within the hierarchy of the root
@@ -118,33 +95,20 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
errors (list)
"""
errors = []
# Validate the contents further
# Validate all shape types
invalid = []
shapes = cmds.listRelatives(set_members,
allDescendents=True,
shapes=True,
fullPath=True) or []
# The user can add the shape node to the out_SET; this will result
# in None when querying allDescendents
all_shapes = set_members + shapes
all_long_names = [cmds.ls(i, long=True)[0] for i in all_shapes]
# geometry
invalid_shapes = self.validate_hierarchy(hierarchy,
all_long_names)
self.invalid_hierarchy.extend(invalid_shapes)
all_shapes = cmds.ls(set_members + shapes, long=True, shapes=True)
for shape in all_shapes:
nodetype = cmds.nodeType(shape)
if nodetype in self.ignore_nodes:
continue
if cmds.nodeType(shape) not in self.accepted_output:
invalid.append(shape)
if nodetype not in self.accepted_output:
errors.append(shape)
return invalid
return errors
def validate_controls(self, set_members, hierarchy):
def validate_controls(self, set_members):
"""Check if the controller set passes the validations
Checks if all its set members are within the hierarchy of the root
@@ -158,17 +122,10 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
errors (list)
"""
errors = []
all_long_names = [cmds.ls(i, long=True)[0] for i in set_members]
invalid_controllers = self.validate_hierarchy(hierarchy,
all_long_names)
self.invalid_hierarchy.extend(invalid_controllers)
# Validate control types
invalid = []
for node in set_members:
nodetype = cmds.nodeType(node)
if nodetype in self.ignore_nodes:
continue
if cmds.nodeType(node) not in self.accepted_controllers:
invalid.append(node)
if nodetype not in self.accepted_controllers:
errors.append(node)
return errors
return invalid

View file

@@ -0,0 +1,89 @@
from maya import cmds
import pyblish.api
import colorbleed.api
from cb.utils.maya.context import undo_chunk
class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin):
"""Validate rig control curves have no keyable arnold attributes.
The Arnold plug-in will create curve attributes like:
- aiRenderCurve
- aiCurveWidth
- aiSampleRate
- aiCurveShaderR
- aiCurveShaderG
- aiCurveShaderB
Unfortunately these attributes are *keyable* by default and visible in
the channelBox. As such, pressing the regular "S" set-key shortcut will
set keys on these attributes too, cluttering the animator's scene.
This validator ensures these attributes are hidden or non-keyable.
"""
order = colorbleed.api.ValidateContentsOrder + 0.05
label = "Rig Controllers (Arnold Attributes)"
hosts = ["maya"]
families = ["colorbleed.rig"]
actions = [colorbleed.api.RepairAction,
colorbleed.api.SelectInvalidAction]
attributes = [
"rcurve",
"cwdth",
"srate",
"ai_curve_shaderr",
"ai_curve_shaderg",
"ai_curve_shaderb"
]
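# Assumption: the short names above correspond to the long Arnold
# attribute names from the docstring, e.g. "rcurve" -> "aiRenderCurve",
# "cwdth" -> "aiCurveWidth", "srate" -> "aiSampleRate".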
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError('{} failed, see log '
'information'.format(self.label))
@classmethod
def get_invalid(cls, instance):
controllers_sets = [i for i in instance if i == "controls_SET"]
if not controllers_sets:
return []
controls = cmds.sets(controllers_sets, query=True) or []
if not controls:
return []
shapes = cmds.ls(controls,
dag=True,
leaf=True,
long=True,
shapes=True,
noIntermediate=True)
curves = cmds.ls(shapes, type="nurbsCurve", long=True)
invalid = list()
for node in curves:
for attribute in cls.attributes:
if cmds.attributeQuery(attribute, node=node, exists=True):
plug = "{}.{}".format(node, attribute)
if cmds.getAttr(plug, keyable=True):
invalid.append(node)
break
return invalid
@classmethod
def repair(cls, instance):
invalid = cls.get_invalid(instance)
with undo_chunk():
for node in invalid:
for attribute in cls.attributes:
if cmds.attributeQuery(attribute, node=node, exists=True):
plug = "{}.{}".format(node, attribute)
cmds.setAttr(plug, channelBox=False, keyable=False)
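# Manual equivalent for a single plug (hypothetical node name):
#   cmds.setAttr("curveShape1.aiRenderCurve", channelBox=False, keyable=False)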

View file

@@ -35,8 +35,6 @@ def get_id_from_history(node):
similar_nodes = [i for i in similar_nodes if
get_parent(i) == parent]
print similar_nodes
# Check all of the remaining similar nodes and take the first one
# with an id and assume it's the original.
for similar_node in similar_nodes: