Initial Colorbleed config for Avalon

This commit is contained in:
aardschok 2017-06-23 15:35:00 +02:00
commit 33c731ef76
130 changed files with 9353 additions and 0 deletions

99
.gitignore vendored Normal file
View file

@ -0,0 +1,99 @@
# Created by .ignore support plugin (hsz.mobi)
### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# dotenv
.env
# virtualenv
.venv
venv/
ENV/
# Spyder project settings
.spyderproject
# Rope project settings
.ropeproject
# Pycharm IDE settings
.idea

27
.travis.yml Normal file
View file

@ -0,0 +1,27 @@
language: python
sudo: required
dist: trusty
python:
- 2.7
services:
- docker
install:
- git clone https://github.com/getavalon/core.git
- docker run --name mindbender-mongo -d mongo
- docker build -t pyblish/mindbender -f Dockerfile-maya2016 .
script:
- >
PYTHONPATH=$(pwd)/core
docker run
--rm
-v $(pwd):/workspace
--link mindbender-mongo:mongo
-e COVERALLS_REPO_TOKEN
-e TRAVIS_JOB_ID
-e MINDBENDER_MONGO=mongodb://mongo:27017
pyblish/mindbender

4
colorbleed.bat Normal file
View file

@ -0,0 +1,4 @@
:: Set paths to ensure plugins have access to the inhouse tools
:: (appends the studio "cb" and "cbra" repositories to PYTHONPATH)
set PYTHONPATH=%PYTHONPATH%;P:\pipeline\dev\git\cb;
set PYTHONPATH=%PYTHONPATH%;P:\pipeline\dev\git\cbra;

17
colorbleed/__init__.py Normal file
View file

@ -0,0 +1,17 @@
import os
from pyblish import api as pyblish
PACKAGE_DIR = os.path.dirname(__file__)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
def install():
    """Register the global (host-agnostic) publish plug-ins with pyblish.

    CONSISTENCY FIX: uses the module-level ``PUBLISH_PATH`` constant instead
    of recomputing the same path locally, so install() and uninstall()
    always refer to the identical location.
    """
    print("Registering global plug-ins..")
    pyblish.register_plugin_path(PUBLISH_PATH)
def uninstall():
    """Deregister the global publish plug-in path registered by install()."""
    pyblish.deregister_plugin_path(PUBLISH_PATH)

151
colorbleed/action.py Normal file
View file

@ -0,0 +1,151 @@
# absolute_import is needed to counter the `module has no cmds error` in Maya
from __future__ import absolute_import
import pyblish.api
from maya import cmds
def get_errored_instances_from_context(context):
    """Return the instances that raised an error during publishing.

    The context-level result entry (whose "instance" is None) is skipped;
    only per-instance results with a truthy "error" are collected.
    """
    return [result["instance"]
            for result in context.data["results"]
            if result["instance"] is not None and result["error"]]
class RepairAction(pyblish.api.Action):
    """Repair failed instances via the plug-in's own fix.

    To retrieve the invalid nodes this assumes a static `repair(instance)`
    method is available on the plugin.
    """
    label = "Repair"
    on = "failed"  # This action is only available on a failed plug-in
    icon = "wrench"  # Icon from Awesome Icon

    def process(self, context, plugin):
        # A plug-in must opt in by exposing a `repair` staticmethod.
        if not hasattr(plugin, "repair"):
            raise RuntimeError("Plug-in does not have repair method.")

        self.log.info("Finding failed instances..")
        failed = get_errored_instances_from_context(context)

        # Narrow down to the instances this particular plug-in acted upon
        for instance in pyblish.api.instances_by_plugin(failed, plugin):
            plugin.repair(instance)
class SelectInvalidAction(pyblish.api.Action):
    """Select invalid nodes in Maya when plug-in failed.

    To retrieve the invalid nodes this assumes a static `get_invalid()`
    method is available on the plugin.
    """
    label = "Select invalid"
    on = "failed"  # This action is only available on a failed plug-in
    icon = "search"  # Icon from Awesome Icon

    def process(self, context, plugin):
        errored_instances = get_errored_instances_from_context(context)
        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
        # Get the invalid nodes for the plug-ins
        self.log.info("Finding invalid nodes..")
        invalid = list()
        for instance in instances:
            invalid_nodes = plugin.get_invalid(instance)
            if invalid_nodes:
                if isinstance(invalid_nodes, (list, tuple)):
                    invalid.extend(invalid_nodes)
                else:
                    # NOTE(review): a truthy non-list/tuple return value is
                    # warned about and dropped -- a single node returned as
                    # a plain string will never be selected; confirm this
                    # is intended.
                    self.log.warning("Plug-in returned to be invalid, "
                                     "but has no selectable nodes.")
        # Ensure unique (process each node only once)
        invalid = list(set(invalid))
        if invalid:
            self.log.info("Selecting invalid nodes: %s" % ", ".join(invalid))
            cmds.select(invalid, replace=True, noExpand=True)
        else:
            self.log.info("No invalid nodes found.")
            cmds.select(deselect=True)
class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
    """Generate UUIDs on the invalid nodes in the instance.

    Invalid nodes are those returned by the plugin's `get_invalid` method.
    As such it is the plug-in's responsibility to ensure the nodes that
    receive new UUIDs are actually invalid.

    Requires:
        - currentFile on context
    """
    label = "Regenerate UUIDs"
    on = "failed"  # This action is only available on a failed plug-in
    icon = "wrench"  # Icon from Awesome Icon

    def process(self, context, plugin):
        # Imported locally; these in-house libraries are only available
        # inside the studio's Maya environment.
        import cbra.lib
        import cbra.utils.maya.node_uuid as id_utils

        self.log.info("Finding bad nodes..")

        # Get the errored instances (context-level results carry no instance)
        errored_instances = []
        for result in context.data["results"]:
            if result["error"] is not None and result["instance"] is not None:
                if result["error"]:
                    errored_instances.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)

        # Get the nodes from the all instances that ran through this plug-in
        invalid = []
        for instance in instances:
            invalid_nodes = plugin.get_invalid(instance)
            if invalid_nodes:
                invalid.extend(invalid_nodes)

        if not invalid:
            self.log.info("No invalid nodes found.")
            return

        # Ensure unique (process each node only once)
        invalid = list(set(invalid))

        # Parse context from current file.  Looked up before the try-block
        # so the error message below can always reference it.
        self.log.info("Parsing current context..")
        current_file = context.data['currentFile']
        try:
            context = cbra.lib.parse_context(current_file)
        except RuntimeError:
            # BUG FIX: original used Python 2 `except RuntimeError, e`
            # syntax and a format string with no placeholder; also use a
            # bare `raise` to preserve the traceback.
            self.log.error("Can't generate UUIDs because scene isn't "
                           "in new-style pipeline: {0}".format(current_file))
            raise

        # Generate and add the ids to the nodes
        ids = id_utils.generate_ids(context, invalid)
        id_utils.add_ids(ids)
        self.log.info("Generated ids on nodes: {0}".format(invalid))

41
colorbleed/api.py Normal file
View file

@ -0,0 +1,41 @@
from collections import OrderedDict
from .plugin import (
Extractor,
ValidatePipelineOrder,
ValidateContentsOrder,
ValidateSceneOrder,
ValidateMeshOrder
)
# temporary fix, might
from .action import (
SelectInvalidAction,
GenerateUUIDsOnInvalidAction,
RepairAction
)
def merge(*args):
    """Helper to merge OrderedDict instances

    Later mappings win; re-assigning an existing key also moves it to
    the end of the resulting OrderedDict.
    """
    merged = OrderedDict()
    for mapping in args:
        for key, value in mapping.items():
            # Remove any earlier entry first so the key is appended last
            if key in merged:
                del merged[key]
            merged[key] = value
    return merged
# Public star-import API of this module.
# BUG FIX: this list was assigned to `all`, which shadows the builtin and
# is not honoured by `from colorbleed.api import *`; it must be `__all__`.
__all__ = [
    "Extractor",
    "ValidatePipelineOrder",
    "ValidateContentsOrder",
    "ValidateSceneOrder",
    "ValidateMeshOrder",
    "SelectInvalidAction",
    "GenerateUUIDsOnInvalidAction",
    "RepairAction"
]

View file

@ -0,0 +1,37 @@
import os
import site
from avalon import api as avalon
from pyblish import api as pyblish
from . import menu
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "maya", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "maya", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "maya", "create")
def install():
    """Install the Maya host integration.

    Registers the Maya publish/load/create plug-in paths with pyblish and
    Avalon, and builds the studio menu.
    """
    # add local pipeline library to the paths
    # NOTE(review): hard-coded studio/dev paths, including user-specific
    # checkouts -- these look like temporary development settings; confirm
    # before deploying.
    site.addsitedir(r"P:\pipeline\dev\git\cb")
    site.addsitedir(r"C:\Users\User\Documents\development\cbra")
    site.addsitedir(r"C:\Users\User\Documents\development\pyblish-cb")
    pyblish.register_plugin_path(PUBLISH_PATH)
    avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
    menu.install()
def uninstall():
    """Undo install(): deregister all plug-in paths and remove the menu."""
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
    menu.uninstall()

637
colorbleed/maya/commands.py Normal file
View file

@ -0,0 +1,637 @@
"""Used for scripting
These are used in other scripts and mostly require explicit input,
such as which specific nodes they apply to.
For interactive use, see :mod:`interactive.py`
"""
import sys
from maya import cmds
from . import lib
if sys.version_info[0] == 3:
basestring = str
# Flags
LocalSpace = 1 << 0
WorldSpace = 1 << 1
def auto_connect2(src, dst):
    """Connect to `dst` based on what `dst` is and `src` has available

    TODO: Offer optionbox of choices when multiple inputs are possible.
        For example, connecting a mesh to a wrap node could either
        go to driverMesh, or baseMesh.

    Arguments:
        src (str): Name of source node
        dst (str): Name of destination node

    Raises:
        AssertionError: When no supported or matching attributes are found.
    """
    # Destination node type -> candidate [source type, (out, in)] entries
    to_from = {
        "mesh": (
            ["mesh", (".outMesh", ".inMesh")],
        ),
        "nurbsSurface": (
            ["nurbsSurface", (".local", ".create")],
        ),
        "nurbsCurve": (
            ["nurbsCurve", (".local", ".create")],
        ),
        "decomposeMatrix": (
            ["transform", (".worldMatrix", ".inputMatrix")],
        ),
        "transform": (
            [
                "transform", (
                    (".translate", ".rotate", ".scale"),
                    (".translate", ".rotate", ".scale"))
            ],
            [
                "decomposeMatrix", (
                    (".outTranslate", ".outRotate", ".outScale"),
                    (".translate", ".rotate", ".scale"))
            ],
        ),
        "objectSet": (
            ["dagNode", (".message", ".dagSetMembers")],
            ["entity", (".message", ".dnSetMembers")],
        ),
    }

    # Match on the destination's inherited node types
    support = next(
        (to_from[to] for to in to_from
         if to in cmds.nodeType(dst, inherited=True)), None
    )
    assert support, "No supported outputs for '%s'" % (cmds.nodeType(src))

    # BUG FIX: the original unpacked the whole [type, (out, in)] entry,
    # so `out_` became the type *name* rather than the output attribute.
    # Unpack the attribute pair (entry[1]) instead.
    out_, in_ = next(
        (entry[1] for entry in support
         if entry[0] in cmds.nodeType(src, inherited=True)), (None, None)
    )
    assert in_ and out_, "No matching attributes found for %s" % src

    if not isinstance(in_, tuple):
        in_ = (in_,)
    if not isinstance(out_, tuple):
        out_ = (out_,)
    assert len(in_) == len(out_)

    # BUG FIX: `map` over a lambda is lazy on Python 3 and would never
    # execute; use an explicit loop so the connections are actually made.
    for out_attr, in_attr in zip(out_, in_):
        cmds.connectAttr(src + out_attr, dst + in_attr, force=True)
def auto_connect(src, dst):
    """Connect `src` to `dst` via the most likely input and output

    Usage:
        >>> # Create cube and transfer mesh into new shape
        >>> shape = cmds.createNode("mesh", name="newShape")
        >>> transform, generator = cmds.polyCube(name="original")
        >>> auto_connect(generator, shape)
        >>> cmds.delete(transform)
    """
    # Preferred output attribute per source node type
    out_ = {
        "mesh": ".outMesh",
        "nurbsSurface": ".local",
        "nurbsCurve": ".local",
        "decomposeMatrix": (".outTranslate",
                            ".outRotate",
                            ".outScale"),
        "transform": (".translate",
                      ".rotate",
                      ".scale",
                      ".visibility")
    }

    # Preferred input attribute per destination node type
    in_ = {
        "mesh": ".inMesh",
        "nurbsSurface": ".create",
        "nurbsCurve": ".create",
        # BUG FIX: was "inputMatrix", missing the leading "."
        "decomposeMatrix": ".inputMatrix",
        "transform": (".translate",
                      ".rotate",
                      ".scale",
                      ".visibility"),
        # NOTE(review): these carry no leading "." and are a list, so
        # objectSet destinations cannot currently connect; left as-is
        # pending a decision on which set attribute to prefer.
        "objectSet": ["dnSetMembers", "dgSetMembers"]
    }

    try:
        in_ = in_[cmds.nodeType(dst)]
    except KeyError:
        # Fall back to conventional generic input attributes
        in_ = next((attr for attr in (".input",
                                      ".inputGeometry")
                    if cmds.objExists(dst + attr)), None)

    try:
        out_ = out_[cmds.nodeType(src)]
    except KeyError:
        # Fall back to conventional generic output attributes
        out_ = next((attr for attr in (".output",
                                       ".outputGeometry")
                     if cmds.objExists(src + attr)), None)

    assert in_ and out_, "No matching attributes found for %s" % src

    if not isinstance(in_, tuple):
        in_ = (in_,)
    if not isinstance(out_, tuple):
        out_ = (out_,)
    assert len(in_) == len(out_)

    # BUG FIX: `map` over a lambda is lazy on Python 3 and would never
    # execute; use an explicit loop so the connections are actually made.
    for out_attr, in_attr in zip(out_, in_):
        cmds.connectAttr(src + out_attr, dst + in_attr, force=True)
@lib.maintained_selection
def match_transform(src, dst):
    """Transform `src` to `dst`, taking worldspace into account

    Arguments:
        src (str): Absolute path to source transform
        dst (str): Absolute path to destination transform
    """
    try:
        parent = cmds.listRelatives(src, parent=True)[0]
    except Exception:
        parent = None

    # Temporary utility nodes compute dst's transform in src's parent space
    node_decompose = cmds.createNode("decomposeMatrix")
    node_multmatrix = cmds.createNode("multMatrix")
    connections = {
        dst + ".worldMatrix": node_multmatrix + ".matrixIn[0]",
        node_multmatrix + ".matrixSum": node_decompose + ".inputMatrix",
        node_decompose + ".outputTranslate": src + ".translate",
        node_decompose + ".outputRotate": src + ".rotate",
        node_decompose + ".outputScale": src + ".scale",
    }
    if parent:
        # Compensate for src's parent so world transforms still line up
        connections.update({
            parent + ".worldInverseMatrix": node_multmatrix + ".matrixIn[1]"
        })

    # COMPATIBILITY FIX: dict.iteritems() does not exist on Python 3;
    # items() works on both.
    for s, d in connections.items():
        cmds.connectAttr(s, d, force=True)

    # Force evaluation, then discard the helper nodes (values remain set)
    cmds.refresh()
    cmds.delete([node_decompose, node_multmatrix])
def connect_shapes(src, dst):
    """Connect geometry of `src` to source geometry of dst

    Arguments:
        src (str): Name of source shape
        dst (list): Names of destination nodes
    """
    # Determine the source's geometry output attribute by node type
    out_attr = None
    if cmds.nodeType(src) == "mesh":
        out_attr = ".outMesh"
    elif cmds.nodeType(src) in ("nurbsSurface", "nurbsCurve"):
        out_attr = ".local"
    else:
        # Fall back to a conventional generic output attribute
        for wildcard in (".output",):
            if cmds.objExists(src + wildcard):
                out_attr = wildcard
                break
    if not out_attr:
        return cmds.warning("Could not detect output of %s" % src)
    for target in dst:
        # Determine each target's geometry input attribute by node type
        in_attr = None
        if cmds.nodeType(target) == "mesh":
            in_attr = ".inMesh"
        elif cmds.nodeType(target) in ("nurbsSurface", "nurbsCurve"):
            in_attr = ".create"
        else:
            # Support unspecific nodes with common input attributes
            for support, wildcard in (("mesh", ".inputPolymesh"),
                                      ("mesh", ".inputMesh"),
                                      ("mesh", ".inputGeometry")):
                if cmds.objExists(target + wildcard):
                    # The generic input only accepts the matching geometry
                    # type; otherwise warn and give up on this target
                    if not cmds.nodeType(src) == support:
                        cmds.warning("Could not connect: %s -> %s" % (src,
                                                                      target))
                        break
                    in_attr = wildcard
                    break
        if not in_attr:
            cmds.warning("Could not detect input of %s" % target)
            continue
        try:
            cmds.connectAttr(src + out_attr,
                             target + in_attr,
                             force=True)
        except Exception as e:
            # Keep going; one failed target should not abort the rest
            cmds.warning("Could not connect: %s%s -> %s%s (%s)" % (
                src, out_attr,
                target, in_attr, e)
            )
def connect_transform(driver, driven, source=WorldSpace, compensate=False):
    """Connect translation, rotation and scale via decomposeMatrix

    Arguments:
        driver (str): Absolute path to driver
        driven (str): Absolute path to driven
        source (str, optional): Either WorldSpace or LocalSpace,
            default WorldSpace
        compensate (bool, optional): Whether or not to take into account
            the current transform, default False.

    Returns:
        output (list): Newly created nodes
    """
    outputattr = ".matrix" if source == LocalSpace else ".worldMatrix[0]"
    assert cmds.objExists(driver), "%s not found" % driver
    assert cmds.objExists(driven), "%s not found" % driven
    # Re-use an existing decompose node for this driver if a previous call
    # already made one (name-based lookup)
    decompose = driver + "_decompose"
    output = [decompose]
    if not cmds.objExists(decompose):
        decompose = cmds.createNode("decomposeMatrix", name=decompose)
        if compensate:
            multMatrix = cmds.createNode(
                "multMatrix", name=driver + "_multMatrix")
            # Compensate for drivens parentMatrix.
            cmds.connectAttr(driver + outputattr,
                             multMatrix + ".matrixIn[0]")
            cmds.connectAttr(driven + ".parentInverseMatrix",
                             multMatrix + ".matrixIn[1]")
            cmds.connectAttr(multMatrix + ".matrixSum",
                             decompose + ".inputMatrix")
            output.append(multMatrix)
        else:
            cmds.connectAttr(driver + outputattr,
                             decompose + ".inputMatrix")
    # Drive driven with compensated driver.
    cmds.connectAttr(decompose + ".outputTranslate", driven + ".t")
    cmds.connectAttr(decompose + ".outputRotate", driven + ".r")
    cmds.connectAttr(decompose + ".outputScale", driven + ".s")
    return output
def clone(shape, worldspace=False):
    """Clone `shape`

    Arguments:
        shape (str): Absolute path to shape
        worldspace (bool, optional): Whether or not to consider worldspace

    Returns:
        node (str): Newly created clone
    """
    # IDIOM FIX: renamed from `type`, which shadowed the builtin; also
    # reuse this single nodeType() query instead of calling it twice.
    node_type = cmds.nodeType(shape)
    assert node_type in ("mesh", "nurbsSurface", "nurbsCurve"), (
        "clone() works on polygonal and nurbs surfaces")

    src, dst = {
        "mesh": (".outMesh", ".inMesh"),
        "nurbsSurface": (".local", ".create"),
        "nurbsCurve": (".local", ".create"),
    }[node_type]

    name = lib.unique(name=shape.rsplit("|")[-1])
    clone = cmds.createNode(node_type, name=name)

    cmds.connectAttr(shape + src, clone + dst, force=True)

    if worldspace:
        # Bake the source's world matrix into the cloned geometry
        transform = cmds.createNode("transformGeometry",
                                    name=name + "_transformGeometry")
        cmds.connectAttr(shape + src,
                         transform + ".inputGeometry", force=True)
        cmds.connectAttr(shape + ".worldMatrix[0]",
                         transform + ".transform", force=True)
        cmds.connectAttr(transform + ".outputGeometry",
                         clone + dst, force=True)

    # Assign default shader
    cmds.sets(clone, addElement="initialShadingGroup")

    return clone
def combine(nodes):
    """Produce a new mesh with the contents of `nodes`

    Arguments:
        nodes (list): Path to shapes

    Returns:
        str or None: The combined output mesh, or None when no polygonal
            input could be connected.
    """
    unite = cmds.createNode("polyUnite", n=nodes[0] + "_polyUnite")
    count = 0
    for node in nodes:
        # Are we dealing with transforms, or shapes directly?
        shapes = cmds.listRelatives(node, shapes=True) or [node]
        for shape in shapes:
            try:
                cmds.connectAttr(shape + ".outMesh",
                                 unite + ".inputPoly[%s]" % count, force=True)
                cmds.connectAttr(shape + ".worldMatrix",
                                 unite + ".inputMat[%s]" % count, force=True)
                count += 1
            except Exception:
                # Non-polygonal shapes have no .outMesh; skip them
                cmds.warning("'%s' is not a polygonal mesh" % shape)
    if count:
        output = cmds.createNode("mesh", n=nodes[0] + "_combinedShape")
        cmds.connectAttr(unite + ".output", output + ".inMesh", force=True)
        return output
    else:
        # Nothing was connected; remove the dangling polyUnite node
        cmds.delete(unite)
        return None
def transfer_outgoing_connections(src, dst):
    """Connect outgoing connections from `src` to `dst`

    Connections that cannot be made are ignored.

    Arguments:
        src (str): Absolute path to source node
        dst (str): Absolute path to destination node
    """
    destinations = cmds.listConnections(src,
                                        source=False,
                                        plugs=True) or []
    for destination in destinations:
        sources = cmds.listConnections(destination,
                                       destination=False,
                                       plugs=True) or []
        for source in sources:
            # Re-point the equivalent plug on `dst` at the same
            # destination; incompatible plugs raise and are skipped.
            try:
                cmds.connectAttr(source.replace(src, dst),
                                 destination, force=True)
            except RuntimeError:
                continue
def parent_group(source, transferTransform=True):
    """Create and transfer transforms to parent group"""
    assert cmds.objExists(source), "%s does not exist" % source
    assert cmds.nodeType(source) == "transform", (
        "%s must be transform" % source)
    parent = cmds.listRelatives(source, parent=True)
    if transferTransform:
        # Build the group at the source's transform, then re-parent
        group = cmds.createNode("transform", n="%s_parent" % source)
        match_transform(group, source)
        try:
            cmds.parent(source, group)
        except Exception:
            # Roll back the new group when parenting is not possible
            cmds.warning("Failed to parent child under new parent")
            cmds.delete(group)
        if parent:
            # Keep the new group where the source used to live
            cmds.parent(group, parent[0])
    else:
        # cmds.group() parents the selected source under a fresh group
        cmds.select(source)
        group = cmds.group(n="%s_parent" % source)
    return group
def _output_node(source, type, suffix):
    """Create a child node of `type` under `source`, named after it.

    Arguments:
        source (str): Parent node
        type (str): Node type to create (e.g. "locator", "joint")
        suffix (str): Suffix appended to the generated name

    Returns:
        str: Name of the created (renamed) node
    """
    newname = lib.unique(name=source.rsplit("_", 1)[0] + suffix)
    node = cmds.createNode(type)
    # NOTE(review): for shape-producing types this is meant to resolve the
    # created shape to its transform, but `[x or node][0]` returns the
    # listRelatives *list* unchanged when non-empty -- presumably
    # `(... or [node])[0]` was intended. Confirm before changing.
    node = [cmds.listRelatives(node, parent=True) or node][0]
    node = cmds.rename(node, newname)
    try:
        cmds.parent(node, source)
        match_transform(node, source)
    except Exception:
        # Clean up the created node if it cannot be parented/matched
        cmds.warning("Could not create %s" % node)
        cmds.delete(node)
    return node
def output_locator(source, suffix="_LOC"):
    """Create child locator

    Arguments:
        source (str): Parent node
        suffix (str): Suffix of output
    """
    # Delegate to the generic child-output helper
    node_type = "locator"
    return _output_node(source, node_type, suffix)
def output_joint(source, suffix="_JNT"):
    """Create child joint

    Arguments:
        source (str): Parent node
        suffix (str): Suffix of output
    """
    # Delegate to the generic child-output helper
    node_type = "joint"
    return _output_node(source, node_type, suffix)
def follicle(shape, u=0, v=0, name=""):
    """Attach follicle to "shape" at specified "u" and "v" values"""
    type = cmds.nodeType(shape)
    assert type in ("mesh", "nurbsSurface"), (
        "follicle() works on polygonal meshes and nurbs")
    # Pick the geometry-output / follicle-input attribute pair per type
    src, dst = {
        "mesh": (".outMesh", ".inputMesh"),
        "nurbsSurface": (".local", ".inputSurface")
    }[type]
    follicle = cmds.createNode("follicle", name=name + "Shape")
    transform = cmds.listRelatives(follicle, parent=True)[0]
    cmds.setAttr(follicle + ".parameterU", u)
    cmds.setAttr(follicle + ".parameterV", v)
    # The follicle shape drives its own transform...
    cmds.connectAttr(follicle + ".outTranslate", transform + ".translate")
    cmds.connectAttr(follicle + ".outRotate", transform + ".rotate")
    # ...while the geometry feeds the follicle
    cmds.connectAttr(shape + ".worldMatrix[0]", follicle + ".inputWorldMatrix")
    cmds.connectAttr(shape + src, follicle + dst, force=True)
    return transform
def connect_matching_attributes(source, target):
    """Connect matching attributes from source to target

    Arguments:
        source (str): Absolute path to node from which to connect
        target (str): Target node

    Example:
        >>> # Select two matching nodes
        >>> source = cmds.createNode("transform", name="source")
        >>> target = cmds.createNode("transform", name="target")
        >>> cmds.select([source, target], replace=True)
        >>> source, target = cmds.ls(selection=True)
        >>> connect_matching_attributes(source, target)
    """
    target_attrs = cmds.listAttr(target, keyable=True)

    # Only attributes present on both nodes get connected
    for name in cmds.listAttr(source, keyable=True):
        if name not in target_attrs:
            continue
        plug = "." + name
        try:
            cmds.connectAttr(source + plug, target + plug, force=True)
        except RuntimeError as e:
            cmds.warning("Could not connect %s: %s" % (plug, e))
def create_ncloth(input_mesh):
    """Replace Create nCloth menu item

    This performs the identical option of nCloth -> Create nCloth
    with the following changes.
    1. Input mesh not made intermediate
    2. Current mesh and shape named "currentMesh"

    Arguments:
        input_mesh (str): Path to shape

    Returns:
        str: The newly created output mesh ("currentMesh")
    """
    assert cmds.nodeType(input_mesh) == "mesh", (
        "%s was not of type mesh" % input_mesh)
    nucleus = cmds.createNode("nucleus", name="nucleus1")
    ncloth = cmds.createNode("nCloth", name="nClothShape1")
    current_mesh = cmds.createNode("mesh", name="currentMesh")
    # Geometry flow: input mesh -> nCloth simulation -> output mesh
    cmds.connectAttr(input_mesh + ".worldMesh[0]", ncloth + ".inputMesh")
    cmds.connectAttr(ncloth + ".outputMesh", current_mesh + ".inMesh")
    # Drive both solver and cloth from scene time
    cmds.connectAttr("time1.outTime", nucleus + ".currentTime")
    cmds.connectAttr("time1.outTime", ncloth + ".currentTime")
    # Register the cloth object with the nucleus solver
    cmds.connectAttr(ncloth + ".currentState", nucleus + ".inputActive[0]")
    cmds.connectAttr(ncloth + ".startState", nucleus + ".inputActiveStart[0]")
    cmds.connectAttr(nucleus + ".outputObjects[0]", ncloth + ".nextState")
    cmds.connectAttr(nucleus + ".startFrame", ncloth + ".startFrame")
    # Assign default shader
    cmds.sets(current_mesh, addElement="initialShadingGroup")
    return current_mesh
def enhanced_parent(child, parent):
    """Parent `child` under `parent`, handling shape nodes as well.

    Shape nodes need `shape=True` with `relative=True` to be re-parented
    directly under another transform; other nodes use a plain parent.
    """
    if "shape" in cmds.nodeType(child, inherited=True):
        # BUG FIX: the original called cmds.parent() without any nodes in
        # this branch, which would operate on the current selection
        # instead of the given child/parent.
        cmds.parent(child, parent, relative=True, shape=True)
    else:
        cmds.parent(child, parent)
def auto_connect_assets(src, dst):
    """Attempt to automatically connect two assets

    Arguments:
        src (str): Name of source reference node
        dst (str): Name of destination reference node

    Raises:
        StopIteration on missing in_SET
    """
    # NOTE(review): if no node ends with "in_SET", `in_set` stays None and
    # the cmds.sets() query below fails -- the documented StopIteration
    # looks like a leftover from a generator-based implementation; confirm.
    in_set = None
    for node in cmds.referenceQuery(dst, nodes=True):
        if node.endswith("in_SET"):
            in_set = node
            break
    for input_transform in cmds.sets(in_set, query=True):
        # Match source and destination members by their shared id (mbID)
        mbid = cmds.getAttr(input_transform + ".mbID")
        input_shape = cmds.listRelatives(input_transform, shapes=True)[0]
        for output_transform in lib.lsattr("mbID", value=mbid):
            # Only consider nodes that actually come from `src`
            ref = cmds.referenceQuery(output_transform, referenceNode=True)
            if ref != src:
                continue
            print("Connecting %s -> %s" % (output_transform, input_transform))
            output_shape = cmds.listRelatives(output_transform, shapes=True)[0]
            try:
                auto_connect(output_transform, input_transform)
            except RuntimeError:
                # Already connected
                pass
            try:
                auto_connect(output_shape, input_shape)
            except RuntimeError:
                # Already connected
                pass

View file

@ -0,0 +1,288 @@
"""Interactive functionality
These depend on user selection in Maya, and may be used as-is. They
implement the functionality in :mod:`commands.py`.
Each of these functions take `*args` as argument, because when used
in a Maya menu an additional argument is passed with metadata about
what state the button was pressed in. None of this data is used here.
"""
from maya import cmds, mel
from . import commands, lib
def connect_shapes(*args):
    """Connect the first selection to the last selection(s)"""
    nodes = cmds.ls(selection=True)
    # First selected node is the source; the rest are targets
    source, targets = nodes[0], nodes[1:]
    commands.connect_shapes(source, dst=targets)
def combine(*args):
    """Combine currently selected meshes

    This differs from the default Maya combine in that it
    retains the original mesh and produces a new mesh with the result.
    """
    selected = cmds.ls(sl=1)
    commands.combine(selected)
def read_selected_channels(*args):
    """Return a list of selected channels in the Channel Box"""
    # Resolve the Channel Box widget name from MEL's global variable
    channelbox = mel.eval("global string $gChannelBoxName; "
                          "$temp=$gChannelBoxName;")
    return cmds.channelBox(channelbox,
                           query=True,
                           selectedMainAttributes=True) or []
def set_defaults(*args):
    """Set currently selected values from channel box to their default value

    If no channel is selected, default all keyable attributes.
    """
    # PERF FIX: the Channel Box selection is global, not per node -- query
    # it once instead of once per selected node.
    selected_channels = read_selected_channels()

    for node in cmds.ls(selection=True):
        for channel in (selected_channels or
                        cmds.listAttr(node, keyable=True)):
            try:
                default = cmds.attributeQuery(channel,
                                              node=node,
                                              listDefault=True)[0]
            except Exception:
                # Attribute has no queryable default; leave it untouched
                continue
            else:
                cmds.setAttr(node + "." + channel, default)
def transfer_outgoing_connections(*args):
    """Connect outgoing connections from first to second selected node"""
    selection = cmds.ls(selection=True)
    # Exactly two nodes must be selected: source then destination
    if len(selection) != 2:
        return cmds.warning("Select source and destination nodes")
    source, destination = selection
    commands.transfer_outgoing_connections(source, destination)
def clone_special(*args):
    """Clone in localspace, and preserve user-defined attributes"""
    for transform in cmds.ls(selection=True, long=True):
        if cmds.nodeType(transform) != "transform":
            cmds.warning("Skipping '%s', not a `transform`" % transform)
            continue
        shape = _find_shape(transform)
        type = cmds.nodeType(shape)
        if type not in ("mesh", "nurbsSurface", "nurbsCurve"):
            cmds.warning("Skipping '{transform}': cannot clone nodes "
                         "of type '{type}'".format(**locals()))
            continue
        cloned = commands.clone(shape, worldspace=False)
        new_transform = cmds.listRelatives(cloned,
                                           parent=True,
                                           fullPath=True)[0]
        # Strip any namespace prefix from the clone's name
        new_transform = cmds.rename(new_transform,
                                    new_transform.rsplit(":", 1)[-1])
        # Carry over user-defined attributes as string copies
        for attr in cmds.listAttr(transform,
                                  userDefined=True) or list():
            try:
                cmds.addAttr(new_transform, longName=attr, dataType="string")
            except Exception:
                # Attribute could not be added (e.g. already exists); skip
                continue
            value = cmds.getAttr(transform + "." + attr)
            cmds.setAttr(new_transform + "." + attr, value, type="string")
        # Connect visibility
        cmds.connectAttr(transform + ".visibility",
                         new_transform + ".visibility")
def clone_worldspace(*args):
    """Clone the selected shapes in worldspace (menu callback)."""
    return _clone(worldspace=True)
def clone_localspace(*args):
    """Clone the selected shapes in localspace (menu callback)."""
    return _clone(worldspace=False)
def _clone(worldspace=False):
    """Clone selected objects in viewport

    Arguments:
        worldspace (bool): Whether or not to append a transformGeometry to
            resulting clone.
    """
    clones = list()
    for node in cmds.ls(selection=True, long=True):
        shape = _find_shape(node)
        type = cmds.nodeType(shape)
        # Only geometry shapes can be cloned
        if type not in ("mesh", "nurbsSurface", "nurbsCurve"):
            cmds.warning("Skipping '{node}': cannot clone nodes "
                         "of type '{type}'".format(**locals()))
            continue
        cloned = commands.clone(shape, worldspace=worldspace)
        clones.append(cloned)
    if not clones:
        return
    # Select newly created transform nodes in the viewport
    transforms = list()
    for clone in clones:
        transform = cmds.listRelatives(clone, parent=True, fullPath=True)[0]
        transforms.append(transform)
    cmds.select(transforms, replace=True)
def _find_shape(element):
    """Return shape of given 'element'

    Supports components, meshes, and surfaces

    Arguments:
        element (str): Path to component, mesh or surface

    Returns:
        str of path if found, None otherwise
    """
    # Resolve the element to an actual node (strips component suffixes)
    node = cmds.ls(element, objectsOnly=True, long=True)[0]

    if cmds.nodeType(node) != "transform":
        # Already a shape (or any other non-transform node)
        return node

    shapes = cmds.listRelatives(node, shapes=True, fullPath=True)
    try:
        return shapes[0]
    except IndexError:
        return cmds.warning("Could not find shape in %s" % element)
def connect_matching_attributes_from_selection(*args):
    """Connect matching attributes between the two selected nodes."""
    selection = cmds.ls(sl=True)
    # Exactly two nodes must be selected: source then target
    if len(selection) != 2:
        raise ValueError("Select (1) source and (2) target nodes only.")
    source, target = selection
    return commands.connect_matching_attributes(source, target)
def auto_connect(*args):
    """Connect `src` to `dst` via the most likely input and output"""
    selection = cmds.ls(selection=True)
    try:
        # TypeError is raised when the selection count does not match
        # the command's (src, dst) signature
        commands.auto_connect(*selection)
    except TypeError:
        cmds.warning("Select only source and destination nodes.")
def create_ncloth(*args):
    """Create nCloth for the first selected mesh (menu callback).

    CONSISTENCY FIX: every other menu callback in this module accepts
    ``*args`` because Maya menu items pass extra state arguments; without
    it this callback raises TypeError when triggered from the menu.
    """
    selection = cmds.ls(selection=True)[0]
    input_mesh = cmds.listRelatives(selection, shapes=True)[0]
    current_mesh = commands.create_ncloth(input_mesh)

    # Optionally append suffix
    comp = selection.rsplit("_", 1)
    suffix = ("_" + comp[-1]) if len(comp) > 1 else ""
    cmds.rename(current_mesh, "currentMesh%sShape" % suffix)

    # Mimic default nCloth command
    cmds.hide(selection)
def follicle(*args):
    """Attach a follicle to each selected mesh/nurbs element (menu callback)."""
    supported = ["mesh", "nurbsSurface"]
    selection = cmds.ls(sl=1)
    new_follicles = []
    for sel in selection:
        uv = lib.uv_from_element(sel)
        geometry_shape = lib.shape_from_element(sel)
        geometry_transform = cmds.listRelatives(geometry_shape, parent=True)[0]
        # Figure out output connection
        inputs = [".inputMesh", ".inputSurface"]
        outputs = [".outMesh", ".local"]
        failed = False
        type = cmds.nodeType(geometry_shape)
        if type not in supported:
            failed = True
            # Fall back to the shape below (e.g. a transform was resolved)
            shapes = cmds.listRelatives(geometry_shape, shapes=True)
            if shapes:
                geometry_shape = shapes[0]
                type = cmds.nodeType(geometry_shape)
                if type in supported:
                    failed = False
        if failed:
            cmds.error("Skipping '%s': Type not accepted" % type)
            return
        # Matching input/output attributes by index in `supported`
        input = inputs[supported.index(type)]
        output = outputs[supported.index(type)]
        # Make follicle
        follicle = cmds.createNode("follicle",
                                   name=geometry_transform + "_follicleShape1")
        follicle_transform = cmds.listRelatives(follicle, parent=True)[0]
        follicle_transform = cmds.rename(follicle_transform,
                                         geometry_transform + "_follicle1")
        # Set U and V value
        cmds.setAttr(follicle + ".parameterU", uv[0])
        cmds.setAttr(follicle + ".parameterV", uv[1])
        # Make the connections
        cmds.connectAttr(follicle + ".outTranslate",
                         follicle_transform + ".translate")
        cmds.connectAttr(follicle + ".outRotate",
                         follicle_transform + ".rotate")
        cmds.connectAttr(geometry_shape + output,
                         follicle + input)
        # Select last
        new_follicles.append(follicle_transform)
    # Select newly created follicles
    if new_follicles:
        cmds.select(new_follicles, r=1)
    return new_follicles
def auto_connect_assets(*args):
    """Auto-connect two selected reference nodes (source, then destination)."""
    references = cmds.ls(selection=True, type="reference")
    if len(references) != 2:
        raise RuntimeError("Select source and destination "
                           "reference nodes, in that order.")
    return commands.auto_connect_assets(*references)

222
colorbleed/maya/lib.py Normal file
View file

@ -0,0 +1,222 @@
"""Standalone helper functions"""
import re
import contextlib
from maya import cmds
def maintained_selection(arg=None):
    """Maintain selection, usable as both decorator and context manager.

    Usage:
        @maintained_selection          # decorates the function
        with maintained_selection():   # no argument, yields a context

    BUG FIX: the original branches were inverted -- it returned the
    context manager when called WITH a function (decorator usage) and
    passed ``None`` to the decorator when called with no argument.
    """
    if arg is None:
        return _maintained_selection_context()
    return _maintained_selection_decorator(arg)
def _maintained_selection_decorator(func):
    """Function decorator to maintain the selection once called

    Example:
        >>> @maintained_selection
        ... def my_function():
        ...     # Modify selection
        ...     cmds.select(clear=True)
        ...
        >>> # Selection restored
    """
    def wrapper(*args, **kwargs):
        previous_selection = cmds.ls(selection=True)
        try:
            return func(*args, **kwargs)
        finally:
            # Restore whatever was selected before the call, or clear
            # the selection when nothing was selected beforehand
            if previous_selection:
                cmds.select(previous_selection,
                            replace=True,
                            noExpand=True)
            else:
                cmds.select(deselect=True,
                            noExpand=True)
    return wrapper
@contextlib.contextmanager
def _maintained_selection_context():
    """Maintain selection during context

    Example:
        >>> scene = cmds.file(new=True, force=True)
        >>> node = cmds.createNode("transform", name="Test")
        >>> cmds.select("persp")
        >>> with maintained_selection():
        ...     cmds.select("Test", replace=True)
        >>> "Test" in cmds.ls(selection=True)
        False
    """
    previous_selection = cmds.ls(selection=True)
    try:
        yield
    finally:
        # Restore the pre-context selection, or clear it when there was none
        if previous_selection:
            cmds.select(previous_selection,
                        replace=True,
                        noExpand=True)
        else:
            cmds.select(deselect=True,
                        noExpand=True)
def unique(name):
    """Return a scene-unique variant of `name`.

    Appends (or increments) a trailing number until no node with the
    resulting name exists in the scene.

    Arguments:
        name (str): Candidate node name

    Returns:
        str: A name for which ``cmds.objExists`` is False
    """
    # COMPATIBILITY FIX: `basestring` does not exist on Python 3
    try:
        string_types = basestring  # noqa: F821 -- Python 2 only
    except NameError:
        string_types = str
    assert isinstance(name, string_types), "`name` must be string"

    while cmds.objExists(name):
        matches = re.findall(r"\d+$", name)
        if matches:
            match = matches[-1]
            # ROBUSTNESS FIX: remove the exact numeric suffix;
            # str.rstrip(match) strips a *character set*, not a substring.
            name = name[:-len(match)]
            number = int(match) + 1
        else:
            number = 1
        name = name + str(number)
    return name
def uv_from_element(element):
    """Return the UV coordinate of given 'element'

    Supports components, meshes, nurbs.

    Arguments:
        element (str): A shape node, a transform with a shape child, or a
            component such as "pCube1.f[0]".

    Returns:
        list: [u, v], defaulting to [0.5, 0.5] when no UV can be derived.
            Returns None after an error/warning was issued.
    """

    supported = ["mesh", "nurbsSurface"]

    # Fallback value when no UV can be derived below.
    uv = [0.5, 0.5]

    if "." not in element:
        # Not a component: resolve the shape from a node or transform.
        # NOTE: `type` shadows the builtin within this function.
        type = cmds.nodeType(element)
        if type == "transform":
            geometry_shape = cmds.listRelatives(element, shapes=True)

            if len(geometry_shape) >= 1:
                geometry_shape = geometry_shape[0]
            else:
                return

        elif type in supported:
            geometry_shape = element

        else:
            cmds.error("Could not do what you wanted..")
            return
    else:
        # If it is indeed a component - get the current Mesh
        try:
            parent = element.split(".", 1)[0]

            # Maya is funny in that when the transform of the shape
            # of the component elemen has children, the name returned
            # by that elementection is the shape. Otherwise, it is
            # the transform. So lets see what type we're dealing with here.
            if cmds.nodeType(parent) in supported:
                geometry_shape = parent
            else:
                geometry_shape = cmds.listRelatives(parent, shapes=1)[0]

            if not geometry_shape:
                cmds.error("Skipping %s: Could not find shape." % element)
                return

            if len(cmds.ls(geometry_shape)) > 1:
                cmds.warning("Multiple shapes with identical "
                             "names found. This might not work")

        except TypeError as e:
            cmds.warning("Skipping %s: Didn't find a shape "
                         "for component elementection. %s" % (element, e))
            return

    try:
        type = cmds.nodeType(geometry_shape)

        if type == "nurbsSurface":
            # If a surfacePoint is elementected on a nurbs surface
            root, u, v = element.rsplit("[", 2)
            uv = [float(u[:-1]), float(v[:-1])]

        if type == "mesh":
            # -----------
            # Average the U and V values
            # ===========
            uvs = cmds.polyListComponentConversion(element, toUV=1)
            if not uvs:
                cmds.warning("Couldn't derive any UV's from "
                             "component, reverting to default U and V")
                raise TypeError

            # Flatten list of Uv's as sometimes it returns
            # neighbors like this [2:3] instead of [2], [3]
            flattened = []

            for uv in uvs:
                flattened.extend(cmds.ls(uv, flatten=True))

            uvs = flattened

            sumU = 0
            sumV = 0
            for uv in uvs:
                try:
                    u, v = cmds.polyEditUV(uv, query=True)
                except Exception:
                    cmds.warning("Couldn't find any UV coordinated, "
                                 "reverting to default U and V")
                    raise TypeError

                sumU += u
                sumV += v

            averagedU = sumU / len(uvs)
            averagedV = sumV / len(uvs)

            uv = [averagedU, averagedV]
    except TypeError:
        # TypeError is (ab)used as control flow above to fall back to
        # the default [0.5, 0.5] value.
        pass

    return uv
def shape_from_element(element):
    """Return shape of given 'element'

    Supports components, meshes, and surfaces

    Returns:
        str or None: The shape node, or None when it cannot be resolved.
    """
    # Resolve the underlying node (shape or transform) for the element
    try:
        node = cmds.ls(element, objectsOnly=True)[0]
    except Exception:
        cmds.warning("Could not find node in %s" % element)
        return None

    if cmds.nodeType(node) != 'transform':
        # Already a shape-like node
        return node

    # It's a transform; take its first shape child
    try:
        return cmds.listRelatives(node, shapes=True)[0]
    except Exception:
        cmds.warning("Could not find shape in %s" % element)
        return None
def add_attributes(node, data):
    """Add and set user-defined attributes on `node` from a mapping.

    Fix: the original looped over `data` but called `cmds.addAttr()` with
    no arguments at all, ignoring both key and value.

    Arguments:
        node (str): Node to receive the attributes.
        data (dict): Mapping of {attribute name: value}. Supported value
            types: str, bool, int, float.

    Raises:
        TypeError: On unsupported value types.
    """
    for key, value in data.items():
        attribute = "{0}.{1}".format(node, key)

        if not cmds.objExists(attribute):
            # NOTE: bool must be checked before int (bool is an int subclass)
            if isinstance(value, basestring):
                cmds.addAttr(node, longName=key, dataType="string")
            elif isinstance(value, bool):
                cmds.addAttr(node, longName=key, attributeType="bool")
            elif isinstance(value, int):
                cmds.addAttr(node, longName=key, attributeType="long")
            elif isinstance(value, float):
                cmds.addAttr(node, longName=key, attributeType="double")
            else:
                raise TypeError("Unsupported attribute value type for "
                                "%s: %s" % (key, type(value)))

        if isinstance(value, basestring):
            cmds.setAttr(attribute, value, type="string")
        else:
            cmds.setAttr(attribute, value)

72
colorbleed/maya/menu.py Normal file
View file

@ -0,0 +1,72 @@
import sys
from maya import cmds
from avalon.vendor.Qt import QtWidgets, QtCore
# Use the module itself as a mutable namespace for menu state
self = sys.modules[__name__]
self._menu = "colorbleed"  # Object name of the top-level Maya menu

# Resolve Maya's main window widget (named "MayaWindow") for parenting;
# None when run outside a Maya GUI session.
self._parent = {
    widget.objectName(): widget
    for widget in QtWidgets.QApplication.topLevelWidgets()
}.get("MayaWindow")
def install():
    """Build the Colorbleed menu in the Maya main window.

    Removes any previously installed menu first; the actual build is
    deferred via a QTimer to let the uninstallation finish.
    """
    from . import interactive

    uninstall()

    def deferred():
        # Top-level "Colorbleed" menu
        cmds.menu(self._menu,
                  label="Colorbleed",
                  tearOff=True,
                  parent="MayaWindow")

        # Modeling sub-menu
        cmds.menuItem("Modeling",
                      label="Modeling",
                      tearOff=True,
                      subMenu=True,
                      parent=self._menu)

        cmds.menuItem("Combine", command=interactive.combine)

        # Rigging sub-menu
        cmds.menuItem("Rigging",
                      label="Rigging",
                      tearOff=True,
                      subMenu=True,
                      parent=self._menu)

        cmds.menuItem("Auto Connect", command=interactive.auto_connect)
        cmds.menuItem("Clone (Local)", command=interactive.clone_localspace)
        cmds.menuItem("Clone (World)", command=interactive.clone_worldspace)
        cmds.menuItem("Clone (Special)", command=interactive.clone_special)
        cmds.menuItem("Create Follicle", command=interactive.follicle)

        # Animation sub-menu
        cmds.menuItem("Animation",
                      label="Animation",
                      tearOff=True,
                      subMenu=True,
                      parent=self._menu)

        cmds.menuItem("Set Defaults", command=interactive.set_defaults)

        # Step back up out of the current sub-menu
        cmds.setParent("..", menu=True)

        cmds.menuItem(divider=True)

        # NOTE(review): a menuItem named "Auto Connect" was already created
        # under "Rigging" above; Maya may rename or reject the duplicate
        # object name -- confirm intended.
        cmds.menuItem("Auto Connect", command=interactive.auto_connect_assets)

    # Allow time for uninstallation to finish.
    QtCore.QTimer.singleShot(100, deferred)
def uninstall():
    """Remove the Colorbleed menu from Maya, if it exists."""
    app = QtWidgets.QApplication.instance()
    widgets = {w.objectName(): w for w in app.allWidgets()}

    menu = widgets.get(self._menu)
    if menu:
        menu.deleteLater()
        del menu

34
colorbleed/plugin.py Normal file
View file

@ -0,0 +1,34 @@
import tempfile
import pyblish.api
# Validator ordering offsets within pyblish's validation stage:
# pipeline checks run first, then content, scene and mesh validations.
ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05
ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1
ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2
ValidateMeshOrder = pyblish.api.ValidatorOrder + 0.3
class Extractor(pyblish.api.InstancePlugin):
    """Extractor base class.

    The extractor base class implements a "staging_dir" function used to
    generate a temporary directory for an instance to extract to.

    This temporary directory is generated through `tempfile.mkdtemp()`

    """

    order = pyblish.api.ExtractorOrder

    def staging_dir(self, instance):
        """Provide a temporary directory in which to store extracted files

        Upon calling this method the staging directory is stored inside
        the instance.data['stagingDir']

        """
        existing = instance.data.get('stagingDir')
        if existing:
            return existing

        created = tempfile.mkdtemp()
        instance.data['stagingDir'] = created
        return created

View file

@ -0,0 +1,39 @@
from collections import OrderedDict
import avalon.maya
from maya import cmds
class CreateAnimation(avalon.maya.Creator):
    """The animated objects in the scene"""

    name = "animationDefault"
    label = "Animation"
    family = "colorbleed.animation"

    def __init__(self, *args, **kwargs):
        super(CreateAnimation, self).__init__(*args, **kwargs)

        # Default frame range is taken from the current scene.
        self.data = OrderedDict([
            ("startFrame", cmds.playbackOptions(query=True,
                                                animationStartTime=True)),
            ("endFrame", cmds.playbackOptions(query=True,
                                              animationEndTime=True)),
            ("handles", 1),
            ("step", 1.0),

            # Write vertex colors with the geometry.
            ("writeColorSets", False),

            # Include only renderable visible shapes.
            # Skips locators and empty transforms
            ("renderableOnly", False),

            # Include only nodes that are visible at least once during the
            # frame range.
            ("visibleOnly", False),
        ])

View file

@ -0,0 +1,12 @@
import avalon.maya
class CreateCamera(avalon.maya.Creator):
    """Single baked camera extraction"""

    # Instance defaults; creation itself is handled by the base Creator.
    name = "cameraDefault"
    label = "Camera"
    family = "colorbleed.camera"

View file

@ -0,0 +1,9 @@
import avalon.maya
class CreateGroom(avalon.maya.Creator):
    """Hair / fur definition for an asset"""

    # Instance defaults; creation is handled by the base Creator.
    name = "groomDefault"
    label = "Groom"
    family = "colorbleed.groom"

View file

@ -0,0 +1,9 @@
import avalon.maya
class CreateInstance(avalon.maya.Creator):
    """Maya instancer using cached particles"""

    # Instance defaults; creation is handled by the base Creator.
    name = "instanceDefault"
    label = "Instance"
    family = "colorbleed.instance"

View file

@ -0,0 +1,20 @@
import avalon.maya
class CreateLayout(avalon.maya.Creator):
    """The layout of a episode / sequence / shot """

    name = "layoutDefault"
    label = "Layout"
    family = "colorbleed.layout"

    def __init__(self, *args, **kwargs):
        super(CreateLayout, self).__init__(*args, **kwargs)

        from maya import cmds

        # Callables are stored so the scene's frame range is queried at
        # creation time rather than when this module is imported.
        self.data.update({
            "startFrame": lambda: cmds.playbackOptions(
                query=True, animationStartTime=True),
            "endFrame": lambda: cmds.playbackOptions(
                query=True, animationEndTime=True),
        })

View file

@ -0,0 +1,9 @@
import avalon.maya
class CreateLook(avalon.maya.Creator):
    """Look development (shading) for an asset"""
    # NOTE: original docstring said "Polygonal geometry for animation",
    # copy-pasted from CreateModel; the label/family indicate look dev.

    name = "lookDefault"
    label = "Look Dev"
    family = "colorbleed.look"

View file

@ -0,0 +1,9 @@
import avalon.maya
class CreateMayaAscii(avalon.maya.Creator):
    """Raw Maya Ascii file of the item(s)"""

    # Instance defaults; creation is handled by the base Creator.
    name = "mayaAscii"
    label = "Maya Ascii"
    family = "colorbleed.mayaAscii"

View file

@ -0,0 +1,9 @@
import avalon.maya
class CreateModel(avalon.maya.Creator):
    """Polygonal geometry for animation"""

    # Instance defaults; creation is handled by the base Creator.
    name = "modelDefault"
    label = "Model"
    family = "colorbleed.model"

View file

@ -0,0 +1,9 @@
import avalon.maya
class CreatePointCache(avalon.maya.Creator):
    """Alembic extract"""

    # Instance defaults; creation is handled by the base Creator.
    name = "pointcache"
    label = "Point Cache"
    family = "colorbleed.pointcache"

View file

@ -0,0 +1,17 @@
import avalon.maya
from maya import cmds
class CreateRig(avalon.maya.Creator):
    """Skeleton and controls for manipulation of the geometry"""

    name = "rigDefault"
    label = "Rig"
    family = "colorbleed.rig"

    def process(self):
        instance = super(CreateRig, self).process()

        # Rigs carry two conventional object sets: "controls_SET" for the
        # animatable controls and "pointcache_SET" for geometry to cache;
        # both are added to the created instance set.
        controls = cmds.sets(name="controls_SET", empty=True)
        pointcache = cmds.sets(name="pointcache_SET", empty=True)
        cmds.sets([controls, pointcache], forceElement=instance)

View file

@ -0,0 +1,18 @@
import avalon.maya
import colorbleed.api as api
class CreateYetiFur(avalon.maya.Creator):
    """Cached yeti fur extraction"""

    name = "yetiFur"
    label = "Yeti Fur"
    family = "colorbleed.yetifur"

    def process(self):
        # NOTE(review): `time_with_handles` is built but never used, and
        # `api.merge()` is called without any arguments -- this looks like
        # unfinished code; confirm the intended merge of these defaults.
        time_with_handles = api.OrderedDict(startFrame=True,
                                            endFrame=True,
                                            handles=True)
        api.merge()

View file

@ -0,0 +1,150 @@
import pprint
from avalon import api
class AbcLoader(api.Loader):
    """Specific loader of Alembic for the avalon.animation family"""

    families = ["colorbleed.animation", "colorbleed.camera"]
    representations = ["abc"]

    def process(self, name, namespace, context):
        """Reference the alembic grouped under "<namespace>:<name>"."""
        from maya import cmds

        cmds.loadPlugin("AbcImport.mll", quiet=True)

        # Prevent identical alembic nodes from being shared
        # Create unique namespace for the cameras

        nodes = cmds.file(self.fname,
                          namespace=namespace,
                          sharedReferenceFile=False,
                          groupReference=True,
                          groupName="{}:{}".format(namespace, name),
                          reference=True,
                          returnNewNodes=True)

        # Store the newly created nodes as the container's members
        self[:] = nodes
class CurvesLoader(api.Loader):
    """Specific loader of Curves for the avalon.animation family"""

    families = ["colorbleed.animation"]
    representations = ["curves"]

    def process(self, name, namespace, context):
        """Load the dependent rig and import atom curves onto its controls.

        Arguments:
            name (str): Subset name.
            namespace (str): Namespace to load into.
            context (dict): Full representation context.
        """
        from maya import cmds
        from avalon import maya

        cmds.loadPlugin("atomImportExport.mll", quiet=True)

        # Curves apply onto the rig they were extracted from,
        # which is their first dependency.
        rig = context["representation"]["dependencies"][0]
        container = maya.load(rig,
                              name=name,
                              namespace=namespace,
                              # Skip creation of Animation instance
                              post_process=False)

        try:
            control_set = next(
                node for node in cmds.sets(container, query=True)
                if node.endswith("controls_SET")
            )
        except StopIteration:
            # Fix: original raised with a bare "%s" placeholder and no
            # formatting argument, leaving "%s" literally in the message.
            raise TypeError("%s is missing controls_SET" % container)

        cmds.select(control_set)
        options = ";".join([
            "",
            "",
            "targetTime=3",
            "option=insert",
            "match=hierarchy",
            "selected=selectedOnly",
            "search=",
            "replace=",
            "prefix=",
            "suffix=",
            "mapFile=",
        ])

        with maya.maintained_selection():
            cmds.select(
                control_set,
                replace=True,

                # Support controllers being embedded in
                # additional selection sets.
                noExpand=False
            )

            nodes = cmds.file(
                self.fname,
                i=True,
                type="atomImport",
                renameAll=True,
                namespace=namespace,
                options=options,
                returnNewNodes=True,
            )

        # Container members: imported curves + rig members + rig container
        self[:] = nodes + cmds.sets(container, query=True) + [container]

    def post_process(self, name, namespace, context):
        """Create an animation instance, unless the task is not "animate"."""
        import os

        from maya import cmds
        from avalon import maya, io

        # Task-dependent post-process
        if os.getenv("AVALON_TASK") != "animate":
            return self.log.info(
                "No animation instance created due to task != animate"
            )

        # Find associated rig to these curves
        try:
            dependency = context["representation"]["dependencies"][0]
        except (KeyError, IndexError):
            return self.log.warning("No dependencies found for %s" % name)

        dependency = io.find_one({"_id": io.ObjectId(dependency)})
        _, _, dependency, _ = io.parenthood(dependency)

        # TODO(marcus): We are hardcoding the name "out_SET" here.
        #   Better register this keyword, so that it can be used
        #   elsewhere, such as in the Integrator plug-in,
        #   without duplication.
        output = next((node for node in self
                       if node.endswith("out_SET")), None)
        controls = next((node for node in self
                         if node.endswith("controls_SET")), None)

        assert output, "No out_SET in rig, this is a bug."
        assert controls, "No controls_SET in rig, this is a bug."

        with maya.maintained_selection():
            cmds.select([output, controls], noExpand=True)

            dependencies = [context["representation"]["_id"]]
            name = "anim{}_".format(dependency["name"].title())

            # TODO(marcus): Hardcoding the family here, better separate this.
            family = [f for f in self.families if f.endswith("animation")]
            assert len(family) == 1, ("None or multiple animation "
                                      "families found")
            family = family[0]

            maya.create(
                name=maya.unique_name(name, suffix="_SET"),
                family=family,
                options={"useSelection": True},
                data={"dependencies": " ".join(str(d) for d in dependencies)})
class HistoryLoader(api.Loader):
    """Specific loader of Curves for the avalon.animation family"""

    families = ["colorbleed.animation"]
    representations = ["history"]

    def process(self, name, namespace, context):
        # Stub: loading the "history" representation is not implemented.
        raise NotImplementedError("Can't load history yet.")

View file

@ -0,0 +1,23 @@
from maya import cmds
from avalon import api
class HistoryLookLoader(api.Loader):
    """Specific loader for lookdev"""

    families = ["colorbleed.historyLookdev"]
    representations = ["ma"]

    def process(self, name, namespace, context):
        from avalon import maya

        group = namespace + ":" + name

        # Reference the file grouped under "<namespace>:<name>"
        with maya.maintained_selection():
            nodes = cmds.file(self.fname,
                              namespace=namespace,
                              reference=True,
                              returnNewNodes=True,
                              groupReference=True,
                              groupName=group)

        self[:] = nodes

View file

@ -0,0 +1,50 @@
import os
import json
from maya import cmds
from avalon import api
class LookLoader(api.Loader):
    """Specific loader for lookdev"""

    families = ["colorbleed.lookdev"]
    representations = ["ma"]

    def process(self, name, namespace, context):
        """Reference the lookdev file once and apply its shaders.

        Re-uses an already existing reference to the same file. Shader
        relationships are read from a .json file next to the .ma file.
        """
        from avalon import maya

        try:
            # Query whether this file is already referenced
            existing_reference = cmds.file(self.fname,
                                           query=True,
                                           referenceNode=True)
        except RuntimeError as e:
            # NOTE: `e.message` is Python 2 only
            if e.message.rstrip() != "Cannot find the scene file.":
                raise

            self.log.info("Loading lookdev for the first time..")
            with maya.maintained_selection():
                nodes = cmds.file(
                    self.fname,
                    namespace=namespace,
                    reference=True,
                    returnNewNodes=True
                )
        else:
            self.log.info("Reusing existing lookdev..")
            nodes = cmds.referenceQuery(existing_reference, nodes=True)
            # The existing reference dictates the actual namespace
            namespace = nodes[0].split(":", 1)[0]

        # Assign shaders: relationships live in a sidecar .json
        self.fname = self.fname.rsplit(".", 1)[0] + ".json"
        if not os.path.isfile(self.fname):
            self.log.warning("Look development asset "
                             "has no relationship data.")
            # NOTE(review): this early return skips `self[:] = nodes` below,
            # so the container keeps no members -- confirm intended.
            return nodes

        with open(self.fname) as f:
            relationships = json.load(f)

        maya.apply_shaders(relationships, namespace)

        self[:] = nodes

View file

@ -0,0 +1,31 @@
from maya import cmds
from avalon import api
class ModelLoader(api.Loader):
    """Load models

    Stores the imported asset in a container named after the asset.

    """

    families = ["colorbleed.model"]
    representations = ["ma"]

    def process(self, name, namespace, context):
        from avalon import maya

        group = namespace + ":" + name

        # Reference the model grouped under "<namespace>:<name>"
        with maya.maintained_selection():
            nodes = cmds.file(self.fname,
                              namespace=namespace,
                              reference=True,
                              returnNewNodes=True,
                              groupReference=True,
                              groupName=group)

        # Assign default shader to meshes
        shapes = cmds.ls(nodes, type="mesh")
        cmds.sets(shapes, forceElement="initialShadingGroup")

        self[:] = nodes

View file

@ -0,0 +1,57 @@
from maya import cmds
from avalon import api
class RigLoader(api.Loader):
"""Specific loader for rigs
This automatically creates an instance for animators upon load.
"""
families = ["colorbleed.rig"]
representations = ["ma"]
def process(self, name, namespace, context):
nodes = cmds.file(self.fname,
namespace=namespace,
reference=True,
returnNewNodes=True,
groupReference=True,
groupName=namespace + ":" + name)
# Store for post-process
self[:] = nodes
def post_process(self, name, namespace, context):
from avalon import maya
# TODO(marcus): We are hardcoding the name "out_SET" here.
# Better register this keyword, so that it can be used
# elsewhere, such as in the Integrator plug-in,
# without duplication.
output = next(
(node for node in self
if node.endswith("out_SET")), None)
controls = next(
(node for node in self
if node.endswith("controls_SET")), None)
assert output, "No out_SET in rig, this is a bug."
assert controls, "No controls_SET in rig, this is a bug."
with maya.maintained_selection():
cmds.select([output, controls], noExpand=True)
dependencies = [context["representation"]["_id"]]
asset = context["asset"]["name"] + "_"
# TODO(marcus): Hardcoding the family here, better separate this.
maya.create(
name=maya.unique_name(asset, suffix="_SET"),
asset=context["asset"]["name"],
family="avalon.animation",
options={"useSelection": True},
data={
"dependencies": " ".join(str(d) for d in dependencies)
})

View file

@ -0,0 +1,177 @@
from collections import defaultdict
from maya import cmds
import cbra.utils.maya.node_uuid as node_uuid
import cbra.lib
import pyblish.api
class CollectInstancePerItem(pyblish.api.ContextPlugin):
    """Collect instances from the Maya scene and breaks them down per item id

    An instance is identified by having an _INST suffix
    and a .family user-defined attribute.

    All other user-defined attributes of the object set
    is accessible within each instance's data.

    This collector breaks the instances down to each Item member it contains,
    by using the IDs on the nodes in the instance it will split up the instance
    into separate instances for each unique "item" id it finds.

    Note:
        - Only breaks down based on children members and ignores parent members.
        - Discards members without IDs.

    """

    order = pyblish.api.CollectorOrder + 0.1
    hosts = ["maya"]
    label = "Instance per Item"

    # Only object sets of these families are collected
    _include_families = ["colorbleed.look"]

    def process(self, context):
        """Find all *_SET object sets and build instances from them."""
        invalid = list()
        for objset in cmds.ls("*_SET",
                              objectsOnly=True,
                              type='objectSet',
                              long=True,
                              recursive=True):  # Include namespace

            try:
                family = cmds.getAttr("{}.family".format(objset))
            except ValueError:
                self.log.error("Found: %s found, but no family." % objset)
                continue

            if family not in self._include_families:
                continue

            # ignore referenced sets
            if cmds.referenceQuery(objset, isNodeReferenced=True):
                continue

            instances = self.build_instances(context, objset)
            if not instances:
                # Log special error messages when objectSet is completely
                # empty (has no members) to clarify to artists the root of
                # their problem.
                if not cmds.sets(objset, query=True):
                    self.log.error("Instance objectSet has no members: "
                                   "{}".format(objset))

                self.log.error("No instances retrieved from objectSet: "
                               "{}".format(objset))
                invalid.append(objset)

        if invalid:
            raise RuntimeError("Invalid instances: {}".format(invalid))

        # Sort context based on family
        context[:] = sorted(
            context, key=lambda instance: instance.data("family"))

    def build_instances(self, context, objset):
        """Build the instances for a single instance objectSet

        Returns:
            list: The constructed instances from the objectSet.

        """
        self.log.info("Collecting: %s" % objset)
        short_name = objset.rsplit("|", 1)[-1].rsplit(":", 1)[-1]

        # NOTE(review): [:-5] strips five characters, matching an "_INST"
        # suffix, yet the sets collected above end in "_SET" (four chars)
        # -- confirm which suffix is intended.
        default_data = {"name": short_name[:-5],
                        "subset": "default"}

        # Get user data from user defined attributes
        user_data = dict()
        for attr in cmds.listAttr(objset, userDefined=True):
            try:
                value = cmds.getAttr("{}.{}".format(objset, attr))
                user_data[attr] = value
            except RuntimeError:
                # Some attribute types cannot be queried this way; skip
                continue

        # Maintain nested object sets
        members = cmds.sets(objset, query=True)
        members = cmds.ls(members, long=True)
        children = cmds.listRelatives(members,
                                      allDescendents=True,
                                      fullPath=True) or []

        # Exclude intermediate objects
        children = cmds.ls(children, noIntermediate=True, long=True)

        nodes = members + children
        nodes = list(set(nodes))

        # Group nodes using ids to an Item
        nodes_id = node_uuid.build_cache(nodes, include_without_ids=True)

        # Log warning for nodes without ids
        if None in nodes_id:
            self.log.warning("Skipping nodes without ids: "
                             "{}".format(nodes_id[None]))

        # ignore nodes without ids
        context.data["instancePerItemNodesWithoutId"] = nodes_id.pop(None,
                                                                     None)

        # Group node ids per item (the part before the last ":")
        item_groups = defaultdict(list)
        for id, nodes in nodes_id.iteritems():
            item_id = id.rsplit(":", 1)[0]
            item_groups[item_id].extend(nodes)

        instances = list()
        for item_id, item_nodes in item_groups.iteritems():
            ctx = node_uuid.parse_id("{}:fake_node_uuid".format(item_id))

            # Use itemPath to parse full blown context using official lib
            ctx = cbra.lib.parse_context(ctx['itemPath'])

            item = ctx.get('item', None)
            if item is None:
                self.log.info("Unparsed item id: {}".format(item_id))
                self.log.error("Item can't be parsed and seems to be "
                               "non-existent. Was an asset renamed? Or your"
                               "project set incorrectly?")
                raise RuntimeError("Item not parsed. See log for description.")

            instance = context.create_instance(objset)

            # Set the related members
            instance[:] = item_nodes
            instance.data['setMembers'] = item_nodes

            # Set defaults and user data
            instance.data.update(default_data.copy())
            instance.data.update(user_data.copy())

            # Override the label to be clear
            name = instance.data['name']
            instance.data['label'] = "{0} ({1})".format(name, item)

            # Store that the instance is collected per item
            instance.data['_instancePerItem'] = True
            instance.data['_itemContext'] = ctx

            assert "family" in instance.data, "No family data in instance"
            assert "name" in instance.data, ("No objectSet name data "
                                             "in instance")

            instances.append(instance)

        return instances

View file

@ -0,0 +1,156 @@
import os
import re
import pyseq
import glob
import pyblish.api
from maya import cmds
class SeletYetiCachesAction(pyblish.api.Action):
    """Select the nodes related to the collected file textures

    NOTE(review): the class name is misspelled ("Selet…") but renaming
    would change the public interface, so it is left as-is.
    """

    label = "Select yeti nodes"
    on = "succeeded"  # Only available after the plug-in succeeded
    icon = "search"  # Icon from Awesome Icon

    def process(self, context, plugin):
        """Select yeti nodes collected by `plugin` across all instances."""
        self.log.info("Finding textures..")

        # Gather all instances that have published results
        instances = []
        for result in context.data["results"]:
            instance = result["instance"]
            if instance is None:
                continue
            instances.append(instance)

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(instances, plugin)

        # Get the texture nodes from the instances
        nodes = []
        for instance in instances:
            texture_nodes = instance.data['yetiCaches'].keys()
            nodes.extend(texture_nodes)

        # Ensure unique
        nodes = list(set(nodes))

        if nodes:
            self.log.info("Selecting nodes: %s" % ", ".join(nodes))
            cmds.select(nodes, r=True, noExpand=True)
        else:
            self.log.info("No nodes found.")
            cmds.select(deselect=True)
def get_sequence(filename, pattern="%04d"):
    """Get pyseq sequence from filename

    Supports negative frame ranges like (-001, 0000, 0001 and -0001, 0000, 0001).

    Arguments:
        filename (str): The full path to filename containing the given pattern.
        pattern (str): The pattern to swap with the variable frame number.

    Returns:
        pyseq.Sequence: file sequence.

    """
    glob_pattern = filename.replace(pattern, "*")

    # Fix: replace the *escaped* pattern token inside the escaped filename.
    # The original replaced the raw pattern, which only worked by accident
    # (re.escape's set of escaped characters changed in Python 3.7).
    re_pattern = re.escape(filename).replace(re.escape(pattern), "-?[0-9]+")

    files = glob.glob(glob_pattern)
    files = [str(f) for f in files if re.match(re_pattern, f)]

    return pyseq.get_sequences(files)
class CollectYetiCaches(pyblish.api.InstancePlugin):
    """Collect used yeti caches.

    Collects the file sequences from pgYetiMaya.cacheFileName

    """

    order = pyblish.api.CollectorOrder + 0.495
    label = 'Yeti Caches'
    families = ["colorbleed.groom"]
    actions = [SeletYetiCachesAction]

    # Node type -> attribute holding the cache file path
    TYPES = {"pgYetiMaya": "cacheFileName"}

    def process(self, instance):
        """Collect cache sequences and register them as instance resources."""

        # Get textures from sets
        members = instance.data("setMembers")
        members = cmds.ls(members, dag=True, shapes=True, type="pgYetiMaya",
                          noIntermediate=True, long=True)
        if not members:
            raise RuntimeError("Instance appears to be empty (no members)")

        # Collect only those cache frames that are required
        # If handles are required it is assumed to already be included
        # in the start frame and end frames.
        # (e.g. using frame handle collector)
        start_frame = instance.data("startFrame")
        end_frame = instance.data("endFrame")
        required = set(range(int(start_frame), int(end_frame) + 1))

        history = cmds.listHistory(members) or []

        resources = instance.data.get("resources", [])
        yeti_caches = dict()
        for node_type, attr in self.TYPES.iteritems():
            for node in cmds.ls(history, type=node_type, long=True):
                attribute = "{0}.{1}".format(node, attr)

                # Source
                source = cmds.getAttr(attribute)
                if not source:
                    # NOTE(review): processing continues with the empty
                    # source after this error -- confirm intended.
                    self.log.error("Node does not have a file set: "
                                   "{0}".format(node))

                # Collect the source as expanded path because that's also
                # how the attribute must be 'set' for yeti nodes.
                source = os.path.realpath(cmds.workspace(expandName=source))

                # Collect the frames we need from the sequence
                sequences = get_sequence(source)
                files = list()
                for sequence in sequences:
                    for index, frame in enumerate(sequence.frames()):
                        if frame not in required:
                            continue
                        item = sequence[index]
                        files.append(item.path)

                # Define the resource
                resource = {"tags": ["maya", "yeti", "attribute"],
                            "node": node,
                            "attribute": attribute,
                            "source": source,  # required for resources
                            "files": files,  # required for resources
                            "subfolder": "caches"  # optional for resources
                            }
                resources.append(resource)

                # For validations
                yeti_caches[node] = {"attribute": attribute,
                                     "source": source,
                                     "sequences": sequences}

        # Store data on instance
        instance.data['yetiCaches'] = yeti_caches
        instance.data['resources'] = resources

View file

@ -0,0 +1,81 @@
import json
from maya import cmds
import pyblish_maya
import colorbleed.api
import cb.utils.maya.context as context
import cbra.utils.maya.layout as layout
def get_upstream_hierarchy_fast(nodes):
    """Return all parent paths of the given nodes.

    Passed in nodes must be long names! Each "|"-separated long name is
    walked from its deepest parent upwards; as soon as a parent was seen
    before the walk stops early (its ancestors are then already present).

    Fix: the original built a `matched` set but tested membership against
    the `parents` list (O(n) per check); the set is now actually used.

    Arguments:
        nodes (list): Long DAG paths, e.g. ["|grp|child|shape"].

    Returns:
        list: Unique parent paths, deepest-first per node, in
            first-encountered order.
    """
    matched = set()
    parents = []
    for node in nodes:
        hierarchy = node.split("|")
        num = len(hierarchy)
        for x in range(1, num - 1):
            parent = "|".join(hierarchy[:num - x])
            if parent in matched:
                # Ancestors of a known parent are already collected
                break
            parents.append(parent)
            matched.add(parent)

    return parents
class ExtractLayout(colorbleed.api.Extractor):
    """Extract Layout as both gpuCache and Alembic"""

    label = "Layout (gpuCache & alembic)"
    hosts = ["maya"]
    families = ["colorbleed.layout"]

    def process(self, instance):
        """Extract the instance hierarchy using cbra's layout extractor."""

        # Define extract output file path
        dir_path = self.staging_dir(instance)

        # Extraction settings with defaults
        start = instance.data.get("startFrame", 1)
        end = instance.data.get("endFrame", 1)
        step = instance.data.get("step", 1.0)
        placeholder = instance.data.get("placeholder", False)
        write_color_sets = instance.data.get("writeColorSets", False)
        renderable_only = instance.data.get("renderableOnly", False)
        visible_only = instance.data.get("visibleOnly", False)

        # Active animation layers are passed along as a JSON string
        layers = instance.data.get("animLayersActive", None)
        if layers:
            layers = json.loads(layers)
            self.log.info("Publishing with animLayers active: "
                          "{0}".format(layers))

        # Perform extraction
        self.log.info("Performing extraction..")
        with pyblish_maya.maintained_selection():
            # Get children hierarchy
            nodes = instance.data['setMembers']
            cmds.select(nodes, r=True, hierarchy=True)
            hierarchy = cmds.ls(selection=True, long=True)

            # Suspend evaluation and viewport refresh during the export
            with context.evaluation("off"):
                with context.no_refresh():
                    with context.active_anim_layers(layers):
                        layout.extract_layout(hierarchy,
                                              dir_path,
                                              start=start,
                                              end=end,
                                              step=step,
                                              placeholder=placeholder,
                                              write_color_sets=write_color_sets,
                                              renderable_only=renderable_only,
                                              visible_only=visible_only)

        self.log.info("Extracted instance '{0}' to: {1}".format(
            instance.name, dir_path))

View file

@ -0,0 +1,91 @@
import os
import maya.cmds as cmds
import pyblish.api
import colorbleed.api
import cbra.lib
from cb.utils.python.decorators import memorize
def isclose(a, b, rel_tol=1e-9, abs_tol=0.0):
    """Return True when `a` and `b` are approximately equal.

    Pure-python equivalent of PEP 485's `math.isclose` for Python 2:
    the difference must fall within the relative tolerance (scaled by
    the larger magnitude) or the absolute tolerance.
    """
    tolerance = max(rel_tol * max(abs(a), abs(b)), abs_tol)
    return abs(a - b) <= tolerance
@memorize
def is_published_path(path):
    """Return whether path is from a published file

    Results are memoized via the project `memorize` decorator; callers
    reset it through `is_published_path.cache.clear()`.
    """

    # Quick check (optimization) without going through the folder
    # structure
    if cbra.lib.DIR_PUBLISH.lower() not in path.lower():
        return False

    try:
        context = cbra.lib.parse_context(path)
    except RuntimeError:
        # Unparseable paths fall through to "not published"
        context = dict()

    # A published path parses into a full family/subset/version context
    return all([context.get("family", None),
                context.get("subset", None),
                context.get("version", None)])
class ValidateLayoutNodes(pyblish.api.InstancePlugin):
    """Validates that layout nodes behave to certain rules

    Gpu caches in a layout may not have sub-frame offsets, like offsets with a
    value after the decimal point. (e.g. 1.45)

    Gpu caches loaded in a layout MUST come from a published source that has
    family and version.

    """

    order = colorbleed.api.ValidateContentsOrder
    label = 'Layout Nodes'
    families = ['colorbleed.layout']
    actions = [colorbleed.api.SelectInvalidAction]

    @classmethod
    def get_invalid(cls, instance):
        """Return gpuCaches with sub-frame offsets or unpublished paths."""

        caches = cmds.ls(instance, type="gpuCache", long=True)

        # Validate sub-frame offsets
        invalid_offsets = list()
        for cache in caches:
            offset = cmds.getAttr("{}.animOffset".format(cache))
            if not isclose(offset, round(offset)):
                cls.log.warning("Invalid sub-frame offset on: %s" % cache)
                invalid_offsets.append(cache)

        # Validate gpuCache paths are from published files
        invalid_paths = list()
        for cache in caches:
            path = cmds.getAttr("{}.cacheFileName".format(cache))
            path = os.path.normpath(path)
            if not is_published_path(path):
                cls.log.warning("GpuCache path not from published file: "
                                "{0} -> {1}".format(cache, path))
                invalid_paths.append(cache)

        invalid = invalid_offsets + invalid_paths
        return invalid

    def process(self, instance):
        """Raise when the instance contains invalid gpuCache nodes."""

        # Clear cache only once per publish. So we store a value on
        # the context on the first instance so we clear only once.
        name = self.__class__.__name__
        key = "_plugin_{0}_processed".format(name)
        if not instance.context.data.get(key, False):
            is_published_path.cache.clear()
            instance.context.data[key] = True

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Invalid nodes found: {0}".format(invalid))

View file

@ -0,0 +1,83 @@
import pyblish.api
import colorbleed.api
import cbra.utils.maya.node_uuid as id_utils
import cbra.lib
class ValidateRelatedNodeIds(pyblish.api.InstancePlugin):
    """Validate nodes have related colorbleed ids.

    An ID is 'related' if its built in the current Item.

    Note that this doesn't ensure it's from the current Task. An ID created
    from `lookdev` has the same relation to the Item as one coming from others,
    like `rigging` or `modeling`.

    """

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.model']
    hosts = ['maya']
    label = 'Related Id Attributes'
    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]

    @classmethod
    def get_invalid(cls, instance):
        """Return the member nodes that are invalid"""

        context = instance.context
        current_file = context.data.get('currentFile', None)
        if not current_file:
            raise RuntimeError("No current file information: "
                               "{0}".format(current_file))

        try:
            context = cbra.lib.parse_context(current_file)
        except RuntimeError:
            # Fixes: use `except ... as`-compatible form (the py2-only
            # comma syntax was used before), include the file in the log
            # message (the "{0}" placeholder was missing, so the file was
            # silently dropped) and re-raise preserving the traceback
            # instead of `raise e`.
            cls.log.error("Can't generate UUIDs because scene isn't "
                          "in new-style pipeline: {0}".format(current_file))
            raise

        def to_item(id):
            """Split the item id part from a node id"""
            return id.rsplit(":", 1)[0]

        # Generate a fake id in the current context to retrieve the item
        # id prefix that should match with ids on the nodes
        fake_node = "__node__"
        ids = id_utils.generate_ids(context, [fake_node])
        id = ids[fake_node]
        item_prefix = to_item(id)

        # Collect members whose id does not share the item prefix
        invalid = list()
        invalid_items = set()
        for member in instance:
            member_id = id_utils.get_id(member)

            # skip nodes without ids
            if not member_id:
                continue

            if not member_id.startswith(item_prefix):
                invalid.append(member)
                invalid_items.add(to_item(member_id))

        # Log invalid item ids
        if invalid_items:
            for item_id in sorted(invalid_items):
                cls.log.warning("Found invalid item id: {0}".format(item_id))

        return invalid

    def process(self, instance):
        """Process all meshes"""

        # Ensure all nodes have a cbId
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Nodes found with non-related "
                               "asset IDs: {0}".format(invalid))

View file

@ -0,0 +1,167 @@
import os
from collections import defaultdict
import pyblish.api
import colorbleed.api
import cbra.lib
from cbra.utils.maya.abc import get_alembic_ids
from cbra.utils.maya.node_uuid import get_id
def get_subset_path(context):
    """Return the publish directory for the context's family and subset.

    Arguments:
        context (dict): Parsed cbra context containing at least the keys
            'itemPath', 'family' and 'subset'.

    Returns:
        str: "<itemPath>/<publish dir>/<family>/<subset>"
    """
    return os.path.join(context['itemPath'],
                        cbra.lib.DIR_PUBLISH,
                        context['family'],
                        context['subset'])
class ValidateUniqueIdsInItem(pyblish.api.InstancePlugin):
    """Checks whether IDs are unique across other subsets.

    This ensures a model to be published can't have ids which are already
    present in another subset. For example the "default" model can't have
    ids present in the "high" subset.

    Note:
        This will also invalidate the instance if it contains
        nodes that are present in another instance in the scene.
        So ensure the instance you're publishing actually has
        the correct set members.
    """
    order = colorbleed.api.ValidateMeshOrder
    families = ['colorbleed.model']
    hosts = ['maya']
    label = 'Unique Ids in Item'
    actions = [colorbleed.api.SelectInvalidAction]
    optional = True
    @classmethod
    def iter_invalid(cls, instance):
        """Yield members whose id clashes with another subset.

        Compares against (1) other model instances currently publishing for
        the same item and (2) the highest published alembic of every other
        subset of this item. Yields each clashing node; may yield the same
        node more than once if its id clashes in several places.
        """
        # Verbose flag only adds extra debug logging below
        verbose = instance.data.get("verbose", False)
        def _get_instance_ids(instance):
            """Collect ids in an instance"""
            nodes_per_id = defaultdict(list)
            for node in instance:
                node_id = get_id(node)
                # Nodes without an id are ignored here (not this check's job)
                if node_id:
                    nodes_per_id[node_id].append(node)
            return nodes_per_id
        nodes_per_id = _get_instance_ids(instance)
        if not nodes_per_id:
            return
        # Remaining ids still to be checked; shrinks as clashes are found
        ids_lookup = set(nodes_per_id.keys())
        instance_context = instance.data["instanceContext"]
        instance_subset = instance.data['subset']
        assert instance_context, "Instance must have 'instanceContext' data"
        assert instance_subset, "Instance must have 'subset' data"
        subsets_checked = set()
        subsets_checked.add(instance_subset)  # we can skip this subset
        # Compare with all other *currently publishing instances*
        # of family 'model' for this item
        for other_instance in instance.context:
            if other_instance is instance:
                continue
            if other_instance.data['subset'] == instance_subset:
                cls.log.error("Another instance has the same subset? "
                              "This should never happen.")
            if other_instance.data['family'] != "model":
                continue
            if other_instance.data['instanceContext']['item'] != \
                    instance_context['item']:
                cls.log.error("Also publishing model for other item? "
                              "This should never happen.")
                continue
            other_ids = _get_instance_ids(other_instance).keys()
            # Perform comparison
            intersection = ids_lookup.intersection(other_ids)
            if intersection:
                # Yield every node of this instance that holds a clashing id
                for node_id in intersection:
                    nodes = nodes_per_id[node_id]
                    for node in nodes:
                        yield node
                # Those that are invalid don't need to be checked again
                ids_lookup.difference_update(other_ids)
                if not ids_lookup:
                    # Once we have no ids to check for anymore we can already
                    # return
                    return
            subsets_checked.add(other_instance.data['subset'])
        # Compare with all previously *published instances*
        # of family 'model' for this item
        ctx = instance_context.copy()
        ctx['family'] = "model"
        published_subsets = cbra.lib.list_subsets(ctx)
        published_subsets = set(x for x in published_subsets if
                                x != instance_subset)
        for published_subset in published_subsets:
            ctx['subset'] = published_subset
            ctx['subsetPath'] = get_subset_path(ctx)
            versions = cbra.lib.list_versions(ctx)
            version = cbra.lib.find_highest_version(versions)
            if not version:
                cls.log.debug("No published version for "
                              "'model': {0}".format(published_subset))
                continue
            ctx['currentVersion'] = version
            # Only the alembic export of the published subset is compared
            publish_abc = cbra.lib.get_filepath(ctx) + ".abc"
            if not os.path.exists(publish_abc):
                cls.log.error("Published file to compare with does not exist: "
                              "{0}".format(publish_abc))
                continue
            if verbose:
                cls.log.debug("Comparing with: {0}".format(publish_abc))
            abc_ids = set(get_alembic_ids(publish_abc).values())
            # Perform comparison
            intersection = ids_lookup.intersection(abc_ids)
            if intersection:
                for node_id in intersection:
                    nodes = nodes_per_id[node_id]
                    for node in nodes:
                        yield node
                # Those that are invalid don't need to be checked again
                ids_lookup.difference_update(abc_ids)
                if not ids_lookup:
                    # Once we have no ids to check for anymore we can already
                    # return
                    return
        return
    @classmethod
    def get_invalid(cls, instance):
        """Return all invalid members as a list (materialized iterator)."""
        return list(cls.iter_invalid(instance))
    def process(self, instance):
        """Process all meshes"""
        # any() stops at the first invalid node, so validation is lazy
        if any(self.iter_invalid(instance)):
            raise RuntimeError("Invalid nodes found in {0}".format(instance))

View file

@ -0,0 +1,19 @@
import pyblish.api
class DebugPlugin(pyblish.api.InstancePlugin):
    """Log every instance and its data just before integration.

    Purely diagnostic: dumps the instance and its ``data`` dict to the
    plug-in log so state can be inspected right before the integrators run.
    """

    label = "Debug"
    order = pyblish.api.IntegratorOrder - 0.4

    def process(self, instance):
        import pprint

        # BUGFIX: ``self.log`` is a logging.Logger, not a callable, so the
        # original ``self.log(...)`` raised TypeError. Route output through
        # ``log.info`` and use ``pformat`` so the dump lands in the log
        # instead of being printed to stdout.
        self.log.info("\n\n----------------------")
        self.log.info("Instance")
        self.log.info(pprint.pformat(instance))
        self.log.info("\n\n----------------------")
        self.log.info("Instance.data")
        self.log.info(pprint.pformat(instance.data))

View file

@ -0,0 +1,85 @@
import os
import shutil
import pyblish_cb.lib
import colorbleed.api
class IntegrateColorbleedAssets(colorbleed.api.Integrator):
    """Name and position instances on disk for instances.

    The files are transferred from the `extractDir` to the
    computed `integrationDir` and are renamed as:
        - "{item}_{family}_{subsetName}_{version}.{ext}"

    Assumptions:
        - Each extracted instance is 1 file (no directories)
    """
    label = "Asset"
    families = ["colorbleed.model", "colorbleed.rig", "colorbleed.pointcache",
                "colorbleed.proxy", "colorbleed.layout", "colorbleed.look",
                "colorbleed.vrmeshReplace", "colorbleed.review",
                "colorbleed.instancer", "colorbleed.camera",
                "colorbleed.mayaAscii",
                "colorbleed.furYeti"]
    def process(self, instance):
        """Copy each extracted file into its computed integration directory.

        Stores `integrationDir` and `integrationVersion` on the instance for
        downstream plug-ins; pops `integrationDir` again if writing fails.
        """
        super(IntegrateColorbleedAssets, self).process(instance)
        self.log.info("Integrating {0}..".format(instance))
        integration = pyblish_cb.lib.compute_integration(instance)
        # Store reference for upcoming plug-ins
        instance.data["integrationDir"] = integration['path']
        instance.data["integrationVersion"] = integration['versionNum']
        path = integration['path']
        data = integration.copy()
        try:
            if not os.path.exists(path):
                os.makedirs(path)
            self.log.info("Moving files to %s" % path)
            tmp = instance.data["extractDir"]
            for src in (os.path.join(tmp, f) for f in os.listdir(tmp)):
                self.log.debug("Integrating %s" % src)
                # Source must be a file
                if not os.path.isfile(src):
                    self.log.error("Source is not a file: {0}".format(src))
                    continue
                # TODO(marcus): Consider files without extension
                # NOTE(review): this splits the full path, not the basename;
                # a dot anywhere in the directory path would yield a wrong
                # extension -- verify extractDir paths never contain dots.
                data["ext"] = src.split(".", 1)[-1]
                dst = os.path.join(path, "{item}_"
                                         "{family}_"
                                         "{subsetName}_"
                                         "{version}.{ext}".format(
                    **data))
                # Copy
                self.log.info("\"%s\" -> \"%s\"" % (src, dst))
                shutil.copyfile(src, dst)
            # NOTE(review): this logs a ".Version" tag but no tag call is
            # made for it -- presumably a leftover; confirm intent.
            self.log.debug("Tagged %s with .Version" % path)
            try:
                # NOTE(review): `cquery` is never imported in this file, so
                # reaching this line raises NameError -- needs an import.
                subset_path = os.path.dirname(path)
                cquery.tag(subset_path, ".Subset")
                self.log.debug("Tagged %s with .Subset" % subset_path)
            except cquery.TagExists:
                pass
        except OSError as e:
            # If, for whatever reason, this instance did not get written.
            instance.data.pop("integrationDir")
            raise e
        except Exception as e:
            raise Exception("An unknown error occured: %s" % e)

View file

@ -0,0 +1,47 @@
import os
import shutil
import pyblish.api
import colorbleed.api
class IntegrateFiles(colorbleed.api.Integrator):
    """Copy each queued (source, destination) transfer to disk."""

    order = pyblish.api.IntegratorOrder + 0.1
    label = "Transfer Files"

    def process(self, instance):
        """Copy every pair registered in ``instance.data['transfers']``.

        Each entry is a (srcPath, destPath) pair:
            - srcPath: Source path (must be absolute!)
            - destPath: Destination path (can be relative)

        Copies that would overwrite their own source are skipped and
        destination folders are created on demand.
        """
        super(IntegrateFiles, self).process(instance)

        # Different nodes might register the same transfer
        for src, dest in instance.data.get("transfers", []):
            self.log.info("Copying: {0} -> {1}".format(src, dest))

            # Copying a file onto itself would truncate it; skip.
            if os.path.normpath(dest) == os.path.normpath(src):
                self.log.info("Skip copy of resource file: {0}".format(src))
                continue

            folder = os.path.dirname(dest)
            if not os.path.exists(folder):
                os.makedirs(folder)

            shutil.copyfile(src, dest)

View file

@ -0,0 +1,41 @@
import pyblish.api
import colorbleed.api
class ValidateLookNodeIds(pyblish.api.InstancePlugin):
    """Validate nodes have colorbleed id attributes.

    All look sets should have id attributes.
    """

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look Id Attributes'
    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]

    @staticmethod
    def get_invalid(instance):
        """Return the look set nodes missing the `mbId` attribute."""
        import maya.cmds as cmds

        return [node for node in instance.data["lookSets"]
                if not cmds.attributeQuery("mbId", node=node, exists=True)]

    def process(self, instance):
        """Fail when any look set node lacks an id attribute."""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Nodes found without "
                               "asset IDs: {0}".format(invalid))

View file

@ -0,0 +1,62 @@
from maya import cmds
import pyblish.api
import colorbleed.api
class ValidateRigNodeIds(pyblish.api.InstancePlugin):
    """Validate nodes in instance have colorbleed id attributes.

    To fix this use the action to select the invalid nodes. Identify whether
    these are nodes created locally to the rig; if they are not they
    should've gotten their ID elsewhere! This is important, because then you
    should NOT fix it in your scene but earlier in the pipeline. If these
    invalid nodes are local to your rig then you should generate ids for
    them.

    For Dummies:
        For the pipeline it's important in further stages to identify
        exactly "what node is what node" -- each node stores an ID like
        its own passport. This validator tells you which nodes have no
        such passport (ID).

    Warning:
        This does NOT validate the IDs are unique in the instance.
    """

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.rig',
                'colorbleed.rigcontrols',
                "colorbleed.rigpointcache"]
    hosts = ['maya']
    label = 'Rig Id Attributes'
    actions = [colorbleed.api.SelectInvalidAction]

    # includes: yeti grooms and v-ray fur, etc.
    TYPES = ("transform", "mesh", "nurbsCurve", "geometryShape")

    @staticmethod
    def get_invalid(instance):
        """Return non-intermediate nodes of TYPES without an `mbId`."""
        # filter to nodes of specific types
        candidates = cmds.ls(instance, noIntermediate=True,
                             long=True, type=ValidateRigNodeIds.TYPES)

        # todo: refactor `mbId` when attribute is updated
        return [node for node in candidates
                if not cmds.attributeQuery("mbId", node=node, exists=True)]

    def process(self, instance):
        """Fail when any relevant node lacks an id attribute."""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Nodes found without "
                               "asset IDs: {0}".format(invalid))

View file

@ -0,0 +1,85 @@
from collections import defaultdict
import pyblish.api
import colorbleed.api
class ValidateRigPointcacheNodeIds(pyblish.api.InstancePlugin):
    """Validate rig pointcache_SET nodes have ids.

    The nodes in a rig's pointcache_SET must all have node IDs
    that are all unique.

    Geometry in a rig should be using published model's geometry.
    As such when this validation doesn't pass it means you're using
    local newly created nodes that are not coming from a published
    model file. Ensure you update the ids from the model.
    """

    order = colorbleed.api.ValidateContentsOrder
    families = ['colorbleed.rig', "colorbleed.rigpointcache"]
    hosts = ['maya']
    label = 'Rig Pointcache Node Ids'
    actions = [colorbleed.api.SelectInvalidAction]

    # Node types excluded from the id requirement (e.g. constraints)
    ignore_types = ("constraint",)

    @classmethod
    def get_invalid(cls, instance):
        """Return nodes with a missing or duplicated `mbId` attribute.

        Collects every pointcache_SET member plus its full hierarchy,
        filters out ignored node types, then reports nodes that either
        lack the id attribute or share an id with another node.
        """
        from maya import cmds

        # Get pointcache_SET members and their full hierarchies
        sets = cmds.ls(instance, type='objectSet')
        pointcache_sets = [x for x in sets if x == 'pointcache_SET']
        nodes = list()
        for s in pointcache_sets:
            members = cmds.sets(s, query=True)
            members = cmds.ls(members, long=True)  # ensure long names
            descendents = cmds.listRelatives(members,
                                             allDescendents=True,
                                             fullPath=True) or []
            descendents = cmds.ls(descendents, noIntermediate=True, long=True)
            hierarchy = members + descendents
            nodes.extend(hierarchy)

        # ignore certain node types (e.g. constraints)
        ignore = cmds.ls(nodes, type=cls.ignore_types, long=True)
        if ignore:
            ignore = set(ignore)
            nodes = [node for node in nodes if node not in ignore]

        # Group nodes by id, tracking those without any id
        missing = list()
        ids = defaultdict(list)
        for node in nodes:
            has_id = cmds.attributeQuery("mbId", node=node, exists=True)
            if not has_id:
                missing.append(node)
                continue
            uuid = cmds.getAttr("{}.mbId".format(node))
            ids[uuid].append(node)

        # FIX: use .items() (works on Python 2 and 3; `iteritems` is
        # Python 2 only) and don't shadow the `nodes` list built above.
        non_uniques = list()
        for uuid, grouped in ids.items():
            if len(grouped) > 1:
                non_uniques.extend(grouped)

        if missing:
            cls.log.warning("Missing node ids: {0}".format(missing))
        if non_uniques:
            cls.log.warning("Non unique node ids: {0}".format(non_uniques))

        invalid = missing + non_uniques
        return invalid

    def process(self, instance):
        """Fail when any pointcache node id is missing or not unique."""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Missing or non-unique node IDs: "
                               "{0}".format(invalid))

View file

@ -0,0 +1,86 @@
from maya import cmds
import pyblish.api
import colorbleed.api
class ValidateRigPointcacheShapeRenderStats(pyblish.api.Validator):
    """Ensure all render stats are set to the default values."""

    order = colorbleed.api.ValidateMeshOrder
    families = ['colorbleed.model']
    hosts = ['maya']
    category = 'model'
    optional = False
    version = (0, 1, 0)
    label = 'Rig Pointcache Shape Default Render Stats'
    actions = [colorbleed.api.SelectInvalidAction]

    # Expected default value per renderStat attribute
    defaults = {'castsShadows': 1,
                'receiveShadows': 1,
                'motionBlur': 1,
                'primaryVisibility': 1,
                'smoothShading': 1,
                'visibleInReflections': 1,
                'visibleInRefractions': 1,
                'doubleSided': 1,
                'opposite': 0}

    # Node types excluded from the check (e.g. constraints)
    ignore_types = ("constraint",)

    @classmethod
    def get_pointcache_nodes(cls, instance):
        """Return pointcache_SET members plus their full hierarchies.

        Ignored node types (see `ignore_types`) are filtered out.
        """
        # Get pointcache_SET
        sets = cmds.ls(instance, type='objectSet')
        pointcache_sets = [x for x in sets if x == 'pointcache_SET']
        nodes = list()
        for s in pointcache_sets:
            members = cmds.sets(s, q=1)
            members = cmds.ls(members, long=True)  # ensure long names
            descendents = cmds.listRelatives(members,
                                             allDescendents=True,
                                             fullPath=True) or []
            descendents = cmds.ls(descendents,
                                  noIntermediate=True,
                                  long=True)
            hierarchy = members + descendents
            nodes.extend(hierarchy)

        # ignore certain node types (e.g. constraints)
        ignore = cmds.ls(nodes, type=cls.ignore_types, long=True)
        if ignore:
            ignore = set(ignore)
            nodes = [node for node in nodes if node not in ignore]

        return nodes

    @classmethod
    def get_invalid(cls, instance):
        """Return shapes whose renderStats deviate from the defaults."""
        # It seems the "surfaceShape" and those derived from it have
        # `renderStat` attributes.
        nodes = cls.get_pointcache_nodes(instance)
        shapes = cmds.ls(nodes, long=True, type='surfaceShape')

        invalid = []
        for shape in shapes:
            # FIX: iterate via cls with .items() (Python 2/3 compatible)
            # instead of the hard-coded class name with py2-only iteritems()
            for attr, required_value in cls.defaults.items():
                if not cmds.attributeQuery(attr, node=shape, exists=True):
                    continue
                value = cmds.getAttr('{node}.{attr}'.format(node=shape,
                                                            attr=attr))
                if value != required_value:
                    # FIX: report each shape only once, even when several
                    # of its attributes deviate (was appended per attr)
                    invalid.append(shape)
                    break

        return invalid

    def process(self, instance):
        """Fail when any pointcache shape has non-default render stats."""
        invalid = self.get_invalid(instance)
        if invalid:
            raise ValueError("Shapes with non-standard renderStats "
                             "found: {0}".format(invalid))

View file

@ -0,0 +1,63 @@
import pyblish.api
import colorbleed.api
class ValidateUniqueNodeIds(pyblish.api.InstancePlugin):
    """Validate nodes have colorbleed id attributes"""

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.model']
    hosts = ['maya']
    label = 'Unique Id Attributes'
    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]

    @staticmethod
    def get_invalid_dict(instance):
        """Return a dictionary mapping of id key to list of member nodes"""
        import maya.cmds as cmds
        from collections import defaultdict

        uuid_attr = "mbId"

        # Collect each id with their members
        ids = defaultdict(list)
        for member in instance:
            has_attr = cmds.attributeQuery(uuid_attr, node=member,
                                           exists=True)
            if not has_attr:
                continue
            mbid = cmds.getAttr("{}.{}".format(member, uuid_attr))
            ids[mbid].append(member)

        # Skip those without IDs (if everything should have an ID that should
        # be another validation)
        ids.pop(None, None)

        # Take only the ids with more than one member.
        # FIX: use .items() (Python 2/3 compatible; `iteritems` is py2-only)
        # and don't shadow the built-in `id`.
        invalid = dict((uuid, members) for uuid, members in ids.items()
                       if len(members) > 1)
        return invalid

    @classmethod
    def get_invalid(cls, instance):
        """Return the member nodes that are invalid"""
        invalid_dict = cls.get_invalid_dict(instance)

        # Flatten the duplicate-id mapping into a plain node list.
        # FIX: .values() instead of Python 2-only itervalues().
        invalid = list()
        for members in invalid_dict.values():
            invalid.extend(members)
        return invalid

    def process(self, instance):
        """Process all meshes

        Raises when any id is shared by more than one node.
        """
        # Ensure all nodes have a unique cbId
        invalid = self.get_invalid_dict(instance)
        if invalid:
            raise RuntimeError("Nodes found with non-unique "
                               "asset IDs: {0}".format(invalid))

View file

@ -0,0 +1,20 @@
import pyblish.api
import colorbleed.api
class ValidateUnitsAngular(pyblish.api.ContextPlugin):
    """Scene angular units must be in degrees"""

    order = colorbleed.api.ValidateSceneOrder
    label = "Units (angular)"
    families = ["colorbleed.rig",
                "colorbleed.model",
                "colorbleed.pointcache",
                "colorbleed.curves"]

    def process(self, context):
        """Assert the collected angular unit is 'deg'.

        Raises:
            AssertionError: when `angularUnits` is missing or not degrees.
        """
        # FIX: access context.data as a mapping -- calling it, as in
        # `context.data('angularUnits')`, relies on the deprecated callable
        # form and is inconsistent with the other unit validators here
        # (e.g. `context.data['fps']`).
        units = context.data.get('angularUnits')
        self.log.info('Units (angular): {0}'.format(units))
        assert units and units == 'deg', (
            "Scene angular units must be degrees")

View file

@ -0,0 +1,28 @@
import pyblish.api
import colorbleed.api
class ValidateUnitsFps(pyblish.api.ContextPlugin):
    """Validate the scene linear, angular and time units."""

    order = colorbleed.api.ValidateSceneOrder
    label = "Units (fps)"
    families = ["colorbleed.rig",
                "colorbleed.pointcache",
                "colorbleed.curves"]
    actions = [colorbleed.api.RepairAction]
    optional = True

    def process(self, context):
        """Assert the collected frame rate is exactly 25 FPS."""
        fps = context.data['fps']
        self.log.info('Units (time): {0} FPS'.format(fps))
        assert fps and fps == 25.0, "Scene must be 25 FPS"

    @classmethod
    def repair(cls):
        """Fix the current FPS setting of the scene, set to PAL (25.0 fps)."""
        import maya.cmds as cmds

        # "pal" is Maya's named time unit for 25 frames per second
        cmds.currentUnit(time="pal")

View file

@ -0,0 +1,20 @@
import pyblish.api
import colorbleed.api
class ValidateUnitsLinear(pyblish.api.ContextPlugin):
    """Scene must be in linear units"""

    order = colorbleed.api.ValidateSceneOrder
    label = "Units (linear)"
    families = ["colorbleed.rig",
                "colorbleed.model",
                "colorbleed.pointcache",
                "colorbleed.curves"]

    def process(self, context):
        """Assert the collected linear unit is 'cm'.

        Raises:
            AssertionError: when `linearUnits` is missing or not centimeters.
        """
        # FIX: access context.data as a mapping -- calling it, as in
        # `context.data('linearUnits')`, relies on the deprecated callable
        # form and is inconsistent with the other unit validators here.
        units = context.data.get('linearUnits')
        self.log.info('Units (linear): {0}'.format(units))
        assert units and units == 'cm', ("Scene linear units must "
                                         "be centimeters")

View file

@ -0,0 +1,25 @@
import pyblish.api
class CollectAlembicCBAttrs(pyblish.api.InstancePlugin):
    """Collects settings for the Alembic extractor"""

    order = pyblish.api.CollectorOrder + 0.499
    families = ['colorbleed.model', 'colorbleed.pointcache']
    label = "Alembic Colorbleed Attrs"

    def process(self, instance):
        """Append the "cb" attribute prefix and force-enable export flags."""
        prefixes = instance.data.get("attrPrefix", [])
        prefixes.append("cb")
        instance.data['attrPrefix'] = prefixes

        # Visibility keys, creases and UVs must always be written
        instance.data.update({'writeVisibility': True,
                              'writeCreases': True,
                              'uvWrite': True})

View file

@ -0,0 +1,18 @@
import os
from maya import cmds
import pyblish.api
class CollectMayaCurrentFile(pyblish.api.ContextPlugin):
    """Inject the current working file into context"""

    order = pyblish.api.CollectorOrder - 0.5
    label = "Maya Current File"
    hosts = ['maya']

    def process(self, context):
        """Store the normalized scene path as context.data['currentFile']."""
        scene_name = cmds.file(query=True, sceneName=True)
        context.data['currentFile'] = os.path.normpath(scene_name)

View file

@ -0,0 +1,40 @@
from maya import cmds
import pyblish.api
class CollectMayaHistory(pyblish.api.InstancePlugin):
    """Collect history for instances from the Maya scene.

    Note:
        This removes render layers collected in the history

    This is separate from Collect Instances so we can target it towards only
    specific family types.
    """

    order = pyblish.api.CollectorOrder + 0.1
    hosts = ["maya"]
    label = "Maya History"
    families = ["colorbleed.rig"]
    verbose = False

    def process(self, instance):
        """Extend the instance membership with its full (long-name) history."""
        # Collect the history with long names
        history = cmds.ls(cmds.listHistory(instance, leaf=False) or [],
                          long=True)

        # Render layers are invalid members; strip them from the history
        layers = set(cmds.ls(history, type="renderLayer", long=True))
        if layers:
            history = [node for node in history if node not in layers]

        # Merge history with the current members, keeping each node once
        instance[:] = list(set(instance[:] + history))

View file

@ -0,0 +1,54 @@
import pyblish.api
import maya.cmds as cmds
class CollectInstancerCaches(pyblish.api.InstancePlugin):
    """For an Instancer collect the history.

    This would collect its particles with nucleus and cacheFile
    """

    order = pyblish.api.CollectorOrder + 0.495
    families = ['colorbleed.instancer']
    label = "Instancer Cache Files"

    def process(self, instance):
        """Register every linked cacheFile of the instancer as a resource."""
        members = instance.data.get("exactExportMembers", None)
        assert members, "Instancer must have members"

        resources = instance.data.get("resources", [])
        errors = False

        for cache in cmds.ls(members, type="cacheFile"):
            self.log.debug("Collecting cache files for: {0}".format(cache))
            files = cmds.cacheFile(cache, query=True, fileName=True)

            # An unlinked cacheFile node yields no files; flag it and
            # keep collecting so all problems are reported at once.
            if not files:
                errors = True
                self.log.error("Cache has no files: {0}".format(cache))
                continue

            # TODO: Filter the files to only contain the required frame range.
            resources.append({"tags": ["maya", "node", "cacheFile"],
                              "node": cache,
                              "source": files[0],  # first file is the .xml
                              "files": files,
                              "subfolder": "caches"})

        # Store on the instance
        instance.data['resources'] = resources

        if errors:
            raise RuntimeError("Errors during collecting caches. "
                               "Are the caches linked correctly?")

View file

@ -0,0 +1,63 @@
import pyblish.api
import maya.cmds as cmds
class CollectInstancerHistory(pyblish.api.InstancePlugin):
    """For an Instancer collect the history.

    This would collect its particles with nucleus and cacheFile, plus the
    shapes feeding the instancer's `inputHierarchy`, and merge them into
    the instance membership. Also stores the gathered nodes under
    `instance.data['exactExportMembers']` for the exporter.
    """
    order = pyblish.api.CollectorOrder + 0.49
    families = ['colorbleed.instancer']
    label = "Instancer History"
    def process(self, instance):
        members = instance.data["setMembers"]
        # Include history of the instancer
        instancers = cmds.ls(members, type="instancer")
        if not instancers:
            self.log.info("No instancers found")
            return
        # Copy so we don't mutate the `instancers` list while extending
        export = instancers[:]
        # Get the required inputs of the particles from history
        history = cmds.listHistory(instancers) or []
        particles = cmds.ls(history, type="nParticle")
        export.extend(particles)
        if particles:
            self.log.info("Particles: {0}".format(particles))
            particles_history = cmds.listHistory(particles) or []
            self.log.debug("Particle history: {0}".format(particles_history))
            # The nucleus solver drives the particles; required for export
            nucleus = cmds.ls(particles_history, long=True, type="nucleus")
            self.log.info("Collected nucleus: {0}".format(nucleus))
            export.extend(nucleus)
            # Cache nodes feeding the particles must travel along too
            caches = cmds.ls(particles_history, long=True, type="cacheFile")
            self.log.info("Collected caches: {0}".format(caches))
            export.extend(caches)
        # Collect input shapes for the instancer
        for instancer in cmds.ls(instancers, exactType="instancer", long=True):
            attr = "{}.inputHierarchy".format(instancer)
            inputs = cmds.listConnections(attr, source=True,
                                          destination=False) or []
            export.extend(inputs)
        # Add it to the instance
        data = instance[:]
        data.extend(export)
        # Ensure unique objects only
        data = list(set(data))
        self.log.info("Setting members to {0}".format(data))
        instance[:] = data
        # Store the recommended export selection so the export can do it
        # accordingly
        instance.data["exactExportMembers"] = export

View file

@ -0,0 +1,117 @@
from maya import cmds
import pyblish.api
class CollectMindbenderInstances(pyblish.api.ContextPlugin):
    """Gather instances by objectSet and pre-defined attribute

    This collector takes into account assets that are associated with
    an objectSet and marked with a unique identifier;

    Identifier:
        id (str): "pyblish.avalon.instance"

    Supported Families:
        avalon.model: Geometric representation of artwork
        avalon.rig: An articulated model for animators.
            A rig may contain a series of sets in which to identify
            its contents.
            - cache_SEL: Should contain cachable polygonal meshes
            - controls_SEL: Should contain animatable controllers for
              animators
            - resources_SEL: Should contain nodes that reference external
              files

            Limitations:
                - Only Maya is supported
                - One (1) rig per scene file
                - Unmanaged history, it is up to the TD to ensure
                  history is up to par.
        avalon.animation: Pointcache of `avalon.rig`

            Limitations:
                - Does not take into account nodes connected to those
                  within an objectSet. Extractors are assumed to export
                  with history preserved, but this limits what they will
                  be able to achieve and the amount of data available
                  to validators.
    """

    label = "Collect Mindbender Instances"
    order = pyblish.api.CollectorOrder
    hosts = ["maya"]

    def process(self, context):
        """Create one pyblish instance per tagged objectSet in the scene."""
        objectset = cmds.ls("*.id", long=True, type="objectSet",
                            recursive=True, objectsOnly=True)
        for objset in objectset:

            members = cmds.sets(objset, query=True)
            if members is None:
                self.log.info("Skipped following Set: \"%s\" " % objset)
                continue

            if not cmds.attributeQuery("id", node=objset, exists=True):
                continue

            id_attr = "{}.id".format(objset)
            if cmds.getAttr(id_attr) != "pyblish.avalon.instance":
                continue

            # The developer is responsible for specifying
            # the family of each instance.
            has_family = cmds.attributeQuery("family", node=objset,
                                             exists=True)
            assert has_family, "\"%s\" was missing a family" % objset

            data = dict()
            # Apply each user defined attribute as data
            for attr in cmds.listAttr(objset, userDefined=True) or list():
                try:
                    value = cmds.getAttr("{}.{}".format(objset, attr))
                except Exception:
                    # Some attributes cannot be read directly,
                    # such as mesh and color attributes. These
                    # are considered non-essential to this
                    # particular publishing pipeline.
                    value = None
                data[attr] = value

            # Collect members
            members = cmds.ls(members, long=True) or []
            # FIX: listRelatives returns None when there are no descendants;
            # guard with `or []` so the concatenation below cannot raise
            # TypeError (matches the guard used elsewhere in this config).
            children = cmds.listRelatives(members,
                                          allDescendents=True,
                                          fullPath=True) or []
            parents = self.get_all_parents(members)
            members_hierarchy = list(set(members + children + parents))

            # Create the instance
            name = cmds.ls(objset, long=False)[0]  # use short name
            instance = context.create_instance(data.get("name", name))
            instance[:] = members_hierarchy
            instance.data["setMembers"] = members
            instance.data.update(data)

            # Produce diagnostic message for any graphical
            # user interface interested in visualising it.
            self.log.info("Found: \"%s\" " % instance.data["name"])

    def get_all_parents(self, nodes):
        """Get all parents by using string operations (optimization)

        Args:
            nodes (list): the nodes which are found in the objectSet

        Returns:
            list: unique parent paths of all given nodes
        """
        parents = []
        for node in nodes:
            splitted = node.split("|")
            items = ["|".join(splitted[0:i]) for i in range(2, len(splitted))]
            parents.extend(items)
        return list(set(parents))

View file

@ -0,0 +1,221 @@
from maya import cmds
from cb.utils.maya import context
import cbra.utils.maya.node_uuid as id_utils
import pyblish.api
# Shape "render stat" attributes that are considered part of a look
SHAPE_ATTRS = {"castsShadows",
               "receiveShadows",
               "motionBlur",
               "primaryVisibility",
               "smoothShading",
               "visibleInReflections",
               "visibleInRefractions",
               "doubleSided",
               "opposite"}
def get_look_attrs(node):
    """Return the look-relevant attribute names of `node`.

    These are the "changed" attributes (those that have edits applied in
    the current scene): the user-defined ones, plus -- for shapes -- any
    changed render stat attributes. The `cbId` attribute is excluded.

    Returns:
        list: Attribute names to extract
    """
    attrs = cmds.listAttr(node, userDefined=True,
                          changedSinceFileOpen=True) or []

    # For shapes allow render stat changes
    if cmds.objectType(node, isAType="shape"):
        changed = cmds.listAttr(node, changedSinceFileOpen=True) or []
        attrs.extend(attr for attr in changed if attr in SHAPE_ATTRS)

    if "cbId" in attrs:
        attrs.remove("cbId")

    return attrs
class CollectLook(pyblish.api.InstancePlugin):
    """Collect look data for instance.

    For the shapes/transforms of the referenced object to collect look for
    retrieve the user-defined attributes (like V-ray attributes) and their
    values as they were created in the current scene.

    For the members of the instance collect the sets (shadingEngines and
    other sets, e.g. VRayDisplacement) they are in along with the exact
    membership relations.

    Collects:
        lookAttributes (list): Nodes in instance with their altered attributes
        lookSetRelations (list): Sets and their memberships
        lookSets (list): List of set names included in the look
    """
    order = pyblish.api.CollectorOrder + 0.4
    families = ["colorbleed.look"]
    label = "Collect Look"
    hosts = ["maya"]
    # Ignore specifically named sets (check with endswith)
    IGNORE = ["pointcache_SET", "controls_SET", "_INST"]
    def collect(self, instance):
        """Gather set memberships and changed attributes for the look.

        Stores results on the instance as `lookAttributes`,
        `lookSetRelations` and `lookSets`.
        """
        # Whether to log information verbosely
        verbose = instance.data.get("verbose", False)
        self.log.info("Looking for look associations "
                      "for {0}..".format(instance.data['label']))
        # Get view sets (so we can ignore those sets later)
        model_panels = cmds.getPanel(type="modelPanel")
        view_sets = set()
        for panel in model_panels:
            view_set = cmds.modelEditor(panel, query=True, viewObjects=True)
            if view_set:
                view_sets.add(view_set)
        # Discover related object sets
        self.log.info("Gathering sets..")
        sets = dict()
        for node in instance:
            node_sets = cmds.listSets(object=node, extendToShape=False) or []
            if verbose:
                self.log.info("Found raw sets "
                              "{0} for {1}".format(node_sets, node))
            if not node_sets:
                continue
            # Exclude deformer sets
            deformer_sets = cmds.listSets(object=node,
                                          extendToShape=False,
                                          type=2) or []
            deformer_sets = set(deformer_sets)  # optimize lookup
            node_sets = [s for s in node_sets if s not in deformer_sets]
            if verbose:
                self.log.debug("After filtering deformer sets "
                               "{0}".format(node_sets))
            # Ignore specifically named sets
            node_sets = [s for s in node_sets if
                         not any(s.endswith(x) for x in self.IGNORE)]
            if verbose:
                self.log.debug("After filtering ignored sets "
                               "{0}".format(node_sets))
            # Ignore viewport filter view sets (from isolate select and
            # viewports)
            node_sets = [s for s in node_sets if s not in view_sets]
            if verbose:
                self.log.debug("After filtering view sets {0}".format(node_sets))
            self.log.info("Found sets {0} for {1}".format(node_sets, node))
            # First sighting of a set creates its (name, uuid, members) entry
            for objset in node_sets:
                if objset not in sets:
                    sets[objset] = {"name": objset,
                                    "uuid": id_utils.get_id(objset),
                                    "members": list()}
        # Lookup with absolute names (from root namespace)
        instance_lookup = set([str(x) for x in cmds.ls(instance,
                                                       long=True,
                                                       absoluteName=True)])
        self.log.info("Gathering set relations..")
        for objset in sets:
            self.log.debug("From %s.." % objset)
            content = cmds.sets(objset, query=True)
            for member in cmds.ls(content, long=True, absoluteName=True):
                # Split "node.components" into node and optional components
                node, components = (member.rsplit(".", 1) + [None])[:2]
                # Only include valid members of the instance
                if node not in instance_lookup:
                    if verbose:
                        self.log.info("Skipping member %s" % member)
                    continue
                # NOTE(review): `member` may still carry a ".components"
                # suffix while stored names do not, so this dedup compares
                # differently-shaped strings -- confirm intended.
                if member in [m["name"] for m in sets[objset]["members"]]:
                    continue
                if verbose:
                    self.log.debug("Such as %s.." % member)
                member_data = {"name": node, "uuid": id_utils.get_id(node)}
                # Include components information when components are assigned
                if components:
                    member_data["components"] = components
                sets[objset]["members"].append(member_data)
        # Remove sets that didn't have any members assigned in the end
        # NOTE(review): popping from `sets` while iterating .items() is only
        # safe on Python 2 where items() returns a list; on Python 3 this
        # would raise RuntimeError -- revisit if ever ported.
        for objset, data in sets.items():
            if not data['members']:
                self.log.debug("Removing redundant set "
                               "information: {0}".format(objset))
                sets.pop(objset)
        # Member attributes (shapes + transforms)
        self.log.info("Gathering attribute changes to instance members..")
        attrs = []
        for node in instance:
            # Collect changes to "custom" attributes
            node_attrs = get_look_attrs(node)
            # Only include if there are any properties we care about
            if not node_attrs:
                continue
            attributes = {}
            for attr in node_attrs:
                attribute = "{}.{}".format(node, attr)
                attributes[attr] = cmds.getAttr(attribute)
            # attributes = dict((attr, pm.getAttr("{}.{}".format(node, attr))
            #                   for attr in node_attrs))
            data = {"name": node,
                    "uuid": id_utils.get_id(node),
                    "attributes": attributes}
            attrs.append(data)
        # Store data on the instance
        instance.data["lookAttributes"] = attrs
        instance.data["lookSetRelations"] = sets.values()
        instance.data["lookSets"] = cmds.ls(sets.keys(),
                                            absoluteName=True,
                                            long=True)
        # Log a warning when no relevant sets were retrieved for the look.
        if not instance.data['lookSets']:
            self.log.warning("No sets found for the nodes in the instance: {0}".format(instance[:]))
        self.log.info("Collected look for %s" % instance)
    def process(self, instance):
        """Collect the Look in the instance with the correct layer settings"""
        # Collection must happen inside the instance's render layer because
        # attribute values can carry per-layer overrides
        layer = instance.data.get("renderlayer", "defaultRenderLayer")
        with context.renderlayer(layer):
            self.log.info("Checking out layer: {0}".format(layer))
            self.collect(instance)

View file

@ -0,0 +1,135 @@
from maya import cmds
import pyblish.api
import cb.utils.maya.shaders as shader
# Resource tags that identify a look attribute resource
TAGS = ["maya", "attribute", "look"]
# Set form of TAGS for fast subset/membership tests
TAGS_LOOKUP = {"maya", "attribute", "look"}
class SelectTextureNodesAction(pyblish.api.Action):
    """Select the nodes related to the collected file textures"""
    label = "Select texture nodes"
    on = "succeeded"  # Only offered after the plug-in has succeeded
    icon = "search"  # Icon from Awesome Icon
    def process(self, context, plugin):
        """Select every texture node collected by `plugin`'s instances."""
        self.log.info("Finding textures..")
        # Gather all processed instances (skip results without an instance)
        instances = []
        for result in context.data["results"]:
            instance = result["instance"]
            if instance is None:
                continue
            instances.append(instance)
        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(instances, plugin)
        def is_texture_resource(resource):
            """Return whether the resource is a texture"""
            # A texture resource carries all TAGS, lives in the "textures"
            # subfolder and references a node
            tags = resource.get("tags", [])
            if not TAGS_LOOKUP.issubset(tags):
                return False
            if resource.get("subfolder", None) != "textures":
                return False
            if "node" not in resource:
                return False
            return True
        # Get the texture nodes from the instances
        nodes = []
        for instance in instances:
            for resource in instance.data.get("resources", []):
                if is_texture_resource(resource):
                    node = resource['node']
                    nodes.append(node)
        # Ensure unique
        nodes = list(set(nodes))
        if nodes:
            self.log.info("Selecting texture nodes: %s" % ", ".join(nodes))
            cmds.select(nodes, r=True, noExpand=True)
        else:
            # Nothing found: clear the selection so the result is unambiguous
            self.log.info("No texture nodes found.")
            cmds.select(deselect=True)
class CollectLookTextures(pyblish.api.InstancePlugin):
    """Collect look textures

    Includes the link from source to destination.

    """

    order = pyblish.api.CollectorOrder + 0.498
    label = 'Textures'
    families = ["colorbleed.look"]
    actions = [SelectTextureNodesAction]

    def process(self, instance):
        verbose = instance.data.get("verbose", False)

        # The look sets were collected earlier; we cannot continue without
        look_sets = instance.data["lookSets"]
        if not look_sets:
            raise RuntimeError("No look sets found for the nodes in the instance. {0}".format(look_sets))

        # Gather the unique file nodes from the sets' history
        history = cmds.listHistory(look_sets) or []
        file_nodes = list(set(cmds.ls(history, type="file")))

        resources = instance.data.get("resources", [])
        for node in file_nodes:
            attribute = "%s.fileTextureName" % node
            source = cmds.getAttr(attribute)

            # Get the computed file path (e.g. the one with the <UDIM> pattern
            # in it) So we can reassign it this computed file path whenever
            # we need to.
            computed_attribute = "%s.computedFileTextureNamePattern" % node
            computed_source = cmds.getAttr(computed_attribute)
            if source != computed_source:
                if verbose:
                    self.log.debug("File node computed pattern differs from "
                                   "original pattern: {0} "
                                   "({1} -> {2})".format(node,
                                                         source,
                                                         computed_source))
                # We replace backslashes with forward slashes because V-Ray
                # can't handle the UDIM files with the backslashes in the
                # paths as the computed patterns
                source = computed_source.replace("\\", "/")

            texture_files = shader.get_file_node_files(node)
            if not texture_files:
                self.log.error("File node does not have a texture set: "
                               "{0}".format(node))

            # Define the resource
            resource = {"tags": TAGS[:],
                        "node": node,
                        "attribute": attribute,
                        "source": source,  # required for resources
                        "files": texture_files,  # required for resources
                        "subfolder": "textures"  # optional for resources
                        }
            resources.append(resource)

        # Store resources
        instance.data['resources'] = resources

View file

@ -0,0 +1,30 @@
import maya.cmds as cmds
import maya.mel as mel
import pyblish.api
class CollectMayaUnits(pyblish.api.ContextPlugin):
    """Collect Maya's scene units."""

    label = "Maya Units"
    order = pyblish.api.CollectorOrder
    hosts = ["maya"]

    def process(self, context):
        # Query the scene's linear units (e.g. "cm")
        linear = cmds.currentUnit(query=True, linear=True)

        # Query the angular units ('deg' or 'rad')
        angular = cmds.currentUnit(query=True, angle=True)

        # Query the time units as an FPS value.
        # Using the mel command is simpler than using
        # `cmds.currentUnit(q=1, time=1)`. Otherwise we
        # have to parse the returned string value to FPS
        fps = mel.eval('currentTimeUnitToFPS()')

        context.data['linearUnits'] = linear
        context.data['angularUnits'] = angular
        context.data['fps'] = fps

View file

@ -0,0 +1,39 @@
import pyblish.api
import copy
class CollectMetadata(pyblish.api.ContextPlugin):
    """Transfer context metadata to the instance.

    This applies a copy of the `context.data['metadata']` to the
    `instance.data['metadata']` for the following metadata:

    Provides:
        {
            "topic": "topic",
            "author": "user",
            "date": "date",
            "filename": "currentFile"
        }

    """

    order = pyblish.api.CollectorOrder + 0.2
    label = "Metadata"

    # Maps metadata key -> source key in `context.data`
    mapping = {"topic": "topic",
               "author": "user",
               "date": "date",
               "filename": "currentFile"}

    def process(self, context):
        # Use `.items()` instead of the Python 2-only `.iteritems()` so the
        # plug-in also runs under Python 3; behavior is identical on Python 2.
        metadata = {key: context.data[source]
                    for key, source in self.mapping.items()
                    if source in context.data}

        # Give every instance its own deep copy so later mutations of one
        # instance's metadata do not leak into the others.
        for instance in context:
            instance.data["metadata"] = copy.deepcopy(metadata)

        self.log.info("Collected {0}".format(metadata))

View file

@ -0,0 +1,21 @@
from maya import cmds
import pyblish.api
class CollectModelData(pyblish.api.InstancePlugin):
    """Collect model data

    Ensures always only a single frame is extracted (current frame).

    """

    order = pyblish.api.CollectorOrder + 0.499
    label = 'Model Data'
    families = ["colorbleed.model"]

    def process(self, instance):
        # Clamp the frame range to the current frame so extraction
        # only ever outputs this single frame (override).
        current = cmds.currentTime(query=True)
        instance.data['startFrame'] = current
        instance.data['endFrame'] = current

View file

@ -0,0 +1,47 @@
import maya.cmds as cmds
import pyblish.api
class CollectParticlesHistory(pyblish.api.InstancePlugin):
    """For a Particle system collect the history.

    This would collect its nucleus and cache files.

    """

    order = pyblish.api.CollectorOrder + 0.499
    families = ['colorbleed.particles']
    label = "Particles History"

    def process(self, instance):
        # Include history of the instancer
        particles = cmds.ls(instance, dag=True, shapes=True,
                            leaf=True, long=True)
        particles = cmds.ls(particles, type="nParticle", long=True)

        if not particles:
            self.log.info("No particles found")
            return

        # Copy the list: the original `export = particles` aliased the same
        # list object, so extending `export` silently mutated `particles` too.
        export = list(particles)

        # Get the required inputs of the particles from its history
        particles_history = cmds.listHistory(particles) or []
        if particles_history:
            nucleus = cmds.ls(particles_history, type="nucleus")
            export.extend(nucleus)

            caches = cmds.ls(particles_history, type="cacheFile")
            export.extend(caches)

        # Add the history nodes to the instance members
        data = instance[:]
        data.extend(export)

        # Ensure unique objects only
        data = list(set(data))

        self.log.info("Setting members to {0}".format(data))
        instance[:] = data

        # Store the recommended export selection so the export can do it
        # accordingly
        instance.data["exactExportMembers"] = export

View file

@ -0,0 +1,235 @@
import os
import json
import contextlib
from maya import cmds
import pyblish_maya
import colorbleed.api
@contextlib.contextmanager
def suspension():
    """Suspend Maya viewport refreshes for the duration of the context.

    Refreshing is always re-enabled on exit, even when the body raises.
    """
    try:
        cmds.refresh(suspend=True)
        yield
    finally:
        cmds.refresh(suspend=False)
class ExtractAlembic(colorbleed.api.Extractor):
    """Extract Alembic Cache

    This extracts an Alembic cache using the `-selection` flag to minimize
    the extracted content to solely what was Collected into the instance.

    Arguments:
        startFrame (float): Start frame of output. Ignored if `frameRange`
            provided.
        endFrame (float): End frame of output. Ignored if `frameRange`
            provided.
        frameRange (str): Frame range in the format of "startFrame endFrame".
            Overrides `startFrame` and `endFrame` arguments.
        dataFormat (str): The data format to use for the cache,
            defaults to "ogawa"
        verbose (bool): When on, outputs frame number information to the
            Script Editor or output window during extraction.
        noNormals (bool): When on, normal data from the original polygon
            objects is not included in the exported Alembic cache file.
        renderableOnly (bool): When on, any non-renderable nodes or hierarchy,
            such as hidden objects, are not included in the Alembic file.
            Defaults to False.
        stripNamespaces (bool): When on, any namespaces associated with the
            exported objects are removed from the Alembic file. For example, an
            object with the namespace taco:foo:bar appears as bar in the
            Alembic file.
        uvWrite (bool): When on, UV data from polygon meshes and subdivision
            objects are written to the Alembic file. Only the current UV map is
            included.
        worldSpace (bool): When on, the top node in the node hierarchy is
            stored as world space. By default, these nodes are stored as local
            space. Defaults to False.
        eulerFilter (bool): When on, X, Y, and Z rotation data is filtered with
            an Euler filter. Euler filtering helps resolve irregularities in
            rotations especially if X, Y, and Z rotations exceed 360 degrees.
            Defaults to True.

    """

    label = "Alembic"
    families = ["colorbleed.model",
                "colorbleed.pointcache",
                "colorbleed.animation",
                "colorbleed.proxy"]
    optional = True

    @property
    def options(self):
        """Overridable options for Alembic export

        Given in the following format
            - {NAME: EXPECTED TYPE}

        If the overridden option's type does not match,
        the option is not included and a warning is logged.

        """
        return {"startFrame": float,
                "endFrame": float,
                "frameRange": str,  # "start end"; overrides startFrame & endFrame
                "eulerFilter": bool,
                "frameRelativeSample": float,
                "noNormals": bool,
                "renderableOnly": bool,
                "step": float,
                "stripNamespaces": bool,
                "uvWrite": bool,
                "wholeFrameGeo": bool,
                "worldSpace": bool,
                "writeVisibility": bool,
                "writeColorSets": bool,
                "writeFaceSets": bool,
                "writeCreases": bool,  # Maya 2015 Ext1+
                "dataFormat": str,
                "root": (list, tuple),
                "attr": (list, tuple),
                "attrPrefix": (list, tuple),
                "userAttr": (list, tuple),
                "melPerFrameCallback": str,
                "melPostJobCallback": str,
                "pythonPerFrameCallback": str,
                "pythonPostJobCallback": str,
                "selection": bool}

    @property
    def default_options(self):
        """Supply default options to extraction.

        This may be overridden by a subclass to provide
        alternative defaults.

        """
        start_frame = cmds.playbackOptions(query=True, animationStartTime=True)
        end_frame = cmds.playbackOptions(query=True, animationEndTime=True)

        return {"startFrame": start_frame,
                "endFrame": end_frame,
                "selection": True,
                "uvWrite": True,
                "eulerFilter": True,
                "dataFormat": "ogawa"  # ogawa, hdf5
                }

    def process(self, instance):
        """Export the instance's set members to an Alembic in the staging dir."""
        # Ensure alembic exporter is loaded
        cmds.loadPlugin('AbcExport', quiet=True)

        parent_dir = self.staging_dir(instance)
        filename = "%s.abc" % instance.name
        path = os.path.join(parent_dir, filename)

        # Alembic Exporter requires forward slashes
        path = path.replace('\\', '/')

        # Start from the defaults, always tag along the "uuid" user attribute
        # and then apply any per-instance overrides.
        options = self.default_options
        options["userAttr"] = ("uuid",)
        options = self.parse_overrides(instance, options)
        job_str = self.parse_options(options)
        job_str += ' -file "%s"' % path

        self.log.info('Extracting alembic to: "%s"' % path)

        verbose = instance.data('verbose', False)
        if verbose:
            self.log.debug('Alembic job string: "%s"' % job_str)

        if not os.path.exists(parent_dir):
            os.makedirs(parent_dir)

        # Suspend refresh and keep the user's selection intact while the
        # exporter operates on a temporary selection of the set members.
        with suspension():
            with pyblish_maya.maintained_selection():
                self.log.debug(
                    "Preparing %s for export using the following options: %s\n"
                    "and the following string: %s"
                    % (list(instance),
                       json.dumps(options, indent=4),
                       job_str))
                cmds.select(instance.data("setMembers"), hierarchy=True)
                cmds.AbcExport(j=job_str, verbose=verbose)

    def parse_overrides(self, instance, options):
        """Inspect data of instance to determine overridden options

        An instance may supply any of the overridable options
        as data, the option is then added to the extraction.

        """
        for key in instance.data():
            if key not in self.options:
                continue

            # Ensure the data is of correct type
            value = instance.data(key)
            if not isinstance(value, self.options[key]):
                self.log.warning(
                    "Overridden attribute {key} was of "
                    "the wrong type: {invalid_type} "
                    "- should have been {valid_type}".format(
                        key=key,
                        invalid_type=type(value).__name__,
                        valid_type=self.options[key].__name__))
                continue

            options[key] = value

        return options

    @classmethod
    def parse_options(cls, options):
        """Convert key-word arguments to job arguments string

        Args:
            options (dict): the options for the command

        """
        # Convert `startFrame` and `endFrame` arguments
        if 'startFrame' in options or 'endFrame' in options:
            start_frame = options.pop('startFrame', None)
            end_frame = options.pop('endFrame', None)
            if 'frameRange' in options:
                cls.log.debug("The `startFrame` and/or `endFrame` arguments "
                              "are overridden by the provided `frameRange`.")
            elif start_frame is None or end_frame is None:
                cls.log.warning("The `startFrame` and `endFrame` arguments "
                                "must be supplied together.")
            else:
                options['frameRange'] = "%s %s" % (start_frame, end_frame)

        job_args = list()
        for key, value in options.items():
            if isinstance(value, (list, tuple)):
                for entry in value:
                    job_args.append("-%s %s" % (key, entry))
            elif isinstance(value, bool):
                # AbcExport boolean options are presence-only flags, e.g.
                # "-uvWrite". Previously the leading dash was missing and the
                # flag was emitted even when the value was False, which would
                # produce an invalid job string / wrongly enable the option.
                if value:
                    job_args.append("-%s" % key)
            else:
                job_args.append("-%s %s" % (key, value))

        job_str = " ".join(job_args)

        return job_str

View file

@ -0,0 +1,140 @@
import os
from maya import cmds
import pyblish_maya
import colorbleed.api
import cb.utils.maya.context as context
from cb.utils.maya.animation import bakeToWorldSpace
def massage_ma_file(path):
    """Clean up .ma file for backwards compatibility.

    Massage the .ma of baked camera to stay
    backwards compatible with older versions
    of Fusion (6.4)

    Arguments:
        path (str): Path to the Maya Ascii (.ma) file, rewritten in place.

    """
    # Use a context manager so the file handle is closed even when an
    # error occurs halfway through the rewrite.
    with open(path, "r+") as f:
        lines = f.readlines()
        f.seek(0)  # reset to start of file

        # Rewrite the file, skipping all 'rename -uid' lines
        for line in lines:
            if line.strip().startswith("rename -uid "):
                continue
            f.write(line)

        f.truncate()  # remove remainder
class ExtractCameraBaked(colorbleed.api.Extractor):
    """Extract as Maya Ascii and Alembic a baked camera.

    The cameras gets baked to world space and then extracted.

    The extracted Maya ascii file gets "massaged" removing the uuid values
    so they are valid for older versions of Fusion (e.g. 6.4)

    """

    label = "Camera Baked (Maya Ascii + Alembic)"
    hosts = ["maya"]
    families = ["colorbleed.camera"]

    def process(self, instance):
        """Bake the instance's cameras and write a .ma and a .abc file.

        The baked duplicate is always used for the Maya Ascii export; the
        Alembic export uses it too unless the instance's
        "cameraBakedAlembic" data is set to False, in which case the
        original (unbaked) camera shapes are exported to Alembic instead.
        The baked nodes are deleted again at the end.
        """
        nodetype = 'camera'

        # Define extract output file path
        dir_path = self.staging_dir(instance)

        # Whether the Alembic should contain the baked camera (default True)
        alembic_as_baked = instance.data("cameraBakedAlembic", True)

        # get cameras
        members = instance.data['setMembers']
        cameras = cmds.ls(members, leaf=True, shapes=True,
                          dag=True, type=nodetype)

        # Bake the cameras (their transforms) to world space over the
        # instance's frame range, with evaluation and refresh disabled
        # for speed.
        transforms = cmds.listRelatives(cameras, parent=True,
                                        fullPath=True) or []
        framerange = [instance.data.get("startFrame", 1),
                      instance.data.get("endFrame", 1)]
        self.log.info("Performing camera bakes for: {0}".format(transforms))
        with context.evaluation("off"):
            with context.no_refresh():
                baked = bakeToWorldSpace(transforms, frameRange=framerange)

        # Extract using the shape so it includes that and its hierarchy
        # above. Otherwise Alembic takes only the transform
        baked_shapes = cmds.ls(baked, type=nodetype, dag=True,
                               shapes=True, long=True)

        # Perform maya ascii extraction
        filename = "{0}.ma".format(instance.name)
        path = os.path.join(dir_path, filename)
        self.log.info("Performing extraction..")
        with pyblish_maya.maintained_selection():
            cmds.select(baked_shapes, noExpand=True)
            cmds.file(path,
                      force=True,
                      typ="mayaAscii",
                      exportSelected=True,
                      preserveReferences=False,
                      constructionHistory=False,
                      channels=True,  # allow animation
                      constraints=False,
                      shader=False,
                      expressions=False)

        # Strip 'rename -uid' lines for Fusion 6.4 compatibility
        massage_ma_file(path)

        # Perform alembic extraction
        filename = "{0}.abc".format(instance.name)
        path = os.path.join(dir_path, filename)

        if alembic_as_baked:
            abc_shapes = baked_shapes
        else:
            # get cameras in the instance
            members = instance.data['setMembers']
            abc_shapes = cmds.ls(members, leaf=True, shapes=True, dag=True,
                                 long=True, type=nodetype)

        # Whenever the camera was baked and Maya's scene time warp was enabled
        # then we want to disable it whenever we publish the baked camera
        # otherwise we'll get double the scene time warping. But whenever
        # we *do not* publish a baked camera we want to keep it enabled. This
        # way what the artist has in the scene visually represents the output.
        with context.timewarp(state=not alembic_as_baked):
            with pyblish_maya.maintained_selection():
                cmds.select(abc_shapes, replace=True, noExpand=True)

                # Enforce forward slashes for AbcExport because we're
                # embedding it into a job string
                path = path.replace("\\", "/")

                job_str = ' -selection -dataFormat "ogawa" '
                job_str += ' -attrPrefix cb'
                job_str += ' -frameRange {0} {1} '.format(framerange[0],
                                                          framerange[1])
                job_str += ' -file "{0}"'.format(path)

                with context.evaluation("off"):
                    with context.no_refresh():
                        cmds.AbcExport(j=job_str, verbose=False)

        # Delete the baked camera (using transform to leave no trace)
        cmds.delete(baked)

        # NOTE(review): `path` here is the .abc path; the .ma path is not
        # included in this log message.
        self.log.info("Extracted instance '{0}' to: {1}".format(
            instance.name, path))

View file

@ -0,0 +1,46 @@
import os
from maya import cmds
import pyblish_maya
import colorbleed.api
class ExtractCameraRaw(colorbleed.api.Extractor):
    """Extract as Maya Ascii

    Includes constraints and channels

    """

    label = "Camera Raw (Maya Ascii)"
    hosts = ["maya"]
    families = ["colorbleed.camera"]

    def process(self, instance):
        # Define extract output file path
        dir_path = self.staging_dir(instance)
        path = os.path.join(dir_path, "{0}.raw.ma".format(instance.name))

        # Gather the camera shapes below the instance's set members
        camera_shapes = cmds.ls(instance.data['setMembers'], leaf=True,
                                shapes=True, dag=True, type='camera')

        # Perform extraction
        self.log.info("Performing extraction..")
        with pyblish_maya.maintained_selection():
            cmds.select(camera_shapes, noExpand=True)
            cmds.file(path,
                      force=True,
                      typ="mayaAscii",
                      exportSelected=True,
                      preserveReferences=False,
                      constructionHistory=False,
                      channels=True,  # allow animation
                      constraints=True,
                      shader=False,
                      expressions=False)

        self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))

View file

@ -0,0 +1,144 @@
import os
import contextlib
import maya.cmds as cmds
import pyblish_maya
import colorbleed.api
import cb.utils.maya.context as context
def _set_cache_file_path(node, path):
    """Forces a cacheFile.cachePath attribute to be set to path.

    When the given path does not exist Maya will raise an error
    when using `maya.cmds.setAttr` to set the "cachePath" attribute.

    Arguments:
        node (str): Name of cacheFile node.
        path (str): Path value to set.

    """
    path = str(path)

    # Find an attribute name that does not exist on the node yet
    temp_attr = "__tmp_path"
    while cmds.attributeQuery(temp_attr, node=node, exists=True):
        temp_attr += "_"

    # Create the temporary attribute, set its value and connect
    # it to the `.cachePath` attribute to force the value to be
    # set and applied without errors.
    cmds.addAttr(node, longName=temp_attr, dataType="string")
    temp_plug = "{0}.{1}".format(node, temp_attr)
    try:
        cmds.setAttr(temp_plug, path, type="string")
        cmds.connectAttr(temp_plug,
                         "{0}.cachePath".format(node),
                         force=True)
    finally:
        # Ensure the temporary attribute is deleted
        cmds.deleteAttr(temp_plug)
@contextlib.contextmanager
def cache_file_paths(mapping):
    """Set the cacheFile paths during context.

    This is a workaround context manager that allows
    to set the .cachePath attribute to a folder that
    doesn't actually exist since using regular
    `maya.cmds.setAttr` results in an error.

    Arguments:
        mapping (dict): node -> path mapping

    """
    # Remember the current values so they can be restored afterwards
    original = {node: cmds.getAttr("{}.cachePath".format(node))
                for node in mapping}

    try:
        for node, path in mapping.items():
            _set_cache_file_path(node, path)
        yield
    finally:
        # Restore the original paths, also when an error occurred
        for node, path in original.items():
            _set_cache_file_path(node, path)
def is_cache_resource(resource):
    """Return whether resource is a cacheFile resource"""
    tags = resource.get("tags", [])
    return all(tag in tags for tag in ("maya", "node", "cacheFile"))
class ExtractInstancerMayaAscii(colorbleed.api.Extractor):
    """Extract as Maya Ascii"""

    label = "Instancer (Maya Ascii)"
    hosts = ["maya"]
    families = ["colorbleed.instancer"]

    # TODO: Find other solution than expanding vars to fix lack of support of cacheFile

    def process(self, instance):
        export = instance.data("exactExportMembers")

        # Set up cacheFile path remapping.
        resources = instance.data.get("resources", [])
        attr_remap = dict()
        cache_remap = dict()
        for resource in resources:
            if not is_cache_resource(resource):
                continue

            node = resource['node']
            destination = resource['destination']

            folder = os.path.dirname(destination)
            fname = os.path.basename(destination)
            if fname.endswith(".xml"):
                fname = fname[:-4]

            # Ensure the folder path ends with a slash
            if not folder.endswith(("\\", "/")):
                folder += "/"

            # Remap the cache name and cache folder of the node
            attr_remap["{0}.cacheName".format(node)] = os.path.expandvars(fname)
            cache_remap[node] = os.path.expandvars(folder)
            self.log.info("Mapping {0} to {1}".format(node, destination))

        # Define extract output file path
        dir_path = self.staging_dir(instance)
        path = os.path.join(dir_path, "{0}.ma".format(instance.name))

        # Perform extraction
        self.log.info("Performing extraction..")
        with pyblish_maya.maintained_selection():
            with cache_file_paths(cache_remap):
                with context.attribute_values(attr_remap):
                    cmds.select(export, noExpand=True)
                    cmds.file(path,
                              force=True,
                              typ="mayaAscii",
                              exportSelected=True,
                              preserveReferences=False,
                              constructionHistory=False,
                              channels=True,  # allow animation
                              constraints=False,
                              shader=False,
                              expressions=False)

        self.log.info("Extracted instance '{0}' to: {1}".format(
            instance.name, path))

View file

@ -0,0 +1,38 @@
import os
from maya import cmds
import pyblish_maya
import colorbleed.api
class ExtractLayoutMayaAscii(colorbleed.api.Extractor):
    """Extract as Maya Ascii"""

    label = "Layout (Maya ASCII)"
    hosts = ["maya"]
    families = ["colorbleed.layout"]

    def process(self, instance):
        # Define extract output file path
        staging = self.staging_dir(instance)
        path = os.path.join(staging, "{0}.ma".format(instance.name))

        # Perform extraction, keeping references and history so the layout
        # round-trips fully.
        self.log.info("Performing extraction..")
        with pyblish_maya.maintained_selection():
            cmds.select(instance, noExpand=True)
            cmds.file(path,
                      force=True,
                      typ="mayaAscii",
                      exportSelected=True,
                      preserveReferences=True,
                      channels=True,
                      constraints=True,
                      expressions=True,
                      constructionHistory=True)

        self.log.info("Extracted instance '{0}' to: {1}".format(
            instance.name, path))

View file

@ -0,0 +1,79 @@
import os
import json
from maya import cmds
import pyblish_maya
import colorbleed.api
import cb.utils.maya.context as context
class ExtractLook(colorbleed.api.Extractor):
    """Extract Look (Maya Ascii + JSON)

    Only extracts the sets (shadingEngines and alike) alongside a .json file
    that stores it relationships for the sets and "attribute" data for the
    instance members.

    """

    label = "Look (Maya ASCII + JSON)"
    hosts = ["maya"]
    families = ["colorbleed.look"]

    def process(self, instance):
        # Define extract output file paths
        dir_path = self.staging_dir(instance)
        maya_path = os.path.join(dir_path, "{0}.ma".format(instance.name))
        json_path = os.path.join(dir_path, "{0}.json".format(instance.name))

        self.log.info("Performing extraction..")

        # Remove all members of the sets so they are not included in the
        # exported file by accident
        self.log.info("Extract sets (Maya ASCII)..")
        look_sets = instance.data["lookSets"]

        # Define the texture file node remapping
        resource_remap = dict()
        required = ["maya", "attribute", "look"]  # required tags to be a look resource
        for resource in instance.data.get("resources", []):
            tags = resource.get("tags", [])
            if set(required).issubset(tags):
                file_node = resource['node']
                destination = resource['destination']
                resource_remap["{}.fileTextureName".format(file_node)] = destination

        # Extract in correct render layer
        layer = instance.data.get("renderlayer", "defaultRenderLayer")
        with context.renderlayer(layer):
            # TODO: Ensure membership edits don't become renderlayer overrides
            with context.empty_sets(look_sets):
                with context.attribute_values(resource_remap):
                    with pyblish_maya.maintained_selection():
                        cmds.select(look_sets, noExpand=True)
                        cmds.file(maya_path,
                                  force=True,
                                  typ="mayaAscii",
                                  exportSelected=True,
                                  preserveReferences=False,
                                  channels=True,
                                  constraints=True,
                                  expressions=True,
                                  constructionHistory=True)

        # Write the JSON data
        self.log.info("Extract json..")
        data = {"attributes": instance.data["lookAttributes"],
                "sets": instance.data["lookSetRelations"]}

        with open(json_path, "w") as f:
            json.dump(data, f)

        self.log.info("Extracted instance '%s' to: %s" % (instance.name,
                                                          maya_path))

View file

@ -0,0 +1,35 @@
import os
from maya import cmds
import pyblish_maya
import colorbleed.api
class ExtractMayaAscii(colorbleed.api.Extractor):
    """Extract as Maya Ascii"""

    label = "Maya ASCII"
    hosts = ["maya"]
    families = ["colorbleed.rig"]
    optional = True

    def process(self, instance):
        # Define extract output file path
        staging = self.staging_dir(instance)
        path = os.path.join(staging, "{0}.ma".format(instance.name))

        # Perform extraction
        self.log.info("Performing extraction..")
        with pyblish_maya.maintained_selection():
            cmds.select(instance, noExpand=True)
            cmds.file(path,
                      force=True,
                      typ="mayaAscii",
                      exportSelected=True,
                      preserveReferences=False,
                      constructionHistory=True)

        self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))

View file

@ -0,0 +1,49 @@
import os
from maya import cmds
import pyblish_maya
import colorbleed.api
class ExtractMayaAsciiRaw(colorbleed.api.Extractor):
    """Extract as Maya Ascii (raw)

    This will preserve all references, construction history, etc.

    """

    label = "Maya ASCII (Raw)"
    hosts = ["maya"]
    families = ["colorbleed.mayaAscii"]

    def process(self, instance):
        # Define extract output file path
        staging = self.staging_dir(instance)
        path = os.path.join(staging, "{0}.ma".format(instance.name))

        # Whether to include all nodes in the instance (including those from
        # history) or only use the exact set members
        if instance.data.get("exactSetMembersOnly", False):
            members = instance.data.get("setMembers", list())
            if not members:
                raise RuntimeError("Can't export 'exact set members only' "
                                   "when set is empty.")
        else:
            members = instance[:]

        # Perform extraction
        self.log.info("Performing extraction..")
        with pyblish_maya.maintained_selection():
            cmds.select(members, noExpand=True)
            cmds.file(path,
                      force=True,
                      typ="mayaAscii",
                      exportSelected=True,
                      preserveReferences=True,
                      constructionHistory=True)

        self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))

View file

@ -0,0 +1,21 @@
import os
import json
import colorbleed.api
class ExtractMetadata(colorbleed.api.Extractor):
    """Extract origin metadata from scene"""

    label = "Metadata"

    def process(self, instance):
        staging = self.staging_dir(instance)
        destination = os.path.join(staging, "metadata.meta")

        metadata = instance.data("metadata")
        self.log.info("Extracting %s" % metadata)

        # Serialize as stable, human-readable JSON
        with open(destination, "w") as f:
            json.dump(metadata, f, indent=2, sort_keys=True)

        self.log.info("Written to %s" % destination)

View file

@ -0,0 +1,79 @@
import os
from maya import cmds
import pyblish_maya
import colorbleed.api
from cb.utils.maya import context
class ExtractModel(colorbleed.api.Extractor):
    """Extract as Model (Maya Ascii)

    Only extracts contents based on the original "setMembers" data to ensure
    publishing the least amount of required shapes. From that it only takes
    the shapes that are not intermediateObjects

    During export it sets a temporary context to perform a clean extraction.
    The context ensures:
        - Smooth preview is turned off for the geometry
        - Default shader is assigned (no materials are exported)
        - Remove display layers

    """

    label = "Model (Maya ASCII)"
    hosts = ["maya"]
    families = ["colorbleed.model"]
    optional = True

    def process(self, instance):
        """Export the instance's non-intermediate shapes to a clean .ma file."""
        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = "{0}.ma".format(instance.name)
        path = os.path.join(stagingdir, filename)

        # Perform extraction
        self.log.info("Performing extraction..")

        # Get only the shape contents we need in such a way that we avoid
        # taking along intermediateObjects
        members = instance.data("setMembers")
        members = cmds.ls(members,
                          dag=True,
                          shapes=True,
                          type=("mesh", "nurbsCurve"),
                          noIntermediate=True,
                          long=True)

        # Nested contexts: strip display layers, force display smoothness to
        # non-smoothed values, and temporarily assign the default shader so
        # no materials end up in the export.
        with context.no_display_layers(instance):
            with context.displaySmoothness(members,
                                           divisionsU=0,
                                           divisionsV=0,
                                           pointsWire=4,
                                           pointsShaded=1,
                                           polygonObject=1):
                with context.shader(members,
                                    shadingEngine="initialShadingGroup"):
                    with pyblish_maya.maintained_selection():
                        cmds.select(members, noExpand=True)
                        cmds.file(path,
                                  force=True,
                                  typ="mayaAscii",
                                  exportSelected=True,
                                  preserveReferences=False,
                                  channels=False,
                                  constraints=False,
                                  expressions=False,
                                  constructionHistory=False)

        # Store reference for integration
        if "files" not in instance.data:
            instance.data["files"] = list()

        instance.data["files"].append(filename)

        self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))

View file

@ -0,0 +1,43 @@
import os
from maya import cmds
import pyblish_maya
import colorbleed.api
class ExtractParticlesMayaAscii(colorbleed.api.Extractor):
    """Extract as Maya Ascii"""

    label = "Particles (Maya Ascii)"
    hosts = ["maya"]
    families = ["colorbleed.particles"]

    def process(self, instance):
        # Define extract output file path
        staging = self.staging_dir(instance)
        path = os.path.join(staging, "{0}.ma".format(instance.name))

        export = instance.data("exactExportMembers")

        # TODO: Transfer cache files and relink temporarily on the particles

        # Perform extraction
        self.log.info("Performing extraction..")
        with pyblish_maya.maintained_selection():
            cmds.select(export, noExpand=True)
            cmds.file(path,
                      force=True,
                      typ="mayaAscii",
                      exportSelected=True,
                      preserveReferences=False,
                      constructionHistory=False,
                      channels=True,  # allow animation
                      constraints=False,
                      shader=False,
                      expressions=False)

        self.log.info("Extracted instance '{0}' to: {1}".format(
            instance.name, path))

View file

@ -0,0 +1,62 @@
import os
from maya import cmds
import pyblish_maya
import colorbleed.api
import cb.utils.maya.context as context
class ExtractFurYeti(colorbleed.api.Extractor):
    """Extract as Yeti nodes"""

    label = "Yeti Nodes"
    hosts = ["maya"]
    families = ["colorbleed.groom"]

    def process(self, instance):
        # Define extract output file path
        staging = self.staging_dir(instance)
        path = os.path.join(staging, "{0}.ma".format(instance.name))

        self.log.info("Performing extraction..")

        # Get only the shape contents we need in such a way that we avoid
        # taking along intermediateObjects
        members = instance.data("setMembers")
        members = cmds.ls(members,
                          dag=True,
                          shapes=True,
                          type="pgYetiMaya",
                          noIntermediate=True,
                          long=True)

        # Remap cache files names and ensure fileMode is set to load from cache
        resource_remap = dict()
        required = ["maya", "yeti", "attribute"]  # required tags to be a yeti resource
        for resource in instance.data.get("resources", []):
            tags = resource.get("tags", [])
            if set(required).issubset(tags):
                resource_remap[resource['attribute']] = resource['destination']

        # Perform extraction
        with pyblish_maya.maintained_selection():
            with context.attribute_values(resource_remap):
                cmds.select(members, r=True, noExpand=True)
                cmds.file(path,
                          force=True,
                          typ="mayaAscii",
                          exportSelected=True,
                          preserveReferences=False,
                          constructionHistory=False,
                          shader=False)

        self.log.info("Extracted instance '{0}' to: {1}".format(
            instance.name, path))

View file

@ -0,0 +1,67 @@
from maya import cmds
import pyblish.api
import colorbleed.api
class ValidateCameraAttributes(pyblish.api.InstancePlugin):
    """Validates Camera has no invalid attribute keys or values.

    The Alembic file format only supports a specific subset of camera
    attributes, so we validate that no non-default values are set for the
    unsupported ones — otherwise the output would not match the current
    scene. For example the preScale, film offsets and film roll.

    """

    order = colorbleed.api.ValidateContentsOrder
    families = ['colorbleed.camera']
    hosts = ['maya']
    label = 'Camera Attributes'
    actions = [colorbleed.api.SelectInvalidAction]

    # (attribute name, required default value) pairs
    DEFAULTS = [
        ("filmFitOffset", 0.0),
        ("horizontalFilmOffset", 0.0),
        ("verticalFilmOffset", 0.0),
        ("preScale", 1.0),
        ("filmTranslateH", 0.0),
        ("filmTranslateV", 0.0),
        ("filmRollValue", 0.0)
    ]

    @classmethod
    def get_invalid(cls, instance):
        """Return the camera shapes with non-default attribute values."""
        # Gather the camera shapes below the instance's set members
        members = instance.data['setMembers']
        shapes = cmds.ls(members, dag=True, shapes=True, long=True)
        cameras = cmds.ls(shapes, type='camera', long=True)

        invalid = set()
        for camera in cameras:
            for attr, default_value in cls.DEFAULTS:
                plug = "{}.{}".format(camera, attr)

                # Flag the camera when the attribute deviates from default
                if cmds.getAttr(plug) != default_value:
                    cls.log.warning("Invalid attribute value: {0} "
                                    "(should be: {1}))".format(plug,
                                                               default_value))
                    invalid.add(camera)

                if cmds.listConnections(plug, source=True, destination=False):
                    # TODO: Validate correctly whether value always correct
                    cls.log.warning("%s has incoming connections, validation "
                                    "is unpredictable." % plug)

        return list(invalid)

    def process(self, instance):
        """Process all the nodes in the instance"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Invalid camera attributes: %s" % invalid)

View file

@ -0,0 +1,64 @@
from maya import cmds
import pyblish.api
import colorbleed.api
class ValidateCameraContents(pyblish.api.InstancePlugin):
    """Validates Camera instance contents.

    A Camera instance may only hold a SINGLE camera's transform, nothing else.
    It may additionally hold "locator" shapes, but no other shape types.
    """

    order = colorbleed.api.ValidateContentsOrder
    families = ['colorbleed.camera']
    hosts = ['maya']
    label = 'Camera Contents'
    actions = [colorbleed.api.SelectInvalidAction]

    @classmethod
    def get_invalid(cls, instance):
        """Return nodes breaking the "single camera only" contract.

        Raises:
            RuntimeError: when the instance contains no camera at all.

        """

        # get cameras
        members = instance.data['setMembers']
        shapes = cmds.ls(members, dag=True, shapes=True, long=True)

        # single camera
        invalid = []
        cameras = cmds.ls(shapes, type='camera', long=True)
        if len(cameras) != 1:
            cls.log.warning("Camera instance must have a single camera. "
                            "Found {0}: {1}".format(len(cameras), cameras))
            invalid.extend(cameras)

            # We need to check this edge case because returning an extended
            # list when there are no actual cameras results in
            # still an empty 'invalid' list
            if len(cameras) < 1:
                raise RuntimeError("No cameras in instance.")

        # non-camera shapes: anything not a camera or a locator is invalid
        valid_shapes = cmds.ls(shapes, type=('camera', 'locator'), long=True)
        shapes = set(shapes) - set(valid_shapes)
        if shapes:
            shapes = list(shapes)
            cls.log.warning("Camera instance should only contain camera "
                            "shapes. Found: {0}".format(shapes))
            invalid.extend(shapes)

        # De-duplicate before returning
        invalid = list(set(invalid))
        return invalid

    def process(self, instance):
        """Process all the nodes in the instance"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Invalid camera contents: "
                               "{0}".format(invalid))

View file

@ -0,0 +1,50 @@
import pyblish.api
import colorbleed.api
class ValidateFrameRange(pyblish.api.InstancePlugin):
    """Check the `startFrame`, `endFrame` and `handles` instance data.

    Only the values are validated; whether actual frame data exists is
    outside the scope of this plug-in. The rules are:

    - `startFrame` and `endFrame` must be given together (or not at all).
    - `startFrame` must be lower than or equal to `endFrame`.
    - `handles` may not be negative.
    """

    order = colorbleed.api.ValidateContentsOrder
    label = "Frame Range"

    def process(self, instance):
        data = instance.data
        start = data.get("startFrame", None)
        end = data.get("endFrame", None)
        handles = data.get("handles", None)

        # No time values at all: the instance is not time-based, skip.
        if start is None and end is None and handles is None:
            self.log.debug("No time values for this instance. "
                           "(Missing `startFrame`, `endFrame` or `handles`)")
            return

        # Start and end frame must come as a pair.
        if (start is None) != (end is None):
            raise RuntimeError("Only a start frame or an end frame is provided"
                               " instead of both.")

        if start is not None and end is not None:
            self.log.info("Comparing start (%s) and end (%s)" % (start, end))
            if start > end:
                raise RuntimeError("The start frame is a higher value "
                                   "than the end frame: {0}>{1}".format(start,
                                                                        end))

        if handles is not None and handles < 0.0:
            raise RuntimeError("Handles are set to a negative value")

View file

@ -0,0 +1,91 @@
import re
from collections import defaultdict
import maya.cmds as cmds
import pyblish.api
import colorbleed.api
def get_gpu_cache_subnodes(cache):
    """Return the number of internal sub nodes inside a gpuCache.

    Parses the stats emitted by `maya.cmds.gpuCache(showStats=True)`.

    Args:
        cache (str): gpuCache node name.

    Returns:
        int: Amount of subnodes in loaded gpuCache

    Raises:
        TypeError: when `cache` is not a gpuCache object type.
        RuntimeError: when the stats do not contain a parsable
            sub node count.

    """
    # Only gpuCache nodes expose the stats we parse.
    if not cmds.objectType(cache, isType="gpuCache"):
        raise TypeError("Node is not a gpuCache: {0}".format(cache))

    pattern = re.compile('nb of internal sub nodes: ([0-9]+)$')
    stats = cmds.gpuCache(cache, query=True, showStats=True)
    for line in stats.splitlines():
        found = pattern.search(line)
        if found:
            return int(found.group(1))

    raise RuntimeError("Couldn't parse amount of subnodes "
                       "in cache stats: {0}".format(cache))
def get_empty_gpu_caches(caches):
    """Return the gpuCache nodes whose cache file holds zero subnodes.

    Caches sharing the same file path are grouped so each file on disk
    is inspected only once.
    """
    # Group the caches by file path (optimization: one query per file).
    by_path = defaultdict(list)
    for node in caches:
        file_path = cmds.getAttr(node + ".cacheFileName")
        by_path[file_path].append(node)

    # A cache is considered empty when its stats report zero subnodes.
    empty = []
    for grouped in by_path.values():
        if get_gpu_cache_subnodes(grouped[0]) == 0:
            empty.extend(grouped)

    return empty
class ValidateGPUCacheNotEmpty(pyblish.api.InstancePlugin):
    """Ensure every gpuCache in the instance holds at least one subnode.

    The check is based on `maya.cmds.gpuCache(cache, showStats=True)`.
    """

    order = colorbleed.api.ValidateContentsOrder
    label = 'GpuCache has subnodes'
    families = ['colorbleed.layout']
    actions = [colorbleed.api.SelectInvalidAction]

    @classmethod
    def get_invalid(cls, instance):
        """Return the gpuCache nodes in the instance that are empty."""
        caches = cmds.ls(instance, type="gpuCache", long=True)
        return get_empty_gpu_caches(caches)

    def process(self, instance):
        """Raise when any empty gpuCache is present."""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Invalid nodes found: {0}".format(invalid))

View file

@ -0,0 +1,27 @@
import pyblish.api
import colorbleed.api
class ValidateInstanceHasMembers(pyblish.api.InstancePlugin):
    """Fail instances whose objectSet contains no members at all."""

    order = colorbleed.api.ValidateContentsOrder
    hosts = ["maya"]
    label = 'Instance has members'
    actions = [colorbleed.api.SelectInvalidAction]

    @classmethod
    def get_invalid(cls, instance):
        # An empty `setMembers` means the objectSet holds nothing; report
        # the subset name as the invalid item so it can be selected.
        if instance.data["setMembers"]:
            return []
        return [instance.data['subset']]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Empty instances found: {0}".format(invalid))

View file

@ -0,0 +1,49 @@
import pyblish.api
import colorbleed.api
import string
# Allow only characters, numbers and underscore
allowed = set(string.ascii_letters + string.digits + '_')


def validate_name(subset):
    """Return whether `subset` contains only allowed characters."""
    return set(subset) <= allowed
class ValidateSubsetName(pyblish.api.InstancePlugin):
    """Validate the instance's subset name.

    The subset name must be a non-empty string consisting solely of
    letters, digits and underscores.
    """

    order = colorbleed.api.ValidateContentsOrder
    families = ["*"]
    label = "Subset Name"

    def process(self, instance):
        subset = instance.data.get("subset", None)

        # Ensure subset data
        if subset is None:
            raise RuntimeError("Instance is missing subset "
                               "name: {0}".format(subset))

        # NOTE: `basestring` implies this code base targets Python 2.
        if not isinstance(subset, basestring):
            raise TypeError("Instance subset name must be string, "
                            "got: {0} ({1})".format(subset, type(subset)))

        # Ensure is not empty subset
        if not subset:
            raise ValueError("Instance subset name is "
                             "empty: {0}".format(subset))

        # Validate subset characters
        if not validate_name(subset):
            raise ValueError("Instance subset name contains invalid "
                             "characters: {0}".format(subset))

View file

@ -0,0 +1,74 @@
import pyblish.api
import maya.cmds as cmds
import cb.utils.maya.dag as dag
class ValidateInstancerContent(pyblish.api.InstancePlugin):
    """Validate the contents of an instancer instance.

    Checks that the set holds only instancer nodes, that the export members
    include a nucleus and a cacheFile (i.e. the particles are cached), that
    all instanced input geometry is hidden and that everything lives under
    a single root group.

    NOTE(review): the original docstring described mesh object-id checks
    and appeared copied from another plug-in; replaced to match the code.
    """

    order = pyblish.api.ValidatorOrder
    label = 'Instancer Content'
    families = ['colorbleed.instancer']

    def process(self, instance):
        invalid = False
        members = instance.data['setMembers']
        export_members = instance.data['exactExportMembers']
        self.log.info("Contents {0}".format(members))

        # The objectSet itself may only hold instancer nodes.
        if not len(members) == len(cmds.ls(members, type="instancer")):
            self.log.error("Instancer can only contain instancers")
            invalid = True

        # TODO: Implement better check for particles are cached
        if not cmds.ls(export_members, type="nucleus"):
            self.log.error("Instancer must have a connected nucleus")
            invalid = True

        if not cmds.ls(export_members, type="cacheFile"):
            self.log.error("Instancer must be cached")
            invalid = True

        # Ensure all instanced geometry is hidden
        shapes = cmds.ls(export_members,
                         dag=True, shapes=True,
                         noIntermediate=True)
        meshes = cmds.ls(shapes, type="mesh")

        def invalidate(node):
            """Whether mesh is in a valid state

            Arguments:
                node (str): The node to check

            Returns:
                bool: Whether it is in a valid state.

            """
            # A visible mesh is invalid here; input geometry must be hidden.
            return dag.is_visible(node,
                                  displayLayer=False,
                                  intermediateObject=False)

        visible = [node for node in meshes if invalidate(node)]
        if visible:
            self.log.error("Instancer input geometry must be hidden "
                           "the scene. Invalid: {0}".format(visible))
            invalid = True

        # Ensure all in one group
        parents = cmds.listRelatives(members,
                                     allParents=True,
                                     fullPath=True) or []
        roots = list(set(cmds.ls(parents, assemblies=True, long=True)))
        if len(roots) > 1:
            self.log.error("Instancer should all be contained in a single "
                           "group. Current roots: {0}".format(roots))
            invalid = True

        if invalid:
            raise RuntimeError("Instancer Content is invalid. See log.")

View file

@ -0,0 +1,151 @@
import pyblish.api
VERBOSE = False
def is_cache_resource(resource):
    """Return whether resource is a cacheFile resource"""
    # A cache resource must carry all three tags.
    tags = resource.get("tags", [])
    return set(["maya", "node", "cacheFile"]).issubset(tags)
class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
    """Validates all instancer particle systems are cached correctly.

    This means they should have the files/frames as required by the start-end
    frame (including handles).

    This also checks the files exist and checks the "ticks" (substeps) files.

    """
    order = pyblish.api.ValidatorOrder
    label = 'Instancer Cache Frame Ranges'
    families = ['colorbleed.instancer']

    @classmethod
    def get_invalid(cls, instance):
        import os
        import pyseq

        # Inclusive frame range the caches are required to cover.
        start_frame = instance.data.get("startFrame", 0)
        end_frame = instance.data.get("endFrame", 0)
        required = range(int(start_frame), int(end_frame) + 1)

        invalid = list()

        resources = instance.data.get("resources", [])
        for resource in resources:
            # Only cacheFile resources are relevant here.
            if not is_cache_resource(resource):
                continue

            node = resource['node']
            all_files = resource['files'][:]
            all_lookup = set(all_files)

            # The first file is usually the .xml description file.
            xml = all_files.pop(0)
            assert xml.endswith(".xml")

            if VERBOSE:
                cls.log.info("Checking: {0}".format(all_files))

            # Ensure all files exist (including ticks)
            # The remainder file paths should be the .mcx or .mcc files
            for f in all_files:
                assert os.path.exists(f)
                assert f.endswith(".mcx") or f.endswith(".mcc")

            # Maya particle caches support substeps by saving out additional
            # files that end with a Tick60.mcx, Tick120.mcx, etc. suffix. To
            # avoid `pyseq` getting confused we filter those out and then for
            # each file (except the last frame) check that at least all ticks
            # exist.
            tick_files = set()
            ticks = set()
            for path in all_files:
                import re
                match = re.match(".+Tick([0-9]+).mcx$", os.path.basename(path))
                if match:
                    tick_files.add(path)
                    num = match.group(1)
                    ticks.add(int(num))

            files = [f for f in all_files
                     if f not in tick_files] if tick_files else all_files

            sequences = pyseq.get_sequences(files)
            if len(sequences) != 1:
                invalid.append(node)
                cls.log.warning("More than one sequence found? "
                                "{0} {1}".format(node, files))
                cls.log.warning("Found caches: {0}".format(sequences))
                continue

            sequence = sequences[0]
            cls.log.debug("Found sequence: {0}".format(sequence))

            start = sequence.start()
            end = sequence.end()
            if start > start_frame or end < end_frame:
                invalid.append(node)
                cls.log.warning("Sequence does not have enough "
                                "frames: {0}-{1} (requires: {2}-{3})"
                                "".format(start, end,
                                          start_frame,
                                          end_frame))
                continue

            # Ensure all frames are present
            missing = set(sequence.missing())
            if missing:
                required_missing = [x for x in required if x in missing]
                if required_missing:
                    invalid.append(node)
                    cls.log.warning("Sequence is missing required frames: "
                                    "{0}".format(required_missing))
                    continue

            # Ensure all tick files (substep) exist for the files in the
            # folder for the frames required by the time range.
            if ticks:
                ticks = list(sorted(ticks))
                cls.log.info("Found ticks: {0} "
                             "(substeps: {1})".format(ticks, len(ticks)))

                # Check all frames except the last since we don't
                # require subframes after our time range.
                tick_check_frames = set(required[:-1])

                # Check all frames
                for item in sequence:
                    frame = item.frame
                    if not frame:
                        invalid.append(node)
                        cls.log.error("Path is not a frame in "
                                      "sequence: {0}".format(item))
                        continue

                    # Not required for our time range
                    if frame not in tick_check_frames:
                        continue

                    path = item.path
                    for num in ticks:
                        base, ext = os.path.splitext(path)
                        tick_file = base + "Tick{0}".format(num) + ext
                        if tick_file not in all_lookup:
                            invalid.append(node)
                            cls.log.warning(
                                "Tick file found that is not "
                                "in cache query filenames: "
                                "{0}".format(tick_file))

        return invalid

    def process(self, instance):
        """Raise when any instancer cache fails the frame range checks."""
        invalid = self.get_invalid(instance)
        if invalid:
            self.log.error("Invalid nodes: {0}".format(invalid))
            raise RuntimeError("Invalid particle caches in instance. "
                               "See logs for details.")

View file

@ -0,0 +1,97 @@
from maya import cmds
import pyblish.api
import colorbleed.api
def is_visible(node,
               displayLayer=True,
               intermediateObject=True,
               parentHidden=True,
               visibility=True):
    """Is `node` visible?

    Returns whether a node is hidden by one of the following methods:
    - The node exists (always checked)
    - The node must be a dagNode (always checked)
    - The node's visibility is off.
    - The node is set as intermediate Object.
    - The node is in a disabled displayLayer.
    - Whether any of its parent nodes is hidden.

    Roughly based on: http://ewertb.soundlinker.com/mel/mel.098.php

    Returns:
        bool: Whether the node is visible in the scene

    """
    # Only existing objects can be visible
    if not cmds.objExists(node):
        return False

    # Only dagNodes can be visible
    if not cmds.objectType(node, isAType='dagNode'):
        return False

    if visibility:
        if not cmds.getAttr('{0}.visibility'.format(node)):
            return False

    if intermediateObject and cmds.objectType(node, isAType='shape'):
        if cmds.getAttr('{0}.intermediateObject'.format(node)):
            return False

    if displayLayer:
        # Display layers set overrideEnabled and overrideVisibility on members
        if cmds.attributeQuery('overrideEnabled', node=node, exists=True):
            override_enabled = cmds.getAttr('{}.overrideEnabled'.format(node))
            override_visibility = cmds.getAttr(
                '{}.overrideVisibility'.format(node))
            # Bug fix: the node is hidden when the override is enabled and
            # overrideVisibility is OFF. The previous check reported a node
            # hidden when its layer made it visible, contradicting the
            # "disabled displayLayer" rule documented above.
            if override_enabled and not override_visibility:
                return False

    if parentHidden:
        # Recurse upwards; any hidden ancestor hides this node too.
        parents = cmds.listRelatives(node, parent=True, fullPath=True)
        if parents:
            parent = parents[0]
            if not is_visible(parent,
                              displayLayer=displayLayer,
                              intermediateObject=False,
                              parentHidden=parentHidden,
                              visibility=visibility):
                return False

    return True
class ValidateJointsHidden(pyblish.api.InstancePlugin):
    """Validate all joints are hidden visually.

    A joint counts as hidden when either:
    - its visibility is off,
    - it is in a display layer that has visibility off,
    - one of its parents is hidden, or
    - it is an intermediate object.
    """

    order = colorbleed.api.ValidateContentsOrder
    hosts = ['maya']
    families = ['colorbleed.rig']
    category = 'rig'
    version = (0, 1, 0)
    label = "Joints Hidden"
    actions = [colorbleed.api.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):
        # Any joint that is still visible is invalid.
        joints = cmds.ls(instance, type='joint', long=True)
        return [joint for joint in joints
                if is_visible(joint, displayLayer=True)]

    def process(self, instance):
        """Process all the nodes in the instance 'objectSet'"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise ValueError("Visible joints found: "
                             "{0}".format(invalid))

View file

@ -0,0 +1,108 @@
import os
from maya import cmds
import pyblish.api
import colorbleed.api
import cbra.lib
from cb.utils.python.decorators import memorize
def is_latest_version(path):
    """Return whether path is the latest version.

    Args:
        path (str): Full path to published file.

    Returns:
        bool: Whether the path belongs to the latest version.

    """
    ctx = cbra.lib.parse_context(path)
    versions = cbra.lib.list_versions(ctx)
    highest = cbra.lib.find_highest_version(versions)

    # The path is latest exactly when its parsed version matches the
    # highest published version (replaces a verbose if/else on the
    # same comparison).
    return ctx.get('version', None) == highest
@memorize
def is_latest_version_cached(path):
    """Memorized cached wrapper to `is_latest_version`"""
    # Cached so repeated checks of the same path during a publish resolve
    # only once; the cache is cleared per publish in
    # ValidateLatestVersions.process via `.cache.clear()`.
    return is_latest_version(path)
class ValidateLatestVersions(pyblish.api.InstancePlugin):
    """Validates content included is using latest published versions.

    If published contents are out of date they can be easily updated to the
    latest version using the scripts > pyblish > utilities > update_xxx for
    the corresponding node type.
    """

    order = colorbleed.api.ValidateContentsOrder
    families = ['colorbleed.layout']
    label = "Latest Versions"
    actions = [colorbleed.api.SelectInvalidAction]
    optional = True

    # (node_type, attribute) that are non-referenced to check paths for
    LOCAL_CHECKS = {
        "gpuCache": "cacheFileName",
        "VRayMesh": "fileName2"
    }

    @classmethod
    def get_invalid(cls, instance):
        """Return nodes and references pointing at outdated publishes."""

        all_nodes = instance[:]
        invalid = list()

        # check non-referenced nodes
        # NOTE: `iteritems` implies this code base targets Python 2.
        for node_type, attr in cls.LOCAL_CHECKS.iteritems():
            nodes = cmds.ls(all_nodes, type=node_type, long=True)
            referenced = cmds.ls(nodes, referencedNodes=True, long=True)
            non_referenced = [n for n in nodes if n not in referenced]
            for node in non_referenced:
                path = cmds.getAttr("{0}.{1}".format(node, attr))
                path = os.path.normpath(path)
                if not is_latest_version_cached(path):
                    invalid.append(node)

        # reference nodes related to this instance
        referenced = cmds.ls(all_nodes, long=True, referencedNodes=True)
        referenced_nodes = set(
            cmds.referenceQuery(reference, referenceNode=True)
            for reference in referenced)
        for reference in referenced_nodes:
            path = cmds.referenceQuery(reference,
                                       filename=True,
                                       withoutCopyNumber=True)
            path = os.path.normpath(path)
            if not is_latest_version_cached(path):
                invalid.append(reference)

        return invalid

    def process(self, instance):
        # Clear cache only once per publish. So we store a value on
        # the context on the first instance so we clear only once.
        name = self.__class__.__name__
        key = "_plugin_{0}_processed".format(name)
        if not instance.context.data.get(key, False):
            is_latest_version_cached.cache.clear()
            instance.context.data[key] = True

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Used Items are not updated to latest versions:"
                               "{0}".format(invalid))

View file

@ -0,0 +1,87 @@
import pyblish.api
import maya.cmds as cmds
import colorbleed.api
import pyblish_maya
import cb.utils.maya.dag as dag
class ValidateLayoutContent(pyblish.api.InstancePlugin):
    """Validates that layout contains at least a gpuCache or mesh shape node

    Also validates that (at the current frame that this is tested at) at least
    a single shape is visible.

    Without any shape nodes the layout would simply cache 'nothing' visually
    and would seem redundant.

    Note: Theoretically this validation does disable the possibility to just
        cache some "transforms" to be used elsewhere. As such currently the
        'layout' family is only intended to be used for visual shapes.

    """

    order = colorbleed.api.ValidateContentsOrder
    label = 'Layout Content'
    families = ['colorbleed.layout']

    def process(self, instance):
        # "placeholder" instances are explicitly allowed to have no
        # (visible) meshes.
        placeholder = instance.data.get("placeholder", False)

        # Ensure any meshes or gpuCaches in instance
        if not cmds.ls(instance,
                       type=("mesh", "gpuCache", "nurbsCurve"),
                       long=True):
            raise RuntimeError(
                "Layout has no mesh, gpuCache or nurbsCurve children: "
                "{0}".format(instance))

        # Ensure at least any extract nodes readily available after filtering
        with pyblish_maya.maintained_selection():
            import cbra.utils.maya.layout as layout
            nodes = instance.data['setMembers']
            # Select with hierarchy so the full tree is considered.
            cmds.select(nodes, r=1, hierarchy=True)
            hierarchy = cmds.ls(sl=True, long=True)
            extract_nodes = layout.filter_nodes(hierarchy)

        if not extract_nodes:
            self.log.info("Set members: {0}".format(nodes))
            self.log.info("Hierarchy: {0}".format(hierarchy))
            raise RuntimeError("No nodes to extract after "
                               "filtering: {0}".format(extract_nodes))

        # If no meshes in layout the gpuCache command will crash as such
        # we consider this invalid, unless "placeholder" is set to True
        meshes = cmds.ls(cmds.ls(extract_nodes,
                                 dag=True,
                                 leaf=True,
                                 shapes=True,
                                 noIntermediate=True,
                                 long=True),
                         type=("mesh", "gpuCache"),
                         long=True)
        if not meshes and not placeholder:
            raise RuntimeError("No meshes in layout. "
                               "Set placeholder to True on instance to allow "
                               "extraction without meshes")

        # Ensure at least one MESH shape is visible
        extract_shapes = cmds.ls(extract_nodes,
                                 shapes=True,
                                 long=True)

        if not placeholder:
            # We validate that at least one shape is visible to avoid
            # erroneous extractions of invisible-only content.
            for shape in extract_shapes:
                if dag.is_visible(shape,
                                  displayLayer=False,
                                  intermediateObject=True,
                                  visibility=True,
                                  parentHidden=True):
                    break
            else:
                # for/else: no visible shape was found at all.
                raise RuntimeError("No extract shape is visible. "
                                   "Layout requires at least one "
                                   "shape to be visible.")

View file

@ -0,0 +1,45 @@
import pyblish.api
import colorbleed.api
import cbra.utils.maya.node_uuid as id_utils
class ValidateLayoutNodeIds(pyblish.api.InstancePlugin):
    """Validate nodes have colorbleed id attributes

    All non-referenced transform nodes in the hierarchy should have unique IDs
    """

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.layout']
    hosts = ['maya']
    label = 'Layout Transform Ids'
    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]

    @staticmethod
    def get_invalid(instance):
        from maya import cmds

        # Only locally created (non-referenced) transforms need ids here.
        transforms = cmds.ls(instance, type='transform', long=True)
        referenced = set(cmds.ls(transforms, referencedNodes=True, long=True))

        return [node for node in transforms
                if node not in referenced and not id_utils.get_id(node)]

    def process(self, instance):
        """Process all meshes"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Transforms (non-referenced) found in layout "
                               "without asset IDs: {0}".format(invalid))

View file

@ -0,0 +1,73 @@
import maya.cmds as cmds
import pyblish.api
import colorbleed.api
from cb.utils.maya.core import getHighestInHierarchy, iterParents
# Flattened 4x4 identity matrix, in the row-major layout returned by
# `cmds.xform(..., query=True, matrix=True)`.
_IDENTITY = [1.0, 0.0, 0.0, 0.0,
             0.0, 1.0, 0.0, 0.0,
             0.0, 0.0, 1.0, 0.0,
             0.0, 0.0, 0.0, 1.0]

# Transform attributes checked for incoming connections by `is_animated`.
_ATTRS = ['tx', 'ty', 'tz',
          'rx', 'ry', 'rz',
          'sx', 'sy', 'sz',
          'shearXY', 'shearXZ', 'shearYZ']
def is_identity(node, tolerance=1e-30):
    """Return whether the node's object-space matrix is the identity.

    NOTE(review): the default tolerance is extremely tight for float
    comparison — effectively an exact match; confirm this is intended.
    """
    matrix = cmds.xform(node, query=True, matrix=True, objectSpace=True)
    return all(abs(a - b) < tolerance
               for a, b in zip(_IDENTITY, matrix))
def is_animated(node):
    """Return whether any transform attribute has an incoming connection."""
    for attr in _ATTRS:
        plug = "{}.{}".format(node, attr)
        if cmds.listConnections(plug, source=True, destination=False):
            return True
    return False
class ValidateLayoutParentNoTransforms(pyblish.api.InstancePlugin):
    """Validate layout parents have no transformations.

    The parent nodes above the extracted layout contents MUST have zero
    transformation (no offsets in translate, rotate, scale) for this pass
    validly.

    This is required to ensure no offsets are lacking from extracted caches.
    """

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.layout']
    hosts = ['maya']
    label = 'Layout No Parent Transforms'
    actions = [colorbleed.api.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):
        """Return ancestors of the set members that are transformed
        or animated."""

        # Get highest in hierarchy
        nodes = instance.data["setMembers"]
        highest = getHighestInHierarchy(nodes)

        invalid = []
        for node in highest:
            # Walk upwards: every ancestor must be a static identity.
            for parent in iterParents(node):
                if not is_identity(parent) or is_animated(parent):
                    invalid.append(parent)

        return invalid

    def process(self, instance):
        """Raise when any parent above the layout carries transforms."""
        invalid = self.get_invalid(instance)
        if invalid:
            # Bug fix: the previous message was copy-pasted from the
            # node-ids validator and misdescribed this failure.
            raise RuntimeError("Layout parents must have no transformations "
                               "or animation: {0}".format(invalid))

View file

@ -0,0 +1,150 @@
from maya import cmds
import pyblish.api
import colorbleed.api
import cbra.utils.maya.node_uuid as id_utils
def get_id_from_history(node):
    """Return the ID from the first node in the history of the same type

    If the node itself has an ID that will be returned. If no ID found None is
    returned.

    Returns:
        str: The id on first node in history

    """
    node_type = cmds.nodeType(node)
    history = cmds.listHistory(node, leaf=False) or []

    # Only consider history nodes of the exact same type; the node's own
    # id (when present) is found first.
    for similar_node in cmds.ls(history, exactType=node_type, long=True):
        node_id = id_utils.get_id(similar_node)
        if node_id:
            return node_id
class CopyUUIDsFromHistoryAction(pyblish.api.Action):
    """Copy UUIDs from the history of a node.

    This allows a deformed Shape to take its UUID from the original shape.

    """

    label = "Copy UUIDs from History"
    on = "failed"  # This action is only available on a failed plug-in
    icon = "wrench"  # Icon from Awesome Icon

    def process(self, context, plugin):
        """Assign history-derived ids to the plug-in's invalid nodes."""

        self.log.info("Finding bad nodes..")

        # Get the errored instances
        errored_instances = []
        for result in context.data["results"]:
            if result["error"] is not None and result["instance"] is not None:
                if result["error"]:
                    instance = result["instance"]
                    errored_instances.append(instance)

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)

        # Get the nodes from the all instances that ran through this plug-in
        invalid = []
        for instance in instances:
            invalid_nodes = plugin.get_invalid(instance)
            invalid.extend(invalid_nodes)

        # Ensure unique
        invalid = list(set(invalid))

        if not invalid:
            self.log.info("No invalid nodes found.")
            return

        # Generate a mapping of UUIDs using history; shapes with no usable
        # history id are skipped (and logged).
        mapping = dict()
        for shape in invalid:
            id = get_id_from_history(shape)
            if not id:
                self.log.info("No ID found in history of: {0}".format(shape))
                continue
            mapping[shape] = id

        # Add the ids to the nodes
        id_utils.add_ids(mapping)
        self.log.info("Generated ids on nodes: {0}".format(mapping.values()))
class ValidateLayoutShapeNodeIds(pyblish.api.InstancePlugin):
    """Validate shapes nodes have colorbleed id attributes

    All non-referenced transforms in the hierarchy should have unique IDs.

    This does not check for unique shape ids to allow a same non-referenced
    shape in the output (e.g. when multiple of the same characters are in
    the scene with a deformer on it).

    How?
        This usually happens when a node was created locally and did not come
        from a correctly published asset.

        In the case you're entirely sure you still want to publish the shapes
        you can forcefully generate ids for them. USE WITH CARE! Select the
        nodes (shapes!) and run:
        > scripts > pyblish > utilities > regenerate_uuids

    Why?
        The pipeline needs the ids to be able to identify "what" an object is.
        When it knows that it's able to correctly assign its shaders or do all
        kinds of other magic with it!

    """

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.layout']
    hosts = ['maya']
    label = 'Layout Shape Ids'
    actions = [colorbleed.api.SelectInvalidAction,
               CopyUUIDsFromHistoryAction]

    @staticmethod
    def get_invalid(instance):
        """Return non-referenced, non-ignored shapes missing an id."""

        nodes = cmds.ls(instance, shapes=True, long=True)
        referenced = cmds.ls(nodes, referencedNodes=True, long=True)
        non_referenced = set(nodes) - set(referenced)

        # Ignore specific node types
        # `deformFunc` = deformer shapes
        IGNORED = ("gpuCache",
                   "constraint",
                   "lattice",
                   "baseLattice",
                   "geometryFilter",
                   "deformFunc",
                   "locator")
        ignored_nodes = cmds.ls(list(non_referenced), type=IGNORED, long=True)
        if ignored_nodes:
            non_referenced -= set(ignored_nodes)

        invalid = []
        for node in non_referenced:
            if not id_utils.get_id(node):
                invalid.append(node)

        return invalid

    def process(self, instance):
        """Process all meshes"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Shapes (non-referenced) found in layout "
                               "without asset IDs: {0}".format(invalid))

View file

@ -0,0 +1,65 @@
import pyblish.api
import colorbleed.api
import cbra.utils.maya.node_uuid as id_utils
class ValidateLayoutUniqueNodeIds(pyblish.api.InstancePlugin):
    """Validate nodes have unique colorbleed id attributes"""

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.layout']
    hosts = ['maya']
    label = 'Layout Transform Unique Ids'
    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]

    @staticmethod
    def get_invalid_dict(instance):
        """Return a dictionary mapping of id key to list of member nodes"""

        from maya import cmds

        # Only non-referenced transforms are checked for id clashes.
        nodes = cmds.ls(instance, type='transform', long=True)
        referenced = cmds.ls(nodes, referencedNodes=True, long=True)
        non_referenced = set(nodes) - set(referenced)
        members = non_referenced

        # Collect each id with their members
        from collections import defaultdict
        ids = defaultdict(list)
        for member in members:
            id = id_utils.get_id(member)
            ids[id].append(member)

        # Skip those without IDs (if everything should have an ID that should
        # be another validation)
        ids.pop(None, None)

        # Take only the ids with more than one member
        # NOTE: `iteritems` implies this code base targets Python 2.
        invalid = dict((id, members) for id, members in ids.iteritems() if
                       len(members) > 1)

        return invalid

    @classmethod
    def get_invalid(cls, instance):
        """Return the member nodes that are invalid"""
        invalid_dict = cls.get_invalid_dict(instance)

        # Flatten the id -> members mapping into a flat node list.
        invalid = list()
        for members in invalid_dict.itervalues():
            invalid.extend(members)

        return invalid

    def process(self, instance):
        """Process all meshes"""

        # Ensure all nodes have a cbId
        invalid = self.get_invalid_dict(instance)
        if invalid:
            raise RuntimeError("Transforms found with non-unique "
                               "asset IDs: {0}".format(invalid))

View file

@ -0,0 +1,27 @@
import pyblish.api
import colorbleed.api
class ValidateLookContents(pyblish.api.InstancePlugin):
    """Validate look instance contents

    This is invalid when the collection was unable to collect the required
    data for a look to be published correctly.
    """

    order = colorbleed.api.ValidateContentsOrder
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look Contents'

    # Data keys the look collector must have produced for extraction.
    REQUIRED_DATA = ("lookSets", "lookSetRelations", "lookAttributes")

    def process(self, instance):
        """Process all the nodes in the instance"""

        if not instance[:]:
            raise RuntimeError("Instance is empty")

        # Bug fix: `assert` statements are stripped under `python -O`,
        # silently skipping validation; raise explicit errors instead.
        for key in self.REQUIRED_DATA:
            if key not in instance.data:
                raise RuntimeError("Look instance is missing required "
                                   "data: {0}".format(key))

View file

@ -0,0 +1,62 @@
import pyblish.api
import colorbleed.api
class ValidateLookDefaultShadersConnections(pyblish.api.InstancePlugin):
    """Validate default shaders in the scene have their default connections.

    For example the lambert1 could potentially be disconnected from the
    initialShadingGroup. As such it's not lambert1 that will be identified
    as the default shader which can have unpredictable results.

    To fix the default connections need to be made again. See the logs for
    more details on which connections are missing.

    """

    order = colorbleed.api.ValidateContentsOrder
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look Default Shader Connections'

    # The default connections to check
    DEFAULTS = [
        ("initialShadingGroup.surfaceShader", "lambert1"),
        ("initialParticleSE.surfaceShader", "lambert1"),
        ("initialParticleSE.volumeShader", "particleCloud1")
    ]

    def process(self, instance):
        # Ensure check is run only once. We don't use ContextPlugin because
        # of a bug where the ContextPlugin will always be visible. Even when
        # the family is not present in an instance.
        key = "__validate_look_default_shaders_connections_checked"
        context = instance.context
        is_run = context.data.get(key,
                                  False)
        if is_run:
            return
        else:
            context.data[key] = True

        # Process as usual
        from maya import cmds

        invalid = list()
        for plug, input_node in self.DEFAULTS:
            # The first incoming connection must be the expected default node.
            inputs = cmds.listConnections(plug,
                                          source=True,
                                          destination=False) or None
            if not inputs or inputs[0] != input_node:
                self.log.error("{0} is not connected to {1}. "
                               "This can result in unexpected behavior. "
                               "Please reconnect to continue.".format(
                                   plug,
                                   input_node))
                invalid.append(plug)

        if invalid:
            raise RuntimeError("Invalid connections.")

View file

@ -0,0 +1,129 @@
from maya import cmds
import pyblish.api
import colorbleed.api
from colorbleed.api import get_errored_instances_from_context
from cbra.utils.maya.node_uuid import get_id, add_ids
def get_deformed_history_id_mapping(shapes):
    """Return the id from history for nodes that are "Deformed".

    When shapes are referenced and get deformed by a deformer
    the shape is duplicated *without its attributes*, as such
    the new shape misses object ids. This method will try to
    trace back in the history to find the first shape with
    ids to identify the possible best match.

    Args:
        shapes (list): The shapes that are deformed.

    Returns:
        dict: Mapping of deformed shape to history shape.

    """
    shapes = cmds.ls(shapes, shapes=True, long=True)

    # Possibly deformed shapes (Maya suffixes the duplicate with "Deformed")
    deformed_shapes = [x for x in shapes if "Deformed" in x.rsplit("|", 1)[-1]]

    # The deformed shape should not be referenced
    def _is_referenced(node):
        return cmds.referenceQuery(node, isNodeReferenced=True)

    deformed_shapes = [x for x in deformed_shapes if not _is_referenced(x)]

    # Only shapes currently missing an id need a source from their history
    deformed_shapes_without_id = [x for x in deformed_shapes if not get_id(x)]

    mapping = {}
    for shape in deformed_shapes_without_id:
        node_type = cmds.objectType(shape)
        history = cmds.listHistory(shape)[1:]  # history, skipping itself
        history_shapes = cmds.ls(history, exactType=node_type, long=True)
        if not history_shapes:
            continue

        # Map to the first history shape of the same type that has an id
        for history_shape in history_shapes:
            node_id = get_id(history_shape)
            if not node_id:
                continue

            mapping[shape] = history_shape
            break

    return mapping
class CopyUUIDsFromHistory(pyblish.api.Action):
    """Copy object ids from history shapes onto invalid "Deformed" shapes.

    For each invalid shape this traces back in its construction history
    (via `get_deformed_history_id_mapping`) to the first shape of the same
    type that still has an id and copies that id over with `add_ids`.

    To retrieve the invalid nodes this assumes a static
    `get_invalid(instance)` method is available on the plugin.

    """
    label = "Copy UUIDs from History"
    on = "failed"  # This action is only available on a failed plug-in
    icon = "wrench"  # Icon from Awesome Icon

    def process(self, context, plugin):

        # Get the errored instances
        self.log.info("Finding failed instances..")
        errored_instances = get_errored_instances_from_context(context)

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)

        # Build the mapping of deformed shape -> id to assign
        ids_map = dict()
        for instance in instances:
            invalid = plugin.get_invalid(instance)
            mapping = get_deformed_history_id_mapping(invalid)
            for destination, source in mapping.items():
                ids_map[destination] = get_id(source)

        # Nothing to repair
        if not ids_map:
            return

        add_ids(ids_map)
class ValidateLookDeformedShapes(pyblish.api.InstancePlugin):
    """Validate look members have no "Deformed" shapes without object ids.

    When a referenced shape gets deformed Maya duplicates the shape without
    its attributes, so the resulting "Deformed" shape loses its object id
    and cannot be related to the look correctly.

    To fix this use the Copy UUIDs from History action to copy the id over
    from the first shape in the history that still has one, or use the
    select invalid action to inspect the offending shapes.

    """

    order = colorbleed.api.ValidateContentsOrder
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look deformed shapes'
    actions = [colorbleed.api.SelectInvalidAction, CopyUUIDsFromHistory]

    @classmethod
    def get_invalid(cls, instance):
        """Return deformed shapes without ids that have a history match."""
        context = instance.context
        nodes = context.data.get("instancePerItemNodesWithoutId", None)
        if not nodes:
            return list()

        mapping = get_deformed_history_id_mapping(nodes)
        return mapping.keys()

    def process(self, instance):
        """Process all the nodes in the instance"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Shapes found that are considered 'Deformed' "
                               "without object ids: {0}".format(invalid))

View file

@ -0,0 +1,58 @@
from maya import cmds
import pyblish.api
import colorbleed.api
class ValidateLookIgnoreColorSpace(pyblish.api.InstancePlugin):
    """Validate look textures are set to ignore color space when set to RAW

    Whenever the format is NOT set to sRGB for a file texture it must have
    its ignore color space file rules checkbox enabled to avoid unwanted
    reverting to sRGB settings upon file relinking.

    To fix this use the select invalid action to find the invalid file nodes
    and then check the "Ignore Color Space File Rules" checkbox under the
    Color Space settings.

    """

    order = colorbleed.api.ValidateContentsOrder
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look RAW Ignore color space'
    actions = [colorbleed.api.SelectInvalidAction]

    @classmethod
    def get_invalid(cls, instance):
        """Return non-sRGB file nodes that don't ignore the file rules."""

        # Collect the texture file nodes from the gathered resources
        # that are tagged as maya look attribute textures.
        required = ["maya", "look", "attribute"]
        file_nodes = set()
        for resource in instance.data.get("resources", []):
            tags = resource.get("tags", [])
            if all(tag in tags for tag in required):
                file_nodes.add(resource['node'])

        file_nodes = list(sorted(file_nodes))
        cls.log.info("Checking nodes: {0}".format(file_nodes))

        # A non-sRGB texture must have its ignore color space file rules
        # enabled, otherwise relinking may revert it back to sRGB.
        invalid = []
        for file_node in file_nodes:
            if cmds.getAttr(file_node + ".colorSpace") == "sRGB":
                continue
            if not cmds.getAttr(file_node + ".ignoreColorSpaceFileRules"):
                invalid.append(file_node)

        return invalid

    def process(self, instance):
        """Process all the nodes in the instance"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Non-sRGB file textures nodes with ignore "
                               "color space file rules disabled: "
                               "{0}".format(invalid))

View file

@ -0,0 +1,55 @@
import pyblish.api
import colorbleed.api
import cbra.utils.maya.node_uuid as id_utils
class ValidateLookMembersNodeIds(pyblish.api.InstancePlugin):
    """Validate look members have colorbleed id attributes

    Looks up the contents of the look to see if all its members have
    colorbleed id attributes so they can be connected correctly.

    When invalid it's very likely related to the model not having the id
    attributes that it should have. These should have been generated in the
    work files for the model/rig/fur or alike.

    """

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look Members Id Attributes'
    actions = [colorbleed.api.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):
        """Return unique look members that lack a cbId attribute."""

        # Collect the unique member names over all shading group relations
        members = set()
        for relation in instance.data["lookSetRelations"]:
            members.update(member['name'] for member in relation['members'])

        # Any member without a cbId is invalid
        return [node for node in members if not id_utils.has_id(node)]

    def process(self, instance):
        """Process all meshes"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Members found without "
                               "asset IDs: {0}".format(invalid))

View file

@ -0,0 +1,69 @@
from collections import defaultdict
from maya import cmds
import pyblish.api
import colorbleed.api
import cbra.utils.maya.node_uuid as id_utils
class ValidateLookMembersUnique(pyblish.api.InstancePlugin):
    """Validate members of look are unique.

    This ensures the same id is not present as more than one node in the look.

    That means there's only ever one of a specific node inside the look to be
    published. For example if you'd have a loaded 3x the same tree and by
    accident you're trying to publish them all together in a single look that
    would be invalid, because they are the same tree it should be included
    inside the look instance only once.

    """

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look Members Unique'
    actions = [colorbleed.api.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):
        """Return members that share an id with another member."""

        # Get all members from the sets
        members = []
        relations = instance.data["lookSetRelations"]
        for sg in relations:
            sg_members = sg['members']
            sg_members = [member['name'] for member in sg_members]
            members.extend(sg_members)

        # Ensure we don't have components but the objects
        members = cmds.ls(members, objectsOnly=True, long=True)
        members = list(set(members))

        # Group members per id (members without an id are skipped here;
        # they are caught by the Look Members Id Attributes validator)
        id_nodes = defaultdict(set)
        for node in members:
            node_id = id_utils.get_id(node)
            if not node_id:
                continue
            id_nodes[node_id].add(node)

        # Any id shared by more than one node is invalid
        invalid = list()
        for nodes in id_nodes.values():
            if len(nodes) > 1:
                invalid.extend(nodes)

        return invalid

    def process(self, instance):
        """Process all meshes"""
        invalid = self.get_invalid(instance)
        if invalid:
            # Fixed message: this validator checks for *duplicated* ids,
            # not for missing ids.
            raise RuntimeError("Members found with non-unique "
                               "asset IDs: {0}".format(invalid))

View file

@ -0,0 +1,85 @@
from maya import cmds
import pyblish.api
import colorbleed.api
class ValidateLookNoDefaultShaders(pyblish.api.InstancePlugin):
    """Validate look contains no default shaders.

    This checks whether the look has any members of:
    - lambert1
    - initialShadingGroup
    - initialParticleSE
    - particleCloud1

    If any of those is present it will raise an error. A look is not allowed
    to have any of the "default" shaders present in a scene as they can
    introduce problems when referenced (overriding local scene shaders).

    To fix this no shape nodes in the look must have any of default shaders
    applied.

    """

    order = colorbleed.api.ValidateContentsOrder
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look No Default Shaders'
    actions = [colorbleed.api.SelectInvalidAction]

    @classmethod
    def get_invalid_sets(cls, instance):
        """Return the default shaders found among the look sets, if any."""

        disallowed = ["lambert1",
                      "initialShadingGroup",
                      "initialParticleSE",
                      "particleCloud1"]
        disallowed = set(disallowed)

        # Check among the sets
        sets = instance.data['lookSets']
        lookup = set(sets)
        intersect = lookup.intersection(disallowed)
        if intersect:
            cls.log.error("Default shaders found in the "
                          "look: {0}".format(list(intersect)))
            return list(intersect)

        # Check among history/inputs of the sets
        history = cmds.listHistory(sets) or []
        lookup = set(history)
        intersect = lookup.intersection(disallowed)
        if intersect:
            cls.log.error("Default shaders found in the history of the "
                          "look: {0}".format(list(intersect)))
            return list(intersect)

        return list()

    @classmethod
    def get_invalid(cls, instance):
        """Return the instance nodes that are members of default shaders."""

        shaders = cls.get_invalid_sets(instance)
        nodes = instance[:]

        # Get members of the shaders
        # (renamed from `all` which shadowed the builtin)
        shader_members = set()
        for shader in shaders:
            members = cmds.sets(shader, q=True) or []
            members = cmds.ls(members, long=True)  # ensure long names
            shader_members.update(members)

        # Get the instance nodes among the shader members
        invalid = shader_members.intersection(nodes)
        invalid = list(invalid)

        return invalid

    def process(self, instance):
        """Process all the nodes in the instance"""
        sets = self.get_invalid_sets(instance)
        if sets:
            raise RuntimeError("Invalid shaders found: {0}".format(sets))

View file

@ -0,0 +1,48 @@
from collections import defaultdict
import pyblish.api
import colorbleed.api
import cbra.utils.maya.node_uuid as id_utils
class ValidateLookNodeUniqueIds(pyblish.api.InstancePlugin):
    """Validate look sets have unique colorbleed id attributes

    Each look set must carry its own unique id; two sets sharing the same
    id cannot be related back to their source correctly.

    """

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look Id Unique Attributes'
    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]

    @staticmethod
    def get_invalid(instance):
        """Return look sets that share an id with another set."""

        nodes = instance.data["lookSets"]

        # Group the sets per id (sets without an id are skipped here)
        id_sets = defaultdict(list)
        for node in nodes:
            node_id = id_utils.get_id(node)
            if not node_id:
                continue
            id_sets[node_id].append(node)

        # Any id used by more than one set is invalid
        invalid = list()
        for _, grouped in id_sets.items():
            if len(grouped) > 1:
                invalid.extend(grouped)

        return invalid

    def process(self, instance):
        """Process all meshes"""
        invalid = self.get_invalid(instance)
        if invalid:
            # Fixed message: this validator checks for *duplicated* ids,
            # not for missing ids.
            raise RuntimeError("Nodes found with non-unique "
                               "asset IDs: {0}".format(invalid))

View file

@ -0,0 +1,39 @@
import pyblish.api
import colorbleed.api
class ValidateMayaUnits(pyblish.api.ContextPlugin):
    """Check if the Maya units are set correct"""

    order = colorbleed.api.ValidateSceneOrder
    label = "Maya Units"
    families = ["colorbleed.rig",
                "colorbleed.model",
                "colorbleed.pointcache",
                "colorbleed.curves"]
    actions = [colorbleed.api.RepairAction]

    def process(self, context):
        """Validate the scene's linear, angular and time units.

        Raises:
            AssertionError: When units are not cm / degrees / 25 FPS.

        """
        # Use consistent dict-style access for all collected context data
        # (previously mixed call-style `context.data(...)` with subscript)
        linearunits = context.data.get('linearUnits')
        angularunits = context.data.get('angularUnits')
        fps = context.data.get('fps')

        self.log.info('Units (linear): {0}'.format(linearunits))
        self.log.info('Units (angular): {0}'.format(angularunits))
        self.log.info('Units (time): {0} FPS'.format(fps))

        # check if units are correct
        assert linearunits and linearunits == 'cm', ("Scene linear units must "
                                                     "be centimeters")

        assert angularunits and angularunits == 'deg', ("Scene angular units "
                                                        "must be degrees")

        # Fixed typo in message: "25 FP" -> "25 FPS"
        assert fps and fps == 25.0, "Scene must be 25 FPS"

    @classmethod
    def repair(cls):
        """Fix the current FPS setting of the scene, set to PAL (25.0 fps).

        Not implemented yet; raising keeps the RepairAction honest instead
        of silently doing nothing.

        """
        raise NotImplementedError()

View file

@ -0,0 +1,96 @@
import re
from maya import cmds
import pyblish.api
import colorbleed.api
def len_flattened(components):
    """Return the length of the list as if it was flattened.

    Maya will return consecutive components as a single entry
    when requesting with `maya.cmds.ls` without the `flatten`
    flag. Though enabling `flatten` on a large list (e.g. millions)
    will result in a slow result. This command will return the amount
    of entries in a non-flattened list by parsing the result with
    regex.

    Args:
        components (list): The non-flattened components.

    Returns:
        int: The amount of entries.

    """
    assert isinstance(components, (list, tuple))

    # Raw string so the regex escapes are not treated as (invalid)
    # string escape sequences; `[a:b]` spans count as b - a + 1 entries.
    pattern = re.compile(r"\[([0-9]+):([0-9]+)\]")

    n = 0
    for c in components:
        match = pattern.search(c)
        if match:
            start, end = match.groups()
            n += int(end) - int(start) + 1
        else:
            # A plain single component counts as one entry
            n += 1
    return n
class ValidateMeshHasUVs(pyblish.api.InstancePlugin):
    """Validate the current mesh has UVs.

    It validates whether the current UV set has non-zero UVs and
    at least more than the vertex count. It's not really bulletproof,
    but a simple quick validation to check if there are likely
    UVs for every face.

    """

    order = colorbleed.api.ValidateMeshOrder
    hosts = ['maya']
    families = ['colorbleed.model']
    category = 'geometry'
    label = 'Mesh Has UVs'
    actions = [colorbleed.api.SelectInvalidAction]
    optional = True

    @classmethod
    def get_invalid(cls, instance):

        invalid = []
        for mesh in cmds.ls(instance, type='mesh'):
            uv_count = cmds.polyEvaluate(mesh, uv=True)
            if uv_count == 0:
                invalid.append(mesh)
                continue

            vertex_count = cmds.polyEvaluate(mesh, vertex=True)
            if uv_count >= vertex_count:
                continue

            # Workaround:
            # Maya can have instanced UVs in a single mesh, for example
            # imported from an Alembic. With instanced UVs the UV count from
            # `maya.cmds.polyEvaluate(uv=True)` will only result in the
            # unique UV count instead of for all vertices.
            #
            # Note: Maya can save instanced UVs to `mayaAscii` but cannot
            #       load this as instanced. So saving, opening and saving
            #       again will lose this information.
            map_attr = "{}.map[*]".format(mesh)
            mapped_vertices = cmds.polyListComponentConversion(map_attr,
                                                               toVertex=True)
            if len_flattened(mapped_vertices) < vertex_count:
                invalid.append(mesh)
            else:
                cls.log.warning("Node has instanced UV points: "
                                "{0}".format(mesh))

        return invalid

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Meshes found in instance without "
                               "valid UVs: {0}".format(invalid))

View file

@ -0,0 +1,34 @@
from maya import cmds
import pyblish.api
import colorbleed.api
class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin):
    """Validate meshes don't have lamina faces.

    Lamina faces share all of their edges.

    """

    order = colorbleed.api.ValidateMeshOrder
    hosts = ['maya']
    families = ['colorbleed.model']
    category = 'geometry'
    version = (0, 1, 0)
    label = 'Mesh Lamina Faces'
    actions = [colorbleed.api.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):
        """Return meshes in the instance that contain lamina faces."""
        invalid = []
        for mesh in cmds.ls(instance, type='mesh', long=True):
            if cmds.polyInfo(mesh, laminaFaces=True):
                invalid.append(mesh)
        return invalid

    def process(self, instance):
        """Process all the nodes in the instance 'objectSet'"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise ValueError("Meshes found with lamina faces: "
                             "{0}".format(invalid))

View file

@ -0,0 +1,49 @@
from maya import cmds
import pyblish.api
import colorbleed.api
class ValidateMeshNoNegativeScale(pyblish.api.Validator):
    """Ensure that meshes don't have a negative scale.

    Using negatively scaled proxies in a VRayMesh results in inverted
    normals. As such we want to avoid this.

    We also avoid this on the rig or model because these are often the
    previous steps for those that are cached to proxies so we can catch this
    issue early.

    """

    order = colorbleed.api.ValidateMeshOrder
    hosts = ['maya']
    families = ['colorbleed.proxy', 'colorbleed.rig', 'colorbleed.model']
    label = 'Mesh No Negative Scale'
    actions = [colorbleed.api.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):
        """Return meshes whose transform has a negative scale component."""
        meshes = cmds.ls(instance,
                         type='mesh',
                         long=True,
                         noIntermediate=True)

        invalid = []
        for mesh in meshes:
            # The scale lives on the parent transform of the shape
            parent = cmds.listRelatives(mesh, parent=True, fullPath=True)[0]
            sx, sy, sz = cmds.getAttr("{0}.scale".format(parent))[0]
            if sx < 0 or sy < 0 or sz < 0:
                invalid.append(mesh)

        return invalid

    def process(self, instance):
        """Process all the nodes in the instance 'objectSet'"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise ValueError("Meshes found with negative "
                             "scale: {0}".format(invalid))

View file

@ -0,0 +1,41 @@
from maya import cmds
import pyblish.api
import colorbleed.api
class ValidateMeshNonManifold(pyblish.api.Validator):
    """Ensure that meshes don't have non-manifold edges or vertices

    To debug the problem on the meshes you can use Maya's modeling
    tool: "Mesh > Cleanup..."

    """

    order = colorbleed.api.ValidateMeshOrder
    hosts = ['maya']
    families = ['colorbleed.model']
    label = 'Mesh Non-Manifold Vertices/Edges'
    actions = [colorbleed.api.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):
        """Return meshes with non-manifold vertices or edges."""
        invalid = []
        for mesh in cmds.ls(instance, type='mesh', long=True):
            has_bad_vertices = cmds.polyInfo(mesh, nonManifoldVertices=True)
            has_bad_edges = cmds.polyInfo(mesh, nonManifoldEdges=True)
            if has_bad_vertices or has_bad_edges:
                invalid.append(mesh)
        return invalid

    def process(self, instance):
        """Process all the nodes in the instance 'objectSet'"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise ValueError("Meshes found with non-manifold "
                             "edges/vertices: {0}".format(invalid))

View file

@ -0,0 +1,58 @@
from maya import cmds
import pyblish.api
import colorbleed.api
from pyblish_cb.tmp_utils import polyConstraint
class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
    """Validate meshes don't have edges with a zero length.

    Based on Maya's polyCleanup 'Edges with zero length'.

    Note:
        This can be slow for high-res meshes.

    """

    order = colorbleed.api.ValidateMeshOrder
    families = ['colorbleed.model']
    hosts = ['maya']
    category = 'geometry'
    version = (0, 1, 0)
    label = 'Mesh Edge Length Non Zero'
    actions = [colorbleed.api.SelectInvalidAction]

    __tolerance = 1e-5

    @classmethod
    def get_invalid(cls, instance):
        """Return the invalid edges.

        Also see: http://help.autodesk.com/view/MAYAUL/2015/ENU/?guid=Mesh__Cleanup

        """
        meshes = cmds.ls(instance, type='mesh', long=True)
        if not meshes:
            return list()

        # Build one component selection covering every edge of every mesh
        edges = ['{0}.e[*]'.format(mesh) for mesh in meshes]

        # Keep only edges whose length lies within (0, tolerance)
        return polyConstraint(edges,
                              t=0x8000,  # type=edge
                              length=1,
                              lengthbound=(0, cls.__tolerance))

    def process(self, instance):
        """Process all meshes"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Meshes found with zero "
                               "edge length: {0}".format(invalid))

View file

@ -0,0 +1,53 @@
from maya import cmds
import pyblish.api
import colorbleed.api
class ValidateMeshNormalsUnlocked(pyblish.api.Validator):
    """Validate all meshes in the instance have unlocked normals

    These can be unlocked manually through:
        Modeling > Mesh Display > Unlock Normals

    """

    order = colorbleed.api.ValidateMeshOrder
    hosts = ['maya']
    families = ['colorbleed.model']
    category = 'geometry'
    version = (0, 1, 0)
    label = 'Mesh Normals Unlocked'
    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.RepairAction]
    optional = True

    @staticmethod
    def has_locked_normals(mesh):
        """Return whether a mesh node has locked normals"""
        frozen = cmds.polyNormalPerVertex("{}.vtxFace[*][*]".format(mesh),
                                          query=True,
                                          freezeNormal=True)
        return any(frozen)

    @classmethod
    def get_invalid(cls, instance):
        """Return the meshes with locked normals in instance"""
        invalid = []
        for mesh in cmds.ls(instance, type='mesh', long=True):
            if cls.has_locked_normals(mesh):
                invalid.append(mesh)
        return invalid

    def process(self, instance):
        """Raise invalid when any of the meshes have locked normals"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise ValueError("Meshes found with "
                             "locked normals: {0}".format(invalid))

    @classmethod
    def repair(cls, instance):
        """Unlocks all normals on the meshes in this instance."""
        for mesh in cls.get_invalid(instance):
            cmds.polyNormalPerVertex(mesh, unFreezeNormal=True)

View file

@ -0,0 +1,123 @@
from maya import cmds
import pyblish.api
import colorbleed.api
class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin):
    """Ensure no multiple UV sets exist for each polygon mesh"""

    order = colorbleed.api.ValidateMeshOrder
    hosts = ['maya']
    families = ['colorbleed.model', 'colorbleed.pointcache']
    category = 'uv'
    optional = True
    version = (0, 1, 0)
    label = "Mesh Single UV Set"
    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.RepairAction]

    @staticmethod
    def get_invalid(instance):
        """Return meshes that do not have exactly one UV set."""

        meshes = cmds.ls(instance, type='mesh', long=True)

        invalid = []
        for mesh in meshes:
            uvSets = cmds.polyUVSet(mesh,
                                    query=True,
                                    allUVSets=True) or []

            # ensure unique (sometimes maya will list 'map1' twice)
            uvSets = set(uvSets)

            if len(uvSets) != 1:
                invalid.append(mesh)

        return invalid

    def process(self, instance):
        """Process all the nodes in the instance 'objectSet'"""
        invalid = self.get_invalid(instance)

        if invalid:
            raise ValueError("Nodes found with multiple "
                             "UV sets: {0}".format(invalid))

    @classmethod
    def repair(cls, instance):
        """Repair action entry point: fix every invalid mesh in place."""
        for mesh in cls.get_invalid(instance):
            cls._repair_mesh(mesh)

    @classmethod
    def _repair_mesh(cls, mesh):
        """Process a single mesh, deleting other UV sets than the active one.

        Keep only current UV set and ensure it's the default 'map1'

        """
        from maya import cmds

        uvSets = cmds.polyUVSet(mesh,
                                query=True,
                                allUVSets=True)
        current = cmds.polyUVSet(mesh,
                                 query=True,
                                 currentUVSet=True)[0]

        # Copy over to map1 first so the surviving set is always the
        # default 'map1' (which can never be deleted)
        if current != 'map1':
            cmds.polyUVSet(mesh,
                           uvSet=current,
                           newUVSet='map1',
                           copy=True)
            cmds.polyUVSet(mesh,
                           currentUVSet=True,
                           uvSet='map1')
            current = 'map1'

        # Delete all non-current UV sets
        deleteUVSets = [uvSet for uvSet in uvSets if uvSet != current]
        # Track the set being deleted so a failure can be reported below
        uvSet = None

        # Maya Bug (tested in 2015/2016):
        #   In some cases the API's MFnMesh will report less UV sets
        #   than maya.cmds.polyUVSet.
        #   This seems to happen when the deletion of UV sets has not
        #   triggered a cleanup of the UVSet array
        #   attribute on the mesh node. It will still have extra
        #   entries in the attribute, though it will not
        #   show up in API or UI. Nevertheless it does show up in
        #   maya.cmds.polyUVSet.
        #   To ensure we clean up the array we'll force delete the
        #   extra remaining 'indices' that we don't want.
        # TODO: Implement a better fix
        #   The best way to fix would be to get the UVSet
        #   indices from api with MFnMesh (to ensure we keep
        #   correct ones) and then only force delete the other
        #   entries in the array attribute on the node.
        #   But for now we're deleting all entries except first
        #   one. Note that the first entry could never
        #   be removed (the default 'map1' always exists and is
        #   supposed to be undeletable.)
        try:
            for uvSet in deleteUVSets:
                cmds.polyUVSet(mesh, delete=True, uvSet=uvSet)
        except RuntimeError, e:
            # Best-effort: log the failing set and carry on with cleanup
            cls.log.warning('uvSet: {0} - '
                            'Error: {1}'.format(uvSet, e))

        indices = cmds.getAttr('{0}.uvSet'.format(mesh),
                               multiIndices=True)
        if not indices:
            cls.log.warning("No uv set found indices for: {0}".format(mesh))
            return

        # Delete from end to avoid shifting indices
        # and remove the indices in the attribute
        indices = reversed(indices[1:])
        for i in indices:
            attr = '{0}.uvSet[{1}]'.format(mesh, i)
            cmds.removeMultiInstance(attr, b=True)

Some files were not shown because too many files have changed in this diff Show more