Merge pull request #62 from aardschok/PLN-0058

Updated collect data & related logic
Wijnand Koreman 2017-12-05 11:29:11 +01:00 committed by GitHub
commit 8fe596dd69
5 changed files with 145 additions and 105 deletions

@@ -38,78 +38,13 @@ class YetiCacheLoader(api.Loader):
fursettings = json.load(fp)
# Check if resources map exists
# TODO: should be stored in fursettings
image_search_path = ""
version_folder = os.path.dirname(self.fname)
resource_folder = os.path.join(version_folder, "resources")
if os.path.exists(resource_folder):
image_search_path = os.path.normpath(resource_folder)
# Get node name from JSON
nodes = []
for node, settings in fursettings.items():
if "nodes" not in fursettings:
raise RuntimeError("Encountered invalid data, expect 'nodes' in "
"fursettings.")
# Create transform
transform_name = "{}:{}".format(namespace, node.split("Shape")[0])
transform_node = cmds.createNode("transform", name=transform_name)
# Create new pgYetiMaya node
node_name = "{}:{}".format(namespace, node)
yeti_node = cmds.createNode("pgYetiMaya",
name=node_name,
parent=transform_node)
cmds.connectAttr("time1.outTime", "%s.currentTime" % yeti_node)
# Apply explicit colorbleed ID to node
shape_id = settings["cbId"]
asset_id = shape_id.split(":", 1)[0]
lib.set_id(node=yeti_node,
unique_id=shape_id,
overwrite=True)
settings.pop("cbId", None)
# Apply new colorbleed ID to transform node
# TODO: get ID from transform in data to ensure consistency
_ids = lib.generate_ids(nodes=[transform_node], asset_id=asset_id)
for n, _id in _ids:
lib.set_id(n, unique_id=_id)
# Apply settings
for attr, value in settings.items():
attribute = "%s.%s" % (yeti_node, attr)
cmds.setAttr(attribute, value)
# Ensure the node has no namespace identifiers
node = node.replace(":", "_")
# Create full cache path
cache = os.path.join(self.fname, "{}.%04d.fur".format(node))
cache = os.path.normpath(cache)
cache_fname = self.validate_cache(cache)
cache_path = os.path.join(self.fname, cache_fname)
# Preset the viewport density
cmds.setAttr("%s.viewportDensity" % yeti_node, 0.1)
# Add filename to `cacheFileName` attribute
cmds.setAttr("%s.cacheFileName" % yeti_node,
cache_path,
type="string")
cmds.setAttr("%s.imageSearchPath" % yeti_node,
image_search_path,
type="string")
# Set verbosity for debug purposes
cmds.setAttr("%s.verbosity" % yeti_node, 2)
# Enable the cache by setting the file mode
cmds.setAttr("%s.fileMode" % yeti_node, 1)
nodes.append(yeti_node)
nodes.append(transform_node)
node_data = fursettings["nodes"]
nodes = self.create_nodes(namespace, node_data)
group_name = "{}:{}".format(namespace, asset["name"])
group_node = cmds.group(nodes, name=group_name)
@@ -202,3 +137,64 @@ class YetiCacheLoader(api.Loader):
return filename
def create_nodes(self, namespace, settings):
# Get node name from JSON
nodes = []
for node_settings in settings:
# Create transform node
transform = node_settings["transform"]
transform_name = "{}:{}".format(namespace, transform["name"])
transform_node = cmds.createNode("transform", name=transform_name)
lib.set_id(transform_node, transform["cbId"])
# Create pgYetiMaya node
original_node = node_settings["name"]
node_name = "{}:{}".format(namespace, original_node)
yeti_node = cmds.createNode("pgYetiMaya",
name=node_name,
parent=transform_node)
lib.set_id(yeti_node, node_settings["cbId"])
nodes.append(transform_node)
nodes.append(yeti_node)
# Apply attributes to pgYetiMaya node
kwargs = {}
for attr, value in node_settings["attrs"].items():
attribute = "%s.%s" % (yeti_node, attr)
if isinstance(value, (str, unicode)):
cmds.setAttr(attribute, value, type="string")
continue
cmds.setAttr(attribute, value, **kwargs)
# Ensure the node has no namespace identifiers
node_name = original_node.replace(":", "_")
# Create full cache path
cache = os.path.join(self.fname, "{}.%04d.fur".format(node_name))
cache = os.path.normpath(cache)
cache_fname = self.validate_cache(cache)
cache_path = os.path.join(self.fname, cache_fname)
# Preset the viewport density
cmds.setAttr("%s.viewportDensity" % yeti_node, 0.1)
# Add filename to `cacheFileName` attribute
cmds.setAttr("%s.cacheFileName" % yeti_node,
cache_path,
type="string")
# Set verbosity for debug purposes
cmds.setAttr("%s.verbosity" % yeti_node, 2)
# Enable the cache by setting the file mode
cmds.setAttr("%s.fileMode" % yeti_node, 1)
return nodes
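
For reference, the cache path built above follows a frame-padded naming pattern; a small sketch with made-up node and publish-folder names (the fname value below is a hypothetical stand-in for self.fname):

    import os

    # Hypothetical values; the real names come from the published fur settings.
    fname = "/projects/show/publish/yeticache/v001"        # stand-in for self.fname
    node_name = "cache_SET:yetiShape".replace(":", "_")    # -> "cache_SET_yetiShape"
    cache = os.path.normpath(os.path.join(fname, "{}.%04d.fur".format(node_name)))
    # e.g. /projects/show/publish/yeticache/v001/cache_SET_yetiShape.%04d.fur
    # where %04d stands for the zero-padded frame number of the cache sequence.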

@@ -27,8 +27,12 @@ def get_look_attrs(node):
"""
# When referenced get only attributes that are "changed since file open"
# which includes any reference edits, otherwise take *all* user defined
# attributes
is_referenced = cmds.referenceQuery(node, isNodeReferenced=True)
result = cmds.listAttr(node, userDefined=True,
changedSinceFileOpen=True) or []
changedSinceFileOpen=is_referenced) or []
# `cbId` is added when a scene is saved, ignore by default
if "cbId" in result:
@@ -91,7 +95,7 @@ class CollectLook(pyblish.api.InstancePlugin):
for objset in list(sets):
self.log.debug("From %s.." % objset)
# Get all nodes of the current objectSet
# Get all nodes of the current objectSet (shadingEngine)
for member in cmds.ls(cmds.sets(objset, query=True), long=True):
member_data = self.collect_member_data(member,
instance_lookup)
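
As a side note on the get_look_attrs change above, the effect of the changedSinceFileOpen flag can be illustrated on a referenced node (the node name below is made up):

    from maya import cmds

    # Hypothetical referenced shader; only the second query is what gets published.
    node = "lookRef:lambert1"
    all_user = cmds.listAttr(node, userDefined=True) or []
    ref_edits = cmds.listAttr(node, userDefined=True,
                              changedSinceFileOpen=True) or []
    # all_user lists every user-defined attribute on the node, while ref_edits
    # is limited to attributes changed in this scene, i.e. the reference edits.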

@@ -2,16 +2,29 @@ from maya import cmds
import pyblish.api
from colorbleed.maya import lib
SETTINGS = {"renderDensity",
"renderWidth",
"renderLength",
"increaseRenderBounds",
"imageSearchPath",
"cbId"}
class CollectYetiCache(pyblish.api.InstancePlugin):
"""Collect all information of the Yeti caches"""
"""Collect all information of the Yeti caches
The information contains the following attributes per Yeti node
- "renderDensity"
- "renderWidth"
- "renderLength"
- "increaseRenderBounds"
- "imageSearchPath"
Other information includes the name of the transform and its Colorbleed ID
"""
order = pyblish.api.CollectorOrder + 0.45
label = "Collect Yeti Cache"
@@ -22,11 +35,30 @@ class CollectYetiCache(pyblish.api.InstancePlugin):
def process(self, instance):
# Collect fur settings
settings = {}
for node in cmds.ls(instance, type="pgYetiMaya"):
settings[node] = {}
settings = {"nodes": []}
# Get yeti nodes and their transforms
yeti_shapes = cmds.ls(instance, type="pgYetiMaya")
for shape in yeti_shapes:
shape_data = {"transform": None,
"name": shape,
"cbId": lib.get_id(shape),
"attrs": None}
# Get specific node attributes
attr_data = {}
for attr in SETTINGS:
current = cmds.getAttr("%s.%s" % (node, attr))
settings[node][attr] = current
current = cmds.getAttr("%s.%s" % (shape, attr))
attr_data[attr] = current
# Get transform data
parent = cmds.listRelatives(shape, parent=True)[0]
transform_data = {"name": parent, "cbId": lib.get_id(parent)}
# Store collected data
shape_data["attrs"] = attr_data
shape_data["transform"] = transform_data
settings["nodes"].append(shape_data)
instance.data["fursettings"] = settings
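
The resulting instance.data["fursettings"] then looks roughly like this (node names and cbId values are made-up examples):

    # Illustrative only; names and IDs are made up.
    example_fursettings = {
        "nodes": [
            {
                "name": "yetiMane_Shape",
                "cbId": "59f0f3a1b1c2d3e4f5a6b7c8:a1b2c3d4e5f6",
                "transform": {
                    "name": "yetiMane",
                    "cbId": "59f0f3a1b1c2d3e4f5a6b7c8:0f1e2d3c4b5a"
                },
                "attrs": {
                    "renderDensity": 1.0,
                    "renderWidth": 1.0,
                    "renderLength": 1.0,
                    "increaseRenderBounds": 0.0,
                    "imageSearchPath": "",
                    "cbId": "59f0f3a1b1c2d3e4f5a6b7c8:a1b2c3d4e5f6"
                }
            }
        ]
    }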

@@ -37,28 +37,30 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
fullPath=True) or input_content
# Get all the shapes
input_shapes = cmds.ls(input_nodes, long=True)
input_shapes = cmds.ls(input_nodes, long=True, noIntermediate=True)
# Store all connections
print "input shape:", input_shapes
connections = cmds.listConnections(input_shapes,
source=True,
destination=False,
connections=True,
plugs=True) or []
# Group per source, destination pair
grouped = [(item, connections[i+1]) for i, item in
# Group into (source, destination) pairs. The pairs need to be reversed
# because listConnections returns the queried shape's plug first, while
# that shape is actually the destination of the connection
grouped = [(connections[i+1], item) for i, item in
enumerate(connections) if i % 2 == 0]
inputs = []
for src, dest in grouped:
src_node, src_attr = src.split(".", 1)
source_node, source_attr = src.split(".", 1)
dest_node, dest_attr = dest.split(".", 1)
# The plug must go in the socket, remember this for the loader
inputs.append({"connections": [src_attr, dest_attr],
"plugID": lib.get_id(dest_node),
"socketID": lib.get_id(src_node)})
inputs.append({"connections": [source_attr, dest_attr],
"sourceID": lib.get_id(source_node),
"destinationID": lib.get_id(dest_node)})
# Collect any textures if used
yeti_resources = []
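
With the reversed pairs, each stored entry reads source-to-destination, which is how the extractor later consumes them via settings["inputs"]; an illustrative entry (attribute names and IDs below are made up):

    # Illustrative only; attribute names and cbId values are made up.
    example_input = {
        "connections": ["outMesh", "inputGeometry"],        # source attr, destination attr
        "sourceID": "59f0f3a1b1c2d3e4f5a6b7c8:aaa111",       # cbId of the driving shape
        "destinationID": "59f0f3a1b1c2d3e4f5a6b7c8:bbb222"   # cbId of the Yeti input shape
    }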

@@ -17,36 +17,40 @@ def disconnected_attributes(settings, members):
try:
for input in settings["inputs"]:
# get source
socket_id = input["socketID"]
sources = lib.lsattr("cbId", socket_id)
sources = [i for i in sources if
# Get source shapes
source_nodes = lib.lsattr("cbId", input["sourceID"])
sources = [i for i in source_nodes if
not cmds.referenceQuery(i, isNodeReferenced=True)
and i in members]
src = sources[0]
source = sources[0]
# get destination
plug_id = input["plugID"]
plugs = lib.lsattr("cbId", plug_id)
destinations = [i for i in plugs if i not in members and
i not in sources]
dst = destinations[0]
# Get destination shapes (the shapes used as hook up)
destination_nodes = lib.lsattr("cbId", input["destinationID"])
destinations = [i for i in destination_nodes if i not in members
and i not in sources]
destination = destinations[0]
# break connection
# Break connection
connections = input["connections"]
src_attribute = "%s.%s" % (src, connections[0])
dst_attribute = "%s.%s" % (dst, connections[1])
src_attribute = "%s.%s" % (source, connections[0])
dst_attribute = "%s.%s" % (destination, connections[1])
# store connection pair
original_connection.append([src_attribute, dst_attribute])
cmds.disconnectAttr(dst_attribute, src_attribute)
yield
if not cmds.isConnected(src_attribute, dst_attribute):
continue
cmds.disconnectAttr(src_attribute, dst_attribute)
original_connection.append([src_attribute, dst_attribute])
yield
finally:
# restore connections
for connection in original_connection:
src, dest = connection
cmds.connectAttr(dest, src)
try:
cmds.connectAttr(dest, src)
except Exception as e:
print e,
continue
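
For context, the extractor below wraps its export in this context manager; a minimal usage sketch, assuming settings and members have been gathered in process() as shown further down:

    with disconnected_attributes(settings, members):
        # The connections listed in settings["inputs"] are broken inside this
        # block so the rig can be exported in isolation; the finally-block
        # restores them afterwards.
        pass  # export / extraction happens here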
class ExtractYetiRig(colorbleed.api.Extractor):
@@ -96,9 +100,11 @@ class ExtractYetiRig(colorbleed.api.Extractor):
attr_value = {"%s.imageSearchPath" % n: image_search_path for
n in yeti_nodes}
# get input_SET members
# Get input_SET members
input_set = [i for i in instance if i == "input_SET"]
members = cmds.sets(input_set[0], query=True)
# Get all items
members = cmds.listRelatives(members, ad=True, fullPath=True)
nodes = instance.data["setMembers"]
with disconnected_attributes(settings, members):