Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 12:54:40 +01:00)
commit 8ce4cb8499
27 changed files with 692 additions and 1485 deletions
@@ -1,11 +1,9 @@
# absolute_import is needed to counter the `module has no cmds error` in Maya
from __future__ import absolute_import

import os
import uuid

from maya import cmds

import pyblish.api
@@ -164,7 +162,7 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
        instance = result["instance"]
        errored_instances.append(instance)

        # Apply pyblish.logic to get the instances for the plug-in
        # Apply pyblish logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)

        # Get the nodes from the all instances that ran through this plug-in
@@ -178,78 +176,34 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
            self.log.info("No invalid nodes found.")
            return

        # Ensure unique (process each node only once)
        # Ensure unique ( process each node only once )
        invalid = list(set(invalid))

        # Parse context from current file
        self.log.info("Parsing current context..")
        print(">>> DEBUG CONTEXT :", context)
        print(">>> DEBUG CONTEXT DATA:", context.data)
        self.log.info("Updating node IDs ...")
        # Update the attributes
        self._update_id_attribute(invalid)

        # # Generate and add the ids to the nodes
        node_ids = self.generate_ids(context, invalid)
        self.apply_ids(node_ids)
        self.log.info("Generated ids on nodes: {0}".format(invalid))

    def get_context(self, instance=None):
    def _update_id_attribute(self, nodes):
        """Delete the id attribute

        PROJECT = os.environ["AVALON_PROJECT"]
        ASSET = instance.data.get("asset") or os.environ["AVALON_ASSET"]
        SILO = os.environ["AVALON_SILO"]
        LOCATION = os.getenv("AVALON_LOCATION")

        return {"project": PROJECT,
                "asset": ASSET,
                "silo": SILO,
                "location": LOCATION}

    def generate_ids(self, context, nodes):
        """Generate cb UUIDs for nodes.

        The identifiers are formatted like:
            assets:character/test:bluey:46D221D9-4150-8E49-6B17-43B04BFC26B6

        This is a concatenation of:
            - entity (shots or assets)
            - folders (parent hierarchy)
            - asset (the name of the asset)
            - uuid (unique id for node in the scene)

        Raises:
            RuntimeError: When context can't be parsed of the current asset

        Returns:
            dict: node, uuid dictionary

        """

        # Make a copy of the context
        data = context.copy()

        # Define folders

        node_ids = dict()
        for node in nodes:
            # Generate a unique ID per node
            data['uuid'] = uuid.uuid4()
            unique_id = "{asset}:{item}:{uuid}".format(**data)
            node_ids[node] = unique_id

        return node_ids

    def apply_ids(self, node_ids):
        """Apply the created unique IDs to the node

        Args:
            node_ids (dict): each node with a unique id

        Returns:
            None
            nodes (list): all nodes to remove the attribute from
        """

        attribute = "mbId"
        for node, id in node_ids.items():
            # check if node has attribute
            if not cmds.attributeQuery(attribute, node=node, exists=True):
                cmds.addAttr(node, longName=attribute, dataType="string")
        for node in nodes:

            # get the database asset id
            attr = "{}.cbId".format(node)
            id_attr = cmds.getAttr(attr)
            asset_id = id_attr.split(":")[0]

            # create a new unique id
            _, uid = str(uuid.uuid4()).rsplit("-", 1)
            cb_uid = "{}:{}".format(asset_id, uid)

            # set the new id
            cmds.setAttr(attr, cb_uid, type="string")

            cmds.setAttr("{}.{}".format(node, attribute), id)
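A note for readers skimming the hunk above: both the old generate_ids and the new _update_id_attribute reduce to composing a namespaced string of the form <asset database id>:<per-node uuid suffix>. A minimal, Maya-free sketch of that scheme, using a hypothetical asset id, for reference:

import uuid

def make_cb_id(asset_id):
    # Keep only the last block of a uuid4 as the per-node suffix,
    # mirroring the rsplit("-", 1) seen in the hunk above.
    _, suffix = str(uuid.uuid4()).rsplit("-", 1)
    return "{}:{}".format(asset_id, suffix)

def split_cb_id(cb_id):
    # The database asset id is everything before the first colon.
    asset_id, _, node_suffix = cb_id.partition(":")
    return asset_id, node_suffix

example = make_cb_id("5afcc35b1429301a8c95f6b0")  # hypothetical ObjectId string
print(split_cb_id(example))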
@@ -1,2 +0,0 @@
accepted_images_types = [".png", ".jpg", ".tga", ".tiff"]
ignored_images_types = [".pds"]
@@ -70,14 +70,14 @@ def _copy_uuid(source, target):
    cmds.setAttr(target_attr, attribute_value, type="string")


def on_init():
def on_init(_):
    avalon.logger.info("Running callback on init..")

    maya.commands.reset_frame_range()
    maya.commands.reset_resolution()


def on_new():
def on_new(_):
    avalon.logger.info("Running callback on new..")

    # Load dependencies
@@ -88,7 +88,7 @@ def on_new():
    maya.commands.reset_resolution()


def on_save():
def on_save(_):
    """Automatically add IDs to new nodes

    Any transform of a mesh, without an existing ID,
    is given one automatically on file save.
@@ -96,35 +96,31 @@ def on_save():

    avalon.logger.info("Running callback on save..")

    defaults = ["initialShadingGroup", "initialParticleSE"]
    # establish set of nodes to ignore
    ignore = set(["initialShadingGroup", "initialParticleSE"])
    ignore |= set(cmds.ls(long=True, readOnly=True))
    ignore |= set(cmds.ls(long=True, lockedNodes=True))

    # the default items which always want to have an ID
    types = ["mesh", "shadingEngine", "file", "nurbsCurve"]
    types = ["shadingEngine", "file", "mesh", "nurbsCurve"]

    # the items which need to pass the id to their parent
    nodes = (set(cmds.ls(type=types, long=True)) -
             set(cmds.ls(long=True, readOnly=True)) -
             set(cmds.ls(long=True, lockedNodes=True)))
    nodes = set(cmds.ls(type=types, long=True))

    transforms = set()
    for n in cmds.ls(type=types, long=True):
        # pass id to parent of node if in subtypes
        relatives = cmds.listRelatives(n, parent=True, fullPath=True)
        if not relatives:
            continue
    # Add the collected transform to the nodes
    transforms = cmds.listRelatives(list(nodes),
                                    parent=True,
                                    fullPath=True) or []

        for r in cmds.listRelatives(n, parent=True, fullPath=True):
            transforms.add(r)
    nodes |= set(transforms)

    # merge transforms and nodes in one set to make sure every item
    # is unique
    nodes |= transforms
    # Remove the ignored nodes
    nodes -= ignore

    # Lead with asset ID from the database
    asset = os.environ["AVALON_ASSET"]
    asset_id = io.find_one({"type": "asset", "name": asset})

    # generate the ids
    for node in nodes:
        if node in defaults:
            continue
        _set_uuid(str(asset_id["_id"]), node)
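The new on_save body above is, at heart, three set operations: collect candidate shapes by type, union in their parent transforms, and subtract the ignore set (defaults, read-only, and locked nodes). A small sketch of that flow with plain Python sets and placeholder data standing in for the live cmds.ls queries:

# Hypothetical stand-ins for the cmds.ls(...) queries in the diff above.
shapes = {"|char|body|bodyShape", "|env|props|propShape", "|lockedSG"}
parents = {"|char|body", "|env|props"}      # cmds.listRelatives(..., parent=True)
read_only = {"|lockedSG"}                    # cmds.ls(readOnly=True)
locked = set()                               # cmds.ls(lockedNodes=True)
defaults = {"initialShadingGroup", "initialParticleSE"}

ignore = defaults | read_only | locked

nodes = set(shapes)
nodes |= parents      # transforms also receive an id
nodes -= ignore       # drop nodes we must never touch

print(sorted(nodes))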
@@ -1,637 +0,0 @@
"""Used for scripting

These are used in other scripts and mostly require explicit input,
such as which specific nodes they apply to.

For interactive use, see :mod:`interactive.py`

"""

import sys

from maya import cmds

from . import lib

if sys.version_info[0] == 3:
    basestring = str

# Flags
LocalSpace = 1 << 0
WorldSpace = 1 << 1


def auto_connect2(src, dst):
    """Connect to `dst` based on what `dst` is and `src` has available

    TODO: Offer optionbox of choices when multiple inputs are possible.
        For example, connecting a mesh to a wrap node could either
        go to driverMesh, or baseMesh.

    """

    to_from = {
        "mesh": (
            ["mesh", (".outMesh", ".inMesh")],
        ),
        "nurbsSurface": (
            ["nurbsSurface", (".local", ".create")],
        ),
        "nurbsCurve": (
            ["nurbsCurve", (".local", ".create")],
        ),
        "decomposeMatrix": (
            ["transform", (".worldMatrix", ".inputMatrix")],
        ),
        "transform": (
            [
                "transform", (
                    (".translate", ".rotate", ".scale"),
                    (".translate", ".rotate", ".scale"))
            ],
            [
                "decomposeMatrix", (
                    (".outTranslate", ".outRotate", ".outScale"),
                    (".translate", ".rotate", ".scale"))
            ],
        ),
        "objectSet": (
            ["dagNode", (".message", ".dagSetMembers")],
            ["entity", (".message", ".dnSetMembers")],
        ),
    }

    support = next(
        (to_from[to] for to in to_from
         if to in cmds.nodeType(dst, inherited=True)), None
    )

    if not support:
        # Guess, based on available inputs,
        # what is the closest match
        print("Guessing..")
        pass

    assert support, "No supported outputs for '%s'" % (cmds.nodeType(src))

    out_, in_ = next(
        (typ for typ in support
         if typ[0] in cmds.nodeType(src, inherited=True)), (None, None)
    )

    assert in_ and out_, "No matching attributes found for %s" % src

    if not isinstance(in_, tuple):
        in_ = (in_,)

    if not isinstance(out_, tuple):
        out_ = (out_,)

    assert len(in_) == len(out_)

    map(lambda io: cmds.connectAttr(src + io[0],
                                    dst + io[1],
                                    force=True), zip(out_, in_))


def auto_connect(src, dst):
    """Connect `src` to `dst` via the most likely input and output

    Usage:
        >>> # Create cube and transfer mesh into new shape
        >>> shape = cmds.createNode("mesh", name="newShape")
        >>> transform, generator = cmds.polyCube(name="original")
        >>> auto_connect(generator, shape)
        >>> cmds.delete(transform)

    """

    out_ = {
        "mesh": ".outMesh",
        "nurbsSurface": ".local",
        "nurbsCurve": ".local",
        "decomposeMatrix": (".outTranslate",
                            ".outRotate",
                            ".outScale"),
        "transform": (".translate",
                      ".rotate",
                      ".scale",
                      ".visibility")
    }

    in_ = {
        "mesh": ".inMesh",
        "nurbsSurface": ".create",
        "nurbsCurve": ".create",
        "decomposeMatrix": "inputMatrix",
        "transform": (".translate",
                      ".rotate",
                      ".scale",
                      ".visibility"),
        "objectSet": ["dnSetMembers", "dgSetMembers"]
    }

    try:
        in_ = in_[cmds.nodeType(dst)]
    except KeyError:
        in_ = next((attr for attr in (".input",
                                      ".inputGeometry")
                    if cmds.objExists(dst + attr)), None)

    try:
        out_ = out_[cmds.nodeType(src)]
    except KeyError:
        out_ = next((attr for attr in (".output",
                                       ".outputGeometry")
                     if cmds.objExists(src + attr)), None)

    assert in_ and out_, "No matching attributes found for %s" % src

    if not isinstance(in_, tuple):
        in_ = (in_,)

    if not isinstance(out_, tuple):
        out_ = (out_,)

    assert len(in_) == len(out_)

    map(lambda io: cmds.connectAttr(src + io[0],
                                    dst + io[1],
                                    force=True), zip(out_, in_))


@lib.maintained_selection
def match_transform(src, dst):
    """Transform `src` to `dst`, taking worldspace into account

    Arguments:
        src (str): Absolute path to source transform
        dst (str): Absolute path to destination transform

    """

    try:
        parent = cmds.listRelatives(src, parent=True)[0]
    except Exception:
        parent = None

    node_decompose = cmds.createNode("decomposeMatrix")
    node_multmatrix = cmds.createNode("multMatrix")

    connections = {
        dst + ".worldMatrix": node_multmatrix + ".matrixIn[0]",
        node_multmatrix + ".matrixSum": node_decompose + ".inputMatrix",
        node_decompose + ".outputTranslate": src + ".translate",
        node_decompose + ".outputRotate": src + ".rotate",
        node_decompose + ".outputScale": src + ".scale",
    }

    if parent:
        connections.update({
            parent + ".worldInverseMatrix": node_multmatrix + ".matrixIn[1]"
        })

    for s, d in connections.iteritems():
        cmds.connectAttr(s, d, force=True)

    cmds.refresh()

    cmds.delete([node_decompose, node_multmatrix])


def connect_shapes(src, dst):
    """Connect geometry of `src` to source geometry of dst

    Arguments:
        src (str): Name of source shape
        dst (list): Names of destination nodes

    """

    out_attr = None

    if cmds.nodeType(src) == "mesh":
        out_attr = ".outMesh"

    elif cmds.nodeType(src) in ("nurbsSurface", "nurbsCurve"):
        out_attr = ".local"

    else:
        for wildcard in (".output",):
            if cmds.objExists(src + wildcard):
                out_attr = wildcard
                break

    if not out_attr:
        return cmds.warning("Could not detect output of %s" % src)

    for target in dst:
        in_attr = None

        if cmds.nodeType(target) == "mesh":
            in_attr = ".inMesh"

        elif cmds.nodeType(target) in ("nurbsSurface", "nurbsCurve"):
            in_attr = ".create"

        else:
            # Support unspecific nodes with common input attributes
            for support, wildcard in (("mesh", ".inputPolymesh"),
                                      ("mesh", ".inputMesh"),
                                      ("mesh", ".inputGeometry")):
                if cmds.objExists(target + wildcard):
                    if not cmds.nodeType(src) == support:
                        cmds.warning("Could not connect: %s -> %s" % (src,
                                                                      target))
                        break

                    in_attr = wildcard
                    break

        if not in_attr:
            cmds.warning("Could not detect input of %s" % target)
            continue

        try:
            cmds.connectAttr(src + out_attr,
                             target + in_attr,
                             force=True)
        except Exception as e:
            cmds.warning("Could not connect: %s%s -> %s%s (%s)" % (
                src, out_attr,
                target, in_attr, e)
            )


def connect_transform(driver, driven, source=WorldSpace, compensate=False):
    """Connect translation, rotation and scale via decomposeMatrix

    Arguments:
        driver (str): Absolute path to driver
        driven (str): Absolute path to driven
        source (str, optional): Either WorldSpace or LocalSpace,
            default WorldSpace
        compensate (bool, optional): Whether or not to take into account
            the current transform, default False.

    Returns:
        output (list): Newly created nodes

    """

    outputattr = ".matrix" if source == LocalSpace else ".worldMatrix[0]"

    assert cmds.objExists(driver), "%s not found" % driver
    assert cmds.objExists(driven), "%s not found" % driven

    decompose = driver + "_decompose"
    output = [decompose]

    if not cmds.objExists(decompose):
        decompose = cmds.createNode("decomposeMatrix", name=decompose)

        if compensate:

            multMatrix = cmds.createNode(
                "multMatrix", name=driver + "_multMatrix")

            # Compensate for drivens parentMatrix.
            cmds.connectAttr(driver + outputattr,
                             multMatrix + ".matrixIn[0]")
            cmds.connectAttr(driven + ".parentInverseMatrix",
                             multMatrix + ".matrixIn[1]")
            cmds.connectAttr(multMatrix + ".matrixSum",
                             decompose + ".inputMatrix")

            output.append(multMatrix)
        else:
            cmds.connectAttr(driver + outputattr,
                             decompose + ".inputMatrix")

    # Drive driven with compensated driver.
    cmds.connectAttr(decompose + ".outputTranslate", driven + ".t")
    cmds.connectAttr(decompose + ".outputRotate", driven + ".r")
    cmds.connectAttr(decompose + ".outputScale", driven + ".s")

    return output


def clone(shape, worldspace=False):
    """Clone `shape`

    Arguments:
        shape (str): Absolute path to shape
        worldspace (bool, optional): Whether or not to consider worldspace

    Returns:
        node (str): Newly created clone

    """

    type = cmds.nodeType(shape)
    assert type in ("mesh", "nurbsSurface", "nurbsCurve"), (
        "clone() works on polygonal and nurbs surfaces")

    src, dst = {
        "mesh": (".outMesh", ".inMesh"),
        "nurbsSurface": (".local", ".create"),
        "nurbsCurve": (".local", ".create"),
    }[type]

    nodetype = cmds.nodeType(shape)

    name = lib.unique(name=shape.rsplit("|")[-1])
    clone = cmds.createNode(nodetype, name=name)

    cmds.connectAttr(shape + src, clone + dst, force=True)

    if worldspace:
        transform = cmds.createNode("transformGeometry",
                                    name=name + "_transformGeometry")

        cmds.connectAttr(shape + src,
                         transform + ".inputGeometry", force=True)
        cmds.connectAttr(shape + ".worldMatrix[0]",
                         transform + ".transform", force=True)
        cmds.connectAttr(transform + ".outputGeometry",
                         clone + dst, force=True)

    # Assign default shader
    cmds.sets(clone, addElement="initialShadingGroup")

    return clone


def combine(nodes):
    """Produce a new mesh with the contents of `nodes`

    Arguments:
        nodes (list): Path to shapes

    """

    unite = cmds.createNode("polyUnite", n=nodes[0] + "_polyUnite")

    count = 0
    for node in nodes:
        # Are we dealing with transforms, or shapes directly?
        shapes = cmds.listRelatives(node, shapes=True) or [node]

        for shape in shapes:
            try:
                cmds.connectAttr(shape + ".outMesh",
                                 unite + ".inputPoly[%s]" % count, force=True)
                cmds.connectAttr(shape + ".worldMatrix",
                                 unite + ".inputMat[%s]" % count, force=True)
                count += 1

            except Exception:
                cmds.warning("'%s' is not a polygonal mesh" % shape)

    if count:
        output = cmds.createNode("mesh", n=nodes[0] + "_combinedShape")
        cmds.connectAttr(unite + ".output", output + ".inMesh", force=True)
        return output

    else:
        cmds.delete(unite)
        return None


def transfer_outgoing_connections(src, dst):
    """Connect outgoing connections from `src` to `dst`

    Connections that cannot be made are ignored.

    Arguments:
        src (str): Absolute path to source node
        dst (str): Absolute path to destination node

    """

    for destination in cmds.listConnections(src,
                                            source=False,
                                            plugs=True) or []:
        for source in cmds.listConnections(destination,
                                           destination=False,
                                           plugs=True) or []:
            try:
                cmds.connectAttr(source.replace(src, dst),
                                 destination, force=True)
            except RuntimeError:
                continue


def parent_group(source, transferTransform=True):
    """Create and transfer transforms to parent group"""
    assert cmds.objExists(source), "%s does not exist" % source
    assert cmds.nodeType(source) == "transform", (
        "%s must be transform" % source)

    parent = cmds.listRelatives(source, parent=True)

    if transferTransform:
        group = cmds.createNode("transform", n="%s_parent" % source)
        match_transform(group, source)

        try:
            cmds.parent(source, group)
        except Exception:
            cmds.warning("Failed to parent child under new parent")
            cmds.delete(group)

        if parent:
            cmds.parent(group, parent[0])

    else:
        cmds.select(source)
        group = cmds.group(n="%s_parent" % source)

    return group


def _output_node(source, type, suffix):
    newname = lib.unique(name=source.rsplit("_", 1)[0] + suffix)

    node = cmds.createNode(type)
    node = [cmds.listRelatives(node, parent=True) or node][0]
    node = cmds.rename(node, newname)

    try:
        cmds.parent(node, source)
        match_transform(node, source)

    except Exception:
        cmds.warning("Could not create %s" % node)
        cmds.delete(node)

    return node


def output_locator(source, suffix="_LOC"):
    """Create child locator

    Arguments:
        source (str): Parent node
        suffix (str): Suffix of output

    """

    return _output_node(source, "locator", suffix)


def output_joint(source, suffix="_JNT"):
    """Create child joint

    Arguments:
        source (str): Parent node
        suffix (str): Suffix of output

    """

    return _output_node(source, "joint", suffix)


def follicle(shape, u=0, v=0, name=""):
    """Attach follicle to "shape" at specified "u" and "v" values"""

    type = cmds.nodeType(shape)
    assert type in ("mesh", "nurbsSurface"), (
        "follicle() works on polygonal meshes and nurbs")

    src, dst = {
        "mesh": (".outMesh", ".inputMesh"),
        "nurbsSurface": (".local", ".inputSurface")
    }[type]

    follicle = cmds.createNode("follicle", name=name + "Shape")
    transform = cmds.listRelatives(follicle, parent=True)[0]

    cmds.setAttr(follicle + ".parameterU", u)
    cmds.setAttr(follicle + ".parameterV", v)

    cmds.connectAttr(follicle + ".outTranslate", transform + ".translate")
    cmds.connectAttr(follicle + ".outRotate", transform + ".rotate")
    cmds.connectAttr(shape + ".worldMatrix[0]", follicle + ".inputWorldMatrix")
    cmds.connectAttr(shape + src, follicle + dst, force=True)

    return transform


def connect_matching_attributes(source, target):
    """Connect matching attributes from source to target

    Arguments:
        source (str): Absolute path to node from which to connect
        target (str): Target node

    Example:
        >>> # Select two matching nodes
        >>> source = cmds.createNode("transform", name="source")
        >>> target = cmds.createNode("transform", name="target")
        >>> cmds.select([source, target], replace=True)
        >>> source, target = cmds.ls(selection=True)
        >>> connect_matching_attributes(source, target)

    """

    dsts = cmds.listAttr(target, keyable=True)
    for src in cmds.listAttr(source, keyable=True):
        if src not in dsts:
            continue

        try:
            src = "." + src
            cmds.connectAttr(source + src,
                             target + src,
                             force=True)
        except RuntimeError as e:
            cmds.warning("Could not connect %s: %s" % (src, e))


def create_ncloth(input_mesh):
    """Replace Create nCloth menu item

    This performs the identical option of nCloth -> Create nCloth
    with the following changes.

    1. Input mesh not made intermediate
    2. Current mesh and shape named "currentMesh"

    Arguments:
        input_mesh (str): Path to shape

    """

    assert cmds.nodeType(input_mesh) == "mesh", (
        "%s was not of type mesh" % input_mesh)

    nucleus = cmds.createNode("nucleus", name="nucleus1")
    ncloth = cmds.createNode("nCloth", name="nClothShape1")
    current_mesh = cmds.createNode("mesh", name="currentMesh")

    cmds.connectAttr(input_mesh + ".worldMesh[0]", ncloth + ".inputMesh")
    cmds.connectAttr(ncloth + ".outputMesh", current_mesh + ".inMesh")
    cmds.connectAttr("time1.outTime", nucleus + ".currentTime")
    cmds.connectAttr("time1.outTime", ncloth + ".currentTime")
    cmds.connectAttr(ncloth + ".currentState", nucleus + ".inputActive[0]")
    cmds.connectAttr(ncloth + ".startState", nucleus + ".inputActiveStart[0]")
    cmds.connectAttr(nucleus + ".outputObjects[0]", ncloth + ".nextState")
    cmds.connectAttr(nucleus + ".startFrame", ncloth + ".startFrame")

    # Assign default shader
    cmds.sets(current_mesh, addElement="initialShadingGroup")

    return current_mesh


def enhanced_parent(child, parent):
    if "shape" in cmds.nodeType(child, inherited=True):
        cmds.parent(relative=True, shape=True)
    else:
        cmds.parent(child, parent)


def auto_connect_assets(src, dst):
    """Attempt to automatically two assets

    Arguments:
        src (str): Name of source reference node
        dst (str): Name of destination reference node

    Raises:
        StopIteration on missing in_SET

    """

    in_set = None

    for node in cmds.referenceQuery(dst, nodes=True):
        if node.endswith("in_SET"):
            in_set = node
            break

    for input_transform in cmds.sets(in_set, query=True):
        mbid = cmds.getAttr(input_transform + ".cbId")
        input_shape = cmds.listRelatives(input_transform, shapes=True)[0]

        for output_transform in lib.lsattr("cbId", value=mbid):

            ref = cmds.referenceQuery(output_transform, referenceNode=True)
            if ref != src:
                continue

            print("Connecting %s -> %s" % (output_transform, input_transform))
            output_shape = cmds.listRelatives(output_transform, shapes=True)[0]

            try:
                auto_connect(output_transform, input_transform)
            except RuntimeError:
                # Already connected
                pass

            try:
                auto_connect(output_shape, input_shape)
            except RuntimeError:
                # Already connected
                pass
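Two details of the deleted module are worth flagging. The for s, d in connections.iteritems() loop in match_transform is Python 2 only, and under Python 3 the trailing map(lambda io: cmds.connectAttr(...), ...) calls are lazy and never execute unless consumed (a list(...) wrapper or a plain for loop would be needed). The core idea of auto_connect, resolving an output/input plug pair from the two node types before falling back to guessing, can be sketched without Maya; the tables below are hypothetical stand-ins for the real cmds.nodeType lookups:

# Hypothetical type tables standing in for cmds.nodeType(...) queries.
OUT_PLUGS = {"mesh": ".outMesh", "nurbsSurface": ".local", "nurbsCurve": ".local"}
IN_PLUGS = {"mesh": ".inMesh", "nurbsSurface": ".create", "nurbsCurve": ".create"}

def resolve_plugs(src_type, dst_type):
    """Return (output, input) attribute names, or None when no match."""
    out_attr = OUT_PLUGS.get(src_type)
    in_attr = IN_PLUGS.get(dst_type)
    if out_attr is None or in_attr is None:
        return None
    return out_attr, in_attr

print(resolve_plugs("mesh", "mesh"))    # ('.outMesh', '.inMesh')
print(resolve_plugs("camera", "mesh"))  # None; the caller falls back to guessing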
@@ -1,288 +0,0 @@
"""Interactive functionality

These depend on user selection in Maya, and may be used as-is. They
implement the functionality in :mod:`commands.py`.

Each of these functions take `*args` as argument, because when used
in a Maya menu an additional argument is passed with metadata about
what state the button was pressed in. None of this data is used here.

"""

from maya import cmds, mel
from . import commands, lib


def connect_shapes(*args):
    """Connect the first selection to the last selection(s)"""
    selection = cmds.ls(selection=True)

    src = selection.pop(0)
    commands.connect_shapes(src, dst=selection)


def combine(*args):
    """Combine currently selected meshes

    This differs from the default Maya combine in that it
    retains the original mesh and produces a new mesh with the result.

    """

    commands.combine(cmds.ls(sl=1))


def read_selected_channels(*args):
    """Return a list of selected channels in the Channel Box"""
    channelbox = mel.eval("global string $gChannelBoxName; "
                          "$temp=$gChannelBoxName;")
    return cmds.channelBox(channelbox,
                           query=True,
                           selectedMainAttributes=True) or []


def set_defaults(*args):
    """Set currently selected values from channel box to their default value

    If no channel is selected, default all keyable attributes.

    """

    for node in cmds.ls(selection=True):
        selected_channels = read_selected_channels()
        for channel in (selected_channels or
                        cmds.listAttr(node, keyable=True)):
            try:
                default = cmds.attributeQuery(channel,
                                              node=node,
                                              listDefault=True)[0]
            except Exception:
                continue

            else:
                cmds.setAttr(node + "." + channel, default)


def transfer_outgoing_connections(*args):
    """Connect outgoing connections from first to second selected node"""

    try:
        src, dst = cmds.ls(selection=True)
    except ValueError:
        return cmds.warning("Select source and destination nodes")

    commands.transfer_outgoing_connections(src, dst)


def clone_special(*args):
    """Clone in localspace, and preserve user-defined attributes"""

    for transform in cmds.ls(selection=True, long=True):
        if cmds.nodeType(transform) != "transform":
            cmds.warning("Skipping '%s', not a `transform`" % transform)
            continue

        shape = _find_shape(transform)
        type = cmds.nodeType(shape)

        if type not in ("mesh", "nurbsSurface", "nurbsCurve"):
            cmds.warning("Skipping '{transform}': cannot clone nodes "
                         "of type '{type}'".format(**locals()))
            continue

        cloned = commands.clone(shape, worldspace=False)
        new_transform = cmds.listRelatives(cloned,
                                           parent=True,
                                           fullPath=True)[0]

        new_transform = cmds.rename(new_transform,
                                    new_transform.rsplit(":", 1)[-1])

        for attr in cmds.listAttr(transform,
                                  userDefined=True) or list():
            try:
                cmds.addAttr(new_transform, longName=attr, dataType="string")
            except Exception:
                continue

            value = cmds.getAttr(transform + "." + attr)
            cmds.setAttr(new_transform + "." + attr, value, type="string")

        # Connect visibility
        cmds.connectAttr(transform + ".visibility",
                         new_transform + ".visibility")


def clone_worldspace(*args):
    return _clone(worldspace=True)


def clone_localspace(*args):
    return _clone(worldspace=False)


def _clone(worldspace=False):
    """Clone selected objects in viewport

    Arguments:
        worldspace (bool): Whether or not to append a transformGeometry to
            resulting clone.

    """

    clones = list()

    for node in cmds.ls(selection=True, long=True):
        shape = _find_shape(node)
        type = cmds.nodeType(shape)

        if type not in ("mesh", "nurbsSurface", "nurbsCurve"):
            cmds.warning("Skipping '{node}': cannot clone nodes "
                         "of type '{type}'".format(**locals()))
            continue

        cloned = commands.clone(shape, worldspace=worldspace)
        clones.append(cloned)

    if not clones:
        return

    # Select newly created transform nodes in the viewport
    transforms = list()

    for clone in clones:
        transform = cmds.listRelatives(clone, parent=True, fullPath=True)[0]
        transforms.append(transform)

    cmds.select(transforms, replace=True)


def _find_shape(element):
    """Return shape of given 'element'

    Supports components, meshes, and surfaces

    Arguments:
        element (str): Path to component, mesh or surface

    Returns:
        str of path if found, None otherwise

    """

    # Get either shape or transform, based on element-type
    node = cmds.ls(element, objectsOnly=True, long=True)[0]

    if cmds.nodeType(node) == "transform":
        try:
            return cmds.listRelatives(node, shapes=True, fullPath=True)[0]
        except IndexError:
            return cmds.warning("Could not find shape in %s" % element)
    else:
        return node


def connect_matching_attributes_from_selection(*args):
    try:
        source, target = cmds.ls(sl=True)
    except ValueError:
        raise ValueError("Select (1) source and (2) target nodes only.")

    return commands.connect_matching_attributes(source, target)


def auto_connect(*args):
    """Connect `src` to `dst` via the most likely input and output"""
    try:
        commands.auto_connect(*cmds.ls(selection=True))
    except TypeError:
        cmds.warning("Select only source and destination nodes.")


def create_ncloth():
    selection = cmds.ls(selection=True)[0]

    input_mesh = cmds.listRelatives(selection, shapes=True)[0]
    current_mesh = commands.create_ncloth(input_mesh)

    # Optionally append suffix
    comp = selection.rsplit("_", 1)
    suffix = ("_" + comp[-1]) if len(comp) > 1 else ""

    cmds.rename(current_mesh, "currentMesh%sShape" % suffix)

    # Mimic default nCloth command
    cmds.hide(selection)


def follicle(*args):
    supported = ["mesh", "nurbsSurface"]
    selection = cmds.ls(sl=1)

    new_follicles = []
    for sel in selection:
        uv = lib.uv_from_element(sel)

        geometry_shape = lib.shape_from_element(sel)
        geometry_transform = cmds.listRelatives(geometry_shape, parent=True)[0]

        # Figure out output connection
        inputs = [".inputMesh", ".inputSurface"]
        outputs = [".outMesh", ".local"]

        failed = False
        type = cmds.nodeType(geometry_shape)
        if type not in supported:
            failed = True
            shapes = cmds.listRelatives(geometry_shape, shapes=True)

            if shapes:
                geometry_shape = shapes[0]
                type = cmds.nodeType(geometry_shape)
                if type in supported:
                    failed = False

        if failed:
            cmds.error("Skipping '%s': Type not accepted" % type)
            return

        input = inputs[supported.index(type)]
        output = outputs[supported.index(type)]

        # Make follicle
        follicle = cmds.createNode("follicle",
                                   name=geometry_transform + "_follicleShape1")
        follicle_transform = cmds.listRelatives(follicle, parent=True)[0]
        follicle_transform = cmds.rename(follicle_transform,
                                         geometry_transform + "_follicle1")

        # Set U and V value
        cmds.setAttr(follicle + ".parameterU", uv[0])
        cmds.setAttr(follicle + ".parameterV", uv[1])

        # Make the connections
        cmds.connectAttr(follicle + ".outTranslate",
                         follicle_transform + ".translate")
        cmds.connectAttr(follicle + ".outRotate",
                         follicle_transform + ".rotate")
        cmds.connectAttr(geometry_shape + output,
                         follicle + input)

        # Select last
        new_follicles.append(follicle_transform)

    # Select newly created follicles
    if new_follicles:
        cmds.select(new_follicles, r=1)

    return new_follicles


def auto_connect_assets(*args):
    references = cmds.ls(selection=True, type="reference")

    if not len(references) == 2:
        raise RuntimeError("Select source and destination "
                           "reference nodes, in that order.")

    return commands.auto_connect_assets(*references)
@@ -8,10 +8,10 @@ import logging
import contextlib
from collections import OrderedDict, defaultdict

from avalon import maya, io

from maya import cmds, mel

from avalon import maya, io


log = logging.getLogger(__name__)
@@ -21,6 +21,24 @@ project = io.find_one({"type": "project",
                      "_id": False})
TEMPLATE = project["config"]["template"]["publish"]

ATTRIBUTE_DICT = {"int": {"attributeType": "long"},
                  "str": {"dataType": "string"},
                  "unicode": {"dataType": "string"},
                  "float": {"attributeType": "double"},
                  "bool": {"attributeType": "bool"}}

SHAPE_ATTRS = ["castsShadows",
               "receiveShadows",
               "motionBlur",
               "primaryVisibility",
               "smoothShading",
               "visibleInReflections",
               "visibleInRefractions",
               "doubleSided",
               "opposite"]

SHAPE_ATTRS = set(SHAPE_ATTRS)


def maintained_selection(arg=None):
    if arg is not None:
@@ -619,36 +637,6 @@ def _get_id(node):
    return attribute_value


def filter_by_id(nodes, uuids):
    """Filter all nodes which match the UUIDs

    Args:
        nodes (list): collection of nodes to check
        uuids (list): a list of UUIDs which are linked to the shader

    Returns:
        list: matching nodes
    """

    filtered_nodes = []
    for node in nodes:
        if node is None:
            continue

        if not cmds.attributeQuery("cbId", node=node, exists=True):
            continue

        # Deformed shaped
        attr = "{}.cbId".format(node)
        attribute_value = cmds.getAttr(attr)
        if attribute_value not in uuids:
            continue

        filtered_nodes.append(node)

    return filtered_nodes


def get_representation_file(representation, template=TEMPLATE):
    """
    Rebuild the filepath of the representation's context
@@ -674,12 +662,68 @@ def get_reference_node(path):
    Returns:
        node (str): name of the reference node in question
    """
    node = cmds.file(path, query=True, referenceNode=True)
    try:
        node = cmds.file(path, query=True, referenceNode=True)
    except RuntimeError:
        log.debug('File is not referenced : "{}"'.format(path))
        return

    reference_path = cmds.referenceQuery(path, filename=True)
    if os.path.normpath(path) == os.path.normpath(reference_path):
        return node


def set_attribute(attribute, value, node):
    """Adjust attributes based on the value from the attribute data

    If an attribute does not exists on the target it will be added with
    the dataType being controlled by the value type.

    Args:
        attribute (str): name of the attribute to change
        value: the value to change to attribute to
        node (str): name of the node

    Returns:
        None
    """

    value_type = type(value).__name__
    kwargs = ATTRIBUTE_DICT[value_type]
    if not cmds.attributeQuery(attribute, node=node, exists=True):
        log.debug("Creating attribute '{}' on "
                  "'{}'".format(attribute, node))
        cmds.addAttr(node, longName=attribute, **kwargs)

    node_attr = "{}.{}".format(node, attribute)
    if "dataType" in kwargs:
        attr_type = kwargs["dataType"]
        cmds.setAttr(node_attr, value, type=attr_type)
    else:
        cmds.setAttr(node_attr, value)


def apply_attributes(attributes, nodes_by_id):
    """Alter the attributes to match the state when publishing

    Apply attribute settings from the publish to the node in the scene based
    on the UUID which is stored in the cbId attribute.

    Args:
        attributes (list): list of dictionaries
        nodes_by_id (dict): collection of nodes based on UUID
            {uuid: [node, node]}

    """

    for attr_data in attributes:
        nodes = nodes_by_id[attr_data["uuid"]]
        attr_value = attr_data["attributes"]
        for node in nodes:
            for attr, value in attr_value.items():
                set_attribute(attr, value, node)


def list_looks(asset_id):
    """Return all look subsets for the given asset
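The set_attribute helper added above hinges on ATTRIBUTE_DICT: the Python type name of the incoming value picks the cmds.addAttr keyword arguments, and only dataType attributes (strings) need an explicit type= flag on cmds.setAttr. A Maya-free sketch of that dispatch, with the table trimmed to the cases shown in the hunk:

# Stand-in for the ATTRIBUTE_DICT lookup in the diff above; maps the
# Python type name of a value to cmds.addAttr keyword arguments.
ATTRIBUTE_DICT = {"int": {"attributeType": "long"},
                  "str": {"dataType": "string"},
                  "float": {"attributeType": "double"},
                  "bool": {"attributeType": "bool"}}

def resolve_add_attr_kwargs(value):
    """Return the addAttr kwargs for `value`, or raise for unmapped types."""
    type_name = type(value).__name__
    try:
        return ATTRIBUTE_DICT[type_name]
    except KeyError:
        raise TypeError("No attribute mapping for type: %s" % type_name)

print(resolve_add_attr_kwargs(1.0))  # {'attributeType': 'double'}
print(resolve_add_attr_kwargs("a"))  # {'dataType': 'string'}, needs type= on setAttr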
@@ -739,8 +783,18 @@ def assign_look_by_version(nodes, version_id):
                               reference=True,
                               returnNewNodes=True)
    else:
        log.info("Reusing existing lookdev..")
        log.info("Reusing existing lookdev '{}'".format(reference_node))
        shader_nodes = cmds.referenceQuery(reference_node, nodes=True)
        namespace = cmds.referenceQuery(reference_node, namespace=True)

    # containerise like avalon (for manager)
    # give re
    context = {"representation": shader_file}
    subset_name = shader_file["context"]["subset"]
    maya.containerise(name=subset_name,
                      namespace=namespace,
                      nodes=shader_nodes,
                      context=context)

    # Assign relationships
    with open(shader_relation, "r") as f:
@@ -763,24 +817,33 @@ def assign_look(nodes, subset="lookDefault"):
    # Group all nodes per asset id
    grouped = defaultdict(list)
    for node in nodes:
        colorbleed_id = cmds.getAttr("{}.cbId".format(node))
        asset_id = colorbleed_id.split(":")[0]
        grouped[asset_id].append(node)
        colorbleed_id = _get_id(node)
        if not colorbleed_id:
            continue

        parts = colorbleed_id.split(":", 1)
        grouped[parts[0]].append(node)

    for asset_id, asset_nodes in grouped.items():
        # create objectId for database
        asset_id = bson.ObjectId(asset_id)
        subset = io.find_one({"type": "subset",
                              "name": subset,
                              "parent": asset_id})
        try:
            asset_id = bson.ObjectId(asset_id)
        except bson.errors.InvalidId:
            log.warning("Asset ID is not compatible with bson")
            continue
        subset_data = io.find_one({"type": "subset",
                                   "name": subset,
                                   "parent": asset_id})

        assert subset, "No subset found for {}".format(asset_id)
        if not subset_data:
            log.warning("No subset '{}' found for {}".format(subset, asset_id))
            continue

        # get last version
        version = io.find_one({"parent": subset['_id'],
        version = io.find_one({"parent": subset_data['_id'],
                               "type": "version",
                               "data.families":
                                   {"$in":["colorbleed.lookdev"]}
                                   {"$in": ["colorbleed.lookdev"]}
                               },
                              sort=[("name", -1)],
                              projection={"_id": True})
|
@ -792,48 +855,56 @@ def assign_look(nodes, subset="lookDefault"):
|
|||
assign_look_by_version(asset_nodes, version['_id'])
|
||||
|
||||
|
||||
def apply_shaders(relationships, shader_nodes, nodes):
|
||||
"""Apply all shaders to the nodes based on the relationship data
|
||||
def apply_shaders(relationships, shadernodes, nodes):
|
||||
"""Link shadingEngine to the right nodes based on relationship data
|
||||
|
||||
Relationship data is constructed of a collection of `sets` and `attributes`
|
||||
`sets` corresponds with the shaderEngines found in the lookdev.
|
||||
Each set has the keys `name`, `members` and `uuid`, the `members`
|
||||
hold a collection of node information `name` and `uuid`.
|
||||
|
||||
Args:
|
||||
relationships (dict): shader to node relationships
|
||||
shader_nodes (list): shader network nodes
|
||||
nodes (list): nodes to assign to
|
||||
relationships (dict): relationship data
|
||||
shadernodes (list): list of nodes of the shading engine
|
||||
nodes (list): list of nodes to apply shader to
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
|
||||
attributes = relationships.get("attributes", [])
|
||||
shader_sets = relationships.get("sets", [])
|
||||
shading_engines = cmds.ls(shader_nodes, type="shadingEngine", long=True)
|
||||
assert len(shading_engines) > 0, ("Error in retrieving shading engine "
|
||||
|
||||
shading_engines = cmds.ls(shadernodes, type="shadingEngine", long=True)
|
||||
assert len(shading_engines) > 0, ("Error in retrieving shading engines "
|
||||
"from reference")
|
||||
|
||||
# Pre-filter nodes and shader nodes
|
||||
nodes_by_id = defaultdict(list)
|
||||
shader_nodes_by_id = defaultdict(list)
|
||||
# region compute lookup
|
||||
ns_nodes_by_id = defaultdict(list)
|
||||
for node in nodes:
|
||||
_id = _get_id(node)
|
||||
nodes_by_id[_id].append(node)
|
||||
ns_nodes_by_id[_get_id(node)].append(node)
|
||||
|
||||
for shader_node in shader_nodes:
|
||||
_id = _get_id(shader_node)
|
||||
shader_nodes_by_id[_id].append(shader_node)
|
||||
shading_engines_by_id = defaultdict(list)
|
||||
for shad in shading_engines:
|
||||
shading_engines_by_id[_get_id(shad)].append(shad)
|
||||
# endregion
|
||||
|
||||
# get all nodes which we need to link per shader
|
||||
# region assign
|
||||
for shader_set in shader_sets:
|
||||
# collect shading engine
|
||||
uuid = shader_set["uuid"]
|
||||
shading_engine = shader_nodes_by_id.get(uuid, [])
|
||||
# collect all unique IDs of the set members
|
||||
shader_uuid = shader_set["uuid"]
|
||||
member_uuids = [member["uuid"] for member in shader_set["members"]]
|
||||
|
||||
filtered_nodes = list()
|
||||
for uuid in member_uuids:
|
||||
filtered_nodes.extend(ns_nodes_by_id[uuid])
|
||||
|
||||
shading_engine = shading_engines_by_id[shader_uuid]
|
||||
assert len(shading_engine) == 1, ("Could not find the correct "
|
||||
"shading engine with cbId "
|
||||
"'{}'".format(uuid))
|
||||
|
||||
# collect members
|
||||
filtered_nodes = list()
|
||||
for member in shader_set["members"]:
|
||||
member_uuid = member["uuid"]
|
||||
members = nodes_by_id.get(member_uuid, [])
|
||||
filtered_nodes.extend(members)
|
||||
"'{}'".format(shader_uuid))
|
||||
|
||||
cmds.sets(filtered_nodes, forceElement=shading_engine[0])
|
||||
# endregion
|
||||
|
||||
apply_attributes(attributes, ns_nodes_by_id)
|
||||
|
|
|
|||
|
|
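The reworked apply_shaders above is a two-step lookup: index scene nodes and shading engines by their cbId, then for each entry in the relationship data's "sets" gather the member nodes and force-assign them to the matching engine. A sketch of that indexing-and-matching step with plain dictionaries; the node names and ids here are made up:

from collections import defaultdict

# Hypothetical relationship data in the shape the docstring above describes.
relationships = {
    "sets": [{"name": "metalSG", "uuid": "asset1:aaaa",
              "members": [{"name": "bodyShape", "uuid": "asset1:bbbb"}]}],
}

# Scene-side lookup: cbId -> nodes carrying that id.
nodes_by_id = defaultdict(list)
for node, cb_id in [("|char|bodyShape", "asset1:bbbb")]:
    nodes_by_id[cb_id].append(node)

for shader_set in relationships["sets"]:
    member_ids = [m["uuid"] for m in shader_set["members"]]
    members = [n for mid in member_ids for n in nodes_by_id[mid]]
    # In Maya this step would be: cmds.sets(members, forceElement=<shadingEngine>)
    print(shader_set["name"], "->", members)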
@@ -58,7 +58,7 @@
        "tooltip": ""
    },
    {
        "command": "$COLORBLEED_SCRIPTS\\modeling\\selectOutlineUI",
        "command": "$COLORBLEED_SCRIPTS\\modeling\\selectOutlineUI.py",
        "sourcetype": "file",
        "tags": [
            "modeling",
@@ -405,351 +405,351 @@
    "Rigging": [
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\addCurveBetween.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "addCurveBetween",
                "python"
                "file"
            ],
            "title": "Add Curve Between"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\averageSkinWeights.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "averageSkinWeights",
                "python"
                "file"
            ],
            "title": "Average Skin Weights"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\cbSmoothSkinWeightUI.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "cbSmoothSkinWeightUI",
                "python"
                "file"
            ],
            "title": "CB Smooth Skin Weight UI"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\channelBoxManagerUI.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "channelBoxManagerUI",
                "python"
                "file"
            ],
            "title": "Channel Box Manager UI"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\characterAutorigger.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "characterAutorigger",
                "python"
                "file"
            ],
            "title": "Character Auto Rigger"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\connectUI.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "connectUI",
                "python"
                "file"
            ],
            "title": "Connect UI"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\copySkinWeightsLocal.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "copySkinWeightsLocal",
                "python"
                "file"
            ],
            "title": "Copy Skin Weights Local"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\createCenterLocator.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "createCenterLocator",
                "python"
                "file"
            ],
            "title": "Create Center Locator"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\freezeTransformToGroup.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "freezeTransformToGroup",
                "python"
                "file"
            ],
            "title": "Freeze Transform To Group"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\groupSelected.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "groupSelected",
                "python"
                "file"
            ],
            "title": "Group Selected"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\ikHandlePoleVectorLocator.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "ikHandlePoleVectorLocator",
                "python"
                "file"
            ],
            "title": "IK Handle Pole Vector Locator"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\jointOrientUI.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "jointOrientUI",
                "python"
                "file"
            ],
            "title": "Joint Orient UI"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\jointsOnCurve.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "jointsOnCurve",
                "python"
                "file"
            ],
            "title": "Joints On Curve"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\resetBindSelectedSkinJoints.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "resetBindSelectedSkinJoints",
                "python"
                "file"
            ],
            "title": "Reset Bind Selected Skin Joints"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\selectSkinclusterJointsFromSelectedComponents.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "selectSkinclusterJointsFromSelectedComponents",
                "python"
                "file"
            ],
            "title": "Select Skincluster Joints From Selected Components"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\selectSkinclusterJointsFromSelectedMesh.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "selectSkinclusterJointsFromSelectedMesh",
                "python"
                "file"
            ],
            "title": "Select Skincluster Joints From Selected Mesh"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\setJointLabels.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "setJointLabels",
                "python"
                "file"
            ],
            "title": "Set Joint Labels"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\setJointOrientationFromCurrentRotation.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "setJointOrientationFromCurrentRotation",
                "python"
                "file"
            ],
            "title": "Set Joint Orientation From Current Rotation"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\setSelectedJointsOrientationZero.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "setSelectedJointsOrientationZero",
                "python"
                "file"
            ],
            "title": "Set Selected Joints Orientation Zero"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\mirrorCurveShape.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "mirrorCurveShape",
                "python"
                "file"
            ],
            "title": "Mirror Curve Shape"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\setRotationOrderUI.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "setRotationOrderUI",
                "python"
                "file"
            ],
            "title": "Set Rotation Order UI"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\paintItNowUI.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "paintItNowUI",
                "python"
                "file"
            ],
            "title": "Paint It Now UI"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\parentScaleConstraint.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "parentScaleConstraint",
                "python"
                "file"
            ],
            "title": "Parent Scale Constraint"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\quickSetWeightsUI.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "quickSetWeightsUI",
                "python"
                "file"
            ],
            "title": "Quick Set Weights UI"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\rapidRig.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "rapidRig",
                "python"
                "file"
            ],
            "title": "Rapid Rig"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\regenerate_blendshape_targets.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "regenerate_blendshape_targets",
                "python"
                "file"
            ],
            "title": "Regenerate Blendshape Targets"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\removeRotationAxis.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "removeRotationAxis",
                "python"
                "file"
            ],
            "title": "Remove Rotation Axis"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\resetBindSelectedMeshes.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "resetBindSelectedMeshes",
                "python"
                "file"
            ],
            "title": "Reset Bind Selected Meshes"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\simpleControllerOnSelection.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "simpleControllerOnSelection",
                "python"
                "file"
            ],
            "title": "Simple Controller On Selection"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\simpleControllerOnSelectionHierarchy.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "simpleControllerOnSelectionHierarchy",
                "python"
                "file"
            ],
            "title": "Simple Controller On Selection Hierarchy"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\superRelativeCluster.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "superRelativeCluster",
                "python"
                "file"
            ],
            "title": "Super Relative Cluster"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\tfSmoothSkinWeight.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "tfSmoothSkinWeight",
                "python"
                "file"
            ],
            "title": "TF Smooth Skin Weight"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\toggleIntermediates.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "toggleIntermediates",
                "python"
                "file"
            ],
            "title": "Toggle Intermediates"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\toggleSegmentScaleCompensate.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "toggleSegmentScaleCompensate",
                "python"
                "file"
            ],
            "title": "Toggle Segment Scale Compensate"
        },
        {
            "command": "$COLORBLEED_SCRIPTS\\rigging\\toggleSkinclusterDeformNormals.py",
            "sourcetype": "python",
            "sourcetype": "file",
            "tags": [
                "rigging",
                "toggleSkinclusterDeformNormals",
                "python"
                "file"
            ],
            "title": "Toggle Skincluster Deform Normals"
        }
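The long menu hunk above is one mechanical rewrite applied entry by entry: "sourcetype": "python" becomes "sourcetype": "file", and the matching "python" tag becomes "file". If such a migration were scripted rather than hand-edited, it could look roughly like this (hypothetical helper, not part of the repository):

import json

def migrate_sourcetype(entries):
    # Rewrite legacy 'python' sourcetypes and tags to 'file' in place.
    for entry in entries:
        if entry.get("sourcetype") == "python":
            entry["sourcetype"] = "file"
        entry["tags"] = ["file" if tag == "python" else tag
                         for tag in entry.get("tags", [])]
    return entries

menu = [{"command": "addCurveBetween.py", "sourcetype": "python",
         "tags": ["rigging", "addCurveBetween", "python"]}]
print(json.dumps(migrate_sourcetype(menu), indent=2))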
@@ -1007,12 +1007,16 @@
        ]
    },
    {
        "command": "$COLORBLEED_SCRIPTS\\shading\\LightLinkUI.py",
        "command": "$COLORBLEED_SCRIPTS\\shading\\autoLookdevAssignment.py",
        "sourcetype": "file",
        "tags": [
            "shading",
            "LightLinkUI"
        ],
        "tags": ["shading", "lookdev", "assign", "shaders", "auto"],
        "title": "Assign lookDefault Shader",
        "tooltip": "Assign the latest 'lookDefault' to assets without any lookdev in the scene"
    },
    {
        "command": "$COLORBLEED_SCRIPTS\\shading\\LightLinkUi.py",
        "sourcetype": "file",
        "tags": ["shading", "light", "link", "ui"],
        "title": "Light Link UI",
        "tooltip": ""
    },
@@ -1029,7 +1033,7 @@
        "tooltip": ""
    },
    {
        "command": "$COLORBLEED_SCRIPTS\\shading\\fixDefaultShaderSetBehavior",
        "command": "$COLORBLEED_SCRIPTS\\shading\\fixDefaultShaderSetBehavior.py",
        "sourcetype": "file",
        "tags": [
            "shading",
@ -1037,7 +1041,7 @@
        "DefaultShaderSet",
        "Behavior"
    ],
    "title": "fixDefaultShaderSetBehavior",
    "title": "Fix Default Shader Set Behavior",
    "tooltip": ""
},
{
@ -1147,7 +1151,7 @@
    "tooltip": ""
},
{
    "command": "",
    "command": "$COLORBLEED_SCRIPTS\\layout\\spPaint3d.py",
    "sourcetype": "file",
    "tags": ["layout", "spPaint3d", "paint", "tool"],
    "title": "SP Paint 3d",
@ -1232,60 +1236,42 @@
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "particles",
        "instancerToObjectsInstancesWithAnimation"
    ],
    "tags": ["particles", "instancerToObjectsInstancesWithAnimation"],
    "title": "instancerToObjectsInstancesWithAnimation",
    "tooltip": ""
},
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "particles",
        "objectsToParticles"
    ],
    "tags": ["particles", "objectsToParticles"],
    "title": "objectsToParticles",
    "tooltip": ""
},
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "particles",
        "add_particle_cacheFile_attrs"
    ],
    "tags": ["particles", "add_particle_cacheFile_attrs"],
    "title": "add_particle_cacheFile_attrs",
    "tooltip": ""
},
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "particles",
        "mergeParticleSystems"
    ],
    "tags": ["particles", "mergeParticleSystems"],
    "title": "mergeParticleSystems",
    "tooltip": ""
},
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "particles",
        "particlesToLocators"
    ],
    "tags": ["particles", "particlesToLocators"],
    "title": "particlesToLocators",
    "tooltip": ""
},
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "particles",
        "instancerToObjectsWithAnimation"
    ],
    "tags": ["particles", "instancerToObjectsWithAnimation"],
    "title": "instancerToObjectsWithAnimation",
    "tooltip": ""
},
@ -1314,80 +1300,56 @@
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "cleanup",
        "selectIntermediateObjects"
    ],
    "tags": ["cleanup", "selectIntermediateObjects"],
    "title": "selectIntermediateObjects",
    "tooltip": ""
},
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "cleanup",
        "resetViewportCache"
    ],
    "tags": ["cleanup", "resetViewportCache"],
    "title": "resetViewportCache",
    "tooltip": ""
},
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "cleanup",
        "selectNonUniqueNames"
    ],
    "tags": ["cleanup", "selectNonUniqueNames"],
    "title": "selectNonUniqueNames",
    "tooltip": ""
},
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "cleanup",
        "uniqifyNodeNames"
    ],
    "tags": ["cleanup", "uniqifyNodeNames"],
    "title": "uniqifyNodeNames",
    "tooltip": ""
},
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "cleanup",
        "selectByType"
    ],
    "tags": ["cleanup", "selectByType"],
    "title": "selectByType",
    "tooltip": ""
},
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "cleanup",
        "removeNamespaces"
    ],
    "tags": ["cleanup", "removeNamespaces"],
    "title": "removeNamespaces",
    "tooltip": ""
},
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "cleanup",
        "autoRenameFileNodes"
    ],
    "tags": ["cleanup", "autoRenameFileNodes"],
    "title": "autoRenameFileNodes",
    "tooltip": ""
},
{
    "command": "",
    "sourcetype": "file",
    "tags": [
        "cleanup",
        "remove_user_defined_attributes"
    ],
    "tags": ["cleanup", "remove_user_defined_attributes"],
    "title": "remove_user_defined_attributes",
    "tooltip": ""
},
@ -11,7 +11,7 @@ self = sys.modules[__name__]
self._menu = "colorbleed"

# set colorbleed scripts path in environment keys
os.environ["COLORBLEED_SCRIPTS"] = r"P:\pipeline\dev\git\cbMayaScripts\cbMayaScripts"
os.environ["COLORBLEED_SCRIPTS"] = "P:\pipeline\dev\git\cbMayaScripts\cbMayaScripts"

log = logging.getLogger(__name__)

@ -1,5 +1,6 @@
import tempfile
import pyblish.api
import avalon.maya


ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05

@ -1,4 +1,5 @@
from collections import OrderedDict

import avalon.maya
from colorbleed.maya import lib

@ -31,4 +32,4 @@ class CreateAnimation(avalon.maya.Creator):
        # frame range.
        data["visibleOnly"] = False

        self.data = data
        self.data = data
@ -23,11 +23,16 @@ class AbcLoader(api.Loader):
        # Create unique namespace for the cameras

        # Get name from asset being loaded
        assert "_" in name, "Naming convention not followed"
        assetname = "{}_".format(name.split("_")[0])
        namespace = maya.unique_namespace(assetname,
        # Assuming name is subset name from the animation, we split the number
        # suffix from the name to ensure the namespace is unique
        name = name.split("_")[0]
        namespace = maya.unique_namespace("{}_".format(name),
                                          format="%03d",
                                          suffix="_abc")

        # hero_001 (abc)
        # asset_counter{optional}

        nodes = cmds.file(self.fname,
                          namespace=namespace,
                          sharedReferenceFile=False,
@ -37,7 +42,6 @@ class AbcLoader(api.Loader):
                          returnNewNodes=True)

        # load colorbleed ID attribute

        self[:] = nodes

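
The namespace logic above strips the counter suffix from the subset name and asks avalon's `unique_namespace` for a scene-unique name such as `hero_001_abc`. For reference, a minimal standalone sketch of what that resolution amounts to; the `existing` set is a hypothetical stand-in for querying the scene's current namespaces, not avalon's actual implementation:

```python
# Sketch of counter-based unique-namespace resolution. `existing` is a
# made-up stand-in for a cmds.namespaceInfo query in a live Maya scene.
def unique_namespace(prefix, existing, format="%03d", suffix=""):
    counter = 1
    while True:
        candidate = "{0}{1}{2}".format(prefix, format % counter, suffix)
        if candidate not in existing:
            return candidate
        counter += 1

# "hero_001_abc" is taken, so the next load resolves to "hero_002_abc"
print(unique_namespace("hero_", {"hero_001_abc"}, suffix="_abc"))
```
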
@ -52,8 +56,6 @@ class CurvesLoader(api.Loader):
    icon = "question"

    def process(self, name, namespace, context, data):
        from maya import cmds
        from avalon import maya

        cmds.loadPlugin("atomImportExport.mll", quiet=True)

@ -30,8 +30,6 @@ class LookLoader(api.Loader):

        """


        # improve readability of the namespace
        assetname = context["asset"]["name"]
        ns_assetname = "{}_".format(assetname)
@ -71,63 +69,7 @@ class LookLoader(api.Loader):
        # Get all nodes which belong to a matching name space
        # Currently this is the safest way to get all the nodes
        namespace_nodes = self.get_namespace_nodes(assetname)
        lib.apply_shaders(relationships, nodes, namespace_nodes)
        # Pass empty list as nodes to assign to in order to only load
        lib.apply_shaders(relationships, nodes, [])

        self[:] = nodes

    def get_namespace_nodes(self, assetname):
        """
        Get all nodes of namespace `asset_*` and check if they have a shader
        assigned, if not add to list
        Args:
            context (dict): current context of asset

        Returns:
            list

        """

        # types = ["transform", "mesh"]
        list_nodes = []

        namespaces = cmds.namespaceInfo(listOnlyNamespaces=True)

        # remove basic namespaces
        namespaces.remove("UI")
        namespaces.remove("shared")

        for ns in namespaces:
            if not ns.startswith(assetname):
                continue
            # get reference nodes
            ns_nodes = cmds.namespaceInfo(ns, listOnlyDependencyNodes=True)
            # TODO: might need to extend the types
            # check if any nodes are connected to something else than lambert1
            list_nodes = cmds.ls(ns_nodes, long=True)
            unassigned_nodes = [self.has_default_shader(n) for n in list_nodes]
            nodes = [n for n in unassigned_nodes if n is not None]

            list_nodes.extend(nodes)

        return set(list_nodes)

    def has_default_shader(self, node):
        """Check if the nodes have `initialShadingGroup` shader assigned

        Args:
            node (str): node to check

        Returns:
            str
        """

        shaders = cmds.listConnections(node, type="shadingEngine") or []
        if "initialShadingGroup" in shaders:
            # return transform node
            transform = cmds.listRelatives(node, parent=True, type="transform",
                                           fullPath=True)
            if not transform:
                return []

            return transform[0]

@ -1,7 +1,7 @@
from maya import cmds
import maya.cmds as cmds

from avalon import api
from avalon import maya
import avalon.maya


class ModelLoader(api.Loader):
@ -10,22 +10,26 @@ class ModelLoader(api.Loader):
    families = ["colorbleed.model"]
    representations = ["ma"]

    label = "Reference model"
    label = "Reference Model"
    order = -10
    icon = "code-fork"
    color = "orange"

    def process(self, name, namespace, context, data):

        with maya.maintained_selection():
            nodes = cmds.file(
                self.fname,
                namespace=namespace,
                reference=True,
                returnNewNodes=True,
                groupReference=True,
                groupName="{}:{}".format(namespace, name)
            )
        # Create a readable namespace
        # Namespace should contain asset name and counter
        # TEST_001{_descriptor} where `descriptor` can be `_abc` for example
        assetname = "{}_".format(namespace.split("_")[0])
        namespace = avalon.maya.unique_namespace(assetname, format="%03d")

        with avalon.maya.maintained_selection():
            nodes = cmds.file(self.fname,
                              namespace=namespace,
                              reference=True,
                              returnNewNodes=True,
                              groupReference=True,
                              groupName="{}:{}".format(namespace, name))

        self[:] = nodes

@ -54,15 +58,13 @@ class ModelGPUCacheLoader(api.Loader):
        cmds.loadPlugin("gpuCache", quiet=True)

        # Create transform with shape
        transform = cmds.createNode("transform",
                                    name=name)
        cache = cmds.createNode("gpuCache",
                                parent=transform,
                                name="{0}Shape".format(name))
        node_name = "{0}Shape".format(name)
        transform = cmds.createNode("transform", name=name)
        cache = cmds.createNode("gpuCache", parent=transform, name=node_name)

        # Set the cache filepath
        cmds.setAttr(cache + '.cacheFileName', path, type="string")
        cmds.setAttr(cache + '.cacheGeomPath', "|", type="string")  # root
        cmds.setAttr('{}.cacheFileName'.format(cache), path, type="string")
        cmds.setAttr('{}.cacheGeomPath'.format(cache), "|", type="string")  # root

        # Select the transform
        cmds.select(transform, r=1)
@ -61,8 +61,8 @@ class RigLoader(api.Loader):
        else:
            asset = "{}".format(asset_name)

        cmds.select([output, controls], noExpand=True)
        with maya.maintained_selection():
            cmds.select([output, controls], noExpand=True)

        # TODO(marcus): Hardcoding the family here, better separate this.
        dependencies = [context["representation"]["_id"]]
@ -1,14 +1,20 @@
import re
import os

import maya.cmds as cmds

import pyblish.api
import colorbleed.api


class ValidateNamingConvention(pyblish.api.InstancePlugin):
class ValidateFileNameConvention(pyblish.api.InstancePlugin):

    label = ""
    families = ["colorbleed.model"]
    families = ["colorbleed.lookdev"]
    host = ["maya"]
    optional = True

    order = pyblish.api.ValidatorOrder
    actions = [colorbleed.api.SelectInvalidAction]

    @staticmethod
@ -18,9 +24,15 @@ class ValidateNamingConvention(pyblish.api.InstancePlugin):
        # todo: change pattern to company standard
        pattern = re.compile("[a-zA-Z]+_[A-Z]{3}")

        nodes = list(instance)
        nodes = cmds.ls(instance, type="file")
        for node in nodes:
            match = pattern.match(node)
            # get texture path
            texture = cmds.getAttr("{}.fileTextureName".format(node))
            if not texture:
                self.log.error("")
                invalid.append(node)
            filename = os.path.split(os.path.basename(texture))[0]
            match = pattern.match(filename)
            if not match:
                invalid.append(node)

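
The validator above now matches the texture's file name, rather than the node name, against the `[a-zA-Z]+_[A-Z]{3}` pattern. A standalone sketch of that check, runnable outside Maya; the sample paths are made up, and `os.path.splitext` is used here to isolate the base name without its extension:

```python
import os
import re

# Pattern from the plug-in: a name, an underscore, then a
# three-letter uppercase tag (e.g. "diffuse_COL").
pattern = re.compile("[a-zA-Z]+_[A-Z]{3}")

for path in ["/textures/diffuse_COL.png", "/textures/bad-name.png"]:
    # Isolate the file name without extension before matching
    filename = os.path.splitext(os.path.basename(path))[0]
    print(path, "->", "ok" if pattern.match(filename) else "invalid")
```
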
@ -2,17 +2,17 @@ import pyblish.api
import colorbleed.api


class ValidateLookNodeIds(pyblish.api.InstancePlugin):
class ValidateNodeIds(pyblish.api.InstancePlugin):
    """Validate nodes have colorbleed id attributes

    All look sets should have id attributes.

    """

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.look']
    label = 'Node Id Attributes'
    families = ['colorbleed.look', 'colorbleed.model']
    hosts = ['maya']
    label = 'Look Id Attributes'
    order = colorbleed.api.ValidatePipelineOrder
    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]

@ -20,14 +20,19 @@ class ValidateLookNodeIds(pyblish.api.InstancePlugin):
    def get_invalid(instance):
        import maya.cmds as cmds

        nodes = instance.data["lookSets"]
        nodes = instance.data["setMembers"]

        # Ensure all nodes have a cbId
        invalid = list()
        data_id = {}
        invalid = []
        for node in nodes:
            uuid = cmds.attributeQuery("mbId", node=node, exists=True)
            if not uuid:
                invalid.append(node)
            try:
                uuid = cmds.getAttr("{}.cbId".format(node))
                data_id[uuid] = node
                if uuid in data_id:
                    invalid.append(node)
            except RuntimeError:
                pass

        return invalid

@ -37,5 +42,5 @@ class ValidateLookNodeIds(pyblish.api.InstancePlugin):
        invalid = self.get_invalid(instance)

        if invalid:
            raise RuntimeError("Nodes found without "
            raise RuntimeError("Nodes found with invalid"
                               "asset IDs: {0}".format(invalid))
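
For comparison, a minimal standalone sketch of duplicate-id detection in which the membership check happens before the insertion (note that the added code above inserts into `data_id` first, so the ordering matters); the node names and id values are hypothetical:

```python
# Minimal duplicate-id detection: check membership before inserting,
# otherwise every node would report itself as a duplicate.
seen = {}
invalid = []
nodes = {"pCube1": "a1b2", "pCube2": "a1b2", "pSphere1": "c3d4"}  # made-up ids

for node, node_id in nodes.items():
    if node_id in seen:
        invalid.append(node)
    else:
        seen[node_id] = node

print(invalid)  # one of the two cubes, depending on dict ordering
```
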
@ -1,7 +1,6 @@
from maya import cmds

import pyblish.api

from cb.utils.maya import context, shaders
import cbra.utils.maya.node_uuid as id_utils

@ -120,8 +119,13 @@ class CollectLook(pyblish.api.InstancePlugin):
        instance.data["lookData"] = {"attributes": attributes,
                                     "relationships": sets.values(),
                                     "sets": looksets}
        # Collect textures
        resources = [self.collect_resource(n) for n in cmds.ls(type="file")]

        # Collect file nodes used by shading engines
        history = cmds.listHistory(looksets)
        files = cmds.ls(history, type="file", long=True)

        # Collect textures,
        resources = [self.collect_resource(n) for n in files]
        instance.data["resources"] = resources

        # Log a warning when no relevant sets were retrieved for the look.
@ -11,7 +11,7 @@ class CollectModelData(pyblish.api.InstancePlugin):
    """

    order = pyblish.api.CollectorOrder + 0.499
    label = 'Model Data'
    label = 'Collect Model Data'
    families = ["colorbleed.model"]

    def process(self, instance):
@ -39,26 +39,35 @@ class ExtractLook(colorbleed.api.Extractor):

        # Remove all members of the sets so they are not included in the
        # exported file by accident
        self.log.info("Extract sets (Maya ASCII)..")
        self.log.info("Extract sets (Maya ASCII) ...")
        lookdata = instance.data["lookData"]
        sets = lookdata["sets"]

        resources = instance.data["resources"]
        remap = {}
        for resource in resources:
            attr = resource['attribute']
            remap[attr] = resource['destination']

        self.log.info("Finished remapping destinations ...")

        # Extract in correct render layer
        layer = instance.data.get("renderlayer", "defaultRenderLayer")
        with context.renderlayer(layer):
            # TODO: Ensure membership edits don't become renderlayer overrides
            with context.empty_sets(sets):
                with avalon.maya.maintained_selection():
                    cmds.select(sets, noExpand=True)
                    cmds.file(maya_path,
                              force=True,
                              typ="mayaAscii",
                              exportSelected=True,
                              preserveReferences=False,
                              channels=True,
                              constraints=True,
                              expressions=True,
                              constructionHistory=True)
                with context.attribute_values(remap):
                    with avalon.maya.maintained_selection():
                        cmds.select(sets, noExpand=True)
                        cmds.file(maya_path,
                                  force=True,
                                  typ="mayaAscii",
                                  exportSelected=True,
                                  preserveReferences=False,
                                  channels=True,
                                  constraints=True,
                                  expressions=True,
                                  constructionHistory=True)

        # Write the JSON data
        self.log.info("Extract json..")
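
The new `context.attribute_values(remap)` wrapper temporarily remaps file-node attributes to their publish destinations for the duration of the export. That helper is internal to the studio pipeline; below is a hedged, generic sketch of the same set-then-restore pattern, using a plain dict as a stand-in for `cmds.getAttr`/`cmds.setAttr`:

```python
from contextlib import contextmanager

@contextmanager
def attribute_values(values, scene):
    """Temporarily apply attribute values, restoring originals on exit.

    `scene` is a plain dict standing in for Maya's attribute store here.
    """
    originals = {attr: scene[attr] for attr in values}
    scene.update(values)
    try:
        yield
    finally:
        scene.update(originals)

scene = {"file1.fileTextureName": "/work/tex.png"}
with attribute_values({"file1.fileTextureName": "/publish/tex.png"}, scene):
    print(scene["file1.fileTextureName"])  # remapped during export
print(scene["file1.fileTextureName"])      # restored afterwards
```
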
@ -10,14 +10,17 @@ class ValidateUniqueNodeIds(pyblish.api.InstancePlugin):
    """Validate nodes have colorbleed id attributes"""

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.model']
    hosts = ['maya']
    label = 'Unique Id Attributes'
    hosts = ['maya']
    families = ['colorbleed.model',
                'colorbleed.lookdev',
                'colorbleed.rig']

    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]

    @staticmethod
    def get_invalid_dict(instance):
    @classmethod
    def get_invalid_dict(cls, instance):
        """Return a dictionary mapping of id key to list of member nodes"""

        uuid_attr = "cbId"
@ -25,18 +28,18 @@ class ValidateUniqueNodeIds(pyblish.api.InstancePlugin):
        # Collect each id with their members
        ids = defaultdict(list)
        for member in instance:
            has_attr = cmds.attributeQuery(uuid_attr, node=member, exists=True)
            if not has_attr:
            if not cmds.attributeQuery(uuid_attr, node=member, exists=True):
                continue
            mbid = cmds.getAttr("{}.{}".format(member, uuid_attr))
            ids[mbid].append(member)

            object_id = cmds.getAttr("{}.{}".format(member, uuid_attr))
            ids[object_id].append(member)

        # Skip those without IDs (if everything should have an ID that should
        # be another validation)
        ids.pop(None, None)

        # Take only the ids with more than one member
        invalid = dict((id, members) for id, members in ids.iteritems() if
        invalid = dict((_id, members) for _id, members in ids.iteritems() if
                       len(members) > 1)
        return invalid

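
For reference, the grouping pattern used by `get_invalid_dict` boils down to the following, with made-up node/id pairs (and `ids.items()` here instead of the Python 2-only `iteritems()`):

```python
from collections import defaultdict

# Group members by id, then keep only ids claimed by more than one node.
members = [("pCube1", "a1"), ("pCube2", "a1"), ("pSphere1", "b2"), ("grp1", None)]

ids = defaultdict(list)
for node, node_id in members:
    ids[node_id].append(node)

ids.pop(None, None)  # missing ids are another validator's concern
invalid = dict((_id, nodes) for _id, nodes in ids.items() if len(nodes) > 1)
print(invalid)  # {'a1': ['pCube1', 'pCube2']}
```
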
@ -61,3 +64,5 @@ class ValidateUniqueNodeIds(pyblish.api.InstancePlugin):
        if invalid:
            raise RuntimeError("Nodes found with non-unique "
                               "asset IDs: {0}".format(invalid))

colorbleed/plugins/publish/collect_resource_destination.py (new file, 94 lines)
@ -0,0 +1,94 @@
import pyblish.api
import os

import avalon.io as io


class CollectResourceDestination(pyblish.api.InstancePlugin):
    """This plug-ins displays the comment dialog box per default"""

    label = "Collect Resource Destination"
    order = pyblish.api.CollectorOrder + 0.499

    def process(self, instance):

        self.create_destination_template(instance)

        template_data = instance.data["assumedTemplateData"]
        template = instance.data["template"]

        mock_template = template.format(**template_data)

        # For now assume resources end up in a "resources" folder in the
        # published folder
        mock_destination = os.path.join(os.path.dirname(mock_template),
                                        "resources")

        # Clean the path
        mock_destination = os.path.abspath(os.path.normpath(mock_destination))

        # Define resource destination and transfers
        resources = instance.data.get("resources", list())
        transfers = instance.data.get("transfers", list())
        for resource in resources:

            # Add destination to the resource
            source_filename = os.path.basename(resource["source"])
            destination = os.path.join(mock_destination, source_filename)
            resource['destination'] = destination

            # Collect transfers for the individual files of the resource
            # e.g. all individual files of a cache or UDIM textures.
            files = resource['files']
            for fsrc in files:
                fname = os.path.basename(fsrc)
                fdest = os.path.join(mock_destination, fname)
                transfers.append([fsrc, fdest])

        instance.data["resources"] = resources
        instance.data["transfers"] = transfers

    def create_destination_template(self, instance):
        """Create a filepath based on the current data available

        Example template:
            {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
            {subset}.{representation}
        Args:
            instance: the instance to publish

        Returns:
            file path (str)
        """

        # get all the stuff from the database
        subset_name = instance.data["subset"]
        project_name = os.environ["AVALON_PROJECT"]

        project = io.find_one({"type": "project",
                               "name": project_name},
                              projection={"config": True})
        template = project["config"]["template"]["publish"]

        subset = io.find_one({"type": "subset",
                              "name": subset_name})

        # assume there is no version yet, we start at `1`
        version_number = 1
        if subset is not None:
            version = io.find_one({"type": "version",
                                   "parent": subset["_id"]},
                                  sort=[("name", -1)])
            # if there is a subset there ought to be version
            version_number += version["name"]

        template_data = {"root": os.environ["AVALON_ROOT"],
                         "project": project_name,
                         "silo": os.environ["AVALON_SILO"],
                         "asset": instance.data["asset"],
                         "subset": subset_name,
                         "version": version_number,
                         "representation": "TEMP"}

        instance.data["assumedTemplateData"] = template_data
        instance.data["template"] = template
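
To make the destination logic concrete, here is a small runnable sketch of how the assumed template data resolves to the "resources" folder next to the publish path; every value is an example, not real project data:

```python
import os

# The publish template as shown in the docstring; all values are examples.
template = ("{root}/{project}/{silo}/{asset}/publish/"
            "{subset}/v{version:0>3}/{subset}.{representation}")

template_data = {"root": "/projects", "project": "demo", "silo": "assets",
                 "asset": "hero", "subset": "modelDefault",
                 "version": 1, "representation": "TEMP"}

mock_template = template.format(**template_data)
resources = os.path.join(os.path.dirname(mock_template), "resources")
print(resources)
# e.g. /projects/demo/assets/hero/publish/modelDefault/v001/resources
# (the separator depends on the OS)
```
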
@ -1,15 +1,16 @@
import os
import logging
import shutil

import errno
import pyblish.api
from avalon import api, io
import colorbleed.filetypes as filetypes


log = logging.getLogger(__name__)


class PreIntegrateAsset(pyblish.api.InstancePlugin):
class IntegrateAsset(pyblish.api.InstancePlugin):
    """Resolve any dependency issies

    This plug-in resolves any paths which, if not updated might break
@ -20,7 +21,7 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):
    publish the shading network. Same goes for file dependent assets.
    """

    label = "Pre Intergrate Asset"
    label = "Integrate Asset"
    order = pyblish.api.IntegratorOrder
    families = ["colorbleed.model",
                "colorbleed.rig",
@ -33,6 +34,17 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):

    def process(self, instance):

        self.log.info("Integrating Asset in to the database ...")

        self.register(instance)
        self.integrate(instance)

        self.log.info("Removing temporary files and folders ...")
        stagingdir = instance.data["stagingDir"]
        shutil.rmtree(stagingdir)

    def register(self, instance):

        # Required environment variables
        PROJECT = os.environ["AVALON_PROJECT"]
        ASSET = instance.data.get("asset") or os.environ["AVALON_ASSET"]
@ -75,8 +87,12 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):

        self.log.debug("Establishing staging directory @ %s" % stagingdir)

        project = io.find_one({"type": "project"})
        asset = io.find_one({"name": ASSET})
        project = io.find_one({"type": "project"},
                              projection={"config.template.publish": True})

        asset = io.find_one({"type": "asset",
                             "name": ASSET,
                             "parent": project["_id"]})

        assert all([project, asset]), ("Could not find current project or "
                                       "asset '%s'" % ASSET)
@ -93,7 +109,17 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):
        if latest_version is not None:
            next_version += latest_version["name"]

        self.log.debug("Next version: %i" % next_version)
        self.log.info("Verifying version from assumed destination")

        assumed_data = instance.data["assumedTemplateData"]
        assumed_version = assumed_data["version"]
        if assumed_version != next_version:
            raise AttributeError("Assumed version 'v{0:03d}' does not match"
                                 "next version in database "
                                 "('v{1:03d}')".format(assumed_version,
                                                       next_version))

        self.log.debug("Next version: v{0:03d}".format(next_version))

        version_data = self.create_version_data(context, instance)
        version = self.create_version(subset=subset,
@ -125,7 +151,6 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):
        template_publish = project["config"]["template"]["publish"]

        representations = []
        traffic = []
        staging_content = os.listdir(stagingdir)
        for v, fname in enumerate(staging_content):

@ -134,13 +159,6 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):

            src = os.path.join(stagingdir, fname)
            dst = template_publish.format(**template_data)
            if v == 0:
                instance.data["versionFolder"] = os.path.dirname(dst)

            # Files to copy as if or to specific folder
            if ext in filetypes.accepted_images_types:
                dirname = os.path.dirname(dst)
                dst = os.path.join(dirname, fname)

            # Backwards compatibility
            if fname == ".metadata.json":
@ -148,7 +166,7 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):
                dst = os.path.join(dirname, fname)

            # copy source to destination (library)
            traffic.append([src, dst])
            instance.data["transfers"].append([src, dst])

            representation = {
                "schema": "avalon-core:representation-2.0",
@ -173,10 +191,53 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):

        # store data for database and source / destinations
        instance.data["representations"] = representations
        instance.data["traffic"] = traffic

        return representations

    def integrate(self, instance):
        """Register the representations and move the files

        Through the stored `representations` and `transfers`

        Args:
            instance: the instance to integrate
        """

        # get needed data
        traffic = instance.data["transfers"]
        representations = instance.data["representations"]

        self.log.info("Registering {} items".format(len(representations)))
        io.insert_many(representations)

        # moving files
        for src, dest in traffic:
            self.log.info("Copying file .. {} -> {}".format(src, dest))
            self.copy_file(src, dest)

    def copy_file(self, src, dst):
        """ Copy given source to destination

        Arguments:
            src (str): the source file which needs to be copied
            dst (str): the destination of the sourc file
        Returns:
            None
        """

        dirname = os.path.dirname(dst)
        try:
            os.makedirs(dirname)
        except OSError as e:
            if e.errno == errno.EEXIST:
                pass
            else:
                self.log.critical("An unexpected error occurred.")
                raise

        shutil.copy(src, dst)

    def get_subset(self, asset, instance):

        subset = io.find_one({"type": "subset",
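
The `copy_file` helper above relies on the classic makedirs-then-ignore-EEXIST idiom for creating the destination folder. A standalone sketch of that idiom, assuming nothing beyond the standard library; on Python 3 the whole try/except collapses into `os.makedirs(path, exist_ok=True)`:

```python
import errno
import os

def ensure_dir(path):
    """Create a directory, tolerating that it already exists (EAFP style)."""
    try:
        os.makedirs(path)
    except OSError as e:
        # Re-raise anything other than "already exists"
        if e.errno != errno.EEXIST:
            raise

ensure_dir("/tmp/publish/v001")  # safe to call repeatedly
```
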
@ -1,80 +0,0 @@
import os
import errno
import shutil

import pyblish.api
from avalon import io


class IntegrateAsset(pyblish.api.InstancePlugin):
    """Write to files and metadata

    This plug-in exposes your data to others by encapsulating it
    into a new version.

    Schema:
        Data is written in the following format.
         ____________________
        |                    |
        | version            |
        |  ________________  |
        | |                | |
        | | representation | |
        | |________________| |
        | |                | |
        | |      ...       | |
        | |________________| |
        |____________________|

    """

    label = "Integrate Asset"
    order = pyblish.api.IntegratorOrder + 0.1
    families = ["colorbleed.model",
                "colorbleed.rig",
                "colorbleed.animation",
                "colorbleed.camera",
                "colorbleed.lookdev",
                "colorbleed.texture",
                "colorbleed.historyLookdev",
                "colorbleed.group"]

    def process(self, instance):

        # get needed data
        traffic = instance.data["traffic"]
        representations = instance.data["representations"]

        self.log.info("Registering {} items".format(len(representations)))
        io.insert_many(representations)

        # moving files
        for src, dest in traffic:
            self.log.info("Copying file .. {} -> {}".format(src, dest))
            self.copy_file(src, dest)

        self.log.info("Removing temporary files and folders ...")
        stagingdir = instance.data["stagingDir"]
        shutil.rmtree(stagingdir)

    def copy_file(self, src, dst):
        """ Copy given source to destination

        Arguments:
            src (str): the source file which needs to be copied
            dst (str): the destination of the sourc file
        Returns:
            None
        """

        dirname = os.path.dirname(dst)
        try:
            os.makedirs(dirname)
        except OSError as e:
            if e.errno == errno.EEXIST:
                pass
            else:
                self.log.critical("An unexpected error occurred.")
                raise

        shutil.copy(src, dst)
@ -1,40 +0,0 @@
import json
import os


import colorbleed.maya.lib as lib

import pyblish.api


class IntegrateAsset(pyblish.api.InstancePlugin):
    """Remap source paths for lookdev and textures

    """

    label = "Remap source paths"
    order = pyblish.api.IntegratorOrder + 0.15
    families = ["colorbleed.lookdev",
                "colorbleed.texture"]

    def process(self, instance):

        family = instance.data['family']
        resources = instance.data['resources']
        version_folder = instance.data['versionFolder']

        if family == "colorbleed.texture":
            try:
                lib.remap_resource_nodes(resources, folder=version_folder)
            except Exception as e:
                self.log.error(e)

        if family == "colorbleed.lookdev":
            try:
                tmp_dir = lib.maya_temp_folder()
                resource_file = os.path.join(tmp_dir, "resources.json")
                with open(resource_file, "r") as f:
                    resources = json.load(f)
                lib.remap_resource_nodes(resources)
            except Exception as e:
                self.log.error(e)

maya_environment.bat (new file, 70 lines)
@ -0,0 +1,70 @@
@echo OFF

echo Entering Maya2016 environment...

:: Environment: Maya
set CB_MAYA_VERSION=2016
set CB_MAYA_SHARED=%CB_APP_SHARED%\maya_shared\%CB_MAYA_VERSION%

if "%CB_MAYA_SHARED%" == "" (
    echo Error: "CB_MAYA_SHARED" not set
    goto :eof
)


:: Colorbleed Maya
set PYTHONPATH=%CB_PIPELINE%\git\cbMayaScripts;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\inventory\python;%PYTHONPATH%

:: Maya shared
set MAYA_PLUG_IN_PATH=%CB_MAYA_SHARED%\plugins;%MAYA_PLUGIN_PATH%
set MAYA_SHELF_PATH=%CB_MAYA_SHARED%\prefs\shelves;%MAYA_SHELF_PATH%
set MAYA_SCRIPT_PATH=%CB_MAYA_SHARED%\scripts;%MAYA_SCRIPT_PATH%
set XBMLANGPATH=%CB_MAYA_SHARED%\prefs\icons;%XBMLANGPATH%
set MAYA_PRESET_PATH=%CB_MAYA_SHARED%\prefs\attrPresets;%MAYA_PRESET_PATH%
set PYTHONPATH=%CB_MAYA_SHARED%\scripts;%PYTHONPATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules;%MAYA_MODULE_PATH%

:: Additional modules
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\mGear_2016;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\SOuP;%MAYA_MODULE_PATH%
set MAYA_SHELF_PATH=%CB_MAYA_SHARED%\modules\SOuP\shelves;%MAYA_SHELF_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\pdipro35c_Maya2016x64;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\ovdb\maya\maya2016;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\cvshapeinverter;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Toolchefs;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Exocortex;%MAYA_MODULE_PATH%

:: Miarmy
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Basefount\Miarmy;%MAYA_MODULE_PATH%
set PATH=%CB_MAYA_SHARED%\modules\Basefount\Miarmy\bin;%PATH%
set VRAY_PLUGINS_x64=%CB_MAYA_SHARED%\modules\Basefount\Miarmy\bin\vray\vray_3.1_3.3_3.4\Maya2015and2016;%VRAY_PLUGINS_x64%;

:: Yeti
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64;%MAYA_MODULE_PATH%
set PATH=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\bin;%PATH%;
set VRAY_PLUGINS_x64=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\bin;%VRAY_PLUGINS_x64%;
set VRAY_FOR_MAYA2016_PLUGINS_x64=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\bin;%VRAY_FOR_MAYA2016_PLUGINS_x64%;
set REDSHIFT_MAYAEXTENSIONSPATH=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\plug-ins;%REDSHIFT_MAYAEXTENSIONSPATH%
set peregrinel_LICENSE=5053@CBserver

:: maya-capture
set PYTHONPATH=%CB_PIPELINE%\git\maya-capture;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\maya-capture-gui;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\maya-capture-gui-cb;%PYTHONPATH%

:: maya-matrix-deform
set PYTHONPATH=%CB_PIPELINE%\git\maya-matrix-deformers;%PYTHONPATH%
set MAYA_PLUG_IN_PATH=%CB_PIPELINE%\git\maya-matrix-deformers\plugin;%MAYA_PLUG_IN_PATH%

:: rapid-rig
set XBMLANGPATH=%CB_MAYA_SHARED%\scripts\RapidRig_Modular_V02;%XBMLANGPATH%
set MAYA_SCRIPT_PATH=%CB_MAYA_SHARED%\scripts\RapidRig_Modular_V02;%MAYA_SCRIPT_PATH%


:: Fix Maya Playblast Color Management depth
set MAYA_FLOATING_POINT_RT_PLAYBLAST=1


:: Fix V-ray forcing affinity to 100%
set VRAY_USE_THREAD_AFFINITY=0

python_environment.bat (new file, 30 lines)
@ -0,0 +1,30 @@
@echo OFF
echo Entering Python environment...

set CB_PYTHON_VERSION=2.7

where /Q python.exe
if ERRORLEVEL 1 (
    if EXIST C:\Python27\python.exe (
        echo Adding C:\Python27 to PATH
        set "PATH=%PATH%;C:\Python27"
        goto:has-python
    ) else (
        echo Adding embedded python (pipeline)
        set "PATH=%PATH%;%CB_APP_SHARED%\python\standalone\%CB_PYTHON_VERSION%\bin"
        goto:has-python
    )
)
:has-python

:: Python universal (non-compiled)
set PYTHONPATH=%PYTHONPATH%;%CB_APP_SHARED%\python\universal\site-packages

:: Python version/windows-specific
:: set PYTHONPATH=%PYTHONPATH%;%CB_APP_SHARED%\python\win\%CB_PYTHON_VERSION%

:: Python standalone (compiled to version)
if NOT "%CB_PYTHON_STANDALONE%" == "0" (
    echo Entering Python Standalone environment...
    set PYTHONPATH=%PYTHONPATH%;%CB_APP_SHARED%\python\standalone\%CB_PYTHON_VERSION%\site-packages
)

set_environment.bat (new file, 33 lines)
@ -0,0 +1,33 @@
@echo off
echo Entering pipeline (raw development) environment...

:: Initialize environment
set CB_PIPELINE=P:\pipeline\dev

set CB_APP_SHARED=%CB_PIPELINE%\apps

if "%CB_APP_SHARED%" == "" (
    echo Error: "CB_APP_SHARED" not set
    goto :eof
)

echo setting STORAGE..
set STORAGE=P:

set LAUNCHER_ROOT=%~dp0/launchers

:: Core
echo Add cb core..
set PYTHONPATH=%CB_PIPELINE%\git\cb;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\cbra;%PYTHONPATH%

:: Extra
set PYTHONPATH=%CB_PIPELINE%\git\pyseq;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\Qt.py;%PYTHONPATH%


:: Ftrack-connect
::set PYTHONPATH=%CB_PIPELINE%\git\ftrack-connect\source;%PYTHONPATH%

:: FFMPEG
set FFMPEG_PATH=%CB_APP_SHARED%\ffmpeg\bin\ffmpeg.exe