aardschok 2017-08-02 15:27:22 +02:00
commit 076b7ff0c7
21 changed files with 536 additions and 415 deletions

View file

@ -1,11 +1,9 @@
# absolute_import is needed to counter the `module has no cmds error` in Maya
from __future__ import absolute_import
import os
import uuid
from maya import cmds
import pyblish.api
@ -164,7 +162,7 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
instance = result["instance"]
errored_instances.append(instance)
# Apply pyblish.logic to get the instances for the plug-in
# Apply pyblish logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
# Get the nodes from the all instances that ran through this plug-in
@ -178,78 +176,34 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
self.log.info("No invalid nodes found.")
return
# Ensure unique (process each node only once)
# Ensure unique ( process each node only once )
invalid = list(set(invalid))
# Parse context from current file
self.log.info("Parsing current context..")
print(">>> DEBUG CONTEXT :", context)
print(">>> DEBUG CONTEXT DATA:", context.data)
self.log.info("Updating node IDs ...")
# Update the attributes
self._update_id_attribute(invalid)
# # Generate and add the ids to the nodes
node_ids = self.generate_ids(context, invalid)
self.apply_ids(node_ids)
self.log.info("Generated ids on nodes: {0}".format(invalid))
def get_context(self, instance=None):
def _update_id_attribute(self, nodes):
"""Delete the id attribute
PROJECT = os.environ["AVALON_PROJECT"]
ASSET = instance.data.get("asset") or os.environ["AVALON_ASSET"]
SILO = os.environ["AVALON_SILO"]
LOCATION = os.getenv("AVALON_LOCATION")
return {"project": PROJECT,
"asset": ASSET,
"silo": SILO,
"location": LOCATION}
def generate_ids(self, context, nodes):
"""Generate cb UUIDs for nodes.
The identifiers are formatted like:
assets:character/test:bluey:46D221D9-4150-8E49-6B17-43B04BFC26B6
This is a concatenation of:
- entity (shots or assets)
- folders (parent hierarchy)
- asset (the name of the asset)
- uuid (unique id for node in the scene)
Raises:
RuntimeError: When context can't be parsed of the current asset
Returns:
dict: node, uuid dictionary
"""
# Make a copy of the context
data = context.copy()
# Define folders
node_ids = dict()
for node in nodes:
# Generate a unique ID per node
data['uuid'] = uuid.uuid4()
unique_id = "{asset}:{item}:{uuid}".format(**data)
node_ids[node] = unique_id
return node_ids
def apply_ids(self, node_ids):
"""Apply the created unique IDs to the node
Args:
node_ids (dict): each node with a unique id
Returns:
None
nodes (list): all nodes to update the attribute on
"""
attribute = "mbId"
for node, id in node_ids.items():
# check if node has attribute
if not cmds.attributeQuery(attribute, node=node, exists=True):
cmds.addAttr(node, longName=attribute, dataType="string")
for node in nodes:
# get the database asset id
attr = "{}.cbId".format(node)
id_attr = cmds.getAttr(attr)
asset_id = id_attr.split(":")[0]
# create a new unique id
_, uid = str(uuid.uuid4()).rsplit("-", 1)
cb_uid = "{}:{}".format(asset_id, uid)
# set the new id
cmds.setAttr(attr, cb_uid, type="string")
cmds.setAttr("{}.{}".format(node, attribute), id)

View file

@ -88,7 +88,7 @@ def on_new():
maya.commands.reset_resolution()
def on_save():
def on_save(nodes=None):
"""Automatically add IDs to new nodes
Any transform of a mesh, without an existing ID,
is given one automatically on file save.
@ -102,28 +102,28 @@ def on_save():
types = ["mesh", "shadingEngine", "file", "nurbsCurve"]
# the items which need to pass the id to their parent
nodes = (set(cmds.ls(type=types, long=True)) -
set(cmds.ls(long=True, readOnly=True)) -
set(cmds.ls(long=True, lockedNodes=True)))
if not nodes:
nodes = (set(cmds.ls(type=types, long=True)) -
set(cmds.ls(long=True, readOnly=True)) -
set(cmds.ls(long=True, lockedNodes=True)))
transforms = set()
for n in cmds.ls(type=types, long=True):
# pass id to parent of node if in subtypes
relatives = cmds.listRelatives(n, parent=True, fullPath=True)
if not relatives:
continue
transforms = set()
for n in cmds.ls(type=types, long=True):
# pass id to parent of node if in subtypes
relatives = cmds.listRelatives(n, parent=True, fullPath=True)
if not relatives:
continue
for r in cmds.listRelatives(n, parent=True, fullPath=True):
transforms.add(r)
for r in cmds.listRelatives(n, parent=True, fullPath=True):
transforms.add(r)
# merge transforms and nodes in one set to make sure every item
# is unique
nodes |= transforms
# merge transforms and nodes in one set to make sure every item
# is unique
nodes |= transforms
# Lead with asset ID from the database
asset = os.environ["AVALON_ASSET"]
asset_id = io.find_one({"type": "asset", "name": asset})
for node in nodes:
if node in defaults:
continue
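
The node gathering in on_save boils down to set arithmetic: start from all nodes of the relevant types, drop read-only and locked ones, then union in their parent transforms. A plain-Python illustration with made-up DAG paths (no Maya required):

meshes = {"|char|body|bodyShape", "|char|hat|hatShape"}
read_only = {"|char|hat|hatShape"}  # e.g. a referenced shape that gets skipped
candidates = meshes - read_only
# derive the parent transform from each shape's full DAG path
parents = set(shape.rsplit("|", 1)[0] for shape in candidates)
nodes = candidates | parents
# nodes -> {"|char|body|bodyShape", "|char|body"}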

View file

@ -5,7 +5,6 @@ import os
import bson
import json
import logging
import pprint
import contextlib
from collections import OrderedDict, defaultdict
@ -671,7 +670,12 @@ def get_reference_node(path):
Returns:
node (str): name of the reference node in question
"""
node = cmds.file(path, query=True, referenceNode=True)
try:
node = cmds.file(path, query=True, referenceNode=True)
except RuntimeError:
log.debug('Received file not loaded : "{}"'.format(path))
return
reference_path = cmds.referenceQuery(path, filename=True)
if os.path.normpath(path) == os.path.normpath(reference_path):
return node
@ -760,9 +764,15 @@ def assign_look(nodes, subset="lookDefault"):
# Group all nodes per asset id
grouped = defaultdict(list)
for node in nodes:
colorbleed_id = cmds.getAttr("{}.cbId".format(node))
asset_id = colorbleed_id.split(":")[0]
grouped[asset_id].append(node)
colorbleed_id = _get_id(node)
if not colorbleed_id:
continue
parts = colorbleed_id.split(":")
if len(parts) != 2:
continue
grouped[parts[0]].append(node)
for asset_id, asset_nodes in grouped.items():
# create objectId for database
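
The reworked assign_look calls a `_get_id` helper that is not part of this diff. A plausible minimal version, assuming it simply returns the node's cbId string or None when the attribute is missing, could look like this:

from maya import cmds

def _get_id(node):
    """Return the cbId attribute of `node`, or None when it is missing."""
    if not cmds.attributeQuery("cbId", node=node, exists=True):
        return None
    return cmds.getAttr("{}.cbId".format(node))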

View file

@ -58,7 +58,7 @@
"tooltip": ""
},
{
"command": "$COLORBLEED_SCRIPTS\\modeling\\selectOutlineUI",
"command": "$COLORBLEED_SCRIPTS\\modeling\\selectOutlineUI.py",
"sourcetype": "file",
"tags": [
"modeling",
@ -405,351 +405,351 @@
"Rigging": [
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\addCurveBetween.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"addCurveBetween",
"python"
"file"
],
"title": "Add Curve Between"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\averageSkinWeights.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"averageSkinWeights",
"python"
"file"
],
"title": "Average Skin Weights"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\cbSmoothSkinWeightUI.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"cbSmoothSkinWeightUI",
"python"
"file"
],
"title": "CB Smooth Skin Weight UI"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\channelBoxManagerUI.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"channelBoxManagerUI",
"python"
"file"
],
"title": "Channel Box Manager UI"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\characterAutorigger.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"characterAutorigger",
"python"
"file"
],
"title": "Character Auto Rigger"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\connectUI.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"connectUI",
"python"
"file"
],
"title": "Connect UI"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\copySkinWeightsLocal.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"copySkinWeightsLocal",
"python"
"file"
],
"title": "Copy Skin Weights Local"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\createCenterLocator.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"createCenterLocator",
"python"
"file"
],
"title": "Create Center Locator"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\freezeTransformToGroup.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"freezeTransformToGroup",
"python"
"file"
],
"title": "Freeze Transform To Group"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\groupSelected.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"groupSelected",
"python"
"file"
],
"title": "Group Selected"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\ikHandlePoleVectorLocator.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"ikHandlePoleVectorLocator",
"python"
"file"
],
"title": "IK Handle Pole Vector Locator"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\jointOrientUI.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"jointOrientUI",
"python"
"file"
],
"title": "Joint Orient UI"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\jointsOnCurve.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"jointsOnCurve",
"python"
"file"
],
"title": "Joints On Curve"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\resetBindSelectedSkinJoints.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"resetBindSelectedSkinJoints",
"python"
"file"
],
"title": "Reset Bind Selected Skin Joints"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\selectSkinclusterJointsFromSelectedComponents.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"selectSkinclusterJointsFromSelectedComponents",
"python"
"file"
],
"title": "Select Skincluster Joints From Selected Components"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\selectSkinclusterJointsFromSelectedMesh.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"selectSkinclusterJointsFromSelectedMesh",
"python"
"file"
],
"title": "Select Skincluster Joints From Selected Mesh"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\setJointLabels.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"setJointLabels",
"python"
"file"
],
"title": "Set Joint Labels"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\setJointOrientationFromCurrentRotation.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"setJointOrientationFromCurrentRotation",
"python"
"file"
],
"title": "Set Joint Orientation From Current Rotation"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\setSelectedJointsOrientationZero.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"setSelectedJointsOrientationZero",
"python"
"file"
],
"title": "Set Selected Joints Orientation Zero"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\mirrorCurveShape.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"mirrorCurveShape",
"python"
"file"
],
"title": "Mirror Curve Shape"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\setRotationOrderUI.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"setRotationOrderUI",
"python"
"file"
],
"title": "Set Rotation Order UI"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\paintItNowUI.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"paintItNowUI",
"python"
"file"
],
"title": "Paint It Now UI"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\parentScaleConstraint.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"parentScaleConstraint",
"python"
"file"
],
"title": "Parent Scale Constraint"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\quickSetWeightsUI.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"quickSetWeightsUI",
"python"
"file"
],
"title": "Quick Set Weights UI"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\rapidRig.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"rapidRig",
"python"
"file"
],
"title": "Rapid Rig"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\regenerate_blendshape_targets.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"regenerate_blendshape_targets",
"python"
"file"
],
"title": "Regenerate Blendshape Targets"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\removeRotationAxis.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"removeRotationAxis",
"python"
"file"
],
"title": "Remove Rotation Axis"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\resetBindSelectedMeshes.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"resetBindSelectedMeshes",
"python"
"file"
],
"title": "Reset Bind Selected Meshes"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\simpleControllerOnSelection.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"simpleControllerOnSelection",
"python"
"file"
],
"title": "Simple Controller On Selection"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\simpleControllerOnSelectionHierarchy.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"simpleControllerOnSelectionHierarchy",
"python"
"file"
],
"title": "Simple Controller On Selection Hierarchy"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\superRelativeCluster.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"superRelativeCluster",
"python"
"file"
],
"title": "Super Relative Cluster"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\tfSmoothSkinWeight.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"tfSmoothSkinWeight",
"python"
"file"
],
"title": "TF Smooth Skin Weight"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\toggleIntermediates.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"toggleIntermediates",
"python"
"file"
],
"title": "Toggle Intermediates"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\toggleSegmentScaleCompensate.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"toggleSegmentScaleCompensate",
"python"
"file"
],
"title": "Toggle Segment Scale Compensate"
},
{
"command": "$COLORBLEED_SCRIPTS\\rigging\\toggleSkinclusterDeformNormals.py",
"sourcetype": "python",
"sourcetype": "file",
"tags": [
"rigging",
"toggleSkinclusterDeformNormals",
"python"
"file"
],
"title": "Toggle Skincluster Deform Normals"
}
@ -1007,12 +1007,16 @@
]
},
{
"command": "$COLORBLEED_SCRIPTS\\shading\\LightLinkUI.py",
"command": "$COLORBLEED_SCRIPTS\\shading\\autoLookdevAssignment.py",
"sourcetype": "file",
"tags": [
"shading",
"LightLinkUI"
],
"tags": ["shading", "lookdev", "assign", "shaders", "auto"],
"title": "Assign lookDefault Shader",
"tooltip": "Assign the latest 'lookDefault' to assets without any lookdev in the scene"
},
{
"command": "$COLORBLEED_SCRIPTS\\shading\\LightLinkUi.py",
"sourcetype": "file",
"tags": ["shading", "light", "link", "ui"],
"title": "Light Link UI",
"tooltip": ""
},
@ -1029,7 +1033,7 @@
"tooltip": ""
},
{
"command": "$COLORBLEED_SCRIPTS\\shading\\fixDefaultShaderSetBehavior",
"command": "$COLORBLEED_SCRIPTS\\shading\\fixDefaultShaderSetBehavior.py",
"sourcetype": "file",
"tags": [
"shading",
@ -1037,7 +1041,7 @@
"DefaultShaderSet",
"Behavior"
],
"title": "fixDefaultShaderSetBehavior",
"title": "Fix Default Shader Set Behavior",
"tooltip": ""
},
{
@ -1147,7 +1151,7 @@
"tooltip": ""
},
{
"command": "",
"command": "$COLORBLEED_SCRIPTS\\layout\\spPaint3d.py",
"sourcetype": "file",
"tags": ["layout", "spPaint3d", "paint", "tool"],
"title": "SP Paint 3d",
@ -1232,60 +1236,42 @@
{
"command": "",
"sourcetype": "file",
"tags": [
"particles",
"instancerToObjectsInstancesWithAnimation"
],
"tags": ["particles", "instancerToObjectsInstancesWithAnimation"],
"title": "instancerToObjectsInstancesWithAnimation",
"tooltip": ""
},
{
"command": "",
"sourcetype": "file",
"tags": [
"particles",
"objectsToParticles"
],
"tags": ["particles", "objectsToParticles"],
"title": "objectsToParticles",
"tooltip": ""
},
{
"command": "",
"sourcetype": "file",
"tags": [
"particles",
"add_particle_cacheFile_attrs"
],
"tags": ["particles", "add_particle_cacheFile_attrs"],
"title": "add_particle_cacheFile_attrs",
"tooltip": ""
},
{
"command": "",
"sourcetype": "file",
"tags": [
"particles",
"mergeParticleSystems"
],
"tags": ["particles", "mergeParticleSystems"],
"title": "mergeParticleSystems",
"tooltip": ""
},
{
"command": "",
"sourcetype": "file",
"tags": [
"particles",
"particlesToLocators"
],
"tags": ["particles", "particlesToLocators"],
"title": "particlesToLocators",
"tooltip": ""
},
{
"command": "",
"sourcetype": "file",
"tags": [
"particles",
"instancerToObjectsWithAnimation"
],
"tags": ["particles", "instancerToObjectsWithAnimation"],
"title": "instancerToObjectsWithAnimation",
"tooltip": ""
},
@ -1314,80 +1300,56 @@
{
"command": "",
"sourcetype": "file",
"tags": [
"cleanup",
"selectIntermediateObjects"
],
"tags": ["cleanup", "selectIntermediateObjects"],
"title": "selectIntermediateObjects",
"tooltip": ""
},
{
"command": "",
"sourcetype": "file",
"tags": [
"cleanup",
"resetViewportCache"
],
"tags": ["cleanup", "resetViewportCache"],
"title": "resetViewportCache",
"tooltip": ""
},
{
"command": "",
"sourcetype": "file",
"tags": [
"cleanup",
"selectNonUniqueNames"
],
"tags": ["cleanup", "selectNonUniqueNames"],
"title": "selectNonUniqueNames",
"tooltip": ""
},
{
"command": "",
"sourcetype": "file",
"tags": [
"cleanup",
"uniqifyNodeNames"
],
"tags": ["cleanup", "uniqifyNodeNames"],
"title": "uniqifyNodeNames",
"tooltip": ""
},
{
"command": "",
"sourcetype": "file",
"tags": [
"cleanup",
"selectByType"
],
"tags": ["cleanup", "selectByType"],
"title": "selectByType",
"tooltip": ""
},
{
"command": "",
"sourcetype": "file",
"tags": [
"cleanup",
"removeNamespaces"
],
"tags": ["cleanup", "removeNamespaces"],
"title": "removeNamespaces",
"tooltip": ""
},
{
"command": "",
"sourcetype": "file",
"tags": [
"cleanup",
"autoRenameFileNodes"
],
"tags": ["cleanup", "autoRenameFileNodes"],
"title": "autoRenameFileNodes",
"tooltip": ""
},
{
"command": "",
"sourcetype": "file",
"tags": [
"cleanup",
"remove_user_defined_attributes"
],
"tags": ["cleanup", "remove_user_defined_attributes"],
"title": "remove_user_defined_attributes",
"tooltip": ""
},

View file

@ -11,7 +11,7 @@ self = sys.modules[__name__]
self._menu = "colorbleed"
# set colorbleed scripts path in environment keys
os.environ["COLORBLEED_SCRIPTS"] = r"P:\pipeline\dev\git\cbMayaScripts\cbMayaScripts"
os.environ["COLORBLEED_SCRIPTS"] = "P:\pipeline\dev\git\cbMayaScripts\cbMayaScripts"
log = logging.getLogger(__name__)

View file

@ -23,7 +23,6 @@ class AbcLoader(api.Loader):
# Create unique namespace for the cameras
# Get name from asset being loaded
assert "_" in name, "Naming convention not followed"
assetname = "{}_".format(name.split("_")[0])
namespace = maya.unique_namespace(assetname,
format="%03d",

View file

@ -1,7 +1,7 @@
from maya import cmds
import maya.cmds as cmds
from avalon import api
from avalon import maya
import avalon.maya
class ModelLoader(api.Loader):
@ -10,22 +10,26 @@ class ModelLoader(api.Loader):
families = ["colorbleed.model"]
representations = ["ma"]
label = "Reference model"
label = "Reference Model"
order = -10
icon = "code-fork"
color = "orange"
def process(self, name, namespace, context, data):
with maya.maintained_selection():
nodes = cmds.file(
self.fname,
namespace=namespace,
reference=True,
returnNewNodes=True,
groupReference=True,
groupName="{}:{}".format(namespace, name)
)
# Create a readable namespace
# Namespace should contain asset name and counter
# TEST_001{_descriptor} where `descriptor` can be `_abc` for example
assetname = "{}_".format(namespace.split("_")[0])
namespace = avalon.maya.unique_namespace(assetname, format="%03d")
with avalon.maya.maintained_selection():
nodes = cmds.file(self.fname,
namespace=namespace,
reference=True,
returnNewNodes=True,
groupReference=True,
groupName="{}:{}".format(namespace, name))
self[:] = nodes
@ -54,15 +58,13 @@ class ModelGPUCacheLoader(api.Loader):
cmds.loadPlugin("gpuCache", quiet=True)
# Create transform with shape
transform = cmds.createNode("transform",
name=name)
cache = cmds.createNode("gpuCache",
parent=transform,
name="{0}Shape".format(name))
node_name = "{0}Shape".format(name)
transform = cmds.createNode("transform", name=name)
cache = cmds.createNode("gpuCache", parent=transform, name=node_name)
# Set the cache filepath
cmds.setAttr(cache + '.cacheFileName', path, type="string")
cmds.setAttr(cache + '.cacheGeomPath', "|", type="string") # root
cmds.setAttr('{}.cacheFileName'.format(cache), path, type="string")
cmds.setAttr('{}.cacheGeomPath'.format(cache), "|", type="string") # root
# Select the transform
cmds.select(transform, r=1)
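
For illustration, the namespace convention described in the comments above would resolve roughly as follows (values are hypothetical):

import avalon.maya

# incoming namespace "hero_modelDefault_ABC" -> asset name prefix "hero_"
assetname = "{}_".format("hero_modelDefault_ABC".split("_")[0])
namespace = avalon.maya.unique_namespace(assetname, format="%03d")
# first load of the asset -> "hero_001", the next -> "hero_002", and so on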

View file

@ -22,6 +22,7 @@ class RigLoader(api.Loader):
def process(self, name, namespace, context, data):
assert "_" in name, "Naming convention not followed"
assetname = "{}_".format(context["asset"]["name"])
unique_namespace = maya.unique_namespace(assetname, format="%03d")
nodes = cmds.file(self.fname,

View file

@ -2,17 +2,17 @@ import pyblish.api
import colorbleed.api
class ValidateLookNodeIds(pyblish.api.InstancePlugin):
class ValidateNodeIds(pyblish.api.InstancePlugin):
"""Validate nodes have colorbleed id attributes
All look sets should have id attributes.
"""
order = colorbleed.api.ValidatePipelineOrder
families = ['colorbleed.look']
label = 'Node Id Attributes'
families = ['colorbleed.look', 'colorbleed.model']
hosts = ['maya']
label = 'Look Id Attributes'
order = colorbleed.api.ValidatePipelineOrder
actions = [colorbleed.api.SelectInvalidAction,
colorbleed.api.GenerateUUIDsOnInvalidAction]
@ -20,14 +20,19 @@ class ValidateLookNodeIds(pyblish.api.InstancePlugin):
def get_invalid(instance):
import maya.cmds as cmds
nodes = instance.data["lookSets"]
nodes = instance.data["setMembers"]
# Ensure all nodes have a cbId
invalid = list()
data_id = {}
invalid = []
for node in nodes:
uuid = cmds.attributeQuery("mbId", node=node, exists=True)
if not uuid:
invalid.append(node)
try:
uuid = cmds.getAttr("{}.cbId".format(node))
data_id[uuid] = node
if uuid in data_id:
invalid.append(node)
except RuntimeError:
pass
return invalid
@ -37,5 +42,5 @@ class ValidateLookNodeIds(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError("Nodes found without "
raise RuntimeError("Nodes found with invalid"
"asset IDs: {0}".format(invalid))

View file

@ -1,6 +1,10 @@
import os
import pprint
from maya import cmds
import pyblish.api
import avalon.io as io
from cb.utils.maya import context, shaders
import cbra.utils.maya.node_uuid as id_utils
@ -120,9 +124,15 @@ class CollectLook(pyblish.api.InstancePlugin):
instance.data["lookData"] = {"attributes": attributes,
"relationships": sets.values(),
"sets": looksets}
# Collect textures
resources = [self.collect_resource(n) for n in cmds.ls(type="file")]
# Collect file nodes used by shading engines
history = cmds.listHistory(looksets)
files = cmds.ls(history, type="file", long=True)
# Collect textures
resources = [self.collect_resource(n) for n in files]
instance.data["resources"] = resources
# pprint.pprint(resources)
# Log a warning when no relevant sets were retrieved for the look.
if not instance.data["lookData"]["sets"]:

View file

@ -11,7 +11,7 @@ class CollectModelData(pyblish.api.InstancePlugin):
"""
order = pyblish.api.CollectorOrder + 0.499
label = 'Model Data'
label = 'Collect Model Data'
families = ["colorbleed.model"]
def process(self, instance):

View file

@ -39,26 +39,35 @@ class ExtractLook(colorbleed.api.Extractor):
# Remove all members of the sets so they are not included in the
# exported file by accident
self.log.info("Extract sets (Maya ASCII)..")
self.log.info("Extract sets (Maya ASCII) ...")
lookdata = instance.data["lookData"]
sets = lookdata["sets"]
resources = instance.data["resources"]
remap = {}
for resource in resources:
attr = resource['attribute']
remap[attr] = resource['destination']
self.log.info("Finished remapping destinations ...")
# Extract in correct render layer
layer = instance.data.get("renderlayer", "defaultRenderLayer")
with context.renderlayer(layer):
# TODO: Ensure membership edits don't become renderlayer overrides
with context.empty_sets(sets):
with avalon.maya.maintained_selection():
cmds.select(sets, noExpand=True)
cmds.file(maya_path,
force=True,
typ="mayaAscii",
exportSelected=True,
preserveReferences=False,
channels=True,
constraints=True,
expressions=True,
constructionHistory=True)
with context.attribute_values(remap):
with avalon.maya.maintained_selection():
cmds.select(sets, noExpand=True)
cmds.file(maya_path,
force=True,
typ="mayaAscii",
exportSelected=True,
preserveReferences=False,
channels=True,
constraints=True,
expressions=True,
constructionHistory=True)
# Write the JSON data
self.log.info("Extract json..")

View file

@ -4,11 +4,14 @@ import pyblish.api
import colorbleed.api
class ValidateNamingConvention(pyblish.api.InstancePlugin):
class ValidateFileNameConvention(pyblish.api.InstancePlugin):
label = ""
families = ["colorbleed.model"]
families = ["colorbleed.lookdev"]
host = ["maya"]
optional = True
order = pyblish.api.ValidatorOrder
actions = [colorbleed.api.SelectInvalidAction]
@staticmethod

View file

@ -10,14 +10,17 @@ class ValidateUniqueNodeIds(pyblish.api.InstancePlugin):
"""Validate nodes have colorbleed id attributes"""
order = colorbleed.api.ValidatePipelineOrder
families = ['colorbleed.model']
hosts = ['maya']
label = 'Unique Id Attributes'
hosts = ['maya']
families = ['colorbleed.model',
'colorbleed.lookdev',
'colorbleed.rig']
actions = [colorbleed.api.SelectInvalidAction,
colorbleed.api.GenerateUUIDsOnInvalidAction]
@staticmethod
def get_invalid_dict(instance):
@classmethod
def get_invalid_dict(cls, instance):
"""Return a dictionary mapping of id key to list of member nodes"""
uuid_attr = "cbId"
@ -25,18 +28,21 @@ class ValidateUniqueNodeIds(pyblish.api.InstancePlugin):
# Collect each id with their members
ids = defaultdict(list)
for member in instance:
has_attr = cmds.attributeQuery(uuid_attr, node=member, exists=True)
if not has_attr:
try:
object_id = cmds.getAttr("{}.{}".format(member, uuid_attr))
except Exception as exception:
# Object will not have the attribute, so skip
cls.log.debug(exception)
continue
mbid = cmds.getAttr("{}.{}".format(member, uuid_attr))
ids[mbid].append(member)
ids[object_id].append(member)
# Skip those without IDs (if everything should have an ID that should
# be another validation)
ids.pop(None, None)
# Take only the ids with more than one member
invalid = dict((id, members) for id, members in ids.iteritems() if
invalid = dict((_id, members) for _id, members in ids.iteritems() if
len(members) > 1)
return invalid
@ -61,3 +67,5 @@ class ValidateUniqueNodeIds(pyblish.api.InstancePlugin):
if invalid:
raise RuntimeError("Nodes found with non-unique "
"asset IDs: {0}".format(invalid))

View file

@ -0,0 +1,94 @@
import pyblish.api
import os
import avalon.io as io
class CollectResourceDestination(pyblish.api.InstancePlugin):
"""This plug-ins displays the comment dialog box per default"""
label = "Collect Resource Destination"
order = pyblish.api.CollectorOrder + 0.499
def process(self, instance):
self.create_destination_template(instance)
template_data = instance.data["assumedTemplateData"]
template = instance.data["template"]
mock_template = template.format(**template_data)
# For now assume resources end up in a "resources" folder in the
# published folder
mock_destination = os.path.join(os.path.dirname(mock_template),
"resources")
# Clean the path
mock_destination = os.path.abspath(os.path.normpath(mock_destination))
# Define resource destination and transfers
resources = instance.data.get("resources", list())
transfers = instance.data.get("transfers", list())
for resource in resources:
# Add destination to the resource
source_filename = os.path.basename(resource["source"])
destination = os.path.join(mock_destination, source_filename)
resource['destination'] = destination
# Collect transfers for the individual files of the resource
# e.g. all individual files of a cache or UDIM textures.
files = resource['files']
for fsrc in files:
fname = os.path.basename(fsrc)
fdest = os.path.join(mock_destination, fname)
transfers.append([fsrc, fdest])
instance.data["resources"] = resources
instance.data["transfers"] = transfers
def create_destination_template(self, instance):
"""Create a filepath based on the current data available
Example template:
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
{subset}.{representation}
Args:
instance: the instance to publish
Returns:
file path (str)
"""
# get all the stuff from the database
subset_name = instance.data["subset"]
project_name = os.environ["AVALON_PROJECT"]
project = io.find_one({"type": "project",
"name": project_name},
projection={"config": True})
template = project["config"]["template"]["publish"]
subset = io.find_one({"type": "subset",
"name": subset_name})
# assume there is no version yet, we start at `1`
version_number = 1
if subset is not None:
version = io.find_one({"type": "version",
"parent": subset["_id"]},
sort=[("name", -1)])
# if there is a subset there ought to be a version
version_number += version["name"]
template_data = {"root": os.environ["AVALON_ROOT"],
"project": project_name,
"silo": os.environ["AVALON_SILO"],
"asset": instance.data["asset"],
"subset": subset_name,
"version": version_number,
"representation": "TEMP"}
instance.data["assumedTemplateData"] = template_data
instance.data["template"] = template

View file

@ -1,15 +1,16 @@
import os
import logging
import shutil
import errno
import pyblish.api
from avalon import api, io
import colorbleed.filetypes as filetypes
log = logging.getLogger(__name__)
class PreIntegrateAsset(pyblish.api.InstancePlugin):
class IntegrateAsset(pyblish.api.InstancePlugin):
"""Resolve any dependency issies
This plug-in resolves any paths which, if not updated might break
@ -20,7 +21,7 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):
publish the shading network. Same goes for file dependent assets.
"""
label = "Pre Intergrate Asset"
label = "Intergrate Asset"
order = pyblish.api.IntegratorOrder
families = ["colorbleed.model",
"colorbleed.rig",
@ -33,6 +34,17 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):
def process(self, instance):
self.log.info("Integrating Asset in to the database ...")
self.register(instance)
self.intergrate(instance)
self.log.info("Removing temporary files and folders ...")
stagingdir = instance.data["stagingDir"]
shutil.rmtree(stagingdir)
def register(self, instance):
# Required environment variables
PROJECT = os.environ["AVALON_PROJECT"]
ASSET = instance.data.get("asset") or os.environ["AVALON_ASSET"]
@ -75,8 +87,12 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):
self.log.debug("Establishing staging directory @ %s" % stagingdir)
project = io.find_one({"type": "project"})
asset = io.find_one({"name": ASSET})
project = io.find_one({"type": "project"},
projection={"config.template.publish": True})
asset = io.find_one({"type": "asset",
"name": ASSET,
"parent": project["_id"]})
assert all([project, asset]), ("Could not find current project or "
"asset '%s'" % ASSET)
@ -125,7 +141,6 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):
template_publish = project["config"]["template"]["publish"]
representations = []
traffic = []
staging_content = os.listdir(stagingdir)
for v, fname in enumerate(staging_content):
@ -134,13 +149,6 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):
src = os.path.join(stagingdir, fname)
dst = template_publish.format(**template_data)
if v == 0:
instance.data["versionFolder"] = os.path.dirname(dst)
# Files to copy as if or to specific folder
if ext in filetypes.accepted_images_types:
dirname = os.path.dirname(dst)
dst = os.path.join(dirname, fname)
# Backwards compatibility
if fname == ".metadata.json":
@ -148,7 +156,7 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):
dst = os.path.join(dirname, fname)
# copy source to destination (library)
traffic.append([src, dst])
instance.data["transfers"].append([src, dst])
representation = {
"schema": "avalon-core:representation-2.0",
@ -173,10 +181,53 @@ class PreIntegrateAsset(pyblish.api.InstancePlugin):
# store data for database and source / destinations
instance.data["representations"] = representations
instance.data["traffic"] = traffic
return representations
def intergrate(self, instance):
"""Register the representations and move the files
Through the stored `representations` and `transfers`
Args:
instance: the instance to integrate
"""
# get needed data
traffic = instance.data["transfers"]
representations = instance.data["representations"]
self.log.info("Registering {} items".format(len(representations)))
io.insert_many(representations)
# moving files
for src, dest in traffic:
self.log.info("Copying file .. {} -> {}".format(src, dest))
self.copy_file(src, dest)
def copy_file(self, src, dst):
""" Copy given source to destination
Arguments:
src (str): the source file which needs to be copied
dst (str): the destination of the source file
Returns:
None
"""
dirname = os.path.dirname(dst)
try:
os.makedirs(dirname)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
self.log.critical("An unexpected error occurred.")
raise
shutil.copy(src, dst)
def get_subset(self, asset, instance):
subset = io.find_one({"type": "subset",
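
The `transfers` list that this integrator shares with the collectors above is simply a list of [source, destination] pairs; hypothetical entries (all paths made up) look like:

transfers = [
    # the extracted Maya ASCII file into the version folder
    ["C:/temp/pyblish_tmp_ab12/lookdevDefault.ma",
     "P:/projects/demo/assets/hero/publish/lookdevDefault/v002/lookdevDefault.ma"],
    # an individual texture into the version's resources folder
    ["C:/textures/hero_diffuse.1001.tif",
     "P:/projects/demo/assets/hero/publish/lookdevDefault/v002/resources/hero_diffuse.1001.tif"],
]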

View file

@ -1,80 +0,0 @@
import os
import errno
import shutil
import pyblish.api
from avalon import io
class IntegrateAsset(pyblish.api.InstancePlugin):
"""Write to files and metadata
This plug-in exposes your data to others by encapsulating it
into a new version.
Schema:
Data is written in the following format.
____________________
| |
| version |
| ________________ |
| | | |
| | representation | |
| |________________| |
| | | |
| | ... | |
| |________________| |
|____________________|
"""
label = "Integrate Asset"
order = pyblish.api.IntegratorOrder + 0.1
families = ["colorbleed.model",
"colorbleed.rig",
"colorbleed.animation",
"colorbleed.camera",
"colorbleed.lookdev",
"colorbleed.texture",
"colorbleed.historyLookdev",
"colorbleed.group"]
def process(self, instance):
# get needed data
traffic = instance.data["traffic"]
representations = instance.data["representations"]
self.log.info("Registering {} items".format(len(representations)))
io.insert_many(representations)
# moving files
for src, dest in traffic:
self.log.info("Copying file .. {} -> {}".format(src, dest))
self.copy_file(src, dest)
self.log.info("Removing temporary files and folders ...")
stagingdir = instance.data["stagingDir"]
shutil.rmtree(stagingdir)
def copy_file(self, src, dst):
""" Copy given source to destination
Arguments:
src (str): the source file which needs to be copied
dst (str): the destination of the sourc file
Returns:
None
"""
dirname = os.path.dirname(dst)
try:
os.makedirs(dirname)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
self.log.critical("An unexpected error occurred.")
raise
shutil.copy(src, dst)

View file

@ -1,40 +0,0 @@
import json
import os
import colorbleed.maya.lib as lib
import pyblish.api
class IntegrateAsset(pyblish.api.InstancePlugin):
"""Remap source paths for lookdev and textures
"""
label = "Remap source paths"
order = pyblish.api.IntegratorOrder + 0.15
families = ["colorbleed.lookdev",
"colorbleed.texture"]
def process(self, instance):
family = instance.data['family']
resources = instance.data['resources']
version_folder = instance.data['versionFolder']
if family == "colorbleed.texture":
try:
lib.remap_resource_nodes(resources, folder=version_folder)
except Exception as e:
self.log.error(e)
if family == "colorbleed.lookdev":
try:
tmp_dir = lib.maya_temp_folder()
resource_file = os.path.join(tmp_dir, "resources.json")
with open(resource_file, "r") as f:
resources = json.load(f)
lib.remap_resource_nodes(resources)
except Exception as e:
self.log.error(e)

70
maya_environment.bat Normal file
View file

@ -0,0 +1,70 @@
@echo OFF
echo Entering Maya2016 environment...
:: Environment: Maya
set CB_MAYA_VERSION=2016
set CB_MAYA_SHARED=%CB_APP_SHARED%\maya_shared\%CB_MAYA_VERSION%
if "%CB_MAYA_SHARED%" == "" (
echo Error: "CB_MAYA_SHARED" not set
goto :eof
)
:: Colorbleed Maya
set PYTHONPATH=%CB_PIPELINE%\git\cbMayaScripts;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\inventory\python;%PYTHONPATH%
:: Maya shared
set MAYA_PLUG_IN_PATH=%CB_MAYA_SHARED%\plugins;%MAYA_PLUGIN_PATH%
set MAYA_SHELF_PATH=%CB_MAYA_SHARED%\prefs\shelves;%MAYA_SHELF_PATH%
set MAYA_SCRIPT_PATH=%CB_MAYA_SHARED%\scripts;%MAYA_SCRIPT_PATH%
set XBMLANGPATH=%CB_MAYA_SHARED%\prefs\icons;%XBMLANGPATH%
set MAYA_PRESET_PATH=%CB_MAYA_SHARED%\prefs\attrPresets;%MAYA_PRESET_PATH%
set PYTHONPATH=%CB_MAYA_SHARED%\scripts;%PYTHONPATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules;%MAYA_MODULE_PATH%
:: Additional modules
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\mGear_2016;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\SOuP;%MAYA_MODULE_PATH%
set MAYA_SHELF_PATH=%CB_MAYA_SHARED%\modules\SOuP\shelves;%MAYA_SHELF_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\pdipro35c_Maya2016x64;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\ovdb\maya\maya2016;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\cvshapeinverter;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Toolchefs;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Exocortex;%MAYA_MODULE_PATH%
:: Miarmy
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Basefount\Miarmy;%MAYA_MODULE_PATH%
set PATH=%CB_MAYA_SHARED%\modules\Basefount\Miarmy\bin;%PATH%
set VRAY_PLUGINS_x64=%CB_MAYA_SHARED%\modules\Basefount\Miarmy\bin\vray\vray_3.1_3.3_3.4\Maya2015and2016;%VRAY_PLUGINS_x64%;
:: Yeti
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64;%MAYA_MODULE_PATH%
set PATH=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\bin;%PATH%;
set VRAY_PLUGINS_x64=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\bin;%VRAY_PLUGINS_x64%;
set VRAY_FOR_MAYA2016_PLUGINS_x64=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\bin;%VRAY_FOR_MAYA2016_PLUGINS_x64%;
set REDSHIFT_MAYAEXTENSIONSPATH=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\plug-ins;%REDSHIFT_MAYAEXTENSIONSPATH%
set peregrinel_LICENSE=5053@CBserver
:: maya-capture
set PYTHONPATH=%CB_PIPELINE%\git\maya-capture;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\maya-capture-gui;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\maya-capture-gui-cb;%PYTHONPATH%
:: maya-matrix-deform
set PYTHONPATH=%CB_PIPELINE%\git\maya-matrix-deformers;%PYTHONPATH%
set MAYA_PLUG_IN_PATH=%CB_PIPELINE%\git\maya-matrix-deformers\plugin;%MAYA_PLUG_IN_PATH%
:: rapid-rig
set XBMLANGPATH=%CB_MAYA_SHARED%\scripts\RapidRig_Modular_V02;%XBMLANGPATH%
set MAYA_SCRIPT_PATH=%CB_MAYA_SHARED%\scripts\RapidRig_Modular_V02;%MAYA_SCRIPT_PATH%
:: Fix Maya Playblast Color Management depth
set MAYA_FLOATING_POINT_RT_PLAYBLAST=1
:: Fix V-ray forcing affinity to 100%
set VRAY_USE_THREAD_AFFINITY=0
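
A quick way to check from Maya's script editor that these prepended search paths made it into the session (any of the variables above works the same way):

import os

for path in os.environ.get("MAYA_MODULE_PATH", "").split(os.pathsep):
    print(path)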

30
python_environment.bat Normal file
View file

@ -0,0 +1,30 @@
@echo OFF
echo Entering Python environment...
set CB_PYTHON_VERSION=2.7
where /Q python.exe
if ERRORLEVEL 1 (
if EXIST C:\Python27\python.exe (
echo Adding C:\Python27 to PATH
set "PATH=%PATH%;C:\Python27"
goto:has-python
) else (
echo Adding embedded python (pipeline)
set "PATH=%PATH%;%CB_APP_SHARED%\python\standalone\%CB_PYTHON_VERSION%\bin"
goto:has-python
)
)
:has-python
:: Python universal (non-compiled)
set PYTHONPATH=%PYTHONPATH%;%CB_APP_SHARED%\python\universal\site-packages
:: Python version/windows-specific
:: set PYTHONPATH=%PYTHONPATH%;%CB_APP_SHARED%\python\win\%CB_PYTHON_VERSION%
:: Python standalone (compiled to version)
if NOT "%CB_PYTHON_STANDALONE%" == "0" (
echo Entering Python Standalone environment...
set PYTHONPATH=%PYTHONPATH%;%CB_APP_SHARED%\python\standalone\%CB_PYTHON_VERSION%\site-packages
)

33
set_environment.bat Normal file
View file

@ -0,0 +1,33 @@
@echo off
echo Entering pipeline (raw development) environment...
:: Initialize environment
set CB_PIPELINE=P:\pipeline\dev
set CB_APP_SHARED=%CB_PIPELINE%\apps
if "%CB_APP_SHARED%" == "" (
echo Error: "CB_APP_SHARED" not set
goto :eof
)
echo setting STORAGE..
set STORAGE=P:
set LAUNCHER_ROOT=%~dp0/launchers
:: Core
echo Add cb core..
set PYTHONPATH=%CB_PIPELINE%\git\cb;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\cbra;%PYTHONPATH%
:: Extra
set PYTHONPATH=%CB_PIPELINE%\git\pyseq;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\Qt.py;%PYTHONPATH%
:: Ftrack-connect
::set PYTHONPATH=%CB_PIPELINE%\git\ftrack-connect\source;%PYTHONPATH%
:: FFMPEG
set FFMPEG_PATH=%CB_APP_SHARED%\ffmpeg\bin\ffmpeg.exe