resolved conflict

This commit is contained in:
aardschok 2017-10-23 15:40:19 +02:00
commit bc85f19fcb
28 changed files with 1065 additions and 128 deletions

View file

@ -142,7 +142,7 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
receive new UUIDs are actually invalid.
Requires:
- currentFile on context
- instance.data["asset"]
"""
@ -166,44 +166,38 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
# Get the nodes from the all instances that ran through this plug-in
invalid = []
all_invalid = []
for instance in instances:
invalid_nodes = plugin.get_invalid(instance)
if invalid_nodes:
invalid.extend(invalid_nodes)
invalid = plugin.get_invalid(instance)
if invalid:
if not invalid:
self.log.info("Fixing instance {}".format(instance.name))
self._update_id_attribute(instance, invalid)
all_invalid.extend(invalid)
if not all_invalid:
self.log.info("No invalid nodes found.")
return
# Ensure unique ( process each node only once )
invalid = list(set(invalid))
all_invalid = list(set(all_invalid))
self.log.info("Generated ids on nodes: {0}".format(all_invalid))
# Parse context from current file
self.log.info("Updating node IDs ...")
# Update the attributes
self._update_id_attribute(invalid)
self.log.info("Generated ids on nodes: {0}".format(invalid))
def _update_id_attribute(self, nodes):
def _update_id_attribute(self, instance, nodes):
"""Delete the id attribute
Args:
nodes (list): all nodes to remove the attribute from
instance: The instance we're fixing for
nodes (list): all nodes to regenerate ids on
"""
import colorbleed.maya.lib as lib
import avalon.io as io
asset = instance.data['asset']
asset_id = io.find_one({"name": asset, "type": "asset"},
projection={"_id": True})['_id']
for node in nodes:
# get the database asset id
attr = "{}.cbId".format(node)
id_attr = cmds.getAttr(attr)
asset_id = id_attr.split(":")[0]
# create a new unique id
_, uid = str(uuid.uuid4()).rsplit("-", 1)
cb_uid = "{}:{}".format(asset_id, uid)
# set the new id
cmds.setAttr(attr, cb_uid, type="string")
lib.remove_id(node)
lib.set_id(asset_id, node)

View file

@ -12,14 +12,14 @@ from collections import OrderedDict, defaultdict
from maya import cmds, mel
from avalon import Session, maya, io
from avalon import api, maya, io
from cb.utils.maya import core
log = logging.getLogger(__name__)
project = io.find_one({"type": "project",
"name": Session["AVALON_PROJECT"]},
"name": os.environ["AVALON_PROJECT"]},
projection={"config.template.publish": True,
"_id": False})
TEMPLATE = project["config"]["template"]["publish"]
@ -656,7 +656,7 @@ def generate_ids(nodes):
"""Assign a new id of the current active context to the nodes"""
# Get the asset ID from the database for the asset of current context
asset_id = io.find_one({"type": "asset", "name": Session["AVALON_ASSET"]},
asset_id = io.find_one({"type": "asset", "name": os.environ["AVALON_ASSET"]},
projection={"_id": True})
for node in nodes:
@ -687,22 +687,6 @@ def remove_id(node):
cmds.deleteAttr("{}.cbId".format(node))
def get_representation_file(representation, template=TEMPLATE):
"""
Rebuild the filepath of the representation's context
Args:
representation (dict): data of the registered in the database
template (str): the template to fill
Returns:
str
"""
context = representation["context"].copy()
context["root"] = Session["AVALON_ROOT"]
return template.format(**context)
def get_reference_node(path):
"""
Get the reference node when the path is found being used in a reference
@ -812,8 +796,8 @@ def assign_look_by_version(nodes, version_id):
"name": "json"})
# Load file
shader_filepath = get_representation_file(shader_file)
shader_relation = get_representation_file(shader_relations)
shader_filepath = api.get_representation_path(shader_file)
shader_relation = api.get_representation_path(shader_relations)
reference_node = get_reference_node(shader_filepath)
if reference_node is None:

View file

@ -808,6 +808,20 @@
"title": "Set filename prefix",
"tooltip": "Set the render file name prefix."
},
{
"type": "action",
"command": "$COLORBLEED_SCRIPTS\\shading\\assign_look_ui.py",
"sourcetype": "file",
"tags": [
"shading",
"lookdev",
"assign",
"shaders",
"auto"
],
"title": "Assign Look UI",
"tooltip": "Open the Assign Look UI for custom look assignment"
},
{
"type": "action",
"command": "$COLORBLEED_SCRIPTS\\shading\\autoLookdevAssignment.py",

View file

@ -5,11 +5,12 @@ from colorbleed.maya import lib
class CreateAnimation(avalon.maya.Creator):
"""THe animated objects in the scene"""
"""Animation output for character rigs"""
name = "animationDefault"
label = "Animation"
family = "colorbleed.animation"
icon = "male"
def __init__(self, *args, **kwargs):
super(CreateAnimation, self).__init__(*args, **kwargs)

View file

@ -4,11 +4,12 @@ from colorbleed.maya import lib
class CreateCamera(avalon.maya.Creator):
"""Single baked camera extraction"""
"""Single baked camera"""
name = "cameraDefault"
label = "Camera"
family = "colorbleed.camera"
icon = "video-camera"
def __init__(self, *args, **kwargs):
super(CreateCamera, self).__init__(*args, **kwargs)

View file

@ -4,11 +4,12 @@ from colorbleed.maya import lib
class CreateLook(avalon.maya.Creator):
"""Polygonal geometry for animation"""
"""Shader connections defining shape look"""
name = "look"
label = "Look"
family = "colorbleed.look"
icon = "paint-brush"
def __init__(self, *args, **kwargs):
super(CreateLook, self).__init__(*args, **kwargs)

View file

@ -2,8 +2,9 @@ import avalon.maya
class CreateMayaAscii(avalon.maya.Creator):
"""Raw Maya Ascii file of the item(s)"""
"""Raw Maya Ascii file export"""
name = "mayaAscii"
label = "Maya Ascii"
family = "colorbleed.mayaAscii"
icon = "file-archive-o"

View file

@ -2,8 +2,9 @@ import avalon.maya
class CreateModel(avalon.maya.Creator):
"""Polygonal geometry for animation"""
"""Polygonal static geometry"""
name = "modelDefault"
label = "Model"
family = "colorbleed.model"
icon = "cube"

View file

@ -5,11 +5,12 @@ from colorbleed.maya import lib
class CreatePointCache(avalon.maya.Creator):
"""Alembic extract"""
"""Alembic pointcache for animated data"""
name = "pointcache"
label = "Point Cache"
family = "colorbleed.pointcache"
icon = "gears"
def __init__(self, *args, **kwargs):
super(CreatePointCache, self).__init__(*args, **kwargs)

View file

@ -4,11 +4,12 @@ import avalon.maya
class CreateRig(avalon.maya.Creator):
"""Skeleton and controls for manipulation of the geometry"""
"""Artist-friendly rig with controls to direct motion"""
name = "rigDefault"
label = "Rig"
family = "colorbleed.rig"
icon = "wheelchair"
def process(self):
instance = super(CreateRig, self).process()

View file

@ -0,0 +1,10 @@
import avalon.maya
class CreateSetDress(avalon.maya.Creator):
    """A grouped package of loaded content"""

    # Identifier used for the created instance node
    name = "setdress"
    # Label shown in the creator tool
    label = "Set Dress"
    # Publish family picked up by the setdress collector/extractor plug-ins
    family = "colorbleed.setdress"
    icon = "cubes"

View file

@ -1,10 +1,7 @@
import os
import avalon.maya.pipeline
from avalon import api
class AbcLoader(api.Loader):
class AbcLoader(avalon.maya.pipeline.ReferenceLoader):
"""Specific loader of Alembic for the avalon.animation family"""
families = ["colorbleed.animation",

View file

@ -18,7 +18,7 @@ class SetFrameRangeLoader(api.Loader):
icon = "clock-o"
color = "white"
def process(self, name, namespace, context, data):
def load(self, context, name, namespace, data):
import maya.cmds as cmds
@ -52,7 +52,7 @@ class SetFrameRangeWithHandlesLoader(api.Loader):
icon = "clock-o"
color = "white"
def process(self, name, namespace, context, data):
def load(self, context, name, namespace, data):
import maya.cmds as cmds

View file

@ -0,0 +1,56 @@
from avalon import api
import avalon.maya.pipeline
class AbcLoader(api.Loader):
    """Specific loader of Alembic for the avalon.animation family"""

    # Published families this loader can handle
    families = ["colorbleed.animation",
                "colorbleed.camera",
                "colorbleed.pointcache"]
    label = "Reference animation"
    representations = ["abc"]
    order = -10
    icon = "code-fork"
    color = "orange"

    def process(self, name, namespace, context, data):
        """Reference the alembic file grouped under '<namespace>:<name>'.

        Args:
            name (str): subset name used for the group node
            namespace (str): namespace to reference into
            context (dict): full representation context (unused here)
            data (dict): loader options (unused here)
        """
        import maya.cmds as cmds

        # Make sure the Alembic import plug-in is available before the
        # reference is created; quiet avoids a warning when already loaded
        cmds.loadPlugin("AbcImport.mll", quiet=True)

        nodes = cmds.file(self.fname,
                          namespace=namespace,
                          sharedReferenceFile=False,
                          groupReference=True,
                          groupName="{}:{}".format(namespace, name),
                          reference=True,
                          returnNewNodes=True)

        # Store the created nodes on the loader (Loader is list-like)
        self[:] = nodes
# class SetDressAlembicLoader(avalon.maya.pipeline.ReferenceLoader):
# """Load the setdress as alembic"""
#
# families = ["colorbleed.setdress"]
# label = "Reference Alembic"
# representations = ["abc"]
# order = -10
# icon = "code-fork"
# color = "orange"
#
# def process(self, name, namespace, context, data):
#
# import maya.cmds as cmds
#
# cmds.loadPlugin("AbcImport.mll", quiet=True)
# nodes = cmds.file(self.fname,
# namespace=namespace,
# sharedReferenceFile=False,
# groupReference=True,
# groupName="{}:{}".format(namespace, name),
# reference=True,
# returnNewNodes=True)
#
# self[:] = nodes

View file

@ -26,7 +26,7 @@ class OpenImageSequence(api.Loader):
icon = "play-circle"
color = "orange"
def process(self, name, namespace, context, data):
def load(self, context, name, namespace, data):
directory = self.fname
from avalon.vendor import clique

View file

@ -1,10 +1,10 @@
import os
import json
from avalon import api
import avalon.maya.pipeline
class LookLoader(api.Loader):
class LookLoader(avalon.maya.pipeline.ReferenceLoader):
"""Specific loader for lookdev"""
families = ["colorbleed.look"]
@ -32,14 +32,6 @@ class LookLoader(api.Loader):
from avalon import maya
import colorbleed.maya.lib as lib
# improve readability of the namespace
assetname = context["asset"]["name"]
ns_assetname = "{}_".format(assetname)
namespace = maya.unique_namespace(ns_assetname,
format="%03d",
suffix="_look")
# try / except here is to ensure that the get_reference_node
# does not fail when the file doesn't exist yet
reference_node = None

View file

@ -1,7 +1,7 @@
from avalon import api
import avalon.maya.pipeline
class MayaAsciiLoader(api.Loader):
class MayaAsciiLoader(avalon.maya.pipeline.ReferenceLoader):
"""Load the model"""
families = ["colorbleed.mayaAscii"]
@ -17,12 +17,6 @@ class MayaAsciiLoader(api.Loader):
import maya.cmds as cmds
from avalon import maya
# Create a readable namespace
# Namespace should contain asset name and counter
# TEST_001{_descriptor} where `descriptor` can be `_abc` for example
assetname = "{}_".format(namespace.split("_")[0])
namespace = maya.unique_namespace(assetname, format="%03d")
with maya.maintained_selection():
nodes = cmds.file(self.fname,
namespace=namespace,

View file

@ -1,7 +1,8 @@
from avalon import api
import avalon.maya.pipeline
import avalon.api
class ModelLoader(api.Loader):
class ModelLoader(avalon.maya.pipeline.ReferenceLoader):
"""Load the model"""
families = ["colorbleed.model"]
@ -17,12 +18,6 @@ class ModelLoader(api.Loader):
import maya.cmds as cmds
from avalon import maya
# Create a readable namespace
# Namespace should contain asset name and counter
# TEST_001{_descriptor} where `descriptor` can be `_abc` for example
assetname = "{}_".format(namespace.split("_")[0])
namespace = maya.unique_namespace(assetname, format="%03d")
with maya.maintained_selection():
nodes = cmds.file(self.fname,
namespace=namespace,
@ -31,4 +26,84 @@ class ModelLoader(api.Loader):
groupReference=True,
groupName="{}:{}".format(namespace, name))
self[:] = nodes
self[:] = nodes
class GpuCacheLoader(avalon.api.Loader):
"""Load model Alembic as gpuCache"""
families = ["colorbleed.model"]
representations = ["abc"]
label = "Import Gpu Cache"
order = -5
icon = "code-fork"
color = "orange"
def load(self, context, name, namespace, data):
import maya.cmds as cmds
import avalon.maya.lib as lib
asset = context['asset']['name']
namespace = namespace or lib.unique_namespace(
asset + "_",
prefix="_" if asset[0].isdigit() else "",
suffix="_",
)
cmds.loadPlugin("gpuCache", quiet=True)
# Root group
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
# Create transform with shape
transform_name = label + "_GPU"
transform = cmds.createNode("transform", name=transform_name,
parent=root)
cache = cmds.createNode("gpuCache",
parent=transform,
name="{0}Shape".format(transform_name))
# Set the cache filepath
cmds.setAttr(cache + '.cacheFileName', self.fname, type="string")
cmds.setAttr(cache + '.cacheGeomPath', "|", type="string") # root
# Lock parenting of the transform and cache
cmds.lockNode([transform, cache], lock=True)
nodes = [root, transform, cache]
self[:] = nodes
return avalon.maya.pipeline.containerise(
name=name,
namespace=namespace,
nodes=nodes,
context=context,
loader=self.__class__.__name__)
def update(self, container, representation):
import maya.cmds as cmds
path = avalon.api.get_representation_path(representation)
# Update the cache
members = cmds.sets(container['objectName'], query=True)
caches = cmds.ls(members, type="gpuCache", long=True)
assert len(caches) == 1, "This is a bug"
for cache in caches:
cmds.setAttr(cache + ".cacheFileName", path, type="string")
cmds.setAttr(container["objectName"] + ".representation",
str(representation["_id"]),
type="string")
def remove(self, container):
import maya.cmds as cmds
members = cmds.sets(container['objectName'], query=True)
cmds.lockNode(members, lock=False)
cmds.delete([container['objectName']] + members)

View file

@ -3,7 +3,7 @@ from maya import cmds
from avalon import api, Session, maya
class RigLoader(api.Loader):
class RigLoader(maya.pipeline.ReferenceLoader):
"""Specific loader for rigs
This automatically creates an instance for animators upon load.
@ -20,8 +20,6 @@ class RigLoader(api.Loader):
def process(self, name, namespace, context, data):
assetname = "{}_".format(context["asset"]["name"])
unique_namespace = maya.unique_namespace(assetname, format="%03d")
nodes = cmds.file(self.fname,
namespace=namespace,
reference=True,
@ -32,10 +30,9 @@ class RigLoader(api.Loader):
# Store for post-process
self[:] = nodes
if data.get("post_process", True):
self._post_process(name, unique_namespace, context, data)
self._post_process(name, namespace, context, data)
def _post_process(self, name, namespace, context, data):
from avalon import maya
# TODO(marcus): We are hardcoding the name "out_SET" here.
# Better register this keyword, so that it can be used
@ -60,8 +57,8 @@ class RigLoader(api.Loader):
# Create the animation instance
with maya.maintained_selection():
cmds.select([output, controls] + roots, noExpand=True)
maya.create(name=namespace,
asset=asset,
family="colorbleed.animation",
options={"useSelection": True},
data={"dependencies": dependency})
api.create(name=namespace,
asset=asset,
family="colorbleed.animation",
options={"useSelection": True},
data={"dependencies": dependency})

View file

@ -0,0 +1,80 @@
from avalon import api
class SetDressLoader(api.Loader):
    """Load a set dress package (json build recipe + alembic hierarchy)."""

    families = ["colorbleed.setdress"]
    representations = ["json"]

    label = "Load Set Dress"
    order = -9
    icon = "code-fork"
    color = "orange"

    def load(self, context, name, namespace, data):
        """Load the package through `colorbleed.setdress_api.load_package`.

        Returns the Avalon container, or None when nothing was loaded.
        """
        from avalon.maya.pipeline import containerise
        from avalon.maya import lib

        asset = context['asset']['name']
        # Build a unique namespace from the asset name; Maya namespaces may
        # not start with a digit, hence the "_" prefix in that case
        namespace = namespace or lib.unique_namespace(
            asset + "_",
            prefix="_" if asset[0].isdigit() else "",
            suffix="_",
        )

        from colorbleed import setdress_api

        containers = setdress_api.load_package(filepath=self.fname,
                                               name=name,
                                               namespace=namespace)

        self[:] = containers

        # Only containerize if any nodes were loaded by the Loader
        nodes = self[:]
        if not nodes:
            return

        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)

    def update(self, container, representation):
        # Delegate version switching to the setdress api, which reconciles
        # the scene against the new package data
        from colorbleed import setdress_api
        return setdress_api.update_package(container,
                                           representation)

    def remove(self, container):
        """Remove all sub containers"""
        from avalon import api
        from colorbleed import setdress_api
        import maya.cmds as cmds

        # Remove all members
        member_containers = setdress_api.get_contained_containers(container)
        for member_container in member_containers:
            self.log.info("Removing container %s",
                          member_container['objectName'])
            api.remove(member_container)

        # Remove alembic hierarchy reference
        # TODO: Check whether removing all contained references is safe enough
        members = cmds.sets(container['objectName'], query=True) or []
        references = cmds.ls(members, type="reference")
        for reference in references:
            self.log.info("Removing %s", reference)
            fname = cmds.referenceQuery(reference, filename=True)
            cmds.file(fname, removeReference=True)

        # Delete container and its contents
        if cmds.objExists(container['objectName']):
            members = cmds.sets(container['objectName'], query=True) or []
            cmds.delete([container['objectName']] + members)

        # TODO: Ensure namespace is gone

View file

@ -47,12 +47,6 @@ class CollectInstances(pyblish.api.ContextPlugin):
objectset = cmds.ls("*.id", long=True, type="objectSet",
recursive=True, objectsOnly=True)
for objset in objectset:
self.log.info("Creating instance for {}".format(objset))
members = cmds.sets(objset, query=True)
if members is None:
self.log.info("Skipped empty Set: \"%s\" " % objset)
continue
if not cmds.attributeQuery("id", node=objset, exists=True):
continue
@ -68,6 +62,13 @@ class CollectInstances(pyblish.api.ContextPlugin):
exists=True)
assert has_family, "\"%s\" was missing a family" % objset
members = cmds.sets(objset, query=True)
if members is None:
self.log.warning("Skipped empty instance: \"%s\" " % objset)
continue
self.log.info("Creating instance for {}".format(objset))
data = dict()
# Apply each user defined attribute as data

View file

@ -0,0 +1,91 @@
from collections import defaultdict
import pyblish.api
from maya import cmds, mel
from avalon import maya as avalon
from colorbleed.maya import lib
# TODO : Publish of setdress: -unique namespace for all assets, VALIDATOR!
class CollectSetDress(pyblish.api.InstancePlugin):
    """Collect all relevant setdress items

    Collected data:

        * File name
        * Compatible loader
        * Matrix per instance
        * Namespace

    Note: GPU caches are currently not supported in the pipeline. There is no
    logic yet which supports the swapping of GPU cache to renderable objects.
    """

    # Run just before the validators so containers and members exist
    order = pyblish.api.CollectorOrder + 0.49
    label = "Set Dress"
    families = ["colorbleed.setdress"]

    def process(self, instance):

        # Find containers
        containers = avalon.ls()

        # Get all content from the instance
        instance_lookup = set(cmds.ls(instance, type="transform", long=True))

        data = defaultdict(list)
        hierarchy_nodes = []
        for container in containers:

            root = lib.get_container_transforms(container, root=True)
            # Only consider containers whose root transform is a member
            # of this instance
            if root not in instance_lookup:
                continue

            # Retrieve the hierarchy
            parent = cmds.listRelatives(root, parent=True, fullPath=True)[0]
            hierarchy_nodes.append(parent)

            # Temporary warning for GPU cache which are not supported yet
            loader = container["loader"]
            if loader == "GpuCacheLoader":
                self.log.warning("GPU Cache Loader is currently not supported"
                                 "in the pipeline, we will export it tho")

            # Gather info for new data entry
            representation_id = container["representation"]
            instance_data = {"loader": loader,
                             "parent": parent,
                             "namespace": container["namespace"]}

            # Check if matrix differs from default and store changes
            matrix_data = self.get_matrix_data(root)
            if matrix_data:
                instance_data["matrix"] = matrix_data

            data[representation_id].append(instance_data)

        # Store per-representation build data plus the unique parent nodes
        # that make up the hierarchy to export
        instance.data["scenedata"] = dict(data)
        instance.data["hierarchy"] = list(set(hierarchy_nodes))

    def get_file_rule(self, rule):
        # Query the workspace file rule entry, e.g. "alembicCache"
        return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule))

    def get_matrix_data(self, node):
        """Get the matrix of the node when it differs from the default

        Args:
            node (str): transform node to query

        Returns:
            list or None: matrix values, or None when the node still has
                the default (identity) matrix
        """
        matrix = cmds.xform(node, query=True, matrix=True)
        if matrix == lib.DEFAULT_MATRIX:
            return

        return matrix

View file

@ -25,8 +25,8 @@ class ExtractColorbleedAlembic(colorbleed.api.Extractor):
nodes = instance[:]
# Collect the start and end including handles
start = instance.data["startFrame"]
end = instance.data["endFrame"]
start = instance.data.get("startFrame", 1)
end = instance.data.get("endFrame", 1)
handles = instance.data.get("handles", 0)
if handles:
start -= handles

View file

@ -0,0 +1,54 @@
import json
import os
import colorbleed.api
from colorbleed.maya.lib import extract_alembic
from maya import cmds
class ExtractSetDress(colorbleed.api.Extractor):
    """Extract a set dress as a JSON build recipe plus an alembic hierarchy.

    Two files are written into the staging directory:

        * ``<instance>.json``: the collected ``scenedata`` describing which
          representations to load, their loaders, namespaces, parents and
          matrix overrides
        * ``<instance>.abc``: a single-frame alembic of the hierarchy nodes
    """

    label = "Extract Set Dress"
    hosts = ["maya"]
    families = ["colorbleed.setdress"]

    def process(self, instance):

        parent_dir = self.staging_dir(instance)

        hierarchy_filename = "{}.abc".format(instance.name)
        hierarchy_path = os.path.join(parent_dir, hierarchy_filename)
        json_filename = "{}.json".format(instance.name)
        json_path = os.path.join(parent_dir, json_filename)

        self.log.info("Dumping scene data for debugging ..")
        with open(json_path, "w") as filepath:
            json.dump(instance.data["scenedata"], filepath, ensure_ascii=False)

        self.log.info("Extracting point cache ..")
        cmds.select(instance.data["hierarchy"])

        # Run basic alembic exporter on a single frame; the cbId attribute
        # is included so nodes can be matched again on rebuild
        extract_alembic(file=hierarchy_path,
                        startFrame=1.0,
                        endFrame=1.0,
                        **{"step": 1.0,
                           "attr": ["cbId"],
                           "writeVisibility": True,
                           "writeCreases": True,
                           "uvWrite": True,
                           "selection": True})

        instance.data["files"] = [json_path, hierarchy_path]

        # Remove data
        instance.data.pop("scenedata", None)
        cmds.select(clear=True)

View file

@ -4,6 +4,7 @@ from maya import cmds
import pyblish.api
import colorbleed.api
import colorbleed.maya.lib as lib
log = logging.getLogger("Rig Controllers")
@ -78,13 +79,10 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
@staticmethod
def validate_transforms(control):
tolerance = 1e-30
identity = [1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0]
matrix = cmds.xform(control, query=True, matrix=True, objectSpace=True)
if not all(abs(x - y) < tolerance for x, y in zip(identity, matrix)):
if not all(abs(x - y) < tolerance for x, y in zip(lib.DEFAULT_MATRIX,
matrix)):
log.error("%s matrix : %s" % (control, matrix))
return False
return True
@ -106,11 +104,6 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
@classmethod
def repair(cls, instance):
identity = [1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0]
# lock all controllers in controls_SET
controls = cmds.sets("controls_SET", query=True)
for control in controls:
@ -123,4 +116,6 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
log.info("Repairing matrix")
if not cls.validate_transforms(control):
cmds.xform(control, matrix=identity, objectSpace=True)
cmds.xform(control,
matrix=lib.DEFAULT_MATRIX,
objectSpace=True)

View file

@ -0,0 +1,35 @@
from collections import defaultdict

import pyblish.api
class ValidateSetdressNamespaces(pyblish.api.InstancePlugin):
    """Ensure namespaces are not nested.

    Each loaded asset in a set dress must live in a single flat namespace
    (e.g. ``asset_01:node``); nested namespaces (``a:b:node``) are invalid.
    """

    label = "Validate Setdress Namespaces"
    order = pyblish.api.ValidatorOrder
    families = ["colorbleed.setdress"]

    def process(self, instance):
        self.log.info("Checking namespace for %s", instance.name)
        invalid = self.get_invalid(instance)
        if invalid:
            self.log.error("Nested namespaces found")
            # Raise so the validator actually fails the publish instead of
            # only logging the error
            raise RuntimeError("Nested namespaces found: "
                               "{0}".format(invalid))

    @classmethod
    def get_invalid(cls, instance):
        """Return member nodes that live in a nested namespace.

        Args:
            instance: pyblish instance whose members are checked

        Returns:
            list: node names (without namespace) inside nested namespaces
        """
        from collections import defaultdict
        from maya import cmds

        invalid = []
        namespace_lookup = defaultdict(list)
        for item in cmds.ls(instance):
            # rpartition yields ("", "", item) for nodes with no namespace;
            # the original `item.rsplit(":", 1)[0]` tuple-unpack crashed here
            namespace, _, node = item.rpartition(":")
            namespace_lookup[namespace].append(node)

        for namespace, nodes in namespace_lookup.items():
            # A namespace with more than one non-empty part is nested
            parts = [p for p in namespace.split(":") if p != ""]
            if len(parts) > 1:
                invalid.extend(nodes)

        return invalid

View file

@ -30,6 +30,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"colorbleed.mayaAscii",
"colorbleed.model",
"colorbleed.pointcache",
"colorbleed.setdress",
"colorbleed.rig"]
def process(self, instance):
@ -47,10 +48,10 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
def register(self, instance):
# Required environment variables
PROJECT = Session["AVALON_PROJECT"]
PROJECT = os.environ["AVALON_PROJECT"]
ASSET = instance.data.get("asset") or Session["AVALON_ASSET"]
SILO = Session["AVALON_SILO"]
LOCATION = Session("AVALON_LOCATION")
SILO = os.environ["AVALON_SILO"]
LOCATION = os.environ("AVALON_LOCATION")
context = instance.context
# Atomicity
@ -139,7 +140,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
root = api.registered_root()
template_data = {"root": root,
"project": PROJECT,
"silo": SILO,
"silo": asset['silo'],
"asset": ASSET,
"subset": subset["name"],
"version": version["name"]}
@ -213,7 +214,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"context": {
"project": PROJECT,
"asset": ASSET,
"silo": SILO,
"silo": asset['silo'],
"subset": subset["name"],
"version": version["name"],
"representation": ext[1:]
@ -318,9 +319,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
current_families = instance.data.get("families", list())
instance_family = instance.data.get("family", None)
families += current_families
if instance_family is not None:
families.append(instance_family)
families += current_families
# create relative source path for DB
relative_path = os.path.relpath(context.data["currentFile"],
@ -333,4 +334,10 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"source": source,
"comment": context.data.get("comment")}
return dict(instance.data, **version_data)
# Include optional data if present in
optionals = ["startFrame", "endFrame", "step", "handles"]
for key in optionals:
if key in instance.data:
version_data[key] = instance.data[key]
return version_data

554
colorbleed/setdress_api.py Normal file
View file

@ -0,0 +1,554 @@
import logging
import json
import os
import contextlib
import copy
from maya import cmds
from avalon import api
import avalon.io as io
from avalon.maya.lib import unique_namespace
log = logging.getLogger("PackageLoader")
def matrix_equals(current_matrix, original_matrix, tolerance=1e-10):
    """Return whether two matrices are equal within a tolerance.

    Args:
        current_matrix (list, tuple): the matrix to check
        original_matrix (list, tuple): the matrix to check against
        tolerance (float): maximum allowed per-component difference

    Returns:
        bool: True when all component pairs differ by less than `tolerance`.
    """
    pairs = zip(current_matrix, original_matrix)
    return all(abs(a - b) < tolerance for a, b in pairs)
def to_namespace(node, namespace):
    """Return node name as if it's inside the namespace.

    Every ``|`` hierarchy separator gets the namespace prefixed onto the
    name that follows it.

    Args:
        node (str): Node name
        namespace (str): Namespace

    Returns:
        str: The node in the namespace.
    """
    separator = "|{0}:".format(namespace)
    return separator.join(node.split("|"))
@contextlib.contextmanager
def namespaced(namespace, new=True):
    """Work inside namespace during context

    Args:
        namespace (str): The namespace to set current during the context
        new (bool): When enabled this will rename the namespace to a unique
            namespace if the input namespace already exists.

    Yields:
        str: The namespace that is used during the context
    """
    original = cmds.namespaceInfo(cur=True)
    if new:
        namespace = unique_namespace(namespace)
        cmds.namespace(add=namespace)

    try:
        cmds.namespace(set=namespace)
        yield namespace
    finally:
        # Always restore the previously current namespace, even on error
        cmds.namespace(set=original)
@contextlib.contextmanager
def unlocked(nodes):
    """Temporarily unlock `nodes` during the context.

    Lock states are captured per node keyed by Maya's UUID (so renames during
    the context don't break the restore) and reapplied on exit, even when an
    exception is raised.

    Args:
        nodes (list): Maya node names to unlock.

    Yields:
        None
    """
    # Get node state by Maya's uuid
    nodes = cmds.ls(nodes, long=True)
    uuids = cmds.ls(nodes, uuid=True)
    states = cmds.lockNode(nodes, query=True, lock=True)
    states = {uuid: state for uuid, state in zip(uuids, states)}
    try:
        cmds.lockNode(nodes, lock=False)
        yield
    finally:
        # Reapply original states (`.iteritems`: this module targets Python 2,
        # see `basestring` usage elsewhere in the file)
        for uuid, state in states.iteritems():
            nodes_from_id = cmds.ls(uuid, long=True)
            if not nodes_from_id:
                log.warning("Node not found: %s", uuid)
                continue
            cmds.lockNode(nodes_from_id[0], lock=state)
def load_package(filepath, name, namespace=None):
    """Load a package that was gathered elsewhere.

    A package is a group of published instances, possibly with additional data
    in a hierarchy.

    Args:
        filepath (str): path to the package ``.json`` file; a sibling
            ``.abc`` file with the same basename is expected next to it
        name (str): name for the package root group
        namespace (str, optional): namespace to load into; when None a
            unique namespace is generated from the file's basename

    Returns:
        list: created container names followed by the referenced
            hierarchy nodes
    """
    from avalon.tools.cbloader import lib

    if namespace is None:
        # Define a unique namespace for the package
        namespace = os.path.basename(filepath).split(".")[0]
        # Bugfix: the generated unique namespace was previously discarded
        # (bare `unique_namespace(namespace)` call), so namespace clashes
        # were possible. Keep the returned value.
        namespace = unique_namespace(namespace)

    assert isinstance(namespace, basestring)

    # Load the setdress package data
    with open(filepath, "r") as fp:
        data = json.load(fp)

    # Load the setdress alembic hierarchy
    # We import this into the namespace in which we'll load the package's
    # instances into afterwards.
    alembic = filepath.replace(".json", ".abc")
    hierarchy = cmds.file(alembic,
                          reference=True,
                          namespace=namespace,
                          returnNewNodes=True,
                          groupReference=True,
                          groupName="{}:{}".format(namespace, name),
                          typ="Alembic")

    # Get the top root node (the reference group)
    root = "{}:{}".format(namespace, name)

    containers = []
    for representation_id, instances in data.items():

        # Find the compatible loaders
        loaders = list(lib.iter_loaders(representation_id))
        for instance in instances:
            container = _add(instance=instance,
                             representation_id=representation_id,
                             loaders=loaders,
                             namespace=namespace,
                             root=root)
            containers.append(container)

    # TODO: Do we want to cripple? Or do we want to add a 'parent' parameter?
    # Cripple the original avalon containers so they don't show up in the
    # manager
    # for container in containers:
    #     cmds.setAttr("%s.id" % container,
    #                  "colorbleed.setdress.container",
    #                  type="string")

    # TODO: Lock all loaded nodes
    # This is to ensure the hierarchy remains unaltered by the artists
    # for node in nodes:
    #     cmds.lockNode(node, lock=True)

    return containers + hierarchy
def _add(instance, representation_id, loaders, namespace, root="|"):
    """Add an item from the package

    Args:
        instance (dict): one instance entry from the package data; expects
            at least the keys "loader", "namespace" and "parent"
        representation_id (str): id of the representation to load
        loaders (list): candidate loader classes compatible with the
            representation
        namespace (str): namespace to process within
        root (str): root node of the setdress hierarchy to parent under

    Returns:
        str: The created Avalon container.

    Raises:
        RuntimeError: when no loader named ``instance['loader']`` is found
    """

    from colorbleed.maya.lib import get_container_transforms

    # Process within the namespace
    with namespaced(namespace, new=False) as namespace:

        # Get the used loader
        Loader = next((x for x in loaders if
                       x.__name__ == instance['loader']),
                      None)
        if Loader is None:
            log.warning("Loader is missing: %s. Skipping %s",
                        instance['loader'], instance)
            raise RuntimeError("Loader is missing.")

        container = api.load(Loader,
                             representation_id,
                             namespace=instance['namespace'])

        # Get the root from the loaded container
        loaded_root = get_container_transforms({"objectName": container},
                                               root=True)

        # Apply matrix to root node (if any matrix edits)
        matrix = instance.get("matrix", None)
        if matrix:
            cmds.xform(loaded_root, objectSpace=True, matrix=matrix)

        # Parent into the setdress hierarchy
        # Namespace is missing from parent node(s), add namespace
        # manually
        parent = root + to_namespace(instance["parent"], namespace)
        cmds.parent(loaded_root, parent, relative=True)

    return container
# Store root nodes based on representation and namespace
def _instances_by_namespace(data):
"""Rebuild instance data so we can look it up by namespace.
Note that the `representation` is added into the instance's
data with a `representation` key.
Args:
data (dict): scene build data
Returns:
dict
"""
result = {}
# Add new assets
for representation_id, instances in data.items():
# Ensure we leave the source data unaltered
instances = copy.deepcopy(instances)
for instance in instances:
instance['representation'] = representation_id
result[instance['namespace']] = instance
return result
def get_contained_containers(container):
    """Get the Avalon containers in this container

    Args:
        container (dict): The container dict.

    Returns:
        list: A list of member container dictionaries.
    """

    import avalon.schema
    from avalon.maya.pipeline import parse_container

    # Get avalon containers in this package setdress container
    containers = []
    members = cmds.sets(container['objectName'], query=True)
    for node in cmds.ls(members, type="objectSet"):
        try:
            member_container = parse_container(node)
            containers.append(member_container)
        except avalon.schema.ValidationError:
            # Member object sets that are not valid containers are ignored
            pass

    return containers
def update_package_version(container, version):
    """
    Update package by version number

    Args:
        container (dict): container data of the container node
        version (int): the new version number of the package
            (-1 resolves to the latest available version)

    Returns:
        None
    """

    # Resolve the representation the scene currently points at
    # (versioning scheme from `core.maya.pipeline`)
    current_representation = io.find_one({
        "_id": io.ObjectId(container["representation"])
    })
    assert current_representation is not None, "This is a bug"

    version_, subset, asset, project = io.parenthood(current_representation)

    # Find the requested version under the same subset
    query = {"type": "version", "parent": subset["_id"]}
    if version == -1:
        # Highest version name first == latest
        new_version = io.find_one(query, sort=[("name", -1)])
    else:
        query["name"] = version
        new_version = io.find_one(query)
    assert new_version is not None, "This is a bug"

    # Get the matching representation (same name/extension) of the
    # new version
    new_representation = io.find_one({
        "type": "representation",
        "parent": new_version["_id"],
        "name": current_representation["name"]
    })

    update_package(container, new_representation)
def update_package(set_container, representation):
    """Update any matrix changes in the scene based on the new data

    Args:
        set_container (dict): container data from `ls()`
        representation (dict): the representation document from the database

    Returns:
        None
    """

    def _read_build_data(path):
        # Package build data is stored as a .json sidecar file
        assert path.endswith(".json")
        with open(path, "r") as handle:
            return json.load(handle)

    # Load the original package data
    current_representation = io.find_one({
        "_id": io.ObjectId(set_container['representation']),
        "type": "representation"
    })
    current_file = api.get_representation_path(current_representation)
    current_data = _read_build_data(current_file)

    # Load the new package data
    new_file = api.get_representation_path(representation)
    new_data = _read_build_data(new_file)

    # Update scene content
    containers = get_contained_containers(set_container)
    update_scene(set_container, containers, current_data, new_data, new_file)

    # TODO: This should be handled by the pipeline itself
    cmds.setAttr(set_container['objectName'] + ".representation",
                 str(representation['_id']), type="string")
def update_scene(set_container, containers, current_data, new_data, new_file):
    """Updates the hierarchy, assets and their matrix

    Updates the following within the scene:
        * Setdress hierarchy alembic
        * Matrix
        * Parenting
        * Representations

    It removes any assets which are not present in the new build data

    Args:
        set_container (dict): the setdress container of the scene
        containers (list): the list of containers under the setdress container
        current_data (dict): the current build data of the setdress
        new_data (dict): the new build data of the setdress
        new_file (str): file path of the new build data (.json); the new
            hierarchy alembic is expected next to it with a .abc extension

    Returns:
        processed_containers (list): all new and updated containers
    """
    from colorbleed.maya.lib import DEFAULT_MATRIX, get_container_transforms
    from avalon.tools.cbloader import lib
    set_namespace = set_container['namespace']
    # Update the setdress hierarchy alembic by swapping the file of its
    # reference node to the new build's alembic
    set_root = get_container_transforms(set_container, root=True)
    set_hierarchy_root = cmds.listRelatives(set_root, fullPath=True)[0]
    set_hierarchy_reference = cmds.referenceQuery(set_hierarchy_root,
                                                  referenceNode=True)
    new_alembic = new_file.replace(".json", ".abc")
    assert os.path.exists(new_alembic), "%s does not exist." % new_alembic
    # Nodes may be locked; unlock the whole hierarchy while reloading
    with unlocked(cmds.listRelatives(set_root, ad=True, fullPath=True)):
        cmds.file(new_alembic,
                  loadReference=set_hierarchy_reference,
                  type="Alembic")
    identity = DEFAULT_MATRIX[:]
    processed_namespaces = set()
    processed_containers = list()
    new_lookup = _instances_by_namespace(new_data)
    old_lookup = _instances_by_namespace(current_data)
    for container in containers:
        container_ns = container['namespace']
        # Consider it processed here; even if it fails we want to store that
        # the namespace was already available.
        processed_namespaces.add(container_ns)
        processed_containers.append(container['objectName'])
        if container_ns in new_lookup:
            root = get_container_transforms(container, root=True)
            if not root:
                log.error("Can't find root for %s", container['objectName'])
                continue
            old_instance = old_lookup.get(container_ns, {})
            new_instance = new_lookup[container_ns]
            # Update the matrix
            # check matrix against old_data matrix to find local overrides;
            # an artist-tweaked transform is preserved over the new build data
            current_matrix = cmds.xform(root,
                                        query=True,
                                        matrix=True,
                                        objectSpace=True)
            original_matrix = old_instance.get("matrix", identity)
            has_matrix_override = not matrix_equals(current_matrix,
                                                    original_matrix)
            if has_matrix_override:
                log.warning("Matrix override preserved on %s", container_ns)
            else:
                new_matrix = new_instance.get("matrix", identity)
                cmds.xform(root, matrix=new_matrix, objectSpace=True)
            # Update the parenting (only when the build data changed it)
            if old_instance.get("parent", None) != new_instance["parent"]:
                parent = to_namespace(new_instance['parent'], set_namespace)
                if not cmds.objExists(parent):
                    log.error("Can't find parent %s", parent)
                    continue
                # Set the new parent (root is locked; unlock around reparent)
                cmds.lockNode(root, lock=False)
                root = cmds.parent(root, parent, relative=True)
                cmds.lockNode(root, lock=True)
            # Update the representation
            representation_current = container['representation']
            representation_old = old_instance['representation']
            representation_new = new_instance['representation']
            has_representation_override = (representation_current !=
                                           representation_old)
            if representation_new != representation_current:
                if has_representation_override:
                    log.warning("Your scene had local representation "
                                "overrides within the set. New "
                                "representations not loaded for %s.",
                                container_ns)
                    continue
                # We check it against the current 'loader' in the scene instead
                # of the original data of the package that was loaded because
                # an Artist might have made scene local overrides
                if new_instance['loader'] != container['loader']:
                    log.error("Switching loader between updates is not "
                              "supported. Skipping: %s", container_ns)
                    continue
                # Check whether the conversion can be done by the Loader.
                # They *must* use the same asset, subset and Loader for
                # `api.update` to make sense.
                old = io.find_one({"_id": io.ObjectId(representation_current)})
                new = io.find_one({"_id": io.ObjectId(representation_new)})
                is_valid = compare_representations(old=old, new=new)
                if not is_valid:
                    log.error("Skipping: %s. See log for details.",
                              container_ns)
                    continue
                new_version = new["context"]["version"]
                api.update(container, version=new_version)
        else:
            # Remove this container because it's not in the new data
            log.warning("Removing content: %s", container_ns)
            api.remove(container)
    # Add new assets (namespaces present in new_data but not in the scene)
    for representation_id, instances in new_data.items():
        # Find the compatible loaders
        loaders = list(lib.iter_loaders(representation_id))
        for instance in instances:
            # Already processed in update functionality
            if instance['namespace'] in processed_namespaces:
                continue
            container = _add(instance=instance,
                             representation_id=representation_id,
                             loaders=loaders,
                             namespace=set_container['namespace'],
                             root=set_root)
            # Add to the setdress container
            cmds.sets(container,
                      addElement=set_container['objectName'])
            processed_containers.append(container)
    return processed_containers
def compare_representations(old, new):
    """Check if the old representation given can be updated

    Due to limitations of the `api.update` function we cannot allow
    differences in the following data:
        * Representation name (extension)
        * Asset name
        * Subset name (variation)

    If any of those data values differs, the mismatch is logged as an
    error and the update is rejected.

    Args:
        old (dict): representation data from the database
        new (dict): representation data from the database

    Returns:
        bool: True if the update is allowed, False otherwise
    """
    # Representation name encodes the file extension; a different
    # extension would need a different loader
    if new["name"] != old["name"]:
        log.error("Cannot switch extensions")
        return False

    new_context = new["context"]
    old_context = old["context"]
    if new_context["asset"] != old_context["asset"]:
        log.error("Changing assets between updates is "
                  "not supported.")
        return False

    if new_context["subset"] != old_context["subset"]:
        log.error("Changing subsets between updates is "
                  "not supported.")
        return False

    return True