mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 12:54:40 +01:00)

Commit c720e542c5: resolved conflict
46 changed files with 3598 additions and 1507 deletions
@@ -1,4 +0,0 @@
:: Set paths to ensure plugins have access to the inhouse tools
set PYTHONPATH=%PYTHONPATH%;P:\pipeline\dev\git\cb;
set PYTHONPATH=%PYTHONPATH%;P:\pipeline\dev\git\cbra;
@@ -8,10 +8,11 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")

def install():
    publish_path = os.path.join(PLUGINS_DIR, "publish")

    print("Registering global plug-ins..")
    pyblish.register_plugin_path(publish_path)


def uninstall():
    pyblish.deregister_plugin_path(PUBLISH_PATH)
@@ -1,10 +1,13 @@
# absolute_import is needed to counter the `module has no cmds error` in Maya
from __future__ import absolute_import

import pyblish.api
import os
import uuid

from maya import cmds

import pyblish.api


def get_errored_instances_from_context(context):
@@ -34,7 +37,7 @@ def get_errored_plugins_from_data(context):
    plugins = list()
    results = context.data.get("results", [])
    for result in results:
        if result["success"] == True:
        if result["success"] is True:
            continue
        plugins.append(result["plugin"])
@@ -150,8 +153,6 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
    icon = "wrench"  # Icon from Awesome Icon

    def process(self, context, plugin):
        import cbra.lib
        import cbra.utils.maya.node_uuid as id_utils

        self.log.info("Finding bad nodes..")
@@ -182,15 +183,73 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action):

        # Parse context from current file
        self.log.info("Parsing current context..")
        try:
            current_file = context.data['currentFile']
            context = cbra.lib.parse_context(current_file)
        except RuntimeError, e:
            self.log.error("Can't generate UUIDs because scene isn't "
                           "in new-style pipeline: ".format(current_file))
            raise e
        print(">>> DEBUG CONTEXT :", context)
        print(">>> DEBUG CONTEXT DATA:", context.data)

        # Generate and add the ids to the nodes
        ids = id_utils.generate_ids(context, invalid)
        id_utils.add_ids(ids)
        # # Generate and add the ids to the nodes
        node_ids = self.generate_ids(context, invalid)
        self.apply_ids(node_ids)
        self.log.info("Generated ids on nodes: {0}".format(invalid))

    def get_context(self, instance=None):

        PROJECT = os.environ["AVALON_PROJECT"]
        ASSET = instance.data.get("asset") or os.environ["AVALON_ASSET"]
        SILO = os.environ["AVALON_SILO"]
        LOCATION = os.getenv("AVALON_LOCATION")

        return {"project": PROJECT,
                "asset": ASSET,
                "silo": SILO,
                "location": LOCATION}

    def generate_ids(self, context, nodes):
        """Generate cb UUIDs for nodes.

        The identifiers are formatted like:
            assets:character/test:bluey:46D221D9-4150-8E49-6B17-43B04BFC26B6

        This is a concatenation of:
            - entity (shots or assets)
            - folders (parent hierarchy)
            - asset (the name of the asset)
            - uuid (unique id for node in the scene)

        Raises:
            RuntimeError: When context can't be parsed of the current asset

        Returns:
            dict: node, uuid dictionary

        """

        # Make a copy of the context
        data = context.copy()

        # Define folders

        node_ids = dict()
        for node in nodes:
            # Generate a unique ID per node
            data['uuid'] = uuid.uuid4()
            unique_id = "{asset}:{item}:{uuid}".format(**data)
            node_ids[node] = unique_id

        return node_ids

    def apply_ids(self, node_ids):
        """Apply the created unique IDs to the node

        Args:
            node_ids (dict): each node with a unique id

        Returns:
            None
        """

        attribute = "mbId"
        for node, id in node_ids.items():
            # check if node has attribute
            if not cmds.attributeQuery(attribute, node=node, exists=True):
                cmds.addAttr(node, longName=attribute, dataType="string")

            cmds.setAttr("{}.{}".format(node, attribute), id)
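As a standalone sketch of the id scheme used by generate_ids() above (the asset and item values are hypothetical placeholders):

    import uuid

    data = {"asset": "bluey", "item": "character/test"}  # stand-in context
    data['uuid'] = uuid.uuid4()
    unique_id = "{asset}:{item}:{uuid}".format(**data)
    print(unique_id)  # e.g. bluey:character/test:46d221d9-4150-8e49-6b17-43b04bfc26b6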
@@ -21,7 +21,9 @@ from .action import (
)

__all__ = [
    # plugin classes
    "Extractor",
    # ordering
    "ValidatePipelineOrder",
    "ValidateContentsOrder",
    "ValidateSceneOrder",
colorbleed/filetypes.py (new file, +2)

@@ -0,0 +1,2 @@
accepted_images_types = [".png", ".jpg", ".tga", ".tiff"]
ignored_images_types = [".pds"]
@@ -1,9 +1,12 @@
import os
import site
import uuid

from avalon import api as avalon
from avalon import maya, io, api as avalon
from pyblish import api as pyblish

from maya import cmds

from . import menu

PARENT_DIR = os.path.dirname(__file__)
@@ -28,6 +31,11 @@ def install():

    menu.install()

    print("Installing callbacks ... ")
    avalon.on("init", on_init)
    avalon.on("new", on_new)
    avalon.on("save", on_save)


def uninstall():
    pyblish.deregister_plugin_path(PUBLISH_PATH)
@@ -35,3 +43,88 @@ def uninstall():
    avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)

    menu.uninstall()


def _set_uuid(asset_id, node):
    """Add cbId to `node`
    Unless one already exists.
    """

    attr = "{0}.cbId".format(node)
    if not cmds.attributeQuery("cbId", node=node, exists=True):
        cmds.addAttr(node, longName="cbId", dataType="string")
        _, uid = str(uuid.uuid4()).rsplit("-", 1)
        cb_uid = "{}:{}".format(asset_id, uid)

        cmds.setAttr(attr, cb_uid, type="string")


def _copy_uuid(source, target):

    source_attr = "{0}.cbId".format(source)
    target_attr = "{0}.cbId".format(target)
    if not cmds.attributeQuery("cbId", node=target, exists=True):
        cmds.addAttr(target, longName="cbId", dataType="string")

    attribute_value = cmds.getAttr(source_attr)
    cmds.setAttr(target_attr, attribute_value, type="string")


def on_init():
    avalon.logger.info("Running callback on init..")

    maya.commands.reset_frame_range()
    maya.commands.reset_resolution()


def on_new():
    avalon.logger.info("Running callback on new..")

    # Load dependencies
    cmds.loadPlugin("AbcExport.mll", quiet=True)
    cmds.loadPlugin("AbcImport.mll", quiet=True)

    maya.commands.reset_frame_range()
    maya.commands.reset_resolution()


def on_save():
    """Automatically add IDs to new nodes
    Any transform of a mesh, without an existing ID,
    is given one automatically on file save.
    """

    avalon.logger.info("Running callback on save..")

    defaults = ["initialShadingGroup", "initialParticleSE"]

    # the default items which always want to have an ID
    types = ["mesh", "shadingEngine", "file", "nurbsCurve"]

    # the items which need to pass the id to their parent
    nodes = (set(cmds.ls(type=types, long=True)) -
             set(cmds.ls(long=True, readOnly=True)) -
             set(cmds.ls(long=True, lockedNodes=True)))

    transforms = set()
    for n in cmds.ls(type=types, long=True):
        # pass id to parent of node if in subtypes
        relatives = cmds.listRelatives(n, parent=True, fullPath=True)
        if not relatives:
            continue

        for r in cmds.listRelatives(n, parent=True, fullPath=True):
            transforms.add(r)

    # merge transforms and nodes in one set to make sure every item
    # is unique
    nodes |= transforms

    # Lead with asset ID from the database
    asset = os.environ["AVALON_ASSET"]
    asset_id = io.find_one({"type": "asset", "name": asset})

    for node in nodes:
        if node in defaults:
            continue
        _set_uuid(str(asset_id["_id"]), node)
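Outside Maya, the cbId composition in _set_uuid() reduces to a short sketch; the ObjectId string below is a hypothetical stand-in for the database id:

    import uuid

    asset_id = "507f1f77bcf86cd799439011"  # hypothetical database ObjectId
    _, uid = str(uuid.uuid4()).rsplit("-", 1)  # keep only the last uuid4 block
    cb_uid = "{}:{}".format(asset_id, uid)
    print(cb_uid)  # e.g. 507f1f77bcf86cd799439011:43b04bfc26b6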
@@ -612,10 +612,10 @@ def auto_connect_assets(src, dst):
            break

    for input_transform in cmds.sets(in_set, query=True):
        mbid = cmds.getAttr(input_transform + ".mbID")
        mbid = cmds.getAttr(input_transform + ".cbId")
        input_shape = cmds.listRelatives(input_transform, shapes=True)[0]

        for output_transform in lib.lsattr("mbID", value=mbid):
        for output_transform in lib.lsattr("cbId", value=mbid):

            ref = cmds.referenceQuery(output_transform, referenceNode=True)
            if ref != src:
@@ -1,15 +1,25 @@
"""Standalone helper functions"""

import re
import contextlib
from collections import OrderedDict
import logging
import os
import bson
import json
import logging
import contextlib
from collections import OrderedDict, defaultdict

from avalon import maya, io

from maya import cmds, mel


log = logging.getLogger(__name__)

from maya import cmds
project = io.find_one({"type": "project",
                       "name": os.environ["AVALON_PROJECT"]},
                      projection={"config.template.publish": True,
                                  "_id": False})
TEMPLATE = project["config"]["template"]["publish"]


def maintained_selection(arg=None):
@@ -249,6 +259,78 @@ def get_current_renderlayer():
    return cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)


@contextlib.contextmanager
def no_undo(flush=False):
    """Disable the undo queue during the context

    Arguments:
        flush (bool): When True the undo queue will be emptied when returning
            from the context losing all undo history. Defaults to False.

    """
    original = cmds.undoInfo(query=True, state=True)
    keyword = 'state' if flush else 'stateWithoutFlush'

    try:
        cmds.undoInfo(**{keyword: False})
        yield
    finally:
        cmds.undoInfo(**{keyword: original})


def polyConstraint(components, *args, **kwargs):
    """Return the list of *components* with the constraints applied.

    A wrapper around Maya's `polySelectConstraint` to retrieve its results as
    a list without altering selections. For a list of possible constraints
    see `maya.cmds.polySelectConstraint` documentation.

    Arguments:
        components (list): List of components of polygon meshes

    Returns:
        list: The list of components filtered by the given constraints.

    """

    kwargs.pop('mode', None)

    with no_undo(flush=False):
        print("la")
        with maintained_selection():
            print("po")
            # Apply constraint using mode=2 (current and next) so
            # it applies to the selection made before it; because just
            # a `maya.cmds.select()` call will not trigger the constraint.
            with reset_polySelectConstraint():
                print("do")
                cmds.select(components, r=1)
                cmds.polySelectConstraint(*args, mode=2, **kwargs)
                result = cmds.ls(selection=True)

    return result


@contextlib.contextmanager
def reset_polySelectConstraint(reset=True):
    """Context during which the given polyConstraint settings are disabled.

    The original settings are restored after the context.

    """

    original = cmds.polySelectConstraint(query=True, stateString=True)

    try:
        if reset:
            # Reset all parameters
            mel.eval("resetPolySelectConstraint;")
        cmds.polySelectConstraint(disable=True)
        yield
    finally:
        mel.eval(original)


def is_visible(node,
               displayLayer=True,
               intermediateObject=True,
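A rough usage sketch for the two context managers above, assuming they are importable from colorbleed.maya.lib inside a running Maya session (constraint flag values per the maya.cmds.polySelectConstraint docs):

    from maya import cmds
    from colorbleed.maya.lib import no_undo, polyConstraint

    cube = cmds.polyCube(name="myCube")[0]

    # Create throwaway nodes without polluting the undo queue
    with no_undo(flush=False):
        for i in range(10):
            cmds.spaceLocator(name="tmp{0}".format(i))

    # Filter the cube's faces down to triangles only: type=0x0008 constrains
    # to faces, size=1 keeps triangles
    triangles = polyConstraint(["{0}.f[*]".format(cube)], type=0x0008, size=1)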
@@ -344,8 +426,8 @@ def extract_alembic(file,
                    startFrame=None,
                    endFrame=None,
                    selection=True,
                    uvWrite=True,
                    eulerFilter=True,
                    uvWrite= True,
                    eulerFilter= True,
                    dataFormat="ogawa",
                    verbose=False,
                    **kwargs):
@@ -449,14 +531,14 @@ def extract_alembic(file,
        valid_types = _alembic_options[key]
        if not isinstance(value, valid_types):
            raise TypeError("Alembic option unsupported type: "
                            "{0} (expected {1}}".format(value, valid_types))
                            "{0} (expected {1})".format(value, valid_types))

    # Format the job string from options
    job_args = list()
    for key, value in options.items():
        if isinstance(value, (list, tuple)):
            for entry in value:
                job_args.append("-{0} {1}".format(key, entry))
                job_args.append("-{} {}".format(key, entry))
        elif isinstance(value, bool):
            job_args.append("-{0}".format(key))
        else:
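To see what the option-to-flag loop above produces, a standalone run with hypothetical options:

    options = {"frameRange": "1 100",
               "root": ["|char_GRP", "|prop_GRP"],
               "uvWrite": True,
               "step": 1.0}

    job_args = list()
    for key, value in options.items():
        if isinstance(value, (list, tuple)):
            for entry in value:
                job_args.append("-{0} {1}".format(key, entry))
        elif isinstance(value, bool):
            job_args.append("-{0}".format(key))  # boolean flags carry no value
        else:
            job_args.append("-{0} {1}".format(key, value))

    job_str = " ".join(job_args)
    print(job_str)
    # e.g. -frameRange 1 100 -root |char_GRP -root |prop_GRP -uvWrite -step 1.0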
@@ -476,9 +558,282 @@ def extract_alembic(file,
    log.debug("Extracting Alembic with job arguments: %s", job_str)

    # Perform extraction
    print("Alembic Job Arguments : {}".format(job_str))

    cmds.AbcExport(j=job_str, verbose=verbose)

    if verbose:
        log.debug("Extracted Alembic to: %s", file)

    return file


def maya_temp_folder():
    scene_dir = os.path.dirname(cmds.file(query=True, sceneName=True))
    tmp_dir = os.path.abspath(os.path.join(scene_dir, "..", "tmp"))
    if not os.path.isdir(tmp_dir):
        os.makedirs(tmp_dir)

    return tmp_dir


def remap_resource_nodes(resources, folder=None):

    log.info("Updating resource nodes ...")
    for resource in resources:
        source = resource["source"]
        if folder:
            fname = os.path.basename(source)
            fpath = os.path.join(folder, fname)
        else:
            fpath = source

        node_attr = resource["attribute"]
        cmds.setAttr(node_attr, fpath, type="string")

    log.info("Saving file ...")
    cmds.file(save=True, type="mayaAscii")


def _get_id(node):
    """
    Get the `cbId` attribute of the given node
    Args:
        node (str): the name of the node to retrieve the attribute from

    Returns:
        str

    """

    if node is None:
        return

    try:
        attr = "{}.cbId".format(node)
        attribute_value = cmds.getAttr(attr)
    except Exception as e:
        log.debug(e)
        return

    return attribute_value


def filter_by_id(nodes, uuids):
    """Filter all nodes which match the UUIDs

    Args:
        nodes (list): collection of nodes to check
        uuids (list): a list of UUIDs which are linked to the shader

    Returns:
        list: matching nodes
    """

    filtered_nodes = []
    for node in nodes:
        if node is None:
            continue

        if not cmds.attributeQuery("cbId", node=node, exists=True):
            continue

        # Deformed shaped
        attr = "{}.cbId".format(node)
        attribute_value = cmds.getAttr(attr)
        if attribute_value not in uuids:
            continue

        filtered_nodes.append(node)

    return filtered_nodes


def get_representation_file(representation, template=TEMPLATE):
    """
    Rebuild the filepath of the representation's context
    Args:
        representation (dict): data of the registered in the database
        template (str): the template to fill

    Returns:
        str

    """
    context = representation["context"].copy()
    context["root"] = os.environ["AVALON_ROOT"]
    return template.format(**context)


def get_reference_node(path):
    """
    Get the reference node when the path is found being used in a reference
    Args:
        path (str): the file path to check

    Returns:
        node (str): name of the reference node in question
    """
    node = cmds.file(path, query=True, referenceNode=True)
    reference_path = cmds.referenceQuery(path, filename=True)
    if os.path.normpath(path) == os.path.normpath(reference_path):
        return node


def list_looks(asset_id):
    """Return all look subsets for the given asset

    This assumes all look subsets start with "look*" in their names.
    """

    # # get all subsets with look leading in
    # the name associated with the asset
    subset = io.find({"parent": asset_id,
                      "type": "subset",
                      "name": {"$regex": "look*"}})

    return list(subset)


def assign_look_by_version(nodes, version_id):
    """Assign nodes a specific published look version by id.

    This assumes the nodes correspond with the asset.

    Args:
        nodes(list): nodes to assign look to
        version_id (bson.ObjectId)

    Returns:
        None
    """

    # get representations of shader file and relationships
    shader_file = io.find_one({"type": "representation",
                               "parent": version_id,
                               "name": "ma"})

    shader_relations = io.find_one({"type": "representation",
                                    "parent": version_id,
                                    "name": "json"})

    # Load file
    shader_filepath = get_representation_file(shader_file)
    shader_relation = get_representation_file(shader_relations)

    reference_node = get_reference_node(shader_filepath)
    if reference_node is None:
        log.info("Loading lookdev for the first time..")

        # Define namespace
        assetname = shader_file['context']['asset']
        ns_assetname = "{}_".format(assetname)
        namespace = maya.unique_namespace(ns_assetname,
                                          format="%03d",
                                          suffix="_look")

        # Reference the look file
        with maya.maintained_selection():
            shader_nodes = cmds.file(shader_filepath,
                                     namespace=namespace,
                                     reference=True,
                                     returnNewNodes=True)
    else:
        log.info("Reusing existing lookdev..")
        shader_nodes = cmds.referenceQuery(reference_node, nodes=True)

    # Assign relationships
    with open(shader_relation, "r") as f:
        relationships = json.load(f)

    apply_shaders(relationships, shader_nodes, nodes)


def assign_look(nodes, subset="lookDefault"):
    """Assigns a look to a node.

    Optimizes the nodes by grouping by asset id and finding
    related subset by name.

    Args:
        nodes (list): all nodes to assign the look to
        subset (str): name of the subset to find
    """

    # Group all nodes per asset id
    grouped = defaultdict(list)
    for node in nodes:
        colorbleed_id = cmds.getAttr("{}.cbId".format(node))
        asset_id = colorbleed_id.split(":")[0]
        grouped[asset_id].append(node)

    for asset_id, asset_nodes in grouped.items():
        # create objectId for database
        asset_id = bson.ObjectId(asset_id)
        subset = io.find_one({"type": "subset",
                              "name": subset,
                              "parent": asset_id})

        assert subset, "No subset found for {}".format(asset_id)

        # get last version
        version = io.find_one({"parent": subset['_id'],
                               "type": "version",
                               "data.families":
                                   {"$in": ["colorbleed.lookdev"]}
                               },
                              sort=[("name", -1)],
                              projection={"_id": True})

        log.debug("Assigning look '{}' <{}> to nodes: {}".format(subset,
                                                                 version,
                                                                 asset_nodes))

        assign_look_by_version(asset_nodes, version['_id'])


def apply_shaders(relationships, shader_nodes, nodes):
    """Apply all shaders to the nodes based on the relationship data

    Args:
        relationships (dict): shader to node relationships
        shader_nodes (list): shader network nodes
        nodes (list): nodes to assign to

    Returns:
        None
    """

    shader_sets = relationships.get("sets", [])
    shading_engines = cmds.ls(shader_nodes, type="shadingEngine", long=True)
    assert len(shading_engines) > 0, ("Error in retrieving shading engine "
                                      "from reference")

    # Pre-filter nodes and shader nodes
    nodes_by_id = defaultdict(list)
    shader_nodes_by_id = defaultdict(list)
    for node in nodes:
        _id = _get_id(node)
        nodes_by_id[_id].append(node)

    for shader_node in shader_nodes:
        _id = _get_id(shader_node)
        shader_nodes_by_id[_id].append(shader_node)

    # get all nodes which we need to link per shader
    for shader_set in shader_sets:
        # collect shading engine
        uuid = shader_set["uuid"]
        shading_engine = shader_nodes_by_id.get(uuid, [])
        assert len(shading_engine) == 1, ("Could not find the correct "
                                          "shading engine with cbId "
                                          "'{}'".format(uuid))

        # collect members
        filtered_nodes = list()
        for member in shader_set["members"]:
            member_uuid = member["uuid"]
            members = nodes_by_id.get(member_uuid, [])
            filtered_nodes.extend(members)

        cmds.sets(filtered_nodes, forceElement=shading_engine[0])
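The cbId-based grouping at the heart of assign_look() can be tried standalone; the ids below are hypothetical:

    from collections import defaultdict

    # node -> value of its .cbId attribute (asset id precedes the colon)
    cb_ids = {"|char_GRP|body": "5a1f00aa:aa11",
              "|char_GRP|head": "5a1f00aa:bb22",
              "|prop_GRP|cup": "7c3e00bb:cc33"}

    grouped = defaultdict(list)
    for node, colorbleed_id in cb_ids.items():
        asset_id = colorbleed_id.split(":")[0]
        grouped[asset_id].append(node)

    print(dict(grouped))
    # {'5a1f00aa': ['|char_GRP|body', '|char_GRP|head'],
    #  '7c3e00bb': ['|prop_GRP|cup']}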
colorbleed/maya/menu.json (new file, +1861)

File diff suppressed because it is too large.
@@ -1,72 +1,69 @@
import sys
from maya import cmds
import os
import logging
import site

from avalon.vendor.Qt import QtWidgets, QtCore

import maya.cmds as cmds

self = sys.modules[__name__]
self._menu = "colorbleed"
self._parent = {
    widget.objectName(): widget
    for widget in QtWidgets.QApplication.topLevelWidgets()
}.get("MayaWindow")

# set colorbleed scripts path in environment keys
os.environ["COLORBLEED_SCRIPTS"] = r"P:\pipeline\dev\git\cbMayaScripts\cbMayaScripts"

log = logging.getLogger(__name__)


def install():
    from . import interactive
def deferred():

    uninstall()
    # todo: replace path with server / library path
    site.addsitedir("C:\Users\User\Documents\development\scriptsmenu\python")

    def deferred():
        cmds.menu(self._menu,
                  label="Colorbleed",
                  tearOff=True,
                  parent="MayaWindow")
    from scriptsmenu import launchformaya
    import scriptsmenu.scriptsmenu as menu

        # Modeling sub-menu
        cmds.menuItem("Modeling",
                      label="Modeling",
                      tearOff=True,
                      subMenu=True,
                      parent=self._menu)
    reload(launchformaya)
    reload(menu)

        cmds.menuItem("Combine", command=interactive.combine)
    log.info("Attempting to install ...")

        # Rigging sub-menu
        cmds.menuItem("Rigging",
                      label="Rigging",
                      tearOff=True,
                      subMenu=True,
                      parent=self._menu)
    # load configuration of custom menu
    config_path = os.path.join(os.path.dirname(__file__), "menu.json")
    config = menu.load_configuration(config_path)

        cmds.menuItem("Auto Connect", command=interactive.auto_connect)
        cmds.menuItem("Clone (Local)", command=interactive.clone_localspace)
        cmds.menuItem("Clone (World)", command=interactive.clone_worldspace)
        cmds.menuItem("Clone (Special)", command=interactive.clone_special)
        cmds.menuItem("Create Follicle", command=interactive.follicle)
    # get Maya menubar
    parent = launchformaya._maya_main_menubar()
    cb_menu = menu.ScriptsMenu(objectName=self._menu,
                               title=self._menu.title(),
                               parent=parent)

        # Animation sub-menu
        cmds.menuItem("Animation",
                      label="Animation",
                      tearOff=True,
                      subMenu=True,
                      parent=self._menu)
    # register modifiers
    modifiers = QtCore.Qt.ControlModifier | QtCore.Qt.ShiftModifier
    cb_menu.register_callback(modifiers, launchformaya.to_shelf)

        cmds.menuItem("Set Defaults", command=interactive.set_defaults)

        cmds.setParent("..", menu=True)

        cmds.menuItem(divider=True)

        cmds.menuItem("Auto Connect", command=interactive.auto_connect_assets)

    # Allow time for uninstallation to finish.
    QtCore.QTimer.singleShot(100, deferred)
    # apply configuration
    menu.load_from_configuration(cb_menu, config)


def uninstall():

    log.info("Attempting to uninstall ..")
    app = QtWidgets.QApplication.instance()
    widgets = dict((w.objectName(), w) for w in app.allWidgets())
    menu = widgets.get(self._menu)

    if menu:
        menu.deleteLater()
        del(menu)
    try:
        menu.deleteLater()
        del menu
    except Exception as e:
        log.error(e)


def install():

    uninstall()
    # Allow time for uninstallation to finish.
    cmds.evalDeferred(deferred)
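For orientation, a condensed sketch of the new scriptsmenu-based bootstrap; the config path is a hypothetical placeholder and only calls that appear in the diff above are used (load_configuration, ScriptsMenu, register_callback, load_from_configuration):

    from avalon.vendor.Qt import QtCore
    from scriptsmenu import launchformaya
    import scriptsmenu.scriptsmenu as menu

    config = menu.load_configuration("/path/to/menu.json")  # hypothetical path

    parent = launchformaya._maya_main_menubar()
    cb_menu = menu.ScriptsMenu(objectName="colorbleed",
                               title="Colorbleed",
                               parent=parent)

    # Ctrl+Shift-clicking a menu item sends it to the shelf
    modifiers = QtCore.Qt.ControlModifier | QtCore.Qt.ShiftModifier
    cb_menu.register_callback(modifiers, launchformaya.to_shelf)

    menu.load_from_configuration(cb_menu, config)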
@@ -1,6 +1,7 @@
import tempfile
import pyblish.api


ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05
ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1
ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2
@@ -17,7 +18,7 @@ class Extractor(pyblish.api.InstancePlugin):

    """

    order = pyblish.api.ExtractorOrder
    order = 2.0

    def staging_dir(self, instance):
        """Provide a temporary directory in which to store extracted files
@@ -8,7 +8,7 @@ class CreateLook(avalon.maya.Creator):

    name = "lookDefault"
    label = "Look Dev"
    family = "colorbleed.look"
    family = "colorbleed.lookdev"

    def __init__(self, *args, **kwargs):
        super(CreateLook, self).__init__(*args, **kwargs)
colorbleed/plugins/maya/create/colorbleed_texture.py (new file, +9)

@@ -0,0 +1,9 @@
import avalon.maya


class CreateTexture(avalon.maya.Creator):
    """Polygonal geometry for animation"""

    name = "texturesDefault"
    label = "Textures"
    family = "colorbleed.texture"
@@ -1,4 +1,8 @@
from avalon import api
import os

from maya import cmds

from avalon import api, maya


class AbcLoader(api.Loader):
@@ -13,12 +17,17 @@ class AbcLoader(api.Loader):
    color = "orange"

    def process(self, name, namespace, context, data):
        from maya import cmds

        cmds.loadPlugin("AbcImport.mll", quiet=True)
        # Prevent identical alembic nodes from being shared
        # Create unique namespace for the cameras

        # Get name from asset being loaded
        assert "_" in name, "Naming convention not followed"
        assetname = "{}_".format(name.split("_")[0])
        namespace = maya.unique_namespace(assetname,
                                          format="%03d",
                                          suffix="_abc")
        nodes = cmds.file(self.fname,
                          namespace=namespace,
                          sharedReferenceFile=False,
@@ -27,6 +36,8 @@ class AbcLoader(api.Loader):
                          reference=True,
                          returnNewNodes=True)

        # load colorbleed ID attribute

        self[:] = nodes
@@ -76,24 +87,19 @@ class CurvesLoader(api.Loader):
        ])

        with maya.maintained_selection():
            cmds.select(
                control_set,
                replace=True,
            cmds.select(control_set,
                        replace=True,
                        # Support controllers being embedded in
                        # additional selection sets.
                        noExpand=False)

                # Support controllers being embedded in
                # additional selection sets.
                noExpand=False
            )

        nodes = cmds.file(
            self.fname,
            i=True,
            type="atomImport",
            renameAll=True,
            namespace=namespace,
            options=options,
            returnNewNodes=True,
        )
        nodes = cmds.file(self.fname,
                          i=True,
                          type="atomImport",
                          renameAll=True,
                          namespace=namespace,
                          options=options,
                          returnNewNodes=True)

        self[:] = nodes + cmds.sets(container, query=True) + [container]
@@ -101,8 +107,7 @@ class CurvesLoader(api.Loader):
        self._post_process(name, namespace, context, data)

    def _post_process(self, name, namespace, context, data):
        import os
        from maya import cmds

        from avalon import maya, io

        # Task-dependent post-process
@@ -136,6 +141,7 @@ class CurvesLoader(api.Loader):
            cmds.select([output, controls], noExpand=True)

            dependencies = [context["representation"]["_id"]]
            dependencies = " ".join(str(d) for d in dependencies)
            name = "anim{}_".format(dependency["name"].title())

        # TODO(marcus): Hardcoding the family here, better separate this.
@@ -143,8 +149,7 @@ class CurvesLoader(api.Loader):
        assert len(family) == 1, ("None or multiple animation "
                                  "families found")
        family = family[0]
        maya.create(
            name=maya.unique_name(name, suffix="_SET"),
            family=family,
            options={"useSelection": True},
            data={"dependencies": " ".join(str(d) for d in dependencies)})
        maya.create(name=maya.unique_name(name, suffix="_SET"),
                    family=family,
                    options={"useSelection": True},
                    data={"dependencies": dependencies})
@@ -2,7 +2,8 @@ import os
import json

from maya import cmds
from avalon import api
from avalon import api, maya
import colorbleed.maya.lib as lib


class LookLoader(api.Loader):
@@ -17,31 +18,49 @@ class LookLoader(api.Loader):
    color = "orange"

    def process(self, name, namespace, context, data):
        from avalon import maya
        try:
            existing_reference = cmds.file(self.fname,
                                           query=True,
                                           referenceNode=True)
        except RuntimeError as e:
            if e.message.rstrip() != "Cannot find the scene file.":
                raise
        """
        Load and try to assign Lookdev to nodes based on relationship data
        Args:
            name:
            namespace:
            context:
            data:

        self.log.info("Loading lookdev for the first time..")
        Returns:

        """

        # improve readability of the namespace
        assetname = context["asset"]["name"]
        ns_assetname = "{}_".format(assetname)

        namespace = maya.unique_namespace(ns_assetname,
                                          format="%03d",
                                          suffix="_look")

        # try / except here is to ensure that the get_reference_node
        # does not fail when the file doesn't exist yet
        reference_node = None
        try:
            reference_node = lib.get_reference_node(self.fname)
        except:
            pass

        if reference_node is None:
            self.log.info("Loading lookdev for the first time ...")
            with maya.maintained_selection():
                nodes = cmds.file(
                    self.fname,
                    namespace=namespace,
                    reference=True,
                    returnNewNodes=True
                )
                nodes = cmds.file(self.fname,
                                  namespace=namespace,
                                  reference=True,
                                  returnNewNodes=True)
        else:
            self.log.info("Reusing existing lookdev..")
            nodes = cmds.referenceQuery(existing_reference, nodes=True)
            namespace = nodes[0].split(":", 1)[0]
            self.log.info("Reusing existing lookdev ...")
            nodes = cmds.referenceQuery(reference_node, nodes=True)

        # Assign shaders
        self.fname = self.fname.rsplit(".", 1)[0] + ".json"

        if not os.path.isfile(self.fname):
            self.log.warning("Look development asset "
                             "has no relationship data.")
@@ -50,6 +69,65 @@ class LookLoader(api.Loader):
        with open(self.fname) as f:
            relationships = json.load(f)

        maya.apply_shaders(relationships, namespace)
        # Get all nodes which belong to a matching name space
        # Currently this is the safest way to get all the nodes
        namespace_nodes = self.get_namespace_nodes(assetname)
        lib.apply_shaders(relationships, nodes, namespace_nodes)

        self[:] = nodes

    def get_namespace_nodes(self, assetname):
        """
        Get all nodes of namespace `asset_*` and check if they have a shader
        assigned, if not add to list
        Args:
            context (dict): current context of asset

        Returns:
            list

        """

        # types = ["transform", "mesh"]
        list_nodes = []

        namespaces = cmds.namespaceInfo(listOnlyNamespaces=True)

        # remove basic namespaces
        namespaces.remove("UI")
        namespaces.remove("shared")

        for ns in namespaces:
            if not ns.startswith(assetname):
                continue
            # get reference nodes
            ns_nodes = cmds.namespaceInfo(ns, listOnlyDependencyNodes=True)
            # TODO: might need to extend the types
            # check if any nodes are connected to something else than lambert1
            list_nodes = cmds.ls(ns_nodes, long=True)
            unassigned_nodes = [self.has_default_shader(n) for n in list_nodes]
            nodes = [n for n in unassigned_nodes if n is not None]

            list_nodes.extend(nodes)

        return set(list_nodes)

    def has_default_shader(self, node):
        """Check if the nodes have `initialShadingGroup` shader assigned

        Args:
            node (str): node to check

        Returns:
            str
        """

        shaders = cmds.listConnections(node, type="shadingEngine") or []
        if "initialShadingGroup" in shaders:
            # return transform node
            transform = cmds.listRelatives(node, parent=True, type="transform",
                                           fullPath=True)
            if not transform:
                return []

            return transform[0]
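The `initialShadingGroup` check above can be tried in isolation inside a Maya session; freshly created geometry is assigned to lambert1's shading group by default:

    from maya import cmds

    cube = cmds.polyCube(name="unshadedCube")[0]
    shape = cmds.listRelatives(cube, shapes=True, fullPath=True)[0]

    shaders = cmds.listConnections(shape, type="shadingEngine") or []
    print("initialShadingGroup" in shaders)  # True until a look is assigned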
@@ -42,8 +42,6 @@ class ModelGPUCacheLoader(api.Loader):

    def process(self, name, namespace, context, data):

        from maya import cmds

        # todo: This will likely not be entirely safe with "containerize"
        # also this cannot work in the manager because it only works
        # on references at the moment!
@@ -1,5 +1,8 @@
import os

from maya import cmds
from avalon import api

from avalon import api, maya


class RigLoader(api.Loader):
@@ -18,18 +21,20 @@ class RigLoader(api.Loader):
    color = "orange"

    def process(self, name, namespace, context, data):

        assetname = "{}_".format(context["asset"]["name"])
        unique_namespace = maya.unique_namespace(assetname, format="%03d")
        nodes = cmds.file(self.fname,
                          namespace=namespace,
                          reference=True,
                          returnNewNodes=True,
                          groupReference=True,
                          groupName=namespace + ":" + name)
                          groupName="{}:{}".format(namespace, name))

        # Store for post-process
        self[:] = nodes

        if data.get("post_process", True):
            self._post_process(name, namespace, context, data)
            self._post_process(name, unique_namespace, context, data)

    def _post_process(self, name, namespace, context, data):
        from avalon import maya
@@ -38,28 +43,33 @@ class RigLoader(api.Loader):
        # Better register this keyword, so that it can be used
        # elsewhere, such as in the Integrator plug-in,
        # without duplication.
        output = next(
            (node for node in self
             if node.endswith("out_SET")), None)
        controls = next(
            (node for node in self
             if node.endswith("controls_SET")), None)

        output = next((node for node in self if
                       node.endswith("out_SET")), None)
        controls = next((node for node in self if
                         node.endswith("controls_SET")), None)

        assert output, "No out_SET in rig, this is a bug."
        assert controls, "No controls_SET in rig, this is a bug."

        # To ensure the asset under which is published is actually the shot
        # not the asset to which the rig belongs to.
        current_task = os.environ["AVALON_TASK"]
        asset_name = context["asset"]["name"]
        if current_task == "animate":
            asset = "{}".format(os.environ["AVALON_ASSET"])
        else:
            asset = "{}".format(asset_name)

        with maya.maintained_selection():
            cmds.select([output, controls], noExpand=True)

            dependencies = [context["representation"]["_id"]]
            asset = context["asset"]["name"] + "_"

            # TODO(marcus): Hardcoding the family here, better separate this.
            maya.create(
                name=maya.unique_name(asset, suffix="_SET"),
                asset=context["asset"]["name"],
                family="colorbleed.animation",
                options={"useSelection": True},
                data={
                    "dependencies": " ".join(str(d) for d in dependencies)
                })
            dependencies = [context["representation"]["_id"]]
            dependencies = " ".join(str(d) for d in dependencies)

            maya.create(name=namespace,
                        asset=asset,
                        family="colorbleed.animation",
                        options={"useSelection": True},
                        data={"dependencies": dependencies})
@@ -1,177 +0,0 @@
from collections import defaultdict

from maya import cmds

import cbra.utils.maya.node_uuid as node_uuid
import cbra.lib

import pyblish.api


class CollectInstancePerItem(pyblish.api.ContextPlugin):
    """Collect instances from the Maya scene and breaks them down per item id

    An instance is identified by having an _INST suffix
    and a .family user-defined attribute.

    All other user-defined attributes of the object set
    is accessible within each instance's data.

    This collector breaks the instances down to each Item member it contains,
    by using the IDs on the nodes in the instance it will split up the instance
    into separate instances for each unique "item" id it finds.

    Note:
        - Only breaks down based on children members and ignores parent members.
        - Discards members without IDs.

    """

    order = pyblish.api.CollectorOrder + 0.1
    hosts = ["maya"]
    label = "Instance per Item"

    _include_families = ["colorbleed.look"]

    def process(self, context):

        invalid = list()

        for objset in cmds.ls("*_SET",
                              objectsOnly=True,
                              type='objectSet',
                              long=True,
                              recursive=True):  # Include namespace

            try:
                family = cmds.getAttr("{}.family".format(objset))
            except ValueError:
                self.log.error("Found: %s found, but no family." % objset)
                continue

            if family not in self._include_families:
                continue

            # ignore referenced sets
            if cmds.referenceQuery(objset, isNodeReferenced=True):
                continue

            instances = self.build_instances(context, objset)
            if not instances:

                # Log special error messages when objectSet is completely
                # empty (has no members) to clarify to artists the root of
                # their problem.
                if not cmds.sets(objset, query=True):
                    self.log.error("Instance objectSet has no members: "
                                   "{}".format(objset))

                self.log.error("No instances retrieved from objectSet: "
                               "{}".format(objset))
                invalid.append(objset)

        if invalid:
            raise RuntimeError("Invalid instances: {}".format(invalid))

        # Sort context based on family
        context[:] = sorted(
            context, key=lambda instance: instance.data("family"))

    def build_instances(self, context, objset):
        """Build the instances for a single instance objectSet

        Returns:
            list: The constructed instances from the objectSet.

        """

        self.log.info("Collecting: %s" % objset)

        short_name = objset.rsplit("|", 1)[-1].rsplit(":", 1)[-1]

        # Default data
        default_data = {"name": short_name[:-5],
                        "subset": "default"}

        # Get user data from user defined attributes
        user_data = dict()
        for attr in cmds.listAttr(objset, userDefined=True):
            try:
                value = cmds.getAttr("{}.{}".format(objset, attr))
                user_data[attr] = value
            except RuntimeError:
                continue

        # Maintain nested object sets
        members = cmds.sets(objset, query=True)
        members = cmds.ls(members, long=True)

        children = cmds.listRelatives(members,
                                      allDescendents=True,
                                      fullPath=True) or []

        # Exclude intermediate objects
        children = cmds.ls(children, noIntermediate=True, long=True)

        nodes = members + children
        nodes = list(set(nodes))

        # Group nodes using ids to an Item
        nodes_id = node_uuid.build_cache(nodes, include_without_ids=True)

        # Log warning for nodes without ids
        if None in nodes_id:
            self.log.warning("Skipping nodes without ids: "
                             "{}".format(nodes_id[None]))

        # ignore nodes without ids
        context.data["instancePerItemNodesWithoutId"] = nodes_id.pop(None,
                                                                     None)

        item_groups = defaultdict(list)

        for id, nodes in nodes_id.iteritems():
            item_id = id.rsplit(":", 1)[0]
            item_groups[item_id].extend(nodes)

        instances = list()
        for item_id, item_nodes in item_groups.iteritems():

            ctx = node_uuid.parse_id("{}:fake_node_uuid".format(item_id))

            # Use itemPath to parse full blown context using official lib
            ctx = cbra.lib.parse_context(ctx['itemPath'])

            item = ctx.get('item', None)
            if item is None:
                self.log.info("Unparsed item id: {}".format(item_id))
                self.log.error("Item can't be parsed and seems to be "
                               "non-existent. Was an asset renamed? Or your"
                               "project set incorrectly?")
                raise RuntimeError("Item not parsed. See log for description.")

            instance = context.create_instance(objset)

            # Set the related members
            instance[:] = item_nodes
            instance.data['setMembers'] = item_nodes

            # Set defaults and user data
            instance.data.update(default_data.copy())
            instance.data.update(user_data.copy())

            # Override the label to be clear
            name = instance.data['name']
            instance.data['label'] = "{0} ({1})".format(name, item)

            # Store that the instance is collected per item
            instance.data['_instancePerItem'] = True
            instance.data['_itemContext'] = ctx

            assert "family" in instance.data, "No family data in instance"
            assert "name" in instance.data, ("No objectSet name data "
                                             "in instance")

            instances.append(instance)

        return instances
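The per-item breakdown in the deleted collector above hinges on one string operation: stripping the per-node uuid from an id to recover the item id. A standalone sketch with hypothetical ids:

    from collections import defaultdict

    # id -> nodes, in the shape a node_uuid.build_cache()-style lookup returns
    nodes_id = {
        "assets:character/test:bluey:aaaa1111": ["|grp|body"],
        "assets:character/test:bluey:bbbb2222": ["|grp|head"],
        "assets:prop/cup:cup:cccc3333": ["|grp|cup"],
    }

    item_groups = defaultdict(list)
    for node_id, nodes in nodes_id.items():  # iteritems() in the Python 2 original
        item_id = node_id.rsplit(":", 1)[0]  # drop the trailing node uuid
        item_groups[item_id].extend(nodes)

    print(dict(item_groups))
    # {'assets:character/test:bluey': ['|grp|body', '|grp|head'],
    #  'assets:prop/cup:cup': ['|grp|cup']}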
@@ -1,156 +0,0 @@
import os
import re
import pyseq
import glob

import pyblish.api

from maya import cmds


class SeletYetiCachesAction(pyblish.api.Action):
    """Select the nodes related to the collected file textures"""

    label = "Select yeti nodes"
    on = "succeeded"  # This action is only available on a failed plug-in
    icon = "search"  # Icon from Awesome Icon

    def process(self, context, plugin):

        self.log.info("Finding textures..")

        # Get the errored instances
        instances = []
        for result in context.data["results"]:
            instance = result["instance"]
            if instance is None:
                continue

            instances.append(instance)

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(instances, plugin)

        # Get the texture nodes from the instances
        nodes = []
        for instance in instances:
            texture_nodes = instance.data['yetiCaches'].keys()
            nodes.extend(texture_nodes)

        # Ensure unique
        nodes = list(set(nodes))

        if nodes:
            self.log.info("Selecting nodes: %s" % ", ".join(nodes))
            cmds.select(nodes, r=True, noExpand=True)
        else:
            self.log.info("No nodes found.")
            cmds.select(deselect=True)


def get_sequence(filename, pattern="%04d"):
    """Get pyseq sequence from filename

    Supports negative frame ranges like (-001, 0000, 0001 and -0001, 0000, 0001).

    Arguments:
        filename (str): The full path to filename containing the given pattern.
        pattern (str): The pattern to swap with the variable frame number.

    Returns:
        pyseq.Sequence: file sequence.

    """

    glob_pattern = filename.replace(pattern, "*")

    escaped = re.escape(filename)
    re_pattern = escaped.replace(pattern, "-?[0-9]+")

    files = glob.glob(glob_pattern)
    files = [str(f) for f in files if re.match(re_pattern, f)]

    return pyseq.get_sequences(files)


class CollectYetiCaches(pyblish.api.InstancePlugin):
    """Collect used yeti caches.

    Collects the file sequences from pgYetiMaya.cacheFileName

    """

    order = pyblish.api.CollectorOrder + 0.495
    label = 'Yeti Caches'
    families = ["colorbleed.groom"]
    actions = [SeletYetiCachesAction]

    TYPES = {"pgYetiMaya": "cacheFileName"}

    def process(self, instance):

        # Get textures from sets
        members = instance.data("setMembers")
        members = cmds.ls(members, dag=True, shapes=True, type="pgYetiMaya",
                          noIntermediate=True, long=True)
        if not members:
            raise RuntimeError("Instance appears to be empty (no members)")

        # Collect only those cache frames that are required
        # If handles are required it is assumed to already be included
        # in the start frame and end frames.
        # (e.g. using frame handle collector)
        start_frame = instance.data("startFrame")
        end_frame = instance.data("endFrame")
        required = set(range(int(start_frame), int(end_frame) + 1))

        history = cmds.listHistory(members) or []

        resources = instance.data.get("resources", [])
        yeti_caches = dict()

        for node_type, attr in self.TYPES.iteritems():
            for node in cmds.ls(history, type=node_type, long=True):

                attribute = "{0}.{1}".format(node, attr)

                # Source
                source = cmds.getAttr(attribute)
                if not source:
                    self.log.error("Node does not have a file set: "
                                   "{0}".format(node))

                # Collect the source as expanded path because that's also
                # how the attribute must be 'set' for yeti nodes.
                source = os.path.realpath(cmds.workspace(expandName=source))

                # Collect the frames we need from the sequence
                sequences = get_sequence(source)
                files = list()
                for sequence in sequences:
                    for index, frame in enumerate(sequence.frames()):
                        if frame not in required:
                            continue

                        item = sequence[index]
                        files.append(item.path)

                # Define the resource
                resource = {"tags": ["maya", "yeti", "attribute"],
                            "node": node,
                            "attribute": attribute,
                            "source": source,  # required for resources
                            "files": files,  # required for resources
                            "subfolder": "caches"  # optional for resources
                            }

                resources.append(resource)

                # For validations
                yeti_caches[node] = {"attribute": attribute,
                                     "source": source,
                                     "sequences": sequences}

        # Store data on instance
        instance.data['yetiCaches'] = yeti_caches
        instance.data['resources'] = resources
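The glob/regex pairing in get_sequence() can be exercised without pyseq; the cache path below is hypothetical:

    import glob
    import re

    filename = "/caches/fur.%04d.fur"  # hypothetical cache path
    pattern = "%04d"

    glob_pattern = filename.replace(pattern, "*")  # -> /caches/fur.*.fur
    re_pattern = re.escape(filename).replace(pattern, "-?[0-9]+")

    # glob over-matches (e.g. fur.preview.fur); the regex keeps only frame
    # numbers, including negative frames such as fur.-001.fur
    files = [f for f in glob.glob(glob_pattern) if re.match(re_pattern, f)]
    print(files)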
@@ -1,81 +0,0 @@
import json

from maya import cmds

import avalon.maya
import colorbleed.api

import cb.utils.maya.context as context
import cbra.utils.maya.layout as layout


def get_upstream_hierarchy_fast(nodes):
    """Passed in nodes must be long names!"""

    matched = set()
    parents = []

    for node in nodes:
        hierarchy = node.split("|")
        num = len(hierarchy)
        for x in range(1, num-1):
            parent = "|".join(hierarchy[:num-x])
            if parent in parents:
                break
            else:
                parents.append(parent)
                matched.add(parent)

    return parents


class ExtractLayout(colorbleed.api.Extractor):
    """Extract Layout as both gpuCache and Alembic"""

    label = "Layout (gpuCache & alembic)"
    hosts = ["maya"]
    families = ["colorbleed.layout"]

    def process(self, instance):

        # Define extract output file path
        dir_path = self.staging_dir(instance)

        start = instance.data.get("startFrame", 1)
        end = instance.data.get("endFrame", 1)
        step = instance.data.get("step", 1.0)
        placeholder = instance.data.get("placeholder", False)
        write_color_sets = instance.data.get("writeColorSets", False)
        renderable_only = instance.data.get("renderableOnly", False)
        visible_only = instance.data.get("visibleOnly", False)

        layers = instance.data.get("animLayersActive", None)
        if layers:
            layers = json.loads(layers)
            self.log.info("Publishing with animLayers active: "
                          "{0}".format(layers))

        # Perform extraction
        self.log.info("Performing extraction..")
        with avalon.maya.maintained_selection():

            # Get children hierarchy
            nodes = instance.data['setMembers']
            cmds.select(nodes, r=True, hierarchy=True)
            hierarchy = cmds.ls(selection=True, long=True)

            with context.evaluation("off"):
                with context.no_refresh():
                    with context.active_anim_layers(layers):
                        layout.extract_layout(hierarchy,
                                              dir_path,
                                              start=start,
                                              end=end,
                                              step=step,
                                              placeholder=placeholder,
                                              write_color_sets=write_color_sets,
                                              renderable_only=renderable_only,
                                              visible_only=visible_only)

        self.log.info("Extracted instance '{0}' to: {1}".format(
            instance.name, dir_path))
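A standalone run of the get_upstream_hierarchy_fast() logic on made-up long names shows what it collects (each parent path, visited once, child-most first):

    nodes = ["|root|group|mesh1", "|root|group|mesh2"]

    matched = set()
    parents = []
    for node in nodes:
        hierarchy = node.split("|")
        num = len(hierarchy)
        for x in range(1, num - 1):
            parent = "|".join(hierarchy[:num - x])
            if parent in parents:
                break  # this branch was already walked
            else:
                parents.append(parent)
                matched.add(parent)

    print(parents)  # ['|root|group', '|root']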
@@ -1,91 +0,0 @@
import os

import maya.cmds as cmds

import pyblish.api
import colorbleed.api

import cbra.lib
from cb.utils.python.decorators import memorize


def isclose(a, b, rel_tol=1e-9, abs_tol=0.0):
    return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)


@memorize
def is_published_path(path):
    """Return whether path is from a published file"""

    # Quick check (optimization) without going through the folder
    # structure
    if cbra.lib.DIR_PUBLISH.lower() not in path.lower():
        return False

    try:
        context = cbra.lib.parse_context(path)
    except RuntimeError:
        context = dict()

    return all([context.get("family", None),
                context.get("subset", None),
                context.get("version", None)])


class ValidateLayoutNodes(pyblish.api.InstancePlugin):
    """Validates that layout nodes behave to certain rules

    Gpu caches in a layout may not have sub-frame offsets, like offsets with a
    value after the decimal point. (e.g. 1.45)

    Gpu caches loaded in a layout MUST come from a published source that has
    family and version.

    """

    order = colorbleed.api.ValidateContentsOrder
    label = 'Layout Nodes'
    families = ['colorbleed.layout']
    actions = [colorbleed.api.SelectInvalidAction]

    @classmethod
    def get_invalid(cls, instance):

        caches = cmds.ls(instance, type="gpuCache", long=True)

        # Validate sub-frame offsets
        invalid_offsets = list()
        for cache in caches:

            offset = cmds.getAttr("{}.animOffset".format(cache))
            if not isclose(offset, round(offset)):
                cls.log.warning("Invalid sub-frame offset on: %s" % cache)
                invalid_offsets.append(cache)

        # Validate gpuCache paths are from published files
        invalid_paths = list()
        for cache in caches:
            path = cmds.getAttr("{}.cacheFileName".format(cache))
            path = os.path.normpath(path)
            if not is_published_path(path):
                cls.log.warning("GpuCache path not from published file: "
                                "{0} -> {1}".format(cache, path))
                invalid_paths.append(cache)

        invalid = invalid_offsets + invalid_paths

        return invalid

    def process(self, instance):

        # Clear cache only once per publish. So we store a value on
        # the context on the first instance so we clear only once.
        name = self.__class__.__name__
        key = "_plugin_{0}_processed".format(name)
        if not instance.context.data.get(key, False):
            is_published_path.cache.clear()
            instance.context.data[key] = True

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Invalid nodes found: {0}".format(invalid))
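A quick worked example of the isclose() check that flags sub-frame offsets in the validator above:

    def isclose(a, b, rel_tol=1e-9, abs_tol=0.0):
        return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)

    print(isclose(3.0, round(3.0)))    # True: whole-frame offset passes
    print(isclose(1.45, round(1.45)))  # False: 1.45 vs 1.0 is a sub-frame offset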
@@ -1,83 +0,0 @@
import pyblish.api
import colorbleed.api

import cbra.utils.maya.node_uuid as id_utils
import cbra.lib


class ValidateRelatedNodeIds(pyblish.api.InstancePlugin):
    """Validate nodes have related colorbleed ids.

    An ID is 'related' if it's built in the current Item.

    Note that this doesn't ensure it's from the current Task. An ID created
    from `lookdev` has the same relation to the Item as one coming from
    others, like `rigging` or `modeling`.

    """

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.model']
    hosts = ['maya']
    label = 'Related Id Attributes'
    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]

    @classmethod
    def get_invalid(cls, instance):
        """Return the member nodes that are invalid"""

        context = instance.context
        current_file = context.data.get('currentFile', None)
        if not current_file:
            raise RuntimeError("No current file information: "
                               "{0}".format(current_file))

        try:
            context = cbra.lib.parse_context(current_file)
        except RuntimeError as e:
            cls.log.error("Can't generate UUIDs because scene isn't "
                          "in new-style pipeline: {0}".format(current_file))
            raise e

        def to_item(id):
            """Split the item id part from a node id"""
            return id.rsplit(":", 1)[0]

        # Generate a fake id in the current context to retrieve the item
        # id prefix that should match with ids on the nodes
        fake_node = "__node__"
        ids = id_utils.generate_ids(context, [fake_node])
        id = ids[fake_node]
        item_prefix = to_item(id)

        # Collect the members whose id does not match the item prefix
        invalid = list()
        invalid_items = set()
        for member in instance:
            member_id = id_utils.get_id(member)

            # skip nodes without ids
            if not member_id:
                continue

            if not member_id.startswith(item_prefix):
                invalid.append(member)
                invalid_items.add(to_item(member_id))

        # Log invalid item ids
        if invalid_items:
            for item_id in sorted(invalid_items):
                cls.log.warning("Found invalid item id: {0}".format(item_id))

        return invalid

    def process(self, instance):
        """Process all meshes"""

        # Ensure all nodes have a cbId
        invalid = self.get_invalid(instance)

        if invalid:
            raise RuntimeError("Nodes found with non-related "
                               "asset IDs: {0}".format(invalid))
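The prefix comparison above is easy to miss; a sketch using the id layout from the earlier docstring ("assets:character/test:bluey:&lt;uuid&gt;"), with illustrative values:

def to_item(node_id):
    # Strip the per-node uuid, keeping the item prefix
    return node_id.rsplit(":", 1)[0]

item_prefix = "assets:character/test:bluey"
good = "assets:character/test:bluey:46D221D9-4150-8E49-6B17-43B04BFC26B6"
bad = "assets:character/other:rig:0A0A0A0A-0000-0000-0000-000000000000"
assert to_item(good) == item_prefix
assert not bad.startswith(item_prefix)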
@@ -1,167 +0,0 @@
import os
from collections import defaultdict

import pyblish.api
import colorbleed.api

import cbra.lib
from cbra.utils.maya.abc import get_alembic_ids
from cbra.utils.maya.node_uuid import get_id


def get_subset_path(context):
    return os.path.join(context['itemPath'],
                        cbra.lib.DIR_PUBLISH,
                        context['family'],
                        context['subset'])


class ValidateUniqueIdsInItem(pyblish.api.InstancePlugin):
    """Checks whether IDs are unique across other subsets

    This ensures a model to be published can't have ids
    which are already present in another subset. For example
    the "default" model can't have ids present in the "high"
    subset.

    Note:
        This will also invalidate the instance if it contains
        nodes that are present in another instance in the scene.
        So ensure the instance you're publishing actually has
        the correct set members.

    """

    order = colorbleed.api.ValidateMeshOrder
    families = ['colorbleed.model']
    hosts = ['maya']
    label = 'Unique Ids in Item'
    actions = [colorbleed.api.SelectInvalidAction]
    optional = True

    @classmethod
    def iter_invalid(cls, instance):

        verbose = instance.data.get("verbose", False)

        def _get_instance_ids(instance):
            """Collect ids in an instance"""
            nodes_per_id = defaultdict(list)
            for node in instance:
                node_id = get_id(node)
                if node_id:
                    nodes_per_id[node_id].append(node)
            return nodes_per_id

        nodes_per_id = _get_instance_ids(instance)
        if not nodes_per_id:
            return

        ids_lookup = set(nodes_per_id.keys())

        instance_context = instance.data["instanceContext"]
        instance_subset = instance.data['subset']

        assert instance_context, "Instance must have 'instanceContext' data"
        assert instance_subset, "Instance must have 'subset' data"

        subsets_checked = set()
        subsets_checked.add(instance_subset)  # we can skip this subset

        # Compare with all other *currently publishing instances*
        # of family 'model' for this item
        for other_instance in instance.context:
            if other_instance is instance:
                continue

            if other_instance.data['subset'] == instance_subset:
                cls.log.error("Another instance has the same subset? "
                              "This should never happen.")

            if other_instance.data['family'] != "model":
                continue

            if other_instance.data['instanceContext']['item'] != \
                    instance_context['item']:
                cls.log.error("Also publishing model for other item? "
                              "This should never happen.")
                continue

            other_ids = _get_instance_ids(other_instance).keys()

            # Perform comparison
            intersection = ids_lookup.intersection(other_ids)
            if intersection:
                for node_id in intersection:
                    nodes = nodes_per_id[node_id]
                    for node in nodes:
                        yield node

                # Those that are invalid don't need to be checked again
                ids_lookup.difference_update(other_ids)

                if not ids_lookup:
                    # Once there are no ids left to check we can
                    # return early
                    return

            subsets_checked.add(other_instance.data['subset'])

        # Compare with all previously *published instances*
        # of family 'model' for this item
        ctx = instance_context.copy()
        ctx['family'] = "model"

        published_subsets = cbra.lib.list_subsets(ctx)
        published_subsets = set(x for x in published_subsets if
                                x != instance_subset)

        for published_subset in published_subsets:
            ctx['subset'] = published_subset
            ctx['subsetPath'] = get_subset_path(ctx)

            versions = cbra.lib.list_versions(ctx)
            version = cbra.lib.find_highest_version(versions)
            if not version:
                cls.log.debug("No published version for "
                              "'model': {0}".format(published_subset))
                continue

            ctx['currentVersion'] = version
            publish_abc = cbra.lib.get_filepath(ctx) + ".abc"

            if not os.path.exists(publish_abc):
                cls.log.error("Published file to compare with does not exist: "
                              "{0}".format(publish_abc))
                continue

            if verbose:
                cls.log.debug("Comparing with: {0}".format(publish_abc))

            abc_ids = set(get_alembic_ids(publish_abc).values())

            # Perform comparison
            intersection = ids_lookup.intersection(abc_ids)
            if intersection:
                for node_id in intersection:
                    nodes = nodes_per_id[node_id]
                    for node in nodes:
                        yield node

                # Those that are invalid don't need to be checked again
                ids_lookup.difference_update(abc_ids)

                if not ids_lookup:
                    # Once there are no ids left to check we can
                    # return early
                    return

        return

    @classmethod
    def get_invalid(cls, instance):
        return list(cls.iter_invalid(instance))

    def process(self, instance):
        """Process all meshes"""
        if any(self.iter_invalid(instance)):
            raise RuntimeError("Invalid nodes found in {0}".format(instance))
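The shrinking-lookup comparison in iter_invalid is worth isolating: once an id is known to clash it is removed from the lookup so it is never checked against later subsets. A minimal sketch with made-up ids:

nodes_per_id = {"id1": ["|a"], "id2": ["|b"], "id3": ["|c"]}
ids_lookup = set(nodes_per_id)

other_subsets = [{"id2"}, {"id3", "id9"}]
clashing = []
for other_ids in other_subsets:
    for node_id in ids_lookup.intersection(other_ids):
        clashing.extend(nodes_per_id[node_id])
    # ids that clashed don't need to be checked again
    ids_lookup.difference_update(other_ids)
    if not ids_lookup:
        break

assert sorted(clashing) == ["|b", "|c"]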
@@ -13,7 +3,3 @@ class DebugPlugin(pyblish.api.InstancePlugin):
        self.log.info("\n\n----------------------")
        self.log.info("Instance")
        pprint.pprint(instance)

        self.log.info("\n\n----------------------")
        self.log.info("Instance.data")
        pprint.pprint(instance.data)
@@ -1,85 +0,0 @@
import os
import shutil

import cquery
import pyblish_cb.lib
import colorbleed.api


class IntegrateColorbleedAssets(colorbleed.api.Integrator):
    """Name and position instances on disk.

    The files are transferred from the `extractDir` to the
    computed `integrationDir` and are renamed as:
        - "{item}_{family}_{subsetName}_{version}.{ext}"

    Assumptions:
        - Each extracted instance is 1 file (no directories)

    """

    label = "Asset"
    families = ["colorbleed.model", "colorbleed.rig", "colorbleed.pointcache",
                "colorbleed.proxy", "colorbleed.layout", "colorbleed.look",
                "colorbleed.vrmeshReplace", "colorbleed.review",
                "colorbleed.instancer", "colorbleed.camera",
                "colorbleed.mayaAscii",
                "colorbleed.furYeti"]

    def process(self, instance):
        super(IntegrateColorbleedAssets, self).process(instance)

        self.log.info("Integrating {0}..".format(instance))

        integration = pyblish_cb.lib.compute_integration(instance)

        # Store reference for upcoming plug-ins
        instance.data["integrationDir"] = integration['path']
        instance.data["integrationVersion"] = integration['versionNum']

        path = integration['path']
        data = integration.copy()

        try:
            if not os.path.exists(path):
                os.makedirs(path)

            self.log.info("Moving files to %s" % path)

            tmp = instance.data["extractDir"]
            for src in (os.path.join(tmp, f) for f in os.listdir(tmp)):

                self.log.debug("Integrating %s" % src)

                # Source must be a file
                if not os.path.isfile(src):
                    self.log.error("Source is not a file: {0}".format(src))
                    continue

                # TODO(marcus): Consider files without extension
                data["ext"] = src.split(".", 1)[-1]
                dst = os.path.join(path, "{item}_"
                                         "{family}_"
                                         "{subsetName}_"
                                         "{version}.{ext}".format(**data))

                # Copy
                self.log.info("\"%s\" -> \"%s\"" % (src, dst))
                shutil.copyfile(src, dst)

            cquery.tag(path, ".Version")
            self.log.debug("Tagged %s with .Version" % path)

            try:
                subset_path = os.path.dirname(path)
                cquery.tag(subset_path, ".Subset")
                self.log.debug("Tagged %s with .Subset" % subset_path)
            except cquery.TagExists:
                pass

        except OSError as e:
            # If, for whatever reason, this instance did not get written.
            instance.data.pop("integrationDir")
            raise e

        except Exception as e:
            raise Exception("An unknown error occurred: %s" % e)
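The destination naming above is a straightforward template fill. A sketch of the same "{item}_{family}_{subsetName}_{version}.{ext}" layout with made-up values:

import os

data = {"item": "bluey", "family": "model", "subsetName": "default",
        "version": "v003", "ext": "abc"}
dst = os.path.join("/publish/path",
                   "{item}_{family}_{subsetName}_"
                   "{version}.{ext}".format(**data))
assert dst.endswith("bluey_model_default_v003.abc")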
@@ -2,8 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api

-from pyblish_cb.tmp_utils import polyConstraint
+import colorbleed.maya.lib as lib


class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):

@@ -41,17 +40,16 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
        edges = ['{0}.e[*]'.format(node) for node in meshes]

        # Filter by constraint on edge length
-       invalid = polyConstraint(edges,
-                                t=0x8000,  # type=edge
-                                length=1,
-                                lengthbound=(0, cls.__tolerance))
+       invalid = lib.polyConstraint(edges,
+                                    t=0x8000,  # type=edge
+                                    length=1,
+                                    lengthbound=(0, cls.__tolerance))

        return invalid

    def process(self, instance):
        """Process all meshes"""
        invalid = self.get_invalid(instance)

        if invalid:
            raise RuntimeError("Meshes found with zero "
                               "edge length: {0}".format(invalid))
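The `lib.polyConstraint` helper isn't shown in this diff; a hedged sketch of what such a helper plausibly wraps, namely Maya's polySelectConstraint in "all and next" mode. This is an assumption about the helper, not its actual implementation:

from maya import cmds

def zero_length_edges(edges, tolerance=1e-5):
    """Return edges whose length falls within (0, tolerance)."""
    cmds.select(edges, r=True)
    cmds.polySelectConstraint(mode=3,        # all and next
                              type=0x8000,   # edges
                              length=1,      # enable the length constraint
                              lengthbound=(0, tolerance))
    try:
        return cmds.ls(selection=True, flatten=True) or []
    finally:
        cmds.polySelectConstraint(disable=True)  # reset constraint state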
@@ -3,7 +3,7 @@ from maya import cmds

import pyblish.api


-class CollectMindbenderInstances(pyblish.api.ContextPlugin):
+class CollectInstances(pyblish.api.ContextPlugin):
    """Gather instances by objectSet and pre-defined attribute

    This collector takes into account assets that are associated with

@@ -38,14 +38,24 @@ class CollectInstances(pyblish.api.ContextPlugin):

    """

-   label = "Collect Mindbender Instances"
+   label = "Collect Instances"
    order = pyblish.api.CollectorOrder
    hosts = ["maya"]
+   instance_order = ["colorbleed.model",
+                     "colorbleed.rig",
+                     "colorbleed.animation",
+                     "colorbleed.camera",
+                     "colorbleed.texture",
+                     "colorbleed.lookdev",
+                     "colorbleed.historyLookdev",
+                     "colorbleed.group"]

    def process(self, context):

        objectset = cmds.ls("*.id", long=True, type="objectSet",
                            recursive=True, objectsOnly=True)
        for objset in objectset:
            self.log.info("Creating instance for {}".format(objset))

            members = cmds.sets(objset, query=True)
            if members is None:

@@ -61,7 +71,8 @@ class CollectInstances(pyblish.api.ContextPlugin):

            # The developer is responsible for specifying
            # the family of each instance.
-           has_family = cmds.attributeQuery("family", node=objset,
+           has_family = cmds.attributeQuery("family",
+                                            node=objset,
                                             exists=True)
            assert has_family, "\"%s\" was missing a family" % objset

@@ -70,7 +81,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
            # Apply each user defined attribute as data
            for attr in cmds.listAttr(objset, userDefined=True) or list():
                try:
-                   value = cmds.getAttr("{}.{}".format(objset, attr))
+                   value = cmds.getAttr("%s.%s" % (objset, attr))
                except Exception:
                    # Some attributes cannot be read directly,
                    # such as mesh and color attributes. These

@@ -82,9 +93,10 @@ class CollectInstances(pyblish.api.ContextPlugin):

            # Collect members
            members = cmds.ls(members, long=True) or []

            children = cmds.listRelatives(members,
                                          allDescendents=True,
-                                         fullPath=True)
+                                         fullPath=True) or []
            parents = self.get_all_parents(members)
            members_hierarchy = list(set(members + children + parents))

@@ -99,6 +111,10 @@ class CollectInstances(pyblish.api.ContextPlugin):
            # user interface interested in visualising it.
            self.log.info("Found: \"%s\" " % instance.data["name"])

+       context[:] = sorted(context)
+
+       return context

    def get_all_parents(self, nodes):
        """Get all parents by using string operations (optimization)

@@ -108,6 +124,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
        Returns:
            list
        """

        parents = []
        for node in nodes:
            splitted = node.split("|")
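For context, a hedged sketch of what a matching instance set could look like in a scene; the collector above would pick it up via its "id" attribute. The set name and the exact attribute values are illustrative:

from maya import cmds

# Create an objectSet from the current selection and tag it the way
# the collector expects: an "id" attribute plus a "family" attribute.
objset = cmds.sets(cmds.ls(selection=True), name="modelDefault_instance")
cmds.addAttr(objset, longName="id", dataType="string")
cmds.setAttr(objset + ".id", "pyblish.avalon.instance", type="string")
cmds.addAttr(objset, longName="family", dataType="string")
cmds.setAttr(objset + ".family", "colorbleed.model", type="string")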
@@ -1,9 +1,9 @@
from maya import cmds

-from cb.utils.maya import context
-import cbra.utils.maya.node_uuid as id_utils
import pyblish.api

+from cb.utils.maya import context, shaders
+import cbra.utils.maya.node_uuid as id_utils

SHAPE_ATTRS = ["castsShadows",
               "receiveShadows",

@@ -62,7 +62,7 @@ class CollectLook(pyblish.api.InstancePlugin):
    """

    order = pyblish.api.CollectorOrder + 0.4
-   families = ["colorbleed.look"]
+   families = ["colorbleed.lookdev"]
    label = "Collect Look"
    hosts = ["maya"]

@@ -87,7 +87,7 @@ class CollectLook(pyblish.api.InstancePlugin):

        # Discover related object sets
        self.log.info("Gathering sets..")
-       self.gather_sets(instance)
+       sets = self.gather_sets(instance)

        # Lookup with absolute names (from root namespace)
        instance_lookup = set([str(x) for x in cmds.ls(instance,

@@ -95,9 +95,7 @@ class CollectLook(pyblish.api.InstancePlugin):
                                                       absoluteName=True)])

        self.log.info("Gathering set relations..")
-       sets = self.gather_sets(instance)
        for objset in sets:

            self.log.debug("From %s.." % objset)
            content = cmds.sets(objset, query=True)
            objset_members = sets[objset]["members"]

@@ -108,22 +106,23 @@ class CollectLook(pyblish.api.InstancePlugin):
                                                   verbose)
                if not member_data:
                    continue

                sets[objset]["members"].append(member_data)

        # Remove sets that didn't have any members assigned in the end
-       sets = self.clean_sets(sets)
+       # Member attributes (shapes + transforms)
+       sets = self.remove_sets_without_members(sets)

        self.log.info("Gathering attribute changes to instance members..")
-       attributes = self.collect_attributes_changes(instance)
+       attributes = self.collect_attributes_changed(instance)
        looksets = cmds.ls(sets.keys(), absoluteName=True, long=True)

        # Store data on the instance
        instance.data["lookData"] = {"attributes": attributes,
                                     "relationships": sets.values(),
                                     "sets": looksets}
+       # Collect textures
+       resources = [self.collect_resource(n) for n in cmds.ls(type="file")]
+       instance.data["resources"] = resources

        # Log a warning when no relevant sets were retrieved for the look.
        if not instance.data["lookData"]["sets"]:

@@ -133,41 +132,76 @@ class CollectLook(pyblish.api.InstancePlugin):
        self.log.info("Collected look for %s" % instance)

    def gather_sets(self, instance):
        """Gather all objectSets which are of importance for publishing

        It checks if all nodes in the instance are related to any objectSet
        which is relevant for publishing

        Args:
            instance (list): all nodes to be published

        Returns:
            dict
        """

        # Get view sets (so we can ignore those sets later)
        sets = dict()
        view_sets = set()
-       model_panels = cmds.getPanel(type="modelPanel")
-       for panel in model_panels:
+       for panel in cmds.getPanel(type="modelPanel"):
            view_set = cmds.modelEditor(panel, query=True, viewObjects=True)
            if view_set:
                view_sets.add(view_set)

        for node in instance:
-           node_sets = self.filter_sets(node, view_sets)
-           if not node_sets:
+           related_sets = self.get_related_sets(node, view_sets)
+           if not related_sets:
                continue

-           for objset in node_sets:
+           for objset in related_sets:
                if objset in sets:
                    continue

+               unique_id = cmds.getAttr("%s.cbId" % objset)
                sets[objset] = {"name": objset,
-                               "uuid": id_utils.get_id(objset),
+                               "uuid": unique_id,
                                "members": list()}
        return sets

-   def filter_sets(self, node, view_sets):
+   def get_related_sets(self, node, view_sets):
        """Get the sets which do not belong to any specific group

-       node_sets = cmds.listSets(object=node, extendToShape=False) or []
-       if not node_sets:
-           return
+       Filters out based on:
+       - id attribute is NOT `pyblish.avalon.container`
+       - shapes and deformer shapes (alembic creates meshShapeDeformed)
+       - set name ends with any from a predefined list
+       - set is not in viewport sets (isolate selected for example)
+
+       Args:
+           node (str): name of the current node to check
+       """
+
+       ignored = ["pyblish.avalon.instance", "pyblish.avalon.container"]
+
+       related_sets = cmds.listSets(object=node, extendToShape=False)
+       if not related_sets:
+           return []
+
+       # Ignore `avalon.container`
+       sets = [s for s in related_sets if
+               not cmds.attributeQuery("id", node=s, exists=True) or
+               not cmds.getAttr("%s.id" % s) in ignored]

        # Exclude deformer sets
        # Autodesk documentation on the listSets command:
        # type(uint): Returns all sets in the scene of the given
        # >>> type:
        # >>> 1 - all rendering sets
        # >>> 2 - all deformer sets
        deformer_sets = cmds.listSets(object=node,
                                      extendToShape=False,
                                      type=2) or []
        deformer_sets = set(deformer_sets)  # optimize lookup
-       sets = [s for s in node_sets if s not in deformer_sets]
+       sets = [s for s in sets if s not in deformer_sets]

        # Ignore specifically named sets
        sets = [s for s in sets if not any(s.endswith(x) for x in self.IGNORE)]

@@ -176,16 +210,24 @@ class CollectLook(pyblish.api.InstancePlugin):
        # viewports)
        sets = [s for s in sets if s not in view_sets]

-       self.log.info("Found sets {0} for {1}".format(node_sets, node))
+       self.log.info("Found sets %s for %s" % (related_sets, node))

        return sets

-   def clean_sets(self, sets):
+   def remove_sets_without_members(self, sets):
        """Remove any set which does not have any members

        Args:
            sets (dict): collection of sets with data as value

        Returns:
            dict
        """

        for objset, data in sets.items():
            if not data['members']:
-               self.log.debug("Removing redundant set "
-                              "information: %s" % objset)
+               self.log.debug("Removing redundant set information: "
+                              "%s" % objset)
                sets.pop(objset)

        return sets

@@ -215,10 +257,24 @@ class CollectLook(pyblish.api.InstancePlugin):
        if member in [m["name"] for m in objset_members]:
            return

+       # Check node type; for a mesh take its parent transform, which
+       # makes assigning shaders easier
+       if cmds.nodeType(node) == "mesh":
+           parent = cmds.listRelatives(node, parent=True, fullPath=True)
+           # a mesh always has a parent in Maya, no need for
+           # assertions or extra checking
+           parent = parent[0]
+           if cmds.attributeQuery("cbId", node=parent, exists=True):
+               node = parent
+           else:
+               self.log.error("Transform group of mesh '{0}' has no "
+                              "attribute 'cbId', this is "
+                              "mandatory".format(node))
+               return

        if verbose:
            self.log.debug("Such as %s.." % member)

-       member_data = {"name": node, "uuid": id_utils.get_id(node)}
+       member_data = {"name": node,
+                      "uuid": cmds.getAttr("{}.cbId".format(node))}

        # Include components information when components are assigned
        if components:

@@ -226,7 +282,22 @@ class CollectLook(pyblish.api.InstancePlugin):

        return member_data

-   def collect_attributes_changes(self, instance):
+   def collect_attributes_changed(self, instance):
+       """Collect all userDefined attributes which have changed
+
+       Each node gets checked for user defined attributes which have been
+       altered during development. Each change gets logged in a dictionary
+
+       [{name: node,
+         uuid: uuid,
+         attributes: {attribute: value}}]
+
+       Args:
+           instance (list): all nodes which will be published
+
+       Returns:
+           list
+       """

        attributes = []
        for node in instance:

@@ -250,3 +321,46 @@ class CollectLook(pyblish.api.InstancePlugin):
            attributes.append(data)

        return attributes

+   def collect_resource(self, node, verbose=False):
+       """Collect the link to the file(s) used (resource)
+       Args:
+           node (str): name of the node
+           verbose (bool): enable debug information
+
+       Returns:
+           dict
+       """
+
+       attribute = "{}.fileTextureName".format(node)
+       source = cmds.getAttr(attribute)
+
+       # Get the computed file path (e.g. the one with the <UDIM> pattern
+       # in it) so we can reassign this computed file path whenever
+       # we need to.
+       computed_attribute = "{}.computedFileTextureNamePattern".format(node)
+       computed_source = cmds.getAttr(computed_attribute)
+       if source != computed_source:
+           if verbose:
+               self.log.debug("File node computed pattern differs from "
+                              "original pattern: {0} "
+                              "({1} -> {2})".format(node,
+                                                    source,
+                                                    computed_source))
+
+           # We replace backslashes with forward slashes because V-Ray
+           # can't handle UDIM files with backslashes in the
+           # paths as the computed patterns
+           source = computed_source.replace("\\", "/")
+
+       files = shaders.get_file_node_files(node)
+       if not files:
+           self.log.error("File node does not have a texture set: "
+                          "{0}".format(node))
+           return
+
+       # Define the resource
+       return {"node": node,
+               "attribute": attribute,
+               "source": source,  # required for resources
+               "files": files}  # required for resources
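The cbId reads repeated above are worth wrapping; a small helper that returns None instead of raising when the attribute is missing (the helper name is illustrative):

from maya import cmds

def get_cb_id(node, attr="cbId"):
    if not cmds.attributeQuery(attr, node=node, exists=True):
        return None
    return cmds.getAttr("{0}.{1}".format(node, attr))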
@@ -1,152 +0,0 @@
from maya import cmds

import pyblish.api

import cb.utils.maya.shaders as shader

TAGS = ["maya", "attribute", "look"]
TAGS_LOOKUP = set(TAGS)


class SelectTextureNodesAction(pyblish.api.Action):
    """Select the nodes related to the collected file textures"""

    label = "Select texture nodes"
    on = "succeeded"  # This action is only available on a succeeded plug-in
    icon = "search"  # Icon from Awesome Icon

    def process(self, context, plugin):

        self.log.info("Finding textures..")

        # Get the errored instances
        instances = []
        for result in context.data["results"]:
            instance = result["instance"]
            if instance is None:
                continue

            instances.append(instance)

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(instances, plugin)

        # Get the texture nodes from the instances
        nodes = []
        for instance in instances:
            for resource in instance.data.get("resources", []):
                if self.is_texture_resource(resource):
                    node = resource['node']
                    nodes.append(node)

        # Ensure unique
        nodes = list(set(nodes))

        if nodes:
            self.log.info("Selecting texture nodes: %s" % ", ".join(nodes))
            cmds.select(nodes, r=True, noExpand=True)
        else:
            self.log.info("No texture nodes found.")
            cmds.select(deselect=True)

    def is_texture_resource(self, resource):
        """Return whether the resource is a texture"""

        tags = resource.get("tags", [])
        if not TAGS_LOOKUP.issubset(tags):
            return False

        if resource.get("subfolder", None) != "textures":
            return False

        if "node" not in resource:
            return False

        return True


class CollectLookTextures(pyblish.api.InstancePlugin):
    """Collect look textures

    Includes the link from source to destination.

    """

    order = pyblish.api.CollectorOrder + 0.498
    label = 'Textures'
    families = ["colorbleed.look"]
    actions = [SelectTextureNodesAction]

    def process(self, instance):

        verbose = instance.data.get("verbose", False)

        # Get textures from sets
        sets = instance.data["lookData"]["sets"]
        if not sets:
            raise RuntimeError("No look sets found for the nodes in the "
                               "instance. %s" % sets)

        # Get the file nodes
        history = cmds.listHistory(sets) or []
        files = cmds.ls(history, type="file")
        files = list(set(files))

        resources = instance.data.get("resources", [])
        for node in files:
            resource = self.collect_resources(node, verbose)
            if not resource:
                continue
            resources.append(resource)

        # Store resources
        instance.data['resources'] = resources

    def collect_resources(self, node, verbose=False):
        """Collect the link to the file(s) used (resource)

        Args:
            node (str): name of the node
            verbose (bool): enable debug information

        Returns:
            dict
        """

        attribute = "{}.fileTextureName".format(node)
        source = cmds.getAttr(attribute)

        # Get the computed file path (e.g. the one with the <UDIM> pattern
        # in it) so we can reassign this computed file path whenever
        # we need to.
        computed_attribute = "{}.computedFileTextureNamePattern".format(node)
        computed_source = cmds.getAttr(computed_attribute)
        if source != computed_source:
            if verbose:
                self.log.debug("File node computed pattern differs from "
                               "original pattern: {0} "
                               "({1} -> {2})".format(node,
                                                     source,
                                                     computed_source))

            # We replace backslashes with forward slashes because V-Ray
            # can't handle UDIM files with backslashes in the
            # paths as the computed patterns
            source = computed_source.replace("\\", "/")

        files = shader.get_file_node_files(node)
        if not files:
            self.log.error("File node does not have a texture set: "
                           "{0}".format(node))
            return

        # Define the resource
        resource = {"tags": TAGS[:],
                    "node": node,
                    "attribute": attribute,
                    "source": source,  # required for resources
                    "files": files,  # required for resources
                    "subfolder": "textures"  # optional for resources
                    }

        return resource
colorbleed/plugins/maya/publish/collect_textures.py (new file, 284 lines)
@@ -0,0 +1,284 @@
from maya import cmds

import pyblish.api
import cb.utils.maya.shaders as shaders

TAGS = ["maya", "attribute", "look"]
TAGS_LOOKUP = set(TAGS)


class SelectTextureNodesAction(pyblish.api.Action):
    """Select the nodes related to the collected file textures"""

    label = "Select texture nodes"
    on = "succeeded"  # This action is only available on a succeeded plug-in
    icon = "search"  # Icon from Awesome Icon

    def process(self, context, plugin):

        self.log.info("Finding textures..")

        # Get the errored instances
        instances = []
        for result in context.data["results"]:
            instance = result["instance"]
            if instance is None:
                continue

            instances.append(instance)

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(instances, plugin)

        # Get the texture nodes from the instances
        nodes = []
        for instance in instances:
            for resource in instance.data.get("resources", []):
                if self.is_texture_resource(resource):
                    node = resource['node']
                    nodes.append(node)

        # Ensure unique
        nodes = list(set(nodes))

        if nodes:
            self.log.info("Selecting texture nodes: %s" % ", ".join(nodes))
            cmds.select(nodes, r=True, noExpand=True)
        else:
            self.log.info("No texture nodes found.")
            cmds.select(deselect=True)

    def is_texture_resource(self, resource):
        """Return whether the resource is a texture"""

        tags = resource.get("tags", [])
        if not TAGS_LOOKUP.issubset(tags):
            return False

        if resource.get("subfolder", None) != "textures":
            return False

        if "node" not in resource:
            return False

        return True


class CollectLookTextures(pyblish.api.InstancePlugin):
    """Collect look textures

    Includes the link from source to destination.

    """

    order = pyblish.api.CollectorOrder + 0.35
    label = 'Collect Look Textures'
    families = ["colorbleed.texture"]
    actions = [SelectTextureNodesAction]

    IGNORE = ["out_SET", "controls_SET", "_INST"]

    def process(self, instance):

        verbose = instance.data.get("verbose", False)

        # Get all texture nodes from the shader networks
        sets = self.gather_sets(instance)
        instance_members = {str(i) for i in cmds.ls(instance, long=True,
                                                    absoluteName=True)}

        self.log.info("Gathering set relations..")
        for objset in sets:
            self.log.debug("From %s.." % objset)
            content = cmds.sets(objset, query=True)
            objset_members = sets[objset]["members"]
            for member in cmds.ls(content, long=True, absoluteName=True):
                member_data = self.collect_member_data(member,
                                                       objset_members,
                                                       instance_members,
                                                       verbose)
                if not member_data:
                    continue
                objset_members.append(member_data)

        # Get the file nodes
        history = cmds.listHistory(sets.keys()) or []
        files = cmds.ls(history, type="file")
        files = list(set(files))

        resources = instance.data.get("resources", [])
        for node in files:
            resource = self.collect_resources(node, verbose)
            if not resource:
                continue
            resources.append(resource)

        instance.data['resources'] = resources

    def gather_sets(self, instance):
        """Gather all objectSets which are of importance for publishing

        It checks if all nodes in the instance are related to any objectSet
        which is relevant for publishing

        Args:
            instance (list): all nodes to be published

        Returns:
            dict
        """

        # Get view sets (so we can ignore those sets later)
        sets = dict()
        view_sets = set()
        for panel in cmds.getPanel(type="modelPanel"):
            view_set = cmds.modelEditor(panel, query=True,
                                        viewObjects=True)
            if view_set:
                view_sets.add(view_set)

        for node in instance:
            related_sets = self.get_related_sets(node, view_sets)
            if not related_sets:
                continue

            for objset in related_sets:
                if objset in sets:
                    continue
                unique_id = cmds.getAttr("%s.cbId" % objset)
                sets[objset] = {"name": objset,
                                "uuid": unique_id,
                                "members": list()}
        return sets

    def collect_resources(self, node, verbose=False):
        """Collect the link to the file(s) used (resource)
        Args:
            node (str): name of the node
            verbose (bool): enable debug information

        Returns:
            dict
        """

        # assure node includes full path
        node = cmds.ls(node, long=True)[0]
        attribute = "{}.fileTextureName".format(node)
        source = cmds.getAttr(attribute)

        # Get the computed file path (e.g. the one with the <UDIM> pattern
        # in it) so we can reassign this computed file path whenever
        # we need to.
        computed_attribute = "{}.computedFileTextureNamePattern".format(node)
        computed_source = cmds.getAttr(computed_attribute)
        if source != computed_source:
            if verbose:
                self.log.debug("File node computed pattern differs from "
                               "original pattern: {0} "
                               "({1} -> {2})".format(node,
                                                     source,
                                                     computed_source))

            # We replace backslashes with forward slashes because V-Ray
            # can't handle UDIM files with backslashes in the
            # paths as the computed patterns
            source = computed_source.replace("\\", "/")

        files = shaders.get_file_node_files(node)
        if not files:
            self.log.error("File node does not have a texture set: "
                           "{0}".format(node))
            return

        # Define the resource
        # todo: find a way to generate the destination for the publisher
        resource = {"tags": TAGS[:],
                    "node": node,
                    "attribute": attribute,
                    "source": source,  # required for resources
                    "files": files}  # required for resources

        return resource

    def collect_member_data(self, member, objset_members, instance_members,
                            verbose=False):
        """Get all information of the node
        Args:
            member (str): the name of the node to check
            objset_members (list): the objectSet members
            instance_members (set): the collected instance members
            verbose (bool): get debug information

        Returns:
            dict

        """

        node, components = (member.rsplit(".", 1) + [None])[:2]

        # Only include valid members of the instance
        if node not in instance_members:
            if verbose:
                self.log.info("Skipping member %s" % member)
            return

        if member in [m["name"] for m in objset_members]:
            return

        if verbose:
            self.log.debug("Such as %s.." % member)

        member_data = {"name": node,
                       "uuid": cmds.getAttr("{}.cbId".format(node))}

        # Include components information when components are assigned
        if components:
            member_data["components"] = components

        return member_data

    def get_related_sets(self, node, view_sets):
        """Get the sets which do not belong to any specific group

        Filters out based on:
        - id attribute is NOT `pyblish.avalon.container`
        - shapes and deformer shapes (alembic creates meshShapeDeformed)
        - set name ends with any from a predefined list
        - set is not in viewport sets (isolate selected for example)

        Args:
            node (str): name of the current node to check
        """

        ignored = ["pyblish.avalon.instance", "pyblish.avalon.container"]

        related_sets = cmds.listSets(object=node, extendToShape=False)
        if not related_sets:
            return []

        # Ignore containers
        sets = [s for s in related_sets if
                not cmds.attributeQuery("id", node=s, exists=True) or
                not cmds.getAttr("%s.id" % s) in ignored]

        # Exclude deformer sets
        # Autodesk documentation on the listSets command:
        # type(uint): Returns all sets in the scene of the given
        # >>> type:
        # >>> 1 - all rendering sets
        # >>> 2 - all deformer sets
        deformer_sets = cmds.listSets(object=node, extendToShape=False,
                                      type=2) or []
        deformer_sets = set(deformer_sets)  # optimize lookup
        sets = [s for s in sets if s not in deformer_sets]

        # Ignore specifically named sets
        sets = [s for s in sets if not any(s.endswith(x) for x in self.IGNORE)]

        # Ignore viewport filter view sets (from isolate select and
        # viewports)
        sets = [s for s in sets if s not in view_sets]

        self.log.info("Found sets %s for %s" % (related_sets, node))

        return sets
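The &lt;UDIM&gt; handling above boils down to a small check on two real file-node attributes. A sketch with an illustrative node name:

from maya import cmds

node = "file1"  # hypothetical file node
source = cmds.getAttr(node + ".fileTextureName")
computed = cmds.getAttr(node + ".computedFileTextureNamePattern")
if source != computed:
    # V-Ray chokes on backslashes in UDIM patterns, so normalize
    source = computed.replace("\\", "/")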
@@ -1,6 +1,8 @@
import os
+import copy

from maya import cmds

import avalon.maya
import colorbleed.api
from colorbleed.maya.lib import extract_alembic

@@ -14,38 +16,42 @@ class ExtractColorbleedAlembic(colorbleed.api.Extractor):

    """
    label = "Alembic"
-   families = ["colorbleed.model",
-               "colorbleed.pointcache",
-               "colorbleed.proxy"]
+   families = ["colorbleed.model", "colorbleed.pointcache"]
    optional = True

    def process(self, instance):

        parent_dir = self.staging_dir(instance)
        filename = "%s.abc" % instance.name
        path = os.path.join(parent_dir, filename)
-       options = dict()

-       # Collect the start and end including handles if any provided,
-       # otherwise assume frame 1 as startFrame and the same as endFrame
-       start = instance.data.get("startFrame", 1)
-       end = instance.data.get("endFrame", start)
-       handles = instance.data.get("handles", 0)
-       if handles:
-           start -= handles
-           end += handles
-       options['frameRange'] = (start, end)
+       attrPrefix = instance.data.get("attrPrefix", [])
+       attrPrefix.append("cb")

-       # Default verbosity to False
-       options['verbose'] = instance.data.get("verbose", False)
+       options = copy.deepcopy(instance.data)
+       options['attrPrefix'] = attrPrefix

-       # Collect instance options if found in `instance.data`
-       # for specific settings (for user customization)
-       for key in ["renderableOnly", "writeColorSets"]:
-           if key in instance.data:
-               options[key] = instance.data[key]
+       # Ensure visibility keys are written
+       options['writeVisibility'] = True
+
+       # Write creases
+       options['writeCreases'] = True
+
+       # Ensure UVs are written
+       options['uvWrite'] = True
+
+       options['selection'] = True
+       options["attr"] = ["cbId"]
+
+       # force select items to ensure all items get exported by Alembic
+       members = instance.data("setMembers")
+       print("Members : {}".format(members))
+
+       cmds.select(members)
        with avalon.maya.suspended_refresh():
            with avalon.maya.maintained_selection():
-               nodes = instance[:]
-               cmds.select(nodes, replace=True, noExpand=True)
                extract_alembic(file=path, **options)

        cmds.select(clear=True)
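The handles arithmetic the old extractor performed before handing a frameRange to extract_alembic is simple but easy to get off by one side; a runnable sketch:

def frame_range(data):
    # Include handles on both ends; default to a single frame at 1.
    start = data.get("startFrame", 1)
    end = data.get("endFrame", start)
    handles = data.get("handles", 0)
    return (start - handles, end + handles)

assert frame_range({"startFrame": 10, "endFrame": 20, "handles": 5}) == (5, 25)
assert frame_range({}) == (1, 1)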
@@ -1,4 +1,10 @@
import os

+from maya import cmds
+
+import avalon.maya
import colorbleed.api
+from colorbleed.maya.lib import extract_alembic


class ExtractColorbleedAnimation(colorbleed.api.Extractor):

@@ -14,10 +20,6 @@ class ExtractColorbleedAnimation(colorbleed.api.Extractor):
    families = ["colorbleed.animation"]

    def process(self, instance):
-       import os
-       from maya import cmds
-       import avalon.maya
-       from colorbleed.maya.lib import extract_alembic

        # Collect the out set nodes
        out_sets = [node for node in instance if node.endswith("out_SET")]

@@ -27,11 +29,13 @@ class ExtractColorbleedAnimation(colorbleed.api.Extractor):
        out_set = out_sets[0]
        nodes = cmds.sets(out_set, query=True)

-       # Include all descendents
+       # Include all descendants
        nodes += cmds.listRelatives(nodes,
                                    allDescendents=True,
                                    fullPath=True) or []

+       print("Exporting {} as alembic".format(nodes))

        # Collect the start and end including handles
        start = instance.data["startFrame"]
        end = instance.data["endFrame"]

@@ -52,13 +56,10 @@ class ExtractColorbleedAnimation(colorbleed.api.Extractor):
        with avalon.maya.suspended_refresh():
            with avalon.maya.maintained_selection():
                cmds.select(nodes, noExpand=True)
-               extract_alembic(file=path, **{
-                   "selection": True,
-                   "frameRange": (start, end),
-                   "writeVisibility": True,
-                   "writeUV": True,
-                   "step": instance.data.get("step", 1.0),
-                   "attributePrefix": ("mb",)
-               })
+               extract_alembic(file=path,
+                               startFrame=start,
+                               endFrame=end,
+                               **{"step": instance.data.get("step", 1.0),
+                                  "attr": ["cbId"]})

        self.log.info("Extracted {} to {}".format(instance, dirname))
@@ -3,10 +3,11 @@ import json

from maya import cmds

import pyblish.api
+import avalon.maya
import colorbleed.api

-import cb.utils.maya.context as context
+from cb.utils.maya import context


class ExtractLook(colorbleed.api.Extractor):

@@ -18,9 +19,10 @@ class ExtractLook(colorbleed.api.Extractor):

    """

-   label = "Look (Maya ASCII + JSON)"
+   label = "Extract Look (Maya ASCII + JSON)"
    hosts = ["maya"]
-   families = ["colorbleed.look"]
+   families = ["colorbleed.lookdev"]
    order = pyblish.api.ExtractorOrder + 0.2

    def process(self, instance):

@@ -29,6 +31,7 @@ class ExtractLook(colorbleed.api.Extractor):
        maya_fname = "{0}.ma".format(instance.name)
        json_fname = "{0}.json".format(instance.name)

+       # Make texture dump folder
        maya_path = os.path.join(dir_path, maya_fname)
        json_path = os.path.join(dir_path, json_fname)

@@ -37,42 +40,31 @@ class ExtractLook(colorbleed.api.Extractor):
        # Remove all members of the sets so they are not included in the
        # exported file by accident
        self.log.info("Extract sets (Maya ASCII)..")
-       sets = instance.data["lookSets"]
+       lookdata = instance.data["lookData"]
+       sets = lookdata["sets"]

-       # Define the texture file node remapping
-       resource_remap = dict()
-       # required tags to be a look resource
-       required_tags = ["maya", "attribute", "look"]
-       resources = instance.data.get("resources", [])
-       for resource in resources:
-           resource_tags = resource.get("tags", [])
-           if all(tag in resource_tags for tag in required_tags):
-               node = resource['node']
-               destination = resource['destination']
-               resource_remap["{}.fileTextureName".format(node)] = destination

-       # Extract in corect render layer
+       # Extract in correct render layer
        layer = instance.data.get("renderlayer", "defaultRenderLayer")
        with context.renderlayer(layer):
            # TODO: Ensure membership edits don't become renderlayer overrides
-           with context.empty_sets(sets):
-               with context.attribute_values(resource_remap):
-                   with avalon.maya.maintained_selection():
-                       cmds.select(sets, noExpand=True)
-                       cmds.file(maya_path,
-                                 force=True,
-                                 typ="mayaAscii",
-                                 exportSelected=True,
-                                 preserveReferences=False,
-                                 channels=True,
-                                 constraints=True,
-                                 expressions=True,
-                                 constructionHistory=True)
+           with avalon.maya.maintained_selection():
+               cmds.select(sets, noExpand=True)
+               cmds.file(maya_path,
+                         force=True,
+                         typ="mayaAscii",
+                         exportSelected=True,
+                         preserveReferences=False,
+                         channels=True,
+                         constraints=True,
+                         expressions=True,
+                         constructionHistory=True)

        # Write the JSON data
        self.log.info("Extract json..")
-       data = {"attributes": instance.data["lookAttributes"],
-               "sets": instance.data["lookSetRelations"]}
+       data = {"attributes": lookdata["attributes"],
+               "sets": lookdata["relationships"]}

        with open(json_path, "w") as f:
            json.dump(data, f)
colorbleed/plugins/maya/publish/extract_textures.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import json
import os
import shutil

import pyblish.api
import colorbleed.api
import colorbleed.maya.lib as lib


class ExtractTextures(colorbleed.api.Extractor):

    label = "Extract Textures"
    hosts = ["maya"]
    families = ["colorbleed.texture"]
    order = pyblish.api.ExtractorOrder + 0.1

    def process(self, instance):

        self.log.info("Extracting textures ...")

        dir_path = self.staging_dir(instance)
        resources = instance.data["resources"]
        for resource in resources:
            self.copy_files(dir_path, resource["files"])

        self.log.info("Storing cross instance information ...")
        self.store_data(resources)

    def store_data(self, data):
        tmp_dir = lib.maya_temp_folder()
        tmp_file = os.path.join(tmp_dir, "resources.json")
        with open(tmp_file, "w") as f:
            json.dump(data, fp=f,
                      separators=(",", ":"),
                      ensure_ascii=False)

    def copy_files(self, dest, files):
        for f in files:
            fname = os.path.basename(f)
            dest_file = os.path.join(dest, fname)
            shutil.copy(f, dest_file)
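A round-trip sketch for the resources.json sidecar that store_data writes; here tempfile stands in for lib.maya_temp_folder(), which is an assumption about that helper's behavior:

import json
import os
import tempfile

tmp_dir = tempfile.mkdtemp()  # assumption: behaves like maya_temp_folder()
tmp_file = os.path.join(tmp_dir, "resources.json")
resources = [{"node": "file1", "files": ["/tex/diffuse.1001.exr"]}]
with open(tmp_file, "w") as f:
    json.dump(resources, f, separators=(",", ":"))
with open(tmp_file) as f:
    assert json.load(f) == resources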
@@ -1,8 +1,8 @@
+import maya.cmds as cmds
+
import pyblish.api
import colorbleed.api

-import cbra.utils.maya.node_uuid as id_utils


class ValidateLookMembersNodeIds(pyblish.api.InstancePlugin):
    """Validate look members have colorbleed id attributes

@@ -20,7 +20,8 @@ class ValidateLookMembersNodeIds(pyblish.api.InstancePlugin):
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look Members Id Attributes'
-   actions = [colorbleed.api.SelectInvalidAction]
+   actions = [colorbleed.api.SelectInvalidAction,
+              colorbleed.api.GenerateUUIDsOnInvalidAction]

    @staticmethod
    def get_invalid(instance):

@@ -40,7 +41,7 @@ class ValidateLookMembersNodeIds(pyblish.api.InstancePlugin):
        # Ensure all nodes have a cbId
        invalid = list()
        for node in members:
-           if not id_utils.has_id(node):
+           if not cmds.attributeQuery("cbId", node=node, exists=True) \
+                   or not cmds.getAttr("{}.cbId".format(node)):
                invalid.append(node)

        return invalid
@@ -5,7 +5,14 @@ from maya import cmds

import pyblish.api
import colorbleed.api

-import cbra.utils.maya.node_uuid as id_utils

+def get_unique_id(node):
+    attr = 'cbId'
+    unique_id = None
+    has_attribute = cmds.attributeQuery(attr, node=node, exists=True)
+    if has_attribute:
+        unique_id = cmds.getAttr("{}.{}".format(node, attr))
+    return unique_id


class ValidateLookMembersUnique(pyblish.api.InstancePlugin):

@@ -25,15 +32,16 @@ class ValidateLookMembersUnique(pyblish.api.InstancePlugin):
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look Members Unique'
-   actions = [colorbleed.api.SelectInvalidAction]
+   actions = [colorbleed.api.SelectInvalidAction,
+              colorbleed.api.GenerateUUIDsOnInvalidAction]

    @staticmethod
    def get_invalid(instance):

        # Get all members from the sets
        members = []
-       relations = instance.data["lookData"]["sets"]
-       for sg in relations:
+       relationships = instance.data["lookData"]["relationships"]
+       for sg in relationships:
            sg_members = sg['members']
            sg_members = [member['name'] for member in sg_members]
            members.extend(sg_members)

@@ -45,10 +53,9 @@ class ValidateLookMembersUnique(pyblish.api.InstancePlugin):
        # Group members per id
        id_nodes = defaultdict(set)
        for node in members:
-           node_id = id_utils.get_id(node)
+           node_id = get_unique_id(node)
            if not node_id:
                continue

            id_nodes[node_id].add(node)

        invalid = list()

@@ -61,8 +68,9 @@ class ValidateLookMembersUnique(pyblish.api.InstancePlugin):
    def process(self, instance):
        """Process all meshes"""

+       print(self.actions)

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Members found with non-unique "
                               "asset IDs: {0}".format(invalid))
@@ -1,9 +1,10 @@
from collections import defaultdict

+import maya.cmds as cmds
+
import pyblish.api
import colorbleed.api

-import cbra.utils.maya.node_uuid as id_utils


class ValidateLookNodeUniqueIds(pyblish.api.InstancePlugin):
    """Validate look sets have unique colorbleed id attributes

@@ -15,7 +16,7 @@ class ValidateLookNodeUniqueIds(pyblish.api.InstancePlugin):
    hosts = ['maya']
    label = 'Look Id Unique Attributes'
    actions = [colorbleed.api.SelectInvalidAction,
-              colorbleed.api.GenerateUUIDsOnInvalidAction]
+              colorbleed.api.RepairAction]

    @staticmethod
    def get_invalid(instance):

@@ -26,13 +27,15 @@ class ValidateLookNodeUniqueIds(pyblish.api.InstancePlugin):
        id_sets = defaultdict(list)
        invalid = list()
        for node in nodes:
-           id = id_utils.get_id(node)
-           if not id:
+           unique_id = None
+           if cmds.attributeQuery("mbId", node=node, exists=True):
+               unique_id = cmds.getAttr("{}.mbId".format(node))
+           if not unique_id:
                continue

-           id_sets[id].append(node)
+           id_sets[unique_id].append(node)

-       for id, nodes in id_sets.iteritems():
+       for unique_id, nodes in id_sets.iteritems():
            if len(nodes) > 1:
                invalid.extend(nodes)

@@ -42,7 +45,6 @@ class ValidateLookNodeUniqueIds(pyblish.api.InstancePlugin):
        """Process all meshes"""

        invalid = self.get_invalid(instance)

        if invalid:
            raise RuntimeError("Nodes found with non-unique "
                               "asset IDs: {0}".format(invalid))
@@ -0,0 +1,34 @@
import re

import pyblish.api
import colorbleed.api


class ValidateNamingConvention(pyblish.api.InstancePlugin):

    label = "Naming Convention"
    families = ["colorbleed.model"]
    hosts = ["maya"]
    actions = [colorbleed.api.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):

        invalid = []
        # todo: change pattern to company standard
        pattern = re.compile("[a-zA-Z]+_[A-Z]{3}")

        nodes = list(instance)
        for node in nodes:
            match = pattern.match(node)
            if not match:
                invalid.append(node)

        return invalid

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            self.log.error("Found invalid naming convention. Failed nodes:\n"
                           "%s" % invalid)
            raise RuntimeError("Invalid naming convention. See log.")
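The placeholder pattern above ("[a-zA-Z]+_[A-Z]{3}") accepts names like "door_GEO"; a quick runnable check against a few candidates:

import re

pattern = re.compile("[a-zA-Z]+_[A-Z]{3}")
assert pattern.match("door_GEO")
assert pattern.match("leftArm_CTL")
assert not pattern.match("door_geo")   # suffix must be uppercase
assert not pattern.match("GEO")        # missing the "name_" part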
@@ -23,9 +23,7 @@ class ValidateResources(pyblish.api.InstancePlugin):
    def process(self, instance):

        for resource in instance.data.get('resources', []):

            # Required data
-           assert "source" in resource
-           assert "destination" in resource
-           assert "files" in resource
+           assert "source" in resource, "No source found"
+           assert "files" in resource, "No files from source"
            assert all(os.path.exists(f) for f in resource['files'])
@@ -44,7 +44,7 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
        output_content = cmds.sets("out_SET", query=True) or []
        assert output_content, "Must have members in rig out_SET"

-       controls_content = cmds.set("controls_SET", query=True) or []
+       controls_content = cmds.sets("controls_SET", query=True) or []
        assert controls_content, "Must have members in rig controls_SET"

        root_node = cmds.ls(set_members, assemblies=True)

@@ -56,15 +56,15 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
        self.invalid_controls = self.validate_controls(controls_content,
                                                       hierarchy)

-       if self.invalid_hierachy:
+       if self.invalid_hierarchy:
            self.log.error("Found nodes which reside outside of root group "
                           "while they are set up for publishing."
-                          "\n%s" % self.invalid_hierachy)
+                          "\n%s" % self.invalid_hierarchy)
            error = True

-       if self.not_transforms:
+       if self.invalid_controls:
            self.log.error("Only transforms can be part of the controls_SET."
-                          "\n%s" % self.not_transforms)
+                          "\n%s" % self.invalid_controls)
            error = True

        if self.invalid_geometry:

@@ -100,6 +100,7 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
        """
        errors = []
        for node in nodes:
+           print(node)
            if node not in hierarchy:
                errors.append(node)
        return errors

@@ -128,10 +129,12 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
        # The user can add the shape node to the out_set, this will result
        # in none when querying allDescendents
        all_shapes = set_members + shapes
+       all_long_names = [cmds.ls(i, long=True)[0] for i in all_shapes]

        # geometry
-       invalid_shapes = self.validate_hierarchy(hierarchy, all_shapes)
-       self.invalid_hierachy.extend(invalid_shapes)
+       invalid_shapes = self.validate_hierarchy(hierarchy,
+                                                all_long_names)
+       self.invalid_hierarchy.extend(invalid_shapes)
        for shape in all_shapes:
            nodetype = cmds.nodeType(shape)
            if nodetype in self.ignore_nodes:

@@ -157,8 +160,10 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
        """

        errors = []
-       invalid_controllers = self.validate_hierarchy(hierarchy, set_members)
-       self.invalid_hierachy.extend(invalid_controllers)
+       all_long_names = [cmds.ls(i, long=True)[0] for i in set_members]
+       invalid_controllers = self.validate_hierarchy(hierarchy,
+                                                     all_long_names)
+       self.invalid_hierarchy.extend(invalid_controllers)
        for node in set_members:
            nodetype = cmds.nodeType(node)
            if nodetype in self.ignore_nodes:
@ -1,8 +1,12 @@
|
|||
import logging
|
||||
|
||||
from maya import cmds
|
||||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
log = logging.getLogger("Rig Controllers")
|
||||
|
||||
|
||||
class ValidateRigControllers(pyblish.api.InstancePlugin):
|
||||
"""Check if the controllers have the transformation attributes set to
|
||||
|
|
@ -12,6 +16,7 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
|
|||
label = "Rig Controllers"
|
||||
hosts = ["maya"]
|
||||
families = ["colorbleed.rig"]
|
||||
actions = [colorbleed.api.RepairAction]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
|
|
@ -21,7 +26,7 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
|
|||
is_offset = list()
|
||||
|
||||
controls = cmds.sets("controls_SET", query=True)
|
||||
assert controls, "Must have controls in rig control_SET"
|
||||
assert controls, "Must have controls in rig controls_SET"
|
||||
|
||||
for control in controls:
|
||||
valid_keyed = self.validate_keyed_state(control)
|
||||
|
|
@ -41,15 +46,18 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
|
|||
if is_keyed:
|
||||
self.log.error("No controls can be keyes. Failed :\n"
|
||||
"%s" % is_keyed)
|
||||
error = True
|
||||
|
||||
if is_offset:
|
||||
self.log.error("All controls default transformation values. "
|
||||
"Failed :\n%s" % is_offset)
|
||||
error = True
|
||||
|
||||
if not_locked:
|
||||
self.log.error("All controls must have visibility "
|
||||
"attribute locked. Failed :\n"
|
||||
"%s" % not_locked)
|
||||
error = True
|
||||
|
||||
if error:
|
||||
raise RuntimeError("Invalid rig controllers. See log for details.")
|
||||
|
|

@ -78,3 +86,17 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
if animation_curves:
return False
return True

@classmethod
def repair(cls, instance):

# lock all controllers in controls_SET
controls = cmds.sets("controls_SET", query=True)
for control in controls:
attr = "{}.visibility".format(control)
locked = cmds.getAttr(attr, lock=True)
if not locked:
print("Locking visibility for %s" % control)
cmds.setAttr(attr, lock=True)

continue
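Only the tail of validate_keyed_state survives in the hunk above: a control is considered keyed as soon as animation curves are found on it. A minimal sketch of how that check could be driven; the listConnections flags are an assumption, since only the animation_curves branch appears in the diff:

    from maya import cmds

    def validate_keyed_state(control):
        """Return False when the control has incoming animation curves."""
        # Assumed detection: keys show up as animCurve nodes feeding the control.
        animation_curves = cmds.listConnections(control,
                                                source=True,
                                                destination=False,
                                                type="animCurve") or []
        if animation_curves:
            return False
        return True

    # Usage: collect every keyed control in the rig's control set.
    controls = cmds.sets("controls_SET", query=True) or []
    is_keyed = [c for c in controls if not validate_keyed_state(c)]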

@ -1,3 +1,7 @@
from collections import defaultdict

import maya.cmds as cmds

import pyblish.api
import colorbleed.api

@ -16,12 +20,9 @@ class ValidateUniqueNodeIds(pyblish.api.InstancePlugin):
def get_invalid_dict(instance):
"""Return a dictionary mapping of id key to list of member nodes"""

import maya.cmds as cmds

uuid_attr = "mbId"
uuid_attr = "cbId"

# Collect each id with their members
from collections import defaultdict
ids = defaultdict(list)
for member in instance:
has_attr = cmds.attributeQuery(uuid_attr, node=member, exists=True)

@ -60,4 +61,3 @@ class ValidateUniqueNodeIds(pyblish.api.InstancePlugin):
if invalid:
raise RuntimeError("Nodes found with non-unique "
"asset IDs: {0}".format(invalid))
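A minimal sketch of the id grouping this validator relies on; the getAttr read and the node listing are assumptions for illustration, while the cbId attribute name and the attributeQuery existence check come straight from the diff:

    from collections import defaultdict

    from maya import cmds

    uuid_attr = "cbId"
    ids = defaultdict(list)
    for member in cmds.ls(type="transform", long=True):  # stand-in for instance
        if not cmds.attributeQuery(uuid_attr, node=member, exists=True):
            continue
        value = cmds.getAttr("{0}.{1}".format(member, uuid_attr))
        ids[value].append(member)

    # Every id that maps to more than one member is a non-unique asset ID.
    invalid = [members for members in ids.values() if len(members) > 1]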

@ -1,13 +1,12 @@
import os
import errno
import shutil
from pprint import pformat

import pyblish.api
from avalon import api, io
from avalon import io


class IntegrateMindbenderAsset(pyblish.api.InstancePlugin):
class IntegrateAsset(pyblish.api.InstancePlugin):
"""Write to files and metadata

This plug-in exposes your data to others by encapsulating it

@ -30,226 +29,33 @@ class IntegrateMindbenderAsset(pyblish.api.InstancePlugin):
"""

label = "Integrate Asset"
order = pyblish.api.IntegratorOrder
families = [
"colorbleed.model",
"colorbleed.rig",
"colorbleed.animation",
"colorbleed.camera",
"colorbleed.lookdev",
"colorbleed.historyLookdev",
"colorbleed.group",
"colorbleed.pointcache"
]
order = pyblish.api.IntegratorOrder + 0.1
families = ["colorbleed.model",
"colorbleed.rig",
"colorbleed.animation",
"colorbleed.camera",
"colorbleed.lookdev",
"colorbleed.texture",
"colorbleed.historyLookdev",
"colorbleed.group"]

def process(self, instance):
# Required environment variables
PROJECT = os.environ["AVALON_PROJECT"]
ASSET = instance.data.get("asset") or os.environ["AVALON_ASSET"]
SILO = os.environ["AVALON_SILO"]
LOCATION = os.getenv("AVALON_LOCATION")

# todo(marcus): avoid hardcoding labels in the integrator
representation_labels = {".ma": "Maya Ascii",
".source": "Original source file",
".abc": "Alembic"}
# get needed data
traffic = instance.data["traffic"]
representations = instance.data["representations"]

context = instance.context
# Atomicity
#
# Guarantee atomic publishes - each asset contains
# an identical set of members.
# __
# / o
# / \
# | o |
# \ /
# o __/
#
assert all(result["success"] for result in context.data["results"]), (
"Atomicity not held, aborting.")
self.log.info("Registering {} items".format(len(representations)))
io.insert_many(representations)

# Assemble
#
# |
# v
# ---> <----
# ^
# |
#
stagingdir = instance.data.get("stagingDir")
assert stagingdir, ("Incomplete instance \"%s\": "
"Missing reference to staging area." % instance)
# moving files
for src, dest in traffic:
self.log.info("Copying file .. {} -> {}".format(src, dest))
self.copy_file(src, dest)

self.log.debug("Establishing staging directory @ %s" % stagingdir)

project = io.find_one({"type": "project"})
asset = io.find_one({"name": ASSET})

assert all([project, asset]), ("Could not find current project or "
"asset '%s'" % ASSET)

subset = self.get_subset(asset, instance)

# get next version
latest_version = io.find_one({"type": "version",
"parent": subset["_id"]},
{"name": True},
sort=[("name", -1)])

next_version = 1
if latest_version is not None:
next_version += latest_version["name"]

self.log.debug("Next version: %i" % next_version)
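The bump needs no counting: a single io.find_one sorted on name in descending order, with a name-only projection, returns the highest existing version document, and the next number is derived from it. A worked example of just that arithmetic:

    # With versions named 1, 2 and 3 in the database, the sorted,
    # projected query above would return {"name": 3}.
    latest_version = {"name": 3}

    next_version = 1
    if latest_version is not None:
        next_version += latest_version["name"]

    assert next_version == 4  # on a first publish (None) it stays 1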

version_data = self.create_version_data(context, instance)
version = self.create_version(subset=subset,
version_number=next_version,
locations=[LOCATION],
data=version_data)

self.log.debug("Creating version: %s" % pformat(version))
version_id = io.insert_one(version).inserted_id

# Write to disk
# _
# | |
# _| |_
# ____\ /
# |\ \ / \
# \ \ v \
# \ \________.
# \|________|
#
template_data = {
"root": api.registered_root(),
"project": PROJECT,
"silo": SILO,
"asset": ASSET,
"subset": subset["name"],
"version": version["name"],
}

template_publish = project["config"]["template"]["publish"]

for fname in os.listdir(stagingdir):
name, ext = os.path.splitext(fname)
template_data["representation"] = ext[1:]

src = os.path.join(stagingdir, fname)
dst = template_publish.format(**template_data)

# Backwards compatibility
if fname == ".metadata.json":
dirname = os.path.dirname(dst)
dst = os.path.join(dirname, ".metadata.json")

self.log.info("Copying %s -> %s" % (src, dst))

# copy source to destination (library)
self.copy_file(src, dst)

representation = {
"schema": "avalon-core:representation-2.0",
"type": "representation",
"parent": version_id,
"name": ext[1:],
"data": {"label": representation_labels.get(ext)},
"dependencies": instance.data.get("dependencies", "").split(),

# Imprint shortcut to context
# for performance reasons.
"context": {
"project": PROJECT,
"asset": ASSET,
"silo": SILO,
"subset": subset["name"],
"version": version["name"],
"representation": ext[1:]
}
}

io.insert_one(representation)

self.log.info("Successfully integrated \"%s\" to \"%s\"" % (
instance, dst))
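Each staged file lands at a path produced purely by string formatting. A sketch with a hypothetical publish template; the real template string lives in the project document under config.template.publish, so only the placeholder names are taken from the template_data assembled above:

    # Hypothetical template, for illustration only.
    template_publish = ("{root}/{project}/{silo}/{asset}/publish/"
                        "{subset}/v{version:0>3}/{subset}.{representation}")

    template_data = {"root": "P:/projects", "project": "test",
                     "silo": "assets", "asset": "bluey",
                     "subset": "modelDefault", "version": 1,
                     "representation": "ma"}

    print(template_publish.format(**template_data))
    # P:/projects/test/assets/bluey/publish/modelDefault/v001/modelDefault.ma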

def get_subset(self, asset, instance):

subset = io.find_one({"type": "subset",
"parent": asset["_id"],
"name": instance.data["subset"]})

if subset is None:
subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name)

_id = io.insert_one({
"schema": "avalon-core:subset-2.0",
"type": "subset",
"name": subset_name,
"data": {},
"parent": asset["_id"]
}).inserted_id

subset = io.find_one({"_id": _id})

return subset

def create_representation(self):
pass

def create_version(self, subset, version_number, locations, data=None):
"""Create a version document for the given subset

Arguments:
subset (dict): the registered subset of the asset
version_number (int): the version number
locations (list): the currently registered locations
"""
# Imprint currently registered location
version_locations = [location for location in locations if
location is not None]

return {"schema": "avalon-core:version-2.0",
"type": "version",
"parent": subset["_id"],
"name": version_number,
"locations": version_locations,
"data": data}

def create_version_data(self, context, instance):
"""
Create the data collection for the version
Args:
context (object): the current context
instance (object): the current instance being published

Returns:
dict: the required information with instance.data as key
"""

families = []
current_families = instance.data.get("families", list())
instance_family = instance.data.get("family", None)

families += current_families
if instance_family is not None:
families.append(instance_family)

# create relative source path for DB
relative_path = os.path.relpath(context.data["currentFile"],
api.registered_root())
source = os.path.join("{root}", relative_path).replace("\\", "/")

version_data = {"families": families,
"time": context.data["time"],
"author": context.data["user"],
"source": source,
"comment": context.data.get("comment")}

return dict(instance.data, **version_data)
self.log.info("Removing temporary files and folders ...")
stagingdir = instance.data["stagingDir"]
shutil.rmtree(stagingdir)

def copy_file(self, src, dst):
"""Copy given source to destination

@ -271,4 +77,4 @@ class IntegrateMindbenderAsset(pyblish.api.InstancePlugin):
self.log.critical("An unexpected error occurred.")
raise

shutil.copy(src, dst)
shutil.copy(src, dst)

40 colorbleed/plugins/publish/post_look_integrate.py Normal file

@ -0,0 +1,40 @@
import json
import os

import colorbleed.maya.lib as lib

import pyblish.api


class IntegrateAsset(pyblish.api.InstancePlugin):
"""Remap source paths for lookdev and textures

"""

label = "Remap source paths"
order = pyblish.api.IntegratorOrder + 0.15
families = ["colorbleed.lookdev",
"colorbleed.texture"]

def process(self, instance):

family = instance.data['family']
resources = instance.data['resources']
version_folder = instance.data['versionFolder']

if family == "colorbleed.texture":
try:
lib.remap_resource_nodes(resources, folder=version_folder)
except Exception as e:
self.log.error(e)

if family == "colorbleed.lookdev":
try:
tmp_dir = lib.maya_temp_folder()
resource_file = os.path.join(tmp_dir, "resources.json")
with open(resource_file, "r") as f:
resources = json.load(f)
lib.remap_resource_nodes(resources)
except Exception as e:
self.log.error(e)
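This remap is sequenced against the other two integrators in this commit purely by the order attribute: plug-ins run in ascending order, so the pre-integrator (below) computes paths at IntegratorOrder, IntegrateAsset copies files at +0.1, and the remap rewrites source paths last at +0.15, once the copies exist. A tiny sketch of that sequencing; the base value is stock pyblish behaviour:

    import pyblish.api

    # The offsets come straight from the three integrator classes here.
    print(pyblish.api.IntegratorOrder)         # pre-integrate (3 in stock pyblish)
    print(pyblish.api.IntegratorOrder + 0.1)   # IntegrateAsset: copy + register
    print(pyblish.api.IntegratorOrder + 0.15)  # remap source paths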

251 colorbleed/plugins/publish/pre_integrate.py Normal file

@ -0,0 +1,251 @@
import os
import logging

import pyblish.api
from avalon import api, io
import colorbleed.filetypes as filetypes


log = logging.getLogger(__name__)


class PreIntegrateAsset(pyblish.api.InstancePlugin):
"""Resolve any dependency issues

This plug-in resolves any paths which, if not updated, might break
the published file.

The order of families is important: when working with lookdev you want to
first publish the texture, update the texture paths in the nodes and then
publish the shading network. The same goes for file-dependent assets.
"""

label = "Pre Integrate Asset"
order = pyblish.api.IntegratorOrder
families = ["colorbleed.model",
"colorbleed.rig",
"colorbleed.animation",
"colorbleed.camera",
"colorbleed.lookdev",
"colorbleed.texture",
"colorbleed.historyLookdev",
"colorbleed.group"]

def process(self, instance):

# Required environment variables
PROJECT = os.environ["AVALON_PROJECT"]
ASSET = instance.data.get("asset") or os.environ["AVALON_ASSET"]
SILO = os.environ["AVALON_SILO"]
LOCATION = os.getenv("AVALON_LOCATION")

# todo(marcus): avoid hardcoding labels in the integrator
representation_labels = {".ma": "Maya Ascii",
".source": "Original source file",
".abc": "Alembic"}

context = instance.context
# Atomicity
#
# Guarantee atomic publishes - each asset contains
# an identical set of members.
# __
# / o
# / \
# | o |
# \ /
# o __/
#
assert all(result["success"] for result in context.data["results"]), (
"Atomicity not held, aborting.")

# Assemble
#
# |
# v
# ---> <----
# ^
# |
#
stagingdir = instance.data.get("stagingDir")
assert stagingdir, ("Incomplete instance \"%s\": "
"Missing reference to staging area." % instance)

# extra check if stagingDir actually exists and is available

self.log.debug("Establishing staging directory @ %s" % stagingdir)

project = io.find_one({"type": "project"})
asset = io.find_one({"name": ASSET})

assert all([project, asset]), ("Could not find current project or "
"asset '%s'" % ASSET)

subset = self.get_subset(asset, instance)

# get next version
latest_version = io.find_one({"type": "version",
"parent": subset["_id"]},
{"name": True},
sort=[("name", -1)])

next_version = 1
if latest_version is not None:
next_version += latest_version["name"]

self.log.debug("Next version: %i" % next_version)

version_data = self.create_version_data(context, instance)
version = self.create_version(subset=subset,
version_number=next_version,
locations=[LOCATION],
data=version_data)

self.log.debug("Creating version ...")
version_id = io.insert_one(version).inserted_id

# Write to disk
# _
# | |
# _| |_
# ____\ /
# |\ \ / \
# \ \ v \
# \ \________.
# \|________|
#
root = api.registered_root()
template_data = {"root": root,
"project": PROJECT,
"silo": SILO,
"asset": ASSET,
"subset": subset["name"],
"version": version["name"]}

template_publish = project["config"]["template"]["publish"]

representations = []
traffic = []
staging_content = os.listdir(stagingdir)
for v, fname in enumerate(staging_content):

name, ext = os.path.splitext(fname)
template_data["representation"] = ext[1:]

src = os.path.join(stagingdir, fname)
dst = template_publish.format(**template_data)
if v == 0:
instance.data["versionFolder"] = os.path.dirname(dst)

# Files to copy as-is or to a specific folder
if ext in filetypes.accepted_images_types:
dirname = os.path.dirname(dst)
dst = os.path.join(dirname, fname)

# Backwards compatibility
if fname == ".metadata.json":
dirname = os.path.dirname(dst)
dst = os.path.join(dirname, fname)

# copy source to destination (library)
traffic.append([src, dst])

representation = {
"schema": "avalon-core:representation-2.0",
"type": "representation",
"parent": version_id,
"name": ext[1:],
"data": {"label": representation_labels.get(ext)},
"dependencies": instance.data.get("dependencies", "").split(),

# Imprint shortcut to context
# for performance reasons.
"context": {
"project": PROJECT,
"asset": ASSET,
"silo": SILO,
"subset": subset["name"],
"version": version["name"],
"representation": ext[1:]
}
}
representations.append(representation)

# store data for database and source / destinations
instance.data["representations"] = representations
instance.data["traffic"] = traffic

return representations
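The pre-integrator only stages intent: traffic holds [src, dst] pairs and representations holds the documents, both stored on the instance for IntegrateAsset to consume one order-step later. A minimal sketch of that hand-off, with hypothetical paths:

    # Hypothetical instance data as PreIntegrateAsset leaves it behind.
    instance_data = {
        "traffic": [["C:/staging/modelDefault.ma",
                     "P:/publish/modelDefault/v001/modelDefault.ma"]],
        "representations": [{"type": "representation", "name": "ma"}],
    }

    # IntegrateAsset then copies the files and registers the documents:
    for src, dest in instance_data["traffic"]:
        print("Copying file .. {} -> {}".format(src, dest))
    # io.insert_many(instance_data["representations"])  # the database write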

def get_subset(self, asset, instance):

subset = io.find_one({"type": "subset",
"parent": asset["_id"],
"name": instance.data["subset"]})

if subset is None:
subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name)

_id = io.insert_one({
"schema": "avalon-core:subset-2.0",
"type": "subset",
"name": subset_name,
"data": {},
"parent": asset["_id"]
}).inserted_id

subset = io.find_one({"_id": _id})

return subset

def create_version(self, subset, version_number, locations, data=None):
"""Create a version document for the given subset

Arguments:
subset (dict): the registered subset of the asset
version_number (int): the version number
locations (list): the currently registered locations
"""
# Imprint currently registered location
version_locations = [location for location in locations if
location is not None]

return {"schema": "avalon-core:version-2.0",
"type": "version",
"parent": subset["_id"],
"name": version_number,
"locations": version_locations,
"data": data}

def create_version_data(self, context, instance):
"""Create the data collection for the version

Args:
context: the current context
instance: the current instance being published

Returns:
dict: the required information with instance.data as key
"""

families = []
current_families = instance.data.get("families", list())
instance_family = instance.data.get("family", None)

families += current_families
if instance_family is not None:
families.append(instance_family)

# create relative source path for DB
relative_path = os.path.relpath(context.data["currentFile"],
api.registered_root())
source = os.path.join("{root}", relative_path).replace("\\", "/")

version_data = {"families": families,
"time": context.data["time"],
"author": context.data["user"],
"source": source,
"comment": context.data.get("comment")}

return dict(instance.data, **version_data)
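The stored source deliberately swaps the concrete root for a literal {root} token so the document stays valid when the project root moves. A worked example with hypothetical paths:

    import os

    root = "P:/projects"  # stand-in for api.registered_root()
    current_file = "P:/projects/test/assets/bluey/work/maya/scene_v001.ma"

    relative_path = os.path.relpath(current_file, root)
    source = os.path.join("{root}", relative_path).replace("\\", "/")

    print(source)
    # {root}/test/assets/bluey/work/maya/scene_v001.ma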