mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)
incorporated the dynamic menu, wip
This commit is contained in:
parent
e0ecc72e49
commit
92be4387ae
9 changed files with 1734 additions and 965 deletions
File diff suppressed because it is too large
@@ -1,90 +1,64 @@
import sys
import os
import logging

from avalon.vendor.Qt import QtWidgets, QtCore

import maya.cmds as cmds

self = sys.modules[__name__]
self._menu = "colorbleed"
self._parent = {widget.objectName(): widget for widget in
                QtWidgets.QApplication.topLevelWidgets()}.get("MayaWindow")

# set colorbleed scripts path in environment keys
os.environ["COLORBLEED_SCRIPTS"] = r"P:\pipeline\dev\git\cbMayaScripts\cbMayaScripts"

log = logging.getLogger(__name__)


def deferred():
    import os
    import site

    log.info("Attempting to install ...")

    # todo: replace path with server / library path
    site.addsitedir(r"C:\Users\User\Documents\development\scriptsmenu\python")

    from scriptsmenu import launchformaya
    import scriptsmenu.scriptsmenu as menu

    # load configuration of custom menu
    config_path = os.path.join(os.path.dirname(__file__), "menu.json")
    config = menu.load_configuration(config_path)

    # hack?
    parent = launchformaya._maya_main_menubar()
    cb_menu = menu.ScriptsMenu(title=self._menu.title(), parent=parent)

    # register modifiers
    modifiers = QtCore.Qt.ControlModifier | QtCore.Qt.ShiftModifier
    cb_menu.register_callback(modifiers, launchformaya.to_shelf)

    # apply configuration
    menu.load_from_configuration(cb_menu, config)


def uninstall():
    log.info("Attempting to uninstall ..")
    app = QtWidgets.QApplication.instance()
    widgets = dict((w.objectName(), w) for w in app.allWidgets())
    menu = widgets.get(self._menu)

    if menu:
        try:
            menu.deleteLater()
            del menu
        except Exception as e:
            log.error(e)


def install():
    uninstall()
    # Allow time for uninstallation to finish.
    cmds.evalDeferred(deferred)
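A minimal sketch of triggering the install from a studio userSetup.py, assuming the module above is importable as colorbleed_menu (its real import path is not part of this diff):

    # userSetup.py (hypothetical) - build the menu once Maya's UI is idle
    import maya.utils


    def _build_colorbleed_menu():
        import colorbleed_menu  # assumption: the import name of the module above
        colorbleed_menu.install()


    maya.utils.executeDeferred(_build_colorbleed_menu)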
@@ -1,177 +0,0 @@
from collections import defaultdict

from maya import cmds

import cbra.utils.maya.node_uuid as node_uuid
import cbra.lib

import pyblish.api


class CollectInstancePerItem(pyblish.api.ContextPlugin):
    """Collect instances from the Maya scene and break them down per item id.

    An instance is identified by having an _INST suffix
    and a .family user-defined attribute.

    All other user-defined attributes of the object set
    are accessible within each instance's data.

    This collector breaks each instance down per Item member it contains:
    using the IDs on the nodes in the instance, it splits the instance
    into separate instances for each unique "item" id it finds.

    Note:
        - Only breaks down based on children members and ignores parent members.
        - Discards members without IDs.

    """

    order = pyblish.api.CollectorOrder + 0.1
    hosts = ["maya"]
    label = "Instance per Item"

    _include_families = ["colorbleed.look"]

    def process(self, context):

        invalid = list()

        for objset in cmds.ls("*_SET",
                              objectsOnly=True,
                              type='objectSet',
                              long=True,
                              recursive=True):  # Include namespace

            try:
                family = cmds.getAttr("{}.family".format(objset))
            except ValueError:
                self.log.error("Found %s, but it has no family." % objset)
                continue

            if family not in self._include_families:
                continue

            # ignore referenced sets
            if cmds.referenceQuery(objset, isNodeReferenced=True):
                continue

            instances = self.build_instances(context, objset)
            if not instances:

                # Log a special error message when the objectSet is
                # completely empty (has no members) to clarify to artists
                # the root of their problem.
                if not cmds.sets(objset, query=True):
                    self.log.error("Instance objectSet has no members: "
                                   "{}".format(objset))

                self.log.error("No instances retrieved from objectSet: "
                               "{}".format(objset))
                invalid.append(objset)

        if invalid:
            raise RuntimeError("Invalid instances: {}".format(invalid))

        # Sort context based on family
        context[:] = sorted(
            context, key=lambda instance: instance.data("family"))

    def build_instances(self, context, objset):
        """Build the instances for a single instance objectSet

        Returns:
            list: The constructed instances from the objectSet.

        """

        self.log.info("Collecting: %s" % objset)

        short_name = objset.rsplit("|", 1)[-1].rsplit(":", 1)[-1]

        # Default data
        default_data = {"name": short_name[:-5],
                        "subset": "default"}

        # Get user data from user defined attributes
        user_data = dict()
        for attr in cmds.listAttr(objset, userDefined=True):
            try:
                value = cmds.getAttr("{}.{}".format(objset, attr))
                user_data[attr] = value
            except RuntimeError:
                continue

        # Maintain nested object sets
        members = cmds.sets(objset, query=True)
        members = cmds.ls(members, long=True)

        children = cmds.listRelatives(members,
                                      allDescendents=True,
                                      fullPath=True) or []

        # Exclude intermediate objects
        children = cmds.ls(children, noIntermediate=True, long=True)

        nodes = members + children
        nodes = list(set(nodes))

        # Group nodes using ids to an Item
        nodes_id = node_uuid.build_cache(nodes, include_without_ids=True)

        # Log warning for nodes without ids
        if None in nodes_id:
            self.log.warning("Skipping nodes without ids: "
                             "{}".format(nodes_id[None]))

        # ignore nodes without ids
        context.data["instancePerItemNodesWithoutId"] = nodes_id.pop(None,
                                                                     None)

        item_groups = defaultdict(list)

        for id, nodes in nodes_id.iteritems():
            item_id = id.rsplit(":", 1)[0]
            item_groups[item_id].extend(nodes)

        instances = list()
        for item_id, item_nodes in item_groups.iteritems():

            ctx = node_uuid.parse_id("{}:fake_node_uuid".format(item_id))

            # Use itemPath to parse a full-blown context using the official lib
            ctx = cbra.lib.parse_context(ctx['itemPath'])

            item = ctx.get('item', None)
            if item is None:
                self.log.info("Unparsed item id: {}".format(item_id))
                self.log.error("Item can't be parsed and seems to be "
                               "non-existent. Was an asset renamed? Or is "
                               "your project set incorrectly?")
                raise RuntimeError("Item not parsed. See log for description.")

            instance = context.create_instance(objset)

            # Set the related members
            instance[:] = item_nodes
            instance.data['setMembers'] = item_nodes

            # Set defaults and user data
            instance.data.update(default_data.copy())
            instance.data.update(user_data.copy())

            # Override the label to be clear
            name = instance.data['name']
            instance.data['label'] = "{0} ({1})".format(name, item)

            # Store that the instance is collected per item
            instance.data['_instancePerItem'] = True
            instance.data['_itemContext'] = ctx

            assert "family" in instance.data, "No family data in instance"
            assert "name" in instance.data, ("No objectSet name data "
                                             "in instance")

            instances.append(instance)

        return instances
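The per-item grouping at the heart of build_instances is plain dictionary work; here is a standalone sketch with stand-in ids (real ids come from cbra's node_uuid cache):

    from collections import defaultdict

    # fake "item:node" ids mapped to node paths (hypothetical values)
    nodes_id = {
        "itemA:uuid1": ["|root|meshA1"],
        "itemA:uuid2": ["|root|meshA2"],
        "itemB:uuid3": ["|root|meshB1"],
    }

    item_groups = defaultdict(list)
    for node_id, nodes in nodes_id.items():
        item_id = node_id.rsplit(":", 1)[0]  # keep only the item part
        item_groups[item_id].extend(nodes)

    assert sorted(item_groups) == ["itemA", "itemB"]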
@@ -1,156 +0,0 @@
import os
import re
import glob

import pyseq

import pyblish.api

from maya import cmds


class SelectYetiCachesAction(pyblish.api.Action):
    """Select the nodes related to the collected Yeti caches"""

    label = "Select yeti nodes"
    on = "succeeded"  # This action is only available when the plug-in succeeded
    icon = "search"  # Icon from Font Awesome

    def process(self, context, plugin):

        self.log.info("Finding Yeti nodes..")

        # Get the processed instances
        instances = []
        for result in context.data["results"]:
            instance = result["instance"]
            if instance is None:
                continue

            instances.append(instance)

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(instances, plugin)

        # Get the Yeti nodes from the instances
        nodes = []
        for instance in instances:
            yeti_nodes = instance.data['yetiCaches'].keys()
            nodes.extend(yeti_nodes)

        # Ensure unique
        nodes = list(set(nodes))

        if nodes:
            self.log.info("Selecting nodes: %s" % ", ".join(nodes))
            cmds.select(nodes, r=True, noExpand=True)
        else:
            self.log.info("No nodes found.")
            cmds.select(clear=True)


def get_sequence(filename, pattern="%04d"):
    """Get pyseq sequences from a filename pattern.

    Supports negative frame ranges (e.g. -001, 0000, 0001 or
    -0001, 0000, 0001).

    Arguments:
        filename (str): The full path to a filename containing the given
            pattern.
        pattern (str): The pattern to swap with the variable frame number.

    Returns:
        list: The pyseq.Sequence file sequences found for the pattern.

    """

    glob_pattern = filename.replace(pattern, "*")

    escaped = re.escape(filename)
    re_pattern = escaped.replace(pattern, "-?[0-9]+")

    files = glob.glob(glob_pattern)
    files = [str(f) for f in files if re.match(re_pattern, f)]

    return pyseq.get_sequences(files)
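

# Usage sketch for get_sequence, assuming frames like /caches/fur.0001.fur
# through /caches/fur.0100.fur exist on disk (hypothetical paths):
#
#     seqs = get_sequence("/caches/fur.%04d.fur")
#     for seq in seqs:
#         # pyseq format directives: %h head, %p padding, %t tail, %r range
#         print(seq.format("%h%p%t %r"))  # -> "fur.%04d.fur 1-100"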


class CollectYetiCaches(pyblish.api.InstancePlugin):
    """Collect used Yeti caches.

    Collects the file sequences from pgYetiMaya.cacheFileName

    """

    order = pyblish.api.CollectorOrder + 0.495
    label = 'Yeti Caches'
    families = ["colorbleed.groom"]
    actions = [SelectYetiCachesAction]

    TYPES = {"pgYetiMaya": "cacheFileName"}

    def process(self, instance):

        # Get the Yeti nodes from the set members
        members = instance.data("setMembers")
        members = cmds.ls(members, dag=True, shapes=True, type="pgYetiMaya",
                          noIntermediate=True, long=True)
        if not members:
            raise RuntimeError("Instance appears to be empty (no members)")

        # Collect only those cache frames that are required.
        # If handles are required they are assumed to already be included
        # in the start and end frames (e.g. by a frame handle collector).
        start_frame = instance.data("startFrame")
        end_frame = instance.data("endFrame")
        required = set(range(int(start_frame), int(end_frame) + 1))

        history = cmds.listHistory(members) or []

        resources = instance.data.get("resources", [])
        yeti_caches = dict()

        for node_type, attr in self.TYPES.iteritems():
            for node in cmds.ls(history, type=node_type, long=True):

                attribute = "{0}.{1}".format(node, attr)

                # Source
                source = cmds.getAttr(attribute)
                if not source:
                    self.log.error("Node does not have a file set: "
                                   "{0}".format(node))
                    continue

                # Collect the source as an expanded path because that's also
                # how the attribute must be 'set' for Yeti nodes.
                source = os.path.realpath(cmds.workspace(expandName=source))

                # Collect the frames we need from the sequence
                sequences = get_sequence(source)
                files = list()
                for sequence in sequences:
                    for index, frame in enumerate(sequence.frames()):
                        if frame not in required:
                            continue

                        item = sequence[index]
                        files.append(item.path)

                # Define the resource
                resource = {"tags": ["maya", "yeti", "attribute"],
                            "node": node,
                            "attribute": attribute,
                            "source": source,  # required for resources
                            "files": files,  # required for resources
                            "subfolder": "caches"  # optional for resources
                            }

                resources.append(resource)

                # For validations
                yeti_caches[node] = {"attribute": attribute,
                                     "source": source,
                                     "sequences": sequences}

        # Store data on instance
        instance.data['yetiCaches'] = yeti_caches
        instance.data['resources'] = resources
@@ -1,81 +0,0 @@
import json

from maya import cmds

import avalon.maya
import colorbleed.api

import cb.utils.maya.context as context
import cbra.utils.maya.layout as layout


def get_upstream_hierarchy_fast(nodes):
    """Passed in nodes must be long names!"""

    matched = set()
    parents = []

    for node in nodes:
        hierarchy = node.split("|")
        num = len(hierarchy)
        for x in range(1, num - 1):
            parent = "|".join(hierarchy[:num - x])
            if parent in parents:
                break
            else:
                parents.append(parent)
                matched.add(parent)

    return parents
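# Worked example (hypothetical path): for the single long name
# "|grp|child|mesh" the loop walks up the hierarchy and returns
# ["|grp|child", "|grp"], i.e. every parent path except the node itself.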


class ExtractLayout(colorbleed.api.Extractor):
    """Extract layout as both gpuCache and Alembic"""

    label = "Layout (gpuCache & alembic)"
    hosts = ["maya"]
    families = ["colorbleed.layout"]

    def process(self, instance):

        # Define extract output file path
        dir_path = self.staging_dir(instance)

        start = instance.data.get("startFrame", 1)
        end = instance.data.get("endFrame", 1)
        step = instance.data.get("step", 1.0)
        placeholder = instance.data.get("placeholder", False)
        write_color_sets = instance.data.get("writeColorSets", False)
        renderable_only = instance.data.get("renderableOnly", False)
        visible_only = instance.data.get("visibleOnly", False)

        layers = instance.data.get("animLayersActive", None)
        if layers:
            layers = json.loads(layers)
            self.log.info("Publishing with animLayers active: "
                          "{0}".format(layers))

        # Perform extraction
        self.log.info("Performing extraction..")
        with avalon.maya.maintained_selection():

            # Get children hierarchy
            nodes = instance.data['setMembers']
            cmds.select(nodes, r=True, hierarchy=True)
            hierarchy = cmds.ls(selection=True, long=True)

            with context.evaluation("off"):
                with context.no_refresh():
                    with context.active_anim_layers(layers):
                        layout.extract_layout(hierarchy,
                                              dir_path,
                                              start=start,
                                              end=end,
                                              step=step,
                                              placeholder=placeholder,
                                              write_color_sets=write_color_sets,
                                              renderable_only=renderable_only,
                                              visible_only=visible_only)

        self.log.info("Extracted instance '{0}' to: {1}".format(
            instance.name, dir_path))
@@ -1,91 +0,0 @@
import os

import maya.cmds as cmds

import pyblish.api
import colorbleed.api

import cbra.lib
from cb.utils.python.decorators import memorize


def isclose(a, b, rel_tol=1e-9, abs_tol=0.0):
    return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)
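# For example, a gpuCache animOffset of 1.45 fails isclose(1.45, round(1.45))
# and gets flagged as a sub-frame offset below, while an offset of 2.0 (or
# one within floating-point noise of it) passes.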


@memorize
def is_published_path(path):
    """Return whether path is from a published file"""

    # Quick check (optimization) without going through the folder
    # structure
    if cbra.lib.DIR_PUBLISH.lower() not in path.lower():
        return False

    try:
        context = cbra.lib.parse_context(path)
    except RuntimeError:
        context = dict()

    return all([context.get("family", None),
                context.get("subset", None),
                context.get("version", None)])


class ValidateLayoutNodes(pyblish.api.InstancePlugin):
    """Validate that layout nodes adhere to certain rules.

    Gpu caches in a layout may not have sub-frame offsets, i.e. offsets with
    a value after the decimal point (e.g. 1.45).

    Gpu caches loaded in a layout MUST come from a published source that has
    a family and version.

    """

    order = colorbleed.api.ValidateContentsOrder
    label = 'Layout Nodes'
    families = ['colorbleed.layout']
    actions = [colorbleed.api.SelectInvalidAction]

    @classmethod
    def get_invalid(cls, instance):

        caches = cmds.ls(instance, type="gpuCache", long=True)

        # Validate sub-frame offsets
        invalid_offsets = list()
        for cache in caches:

            offset = cmds.getAttr("{}.animOffset".format(cache))
            if not isclose(offset, round(offset)):
                cls.log.warning("Invalid sub-frame offset on: %s" % cache)
                invalid_offsets.append(cache)

        # Validate gpuCache paths are from published files
        invalid_paths = list()
        for cache in caches:
            path = cmds.getAttr("{}.cacheFileName".format(cache))
            path = os.path.normpath(path)
            if not is_published_path(path):
                cls.log.warning("GpuCache path not from published file: "
                                "{0} -> {1}".format(cache, path))
                invalid_paths.append(cache)

        invalid = invalid_offsets + invalid_paths

        return invalid

    def process(self, instance):

        # Clear the cache only once per publish, so we store a value on
        # the context with the first instance.
        name = self.__class__.__name__
        key = "_plugin_{0}_processed".format(name)
        if not instance.context.data.get(key, False):
            is_published_path.cache.clear()
            instance.context.data[key] = True

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Invalid nodes found: {0}".format(invalid))
@@ -1,83 +0,0 @@
import pyblish.api
import colorbleed.api

import cbra.utils.maya.node_uuid as id_utils
import cbra.lib


class ValidateRelatedNodeIds(pyblish.api.InstancePlugin):
    """Validate nodes have related colorbleed ids.

    An ID is 'related' if it's built in the current Item.

    Note that this doesn't ensure it's from the current Task. An ID created
    from `lookdev` has the same relation to the Item as one coming from other
    tasks, like `rigging` or `modeling`.

    """

    order = colorbleed.api.ValidatePipelineOrder
    families = ['colorbleed.model']
    hosts = ['maya']
    label = 'Related Id Attributes'
    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]

    @classmethod
    def get_invalid(cls, instance):
        """Return the member nodes that are invalid"""

        context = instance.context
        current_file = context.data.get('currentFile', None)
        if not current_file:
            raise RuntimeError("No current file information: "
                               "{0}".format(current_file))

        try:
            context = cbra.lib.parse_context(current_file)
        except RuntimeError as e:
            cls.log.error("Can't generate UUIDs because scene isn't "
                          "in the new-style pipeline: "
                          "{0}".format(current_file))
            raise e

        def to_item(id):
            """Split the item id part from a node id"""
            return id.rsplit(":", 1)[0]

        # Generate a fake id in the current context to retrieve the item
        # id prefix that should match the ids on the nodes
        fake_node = "__node__"
        ids = id_utils.generate_ids(context, [fake_node])
        id = ids[fake_node]
        item_prefix = to_item(id)
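        # Example with a hypothetical id format: a node id such as
        # "assets/hero:8f3c91" would have the item prefix "assets/hero";
        # members whose ids start with a different prefix belong to another
        # Item and are reported below.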

        # Collect members whose id doesn't match the current item's prefix
        invalid = list()
        invalid_items = set()
        for member in instance:
            member_id = id_utils.get_id(member)

            # skip nodes without ids
            if not member_id:
                continue

            if not member_id.startswith(item_prefix):
                invalid.append(member)
                invalid_items.add(to_item(member_id))

        # Log invalid item ids
        if invalid_items:
            for item_id in sorted(invalid_items):
                cls.log.warning("Found invalid item id: {0}".format(item_id))

        return invalid

    def process(self, instance):
        """Process all meshes"""

        # Ensure all node ids are related to the current item
        invalid = self.get_invalid(instance)

        if invalid:
            raise RuntimeError("Nodes found with non-related "
                               "asset IDs: {0}".format(invalid))
@@ -1,167 +0,0 @@
import os
from collections import defaultdict

import pyblish.api
import colorbleed.api

import cbra.lib
from cbra.utils.maya.abc import get_alembic_ids
from cbra.utils.maya.node_uuid import get_id


def get_subset_path(context):
    return os.path.join(context['itemPath'],
                        cbra.lib.DIR_PUBLISH,
                        context['family'],
                        context['subset'])


class ValidateUniqueIdsInItem(pyblish.api.InstancePlugin):
    """Check whether IDs are unique across other subsets.

    This ensures a model to be published can't have ids
    which are already present in another subset. For example
    the "default" model can't have ids present in the "high"
    subset.

    Note:
        This will also invalidate the instance if it contains
        nodes that are present in another instance in the scene.
        So ensure the instance you're publishing actually has
        the correct set members.

    """

    order = colorbleed.api.ValidateMeshOrder
    families = ['colorbleed.model']
    hosts = ['maya']
    label = 'Unique Ids in Item'
    actions = [colorbleed.api.SelectInvalidAction]
    optional = True

    @classmethod
    def iter_invalid(cls, instance):

        verbose = instance.data.get("verbose", False)

        def _get_instance_ids(instance):
            """Collect ids in an instance"""
            nodes_per_id = defaultdict(list)
            for node in instance:
                node_id = get_id(node)
                if node_id:
                    nodes_per_id[node_id].append(node)
            return nodes_per_id

        nodes_per_id = _get_instance_ids(instance)
        if not nodes_per_id:
            return

        ids_lookup = set(nodes_per_id.keys())

        instance_context = instance.data["instanceContext"]
        instance_subset = instance.data['subset']

        assert instance_context, "Instance must have 'instanceContext' data"
        assert instance_subset, "Instance must have 'subset' data"

        subsets_checked = set()
        subsets_checked.add(instance_subset)  # we can skip this subset

        # Compare with all other *currently publishing instances*
        # of family 'model' for this item
        for other_instance in instance.context:
            if other_instance is instance:
                continue

            if other_instance.data['subset'] == instance_subset:
                cls.log.error("Another instance has the same subset? "
                              "This should never happen.")

            if other_instance.data['family'] != "model":
                continue

            if other_instance.data['instanceContext']['item'] != \
                    instance_context['item']:
                cls.log.error("Also publishing a model for another item? "
                              "This should never happen.")
                continue

            other_ids = _get_instance_ids(other_instance).keys()

            # Perform comparison
            intersection = ids_lookup.intersection(other_ids)
            if intersection:
                for node_id in intersection:
                    nodes = nodes_per_id[node_id]
                    for node in nodes:
                        yield node

                # Those that are invalid don't need to be checked again
                ids_lookup.difference_update(other_ids)

                if not ids_lookup:
                    # Once there are no ids left to check we can
                    # return early
                    return

            subsets_checked.add(other_instance.data['subset'])

        # Compare with all previously *published instances*
        # of family 'model' for this item
        ctx = instance_context.copy()
        ctx['family'] = "model"

        published_subsets = cbra.lib.list_subsets(ctx)
        published_subsets = set(x for x in published_subsets if
                                x != instance_subset)

        for published_subset in published_subsets:
            ctx['subset'] = published_subset
            ctx['subsetPath'] = get_subset_path(ctx)

            versions = cbra.lib.list_versions(ctx)
            version = cbra.lib.find_highest_version(versions)
            if not version:
                cls.log.debug("No published version for "
                              "'model': {0}".format(published_subset))
                continue

            ctx['currentVersion'] = version
            publish_abc = cbra.lib.get_filepath(ctx) + ".abc"

            if not os.path.exists(publish_abc):
                cls.log.error("Published file to compare with does not exist: "
                              "{0}".format(publish_abc))
                continue

            if verbose:
                cls.log.debug("Comparing with: {0}".format(publish_abc))

            abc_ids = set(get_alembic_ids(publish_abc).values())

            # Perform comparison
            intersection = ids_lookup.intersection(abc_ids)
            if intersection:
                for node_id in intersection:
                    nodes = nodes_per_id[node_id]
                    for node in nodes:
                        yield node

                # Those that are invalid don't need to be checked again
                ids_lookup.difference_update(abc_ids)

                if not ids_lookup:
                    # Once there are no ids left to check we can
                    # return early
                    return

    @classmethod
    def get_invalid(cls, instance):
        return list(cls.iter_invalid(instance))

    def process(self, instance):
        """Process all meshes"""
        if any(self.iter_invalid(instance)):
            raise RuntimeError("Invalid nodes found in {0}".format(instance))
@@ -1,85 +0,0 @@
import os
import shutil

import cquery

import pyblish_cb.lib
import colorbleed.api


class IntegrateColorbleedAssets(colorbleed.api.Integrator):
    """Name and position extracted files on disk for instances.

    The files are transferred from the `extractDir` to the
    computed `integrationDir` and are renamed as:
        - "{item}_{family}_{subsetName}_{version}.{ext}"

    Assumptions:
        - Each extracted instance is 1 file (no directories)

    """

    label = "Asset"
    families = ["colorbleed.model", "colorbleed.rig", "colorbleed.pointcache",
                "colorbleed.proxy", "colorbleed.layout", "colorbleed.look",
                "colorbleed.vrmeshReplace", "colorbleed.review",
                "colorbleed.instancer", "colorbleed.camera",
                "colorbleed.mayaAscii",
                "colorbleed.furYeti"]

    def process(self, instance):
        super(IntegrateColorbleedAssets, self).process(instance)

        self.log.info("Integrating {0}..".format(instance))

        integration = pyblish_cb.lib.compute_integration(instance)

        # Store reference for upcoming plug-ins
        instance.data["integrationDir"] = integration['path']
        instance.data["integrationVersion"] = integration['versionNum']

        path = integration['path']
        data = integration.copy()

        try:
            if not os.path.exists(path):
                os.makedirs(path)

            self.log.info("Moving files to %s" % path)

            tmp = instance.data["extractDir"]
            for src in (os.path.join(tmp, f) for f in os.listdir(tmp)):

                self.log.debug("Integrating %s" % src)

                # Source must be a file
                if not os.path.isfile(src):
                    self.log.error("Source is not a file: {0}".format(src))
                    continue

                # TODO(marcus): Consider files without extension
                data["ext"] = src.split(".", 1)[-1]
                dst = os.path.join(path, "{item}_"
                                         "{family}_"
                                         "{subsetName}_"
                                         "{version}.{ext}".format(**data))
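                # e.g. with item "hero", family "model", subset "default",
                # version "v001" and ext "abc" (hypothetical values) dst
                # becomes <integrationDir>/hero_model_default_v001.abc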

                # Copy
                self.log.info("\"%s\" -> \"%s\"" % (src, dst))
                shutil.copyfile(src, dst)

            try:
                # Tag the version folder (mirrors the .Subset tagging below;
                # the tag call itself was missing here and is assumed)
                cquery.tag(path, ".Version")
                self.log.debug("Tagged %s with .Version" % path)
            except cquery.TagExists:
                pass

            try:
                subset_path = os.path.dirname(path)
                cquery.tag(subset_path, ".Subset")
                self.log.debug("Tagged %s with .Subset" % subset_path)
            except cquery.TagExists:
                pass

        except OSError as e:
            # If, for whatever reason, this instance did not get written.
            instance.data.pop("integrationDir")
            raise e

        except Exception as e:
            raise Exception("An unknown error occurred: %s" % e)