Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-26 22:02:15 +01:00)

Merge pull request #1 from Colorbleed/master
UP to date with colorbleed

Commit 4ac5d47209
104 changed files with 4330 additions and 2415 deletions
@ -1,5 +1,6 @@
|
|||
# absolute_import is needed to counter the `module has no cmds error` in Maya
|
||||
from __future__ import absolute_import
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
||||
|
|
@ -41,7 +42,7 @@ def get_errored_plugins_from_data(context):
|
|||
class RepairAction(pyblish.api.Action):
|
||||
"""Repairs the action
|
||||
|
||||
To process the repairing this requires a static `repair(instance)` method
|
||||
To process the repairing this requires a static `repair(instance)` method
|
||||
is available on the plugin.
|
||||
|
||||
"""
|
||||
|
|
@ -67,7 +68,7 @@ class RepairAction(pyblish.api.Action):
|
|||
class RepairContextAction(pyblish.api.Action):
|
||||
"""Repairs the action
|
||||
|
||||
To process the repairing this requires a static `repair(instance)` method
|
||||
To process the repairing this requires a static `repair(instance)` method
|
||||
is available on the plugin.
|
||||
|
||||
"""
|
||||
|
|
@ -89,114 +90,3 @@ class RepairContextAction(pyblish.api.Action):
|
|||
plugin.repair()
|
||||
|
||||
|
||||
class SelectInvalidAction(pyblish.api.Action):
|
||||
"""Select invalid nodes in Maya when plug-in failed.
|
||||
|
||||
To retrieve the invalid nodes this assumes a static `get_invalid()`
|
||||
method is available on the plugin.
|
||||
|
||||
"""
|
||||
label = "Select invalid"
|
||||
on = "failed" # This action is only available on a failed plug-in
|
||||
icon = "search" # Icon from Awesome Icon
|
||||
|
||||
def process(self, context, plugin):
|
||||
|
||||
try:
|
||||
from maya import cmds
|
||||
except ImportError:
|
||||
raise ImportError("Current host is not Maya")
|
||||
|
||||
errored_instances = get_errored_instances_from_context(context)
|
||||
|
||||
# Apply pyblish.logic to get the instances for the plug-in
|
||||
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
|
||||
|
||||
# Get the invalid nodes for the plug-ins
|
||||
self.log.info("Finding invalid nodes..")
|
||||
invalid = list()
|
||||
for instance in instances:
|
||||
invalid_nodes = plugin.get_invalid(instance)
|
||||
if invalid_nodes:
|
||||
if isinstance(invalid_nodes, (list, tuple)):
|
||||
invalid.extend(invalid_nodes)
|
||||
else:
|
||||
self.log.warning("Plug-in returned to be invalid, "
|
||||
"but has no selectable nodes.")
|
||||
|
||||
# Ensure unique (process each node only once)
|
||||
invalid = list(set(invalid))
|
||||
|
||||
if invalid:
|
||||
self.log.info("Selecting invalid nodes: %s" % ", ".join(invalid))
|
||||
cmds.select(invalid, replace=True, noExpand=True)
|
||||
else:
|
||||
self.log.info("No invalid nodes found.")
|
||||
cmds.select(deselect=True)
|
||||
|
||||
|
||||
class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
|
||||
"""Generate UUIDs on the invalid nodes in the instance.
|
||||
|
||||
Invalid nodes are those returned by the plugin's `get_invalid` method.
|
||||
As such it is the plug-in's responsibility to ensure the nodes that
|
||||
receive new UUIDs are actually invalid.
|
||||
|
||||
Requires:
|
||||
- instance.data["asset"]
|
||||
|
||||
"""
|
||||
|
||||
label = "Regenerate UUIDs"
|
||||
on = "failed" # This action is only available on a failed plug-in
|
||||
icon = "wrench" # Icon from Awesome Icon
|
||||
|
||||
def process(self, context, plugin):
|
||||
|
||||
self.log.info("Finding bad nodes..")
|
||||
|
||||
# Get the errored instances
|
||||
errored_instances = []
|
||||
for result in context.data["results"]:
|
||||
if result["error"] is not None and result["instance"] is not None:
|
||||
if result["error"]:
|
||||
instance = result["instance"]
|
||||
errored_instances.append(instance)
|
||||
|
||||
# Apply pyblish logic to get the instances for the plug-in
|
||||
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
|
||||
|
||||
# Get the nodes from the all instances that ran through this plug-in
|
||||
all_invalid = []
|
||||
for instance in instances:
|
||||
invalid = plugin.get_invalid(instance)
|
||||
if invalid:
|
||||
|
||||
self.log.info("Fixing instance {}".format(instance.name))
|
||||
self._update_id_attribute(instance, invalid)
|
||||
|
||||
all_invalid.extend(invalid)
|
||||
|
||||
if not all_invalid:
|
||||
self.log.info("No invalid nodes found.")
|
||||
return
|
||||
|
||||
all_invalid = list(set(all_invalid))
|
||||
self.log.info("Generated ids on nodes: {0}".format(all_invalid))
|
||||
|
||||
def _update_id_attribute(self, instance, nodes):
|
||||
"""Delete the id attribute
|
||||
|
||||
Args:
|
||||
instance: The instance we're fixing for
|
||||
nodes (list): all nodes to regenerate ids on
|
||||
"""
|
||||
|
||||
import colorbleed.maya.lib as lib
|
||||
import avalon.io as io
|
||||
|
||||
asset = instance.data['asset']
|
||||
asset_id = io.find_one({"name": asset, "type": "asset"},
|
||||
projection={"_id": True})['_id']
|
||||
for node, _id in lib.generate_ids(nodes, asset_id=asset_id):
|
||||
lib.set_id(node, _id, overwrite=True)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
from collections import OrderedDict
|
||||
|
||||
from .plugin import (
|
||||
|
||||
Extractor,
|
||||
|
|
@ -12,15 +10,12 @@ from .plugin import (
|
|||
|
||||
# temporary fix, might
|
||||
from .action import (
|
||||
|
||||
get_errored_instances_from_context,
|
||||
SelectInvalidAction,
|
||||
GenerateUUIDsOnInvalidAction,
|
||||
RepairAction,
|
||||
RepairContextAction
|
||||
)
|
||||
|
||||
all = [
|
||||
__all__ = [
|
||||
# plugin classes
|
||||
"Extractor",
|
||||
# ordering
|
||||
|
|
@ -30,7 +25,5 @@ all = [
|
|||
"ValidateMeshOrder",
|
||||
# action
|
||||
"get_errored_instances_from_context",
|
||||
"SelectInvalidAction",
|
||||
"GenerateUUIDsOnInvalidAction",
|
||||
"RepairAction"
|
||||
]
|
||||
|
|
|
|||
|
|
@ -29,8 +29,6 @@ log = logging.getLogger("colorbleed.houdini")
|
|||
|
||||
def install():
|
||||
|
||||
# Set
|
||||
|
||||
pyblish.register_plugin_path(PUBLISH_PATH)
|
||||
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
|
||||
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
|
||||
|
|
@ -46,11 +44,11 @@ def install():
|
|||
avalon.data["familiesStateToggled"] = ["colorbleed.imagesequence"]
|
||||
|
||||
|
||||
def on_init(_):
|
||||
def on_init(*args):
|
||||
houdini.on_houdini_initialize()
|
||||
|
||||
|
||||
def on_save(_):
|
||||
def on_save(*args):
|
||||
|
||||
avalon.logger.info("Running callback on save..")
|
||||
|
||||
|
|
@ -61,20 +59,19 @@ def on_save(_):
|
|||
lib.set_id(node, new_id, overwrite=False)
|
||||
|
||||
|
||||
def on_open():
|
||||
def on_open(*args):
|
||||
|
||||
avalon.logger.info("Running callback on open..")
|
||||
|
||||
update_task_from_path(hou.hipFile.path())
|
||||
|
||||
if any_outdated():
|
||||
from avalon.vendor.Qt import QtWidgets
|
||||
from ..widgets import popup
|
||||
|
||||
log.warning("Scene has outdated content.")
|
||||
|
||||
# Find maya main window
|
||||
top_level_widgets = {w.objectName(): w for w in
|
||||
QtWidgets.QApplication.topLevelWidgets()}
|
||||
parent = top_level_widgets.get("MayaWindow", None)
|
||||
# Get main window
|
||||
parent = hou.ui.mainQtWindow()
|
||||
|
||||
if parent is None:
|
||||
log.info("Skipping outdated content pop-up "
|
||||
|
|
@ -92,8 +89,3 @@ def on_open():
|
|||
"your Maya scene.")
|
||||
dialog.on_show.connect(_on_show_inventory)
|
||||
dialog.show()
|
||||
|
||||
|
||||
def on_task_changed(*args):
|
||||
"""Wrapped function of app initialize and maya's on task changed"""
|
||||
pass
|
||||
|
|
@ -95,6 +95,88 @@ def get_additional_data(container):
|
|||
return container
|
||||
|
||||
|
||||
def set_parameter_callback(node, parameter, language, callback):
|
||||
"""Link a callback to a parameter of a node
|
||||
|
||||
Args:
|
||||
node(hou.Node): instance of the node
|
||||
parameter(str): name of the parameter
|
||||
language(str): name of the language, e.g.: python
|
||||
callback(str): command which needs to be triggered
|
||||
|
||||
Returns:
|
||||
None
|
||||
|
||||
"""
|
||||
|
||||
template_grp = node.parmTemplateGroup()
|
||||
template = template_grp.find(parameter)
|
||||
if not template:
|
||||
return
|
||||
|
||||
script_language = (hou.scriptLanguage.Python if language == "python" else
|
||||
hou.scriptLanguage.Hscript)
|
||||
|
||||
template.setScriptCallbackLanguage(script_language)
|
||||
template.setScriptCallback(callback)
|
||||
|
||||
template.setTags({"script_callback": callback,
|
||||
"script_callback_language": language.lower()})
|
||||
|
||||
# Replace the existing template with the adjusted one
|
||||
template_grp.replace(parameter, template)
|
||||
|
||||
node.setParmTemplateGroup(template_grp)
|
||||
|
||||
|
||||
def set_parameter_callbacks(node, parameter_callbacks):
|
||||
"""Set callbacks for multiple parameters of a node
|
||||
|
||||
Args:
|
||||
node(hou.Node): instance of a hou.Node
|
||||
parameter_callbacks(dict): collection of parameter and callback data
|
||||
example: {"active" :
|
||||
{"language": "python",
|
||||
"callback": "print('hello world)'"}
|
||||
}
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
for parameter, data in parameter_callbacks.items():
|
||||
language = data["language"]
|
||||
callback = data["callback"]
|
||||
|
||||
set_parameter_callback(node, parameter, language, callback)
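

For context, a minimal usage sketch of the two callback helpers above (illustrative only; the ROP path and the callback string are assumptions, not part of this changeset):

    import hou

    rop = hou.node("/out/my_alembic1")  # hypothetical ROP node
    set_parameter_callbacks(rop, {
        "active": {"language": "python",
                   "callback": "print('active toggled')"}
    })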
|
||||
|
||||
|
||||
def get_output_parameter(node):
|
||||
"""Return the render output parameter name of the given node
|
||||
|
||||
Example:
|
||||
root = hou.node("/obj")
|
||||
my_alembic_node = root.createNode("alembic")
|
||||
get_output_parameter(my_alembic_node)
|
||||
# Result: "output"
|
||||
|
||||
Args:
|
||||
node(hou.Node): node instance
|
||||
|
||||
Returns:
|
||||
hou.Parm
|
||||
|
||||
"""
|
||||
|
||||
node_type = node.type().name()
|
||||
if node_type == "geometry":
|
||||
return node.parm("sopoutput")
|
||||
|
||||
elif node_type == "alembic":
|
||||
return node.parm("filename")
|
||||
|
||||
else:
|
||||
raise TypeError("Node type '%s' not supported" % node_type)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def attribute_values(node, data):
|
||||
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import os
|
|||
import re
|
||||
import logging
|
||||
import importlib
|
||||
import itertools
|
||||
|
||||
from .vendor import pather
|
||||
from .vendor.pather.error import ParseError
|
||||
|
|
@ -12,6 +13,24 @@ import avalon.api
|
|||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def pairwise(iterable):
|
||||
"""s -> (s0,s1), (s2,s3), (s4, s5), ..."""
|
||||
a = iter(iterable)
|
||||
return itertools.izip(a, a)
|
||||
|
||||
|
||||
def grouper(iterable, n, fillvalue=None):
|
||||
"""Collect data into fixed-length chunks or blocks
|
||||
|
||||
Examples:
|
||||
grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
|
||||
|
||||
"""
|
||||
|
||||
args = [iter(iterable)] * n
|
||||
return itertools.izip_longest(fillvalue=fillvalue, *args)
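

Both helpers target Python 2 (itertools.izip / izip_longest). A short behaviour sketch with hypothetical values, not part of this changeset:

    # pairwise() walks a flat [dest1, src1, dest2, src2, ...] list two items
    # at a time, which is how lib.pairwise is used by the empty_sets context
    # manager elsewhere in this changeset.
    connections = ["setA.dagSetMembers[0]", "pCube1.instObjGroups[0]",
                   "setA.dagSetMembers[1]", "pSphere1.instObjGroups[0]"]
    for dest, src in pairwise(connections):
        print("%s <- %s" % (dest, src))

    # grouper() pads the last chunk with the fill value:
    # grouper("ABCDEFG", 3, "x") -> ("A","B","C"), ("D","E","F"), ("G","x","x")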
|
||||
|
||||
|
||||
def is_latest(representation):
|
||||
"""Return whether the representation is from latest version
|
||||
|
||||
|
|
@ -252,7 +271,7 @@ def collect_container_metadata(container):
|
|||
return hostlib.get_additional_data(container)
|
||||
|
||||
|
||||
def get_project_fps():
|
||||
def get_asset_fps():
|
||||
"""Returns project's FPS, if not found will return 25 by default
|
||||
|
||||
Returns:
|
||||
|
|
@ -260,10 +279,20 @@ def get_project_fps():
|
|||
|
||||
"""
|
||||
|
||||
data = get_project_data()
|
||||
fps = data.get("fps", 25.0)
|
||||
key = "fps"
|
||||
|
||||
return fps
|
||||
# FPS from asset data (if set)
|
||||
asset_data = get_asset_data()
|
||||
if key in asset_data:
|
||||
return asset_data[key]
|
||||
|
||||
# FPS from project data (if set)
|
||||
project_data = get_project_data()
|
||||
if key in project_data:
|
||||
return project_data[key]
|
||||
|
||||
# Fallback to 25 FPS
|
||||
return 25.0
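

The new lookup order is asset data first, then project data, then a hard 25.0 fallback. A minimal sketch of that chain with hypothetical values (not part of this changeset):

    asset_data = {"fps": 24.0}     # hypothetical per-asset override
    project_data = {"fps": 25.0}   # hypothetical project-wide setting
    fps = asset_data.get("fps", project_data.get("fps", 25.0))  # -> 24.0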
|
||||
|
||||
|
||||
def get_project_data():
|
||||
|
|
@ -298,7 +327,7 @@ def get_asset_data(asset=None):
|
|||
Returns:
|
||||
dict
|
||||
"""
|
||||
|
||||
|
||||
asset_name = asset or avalon.api.Session["AVALON_ASSET"]
|
||||
document = io.find_one({"name": asset_name,
|
||||
"type": "asset"})
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import weakref
|
|||
from maya import utils, cmds, mel
|
||||
|
||||
from avalon import api as avalon, pipeline, maya
|
||||
from avalon.maya.pipeline import IS_HEADLESS
|
||||
from pyblish import api as pyblish
|
||||
|
||||
from ..lib import (
|
||||
|
|
@ -34,12 +35,18 @@ def install():
|
|||
|
||||
log.info("Installing callbacks ... ")
|
||||
avalon.on("init", on_init)
|
||||
|
||||
# Callbacks below are not required for headless mode, the `init` however
|
||||
# is important to load referenced Alembics correctly at rendertime.
|
||||
if IS_HEADLESS:
|
||||
log.info("Running in headless mode, skipping Colorbleed Maya "
|
||||
"save/open/new callback installation..")
|
||||
return
|
||||
|
||||
avalon.on("save", on_save)
|
||||
avalon.on("open", on_open)
|
||||
|
||||
avalon.before("save", on_before_save)
|
||||
|
||||
avalon.on("new", on_new)
|
||||
avalon.before("save", on_before_save)
|
||||
|
||||
log.info("Overriding existing event 'taskChanged'")
|
||||
override_event("taskChanged", on_task_changed)
|
||||
|
|
@ -128,12 +135,13 @@ def on_open(_):
|
|||
from avalon.vendor.Qt import QtWidgets
|
||||
from ..widgets import popup
|
||||
|
||||
# Ensure scene's FPS is set to project config
|
||||
lib.validate_fps()
|
||||
|
||||
# Update current task for the current scene
|
||||
update_task_from_path(cmds.file(query=True, sceneName=True))
|
||||
|
||||
# Validate FPS after update_task_from_path to
|
||||
# ensure it is using correct FPS for the asset
|
||||
lib.validate_fps()
|
||||
|
||||
if any_outdated():
|
||||
log.warning("Scene has outdated content.")
|
||||
|
||||
|
|
|
|||
colorbleed/maya/action.py (new file, +128 lines)
|
|
@ -0,0 +1,128 @@
|
|||
# absolute_import is needed to counter the `module has no cmds error` in Maya
|
||||
from __future__ import absolute_import
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
||||
from ..action import get_errored_instances_from_context
|
||||
|
||||
|
||||
class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
|
||||
"""Generate UUIDs on the invalid nodes in the instance.
|
||||
|
||||
Invalid nodes are those returned by the plugin's `get_invalid` method.
|
||||
As such it is the plug-in's responsibility to ensure the nodes that
|
||||
receive new UUIDs are actually invalid.
|
||||
|
||||
Requires:
|
||||
- instance.data["asset"]
|
||||
|
||||
"""
|
||||
|
||||
label = "Regenerate UUIDs"
|
||||
on = "failed" # This action is only available on a failed plug-in
|
||||
icon = "wrench" # Icon from Awesome Icon
|
||||
|
||||
def process(self, context, plugin):
|
||||
|
||||
from maya import cmds
|
||||
|
||||
self.log.info("Finding bad nodes..")
|
||||
|
||||
errored_instances = get_errored_instances_from_context(context)
|
||||
|
||||
# Apply pyblish logic to get the instances for the plug-in
|
||||
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
|
||||
|
||||
# Get the nodes from the all instances that ran through this plug-in
|
||||
all_invalid = []
|
||||
for instance in instances:
|
||||
invalid = plugin.get_invalid(instance)
|
||||
|
||||
# Don't allow referenced nodes to get their ids regenerated to
|
||||
# avoid loaded content getting messed up with reference edits
|
||||
if invalid:
|
||||
referenced = {node for node in invalid if
|
||||
cmds.referenceQuery(node, isNodeReferenced=True)}
|
||||
if referenced:
|
||||
self.log.warning("Skipping UUID generation on referenced "
|
||||
"nodes: {}".format(list(referenced)))
|
||||
invalid = [node for node in invalid
|
||||
if node not in referenced]
|
||||
|
||||
if invalid:
|
||||
|
||||
self.log.info("Fixing instance {}".format(instance.name))
|
||||
self._update_id_attribute(instance, invalid)
|
||||
|
||||
all_invalid.extend(invalid)
|
||||
|
||||
if not all_invalid:
|
||||
self.log.info("No invalid nodes found.")
|
||||
return
|
||||
|
||||
all_invalid = list(set(all_invalid))
|
||||
self.log.info("Generated ids on nodes: {0}".format(all_invalid))
|
||||
|
||||
def _update_id_attribute(self, instance, nodes):
|
||||
"""Delete the id attribute
|
||||
|
||||
Args:
|
||||
instance: The instance we're fixing for
|
||||
nodes (list): all nodes to regenerate ids on
|
||||
"""
|
||||
|
||||
import colorbleed.maya.lib as lib
|
||||
import avalon.io as io
|
||||
|
||||
asset = instance.data['asset']
|
||||
asset_id = io.find_one({"name": asset, "type": "asset"},
|
||||
projection={"_id": True})['_id']
|
||||
for node, _id in lib.generate_ids(nodes, asset_id=asset_id):
|
||||
lib.set_id(node, _id, overwrite=True)
|
||||
|
||||
|
||||
class SelectInvalidAction(pyblish.api.Action):
|
||||
"""Select invalid nodes in Maya when plug-in failed.
|
||||
|
||||
To retrieve the invalid nodes this assumes a static `get_invalid()`
|
||||
method is available on the plugin.
|
||||
|
||||
"""
|
||||
label = "Select invalid"
|
||||
on = "failed" # This action is only available on a failed plug-in
|
||||
icon = "search" # Icon from Awesome Icon
|
||||
|
||||
def process(self, context, plugin):
|
||||
|
||||
try:
|
||||
from maya import cmds
|
||||
except ImportError:
|
||||
raise ImportError("Current host is not Maya")
|
||||
|
||||
errored_instances = get_errored_instances_from_context(context)
|
||||
|
||||
# Apply pyblish.logic to get the instances for the plug-in
|
||||
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
|
||||
|
||||
# Get the invalid nodes for the plug-ins
|
||||
self.log.info("Finding invalid nodes..")
|
||||
invalid = list()
|
||||
for instance in instances:
|
||||
invalid_nodes = plugin.get_invalid(instance)
|
||||
if invalid_nodes:
|
||||
if isinstance(invalid_nodes, (list, tuple)):
|
||||
invalid.extend(invalid_nodes)
|
||||
else:
|
||||
self.log.warning("Plug-in returned to be invalid, "
|
||||
"but has no selectable nodes.")
|
||||
|
||||
# Ensure unique (process each node only once)
|
||||
invalid = list(set(invalid))
|
||||
|
||||
if invalid:
|
||||
self.log.info("Selecting invalid nodes: %s" % ", ".join(invalid))
|
||||
cmds.select(invalid, replace=True, noExpand=True)
|
||||
else:
|
||||
self.log.info("No invalid nodes found.")
|
||||
cmds.select(deselect=True)
|
||||
|
|
@ -11,6 +11,7 @@ import contextlib
|
|||
from collections import OrderedDict, defaultdict
|
||||
|
||||
from maya import cmds, mel
|
||||
import maya.api.OpenMaya as om
|
||||
|
||||
from avalon import api, maya, io, pipeline
|
||||
from avalon.vendor.six import string_types
|
||||
|
|
@ -93,6 +94,11 @@ INT_FPS = {15, 24, 25, 30, 48, 50, 60, 44100, 48000}
|
|||
FLOAT_FPS = {23.976, 29.97, 47.952, 59.94}
|
||||
|
||||
|
||||
def _get_mel_global(name):
|
||||
"""Return the value of a mel global variable"""
|
||||
return mel.eval("$%s = $%s;" % (name, name))
|
||||
|
||||
|
||||
def matrix_equals(a, b, tolerance=1e-10):
|
||||
"""
|
||||
Compares two matrices with an imperfection tolerance
|
||||
|
|
@ -305,6 +311,33 @@ def attribute_values(attr_values):
|
|||
cmds.setAttr(attr, value)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def keytangent_default(in_tangent_type='auto',
|
||||
out_tangent_type='auto'):
|
||||
"""Set the default keyTangent for new keys during this context"""
|
||||
|
||||
original_itt = cmds.keyTangent(query=True, g=True, itt=True)[0]
|
||||
original_ott = cmds.keyTangent(query=True, g=True, ott=True)[0]
|
||||
cmds.keyTangent(g=True, itt=in_tangent_type)
|
||||
cmds.keyTangent(g=True, ott=out_tangent_type)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
cmds.keyTangent(g=True, itt=original_itt)
|
||||
cmds.keyTangent(g=True, ott=original_ott)
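

An illustrative use of this context manager (assumes a Maya scene with a 'pCube1' transform; not part of this changeset):

    # Keys created inside the context get 'auto' tangents; the global
    # default tangent types are restored on exit.
    with keytangent_default(in_tangent_type='auto', out_tangent_type='auto'):
        cmds.setKeyframe("pCube1", attribute="translateX", time=1, value=0.0)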
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def undo_chunk():
|
||||
"""Open a undo chunk during context."""
|
||||
|
||||
try:
|
||||
cmds.undoInfo(openChunk=True)
|
||||
yield
|
||||
finally:
|
||||
cmds.undoInfo(closeChunk=True)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def renderlayer(layer):
|
||||
"""Set the renderlayer during the context"""
|
||||
|
|
@ -338,6 +371,126 @@ def evaluation(mode="off"):
|
|||
cmds.evaluationManager(mode=original)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def no_refresh():
|
||||
"""Temporarily disables Maya's UI updates
|
||||
|
||||
Note:
|
||||
This only disables the main pane and will sometimes still
|
||||
trigger updates in torn off panels.
|
||||
|
||||
"""
|
||||
|
||||
pane = _get_mel_global('gMainPane')
|
||||
state = cmds.paneLayout(pane, query=True, manage=True)
|
||||
cmds.paneLayout(pane, edit=True, manage=False)
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
cmds.paneLayout(pane, edit=True, manage=state)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def empty_sets(sets, force=False):
|
||||
"""Remove all members of the sets during the context"""
|
||||
|
||||
assert isinstance(sets, (list, tuple))
|
||||
|
||||
original = dict()
|
||||
original_connections = []
|
||||
|
||||
# Store original state
|
||||
for obj_set in sets:
|
||||
members = cmds.sets(obj_set, query=True)
|
||||
original[obj_set] = members
|
||||
|
||||
try:
|
||||
for obj_set in sets:
|
||||
cmds.sets(clear=obj_set)
|
||||
if force:
|
||||
# Break all connections if force is enabled, this way we
|
||||
# prevent Maya from exporting any reference nodes which are
|
||||
# connected with placeHolder[x] attributes
|
||||
plug = "%s.dagSetMembers" % obj_set
|
||||
connections = cmds.listConnections(plug,
|
||||
source=True,
|
||||
destination=False,
|
||||
plugs=True,
|
||||
connections=True) or []
|
||||
original_connections.extend(connections)
|
||||
for dest, src in lib.pairwise(connections):
|
||||
cmds.disconnectAttr(src, dest)
|
||||
yield
|
||||
finally:
|
||||
|
||||
for dest, src in lib.pairwise(original_connections):
|
||||
cmds.connectAttr(src, dest)
|
||||
|
||||
# Restore original members
|
||||
for origin_set, members in original.iteritems():
|
||||
cmds.sets(members, forceElement=origin_set)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def renderlayer(layer):
|
||||
"""Set the renderlayer during the context
|
||||
|
||||
Arguments:
|
||||
layer (str): Name of layer to switch to.
|
||||
|
||||
"""
|
||||
|
||||
original = cmds.editRenderLayerGlobals(query=True,
|
||||
currentRenderLayer=True)
|
||||
|
||||
try:
|
||||
cmds.editRenderLayerGlobals(currentRenderLayer=layer)
|
||||
yield
|
||||
finally:
|
||||
cmds.editRenderLayerGlobals(currentRenderLayer=original)
|
||||
|
||||
|
||||
class delete_after(object):
|
||||
"""Context Manager that will delete collected nodes after exit.
|
||||
|
||||
This ensures that the nodes added to the context are deleted
|
||||
afterwards. This is useful if you want to ensure nodes are deleted
|
||||
even if an error is raised.
|
||||
|
||||
Examples:
|
||||
with delete_after() as delete_bin:
|
||||
cube = maya.cmds.polyCube()
|
||||
delete_bin.extend(cube)
|
||||
# cube exists
|
||||
# cube deleted
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, nodes=None):
|
||||
|
||||
self._nodes = list()
|
||||
|
||||
if nodes:
|
||||
self.extend(nodes)
|
||||
|
||||
def append(self, node):
|
||||
self._nodes.append(node)
|
||||
|
||||
def extend(self, nodes):
|
||||
self._nodes.extend(nodes)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._nodes)
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, type, value, traceback):
|
||||
if self._nodes:
|
||||
cmds.delete(self._nodes)
|
||||
|
||||
|
||||
def get_renderer(layer):
|
||||
with renderlayer(layer):
|
||||
return cmds.getAttr("defaultRenderGlobals.currentRenderer")
|
||||
|
|
@ -366,6 +519,161 @@ def no_undo(flush=False):
|
|||
cmds.undoInfo(**{keyword: original})
|
||||
|
||||
|
||||
def get_shader_assignments_from_shapes(shapes):
|
||||
"""Return the shape assignment per related shading engines.
|
||||
|
||||
Returns a dictionary where the keys are shadingGroups and the values are
|
||||
lists of assigned shapes or shape-components.
|
||||
|
||||
For the 'shapes' this will return a dictionary like:
|
||||
{
|
||||
"shadingEngineX": ["nodeX", "nodeY"],
|
||||
"shadingEngineY": ["nodeA", "nodeB"]
|
||||
}
|
||||
|
||||
Args:
|
||||
shapes (list): The shapes to collect the assignments for.
|
||||
|
||||
Returns:
|
||||
dict: The {shadingEngine: shapes} relationships
|
||||
|
||||
"""
|
||||
|
||||
shapes = cmds.ls(shapes,
|
||||
long=True,
|
||||
selection=True,
|
||||
shapes=True,
|
||||
objectsOnly=True)
|
||||
if not shapes:
|
||||
return {}
|
||||
|
||||
# Collect shading engines and their shapes
|
||||
assignments = defaultdict(list)
|
||||
for shape in shapes:
|
||||
|
||||
# Get unique shading groups for the shape
|
||||
shading_groups = cmds.listConnections(shape,
|
||||
source=False,
|
||||
destination=True,
|
||||
plugs=False,
|
||||
connections=False,
|
||||
type="shadingEngine") or []
|
||||
shading_groups = list(set(shading_groups))
|
||||
for shading_group in shading_groups:
|
||||
assignments[shading_group].append(shape)
|
||||
|
||||
return dict(assignments)
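

A minimal usage sketch (assumes a Maya scene containing a shaded 'pCube1'; not part of this changeset):

    shapes = cmds.ls("pCube1", dag=True, shapes=True, long=True)
    # Note: as written the helper also filters by the current selection
    # (selection=True in its cmds.ls call).
    assignments = get_shader_assignments_from_shapes(shapes)
    # e.g. {"initialShadingGroup": ["|pCube1|pCubeShape1"]}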
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def shader(nodes, shadingEngine="initialShadingGroup"):
|
||||
"""Assign a shader to nodes during the context"""
|
||||
|
||||
shapes = cmds.ls(nodes, dag=1, o=1, shapes=1, long=1)
|
||||
original = get_shader_assignments_from_shapes(shapes)
|
||||
|
||||
try:
|
||||
# Assign override shader
|
||||
if shapes:
|
||||
cmds.sets(shapes, edit=True, forceElement=shadingEngine)
|
||||
yield
|
||||
finally:
|
||||
|
||||
# Assign original shaders
|
||||
for sg, members in original.items():
|
||||
if members:
|
||||
cmds.sets(members, edit=True, forceElement=sg)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def displaySmoothness(nodes,
|
||||
divisionsU=0,
|
||||
divisionsV=0,
|
||||
pointsWire=4,
|
||||
pointsShaded=1,
|
||||
polygonObject=1):
|
||||
"""Set the displaySmoothness during the context"""
|
||||
|
||||
# Ensure only non-intermediate shapes
|
||||
nodes = cmds.ls(nodes,
|
||||
dag=1,
|
||||
shapes=1,
|
||||
long=1,
|
||||
noIntermediate=True)
|
||||
|
||||
def parse(node):
|
||||
"""Parse the current state of a node"""
|
||||
state = {}
|
||||
for key in ["divisionsU",
|
||||
"divisionsV",
|
||||
"pointsWire",
|
||||
"pointsShaded",
|
||||
"polygonObject"]:
|
||||
value = cmds.displaySmoothness(node, query=1, **{key: True})
|
||||
if value is not None:
|
||||
state[key] = value[0]
|
||||
return state
|
||||
|
||||
originals = dict((node, parse(node)) for node in nodes)
|
||||
|
||||
try:
|
||||
# Apply current state
|
||||
cmds.displaySmoothness(nodes,
|
||||
divisionsU=divisionsU,
|
||||
divisionsV=divisionsV,
|
||||
pointsWire=pointsWire,
|
||||
pointsShaded=pointsShaded,
|
||||
polygonObject=polygonObject)
|
||||
yield
|
||||
finally:
|
||||
# Revert state
|
||||
for node, state in originals.iteritems():
|
||||
if state:
|
||||
cmds.displaySmoothness(node, **state)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def no_display_layers(nodes):
|
||||
"""Ensure nodes are not in a displayLayer during context.
|
||||
|
||||
Arguments:
|
||||
nodes (list): The nodes to remove from any display layer.
|
||||
|
||||
"""
|
||||
|
||||
# Ensure long names
|
||||
nodes = cmds.ls(nodes, long=True)
|
||||
|
||||
# Get the original state
|
||||
lookup = set(nodes)
|
||||
original = {}
|
||||
for layer in cmds.ls(type='displayLayer'):
|
||||
|
||||
# Skip default layer
|
||||
if layer == "defaultLayer":
|
||||
continue
|
||||
|
||||
members = cmds.editDisplayLayerMembers(layer,
|
||||
query=True,
|
||||
fullNames=True)
|
||||
if not members:
|
||||
continue
|
||||
members = set(members)
|
||||
|
||||
included = lookup.intersection(members)
|
||||
if included:
|
||||
original[layer] = list(included)
|
||||
|
||||
try:
|
||||
# Add all nodes to default layer
|
||||
cmds.editDisplayLayerMembers("defaultLayer", nodes, noRecurse=True)
|
||||
yield
|
||||
finally:
|
||||
# Restore original members
|
||||
for layer, members in original.iteritems():
|
||||
cmds.editDisplayLayerMembers(layer, members, noRecurse=True)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def namespaced(namespace, new=True):
|
||||
"""Work inside namespace during context
|
||||
|
|
@ -764,10 +1072,20 @@ def get_id(node):
|
|||
if node is None:
|
||||
return
|
||||
|
||||
if not cmds.attributeQuery("cbId", node=node, exists=True):
|
||||
sel = om.MSelectionList()
|
||||
sel.add(node)
|
||||
|
||||
api_node = sel.getDependNode(0)
|
||||
fn = om.MFnDependencyNode(api_node)
|
||||
|
||||
if not fn.hasAttribute("cbId"):
|
||||
return
|
||||
|
||||
return cmds.getAttr("{}.cbId".format(node))
|
||||
try:
|
||||
return fn.findPlug("cbId", False).asString()
|
||||
except RuntimeError:
|
||||
log.warning("Failed to retrieve cbId on %s", node)
|
||||
return
|
||||
|
||||
|
||||
def generate_ids(nodes, asset_id=None):
|
||||
|
|
@ -828,7 +1146,6 @@ def set_id(node, unique_id, overwrite=False):
|
|||
|
||||
"""
|
||||
|
||||
attr = "{0}.cbId".format(node)
|
||||
exists = cmds.attributeQuery("cbId", node=node, exists=True)
|
||||
|
||||
# Add the attribute if it does not exist yet
|
||||
|
|
@ -837,6 +1154,7 @@ def set_id(node, unique_id, overwrite=False):
|
|||
|
||||
# Set the value
|
||||
if not exists or overwrite:
|
||||
attr = "{0}.cbId".format(node)
|
||||
cmds.setAttr(attr, unique_id, type="string")
|
||||
|
||||
|
||||
|
|
@ -1393,9 +1711,29 @@ def set_scene_fps(fps, update=True):
|
|||
else:
|
||||
raise ValueError("Unsupported FPS value: `%s`" % fps)
|
||||
|
||||
log.info("Updating FPS to '{}'".format(unit))
|
||||
# Get time slider current state
|
||||
start_frame = cmds.playbackOptions(query=True, minTime=True)
|
||||
end_frame = cmds.playbackOptions(query=True, maxTime=True)
|
||||
|
||||
# Get animation data
|
||||
animation_start = cmds.playbackOptions(query=True, animationStartTime=True)
|
||||
animation_end = cmds.playbackOptions(query=True, animationEndTime=True)
|
||||
|
||||
current_frame = cmds.currentTime(query=True)
|
||||
|
||||
log.info("Setting scene FPS to: '{}'".format(unit))
|
||||
cmds.currentUnit(time=unit, updateAnimation=update)
|
||||
|
||||
# Set time slider data back to previous state
|
||||
cmds.playbackOptions(edit=True, minTime=start_frame)
|
||||
cmds.playbackOptions(edit=True, maxTime=end_frame)
|
||||
|
||||
# Set animation data
|
||||
cmds.playbackOptions(edit=True, animationStartTime=animation_start)
|
||||
cmds.playbackOptions(edit=True, animationEndTime=animation_end)
|
||||
|
||||
cmds.currentTime(current_frame, edit=True, update=True)
|
||||
|
||||
# Force file state to 'modified'
|
||||
cmds.file(modified=True)
|
||||
|
||||
|
|
@ -1424,7 +1762,7 @@ def set_scene_resolution(width, height):
|
|||
log.error("Can't set VRay resolution because there is no node "
|
||||
"named: `%s`" % vray_node)
|
||||
|
||||
log.info("Setting project resolution to: %s x %s" % (width, height))
|
||||
log.info("Setting scene resolution to: %s x %s" % (width, height))
|
||||
cmds.setAttr("%s.width" % control_node, width)
|
||||
cmds.setAttr("%s.height" % control_node, height)
|
||||
|
||||
|
|
@ -1472,8 +1810,7 @@ def validate_fps():
|
|||
|
||||
"""
|
||||
|
||||
asset_data = lib.get_asset_data()
|
||||
fps = asset_data.get("fps", lib.get_project_fps()) # can be int or float
|
||||
fps = lib.get_asset_fps()
|
||||
current_fps = mel.eval('currentTimeUnitToFPS()') # returns float
|
||||
|
||||
if current_fps != fps:
|
||||
|
|
@ -1504,3 +1841,194 @@ def validate_fps():
|
|||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def bake(nodes,
|
||||
frame_range=None,
|
||||
step=1.0,
|
||||
simulation=True,
|
||||
preserve_outside_keys=False,
|
||||
disable_implicit_control=True,
|
||||
shape=True):
|
||||
"""Bake the given nodes over the time range.
|
||||
|
||||
This will bake all attributes of the node, including custom attributes.
|
||||
|
||||
Args:
|
||||
nodes (list): Names of transform nodes, eg. camera, light.
|
||||
frame_range (list): frame range with start and end frame.
|
||||
or if None then takes timeSliderRange
|
||||
simulation (bool): Whether to perform a full simulation of the
|
||||
attributes over time.
|
||||
preserve_outside_keys (bool): Keep keys that are outside of the baked
|
||||
range.
|
||||
disable_implicit_control (bool): When True will disable any
|
||||
constraints to the object.
|
||||
shape (bool): When True also bake attributes on the children shapes.
|
||||
step (float): The step size to sample by.
|
||||
|
||||
Returns:
|
||||
None
|
||||
|
||||
"""
|
||||
|
||||
# Parse inputs
|
||||
if not nodes:
|
||||
return
|
||||
|
||||
assert isinstance(nodes, (list, tuple)), "Nodes must be a list or tuple"
|
||||
|
||||
# If frame range is None fall back to time slider playback time range
|
||||
if frame_range is None:
|
||||
frame_range = [cmds.playbackOptions(query=True, minTime=True),
|
||||
cmds.playbackOptions(query=True, maxTime=True)]
|
||||
|
||||
# If frame range is single frame bake one frame more,
|
||||
# otherwise maya.cmds.bakeResults gets confused
|
||||
if frame_range[1] == frame_range[0]:
|
||||
frame_range[1] += 1
|
||||
|
||||
# Bake it
|
||||
with keytangent_default(in_tangent_type='auto',
|
||||
out_tangent_type='auto'):
|
||||
cmds.bakeResults(nodes,
|
||||
simulation=simulation,
|
||||
preserveOutsideKeys=preserve_outside_keys,
|
||||
disableImplicitControl=disable_implicit_control,
|
||||
shape=shape,
|
||||
sampleBy=step,
|
||||
time=(frame_range[0], frame_range[1]))
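

An illustrative call (assumes an animated 'camera1' transform; not part of this changeset):

    # Bake camera1 over an explicit range; passing frame_range=None would
    # fall back to the current time slider range instead.
    bake(["camera1"], frame_range=[1001, 1100], step=1.0, simulation=True)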
|
||||
|
||||
|
||||
def bake_to_world_space(nodes,
|
||||
frame_range=None,
|
||||
simulation=True,
|
||||
preserve_outside_keys=False,
|
||||
disable_implicit_control=True,
|
||||
shape=True,
|
||||
step=1.0):
|
||||
"""Bake the nodes to world space transformation (incl. other attributes)
|
||||
|
||||
Bakes the transforms to world space (while maintaining all its animated
|
||||
attributes and settings) by duplicating the node. Then parents it to world
|
||||
and constrains to the original.
|
||||
|
||||
Other attributes are also baked by connecting all attributes directly.
|
||||
Baking is then done using Maya's bakeResults command.
|
||||
|
||||
See `bake` for the argument documentation.
|
||||
|
||||
Returns:
|
||||
list: The newly created and baked node names.
|
||||
|
||||
"""
|
||||
|
||||
def _get_attrs(node):
|
||||
"""Workaround for buggy shape attribute listing with listAttr"""
|
||||
attrs = cmds.listAttr(node,
|
||||
write=True,
|
||||
scalar=True,
|
||||
settable=True,
|
||||
connectable=True,
|
||||
keyable=True,
|
||||
shortNames=True) or []
|
||||
valid_attrs = []
|
||||
for attr in attrs:
|
||||
node_attr = '{0}.{1}'.format(node, attr)
|
||||
|
||||
# Sometimes Maya returns 'non-existent' attributes for shapes
|
||||
# so we filter those out
|
||||
if not cmds.attributeQuery(attr, node=node, exists=True):
|
||||
continue
|
||||
|
||||
# We only need those that have a connection, just to be safe
|
||||
# that it's actually keyable/connectable anyway.
|
||||
if cmds.connectionInfo(node_attr,
|
||||
isDestination=True):
|
||||
valid_attrs.append(attr)
|
||||
|
||||
return valid_attrs
|
||||
|
||||
transform_attrs = set(["t", "r", "s",
|
||||
"tx", "ty", "tz",
|
||||
"rx", "ry", "rz",
|
||||
"sx", "sy", "sz"])
|
||||
|
||||
world_space_nodes = []
|
||||
with delete_after() as delete_bin:
|
||||
|
||||
# Create the duplicate nodes that are in world-space connected to
|
||||
# the originals
|
||||
for node in nodes:
|
||||
|
||||
# Duplicate the node
|
||||
short_name = node.rsplit("|", 1)[-1]
|
||||
new_name = "{0}_baked".format(short_name)
|
||||
new_node = cmds.duplicate(node,
|
||||
name=new_name,
|
||||
renameChildren=True)[0]
|
||||
|
||||
# Connect all attributes on the node except for transform
|
||||
# attributes
|
||||
attrs = _get_attrs(node)
|
||||
attrs = set(attrs) - transform_attrs if attrs else []
|
||||
|
||||
for attr in attrs:
|
||||
orig_node_attr = '{0}.{1}'.format(node, attr)
|
||||
new_node_attr = '{0}.{1}'.format(new_node, attr)
|
||||
|
||||
# unlock to avoid connection errors
|
||||
cmds.setAttr(new_node_attr, lock=False)
|
||||
|
||||
cmds.connectAttr(orig_node_attr,
|
||||
new_node_attr,
|
||||
force=True)
|
||||
|
||||
# If shapes are also baked then connect those keyable attributes
|
||||
if shape:
|
||||
children_shapes = cmds.listRelatives(new_node,
|
||||
children=True,
|
||||
fullPath=True,
|
||||
shapes=True)
|
||||
if children_shapes:
|
||||
orig_children_shapes = cmds.listRelatives(node,
|
||||
children=True,
|
||||
fullPath=True,
|
||||
shapes=True)
|
||||
for orig_shape, new_shape in zip(orig_children_shapes,
|
||||
children_shapes):
|
||||
attrs = _get_attrs(orig_shape)
|
||||
for attr in attrs:
|
||||
orig_node_attr = '{0}.{1}'.format(orig_shape, attr)
|
||||
new_node_attr = '{0}.{1}'.format(new_shape, attr)
|
||||
|
||||
# unlock to avoid connection errors
|
||||
cmds.setAttr(new_node_attr, lock=False)
|
||||
|
||||
cmds.connectAttr(orig_node_attr,
|
||||
new_node_attr,
|
||||
force=True)
|
||||
|
||||
# Parent to world
|
||||
if cmds.listRelatives(new_node, parent=True):
|
||||
new_node = cmds.parent(new_node, world=True)[0]
|
||||
|
||||
# Unlock transform attributes so constraint can be created
|
||||
for attr in transform_attrs:
|
||||
cmds.setAttr('{0}.{1}'.format(new_node, attr), lock=False)
|
||||
|
||||
# Constraints
|
||||
delete_bin.extend(cmds.parentConstraint(node, new_node, mo=False))
|
||||
delete_bin.extend(cmds.scaleConstraint(node, new_node, mo=False))
|
||||
|
||||
world_space_nodes.append(new_node)
|
||||
|
||||
bake(world_space_nodes,
|
||||
frame_range=frame_range,
|
||||
step=step,
|
||||
simulation=simulation,
|
||||
preserve_outside_keys=preserve_outside_keys,
|
||||
disable_implicit_control=disable_implicit_control,
|
||||
shape=shape)
|
||||
|
||||
return world_space_nodes
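

An illustrative call of the world-space bake (assumes a camera parented under a rig group; not part of this changeset):

    baked = bake_to_world_space(["|rig_GRP|camera1"],
                                frame_range=[1001, 1100])
    # -> e.g. ["camera1_baked"]: a duplicate parented to world with baked
    #    keys; the temporary constraints are removed when delete_after exits.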
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -23,10 +23,15 @@ def _get_menu():
|
|||
|
||||
def deferred():
|
||||
|
||||
import scriptsmenu.launchformaya as launchformaya
|
||||
import scriptsmenu.scriptsmenu as scriptsmenu
|
||||
log.info("Attempting to install scripts menu..")
|
||||
|
||||
log.info("Attempting to install ...")
|
||||
try:
|
||||
import scriptsmenu.launchformaya as launchformaya
|
||||
import scriptsmenu.scriptsmenu as scriptsmenu
|
||||
except ImportError:
|
||||
log.warning("Skipping colorbleed.menu install, because "
|
||||
"'scriptsmenu' module seems unavailable.")
|
||||
return
|
||||
|
||||
# load configuration of custom menu
|
||||
config_path = os.path.join(os.path.dirname(__file__), "menu.json")
|
||||
|
|
@ -44,7 +49,7 @@ def uninstall():
|
|||
|
||||
menu = _get_menu()
|
||||
if menu:
|
||||
log.info("Attempting to uninstall ..")
|
||||
log.info("Attempting to uninstall..")
|
||||
|
||||
try:
|
||||
menu.deleteLater()
|
||||
|
|
|
|||
|
|
@ -30,6 +30,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
|||
"colorbleed.mayaAscii",
|
||||
"colorbleed.model",
|
||||
"colorbleed.pointcache",
|
||||
"colorbleed.vdbcache",
|
||||
"colorbleed.setdress",
|
||||
"colorbleed.rig",
|
||||
"colorbleed.vrayproxy",
|
||||
|
|
|
|||
|
|
@ -293,6 +293,10 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
|
|||
) for index, key in enumerate(environment)
|
||||
})
|
||||
|
||||
# Avoid copied pools and remove secondary pool
|
||||
payload["JobInfo"]["Pool"] = "none"
|
||||
payload["JobInfo"].pop("SecondaryPool", None)
|
||||
|
||||
self.log.info("Submitting..")
|
||||
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
|
||||
|
||||
|
|
|
|||
colorbleed/plugins/houdini/create/create_alembic_camera.py (new file, +34 lines)
|
|
@ -0,0 +1,34 @@
|
|||
from collections import OrderedDict
|
||||
|
||||
from avalon import houdini
|
||||
|
||||
|
||||
class CreateAlembicCamera(houdini.Creator):
|
||||
|
||||
name = "camera"
|
||||
label = "Camera (Abc)"
|
||||
family = "colorbleed.camera"
|
||||
icon = "camera"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(CreateAlembicCamera, self).__init__(*args, **kwargs)
|
||||
|
||||
# Remove "active"; the bypass flag of the nodes is checked instead
|
||||
self.data.pop("active", None)
|
||||
|
||||
# Set node type to create for output
|
||||
self.data.update({"node_type": "alembic"})
|
||||
|
||||
def process(self):
|
||||
instance = super(CreateAlembicCamera, self).process()
|
||||
|
||||
parms = {"use_sop_path": True,
|
||||
"build_from_path": True,
|
||||
"path_attrib": "path",
|
||||
"filename": "$HIP/pyblish/%s.abc" % self.name}
|
||||
|
||||
if self.nodes:
|
||||
node = self.nodes[0]
|
||||
parms.update({"sop_path": node.path()})
|
||||
|
||||
instance.setParms(parms)
|
||||
|
|
@ -1,7 +1,3 @@
|
|||
from collections import OrderedDict
|
||||
|
||||
import hou
|
||||
|
||||
from avalon import houdini
|
||||
|
||||
|
||||
|
|
@ -16,15 +12,29 @@ class CreatePointCache(houdini.Creator):
|
|||
def __init__(self, *args, **kwargs):
|
||||
super(CreatePointCache, self).__init__(*args, **kwargs)
|
||||
|
||||
# create an ordered dict with the existing data first
|
||||
data = OrderedDict(**self.data)
|
||||
# Remove "active"; the bypass flag of the nodes is checked instead
|
||||
self.data.pop("active", None)
|
||||
|
||||
# Set node type to create for output
|
||||
data["node_type"] = "alembic"
|
||||
self.data.update({"node_type": "alembic"})
|
||||
|
||||
# Collect animation data for point cache exporting
|
||||
start, end = hou.playbar.timelineRange()
|
||||
data["startFrame"] = start
|
||||
data["endFrame"] = end
|
||||
def process(self):
|
||||
instance = super(CreatePointCache, self).process()
|
||||
|
||||
self.data = data
|
||||
parms = {"use_sop_path": True, # Export single node from SOP Path
|
||||
"build_from_path": True, # Direct path of primitive in output
|
||||
"path_attrib": "path", # Pass path attribute for output\
|
||||
"prim_to_detail_pattern": "cbId",
|
||||
"format": 2, # Set format to Ogawa
|
||||
"filename": "$HIP/pyblish/%s.abc" % self.name}
|
||||
|
||||
if self.nodes:
|
||||
node = self.nodes[0]
|
||||
parms.update({"sop_path": node.path()})
|
||||
|
||||
instance.setParms(parms)
|
||||
|
||||
# Lock any parameters in this list
|
||||
to_lock = ["prim_to_detail_pattern"]
|
||||
for name in to_lock:
|
||||
parm = instance.parm(name)
|
||||
parm.lock(True)
|
||||
|
|
|
|||
colorbleed/plugins/houdini/create/create_vbd_cache.py (new file, +33 lines)
|
|
@ -0,0 +1,33 @@
|
|||
from avalon import houdini
|
||||
|
||||
|
||||
class CreateVDBCache(houdini.Creator):
|
||||
"""Alembic pointcache for animated data"""
|
||||
|
||||
name = "vbdcache"
|
||||
label = "VDB Cache"
|
||||
family = "colorbleed.vdbcache"
|
||||
icon = "cloud"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(CreateVDBCache, self).__init__(*args, **kwargs)
|
||||
|
||||
# Remove "active"; the bypass flag of the nodes is checked instead
|
||||
self.data.pop("active", None)
|
||||
|
||||
self.data.update({
|
||||
"node_type": "geometry", # Set node type to create for output
|
||||
"executeBackground": True # Render node in background
|
||||
})
|
||||
|
||||
def process(self):
|
||||
instance = super(CreateVDBCache, self).process()
|
||||
|
||||
parms = {"sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name,
|
||||
"initsim": True}
|
||||
|
||||
if self.nodes:
|
||||
node = self.nodes[0]
|
||||
parms.update({"sop_path": node.path()})
|
||||
|
||||
instance.setParms(parms)
|
||||
|
|
@ -6,8 +6,10 @@ from avalon.houdini import pipeline, lib
|
|||
class AbcLoader(api.Loader):
|
||||
"""Specific loader of Alembic for the avalon.animation family"""
|
||||
|
||||
families = ["colorbleed.animation", "colorbleed.pointcache"]
|
||||
label = "Load Animation"
|
||||
families = ["colorbleed.model",
|
||||
"colorbleed.animation",
|
||||
"colorbleed.pointcache"]
|
||||
label = "Load Alembic"
|
||||
representations = ["abc"]
|
||||
order = -10
|
||||
icon = "code-fork"
|
||||
|
|
@ -40,30 +42,37 @@ class AbcLoader(api.Loader):
|
|||
container = obj.createNode("geo", node_name=node_name)
|
||||
|
||||
# Remove the file node, it only loads static meshes
|
||||
node_path = "/obj/{}/file1".format(node_name)
|
||||
hou.node(node_path)
|
||||
file_node = container.node("file1".format(node_name))
|
||||
file_node.destroy()
|
||||
|
||||
# Create an alembic node (supports animation)
|
||||
alembic = container.createNode("alembic", node_name=node_name)
|
||||
alembic.setParms({"fileName": file_path})
|
||||
|
||||
# Add unpack node
|
||||
unpack = container.createNode("unpack")
|
||||
unpack_name = "unpack_{}".format(name)
|
||||
unpack = container.createNode("unpack", node_name=unpack_name)
|
||||
unpack.setInput(0, alembic)
|
||||
unpack.setParms({"transfer_attributes": "path"})
|
||||
|
||||
# Add normal to points
|
||||
# Order of menu ['point', 'vertex', 'prim', 'detail']
|
||||
normal_name = "normal_{}".format(name)
|
||||
normal_node = container.createNode("normal", node_name=normal_name)
|
||||
normal_node.setParms({"type": 0})
|
||||
|
||||
normal_node.setInput(0, unpack)
|
||||
|
||||
null = container.createNode("null", node_name="OUT".format(name))
|
||||
null.setInput(0, normal_node)
|
||||
|
||||
# Set display on last node
|
||||
null.setDisplayFlag(True)
|
||||
|
||||
# Set new position for unpack node else it gets cluttered
|
||||
unpack.setPosition([0, -1])
|
||||
|
||||
# set unpack as display node
|
||||
unpack.setDisplayFlag(True)
|
||||
|
||||
null_node = container.createNode("null",
|
||||
node_name="OUT_{}".format(name))
|
||||
null_node.setPosition([0, -2])
|
||||
null_node.setInput(0, unpack)
|
||||
|
||||
nodes = [container, alembic, unpack, null_node]
|
||||
nodes = [container, alembic, unpack, normal_node, null]
|
||||
for nr, node in enumerate(nodes):
|
||||
node.setPosition([0, (0 - nr)])
|
||||
|
||||
self[:] = nodes
|
||||
|
||||
|
|
|
|||
colorbleed/plugins/houdini/load/load_camera.py (new file, +119 lines)
|
|
@ -0,0 +1,119 @@
|
|||
from avalon import api
|
||||
|
||||
from avalon.houdini import pipeline, lib
|
||||
|
||||
|
||||
class CameraLoader(api.Loader):
|
||||
"""Specific loader of Alembic for the avalon.animation family"""
|
||||
|
||||
families = ["colorbleed.camera"]
|
||||
label = "Load Camera (abc)"
|
||||
representations = ["abc"]
|
||||
order = -10
|
||||
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
|
||||
import os
|
||||
import hou
|
||||
|
||||
# Format file name, Houdini only wants forward slashes
|
||||
file_path = os.path.normpath(self.fname)
|
||||
file_path = file_path.replace("\\", "/")
|
||||
|
||||
# Get the root node
|
||||
obj = hou.node("/obj")
|
||||
|
||||
# Create a unique name
|
||||
counter = 1
|
||||
asset_name = context["asset"]["name"]
|
||||
|
||||
namespace = namespace if namespace else asset_name
|
||||
formatted = "{}_{}".format(namespace, name) if namespace else name
|
||||
node_name = "{0}_{1:03d}".format(formatted, counter)
|
||||
|
||||
children = lib.children_as_string(hou.node("/obj"))
|
||||
while node_name in children:
|
||||
counter += 1
|
||||
node_name = "{0}_{1:03d}".format(formatted, counter)
|
||||
|
||||
# Create an archive node
|
||||
container = self.create_and_connect(obj, "alembicarchive", node_name)
|
||||
|
||||
# TODO: add FPS of project / asset
|
||||
container.setParms({"fileName": file_path,
|
||||
"channelRef": True})
|
||||
|
||||
# Apply some magic
|
||||
container.parm("buildHierarchy").pressButton()
|
||||
container.moveToGoodPosition()
|
||||
|
||||
# Create an alembic xform node
|
||||
nodes = [container]
|
||||
|
||||
self[:] = nodes
|
||||
|
||||
return pipeline.containerise(node_name,
|
||||
namespace,
|
||||
nodes,
|
||||
context,
|
||||
self.__class__.__name__)
|
||||
|
||||
def update(self, container, representation):
|
||||
|
||||
node = container["node"]
|
||||
|
||||
# Update the file path
|
||||
file_path = api.get_representation_path(representation)
|
||||
file_path = file_path.replace("\\", "/")
|
||||
|
||||
# Update attributes
|
||||
node.setParms({"fileName": file_path,
|
||||
"representation": str(representation["_id"])})
|
||||
|
||||
# Rebuild
|
||||
node.parm("buildHierarchy").pressButton()
|
||||
|
||||
def remove(self, container):
|
||||
|
||||
node = container["node"]
|
||||
node.destroy()
|
||||
|
||||
def create_and_connect(self, node, node_type, name=None):
|
||||
"""Create a node within a node which and connect it to the input
|
||||
|
||||
Args:
|
||||
node(hou.Node): parent of the new node
|
||||
node_type(str): name of the type of node, e.g. 'alembic'
|
||||
name(str, Optional): name of the node
|
||||
|
||||
Returns:
|
||||
hou.Node
|
||||
|
||||
"""
|
||||
|
||||
import hou
|
||||
|
||||
try:
|
||||
|
||||
if name:
|
||||
new_node = node.createNode(node_type, node_name=name)
|
||||
else:
|
||||
new_node = node.createNode(node_type)
|
||||
|
||||
new_node.moveToGoodPosition()
|
||||
|
||||
try:
|
||||
input_node = next(i for i in node.allItems() if
|
||||
isinstance(i, hou.SubnetIndirectInput))
|
||||
except StopIteration:
|
||||
return new_node
|
||||
|
||||
new_node.setInput(0, input_node)
|
||||
return new_node
|
||||
|
||||
except Exception:
|
||||
raise RuntimeError("Could not created node type `%s` in node `%s`"
|
||||
% (node_type, node))
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
import pyblish.api
|
||||
|
||||
|
||||
class CollectAlembicNodes(pyblish.api.InstancePlugin):
|
||||
|
||||
label = "Collect Alembic Nodes"
|
||||
|
||||
def process(self, instance):
|
||||
pass
|
||||
|
|
@ -3,7 +3,7 @@ import hou
|
|||
import pyblish.api
|
||||
|
||||
|
||||
class CollectMayaCurrentFile(pyblish.api.ContextPlugin):
|
||||
class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin):
|
||||
"""Inject the current working file into context"""
|
||||
|
||||
order = pyblish.api.CollectorOrder - 0.5
|
||||
|
|
|
|||
colorbleed/plugins/houdini/publish/collect_frames.py (new file, +66 lines)
|
|
@ -0,0 +1,66 @@
|
|||
import os
|
||||
import re
|
||||
|
||||
import pyblish.api
|
||||
from colorbleed.houdini import lib
|
||||
|
||||
|
||||
class CollectFrames(pyblish.api.InstancePlugin):
|
||||
"""Collect all frames which would be a resukl"""
|
||||
|
||||
order = pyblish.api.CollectorOrder
|
||||
label = "Collect Frames"
|
||||
families = ["colorbleed.vdbcache"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
ropnode = instance[0]
|
||||
|
||||
output_parm = lib.get_output_parameter(ropnode)
|
||||
output = output_parm.eval()
|
||||
|
||||
file_name = os.path.basename(output)
|
||||
match = re.match("(\w+)\.(\d+)\.vdb", file_name)
|
||||
result = file_name
|
||||
|
||||
start_frame = instance.data.get("startFrame", None)
|
||||
end_frame = instance.data.get("endFrame", None)
|
||||
|
||||
if match and start_frame is not None:
|
||||
|
||||
# Check if frames are bigger than 1 (file collection)
|
||||
# override the result
|
||||
if end_frame - start_frame > 1:
|
||||
result = self.create_file_list(match,
|
||||
int(start_frame),
|
||||
int(end_frame))
|
||||
|
||||
instance.data.update({"frames": result})
|
||||
|
||||
def create_file_list(self, match, start_frame, end_frame):
|
||||
"""Collect files based on frame range and regex.match
|
||||
|
||||
Args:
|
||||
match(re.match): match object
|
||||
start_frame(int): start of the animation
|
||||
end_frame(int): end of the animation
|
||||
|
||||
Returns:
|
||||
list
|
||||
|
||||
"""
|
||||
|
||||
result = []
|
||||
|
||||
padding = len(match.group(2))
|
||||
name = match.group(1)
|
||||
padding_format = "{number:0{width}d}"
|
||||
|
||||
count = start_frame
|
||||
while count <= end_frame:
|
||||
str_count = padding_format.format(number=count, width=padding)
|
||||
file_name = "{}.{}.vdb".format(name, str_count)
|
||||
result.append(file_name)
|
||||
count += 1
|
||||
|
||||
return result
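

A behaviour sketch for the two methods above, with hypothetical file names (not part of this changeset):

    # For a ROP writing "smoke.0101.vdb" with startFrame=101 and endFrame=103:
    match = re.match("(\w+)\.(\d+)\.vdb", "smoke.0101.vdb")
    # create_file_list(match, 101, 103) would return:
    #   ["smoke.0101.vdb", "smoke.0102.vdb", "smoke.0103.vdb"]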
|
||||
|
|
@ -24,8 +24,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
|
||||
"""
|
||||
|
||||
order = pyblish.api.CollectorOrder - 0.01
|
||||
label = "Collect Instances"
|
||||
order = pyblish.api.CollectorOrder
|
||||
hosts = ["houdini"]
|
||||
|
||||
def process(self, context):
|
||||
|
|
@ -38,20 +38,32 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
if not node.parm("id"):
|
||||
continue
|
||||
|
||||
if node.parm("id").eval() != "pyblish.avalon.instance":
|
||||
if node.evalParm("id") != "pyblish.avalon.instance":
|
||||
continue
|
||||
|
||||
has_family = node.parm("family").eval()
|
||||
has_family = node.evalParm("family")
|
||||
assert has_family, "'%s' is missing 'family'" % node.name()
|
||||
|
||||
data = lib.read(node)
|
||||
# Check bypass state and reverse
|
||||
data.update({"active": not node.isBypassed()})
|
||||
|
||||
# temporarily translation of `active` to `publish` till issue has
|
||||
# been resolved, https://github.com/pyblish/pyblish-base/issues/307
|
||||
if "active" in data:
|
||||
data["publish"] = data["active"]
|
||||
|
||||
instance = context.create_instance(data.get("name", node.name()))
|
||||
data.update(self.get_frame_data(node))
|
||||
|
||||
# Create nice name
|
||||
# All nodes in the Outputs graph have the 'Valid Frame Range'
|
||||
# attribute, we check here if any frames are set
|
||||
label = data.get("name", node.name())
|
||||
if "startFrame" in data:
|
||||
frames = "[{startFrame} - {endFrame}]".format(**data)
|
||||
label = "{} {}".format(label, frames)
|
||||
|
||||
instance = context.create_instance(label)
|
||||
|
||||
instance[:] = [node]
|
||||
instance.data.update(data)
|
||||
|
|
@ -66,3 +78,27 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
context[:] = sorted(context, key=sort_by_family)
|
||||
|
||||
return context
|
||||
|
||||
def get_frame_data(self, node):
|
||||
"""Get the frame data: start frame, end frame and steps
|
||||
Args:
|
||||
node(hou.Node)
|
||||
|
||||
Returns:
|
||||
dict
|
||||
|
||||
"""
|
||||
|
||||
data = {}
|
||||
|
||||
if node.parm("trange") is None:
|
||||
return data
|
||||
|
||||
if node.evalParm("trange") == 0:
|
||||
return data
|
||||
|
||||
data["startFrame"] = node.evalParm("f1")
|
||||
data["endFrame"] = node.evalParm("f2")
|
||||
data["steps"] = node.evalParm("f3")
|
||||
|
||||
return data
|
||||
|
|
|
|||
colorbleed/plugins/houdini/publish/collect_output_node.py (new file, +27 lines)
|
|
@ -0,0 +1,27 @@
|
|||
import pyblish.api
|
||||
|
||||
|
||||
class CollectOutputNode(pyblish.api.InstancePlugin):
|
||||
"""Collect the out node which of the instance"""
|
||||
|
||||
order = pyblish.api.CollectorOrder
|
||||
families = ["*"]
|
||||
hosts = ["houdini"]
|
||||
label = "Collect Output Node"
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
import hou
|
||||
|
||||
node = instance[0]
|
||||
|
||||
# Get sop path
|
||||
if node.type().name() == "alembic":
|
||||
sop_path_parm = "sop_path"
|
||||
else:
|
||||
sop_path_parm = "soppath"
|
||||
|
||||
sop_path = node.parm(sop_path_parm).eval()
|
||||
out_node = hou.node(sop_path)
|
||||
|
||||
instance.data["output_node"] = out_node
|
||||
|
|
@ -2,32 +2,29 @@ import os
|
|||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
from colorbleed.houdini import lib
|
||||
|
||||
|
||||
class ExtractAlembic(colorbleed.api.Extractor):
|
||||
|
||||
order = pyblish.api.ExtractorOrder
|
||||
label = "Extract Pointcache (Alembic)"
|
||||
label = "Extract Alembic"
|
||||
hosts = ["houdini"]
|
||||
families = ["colorbleed.pointcache"]
|
||||
families = ["colorbleed.pointcache", "colorbleed.camera"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
staging_dir = self.staging_dir(instance)
|
||||
|
||||
file_name = "{}.abc".format(instance.data["subset"])
|
||||
tmp_filepath = os.path.join(staging_dir, file_name)
|
||||
|
||||
start_frame = float(instance.data["startFrame"])
|
||||
end_frame = float(instance.data["endFrame"])
|
||||
|
||||
ropnode = instance[0]
|
||||
attributes = {"filename": tmp_filepath,
|
||||
"trange": 2}
|
||||
|
||||
with lib.attribute_values(ropnode, attributes):
|
||||
ropnode.render(frame_range=(start_frame, end_frame, 1))
|
||||
# Get the filename from the filename parameter
|
||||
output = ropnode.evalParm("filename")
|
||||
staging_dir = os.path.dirname(output)
|
||||
instance.data["stagingDir"] = staging_dir
|
||||
|
||||
file_name = os.path.basename(output)
|
||||
|
||||
# We run the render
|
||||
self.log.info("Writing alembic '%s' to '%s'" % (file_name, staging_dir))
|
||||
ropnode.render()
|
||||
|
||||
if "files" not in instance.data:
|
||||
instance.data["files"] = []
|
||||
|
|
|
|||
colorbleed/plugins/houdini/publish/extract_vdb_cache.py (new file, 36 lines)
|
|
@ -0,0 +1,36 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
|
||||
class ExtractVDBCache(colorbleed.api.Extractor):
|
||||
|
||||
order = pyblish.api.ExtractorOrder + 0.1
|
||||
label = "Extract VDB Cache"
|
||||
families = ["colorbleed.vdbcache"]
|
||||
hosts = ["houdini"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
ropnode = instance[0]
|
||||
|
||||
# Get the filename from the filename parameter
|
||||
# `.evalParm(parameter)` will make sure all tokens are resolved
|
||||
sop_output = ropnode.evalParm("sopoutput")
|
||||
staging_dir = os.path.normpath(os.path.dirname(sop_output))
|
||||
instance.data["stagingDir"] = staging_dir
|
||||
|
||||
if instance.data.get("executeBackground", True):
|
||||
self.log.info("Creating background task..")
|
||||
ropnode.parm("executebackground").pressButton()
|
||||
self.log.info("Finished")
|
||||
else:
|
||||
ropnode.render()
|
||||
|
||||
if "files" not in instance.data:
|
||||
instance.data["files"] = []
|
||||
|
||||
output = instance.data["frames"]
|
||||
|
||||
instance.data["files"].append(output)
|
||||
colorbleed/plugins/houdini/publish/valiate_vdb_input_node.py (new file, 46 lines)
|
|
@ -0,0 +1,46 @@
|
|||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
|
||||
class ValidateVDBInputNode(pyblish.api.InstancePlugin):
|
||||
"""Validate that the node connected to the output node is of type VDB
|
||||
|
||||
Regardless of the amount of VDBs created, the output will need to have an
equal amount of VDBs, points, primitives and vertices.
|
||||
|
||||
A VDB is an inherited type of Prim and holds the following data:
|
||||
- Primitives: 1
|
||||
- Points: 1
|
||||
- Vertices: 1
|
||||
- VDBs: 1
|
||||
|
||||
"""
|
||||
|
||||
order = colorbleed.api.ValidateContentsOrder + 0.1
|
||||
families = ["colorbleed.vdbcache"]
|
||||
hosts = ["houdini"]
|
||||
label = "Validate Input Node (VDB)"
|
||||
|
||||
def process(self, instance):
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise RuntimeError("Node connected to the output node is not"
|
||||
"of type VDB!")
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
||||
node = instance.data["output_node"]
|
||||
|
||||
prims = node.geometry().prims()
|
||||
nr_of_prims = len(prims)
|
||||
|
||||
nr_of_points = len(node.geometry().points())
|
||||
if nr_of_points != nr_of_prims:
|
||||
cls.log.error("The number of primitives and points do not match")
|
||||
return [instance]
|
||||
|
||||
for prim in prims:
|
||||
if prim.numVertices() != 1:
|
||||
cls.log.error("Found primitive with more than 1 vertex!")
|
||||
return [instance]
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
|
||||
class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
|
||||
"""Validate that the node connected to the output is correct
|
||||
|
||||
The connected node cannot be of the following types for Alembic:
|
||||
- VDB
|
||||
- Volume
|
||||
|
||||
"""
|
||||
|
||||
order = colorbleed.api.ValidateContentsOrder + 0.1
|
||||
families = ["colorbleed.pointcache"]
|
||||
hosts = ["houdini"]
|
||||
label = "Validate Input Node (Abc)"
|
||||
|
||||
def process(self, instance):
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise RuntimeError("Node connected to the output node incorrect")
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
||||
invalid_nodes = ["VDB", "Volume"]
|
||||
node = instance.data["output_node"]
|
||||
|
||||
prims = node.geometry().prims()
|
||||
|
||||
for prim in prims:
|
||||
prim_type = prim.type().name()
|
||||
if prim_type in invalid_nodes:
|
||||
cls.log.error("Found a primitive which is of type '%s' !"
|
||||
% prim_type)
|
||||
return [instance]
|
||||
|
|
@ -0,0 +1,50 @@
|
|||
import pyblish.api
|
||||
|
||||
from colorbleed.houdini import lib
|
||||
|
||||
|
||||
class ValidateAnimationSettings(pyblish.api.InstancePlugin):
|
||||
"""Validate if the unexpanded string contains the frame ('$F') token
|
||||
|
||||
This validator will only check the output parameter of the node if
|
||||
the Valid Frame Range is not set to 'Render Current Frame'
|
||||
|
||||
Rules:
|
||||
If you render out a frame range it is mandatory to have the
|
||||
frame token - '$F4' or similar - to ensure that each frame gets
|
||||
written. If this is not the case you will overwrite the same file
|
||||
every time a frame is written out.
|
||||
|
||||
Examples:
|
||||
Good: 'my_vdb_cache.$F4.vdb'
Bad: 'my_vdb_cache.vdb'
|
||||
|
||||
"""
|
||||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
label = "Validate Frame Settings"
|
||||
families = ["colorbleed.vdbcache"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise RuntimeError("Output settings do no match for '%s'" %
|
||||
instance)
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
||||
node = instance[0]
|
||||
|
||||
# Check trange parm, 0 means Render Current Frame
|
||||
frame_range = node.evalParm("trange")
|
||||
if frame_range == 0:
|
||||
return []
|
||||
|
||||
output_parm = lib.get_output_parameter(node)
|
||||
unexpanded_str = output_parm.unexpandedString()
|
||||
|
||||
if "$F" not in unexpanded_str:
|
||||
cls.log.error("No frame token found in '%s'" % node.path())
|
||||
return [instance]
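The same rule in plain Python, assuming the unexpanded parm value is already available as a string (the path below is made up for illustration):

    unexpanded = "$HIP/geo/my_cache.$F4.vdb"   # hypothetical output value
    if "$F" not in unexpanded:
        raise RuntimeError("Add a $F token so every frame writes its own file")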
|
||||
|
|
@ -2,7 +2,7 @@ import pyblish.api
|
|||
import colorbleed.api
|
||||
|
||||
|
||||
class ValidatIntermediateDirectoriesChecked(pyblish.api.InstancePlugin):
|
||||
class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin):
|
||||
"""Validate if node attribute Create intermediate Directories is turned on
|
||||
|
||||
Rules:
|
||||
|
|
|
|||
|
|
@ -29,13 +29,22 @@ class ValidatOutputNodeExists(pyblish.api.InstancePlugin):
|
|||
result = set()
|
||||
|
||||
node = instance[0]
|
||||
sop_path = node.parm("sop_path").eval()
|
||||
if not sop_path.endswith("OUT"):
|
||||
cls.log.error("SOP Path does not end path at output node")
|
||||
result.add(node.path())
|
||||
if node.type().name() == "alembic":
|
||||
soppath_parm = "sop_path"
|
||||
else:
|
||||
# Fall back to geometry node
|
||||
soppath_parm = "soppath"
|
||||
|
||||
if hou.node(sop_path) is None:
|
||||
sop_path = node.parm(soppath_parm).eval()
|
||||
output_node = hou.node(sop_path)
|
||||
|
||||
if output_node is None:
|
||||
cls.log.error("Node at '%s' does not exist" % sop_path)
|
||||
result.add(node.path())
|
||||
|
||||
# Added cam as this is a legit output type (cameras can't
|
||||
if output_node.type().name() not in ["output", "cam"]:
|
||||
cls.log.error("SOP Path does not end path at output node")
|
||||
result.add(node.path())
|
||||
|
||||
return result
|
||||
|
|
|
|||
colorbleed/plugins/houdini/publish/validate_output_node.py (new file, 45 lines)
|
|
@ -0,0 +1,45 @@
|
|||
import pyblish.api
|
||||
|
||||
|
||||
class ValidateOutputNode(pyblish.api.InstancePlugin):
|
||||
"""Validate if output node:
|
||||
- exists
|
||||
- is of type 'output'
|
||||
- has an input"""
|
||||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
families = ["*"]
|
||||
hosts = ["houdini"]
|
||||
label = "Validate Output Node"
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise RuntimeError("Output node(s) `%s` are incorrect" % invalid)
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
||||
output_node = instance.data["output_node"]
|
||||
|
||||
if output_node is None:
|
||||
node = instance[0]
|
||||
cls.log.error("Output node at '%s' does not exist, see source" %
|
||||
node.path())
|
||||
|
||||
return node.path()
|
||||
|
||||
# Check if type is correct
|
||||
type_name = output_node.type().name()
|
||||
if type_name not in ["output", "cam"]:
|
||||
cls.log.error("Output node `%s` is not an accepted type `output` "
|
||||
"or `camera`" %
|
||||
output_node.path())
|
||||
return [output_node.path()]
|
||||
|
||||
# Check if output node has incoming connections
|
||||
if type_name == "output" and not output_node.inputConnections():
|
||||
cls.log.error("Output node `%s` has no incoming connections"
|
||||
% output_node.path())
|
||||
return [output_node.path()]
|
||||
|
|
@ -1,3 +1,5 @@
|
|||
from collections import OrderedDict
|
||||
|
||||
import avalon.maya
|
||||
|
||||
|
||||
|
|
@ -8,3 +10,14 @@ class CreateModel(avalon.maya.Creator):
|
|||
label = "Model"
|
||||
family = "colorbleed.model"
|
||||
icon = "cube"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(CreateModel, self).__init__(*args, **kwargs)
|
||||
|
||||
# create an ordered dict with the existing data first
|
||||
data = OrderedDict(**self.data)
|
||||
|
||||
# Write vertex colors with the geometry.
|
||||
data["writeColorSets"] = True
|
||||
|
||||
self.data = data
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import colorbleed.maya.plugin
|
|||
|
||||
|
||||
class AbcLoader(colorbleed.maya.plugin.ReferenceLoader):
|
||||
"""Specific loader of Alembic for the avalon.animation family"""
|
||||
"""Specific loader of Alembic for the colorbleed.animation family"""
|
||||
|
||||
families = ["colorbleed.animation",
|
||||
"colorbleed.pointcache"]
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import colorbleed.maya.plugin
|
|||
|
||||
|
||||
class CameraLoader(colorbleed.maya.plugin.ReferenceLoader):
|
||||
"""Specific loader of Alembic for the avalon.animation family"""
|
||||
"""Specific loader of Alembic for the colorbleed.camera family"""
|
||||
|
||||
families = ["colorbleed.camera"]
|
||||
label = "Reference camera"
|
||||
|
|
|
|||
colorbleed/plugins/maya/load/load_vdb_to_redshift.py (new file, 69 lines)
|
|
@ -0,0 +1,69 @@
|
|||
from avalon import api
|
||||
|
||||
|
||||
class LoadVDBtoRedShift(api.Loader):
|
||||
"""Load OpenVDB in a Redshift Volume Shape"""
|
||||
|
||||
families = ["colorbleed.vdbcache"]
|
||||
representations = ["vdb"]
|
||||
|
||||
label = "Load VDB to RedShift"
|
||||
icon = "cloud"
|
||||
color = "orange"
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
|
||||
from maya import cmds
|
||||
import avalon.maya.lib as lib
|
||||
from avalon.maya.pipeline import containerise
|
||||
|
||||
# Check if the plugin for redshift is available on the pc
|
||||
try:
|
||||
cmds.loadPlugin("redshift4maya", quiet=True)
|
||||
except Exception as exc:
|
||||
self.log.error("Encountered exception:\n%s" % exc)
|
||||
return
|
||||
|
||||
# Check if viewport drawing engine is Open GL Core (compat)
|
||||
render_engine = None
|
||||
compatible = "OpenGL"
|
||||
if cmds.optionVar(exists="vp2RenderingEngine"):
|
||||
render_engine = cmds.optionVar(query="vp2RenderingEngine")
|
||||
|
||||
if not render_engine or not render_engine.startswith(compatible):
|
||||
raise RuntimeError("Current scene's settings are incompatible."
|
||||
"See Preferences > Display > Viewport 2.0 to "
|
||||
"set the render engine to '%s<type>'"
|
||||
% compatible)
|
||||
|
||||
asset = context['asset']
|
||||
|
||||
asset_name = asset["name"]
|
||||
namespace = namespace or lib.unique_namespace(
|
||||
asset_name + "_",
|
||||
prefix="_" if asset_name[0].isdigit() else "",
|
||||
suffix="_",
|
||||
)
|
||||
|
||||
# Root group
|
||||
label = "{}:{}".format(namespace, name)
|
||||
root = cmds.group(name=label, empty=True)
|
||||
|
||||
# Create VR
|
||||
volume_node = cmds.createNode("RedshiftVolumeShape",
|
||||
name="{}RVSShape".format(label),
|
||||
parent=root)
|
||||
|
||||
cmds.setAttr("{}.fileName".format(volume_node),
|
||||
self.fname,
|
||||
type="string")
|
||||
|
||||
nodes = [root, volume_node]
|
||||
self[:] = nodes
|
||||
|
||||
return containerise(
|
||||
name=name,
|
||||
namespace=namespace,
|
||||
nodes=nodes,
|
||||
context=context,
|
||||
loader=self.__class__.__name__)
|
||||
colorbleed/plugins/maya/load/load_vdb_to_vray.py (new file, 62 lines)
|
|
@ -0,0 +1,62 @@
|
|||
from avalon import api
|
||||
|
||||
|
||||
class LoadVDBtoVRay(api.Loader):
|
||||
|
||||
families = ["colorbleed.vdbcache"]
|
||||
representations = ["vdb"]
|
||||
|
||||
label = "Load VDB to VRay"
|
||||
icon = "cloud"
|
||||
color = "orange"
|
||||
|
||||
def load(self, context, name, namespace, data):
|
||||
|
||||
from maya import cmds
|
||||
import avalon.maya.lib as lib
|
||||
from avalon.maya.pipeline import containerise
|
||||
|
||||
# Check if viewport drawing engine is Open GL Core (compat)
|
||||
render_engine = None
|
||||
compatible = "OpenGLCoreProfileCompat"
|
||||
if cmds.optionVar(exists="vp2RenderingEngine"):
|
||||
render_engine = cmds.optionVar(query="vp2RenderingEngine")
|
||||
|
||||
if not render_engine or render_engine != compatible:
|
||||
raise RuntimeError("Current scene's settings are incompatible."
|
||||
"See Preferences > Display > Viewport 2.0 to "
|
||||
"set the render engine to '%s'" % compatible)
|
||||
|
||||
asset = context['asset']
|
||||
version = context["version"]
|
||||
|
||||
asset_name = asset["name"]
|
||||
namespace = namespace or lib.unique_namespace(
|
||||
asset_name + "_",
|
||||
prefix="_" if asset_name[0].isdigit() else "",
|
||||
suffix="_",
|
||||
)
|
||||
|
||||
# Root group
|
||||
label = "{}:{}".format(namespace, name)
|
||||
root = cmds.group(name=label, empty=True)
|
||||
|
||||
# Create VR
|
||||
grid_node = cmds.createNode("VRayVolumeGrid",
|
||||
name="{}VVGShape".format(label),
|
||||
parent=root)
|
||||
|
||||
# Set attributes
|
||||
cmds.setAttr("{}.inFile".format(grid_node), self.fname, type="string")
|
||||
cmds.setAttr("{}.inReadOffset".format(grid_node),
|
||||
version["startFrames"])
|
||||
|
||||
nodes = [root, grid_node]
|
||||
self[:] = nodes
|
||||
|
||||
return containerise(
|
||||
name=name,
|
||||
namespace=namespace,
|
||||
nodes=nodes,
|
||||
context=context,
|
||||
loader=self.__class__.__name__)
|
||||
|
|
@ -284,6 +284,8 @@ class YetiCacheLoader(api.Loader):
|
|||
|
||||
# Apply attributes to pgYetiMaya node
|
||||
for attr, value in attributes.items():
|
||||
if value is None:
|
||||
continue
|
||||
lib.set_attribute(attr, value, yeti_node)
|
||||
|
||||
# Fix for : YETI-6
|
||||
|
|
|
|||
|
|
@ -29,8 +29,11 @@ class CollectAnimationOutputGeometry(pyblish.api.InstancePlugin):
|
|||
out_set = next((i for i in instance.data["setMembers"] if
|
||||
i.endswith("out_SET")), None)
|
||||
|
||||
assert out_set, ("Expecting out_SET for instance of family"
|
||||
" '%s'" % family)
|
||||
if out_set is None:
|
||||
warning = "Expecting out_SET for instance of family '%s'" % family
|
||||
self.log.warning(warning)
|
||||
return
|
||||
|
||||
members = cmds.ls(cmds.sets(out_set, query=True), long=True)
|
||||
|
||||
# Get all the relatives of the members
|
||||
|
|
|
|||
|
|
@ -102,6 +102,13 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
parents = self.get_all_parents(members)
|
||||
members_hierarchy = list(set(members + children + parents))
|
||||
|
||||
# Create the instance
|
||||
instance = context.create_instance(objset)
|
||||
instance[:] = members_hierarchy
|
||||
|
||||
# Store the exact members of the object set
|
||||
instance.data["setMembers"] = members
|
||||
|
||||
# Define nice label
|
||||
name = cmds.ls(objset, long=False)[0] # use short name
|
||||
label = "{0} ({1})".format(name,
|
||||
|
|
@ -112,10 +119,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
label += " [{0}-{1}]".format(int(data["startFrame"]),
|
||||
int(data["endFrame"]))
|
||||
|
||||
# Create the instance
|
||||
instance = context.create_instance(label)
|
||||
instance[:] = members_hierarchy
|
||||
instance.data["setMembers"] = members
|
||||
instance.data["label"] = label
|
||||
|
||||
instance.data.update(data)
|
||||
|
||||
# Produce diagnostic message for any graphical
|
||||
|
|
|
|||
|
|
@ -1,7 +1,10 @@
|
|||
import re
|
||||
import os
|
||||
import glob
|
||||
|
||||
from maya import cmds
|
||||
import pyblish.api
|
||||
import colorbleed.maya.lib as lib
|
||||
from cb.utils.maya import context, shaders
|
||||
|
||||
SHAPE_ATTRS = ["castsShadows",
|
||||
"receiveShadows",
|
||||
|
|
@ -48,6 +51,139 @@ def get_look_attrs(node):
|
|||
return result
|
||||
|
||||
|
||||
def node_uses_image_sequence(node):
|
||||
"""Return whether file node uses an image sequence or single image.
|
||||
|
||||
Determine if a node uses an image sequence or just a single image,
|
||||
not always obvious from its file path alone.
|
||||
|
||||
Args:
|
||||
node (str): Name of the Maya node
|
||||
|
||||
Returns:
|
||||
bool: True if node uses an image sequence
|
||||
|
||||
"""
|
||||
|
||||
# useFrameExtension indicates an explicit image sequence
|
||||
node_path = get_file_node_path(node).lower()
|
||||
|
||||
# The following tokens imply a sequence
|
||||
patterns = ["<udim>", "<tile>", "<uvtile>", "u<u>_v<v>", "<frame0"]
|
||||
|
||||
return (cmds.getAttr('%s.useFrameExtension' % node) or
|
||||
any(pattern in node_path for pattern in patterns))
|
||||
|
||||
|
||||
def seq_to_glob(path):
|
||||
"""Takes an image sequence path and returns it in glob format,
|
||||
with the frame number replaced by a '*'.
|
||||
|
||||
Image sequences may be numerical sequences, e.g. /path/to/file.1001.exr
|
||||
will return as /path/to/file.*.exr.
|
||||
|
||||
Image sequences may also use tokens to denote sequences, e.g.
|
||||
/path/to/texture.<UDIM>.tif will return as /path/to/texture.*.tif.
|
||||
|
||||
Args:
|
||||
path (str): the image sequence path
|
||||
|
||||
Returns:
|
||||
str: Return glob string that matches the filename pattern.
|
||||
|
||||
"""
|
||||
|
||||
if path is None:
|
||||
return path
|
||||
|
||||
# If any of the patterns, convert the pattern
|
||||
patterns = {
|
||||
"<udim>": "<udim>",
|
||||
"<tile>": "<tile>",
|
||||
"<uvtile>": "<uvtile>",
|
||||
"#": "#",
|
||||
"u<u>_v<v>": "<u>|<v>",
|
||||
"<frame0": "<frame0\d+>",
|
||||
"<f>": "<f>"
|
||||
}
|
||||
|
||||
lower = path.lower()
|
||||
has_pattern = False
|
||||
for pattern, regex_pattern in patterns.items():
|
||||
if pattern in lower:
|
||||
path = re.sub(regex_pattern, "*", path, flags=re.IGNORECASE)
|
||||
has_pattern = True
|
||||
|
||||
if has_pattern:
|
||||
return path
|
||||
|
||||
base = os.path.basename(path)
|
||||
matches = list(re.finditer(r'\d+', base))
|
||||
if matches:
|
||||
match = matches[-1]
|
||||
new_base = '{0}*{1}'.format(base[:match.start()],
|
||||
base[match.end():])
|
||||
head = os.path.dirname(path)
|
||||
return os.path.join(head, new_base)
|
||||
else:
|
||||
return path
|
||||
|
||||
|
||||
def get_file_node_path(node):
|
||||
"""Get the file path used by a Maya file node.
|
||||
|
||||
Args:
|
||||
node (str): Name of the Maya file node
|
||||
|
||||
Returns:
|
||||
str: the file path in use
|
||||
|
||||
"""
|
||||
# if the path appears to be sequence, use computedFileTextureNamePattern,
|
||||
# this preserves the <> tag
|
||||
if cmds.attributeQuery('computedFileTextureNamePattern',
|
||||
node=node,
|
||||
exists=True):
|
||||
plug = '{0}.computedFileTextureNamePattern'.format(node)
|
||||
texture_pattern = cmds.getAttr(plug)
|
||||
|
||||
patterns = ["<udim>",
|
||||
"<tile>",
|
||||
"u<u>_v<v>",
|
||||
"<f>",
|
||||
"<frame0",
|
||||
"<uvtile>"]
|
||||
lower = texture_pattern.lower()
|
||||
if any(pattern in lower for pattern in patterns):
|
||||
return texture_pattern
|
||||
|
||||
# otherwise use fileTextureName
|
||||
return cmds.getAttr('{0}.fileTextureName'.format(node))
|
||||
|
||||
|
||||
def get_file_node_files(node):
|
||||
"""Return the file paths related to the file node
|
||||
|
||||
Note:
|
||||
Will only return existing files. Returns an empty list
|
||||
if no valid existing files are linked.
|
||||
|
||||
Returns:
|
||||
list: List of full file paths.
|
||||
|
||||
"""
|
||||
|
||||
path = get_file_node_path(node)
|
||||
path = cmds.workspace(expandName=path)
|
||||
if node_uses_image_sequence(node):
|
||||
glob_pattern = seq_to_glob(path)
|
||||
return glob.glob(glob_pattern)
|
||||
elif os.path.exists(path):
|
||||
return [path]
|
||||
else:
|
||||
return []
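A small sketch of what seq_to_glob() produces for the kinds of sequences described above (the paths are made up for illustration):

    print(seq_to_glob("/path/to/file.1001.exr"))       # /path/to/file.*.exr
    print(seq_to_glob("/path/to/texture.<UDIM>.tif"))  # /path/to/texture.*.tif
    print(seq_to_glob("/path/to/single.png"))          # unchanged, no digits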
|
||||
|
||||
|
||||
class CollectLook(pyblish.api.InstancePlugin):
|
||||
"""Collect look data for instance.
|
||||
|
||||
|
|
@ -74,7 +210,7 @@ class CollectLook(pyblish.api.InstancePlugin):
|
|||
def process(self, instance):
|
||||
"""Collect the Look in the instance with the correct layer settings"""
|
||||
|
||||
with context.renderlayer(instance.data["renderlayer"]):
|
||||
with lib.renderlayer(instance.data["renderlayer"]):
|
||||
self.collect(instance)
|
||||
|
||||
def collect(self, instance):
|
||||
|
|
@ -268,7 +404,7 @@ class CollectLook(pyblish.api.InstancePlugin):
|
|||
# paths as the computed patterns
|
||||
source = source.replace("\\", "/")
|
||||
|
||||
files = shaders.get_file_node_files(node)
|
||||
files = get_file_node_files(node)
|
||||
if len(files) == 0:
|
||||
self.log.error("No valid files found from node `%s`" % node)
|
||||
|
||||
|
|
|
|||
|
|
@ -41,9 +41,9 @@ class CollectRenderLayerAOVS(pyblish.api.InstancePlugin):
|
|||
|
||||
self.log.info("Renderer found: {}".format(renderer))
|
||||
|
||||
rp_node_types = {"vray": "VRayRenderElement",
|
||||
"arnold": "aiAOV",
|
||||
"redshift": "RedshiftAOV"}
|
||||
rp_node_types = {"vray": ["VRayRenderElement", "VRayRenderElementSet"],
|
||||
"arnold": ["aiAOV"],
|
||||
"redshift": ["RedshiftAOV"]}
|
||||
|
||||
if renderer not in rp_node_types.keys():
|
||||
self.log.error("Unsupported renderer found: '{}'".format(renderer))
|
||||
|
|
@ -52,7 +52,8 @@ class CollectRenderLayerAOVS(pyblish.api.InstancePlugin):
|
|||
result = []
|
||||
|
||||
# Collect all AOVs / Render Elements
|
||||
with lib.renderlayer(instance.name):
|
||||
layer = instance.data["setMembers"]
|
||||
with lib.renderlayer(layer):
|
||||
|
||||
node_type = rp_node_types[renderer]
|
||||
render_elements = cmds.ls(type=node_type)
|
||||
|
|
@ -64,32 +65,36 @@ class CollectRenderLayerAOVS(pyblish.api.InstancePlugin):
|
|||
continue
|
||||
|
||||
pass_name = self.get_pass_name(renderer, element)
|
||||
render_pass = "%s.%s" % (instance.name, pass_name)
|
||||
render_pass = "%s.%s" % (instance.data["subset"], pass_name)
|
||||
|
||||
result.append(render_pass)
|
||||
|
||||
self.log.info("Found {} render elements / AOVs for "
|
||||
"'{}'".format(len(result), instance.name))
|
||||
"'{}'".format(len(result), instance.data["subset"]))
|
||||
|
||||
instance.data["renderPasses"] = result
|
||||
|
||||
def get_pass_name(self, renderer, node):
|
||||
|
||||
if renderer == "vray":
|
||||
|
||||
# Get render element pass type
|
||||
vray_node_attr = next(attr for attr in cmds.listAttr(node)
|
||||
if attr.startswith("vray_name"))
|
||||
|
||||
pass_type = vray_node_attr.rsplit("_", 1)[-1]
|
||||
|
||||
# Support V-Ray extratex explicit name (if set by user)
|
||||
if pass_type == "extratex":
|
||||
vray_node_attr = "vray_explicit_name_extratex"
|
||||
explicit_attr = "{}.vray_explicit_name_extratex".format(node)
|
||||
explicit_name = cmds.getAttr(explicit_attr)
|
||||
if explicit_name:
|
||||
return explicit_name
|
||||
|
||||
# Node type is in the attribute name but we need to check if value
|
||||
# of the attribute as it can be changed
|
||||
pass_name = cmds.getAttr("{}.{}".format(node, vray_node_attr))
|
||||
return cmds.getAttr("{}.{}".format(node, vray_node_attr))
|
||||
|
||||
elif renderer in ["arnold", "redshift"]:
|
||||
pass_name = cmds.getAttr("{}.name".format(node))
|
||||
return cmds.getAttr("{}.name".format(node))
|
||||
else:
|
||||
raise RuntimeError("Unsupported renderer: '{}'".format(renderer))
|
||||
|
||||
return pass_name
|
||||
|
|
@ -103,7 +103,13 @@ class CollectMayaRenderlayers(pyblish.api.ContextPlugin):
|
|||
overrides = self.parse_options(render_globals)
|
||||
data.update(**overrides)
|
||||
|
||||
# Define nice label
|
||||
label = "{0} ({1})".format(layername, data["asset"])
|
||||
label += " [{0}-{1}]".format(int(data["startFrame"]),
|
||||
int(data["endFrame"]))
|
||||
|
||||
instance = context.create_instance(layername)
|
||||
instance.data["label"] = label
|
||||
instance.data.update(data)
|
||||
|
||||
def get_render_attribute(self, attr):
|
||||
|
|
|
|||
|
|
@ -26,20 +26,22 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
|
|||
|
||||
def process(self, instance):
|
||||
|
||||
assert "input_SET" in cmds.sets(instance.name, query=True), (
|
||||
assert "input_SET" in instance.data["setMembers"], (
|
||||
"Yeti Rig must have an input_SET")
|
||||
|
||||
# Get the input meshes information
|
||||
input_content = cmds.sets("input_SET", query=True)
|
||||
input_nodes = cmds.listRelatives(input_content,
|
||||
allDescendents=True,
|
||||
fullPath=True) or input_content
|
||||
input_content = cmds.ls(cmds.sets("input_SET", query=True), long=True)
|
||||
|
||||
# Get all the shapes
|
||||
input_shapes = cmds.ls(input_nodes, long=True, noIntermediate=True)
|
||||
# Include children
|
||||
input_content += cmds.listRelatives(input_content,
|
||||
allDescendents=True,
|
||||
fullPath=True) or []
|
||||
|
||||
# Ignore intermediate objects
|
||||
input_content = cmds.ls(input_content, long=True, noIntermediate=True)
|
||||
|
||||
# Store all connections
|
||||
connections = cmds.listConnections(input_shapes,
|
||||
connections = cmds.listConnections(input_content,
|
||||
source=True,
|
||||
destination=False,
|
||||
connections=True,
|
||||
|
|
@ -62,10 +64,9 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
|
|||
|
||||
# Collect any textures if used
|
||||
yeti_resources = []
|
||||
yeti_nodes = cmds.ls(instance[:], type="pgYetiMaya")
|
||||
yeti_nodes = cmds.ls(instance[:], type="pgYetiMaya", long=True)
|
||||
for node in yeti_nodes:
|
||||
# Get Yeti resources (textures)
|
||||
# TODO: referenced files in Yeti Graph
|
||||
resources = self.get_yeti_resources(node)
|
||||
yeti_resources.extend(resources)
|
||||
|
||||
|
|
@ -78,11 +79,16 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
|
|||
instance.data["endFrame"] = 1
|
||||
|
||||
def get_yeti_resources(self, node):
|
||||
"""Get all texture file paths
|
||||
"""Get all resource file paths
|
||||
|
||||
If a texture is a sequence it gathers all sibling files to ensure
|
||||
the texture sequence is complete.
|
||||
|
||||
References can be used in the Yeti graph, which means it is
possible to load previously cached files. The information will need
to be stored and, if the file is not published, copied to the resource
folder.
|
||||
|
||||
Args:
|
||||
node (str): node name of the pgYetiMaya node
|
||||
|
||||
|
|
@ -91,15 +97,25 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
|
|||
"""
|
||||
resources = []
|
||||
image_search_path = cmds.getAttr("{}.imageSearchPath".format(node))
|
||||
|
||||
# List all related textures
|
||||
texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
|
||||
self.log.info("Found %i texture(s)" % len(texture_filenames))
|
||||
|
||||
# Get all reference nodes
|
||||
reference_nodes = cmds.pgYetiGraph(node,
|
||||
listNodes=True,
|
||||
type="reference")
|
||||
self.log.info("Found %i reference node(s)" % len(reference_nodes))
|
||||
|
||||
if texture_filenames and not image_search_path:
|
||||
raise ValueError("pgYetiMaya node '%s' is missing the path to the "
|
||||
"files in the 'imageSearchPath "
|
||||
"atttribute'" % node)
|
||||
|
||||
# Collect all texture files
|
||||
for texture in texture_filenames:
|
||||
node_resources = {"files": [], "source": texture, "node": node}
|
||||
item = {"files": [], "source": texture, "node": node}
|
||||
texture_filepath = os.path.join(image_search_path, texture)
|
||||
if len(texture.split(".")) > 2:
|
||||
|
||||
|
|
@ -107,20 +123,46 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
|
|||
if "<UDIM>" in texture:
|
||||
sequences = self.get_sequence(texture_filepath,
|
||||
pattern="<UDIM>")
|
||||
node_resources["files"].extend(sequences)
|
||||
item["files"].extend(sequences)
|
||||
|
||||
# Based textures (animated masks f.e)
|
||||
elif "%04d" in texture:
|
||||
sequences = self.get_sequence(texture_filepath,
|
||||
pattern="%04d")
|
||||
node_resources["files"].extend(sequences)
|
||||
item["files"].extend(sequences)
|
||||
# Assuming it is a fixed name
|
||||
else:
|
||||
node_resources["files"].append(texture_filepath)
|
||||
item["files"].append(texture_filepath)
|
||||
else:
|
||||
node_resources["files"].append(texture_filepath)
|
||||
item["files"].append(texture_filepath)
|
||||
|
||||
resources.append(node_resources)
|
||||
resources.append(item)
|
||||
|
||||
# Collect all referenced files
|
||||
for reference_node in reference_nodes:
|
||||
ref_file = cmds.pgYetiGraph(node,
|
||||
node=reference_node,
|
||||
param="reference_file",
|
||||
getParamValue=True)
|
||||
|
||||
if not os.path.isfile(ref_file):
|
||||
raise RuntimeError("Reference file must be a full file path!")
|
||||
|
||||
# Create resource dict
|
||||
item = {"files": [],
|
||||
"source": ref_file,
|
||||
"node": node,
|
||||
"graphnode": reference_node,
|
||||
"param": "reference_file"}
|
||||
|
||||
ref_file_name = os.path.basename(ref_file)
|
||||
if "%04d" in ref_file_name:
|
||||
ref_files = self.get_sequence(ref_file)
|
||||
item["files"].extend(ref_files)
|
||||
else:
|
||||
item["files"].append(ref_file)
|
||||
|
||||
resources.append(item)
|
||||
|
||||
return resources
|
||||
|
||||
|
|
@ -139,7 +181,6 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
|
|||
list: file sequence.
|
||||
|
||||
"""
|
||||
|
||||
from avalon.vendor import clique
|
||||
|
||||
escaped = re.escape(filename)
|
||||
|
|
@ -150,7 +191,6 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
|
|||
if re.match(re_pattern, f)]
|
||||
|
||||
pattern = [clique.PATTERNS["frames"]]
|
||||
collection, remainder = clique.assemble(files,
|
||||
patterns=pattern)
|
||||
collection, remainder = clique.assemble(files, patterns=pattern)
|
||||
|
||||
return collection
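A hedged example of the clique call used above (clique is vendored with avalon as avalon.vendor.clique; the standalone package behaves the same):

    import clique

    files = ["cache.1001.vdb", "cache.1002.vdb", "cache.1003.vdb"]
    collections, _remainder = clique.assemble(
        files, patterns=[clique.PATTERNS["frames"]])
    print(collections[0])   # roughly: cache.%04d.vdb [1001-1003]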
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ from maya import cmds
|
|||
import avalon.maya
|
||||
import colorbleed.api
|
||||
|
||||
import cb.utils.maya.context as context
|
||||
import colorbleed.maya.lib as lib
|
||||
|
||||
|
||||
class ExtractCameraAlembic(colorbleed.api.Extractor):
|
||||
|
|
@ -66,8 +66,8 @@ class ExtractCameraAlembic(colorbleed.api.Extractor):
|
|||
|
||||
job_str += ' -file "{0}"'.format(path)
|
||||
|
||||
with context.evaluation("off"):
|
||||
with context.no_refresh():
|
||||
with lib.evaluation("off"):
|
||||
with lib.no_refresh():
|
||||
cmds.AbcExport(j=job_str, verbose=False)
|
||||
|
||||
if "files" not in instance.data:
|
||||
|
|
|
|||
|
|
@ -4,9 +4,8 @@ from maya import cmds
|
|||
|
||||
import avalon.maya
|
||||
import colorbleed.api
|
||||
|
||||
import cb.utils.maya.context as context
|
||||
from cb.utils.maya.animation import bakeToWorldSpace
|
||||
from colorbleed.lib import grouper
|
||||
from colorbleed.maya import lib
|
||||
|
||||
|
||||
def massage_ma_file(path):
|
||||
|
|
@ -35,6 +34,37 @@ def massage_ma_file(path):
|
|||
f.close()
|
||||
|
||||
|
||||
def unlock(plug):
|
||||
"""Unlocks attribute and disconnects inputs for a plug.
|
||||
|
||||
This will also recursively unlock the attribute
|
||||
upwards to any parent attributes for compound
|
||||
attributes, to ensure it's fully unlocked and free
|
||||
to change the value.
|
||||
|
||||
"""
|
||||
node, attr = plug.rsplit(".", 1)
|
||||
|
||||
# Unlock attribute
|
||||
cmds.setAttr(plug, lock=False)
|
||||
|
||||
# Also unlock any parent attribute (if compound)
|
||||
parents = cmds.attributeQuery(attr, node=node, listParent=True)
|
||||
if parents:
|
||||
for parent in parents:
|
||||
unlock("{0}.{1}".format(node, parent))
|
||||
|
||||
# Break incoming connections
|
||||
connections = cmds.listConnections(plug,
|
||||
source=True,
|
||||
destination=False,
|
||||
plugs=True,
|
||||
connections=True)
|
||||
if connections:
|
||||
for destination, source in grouper(connections, 2):
|
||||
cmds.disconnectAttr(source, destination)
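A hypothetical Maya-session usage of unlock(); the plug name is an assumption:

    from maya import cmds

    plug = "cameraShape1.backgroundColorR"
    unlock(plug)                 # frees locks and incoming connections
    cmds.setAttr(plug, 0.0)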
|
||||
|
||||
|
||||
class ExtractCameraMayaAscii(colorbleed.api.Extractor):
|
||||
"""Extract a Camera as Maya Ascii.
|
||||
|
||||
|
|
@ -67,8 +97,8 @@ class ExtractCameraMayaAscii(colorbleed.api.Extractor):
|
|||
# TODO: Implement a bake to non-world space
|
||||
# Currently it will always bake the resulting camera to world-space
|
||||
# and it does not allow to include the parent hierarchy, even though
|
||||
# with `bakeToWorldSpace` set to False it should include its hierarchy
|
||||
# to be correct with the family implementation.
|
||||
# with `bakeToWorldSpace` set to False it should include its
|
||||
# hierarchy to be correct with the family implementation.
|
||||
if not bake_to_worldspace:
|
||||
self.log.warning("Camera (Maya Ascii) export only supports world"
|
||||
"space baked camera extractions. The disabled "
|
||||
|
|
@ -96,17 +126,30 @@ class ExtractCameraMayaAscii(colorbleed.api.Extractor):
|
|||
# Perform extraction
|
||||
self.log.info("Performing camera bakes for: {0}".format(transform))
|
||||
with avalon.maya.maintained_selection():
|
||||
with context.evaluation("off"):
|
||||
with context.no_refresh():
|
||||
baked = bakeToWorldSpace(transform,
|
||||
frameRange=range_with_handles,
|
||||
step=step)
|
||||
with lib.evaluation("off"):
|
||||
with lib.no_refresh():
|
||||
baked = lib.bake_to_world_space(
|
||||
transform,
|
||||
frame_range=range_with_handles,
|
||||
step=step
|
||||
)
|
||||
baked_shapes = cmds.ls(baked,
|
||||
type="camera",
|
||||
dag=True,
|
||||
shapes=True,
|
||||
long=True)
|
||||
|
||||
# Fix PLN-178: Don't allow background color to be non-black
|
||||
for cam in baked_shapes:
|
||||
attrs = {"backgroundColorR": 0.0,
|
||||
"backgroundColorG": 0.0,
|
||||
"backgroundColorB": 0.0,
|
||||
"overscan": 1.0}
|
||||
for attr, value in attrs.items():
|
||||
plug = "{0}.{1}".format(cam, attr)
|
||||
unlock(plug)
|
||||
cmds.setAttr(plug, value)
|
||||
|
||||
self.log.info("Performing extraction..")
|
||||
cmds.select(baked_shapes, noExpand=True)
|
||||
cmds.file(path,
|
||||
|
|
|
|||
|
|
@ -6,10 +6,9 @@ from maya import cmds
|
|||
|
||||
import pyblish.api
|
||||
import avalon.maya
|
||||
import colorbleed.api
|
||||
import colorbleed.maya.lib as maya
|
||||
|
||||
from cb.utils.maya import context
|
||||
import colorbleed.api
|
||||
import colorbleed.maya.lib as lib
|
||||
|
||||
|
||||
class ExtractLook(colorbleed.api.Extractor):
|
||||
|
|
@ -63,10 +62,10 @@ class ExtractLook(colorbleed.api.Extractor):
|
|||
|
||||
# Extract in correct render layer
|
||||
layer = instance.data.get("renderlayer", "defaultRenderLayer")
|
||||
with context.renderlayer(layer):
|
||||
with lib.renderlayer(layer):
|
||||
# TODO: Ensure membership edits don't become renderlayer overrides
|
||||
with context.empty_sets(sets, force=True):
|
||||
with maya.attribute_values(remap):
|
||||
with lib.empty_sets(sets, force=True):
|
||||
with lib.attribute_values(remap):
|
||||
with avalon.maya.maintained_selection():
|
||||
cmds.select(sets, noExpand=True)
|
||||
cmds.file(maya_path,
|
||||
|
|
|
|||
|
|
@ -4,8 +4,7 @@ from maya import cmds
|
|||
|
||||
import avalon.maya
|
||||
import colorbleed.api
|
||||
|
||||
from cb.utils.maya import context
|
||||
import colorbleed.maya.lib as lib
|
||||
|
||||
|
||||
class ExtractModel(colorbleed.api.Extractor):
|
||||
|
|
@ -47,15 +46,15 @@ class ExtractModel(colorbleed.api.Extractor):
|
|||
noIntermediate=True,
|
||||
long=True)
|
||||
|
||||
with context.no_display_layers(instance):
|
||||
with context.displaySmoothness(members,
|
||||
divisionsU=0,
|
||||
divisionsV=0,
|
||||
pointsWire=4,
|
||||
pointsShaded=1,
|
||||
polygonObject=1):
|
||||
with context.shader(members,
|
||||
shadingEngine="initialShadingGroup"):
|
||||
with lib.no_display_layers(instance):
|
||||
with lib.displaySmoothness(members,
|
||||
divisionsU=0,
|
||||
divisionsV=0,
|
||||
pointsWire=4,
|
||||
pointsShaded=1,
|
||||
polygonObject=1):
|
||||
with lib.shader(members,
|
||||
shadingEngine="initialShadingGroup"):
|
||||
with avalon.maya.maintained_selection():
|
||||
cmds.select(members, noExpand=True)
|
||||
cmds.file(path,
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ import colorbleed.maya.lib as maya
|
|||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def disconnected_attributes(settings, members):
|
||||
def disconnect_plugs(settings, members):
|
||||
|
||||
members = cmds.ls(members, long=True)
|
||||
original_connections = []
|
||||
|
|
@ -19,35 +19,32 @@ def disconnected_attributes(settings, members):
|
|||
|
||||
# Get source shapes
|
||||
source_nodes = lib.lsattr("cbId", input["sourceID"])
|
||||
sources = [i for i in source_nodes if
|
||||
not cmds.referenceQuery(i, isNodeReferenced=True)
|
||||
and i in members]
|
||||
try:
|
||||
source = sources[0]
|
||||
except IndexError:
|
||||
print("source_id:", input["sourceID"])
|
||||
if not source_nodes:
|
||||
continue
|
||||
|
||||
source = next(s for s in source_nodes if s not in members)
|
||||
|
||||
# Get destination shapes (the shapes used as hook up)
|
||||
destination_nodes = lib.lsattr("cbId", input["destinationID"])
|
||||
destinations = [i for i in destination_nodes if i not in members
|
||||
and i not in sources]
|
||||
destination = destinations[0]
|
||||
destination = next(i for i in destination_nodes if i in members)
|
||||
|
||||
# Break connection
|
||||
# Create full connection
|
||||
connections = input["connections"]
|
||||
src_attribute = "%s.%s" % (source, connections[0])
|
||||
dst_attribute = "%s.%s" % (destination, connections[1])
|
||||
|
||||
# store connection pair
|
||||
# Check if there is an actual connection
|
||||
if not cmds.isConnected(src_attribute, dst_attribute):
|
||||
print("No connection between %s and %s" % (
|
||||
src_attribute, dst_attribute))
|
||||
continue
|
||||
|
||||
# Break and store connection
|
||||
cmds.disconnectAttr(src_attribute, dst_attribute)
|
||||
original_connections.append([src_attribute, dst_attribute])
|
||||
yield
|
||||
finally:
|
||||
# restore connections
|
||||
# Restore previous connections
|
||||
for connection in original_connections:
|
||||
try:
|
||||
cmds.connectAttr(connection[0], connection[1])
|
||||
|
|
@ -56,13 +53,43 @@ def disconnected_attributes(settings, members):
|
|||
continue
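A minimal Maya-session sketch of the break-and-restore pattern used in this context manager; the plug names are assumptions:

    from maya import cmds

    src = "pSphereShape1.outMesh"
    dst = "polySmooth1.inputPolymesh"
    if cmds.isConnected(src, dst):
        cmds.disconnectAttr(src, dst)
        try:
            pass   # do the export work
        finally:
            cmds.connectAttr(src, dst)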
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def yetigraph_attribute_values(assumed_destination, resources):
|
||||
|
||||
try:
|
||||
for resource in resources:
|
||||
if "graphnode" not in resource:
|
||||
continue
|
||||
|
||||
fname = os.path.basename(resource["source"])
|
||||
new_fpath = os.path.join(assumed_destination, fname)
|
||||
new_fpath = new_fpath.replace("\\", "/")
|
||||
|
||||
try:
|
||||
cmds.pgYetiGraph(resource["node"],
|
||||
node=resource["graphnode"],
|
||||
param=resource["param"],
|
||||
setParamValueString=new_fpath)
|
||||
except Exception as exc:
|
||||
print(">>> Exception:", exc)
|
||||
yield
|
||||
|
||||
finally:
|
||||
for resource in resources:
|
||||
if "graphnode" not in resources:
|
||||
continue
|
||||
|
||||
try:
|
||||
cmds.pgYetiGraph(resource["node"],
|
||||
node=resource["graphnode"],
|
||||
param=resource["param"],
|
||||
setParamValue=resource["source"])
|
||||
except RuntimeError:
|
||||
pass
|
||||
|
||||
|
||||
class ExtractYetiRig(colorbleed.api.Extractor):
|
||||
"""Produce an alembic of just point positions and normals.
|
||||
|
||||
Positions and normals are preserved, but nothing more,
|
||||
for plain and predictable point caches.
|
||||
|
||||
"""
|
||||
"""Extract the Yeti rig to a MayaAscii and write the Yeti rig data"""
|
||||
|
||||
label = "Extract Yeti Rig"
|
||||
hosts = ["maya"]
|
||||
|
|
@ -83,44 +110,49 @@ class ExtractYetiRig(colorbleed.api.Extractor):
|
|||
|
||||
self.log.info("Writing metadata file")
|
||||
|
||||
image_search_path = ""
|
||||
# Create assumed destination folder for imageSearchPath
|
||||
assumed_temp_data = instance.data["assumedTemplateData"]
|
||||
template = instance.data["template"]
|
||||
template_formatted = template.format(**assumed_temp_data)
|
||||
|
||||
destination_folder = os.path.dirname(template_formatted)
|
||||
|
||||
image_search_path = os.path.join(destination_folder, "resources")
|
||||
image_search_path = os.path.normpath(image_search_path)
|
||||
|
||||
settings = instance.data.get("rigsettings", None)
|
||||
if settings is not None:
|
||||
|
||||
# Create assumed destination folder for imageSearchPath
|
||||
assumed_temp_data = instance.data["assumedTemplateData"]
|
||||
template = instance.data["template"]
|
||||
template_formatted = template.format(**assumed_temp_data)
|
||||
|
||||
destination_folder = os.path.dirname(template_formatted)
|
||||
image_search_path = os.path.join(destination_folder, "resources")
|
||||
image_search_path = os.path.normpath(image_search_path)
|
||||
|
||||
if settings:
|
||||
settings["imageSearchPath"] = image_search_path
|
||||
with open(settings_path, "w") as fp:
|
||||
json.dump(settings, fp, ensure_ascii=False)
|
||||
|
||||
# Ensure the imageSearchPath is being remapped to the publish folder
|
||||
attr_value = {"%s.imageSearchPath" % n: str(image_search_path) for
|
||||
n in yeti_nodes}
|
||||
|
||||
# Get input_SET members
|
||||
input_set = [i for i in instance if i == "input_SET"]
|
||||
input_set = next(i for i in instance if i == "input_SET")
|
||||
|
||||
# Get all items
|
||||
set_members = cmds.sets(input_set[0], query=True)
|
||||
members = cmds.listRelatives(set_members, ad=True, fullPath=True) or []
|
||||
members += cmds.ls(set_members, long=True)
|
||||
set_members = cmds.sets(input_set, query=True)
|
||||
set_members += cmds.listRelatives(set_members,
|
||||
allDescendents=True,
|
||||
fullPath=True) or []
|
||||
members = cmds.ls(set_members, long=True)
|
||||
|
||||
nodes = instance.data["setMembers"]
|
||||
with disconnected_attributes(settings, members):
|
||||
with maya.attribute_values(attr_value):
|
||||
cmds.select(nodes, noExpand=True)
|
||||
cmds.file(maya_path,
|
||||
force=True,
|
||||
exportSelected=True,
|
||||
typ="mayaAscii",
|
||||
preserveReferences=False,
|
||||
constructionHistory=True,
|
||||
shader=False)
|
||||
resources = instance.data.get("resources", {})
|
||||
with disconnect_plugs(settings, members):
|
||||
with yetigraph_attribute_values(destination_folder, resources):
|
||||
with maya.attribute_values(attr_value):
|
||||
cmds.select(nodes, noExpand=True)
|
||||
cmds.file(maya_path,
|
||||
force=True,
|
||||
exportSelected=True,
|
||||
typ="mayaAscii",
|
||||
preserveReferences=False,
|
||||
constructionHistory=True,
|
||||
shader=False)
|
||||
|
||||
# Ensure files can be stored
|
||||
if "files" not in instance.data:
|
||||
|
|
|
|||
|
|
@ -115,7 +115,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
scene = os.path.splitext(filename)[0]
|
||||
dirname = os.path.join(workspace, "renders")
|
||||
renderlayer = instance.data['setMembers'] # rs_beauty
|
||||
renderlayer_name = instance.name # beauty
|
||||
renderlayer_name = instance.data['subset'] # beauty
|
||||
renderlayer_globals = instance.data["renderGlobals"]
|
||||
legacy_layers = renderlayer_globals["UseLegacyRenderLayers"]
|
||||
deadline_user = context.data.get("deadlineUser", getpass.getuser())
|
||||
|
|
@ -207,6 +207,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
# todo: This is a temporary fix for yeti variables
|
||||
"PEREGRINEL_LICENSE",
|
||||
"REDSHIFT_MAYAEXTENSIONSPATH",
|
||||
"REDSHIFT_DISABLEOUTPUTLOCKFILES"
|
||||
"VRAY_FOR_MAYA2018_PLUGINS_X64",
|
||||
"VRAY_PLUGINS_X64",
|
||||
"VRAY_USE_THREAD_AFFINITY",
|
||||
|
|
@ -230,7 +231,8 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
render_globals = instance.data.get("renderGlobals", {})
|
||||
payload["JobInfo"].update(render_globals)
|
||||
|
||||
self.log.info("using render plugin : {}".format(payload["JobInfo"]["Plugin"]))
|
||||
plugin = payload["JobInfo"]["Plugin"]
|
||||
self.log.info("using render plugin : {}".format(plugin))
|
||||
|
||||
self.preflight_check(instance)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import pyblish.api
|
||||
import colorbleed.api
|
||||
import colorbleed.maya.action
|
||||
|
||||
|
||||
class ValidateAnimationContent(pyblish.api.InstancePlugin):
|
||||
|
|
@ -14,10 +15,19 @@ class ValidateAnimationContent(pyblish.api.InstancePlugin):
|
|||
hosts = ["maya"]
|
||||
families = ["colorbleed.animation"]
|
||||
label = "Animation Content"
|
||||
actions = [colorbleed.api.SelectInvalidAction]
|
||||
actions = [colorbleed.maya.action.SelectInvalidAction]
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
||||
out_set = next((i for i in instance.data["setMembers"] if
|
||||
i.endswith("out_SET")), None)
|
||||
|
||||
assert out_set, ("Instance '%s' has no objectSet named: `OUT_set`. "
|
||||
"If this instance is an unloaded reference, "
|
||||
"please deactivate by toggling the 'Active' attribute"
|
||||
% instance.name)
|
||||
|
||||
assert 'out_hierarchy' in instance.data, "Missing `out_hierarchy` data"
|
||||
|
||||
# All nodes in the `out_hierarchy` must be among the nodes that are
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import maya.cmds as cmds
|
|||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
import colorbleed.maya.action
|
||||
import colorbleed.maya.lib as lib
|
||||
|
||||
|
||||
|
|
@ -19,7 +20,7 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin):
|
|||
families = ['colorbleed.animation', "colorbleed.pointcache"]
|
||||
hosts = ['maya']
|
||||
label = 'Animation Out Set Related Node Ids'
|
||||
actions = [colorbleed.api.SelectInvalidAction, colorbleed.api.RepairAction]
|
||||
actions = [colorbleed.maya.action.SelectInvalidAction, colorbleed.api.RepairAction]
|
||||
|
||||
def process(self, instance):
|
||||
"""Process all meshes"""
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ from maya import cmds
|
|||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
import colorbleed.maya.action
|
||||
|
||||
|
||||
class ValidateCameraAttributes(pyblish.api.InstancePlugin):
|
||||
|
|
@ -17,7 +18,7 @@ class ValidateCameraAttributes(pyblish.api.InstancePlugin):
|
|||
families = ['colorbleed.camera']
|
||||
hosts = ['maya']
|
||||
label = 'Camera Attributes'
|
||||
actions = [colorbleed.api.SelectInvalidAction]
|
||||
actions = [colorbleed.maya.action.SelectInvalidAction]
|
||||
|
||||
DEFAULTS = [
|
||||
("filmFitOffset", 0.0),
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ from maya import cmds
|
|||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
import colorbleed.maya.action
|
||||
|
||||
|
||||
class ValidateCameraContents(pyblish.api.InstancePlugin):
|
||||
|
|
@ -9,7 +10,7 @@ class ValidateCameraContents(pyblish.api.InstancePlugin):
|
|||
|
||||
A Camera instance may only hold a SINGLE camera's transform, nothing else.
|
||||
|
||||
It may hold a "locator" as shape, but different shapes are down the
|
||||
It may hold a "locator" as shape, but different shapes are down the
|
||||
hierarchy.
|
||||
|
||||
"""
|
||||
|
|
@ -18,7 +19,7 @@ class ValidateCameraContents(pyblish.api.InstancePlugin):
|
|||
families = ['colorbleed.camera']
|
||||
hosts = ['maya']
|
||||
label = 'Camera Contents'
|
||||
actions = [colorbleed.api.SelectInvalidAction]
|
||||
actions = [colorbleed.maya.action.SelectInvalidAction]
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
|
|
|||
|
|
@ -20,7 +20,8 @@ class ValidateFrameRange(pyblish.api.InstancePlugin):
|
|||
families = ["colorbleed.animation",
|
||||
"colorbleed.pointcache",
|
||||
"colorbleed.camera",
|
||||
"colorbleed.renderlayer"]
|
||||
"colorbleed.renderlayer",
|
||||
"oolorbleed.vrayproxy"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import pyblish.api
|
||||
import colorbleed.api
|
||||
import colorbleed.maya.action
|
||||
|
||||
|
||||
class ValidateInstanceHasMembers(pyblish.api.InstancePlugin):
|
||||
|
|
@ -8,7 +9,7 @@ class ValidateInstanceHasMembers(pyblish.api.InstancePlugin):
|
|||
order = colorbleed.api.ValidateContentsOrder
|
||||
hosts = ["maya"]
|
||||
label = 'Instance has members'
|
||||
actions = [colorbleed.api.SelectInvalidAction]
|
||||
actions = [colorbleed.maya.action.SelectInvalidAction]
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ from maya import cmds
|
|||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
import colorbleed.maya.action
|
||||
import colorbleed.maya.lib as lib
|
||||
|
||||
|
||||
|
|
@ -22,7 +23,7 @@ class ValidateJointsHidden(pyblish.api.InstancePlugin):
|
|||
category = 'rig'
|
||||
version = (0, 1, 0)
|
||||
label = "Joints Hidden"
|
||||
actions = [colorbleed.api.SelectInvalidAction,
|
||||
actions = [colorbleed.maya.action.SelectInvalidAction,
|
||||
colorbleed.api.RepairAction]
|
||||
|
||||
@staticmethod
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import pyblish.api
|
||||
import colorbleed.api
|
||||
import colorbleed.maya.action
|
||||
|
||||
|
||||
class ValidateLookContents(pyblish.api.InstancePlugin):
|
||||
|
|
@ -20,7 +21,7 @@ class ValidateLookContents(pyblish.api.InstancePlugin):
|
|||
families = ['colorbleed.look']
|
||||
hosts = ['maya']
|
||||
label = 'Look Data Contents'
|
||||
actions = [colorbleed.api.SelectInvalidAction]
|
||||
actions = [colorbleed.maya.action.SelectInvalidAction]
|
||||
|
||||
def process(self, instance):
|
||||
"""Process all the nodes in the instance"""
|
||||
|
|
|
|||
|
|
@ -0,0 +1,98 @@
|
|||
from collections import defaultdict
|
||||
from maya import cmds
|
||||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
import colorbleed.maya.action
|
||||
|
||||
|
||||
class ValidateLookIdReferenceEdits(pyblish.api.InstancePlugin):
|
||||
"""Validate nodes in look have no reference edits to cbId.
|
||||
|
||||
Note:
|
||||
This only validates the cbId edits on the referenced nodes that are
|
||||
used in the look. For example, a transform can have its cbId changed
|
||||
without being invalidated when it is not used in the look's assignment.
|
||||
|
||||
"""
|
||||
|
||||
order = colorbleed.api.ValidateContentsOrder
|
||||
families = ['colorbleed.look']
|
||||
hosts = ['maya']
|
||||
label = 'Look Id Reference Edits'
|
||||
actions = [colorbleed.maya.action.SelectInvalidAction,
|
||||
colorbleed.api.RepairAction]
|
||||
|
||||
def process(self, instance):
|
||||
invalid = self.get_invalid(instance)
|
||||
|
||||
if invalid:
|
||||
raise RuntimeError("Invalid nodes %s" % (invalid,))
|
||||
|
||||
@staticmethod
|
||||
def get_invalid(instance):
|
||||
|
||||
# Collect all referenced members
|
||||
references = defaultdict(set)
|
||||
relationships = instance.data["lookData"]["relationships"]
|
||||
for relationship in relationships.values():
|
||||
for member in relationship['members']:
|
||||
node = member["name"]
|
||||
|
||||
if cmds.referenceQuery(node, isNodeReferenced=True):
|
||||
ref = cmds.referenceQuery(node, referenceNode=True)
|
||||
references[ref].add(node)
|
||||
|
||||
# Validate whether any has changes to 'cbId' attribute
|
||||
invalid = list()
|
||||
for ref, nodes in references.items():
|
||||
edits = cmds.referenceQuery(editAttrs=True,
|
||||
editNodes=True,
|
||||
showDagPath=True,
|
||||
showNamespace=True,
|
||||
onReferenceNode=ref)
|
||||
for edit in edits:
|
||||
|
||||
# Ensure it is an attribute ending with .cbId
|
||||
# thus also ignore just node edits (like parenting)
|
||||
if not edit.endswith(".cbId"):
|
||||
continue
|
||||
|
||||
# Ensure the attribute is 'cbId' (and not a nested attribute)
|
||||
node, attr = edit.split(".", 1)
|
||||
if attr != "cbId":
|
||||
continue
|
||||
|
||||
if node in nodes:
|
||||
invalid.append(node)
|
||||
|
||||
return invalid
|
||||
|
||||
@classmethod
|
||||
def repair(cls, instance):
|
||||
|
||||
invalid = cls.get_invalid(instance)
|
||||
|
||||
# Group invalid nodes by reference node
|
||||
references = defaultdict(set)
|
||||
for node in invalid:
|
||||
ref = cmds.referenceQuery(node, referenceNode=True)
|
||||
references[ref].add(node)
|
||||
|
||||
# Remove the reference edits on the nodes per reference node
|
||||
for ref, nodes in references.items():
|
||||
for node in nodes:
|
||||
|
||||
# Somehow this only works if you run the removal
|
||||
# per edit command.
|
||||
for command in ["addAttr",
|
||||
"connectAttr",
|
||||
"deleteAttr",
|
||||
"disconnectAttr",
|
||||
"setAttr"]:
|
||||
cmds.referenceEdit("{}.cbId".format(node),
|
||||
removeEdits=True,
|
||||
successfulEdits=True,
|
||||
failedEdits=True,
|
||||
editCommand=command,
|
||||
onReferenceNode=ref)
|
||||
|
|
@ -2,6 +2,7 @@ from collections import defaultdict
|
|||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
import colorbleed.maya.action
|
||||
|
||||
|
||||
class ValidateUniqueRelationshipMembers(pyblish.api.InstancePlugin):
|
||||
|
|
@ -24,8 +25,8 @@ class ValidateUniqueRelationshipMembers(pyblish.api.InstancePlugin):
|
|||
hosts = ['maya']
|
||||
families = ['colorbleed.look']
|
||||
|
||||
actions = [colorbleed.api.SelectInvalidAction,
|
||||
colorbleed.api.GenerateUUIDsOnInvalidAction]
|
||||
actions = [colorbleed.maya.action.SelectInvalidAction,
|
||||
colorbleed.maya.action.GenerateUUIDsOnInvalidAction]
|
||||
|
||||
def process(self, instance):
|
||||
"""Process all meshes"""
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ from maya import cmds
|
|||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
import colorbleed.maya.action
|
||||
|
||||
|
||||
class ValidateLookNoDefaultShaders(pyblish.api.InstancePlugin):
|
||||
|
|
@ -26,7 +27,7 @@ class ValidateLookNoDefaultShaders(pyblish.api.InstancePlugin):
|
|||
families = ['colorbleed.look']
|
||||
hosts = ['maya']
|
||||
label = 'Look No Default Shaders'
|
||||
actions = [colorbleed.api.SelectInvalidAction]
|
||||
actions = [colorbleed.maya.action.SelectInvalidAction]
|
||||
|
||||
DEFAULT_SHADERS = {"lambert1", "initialShadingGroup",
|
||||
"initialParticleSE", "particleCloud1"}
|
||||
|
|
|
|||
|
|
@ -1,33 +1,38 @@
import colorbleed.maya.action
from colorbleed.maya import lib

import pyblish.api
import colorbleed.api

from cb.utils.maya import context


class ValidateLookSets(pyblish.api.InstancePlugin):
    """Validate if any sets are missing from the instance and look data

    A node might have a relationship with a shader but has no Colorbleed ID.
    A shader can be assigned to a node that is missing a Colorbleed ID.
    Because it is missing the ID it has not been collected in the instance.
    This validator ensures no relationships and thus considers it invalid
    if a relationship was not collected.

    When the relationship needs to be maintained the artist might need to
    create a different* relationship or ensure the node has the Colorbleed ID.

    * The relationship might be too broad (assigned to top node if hierarchy).
    *The relationship might be too broad (assigned to top node of hierarchy).
    This can be countered by creating the relationship on the shape or its
    transform.
    In essence, ensure item the shader is assigned to has the Colorbleed ID!
    transform. In essence, ensure item the shader is assigned to has the
    Colorbleed ID!

    Displacement shaders:
        Ensure all geometry is added to the displacement objectSet.
        It is best practice to add the transform group of the shape to the
        displacement objectSet
        Example content:
            [asset_GRP|geometry_GRP|body_GES,
             asset_GRP|geometry_GRP|L_eye_GES,
             asset_GRP|geometry_GRP|R_eye_GES,
             asset_GRP|geometry_GRP|wings_GEO]
    Examples:

    - Displacement objectSets (like V-Ray):

        It is best practice to add the transform group of the shape to the
        displacement objectSet.

        Example content:
            [asset_GRP|geometry_GRP|body_GES,
             asset_GRP|geometry_GRP|L_eye_GES,
             asset_GRP|geometry_GRP|R_eye_GES,
             asset_GRP|geometry_GRP|wings_GEO]

    """

@ -35,7 +40,7 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look Sets'
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    def process(self, instance):
        """Process all the nodes in the instance"""

@ -56,7 +61,7 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
        invalid = []

        renderlayer = instance.data.get("renderlayer", "defaultRenderLayer")
        with context.renderlayer(renderlayer):
        with lib.renderlayer(renderlayer):
            for node in instance:
                # get the connected objectSets of the node
                sets = lib.get_related_sets(node)

@ -91,7 +96,3 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
                continue

        return invalid

    @classmethod
    def repair(cls, context, instance):
        pass

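The hunk above relies on lib.get_related_sets(node) to find the objectSets a
node participates in; that helper is not shown in this diff. As a rough sketch
only (not the repository's implementation), a similar lookup of the render
objectSets a node belongs to can be done directly with maya.cmds:

# Illustration only: list the render objectSets (shading groups and other
# render sets, type=1) that a given node is a member of.
from maya import cmds

def get_related_sets_sketch(node):
    """Return render-related objectSets the node belongs to (may be empty)."""
    return cmds.listSets(object=node, type=1, extendToShape=False) or []
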
@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateSingleShader(pyblish.api.InstancePlugin):

@ -15,7 +16,7 @@ class ValidateSingleShader(pyblish.api.InstancePlugin):
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Look Single Shader Per Shape'
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    # The default connections to check
    def process(self, instance):

@ -16,11 +16,12 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):

    def process(self, context):

        # Collected units
        linearunits = context.data('linearUnits')
        angularunits = context.data('angularUnits')

        fps = context.data['fps']
        project_fps = lib.get_project_fps()
        asset_fps = lib.get_asset_fps()

        self.log.info('Units (linear): {0}'.format(linearunits))
        self.log.info('Units (angular): {0}'.format(angularunits))

@ -32,7 +33,7 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):

        assert angularunits and angularunits == 'deg', ("Scene angular units "
                                                        "must be degrees")
        assert fps and fps == project_fps, "Scene must be %s FPS" % project_fps
        assert fps and fps == asset_fps, "Scene must be %s FPS" % asset_fps

    @classmethod
    def repair(cls):

@ -49,5 +50,5 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):
        cls.log.debug(current_linear)

        cls.log.info("Setting time unit to match project")
        project_fps = lib.get_project_fps()
        mayalib.set_scene_fps(project_fps)
        asset_fps = lib.get_asset_fps()
        mayalib.set_scene_fps(asset_fps)

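The change above swaps the project-wide FPS check for a per-asset FPS via
lib.get_asset_fps() and repairs through mayalib.set_scene_fps(); neither
helper is shown in this diff. A minimal sketch of what setting the scene FPS
involves on the Maya side, assuming the standard mapping of FPS values to
Maya's named time units (an illustration, not the repository's code):

# Map an FPS value to Maya's named time units and apply it with maya.cmds.
from maya import cmds

FPS_TO_UNIT = {15: "game", 24: "film", 25: "pal",
               30: "ntsc", 48: "show", 50: "palf", 60: "ntscf"}

def set_scene_fps_sketch(fps):
    """Set the scene time unit for a given frames-per-second value."""
    unit = FPS_TO_UNIT.get(int(fps))
    if unit is None:
        raise ValueError("Unsupported FPS value: %s" % fps)
    cmds.currentUnit(time=unit)
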
@ -4,6 +4,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


def len_flattened(components):

@ -13,7 +14,7 @@ def len_flattened(components):
    when requesting with `maya.cmds.ls` without the `flatten`
    flag. Though enabling `flatten` on a large list (e.g. millions)
    will result in a slow result. This command will return the amount
    of entries in a non-flattened list by parsing the result with
    of entries in a non-flattened list by parsing the result with
    regex.

    Args:

@ -49,7 +50,7 @@ class ValidateMeshHasUVs(pyblish.api.InstancePlugin):
    families = ['colorbleed.model']
    category = 'geometry'
    label = 'Mesh Has UVs'
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]
    optional = True

    @classmethod

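The docstring above describes counting the entries of a non-flattened
component list by parsing it with regex; the body of len_flattened is not part
of this hunk. A self-contained sketch of that idea (an approximation, not the
repository's exact implementation):

# Count components in a non-flattened `cmds.ls` result such as
# ['pCube1.vtx[0:7]', 'pCube1.vtx[9]'] without flattening the list.
import re

_RANGE = re.compile(r"\[(\d+):(\d+)\]")

def len_flattened_sketch(components):
    count = 0
    for entry in components:
        match = _RANGE.search(entry)
        if match:
            start, end = int(match.group(1)), int(match.group(2))
            count += end - start + 1  # inclusive range: [0:7] is 8 entries
        else:
            count += 1  # single component like 'pCube1.vtx[9]'
    return count
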
@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin):

@ -17,7 +18,7 @@ class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin):
    category = 'geometry'
    version = (0, 1, 0)
    label = 'Mesh Lamina Faces'
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):


@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateMeshNoNegativeScale(pyblish.api.Validator):

@ -20,7 +21,7 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator):
    hosts = ['maya']
    families = ['colorbleed.model']
    label = 'Mesh No Negative Scale'
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):


@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateMeshNonManifold(pyblish.api.Validator):

@ -16,7 +17,7 @@ class ValidateMeshNonManifold(pyblish.api.Validator):
    hosts = ['maya']
    families = ['colorbleed.model']
    label = 'Mesh Non-Manifold Vertices/Edges'
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):


@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action
import colorbleed.maya.lib as lib


@ -21,7 +22,7 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
    category = 'geometry'
    version = (0, 1, 0)
    label = 'Mesh Edge Length Non Zero'
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    __tolerance = 1e-5


@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateMeshNormalsUnlocked(pyblish.api.Validator):

@ -18,7 +19,7 @@ class ValidateMeshNormalsUnlocked(pyblish.api.Validator):
    category = 'geometry'
    version = (0, 1, 0)
    label = 'Mesh Normals Unlocked'
    actions = [colorbleed.api.SelectInvalidAction,
    actions = [colorbleed.maya.action.SelectInvalidAction,
               colorbleed.api.RepairAction]
    optional = True


@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


def pairs(iterable):

@ -76,7 +77,7 @@ class ValidateMeshShaderConnections(pyblish.api.InstancePlugin):
    hosts = ['maya']
    families = ['colorbleed.model']
    label = "Mesh Shader Connections"
    actions = [colorbleed.api.SelectInvalidAction,
    actions = [colorbleed.maya.action.SelectInvalidAction,
               colorbleed.api.RepairAction]

    def process(self, instance):


@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action
import colorbleed.maya.lib as lib


@ -21,7 +22,7 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin):
    optional = True
    version = (0, 1, 0)
    label = "Mesh Single UV Set"
    actions = [colorbleed.api.SelectInvalidAction,
    actions = [colorbleed.maya.action.SelectInvalidAction,
               colorbleed.api.RepairAction]

    @staticmethod

colorbleed/plugins/maya/publish/validate_mesh_uv_set_map1.py  (new file, +91)

@ -0,0 +1,91 @@
from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin):
    """Validate model's default set exists and is named 'map1'.

    In Maya meshes by default have a uv set named "map1" that cannot be
    deleted. It can be renamed however, introducing some issues with some
    renderers. As such we ensure the first (default) UV set index is named
    "map1".

    """

    order = colorbleed.api.ValidateMeshOrder
    hosts = ['maya']
    families = ['colorbleed.model']
    optional = True
    label = "Mesh has map1 UV Set"
    actions = [colorbleed.maya.action.SelectInvalidAction,
               colorbleed.api.RepairAction]

    @staticmethod
    def get_invalid(instance):

        meshes = cmds.ls(instance, type='mesh', long=True)

        invalid = []
        for mesh in meshes:

            # Get existing mapping of uv sets by index
            indices = cmds.polyUVSet(mesh, query=True, allUVSetsIndices=True)
            maps = cmds.polyUVSet(mesh, query=True, allUVSets=True)
            mapping = dict(zip(indices, maps))

            # Get the uv set at index zero.
            name = mapping[0]
            if name != "map1":
                invalid.append(mesh)

        return invalid

    def process(self, instance):
        """Process all the nodes in the instance 'objectSet'"""

        invalid = self.get_invalid(instance)
        if invalid:
            raise ValueError("Meshes found without 'map1' "
                             "UV set: {0}".format(invalid))

    @classmethod
    def repair(cls, instance):
        """Rename uv map at index zero to map1"""

        for mesh in cls.get_invalid(instance):

            # Get existing mapping of uv sets by index
            indices = cmds.polyUVSet(mesh, query=True, allUVSetsIndices=True)
            maps = cmds.polyUVSet(mesh, query=True, allUVSets=True)
            mapping = dict(zip(indices, maps))

            # Ensure there is no uv set named map1 to avoid
            # a clash on renaming the "default uv set" to map1
            existing = set(maps)
            if "map1" in existing:

                # Find a unique name index
                i = 2
                while True:
                    name = "map{0}".format(i)
                    if name not in existing:
                        break
                    i += 1

                cls.log.warning("Renaming clashing uv set name on mesh"
                                " %s to '%s'", mesh, name)

                cmds.polyUVSet(mesh,
                               rename=True,
                               uvSet="map1",
                               newUVSet=name)

            # Rename the initial index to map1
            original = mapping[0]
            cmds.polyUVSet(mesh,
                           rename=True,
                           uvSet=original,
                           newUVSet="map1")

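For reference, the same polyUVSet queries used by get_invalid() above can be
run interactively on a single mesh to see which UV set sits at index zero; the
shape name below is only an example, not something from this commit:

from maya import cmds

mesh = "pCubeShape1"  # example shape name
indices = cmds.polyUVSet(mesh, query=True, allUVSetsIndices=True)
maps = cmds.polyUVSet(mesh, query=True, allUVSets=True)
print(dict(zip(indices, maps)).get(0))  # expected to print "map1"
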
@ -4,6 +4,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


def len_flattened(components):

@ -61,7 +62,7 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin):
    families = ['colorbleed.model']
    category = 'geometry'
    label = 'Mesh Vertices Have Edges'
    actions = [colorbleed.api.SelectInvalidAction,
    actions = [colorbleed.maya.action.SelectInvalidAction,
               colorbleed.api.RepairAction]

    @classmethod


@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action
import colorbleed.maya.lib as lib


@ -17,7 +18,7 @@ class ValidateModelContent(pyblish.api.InstancePlugin):
    hosts = ["maya"]
    families = ["colorbleed.model"]
    label = "Model Content"
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    @classmethod
    def get_invalid(cls, instance):


@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateNoAnimation(pyblish.api.Validator):

@ -18,7 +19,7 @@ class ValidateNoAnimation(pyblish.api.Validator):
    hosts = ["maya"]
    families = ["colorbleed.model"]
    optional = True
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    def process(self, instance):


@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateNoDefaultCameras(pyblish.api.InstancePlugin):

@ -17,7 +18,7 @@ class ValidateNoDefaultCameras(pyblish.api.InstancePlugin):
    families = ['colorbleed.camera']
    version = (0, 1, 0)
    label = "No Default Cameras"
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):


@ -3,6 +3,7 @@ import maya.cmds as cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


def get_namespace(node_name):

@ -21,7 +22,7 @@ class ValidateNoNamespace(pyblish.api.InstancePlugin):
    category = 'cleanup'
    version = (0, 1, 0)
    label = 'No Namespaces'
    actions = [colorbleed.api.SelectInvalidAction,
    actions = [colorbleed.maya.action.SelectInvalidAction,
               colorbleed.api.RepairAction]

    @staticmethod


@ -2,6 +2,7 @@ import maya.cmds as cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


def has_shape_children(node):

@ -42,7 +43,8 @@ class ValidateNoNullTransforms(pyblish.api.InstancePlugin):
    category = 'cleanup'
    version = (0, 1, 0)
    label = 'No Empty/Null Transforms'
    actions = [colorbleed.api.RepairAction, colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.api.RepairAction,
               colorbleed.maya.action.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):


@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateNoUnknownNodes(pyblish.api.InstancePlugin):

@ -20,7 +21,7 @@ class ValidateNoUnknownNodes(pyblish.api.InstancePlugin):
    families = ['colorbleed.model', 'colorbleed.rig']
    optional = True
    label = "Unknown Nodes"
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):

@ -1,14 +1,15 @@
import pyblish.api
import colorbleed.api
import colorbleed.maya.action

from colorbleed.maya import lib


class ValidateNodeIDs(pyblish.api.InstancePlugin):
    """Validate nodes have a Colorbleed Id.

    When IDs are missing from nodes *save your scene* and they should be
    automatically generated because IDs are created on non-referenced nodes

    When IDs are missing from nodes *save your scene* and they should be
    automatically generated because IDs are created on non-referenced nodes
    in Maya upon scene save.

    """

@ -21,10 +22,11 @@ class ValidateNodeIDs(pyblish.api.InstancePlugin):
                "colorbleed.rig",
                "colorbleed.pointcache",
                "colorbleed.animation",
                "colorbleed.setdress"]
                "colorbleed.setdress",
                "colorbleed.yetiRig"]

    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction,
               colorbleed.maya.action.GenerateUUIDsOnInvalidAction]

    def process(self, instance):
        """Process all meshes"""

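The hunks above do not show get_invalid() for this validator. A hypothetical
sketch of how missing ids could be detected; the attribute name "cbId" is an
assumption made for illustration and is not confirmed by this diff:

# Hypothetical: flag nodes in the instance without a Colorbleed id attribute.
from maya import cmds

def get_invalid_sketch(instance, id_attribute="cbId"):
    invalid = []
    for node in instance:
        if not cmds.attributeQuery(id_attribute, node=node, exists=True):
            invalid.append(node)
    return invalid
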
@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action
import colorbleed.maya.lib as lib


@ -19,7 +20,7 @@ class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin):
    families = ['colorbleed.look']
    hosts = ['maya']
    label = 'Deformed shape ids'
    actions = [colorbleed.api.SelectInvalidAction, colorbleed.api.RepairAction]
    actions = [colorbleed.maya.action.SelectInvalidAction, colorbleed.api.RepairAction]

    def process(self, instance):
        """Process all the nodes in the instance"""


@ -3,6 +3,7 @@ import pyblish.api
import avalon.io as io

import colorbleed.api
import colorbleed.maya.action
from colorbleed.maya import lib


@ -22,7 +23,7 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin):
    hosts = ['maya']
    families = ["*"]

    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    def process(self, instance):
        invalid = self.get_invalid(instance)


@ -2,6 +2,7 @@ import pyblish.api
import colorbleed.api

import avalon.io as io
import colorbleed.maya.action

from colorbleed.maya import lib


@ -19,8 +20,8 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin):
                "colorbleed.rig"]
    optional = True

    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction,
               colorbleed.maya.action.GenerateUUIDsOnInvalidAction]

    def process(self, instance):
        """Process all nodes in instance (including hierarchy)"""

@ -2,6 +2,7 @@ from collections import defaultdict

import pyblish.api
import colorbleed.api
import colorbleed.maya.action
import colorbleed.maya.lib as lib


@ -16,10 +17,11 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin):
    hosts = ['maya']
    families = ["colorbleed.model",
                "colorbleed.look",
                "colorbleed.rig"]
                "colorbleed.rig",
                "colorbleed.yetiRig"]

    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction,
               colorbleed.maya.action.GenerateUUIDsOnInvalidAction]

    def process(self, instance):
        """Process all meshes"""

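This plugin imports defaultdict (see the hunk header above), which suggests a
group-by-id duplicate check. A sketch of that idea, with the id lookup passed
in as a callable because the actual helper is not shown in this diff:

# Illustrative only: collect nodes per id and flag ids used more than once.
from collections import defaultdict

def get_invalid_sketch(instance, get_id):
    ids = defaultdict(list)
    for node in instance:
        node_id = get_id(node)
        if node_id:
            ids[node_id].append(node)

    invalid = []
    for node_id, nodes in ids.items():
        if len(nodes) > 1:  # the same id appears on more than one node
            invalid.extend(nodes)
    return invalid
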
@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateNodeNoGhosting(pyblish.api.InstancePlugin):

@ -20,7 +21,7 @@ class ValidateNodeNoGhosting(pyblish.api.InstancePlugin):
    hosts = ['maya']
    families = ['colorbleed.model', 'colorbleed.rig']
    label = "No Ghosting"
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    _attributes = {'ghosting': 0}

@ -0,0 +1,41 @@
from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action
import colorbleed.maya.lib as lib


class ValidateRenderNoDefaultCameras(pyblish.api.InstancePlugin):
    """Ensure no default (startup) cameras are to be rendered."""

    order = colorbleed.api.ValidateContentsOrder
    hosts = ['maya']
    families = ['colorbleed.renderlayer']
    label = "No Default Cameras Renderable"
    actions = [colorbleed.maya.action.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):

        layer = instance.data["setMembers"]

        # Collect default cameras
        cameras = cmds.ls(type='camera', long=True)
        defaults = [cam for cam in cameras if
                    cmds.camera(cam, query=True, startupCamera=True)]

        invalid = []
        with lib.renderlayer(layer):
            for cam in defaults:
                if cmds.getAttr(cam + ".renderable"):
                    invalid.append(cam)

        return invalid

    def process(self, instance):
        """Process all the cameras in the instance"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Renderable default cameras "
                               "found: {0}".format(invalid))

@ -0,0 +1,49 @@
from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action
import colorbleed.maya.lib as lib


class ValidateRenderSingleCamera(pyblish.api.InstancePlugin):
    """Only one camera may be renderable in a layer.

    Currently the pipeline supports only a single camera per layer.
    This is because when multiple cameras are rendered the output files
    automatically get different names because the <Camera> render token
    is not in the output path. As such the output files conflict with how
    our pipeline expects the output.

    """

    order = colorbleed.api.ValidateContentsOrder
    hosts = ['maya']
    families = ['colorbleed.renderlayer']
    label = "Render Single Camera"
    actions = [colorbleed.maya.action.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):

        layer = instance.data["setMembers"]

        cameras = cmds.ls(type='camera', long=True)

        with lib.renderlayer(layer):
            renderable = [cam for cam in cameras if
                          cmds.getAttr(cam + ".renderable")]

        if len(renderable) == 0:
            raise RuntimeError("No renderable cameras found.")
        elif len(renderable) > 1:
            return renderable
        else:
            return []

    def process(self, instance):
        """Process all the cameras in the instance"""
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("Multiple renderable cameras"
                               "found: {0}".format(invalid))

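Not part of this commit, but context for the docstring above: the usual
Maya-side way to keep outputs of several renderable cameras from clashing is
to include the <Camera> token in the image file prefix, for example:

# Example only: give each renderable camera its own output path via the
# <Camera> token in the render globals image file prefix.
from maya import cmds

cmds.setAttr("defaultRenderGlobals.imageFilePrefix",
             "<Scene>/<RenderLayer>/<Camera>/<Scene>",
             type="string")
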
@ -1,5 +1,6 @@
import pyblish.api

import colorbleed.maya.action
from avalon import io
import colorbleed.api


@ -24,7 +25,7 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin):
    label = "Render Passes / AOVs Are Registered"
    hosts = ["maya"]
    families = ["colorbleed.renderlayer"]
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    def process(self, instance):
        invalid = self.get_invalid(instance)


@ -1,8 +1,10 @@
from maya import cmds

import pyblish.api

import colorbleed.api
from cb.utils.maya.context import undo_chunk
import colorbleed.maya.action
from colorbleed.maya.lib import undo_chunk


class ValidateRigControllers(pyblish.api.InstancePlugin):

@ -28,7 +30,7 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
    hosts = ["maya"]
    families = ["colorbleed.rig"]
    actions = [colorbleed.api.RepairAction,
               colorbleed.api.SelectInvalidAction]
               colorbleed.maya.action.SelectInvalidAction]

    # Default controller values
    CONTROLLER_DEFAULTS = {


@ -2,7 +2,9 @@ from maya import cmds

import pyblish.api
import colorbleed.api
from cb.utils.maya.context import undo_chunk

import colorbleed.maya.lib as lib
import colorbleed.maya.action


class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin):

@ -29,7 +31,7 @@ class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin):
    hosts = ["maya"]
    families = ["colorbleed.rig"]
    actions = [colorbleed.api.RepairAction,
               colorbleed.api.SelectInvalidAction]
               colorbleed.maya.action.SelectInvalidAction]

    attributes = [
        "rcurve",

@ -81,7 +83,7 @@ class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin):
    def repair(cls, instance):

        invalid = cls.get_invalid(instance)
        with undo_chunk():
        with lib.undo_chunk():
            for node in invalid:
                for attribute in cls.attributes:
                    if cmds.attributeQuery(attribute, node=node, exists=True):

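The hunks above move undo_chunk from cb.utils.maya.context to
colorbleed.maya.lib; neither implementation appears in this diff. Such a
helper typically wraps Maya's undo chunking, roughly as in the sketch below
(an assumption, not the repository's code):

# Rough sketch of an undo_chunk() context manager: group every command issued
# inside the with-block into a single undo step.
from contextlib import contextmanager
from maya import cmds

@contextmanager
def undo_chunk():
    cmds.undoInfo(openChunk=True)
    try:
        yield
    finally:
        cmds.undoInfo(closeChunk=True)
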
@ -2,6 +2,7 @@ import maya.cmds as cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action
import colorbleed.maya.lib as lib


@ -19,7 +20,7 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin):
    families = ["colorbleed.rig"]
    hosts = ['maya']
    label = 'Rig Out Set Node Ids'
    actions = [colorbleed.api.SelectInvalidAction, colorbleed.api.RepairAction]
    actions = [colorbleed.maya.action.SelectInvalidAction, colorbleed.api.RepairAction]

    def process(self, instance):
        """Process all meshes"""


@ -1,5 +1,6 @@
import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateSetdressNamespaces(pyblish.api.InstancePlugin):

@ -17,7 +18,7 @@ class ValidateSetdressNamespaces(pyblish.api.InstancePlugin):
    label = "Validate Setdress Namespaces"
    order = pyblish.api.ValidatorOrder
    families = ["colorbleed.setdress"]
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    def process(self, instance):


@ -3,6 +3,8 @@ import colorbleed.api

from maya import cmds

import colorbleed.maya.action


class ValidateSetDressModelTransforms(pyblish.api.InstancePlugin):
    """Verify only root nodes of the loaded asset have transformations.

@ -26,7 +28,7 @@ class ValidateSetDressModelTransforms(pyblish.api.InstancePlugin):
    order = pyblish.api.ValidatorOrder + 0.49
    label = "Setdress Model Transforms"
    families = ["colorbleed.setdress"]
    actions = [colorbleed.api.SelectInvalidAction,
    actions = [colorbleed.maya.action.SelectInvalidAction,
               colorbleed.api.RepairAction]

    prompt_message = ("You are about to reset the matrix to the default values."


@ -4,6 +4,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


def short_name(node):

@ -37,7 +38,7 @@ class ValidateShapeDefaultNames(pyblish.api.InstancePlugin):
    optional = True
    version = (0, 1, 0)
    label = "Shape Default Naming"
    actions = [colorbleed.api.SelectInvalidAction,
    actions = [colorbleed.maya.action.SelectInvalidAction,
               colorbleed.api.RepairAction]

    @staticmethod


@ -3,6 +3,8 @@ import colorbleed.api

from maya import cmds

import colorbleed.maya.action


class ValidateShapeRenderStats(pyblish.api.Validator):
    """Ensure all render stats are set to the default values."""

@ -11,7 +13,7 @@ class ValidateShapeRenderStats(pyblish.api.Validator):
    hosts = ['maya']
    families = ['colorbleed.model']
    label = 'Shape Default Render Stats'
    actions = [colorbleed.api.SelectInvalidAction,
    actions = [colorbleed.maya.action.SelectInvalidAction,
               colorbleed.api.RepairAction]

    defaults = {'castsShadows': 1,


@ -1,5 +1,6 @@
import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateStepSize(pyblish.api.InstancePlugin):

@ -14,7 +15,7 @@ class ValidateStepSize(pyblish.api.InstancePlugin):
    families = ['colorbleed.camera',
                'colorbleed.pointcache',
                'colorbleed.animation']
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    MIN = 0.01
    MAX = 1.0


@ -2,7 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api

import colorbleed.maya.action

SUFFIX_NAMING_TABLE = {'mesh': ["_GEO", "_GES", "_GEP", "_OSD"],
                       'nurbsCurve': ["_CRV"],

@ -38,7 +38,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
    optional = True
    version = (0, 1, 0)
    label = 'Suffix Naming Conventions'
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    @staticmethod
    def is_valid_name(node_name, shape_type):


@ -2,6 +2,7 @@ from maya import cmds

import pyblish.api
import colorbleed.api
import colorbleed.maya.action


class ValidateTransformZero(pyblish.api.Validator):

@ -19,7 +20,7 @@ class ValidateTransformZero(pyblish.api.Validator):
    category = "geometry"
    version = (0, 1, 0)
    label = "Transform Zero (Freeze)"
    actions = [colorbleed.api.SelectInvalidAction]
    actions = [colorbleed.maya.action.SelectInvalidAction]

    _identity = [1.0, 0.0, 0.0, 0.0,
                 0.0, 1.0, 0.0, 0.0,

@ -1,27 +0,0 @@
import pyblish.api


class ValidateVrayProxy(pyblish.api.InstancePlugin):

    order = pyblish.api.ValidatorOrder
    label = 'VRay Proxy Settings'
    hosts = ['maya']
    families = ['colorbleed.vrayproxy']

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError("'%s' has invalid settings for VRay Proxy "
                               "export!" % instance.name)

    @classmethod
    def get_invalid(cls, instance):
        data = instance.data

        if not data["setMembers"]:
            cls.log.error("'%s' is empty! This is a bug" % instance.name)

        if data["animation"]:
            if data["endFrame"] < data["startFrame"]:
                cls.log.error("End frame is smaller than start frame")

@ -0,0 +1,39 @@
import pyblish.api
import colorbleed.api

from maya import cmds

import colorbleed.maya.action


class ValidateVrayProxyMembers(pyblish.api.InstancePlugin):
    """Validate whether the V-Ray Proxy instance has shape members"""

    order = pyblish.api.ValidatorOrder
    label = 'VRay Proxy Members'
    hosts = ['maya']
    families = ['colorbleed.vrayproxy']
    actions = [colorbleed.maya.action.SelectInvalidAction]

    def process(self, instance):

        invalid = self.get_invalid(instance)

        if invalid:
            raise RuntimeError("'%s' is invalid VRay Proxy for "
                               "export!" % instance.name)

    @classmethod
    def get_invalid(cls, instance):

        shapes = cmds.ls(instance,
                         shapes=True,
                         noIntermediate=True,
                         long=True)

        if not shapes:
            cls.log.error("'%s' contains no shapes." % instance.name)

            # Return the instance itself
            return [instance.name]

Some files were not shown because too many files have changed in this diff.