diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000..c3acdec771
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,99 @@
+# Created by .ignore support plugin (hsz.mobi)
+### Python template
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+.hypothesis/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# dotenv
+.env
+
+# virtualenv
+.venv
+venv/
+ENV/
+
+# Spyder project settings
+.spyderproject
+
+# Rope project settings
+.ropeproject
+
+# Pycharm IDE settings
+.idea
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000..634ede742d
--- /dev/null
+++ b/README.md
@@ -0,0 +1,20 @@
+The base studio *config* for [Avalon](https://getavalon.github.io/)
+
+
+
+_This configuration acts as a starting point for all pype club clients with an Avalon deployment._
+
+### Code convention
+
+Below are some of the standard practices applied to this repository.
+
+- **Etiquette: PEP8**
+  - All code complies with PEP8. It is recommended you use a linter as you work; flake8 and pylint are both good options.
+- **Etiquette: Napoleon docstrings**
+  - All docstrings are written in the Google Napoleon format. See [Napoleon](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html) for details.
+- **Etiquette: Semantic Versioning**
+ - This project follows [semantic versioning](http://semver.org).
+- **Etiquette: Underscore means private**
+  - Anything prefixed with an underscore is internal to wherever it is used. For example, an underscore-prefixed variable is only ever used in its parent function or class, and an underscore-prefixed module is not for use by the end-user. In contrast, anything without an underscore is public, but not necessarily part of the API. Members of the API reside in `api.py`.
+- **API: Idempotence**
+  - A public function must produce the exact same result when called twice. This means no changing of state without restoring the previous state when finishing. For example, if a function requires changing the current selection in Autodesk Maya, it must restore the previous selection prior to completing (see the sketch below).
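+
+As a minimal sketch of the idempotence rule (assuming Maya as the host; the
+function name is illustrative):
+
+```python
+from maya import cmds
+
+
+def select_invalid(nodes):
+    """Select `nodes`, then restore the user's previous selection."""
+    previous = cmds.ls(selection=True)
+    cmds.select(nodes, replace=True, noExpand=True)
+    # ... operate on the selection here ...
+    if previous:
+        cmds.select(previous, replace=True)  # restore prior state
+    else:
+        cmds.select(clear=True)
+```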
diff --git a/config/__init__.py b/config/__init__.py
new file mode 100644
index 0000000000..e5d1aee374
--- /dev/null
+++ b/config/__init__.py
@@ -0,0 +1,26 @@
+import os
+
+from pyblish import api as pyblish
+from avalon import api as avalon
+
+from .launcher_actions import register_launcher_actions
+from .lib import collect_container_metadata
+
+PACKAGE_DIR = os.path.dirname(__file__)
+PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
+
+# Global plugin paths
+PUBLISH_PATH = os.path.join(PLUGINS_DIR, "global", "publish")
+LOAD_PATH = os.path.join(PLUGINS_DIR, "global", "load")
+
+
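+# Note: a host integration typically triggers this through Avalon itself;
+# `avalon.api.install(host)` resolves the config (via AVALON_CONFIG) and
+# invokes its `install()`. A sketch of the expected wiring, not a guarantee.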
+def install():
+ print("Registering global plug-ins..")
+ pyblish.register_plugin_path(PUBLISH_PATH)
+ avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
+
+
+def uninstall():
+ print("Deregistering global plug-ins..")
+ pyblish.deregister_plugin_path(PUBLISH_PATH)
+ avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
diff --git a/config/action.py b/config/action.py
new file mode 100644
index 0000000000..1b9dfbf07e
--- /dev/null
+++ b/config/action.py
@@ -0,0 +1,202 @@
+# absolute_import is needed to counter the `module has no cmds` error in Maya
+from __future__ import absolute_import
+import pyblish.api
+
+
+def get_errored_instances_from_context(context):
+
+ instances = list()
+ for result in context.data["results"]:
+ if result["instance"] is None:
+ # When instance is None we are on the "context" result
+ continue
+
+ if result["error"]:
+ instances.append(result["instance"])
+
+ return instances
+
+
+def get_errored_plugins_from_data(context):
+ """Get all failed validation plugins
+
+ Args:
+ context (object):
+
+ Returns:
+ list of plugins which failed during validation
+
+ """
+
+ plugins = list()
+ results = context.data.get("results", [])
+ for result in results:
+ if result["success"] is True:
+ continue
+ plugins.append(result["plugin"])
+
+ return plugins
+
+
+class RepairAction(pyblish.api.Action):
+ """Repairs the action
+
+ To process the repairing this requires a static `repair(instance)` method
+ is available on the plugin.
+
+ """
+ label = "Repair"
+ on = "failed" # This action is only available on a failed plug-in
+ icon = "wrench" # Icon from Awesome Icon
+
+ def process(self, context, plugin):
+
+ if not hasattr(plugin, "repair"):
+ raise RuntimeError("Plug-in does not have repair method.")
+
+ # Get the errored instances
+ self.log.info("Finding failed instances..")
+ errored_instances = get_errored_instances_from_context(context)
+
+ # Apply pyblish.logic to get the instances for the plug-in
+ instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
+ for instance in instances:
+ plugin.repair(instance)
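+
+
+# A hypothetical validator wired up for RepairAction could look like this
+# (sketch only; `ValidateExample` and its repair logic are illustrative):
+#
+#     class ValidateExample(pyblish.api.InstancePlugin):
+#         order = pyblish.api.ValidatorOrder
+#         actions = [RepairAction]
+#
+#         @staticmethod
+#         def repair(instance):
+#             pass  # fix the offending scene state here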
+
+
+class RepairContextAction(pyblish.api.Action):
+ """Repairs the action
+
+ To process the repairing this requires a static `repair(instance)` method
+ is available on the plugin.
+
+ """
+ label = "Repair Context"
+ on = "failed" # This action is only available on a failed plug-in
+
+ def process(self, context, plugin):
+
+ if not hasattr(plugin, "repair"):
+ raise RuntimeError("Plug-in does not have repair method.")
+
+        # Get the errored plug-ins
+        self.log.info("Finding failed plug-ins..")
+ errored_plugins = get_errored_plugins_from_data(context)
+
+        # If this plug-in errored, attempt to repair the context
+ if plugin in errored_plugins:
+ self.log.info("Attempting fix ...")
+ plugin.repair()
+
+
+class SelectInvalidAction(pyblish.api.Action):
+ """Select invalid nodes in Maya when plug-in failed.
+
+ To retrieve the invalid nodes this assumes a static `get_invalid()`
+ method is available on the plugin.
+
+ """
+ label = "Select invalid"
+ on = "failed" # This action is only available on a failed plug-in
+ icon = "search" # Icon from Awesome Icon
+
+ def process(self, context, plugin):
+
+ try:
+ from maya import cmds
+ except ImportError:
+ raise ImportError("Current host is not Maya")
+
+ errored_instances = get_errored_instances_from_context(context)
+
+ # Apply pyblish.logic to get the instances for the plug-in
+ instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
+
+ # Get the invalid nodes for the plug-ins
+ self.log.info("Finding invalid nodes..")
+ invalid = list()
+ for instance in instances:
+ invalid_nodes = plugin.get_invalid(instance)
+ if invalid_nodes:
+ if isinstance(invalid_nodes, (list, tuple)):
+ invalid.extend(invalid_nodes)
+ else:
+ self.log.warning("Plug-in returned to be invalid, "
+ "but has no selectable nodes.")
+
+ # Ensure unique (process each node only once)
+ invalid = list(set(invalid))
+
+ if invalid:
+ self.log.info("Selecting invalid nodes: %s" % ", ".join(invalid))
+ cmds.select(invalid, replace=True, noExpand=True)
+ else:
+ self.log.info("No invalid nodes found.")
+ cmds.select(deselect=True)
+
+
+class GenerateUUIDsOnInvalidAction(pyblish.api.Action):
+ """Generate UUIDs on the invalid nodes in the instance.
+
+ Invalid nodes are those returned by the plugin's `get_invalid` method.
+ As such it is the plug-in's responsibility to ensure the nodes that
+ receive new UUIDs are actually invalid.
+
+ Requires:
+ - instance.data["asset"]
+
+ """
+
+ label = "Regenerate UUIDs"
+ on = "failed" # This action is only available on a failed plug-in
+ icon = "wrench" # Icon from Awesome Icon
+
+ def process(self, context, plugin):
+
+ self.log.info("Finding bad nodes..")
+
+        # Get the errored instances
+        errored_instances = get_errored_instances_from_context(context)
+
+ # Apply pyblish logic to get the instances for the plug-in
+ instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
+
+ # Get the nodes from the all instances that ran through this plug-in
+ all_invalid = []
+ for instance in instances:
+ invalid = plugin.get_invalid(instance)
+ if invalid:
+
+ self.log.info("Fixing instance {}".format(instance.name))
+ self._update_id_attribute(instance, invalid)
+
+ all_invalid.extend(invalid)
+
+ if not all_invalid:
+ self.log.info("No invalid nodes found.")
+ return
+
+ all_invalid = list(set(all_invalid))
+ self.log.info("Generated ids on nodes: {0}".format(all_invalid))
+
+ def _update_id_attribute(self, instance, nodes):
+ """Delete the id attribute
+
+ Args:
+ instance: The instance we're fixing for
+ nodes (list): all nodes to regenerate ids on
+ """
+
+ import config.apps.maya.lib as lib
+ import avalon.io as io
+
+ asset = instance.data['asset']
+ asset_id = io.find_one({"name": asset, "type": "asset"},
+ projection={"_id": True})['_id']
+ for node, _id in lib.generate_ids(nodes, asset_id=asset_id):
+ lib.set_id(node, _id, overwrite=True)
diff --git a/config/api.py b/config/api.py
new file mode 100644
index 0000000000..531a63a50d
--- /dev/null
+++ b/config/api.py
@@ -0,0 +1,36 @@
+from collections import OrderedDict
+
+from .plugin import (
+
+ Extractor,
+
+ ValidatePipelineOrder,
+ ValidateContentsOrder,
+ ValidateSceneOrder,
+ ValidateMeshOrder
+)
+
+# temporary fix
+from .action import (
+
+ get_errored_instances_from_context,
+ SelectInvalidAction,
+ GenerateUUIDsOnInvalidAction,
+ RepairAction,
+ RepairContextAction
+)
+
+__all__ = [
+ # plugin classes
+ "Extractor",
+ # ordering
+ "ValidatePipelineOrder",
+ "ValidateContentsOrder",
+ "ValidateSceneOrder",
+ "ValidateMeshOrder",
+ # action
+ "get_errored_instances_from_context",
+ "SelectInvalidAction",
+ "GenerateUUIDsOnInvalidAction",
+ "RepairAction"
+]
diff --git a/config/apps/__init__.py b/config/apps/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/config/apps/fusion/__init__.py b/config/apps/fusion/__init__.py
new file mode 100644
index 0000000000..8ae042c934
--- /dev/null
+++ b/config/apps/fusion/__init__.py
@@ -0,0 +1,67 @@
+import os
+
+from avalon import api as avalon
+from pyblish import api as pyblish
+
+
+PARENT_DIR = os.path.dirname(__file__)
+PACKAGE_DIR = os.path.dirname(PARENT_DIR)
+PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
+
+PUBLISH_PATH = os.path.join(PLUGINS_DIR, "fusion", "publish")
+LOAD_PATH = os.path.join(PLUGINS_DIR, "fusion", "load")
+CREATE_PATH = os.path.join(PLUGINS_DIR, "fusion", "create")
+INVENTORY_PATH = os.path.join(PLUGINS_DIR, "fusion", "inventory")
+
+
+def install():
+ print("Registering Fusion plug-ins..")
+ pyblish.register_plugin_path(PUBLISH_PATH)
+ avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
+ avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
+ avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
+
+ pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
+
+ # Disable all families except for the ones we explicitly want to see
+ family_states = ["colorbleed.imagesequence",
+ "colorbleed.camera",
+ "colorbleed.pointcache"]
+
+ avalon.data["familiesStateDefault"] = False
+ avalon.data["familiesStateToggled"] = family_states
+
+
+def uninstall():
+ print("Deregistering Fusion plug-ins..")
+ pyblish.deregister_plugin_path(PUBLISH_PATH)
+ avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
+    avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
+    avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
+
+ pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
+
+
+def on_pyblish_instance_toggled(instance, new_value, old_value):
+ """Toggle saver tool passthrough states on instance toggles."""
+
+ from avalon.fusion import comp_lock_and_undo_chunk
+
+ comp = instance.context.data.get("currentComp")
+ if not comp:
+ return
+
+ savers = [tool for tool in instance if
+ getattr(tool, "ID", None) == "Saver"]
+ if not savers:
+ return
+
+ # Whether instances should be passthrough based on new value
+ passthrough = not new_value
+ with comp_lock_and_undo_chunk(comp,
+ undo_queue_name="Change instance "
+ "active state"):
+ for tool in savers:
+ attrs = tool.GetAttrs()
+ current = attrs["TOOLB_PassThrough"]
+ if current != passthrough:
+ tool.SetAttrs({"TOOLB_PassThrough": passthrough})
diff --git a/config/apps/fusion/lib.py b/config/apps/fusion/lib.py
new file mode 100644
index 0000000000..f2846c966a
--- /dev/null
+++ b/config/apps/fusion/lib.py
@@ -0,0 +1,61 @@
+import sys
+
+from avalon.vendor.Qt import QtGui
+import avalon.fusion
+
+
+self = sys.modules[__name__]
+self._project = None
+
+
+def update_frame_range(start, end, comp=None, set_render_range=True):
+ """Set Fusion comp's start and end frame range
+
+ Args:
+ start (float, int): start frame
+ end (float, int): end frame
+ comp (object, Optional): comp object from fusion
+ set_render_range (bool, Optional): When True this will also set the
+ composition's render start and end frame.
+
+ Returns:
+ None
+
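+    Example:
+        >>> # Sketch: set both global and render range on the current comp
+        >>> update_frame_range(1001, 1100)
+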
+ """
+
+ if not comp:
+ comp = avalon.fusion.get_current_comp()
+
+ attrs = {
+ "COMPN_GlobalStart": start,
+ "COMPN_GlobalEnd": end
+ }
+
+ if set_render_range:
+ attrs.update({
+ "COMPN_RenderStart": start,
+ "COMPN_RenderEnd": end
+ })
+
+ with avalon.fusion.comp_lock_and_undo_chunk(comp):
+ comp.SetAttrs(attrs)
+
+
+def get_additional_data(container):
+ """Get Fusion related data for the container
+
+ Args:
+ container(dict): the container found by the ls() function
+
+ Returns:
+ dict
+ """
+
+ tool = container["_tool"]
+ tile_color = tool.TileColor
+ if tile_color is None:
+ return {}
+
+ return {"color": QtGui.QColor.fromRgbF(tile_color["R"],
+ tile_color["G"],
+ tile_color["B"])}
diff --git a/config/apps/maya/__init__.py b/config/apps/maya/__init__.py
new file mode 100644
index 0000000000..6a5f541f4f
--- /dev/null
+++ b/config/apps/maya/__init__.py
@@ -0,0 +1,181 @@
+import os
+import logging
+import weakref
+
+from maya import utils, cmds
+
+from avalon import api as avalon, pipeline, maya
+from pyblish import api as pyblish
+
+from ..lib import (
+ update_task_from_path,
+ any_outdated
+)
+from . import menu
+from . import lib
+
+log = logging.getLogger("config.apps.maya")
+
+PARENT_DIR = os.path.dirname(__file__)
+PACKAGE_DIR = os.path.dirname(PARENT_DIR)
+PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
+
+PUBLISH_PATH = os.path.join(PLUGINS_DIR, "maya", "publish")
+LOAD_PATH = os.path.join(PLUGINS_DIR, "maya", "load")
+CREATE_PATH = os.path.join(PLUGINS_DIR, "maya", "create")
+
+
+def install():
+ pyblish.register_plugin_path(PUBLISH_PATH)
+ avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
+ avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
+
+ menu.install()
+
+ log.info("Installing callbacks ... ")
+ avalon.on("init", on_init)
+ avalon.on("save", on_save)
+ avalon.on("open", on_open)
+
+ avalon.before("save", on_before_save)
+
+ log.info("Overriding existing event 'taskChanged'")
+ override_event("taskChanged", on_task_changed)
+
+ log.info("Setting default family states for loader..")
+ avalon.data["familiesStateToggled"] = ["colorbleed.imagesequence"]
+
+
+def uninstall():
+ pyblish.deregister_plugin_path(PUBLISH_PATH)
+ avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
+ avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
+
+ menu.uninstall()
+
+
+def override_event(event, callback):
+ """
+ Override existing event callback
+ Args:
+ event (str): name of the event
+ callback (function): callback to be triggered
+
+ Returns:
+ None
+
+ """
+
+ ref = weakref.WeakSet()
+ ref.add(callback)
+
+ pipeline._registered_event_handlers[event] = ref
+
+
+def on_init(_):
+ avalon.logger.info("Running callback on init..")
+
+ def safe_deferred(fn):
+ """Execute deferred the function in a try-except"""
+
+ def _fn():
+ """safely call in deferred callback"""
+ try:
+ fn()
+ except Exception as exc:
+ print(exc)
+
+ try:
+ utils.executeDeferred(_fn)
+ except Exception as exc:
+ print(exc)
+
+ cmds.loadPlugin("AbcImport", quiet=True)
+ cmds.loadPlugin("AbcExport", quiet=True)
+
+ from .customize import override_component_mask_commands
+ safe_deferred(override_component_mask_commands)
+
+
+def on_before_save(return_code, _):
+ """Run validation for scene's FPS prior to saving"""
+ return lib.validate_fps()
+
+
+def on_save(_):
+ """Automatically add IDs to new nodes
+
+ Any transform of a mesh, without an existing ID, is given one
+ automatically on file save.
+ """
+
+ avalon.logger.info("Running callback on save..")
+
+ # Update current task for the current scene
+ update_task_from_path(cmds.file(query=True, sceneName=True))
+
+ # Generate ids of the current context on nodes in the scene
+ nodes = lib.get_id_required_nodes(referenced_nodes=False)
+ for node, new_id in lib.generate_ids(nodes):
+ lib.set_id(node, new_id, overwrite=False)
+
+
+def on_open(_):
+ """On scene open let's assume the containers have changed."""
+
+ from avalon.vendor.Qt import QtWidgets
+ from ..widgets import popup
+
+ # Ensure scene's FPS is set to project config
+ lib.validate_fps()
+
+ # Update current task for the current scene
+ update_task_from_path(cmds.file(query=True, sceneName=True))
+
+ if any_outdated():
+ log.warning("Scene has outdated content.")
+
+ # Find maya main window
+ top_level_widgets = {w.objectName(): w for w in
+ QtWidgets.QApplication.topLevelWidgets()}
+ parent = top_level_widgets.get("MayaWindow", None)
+
+ if parent is None:
+ log.info("Skipping outdated content pop-up "
+ "because Maya window can't be found.")
+ else:
+
+ # Show outdated pop-up
+ def _on_show_inventory():
+ import avalon.tools.cbsceneinventory as tool
+ tool.show(parent=parent)
+
+ dialog = popup.Popup(parent=parent)
+ dialog.setWindowTitle("Maya scene has outdated content")
+ dialog.setMessage("There are outdated containers in "
+ "your Maya scene.")
+ dialog.on_show.connect(_on_show_inventory)
+ dialog.show()
+
+
+def on_task_changed(*args):
+ """Wrapped function of app initialize and maya's on task changed"""
+
+ # Inputs (from the switched session and running app)
+ session = avalon.Session.copy()
+ app_name = os.environ["AVALON_APP_NAME"]
+
+ # Find the application definition
+ app_definition = pipeline.lib.get_application(app_name)
+
+ App = type("app_%s" % app_name,
+ (avalon.Application,),
+ {"config": app_definition.copy()})
+
+ # Initialize within the new session's environment
+ app = App()
+ env = app.environ(session)
+ app.initialize(env)
+
+ # Run
+ maya.pipeline._on_task_changed()
diff --git a/config/apps/maya/customize.py b/config/apps/maya/customize.py
new file mode 100644
index 0000000000..64f33d5aae
--- /dev/null
+++ b/config/apps/maya/customize.py
@@ -0,0 +1,66 @@
+"""A set of commands that install overrides to Maya's UI"""
+
+import maya.cmds as mc
+import maya.mel as mel
+from functools import partial
+import logging
+
+
+log = logging.getLogger(__name__)
+
+COMPONENT_MASK_ORIGINAL = {}
+
+
+def override_component_mask_commands():
+ """Override component mask ctrl+click behavior.
+
+ This implements special behavior for Maya's component
+ mask menu items where a ctrl+click will instantly make
+ it an isolated behavior disabling all others.
+
+ Tested in Maya 2016 and 2018
+
+ """
+ log.info("Installing override_component_mask_commands..")
+
+ # Get all object mask buttons
+ buttons = mc.formLayout("objectMaskIcons",
+ query=True,
+ childArray=True)
+ # Skip the triangle list item
+ buttons = [btn for btn in buttons if btn != "objPickMenuLayout"]
+
+ def on_changed_callback(raw_command, state):
+ """New callback"""
+
+ # If "control" is held force the toggled one to on and
+ # toggle the others based on whether any of the buttons
+ # was remaining active after the toggle, if not then
+ # enable all
+ if mc.getModifiers() == 4: # = CTRL
+ state = True
+ active = [mc.iconTextCheckBox(btn, query=True, value=True) for btn
+ in buttons]
+ if any(active):
+ mc.selectType(allObjects=False)
+ else:
+ mc.selectType(allObjects=True)
+
+ # Replace #1 with the current button state
+ cmd = raw_command.replace(" #1", " {}".format(int(state)))
+ mel.eval(cmd)
+
+ for btn in buttons:
+
+ # Store a reference to the original command so that if
+ # we rerun this override command it doesn't recursively
+ # try to implement the fix. (This also allows us to
+ # "uninstall" the behavior later)
+ if btn not in COMPONENT_MASK_ORIGINAL:
+ original = mc.iconTextCheckBox(btn, query=True, cc=True)
+ COMPONENT_MASK_ORIGINAL[btn] = original
+
+ # Assign the special callback
+ original = COMPONENT_MASK_ORIGINAL[btn]
+ new_fn = partial(on_changed_callback, original)
+ mc.iconTextCheckBox(btn, edit=True, cc=new_fn)
diff --git a/config/apps/maya/lib.py b/config/apps/maya/lib.py
new file mode 100644
index 0000000000..bb2690e3f8
--- /dev/null
+++ b/config/apps/maya/lib.py
@@ -0,0 +1,1438 @@
+"""Standalone helper functions"""
+
+import re
+import os
+import uuid
+
+import bson
+import json
+import logging
+import contextlib
+from collections import OrderedDict, defaultdict
+
+from maya import cmds, mel
+
+from avalon import api, maya, io, pipeline
+from avalon.vendor.six import string_types
+import avalon.maya.lib
+
+from config import lib
+
+
+log = logging.getLogger(__name__)
+
+ATTRIBUTE_DICT = {"int": {"attributeType": "long"},
+ "str": {"dataType": "string"},
+ "unicode": {"dataType": "string"},
+ "float": {"attributeType": "double"},
+ "bool": {"attributeType": "bool"}}
+
+SHAPE_ATTRS = {"castsShadows",
+ "receiveShadows",
+ "motionBlur",
+ "primaryVisibility",
+ "smoothShading",
+ "visibleInReflections",
+ "visibleInRefractions",
+ "doubleSided",
+ "opposite"}
+
+RENDER_ATTRS = {"vray":
+ {
+ "node": "vraySettings",
+ "prefix": "fileNamePrefix",
+ "padding": "fileNamePadding",
+ "ext": "imageFormatStr"
+ },
+ "default":
+ {
+ "node": "defaultRenderGlobals",
+ "prefix": "imageFilePrefix",
+ "padding": "extensionPadding"
+ }
+ }
+
+
+DEFAULT_MATRIX = [1.0, 0.0, 0.0, 0.0,
+ 0.0, 1.0, 0.0, 0.0,
+ 0.0, 0.0, 1.0, 0.0,
+ 0.0, 0.0, 0.0, 1.0]
+
+# The maya alembic export types
+_alembic_options = {
+ "startFrame": float,
+ "endFrame": float,
+ "frameRange": str, # "start end"; overrides startFrame & endFrame
+ "eulerFilter": bool,
+ "frameRelativeSample": float,
+ "noNormals": bool,
+ "renderableOnly": bool,
+ "step": float,
+ "stripNamespaces": bool,
+ "uvWrite": bool,
+ "wholeFrameGeo": bool,
+ "worldSpace": bool,
+ "writeVisibility": bool,
+ "writeColorSets": bool,
+ "writeFaceSets": bool,
+ "writeCreases": bool, # Maya 2015 Ext1+
+ "dataFormat": str,
+ "root": (list, tuple),
+ "attr": (list, tuple),
+ "attrPrefix": (list, tuple),
+ "userAttr": (list, tuple),
+ "melPerFrameCallback": str,
+ "melPostJobCallback": str,
+ "pythonPerFrameCallback": str,
+ "pythonPostJobCallback": str,
+ "selection": bool
+}
+
+INT_FPS = {15, 24, 25, 30, 48, 50, 60, 44100, 48000}
+FLOAT_FPS = {23.976, 29.97, 47.952, 59.94}
+
+
+def matrix_equals(a, b, tolerance=1e-10):
+ """
+ Compares two matrices with an imperfection tolerance
+
+ Args:
+ a (list, tuple): the matrix to check
+ b (list, tuple): the matrix to check against
+ tolerance (float): the precision of the differences
+
+ Returns:
+        bool: True when the matrices are equal within the tolerance
+
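+    Example:
+        >>> matrix_equals(DEFAULT_MATRIX, DEFAULT_MATRIX)
+        True
+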
+ """
+    return all(abs(x - y) < tolerance for x, y in zip(a, b))
+
+
+def unique(name):
+ assert isinstance(name, string_types), "`name` must be string"
+
+ while cmds.objExists(name):
+ matches = re.findall(r"\d+$", name)
+
+ if matches:
+ match = matches[-1]
+ name = name.rstrip(match)
+ number = int(match) + 1
+ else:
+ number = 1
+
+ name = name + str(number)
+
+ return name
+
+
+def uv_from_element(element):
+ """Return the UV coordinate of given 'element'
+
+ Supports components, meshes, nurbs.
+
+ """
+
+ supported = ["mesh", "nurbsSurface"]
+
+ uv = [0.5, 0.5]
+
+ if "." not in element:
+ type = cmds.nodeType(element)
+ if type == "transform":
+ geometry_shape = cmds.listRelatives(element, shapes=True)
+
+ if len(geometry_shape) >= 1:
+ geometry_shape = geometry_shape[0]
+ else:
+ return
+
+ elif type in supported:
+ geometry_shape = element
+
+ else:
+ cmds.error("Could not do what you wanted..")
+ return
+ else:
+ # If it is indeed a component - get the current Mesh
+ try:
+ parent = element.split(".", 1)[0]
+
+            # Maya is funny in that when the transform of the shape
+            # of the component element has children, the name returned
+            # by that selection is the shape. Otherwise, it is
+            # the transform. So let's see what type we're dealing with here.
+ if cmds.nodeType(parent) in supported:
+ geometry_shape = parent
+ else:
+ geometry_shape = cmds.listRelatives(parent, shapes=1)[0]
+
+ if not geometry_shape:
+ cmds.error("Skipping %s: Could not find shape." % element)
+ return
+
+ if len(cmds.ls(geometry_shape)) > 1:
+ cmds.warning("Multiple shapes with identical "
+ "names found. This might not work")
+
+ except TypeError as e:
+ cmds.warning("Skipping %s: Didn't find a shape "
+ "for component elementection. %s" % (element, e))
+ return
+
+ try:
+ type = cmds.nodeType(geometry_shape)
+
+ if type == "nurbsSurface":
+            # If a surfacePoint is selected on a nurbs surface
+ root, u, v = element.rsplit("[", 2)
+ uv = [float(u[:-1]), float(v[:-1])]
+
+ if type == "mesh":
+ # -----------
+ # Average the U and V values
+ # ===========
+ uvs = cmds.polyListComponentConversion(element, toUV=1)
+ if not uvs:
+ cmds.warning("Couldn't derive any UV's from "
+ "component, reverting to default U and V")
+ raise TypeError
+
+            # Flatten the list of UVs because sometimes it returns
+            # neighbors as ranges like [2:3] instead of [2], [3]
+ flattened = []
+
+ for uv in uvs:
+ flattened.extend(cmds.ls(uv, flatten=True))
+
+ uvs = flattened
+
+ sumU = 0
+ sumV = 0
+ for uv in uvs:
+ try:
+ u, v = cmds.polyEditUV(uv, query=True)
+ except Exception:
+ cmds.warning("Couldn't find any UV coordinated, "
+ "reverting to default U and V")
+ raise TypeError
+
+ sumU += u
+ sumV += v
+
+ averagedU = sumU / len(uvs)
+ averagedV = sumV / len(uvs)
+
+ uv = [averagedU, averagedV]
+ except TypeError:
+ pass
+
+ return uv
+
+
+def shape_from_element(element):
+ """Return shape of given 'element'
+
+ Supports components, meshes, and surfaces
+
+ """
+
+ try:
+ # Get either shape or transform, based on element-type
+ node = cmds.ls(element, objectsOnly=True)[0]
+ except Exception:
+ cmds.warning("Could not find node in %s" % element)
+ return None
+
+ if cmds.nodeType(node) == 'transform':
+ try:
+ return cmds.listRelatives(node, shapes=True)[0]
+ except Exception:
+ cmds.warning("Could not find shape in %s" % element)
+ return None
+
+ else:
+ return node
+
+
+def collect_animation_data():
+ """Get the basic animation data
+
+ Returns:
+ OrderedDict
+
+ """
+
+ # get scene values as defaults
+ start = cmds.playbackOptions(query=True, animationStartTime=True)
+ end = cmds.playbackOptions(query=True, animationEndTime=True)
+
+ # build attributes
+ data = OrderedDict()
+ data["startFrame"] = start
+ data["endFrame"] = end
+ data["handles"] = 1
+ data["step"] = 1.0
+
+ return data
+
+
+@contextlib.contextmanager
+def attribute_values(attr_values):
+ """Remaps node attributes to values during context.
+
+ Arguments:
+ attr_values (dict): Dictionary with (attr, value)
+
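+    Example:
+        >>> # Sketch, assuming the default persp camera exists:
+        >>> with attribute_values({"perspShape.focalLength": 35.0}):
+        ...     pass  # focal length is 35.0 here, restored afterwards
+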
+ """
+
+ original = [(attr, cmds.getAttr(attr)) for attr in attr_values]
+ try:
+ for attr, value in attr_values.items():
+ if isinstance(value, string_types):
+ cmds.setAttr(attr, value, type="string")
+ else:
+ cmds.setAttr(attr, value)
+ yield
+ finally:
+ for attr, value in original:
+ if isinstance(value, string_types):
+ cmds.setAttr(attr, value, type="string")
+ else:
+ cmds.setAttr(attr, value)
+
+
+@contextlib.contextmanager
+def renderlayer(layer):
+ """Set the renderlayer during the context"""
+
+ original = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
+
+ try:
+ cmds.editRenderLayerGlobals(currentRenderLayer=layer)
+ yield
+ finally:
+ cmds.editRenderLayerGlobals(currentRenderLayer=original)
+
+
+@contextlib.contextmanager
+def evaluation(mode="off"):
+ """Set the evaluation manager during context.
+
+ Arguments:
+ mode (str): The mode to apply during context.
+ "off": The standard DG evaluation (stable)
+ "serial": A serial DG evaluation
+ "parallel": The Maya 2016+ parallel evaluation
+
+ """
+
+ original = cmds.evaluationManager(query=True, mode=1)[0]
+ try:
+ cmds.evaluationManager(mode=mode)
+ yield
+ finally:
+ cmds.evaluationManager(mode=original)
+
+
+def get_renderer(layer):
+ with renderlayer(layer):
+ return cmds.getAttr("defaultRenderGlobals.currentRenderer")
+
+
+def get_current_renderlayer():
+ return cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
+
+
+@contextlib.contextmanager
+def no_undo(flush=False):
+ """Disable the undo queue during the context
+
+ Arguments:
+ flush (bool): When True the undo queue will be emptied when returning
+ from the context losing all undo history. Defaults to False.
+
+ """
+ original = cmds.undoInfo(query=True, state=True)
+ keyword = 'state' if flush else 'stateWithoutFlush'
+
+ try:
+ cmds.undoInfo(**{keyword: False})
+ yield
+ finally:
+ cmds.undoInfo(**{keyword: original})
+
+
+@contextlib.contextmanager
+def namespaced(namespace, new=True):
+ """Work inside namespace during context
+
+    Args:
+        namespace (str): The preferred namespace name.
+        new (bool): When enabled this will rename the namespace to a unique
+            namespace if the input namespace already exists.
+
+ Yields:
+ str: The namespace that is used during the context
+
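+    Example:
+        >>> # Sketch: nodes created inside land in the (unique) namespace
+        >>> with namespaced("temp") as namespace:
+        ...     pass
+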
+ """
+ original = cmds.namespaceInfo(cur=True)
+ if new:
+ namespace = avalon.maya.lib.unique_namespace(namespace)
+ cmds.namespace(add=namespace)
+
+ try:
+ cmds.namespace(set=namespace)
+ yield namespace
+ finally:
+ cmds.namespace(set=original)
+
+
+def polyConstraint(components, *args, **kwargs):
+ """Return the list of *components* with the constraints applied.
+
+ A wrapper around Maya's `polySelectConstraint` to retrieve its results as
+ a list without altering selections. For a list of possible constraints
+ see `maya.cmds.polySelectConstraint` documentation.
+
+ Arguments:
+ components (list): List of components of polygon meshes
+
+ Returns:
+ list: The list of components filtered by the given constraints.
+
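+    Example:
+        >>> # Sketch: keep only the triangle faces of the given components
+        >>> # (type=0x0008 constrains to faces, size=1 to triangles)
+        >>> faces = polyConstraint(["pSphereShape1.f[*]"],
+        ...                        type=0x0008,
+        ...                        size=1)
+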
+ """
+
+ kwargs.pop('mode', None)
+
+ with no_undo(flush=False):
+ with maya.maintained_selection():
+ # Apply constraint using mode=2 (current and next) so
+ # it applies to the selection made before it; because just
+ # a `maya.cmds.select()` call will not trigger the constraint.
+ with reset_polySelectConstraint():
+ cmds.select(components, r=1, noExpand=True)
+ cmds.polySelectConstraint(*args, mode=2, **kwargs)
+ result = cmds.ls(selection=True)
+ cmds.select(clear=True)
+
+ return result
+
+
+@contextlib.contextmanager
+def reset_polySelectConstraint(reset=True):
+ """Context during which the given polyConstraint settings are disabled.
+
+ The original settings are restored after the context.
+
+ """
+
+ original = cmds.polySelectConstraint(query=True, stateString=True)
+
+ try:
+ if reset:
+ # Ensure command is available in mel
+ # This can happen when running standalone
+ if not mel.eval("exists resetPolySelectConstraint"):
+ mel.eval("source polygonConstraint")
+
+ # Reset all parameters
+ mel.eval("resetPolySelectConstraint;")
+ cmds.polySelectConstraint(disable=True)
+ yield
+ finally:
+ mel.eval(original)
+
+
+def is_visible(node,
+ displayLayer=True,
+ intermediateObject=True,
+ parentHidden=True,
+ visibility=True):
+ """Is `node` visible?
+
+ Returns whether a node is hidden by one of the following methods:
+ - The node exists (always checked)
+ - The node must be a dagNode (always checked)
+ - The node's visibility is off.
+ - The node is set as intermediate Object.
+ - The node is in a disabled displayLayer.
+ - Whether any of its parent nodes is hidden.
+
+ Roughly based on: http://ewertb.soundlinker.com/mel/mel.098.php
+
+ Returns:
+ bool: Whether the node is visible in the scene
+
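+    Example:
+        >>> # Sketch, assuming a node "pSphere1" exists and is shown:
+        >>> is_visible("pSphere1")
+        True
+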
+ """
+
+ # Only existing objects can be visible
+ if not cmds.objExists(node):
+ return False
+
+ # Only dagNodes can be visible
+ if not cmds.objectType(node, isAType='dagNode'):
+ return False
+
+ if visibility:
+ if not cmds.getAttr('{0}.visibility'.format(node)):
+ return False
+
+ if intermediateObject and cmds.objectType(node, isAType='shape'):
+ if cmds.getAttr('{0}.intermediateObject'.format(node)):
+ return False
+
+ if displayLayer:
+ # Display layers set overrideEnabled and overrideVisibility on members
+ if cmds.attributeQuery('overrideEnabled', node=node, exists=True):
+ override_enabled = cmds.getAttr('{}.overrideEnabled'.format(node))
+            override_visibility = cmds.getAttr(
+                '{}.overrideVisibility'.format(node))
+ if override_enabled and override_visibility:
+ return False
+
+ if parentHidden:
+ parents = cmds.listRelatives(node, parent=True, fullPath=True)
+ if parents:
+ parent = parents[0]
+ if not is_visible(parent,
+ displayLayer=displayLayer,
+ intermediateObject=False,
+ parentHidden=parentHidden,
+ visibility=visibility):
+ return False
+
+ return True
+
+
+def extract_alembic(file,
+ startFrame=None,
+ endFrame=None,
+ selection=True,
+                    uvWrite=True,
+                    eulerFilter=True,
+ dataFormat="ogawa",
+ verbose=False,
+ **kwargs):
+ """Extract a single Alembic Cache.
+
+ This extracts an Alembic cache using the `-selection` flag to minimize
+ the extracted content to solely what was Collected into the instance.
+
+ Arguments:
+
+ startFrame (float): Start frame of output. Ignored if `frameRange`
+ provided.
+
+ endFrame (float): End frame of output. Ignored if `frameRange`
+ provided.
+
+ frameRange (tuple or str): Two-tuple with start and end frame or a
+ string formatted as: "startFrame endFrame". This argument
+ overrides `startFrame` and `endFrame` arguments.
+
+ dataFormat (str): The data format to use for the cache,
+ defaults to "ogawa"
+
+ verbose (bool): When on, outputs frame number information to the
+ Script Editor or output window during extraction.
+
+ noNormals (bool): When on, normal data from the original polygon
+ objects is not included in the exported Alembic cache file.
+
+ renderableOnly (bool): When on, any non-renderable nodes or hierarchy,
+ such as hidden objects, are not included in the Alembic file.
+ Defaults to False.
+
+ stripNamespaces (bool): When on, any namespaces associated with the
+ exported objects are removed from the Alembic file. For example, an
+ object with the namespace taco:foo:bar appears as bar in the
+ Alembic file.
+
+ uvWrite (bool): When on, UV data from polygon meshes and subdivision
+ objects are written to the Alembic file. Only the current UV map is
+ included.
+
+ worldSpace (bool): When on, the top node in the node hierarchy is
+ stored as world space. By default, these nodes are stored as local
+ space. Defaults to False.
+
+ eulerFilter (bool): When on, X, Y, and Z rotation data is filtered with
+ an Euler filter. Euler filtering helps resolve irregularities in
+ rotations especially if X, Y, and Z rotations exceed 360 degrees.
+ Defaults to True.
+
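+    Example:
+        >>> # Sketch; "|model_GRP" is an illustrative root node
+        >>> path = extract_alembic("C:/exports/model.abc",
+        ...                        frameRange=(1001, 1100),
+        ...                        root=["|model_GRP"],
+        ...                        worldSpace=True)
+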
+ """
+
+ # Ensure alembic exporter is loaded
+ cmds.loadPlugin('AbcExport', quiet=True)
+
+ # Alembic Exporter requires forward slashes
+ file = file.replace('\\', '/')
+
+ # Pass the start and end frame on as `frameRange` so that it
+ # never conflicts with that argument
+ if "frameRange" not in kwargs:
+ # Fallback to maya timeline if no start or end frame provided.
+ if startFrame is None:
+ startFrame = cmds.playbackOptions(query=True,
+ animationStartTime=True)
+ if endFrame is None:
+ endFrame = cmds.playbackOptions(query=True,
+ animationEndTime=True)
+
+ # Ensure valid types are converted to frame range
+ assert isinstance(startFrame, _alembic_options["startFrame"])
+ assert isinstance(endFrame, _alembic_options["endFrame"])
+ kwargs["frameRange"] = "{0} {1}".format(startFrame, endFrame)
+ else:
+ # Allow conversion from tuple for `frameRange`
+ frame_range = kwargs["frameRange"]
+ if isinstance(frame_range, (list, tuple)):
+ assert len(frame_range) == 2
+ kwargs["frameRange"] = "{0} {1}".format(frame_range[0],
+ frame_range[1])
+
+ # Assemble options
+ options = {
+ "selection": selection,
+ "uvWrite": uvWrite,
+ "eulerFilter": eulerFilter,
+ "dataFormat": dataFormat
+ }
+ options.update(kwargs)
+
+ # Validate options
+ for key, value in options.copy().items():
+
+ # Discard unknown options
+ if key not in _alembic_options:
+ options.pop(key)
+ continue
+
+ # Validate value type
+ valid_types = _alembic_options[key]
+ if not isinstance(value, valid_types):
+ raise TypeError("Alembic option unsupported type: "
+ "{0} (expected {1})".format(value, valid_types))
+
+ # The `writeCreases` argument was changed to `autoSubd` in Maya 2018+
+ maya_version = int(cmds.about(version=True))
+ if maya_version >= 2018:
+ options['autoSubd'] = options.pop('writeCreases', False)
+
+ # Format the job string from options
+ job_args = list()
+ for key, value in options.items():
+ if isinstance(value, (list, tuple)):
+ for entry in value:
+ job_args.append("-{} {}".format(key, entry))
+ elif isinstance(value, bool):
+ # Add only when state is set to True
+ if value:
+ job_args.append("-{0}".format(key))
+ else:
+ job_args.append("-{0} {1}".format(key, value))
+
+ job_str = " ".join(job_args)
+ job_str += ' -file "%s"' % file
+
+ # Ensure output directory exists
+ parent_dir = os.path.dirname(file)
+ if not os.path.exists(parent_dir):
+ os.makedirs(parent_dir)
+
+ if verbose:
+ log.debug("Preparing Alembic export with options: %s",
+ json.dumps(options, indent=4))
+ log.debug("Extracting Alembic with job arguments: %s", job_str)
+
+ # Perform extraction
+ print("Alembic Job Arguments : {}".format(job_str))
+
+ # Disable the parallel evaluation temporarily to ensure no buggy
+ # exports are made. (PLN-31)
+ # TODO: Make sure this actually fixes the issues
+ with evaluation("off"):
+ cmds.AbcExport(j=job_str, verbose=verbose)
+
+ if verbose:
+ log.debug("Extracted Alembic to: %s", file)
+
+ return file
+
+
+def maya_temp_folder():
+ scene_dir = os.path.dirname(cmds.file(query=True, sceneName=True))
+ tmp_dir = os.path.abspath(os.path.join(scene_dir, "..", "tmp"))
+ if not os.path.isdir(tmp_dir):
+ os.makedirs(tmp_dir)
+
+ return tmp_dir
+
+
+# region ID
+def get_id_required_nodes(referenced_nodes=False, nodes=None):
+ """Filter out any node which are locked (reference) or readOnly
+
+ Args:
+ referenced_nodes (bool): set True to filter out reference nodes
+ nodes (list, Optional): nodes to consider
+ Returns:
+ nodes (set): list of filtered nodes
+ """
+
+ if nodes is None:
+ # Consider all nodes
+ nodes = cmds.ls()
+
+ def _node_type_exists(node_type):
+ try:
+ cmds.nodeType(node_type, isTypeName=True)
+ return True
+ except RuntimeError:
+ return False
+
+    # The `readOnly` flag is obsolete as of Maya 2016, therefore we explicitly
+    # remove default nodes and reference nodes
+ camera_shapes = ["frontShape", "sideShape", "topShape", "perspShape"]
+
+ ignore = set()
+ if not referenced_nodes:
+ ignore |= set(cmds.ls(long=True, referencedNodes=True))
+
+ # list all defaultNodes to filter out from the rest
+ ignore |= set(cmds.ls(long=True, defaultNodes=True))
+ ignore |= set(cmds.ls(camera_shapes, long=True))
+
+ # Remove Turtle from the result of `cmds.ls` if Turtle is loaded
+ # TODO: This should be a less specific check for a single plug-in.
+ if _node_type_exists("ilrBakeLayer"):
+ ignore |= set(cmds.ls(type="ilrBakeLayer", long=True))
+
+ # Establish set of nodes types to include
+ types = ["objectSet", "file", "mesh", "nurbsCurve", "nurbsSurface"]
+
+ # Check if plugin nodes are available for Maya by checking if the plugin
+ # is loaded
+ if cmds.pluginInfo("pgYetiMaya", query=True, loaded=True):
+ types.append("pgYetiMaya")
+
+ # We *always* ignore intermediate shapes, so we filter them out
+ # directly
+ nodes = cmds.ls(nodes, type=types, long=True, noIntermediate=True)
+
+ # The items which need to pass the id to their parent
+ # Add the collected transform to the nodes
+ dag = cmds.ls(nodes, type="dagNode", long=True) # query only dag nodes
+ transforms = cmds.listRelatives(dag,
+ parent=True,
+ fullPath=True) or []
+
+ nodes = set(nodes)
+ nodes |= set(transforms)
+
+ nodes -= ignore # Remove the ignored nodes
+ if not nodes:
+ return nodes
+
+ # Avoid locked nodes
+ nodes_list = list(nodes)
+ locked = cmds.lockNode(nodes_list, query=True, lock=True)
+ for node, lock in zip(nodes_list, locked):
+ if lock:
+ log.warning("Skipping locked node: %s" % node)
+ nodes.remove(node)
+
+ return nodes
+
+
+def get_id(node):
+ """
+ Get the `cbId` attribute of the given node
+ Args:
+ node (str): the name of the node to retrieve the attribute from
+
+ Returns:
+ str
+
+ """
+
+ if node is None:
+ return
+
+ if not cmds.attributeQuery("cbId", node=node, exists=True):
+ return
+
+ return cmds.getAttr("{}.cbId".format(node))
+
+
+def generate_ids(nodes, asset_id=None):
+ """Returns new unique ids for the given nodes.
+
+ Note: This does not assign the new ids, it only generates the values.
+
+ To assign new ids using this method:
+ >>> nodes = ["a", "b", "c"]
+ >>> for node, id in generate_ids(nodes):
+ >>> set_id(node, id)
+
+ To also override any existing values (and assign regenerated ids):
+ >>> nodes = ["a", "b", "c"]
+ >>> for node, id in generate_ids(nodes):
+ >>> set_id(node, id, overwrite=True)
+
+ Args:
+ nodes (list): List of nodes.
+ asset_id (str or bson.ObjectId): The database id for the *asset* to
+ generate for. When None provided the current asset in the
+ active session is used.
+
+ Returns:
+ list: A list of (node, id) tuples.
+
+ """
+
+ if asset_id is None:
+ # Get the asset ID from the database for the asset of current context
+ asset_data = io.find_one({"type": "asset",
+ "name": api.Session["AVALON_ASSET"]},
+ projection={"_id": True})
+ assert asset_data, "No current asset found in Session"
+ asset_id = asset_data['_id']
+
+ node_ids = []
+ for node in nodes:
+ _, uid = str(uuid.uuid4()).rsplit("-", 1)
+ unique_id = "{}:{}".format(asset_id, uid)
+ node_ids.append((node, unique_id))
+
+ return node_ids
+
+
+def set_id(node, unique_id, overwrite=False):
+ """Add cbId to `node` unless one already exists.
+
+ Args:
+ node (str): the node to add the "cbId" on
+ unique_id (str): The unique node id to assign.
+ This should be generated by `generate_ids`.
+ overwrite (bool, optional): When True overrides the current value even
+ if `node` already has an id. Defaults to False.
+
+ Returns:
+ None
+
+ """
+
+ attr = "{0}.cbId".format(node)
+ exists = cmds.attributeQuery("cbId", node=node, exists=True)
+
+ # Add the attribute if it does not exist yet
+ if not exists:
+ cmds.addAttr(node, longName="cbId", dataType="string")
+
+ # Set the value
+ if not exists or overwrite:
+ cmds.setAttr(attr, unique_id, type="string")
+
+
+def remove_id(node):
+ """Remove the id attribute from the input node.
+
+ Args:
+ node (str): The node name
+
+ Returns:
+ bool: Whether an id attribute was deleted
+
+ """
+ if cmds.attributeQuery("cbId", node=node, exists=True):
+ cmds.deleteAttr("{}.cbId".format(node))
+ return True
+ return False
+
+
+# endregion ID
+def get_reference_node(path):
+ """
+ Get the reference node when the path is found being used in a reference
+ Args:
+ path (str): the file path to check
+
+ Returns:
+ node (str): name of the reference node in question
+ """
+ try:
+ node = cmds.file(path, query=True, referenceNode=True)
+ except RuntimeError:
+ log.debug('File is not referenced : "{}"'.format(path))
+ return
+
+ reference_path = cmds.referenceQuery(path, filename=True)
+ if os.path.normpath(path) == os.path.normpath(reference_path):
+ return node
+
+
+def set_attribute(attribute, value, node):
+ """Adjust attributes based on the value from the attribute data
+
+ If an attribute does not exists on the target it will be added with
+ the dataType being controlled by the value type.
+
+ Args:
+ attribute (str): name of the attribute to change
+ value: the value to change to attribute to
+ node (str): name of the node
+
+ Returns:
+ None
+ """
+
+ value_type = type(value).__name__
+ kwargs = ATTRIBUTE_DICT[value_type]
+ if not cmds.attributeQuery(attribute, node=node, exists=True):
+ log.debug("Creating attribute '{}' on "
+ "'{}'".format(attribute, node))
+ cmds.addAttr(node, longName=attribute, **kwargs)
+
+ node_attr = "{}.{}".format(node, attribute)
+ if "dataType" in kwargs:
+ attr_type = kwargs["dataType"]
+ cmds.setAttr(node_attr, value, type=attr_type)
+ else:
+ cmds.setAttr(node_attr, value)
+
+
+def apply_attributes(attributes, nodes_by_id):
+ """Alter the attributes to match the state when publishing
+
+ Apply attribute settings from the publish to the node in the scene based
+ on the UUID which is stored in the cbId attribute.
+
+ Args:
+ attributes (list): list of dictionaries
+ nodes_by_id (dict): collection of nodes based on UUID
+ {uuid: [node, node]}
+
+ """
+
+ for attr_data in attributes:
+ nodes = nodes_by_id[attr_data["uuid"]]
+ attr_value = attr_data["attributes"]
+ for node in nodes:
+ for attr, value in attr_value.items():
+ set_attribute(attr, value, node)
+
+
+# region LOOKDEV
+def list_looks(asset_id):
+ """Return all look subsets for the given asset
+
+    This assumes all look subset names start with "look".
+ """
+
+    # Get all subsets with a name starting with "look"
+    # that are associated with the asset
+ subset = io.find({"parent": bson.ObjectId(asset_id),
+ "type": "subset",
+ "name": {"$regex": "look*"}})
+
+ return list(subset)
+
+
+def assign_look_by_version(nodes, version_id):
+ """Assign nodes a specific published look version by id.
+
+ This assumes the nodes correspond with the asset.
+
+ Args:
+ nodes(list): nodes to assign look to
+ version_id (bson.ObjectId): database id of the version
+
+ Returns:
+ None
+ """
+
+ # Get representations of shader file and relationships
+ look_representation = io.find_one({"type": "representation",
+ "parent": version_id,
+ "name": "ma"})
+
+ json_representation = io.find_one({"type": "representation",
+ "parent": version_id,
+ "name": "json"})
+
+ # See if representation is already loaded, if so reuse it.
+ host = api.registered_host()
+ representation_id = str(look_representation['_id'])
+ for container in host.ls():
+ if (container['loader'] == "LookLoader" and
+ container['representation'] == representation_id):
+ log.info("Reusing loaded look ..")
+ container_node = container['objectName']
+ break
+ else:
+ log.info("Using look for the first time ..")
+
+ # Load file
+ loaders = api.loaders_from_representation(api.discover(api.Loader),
+ representation_id)
+ Loader = next((i for i in loaders if i.__name__ == "LookLoader"), None)
+ if Loader is None:
+ raise RuntimeError("Could not find LookLoader, this is a bug")
+
+ # Reference the look file
+ with maya.maintained_selection():
+ container_node = pipeline.load(Loader, look_representation)
+
+ # Get container members
+ shader_nodes = cmds.sets(container_node, query=True)
+
+ # Load relationships
+ shader_relation = api.get_representation_path(json_representation)
+ with open(shader_relation, "r") as f:
+ relationships = json.load(f)
+
+ # Assign relationships
+ apply_shaders(relationships, shader_nodes, nodes)
+
+
+def assign_look(nodes, subset="lookDefault"):
+ """Assigns a look to a node.
+
+ Optimizes the nodes by grouping by asset id and finding
+ related subset by name.
+
+ Args:
+ nodes (list): all nodes to assign the look to
+ subset (str): name of the subset to find
+ """
+
+ # Group all nodes per asset id
+ grouped = defaultdict(list)
+ for node in nodes:
+ colorbleed_id = get_id(node)
+ if not colorbleed_id:
+ continue
+
+ parts = colorbleed_id.split(":", 1)
+ grouped[parts[0]].append(node)
+
+ for asset_id, asset_nodes in grouped.items():
+ # create objectId for database
+ try:
+ asset_id = bson.ObjectId(asset_id)
+ except bson.errors.InvalidId:
+ log.warning("Asset ID is not compatible with bson")
+ continue
+ subset_data = io.find_one({"type": "subset",
+ "name": subset,
+ "parent": asset_id})
+
+ if not subset_data:
+ log.warning("No subset '{}' found for {}".format(subset, asset_id))
+ continue
+
+ # get last version
+ # with backwards compatibility
+ version = io.find_one({"parent": subset_data['_id'],
+ "type": "version",
+ "data.families":
+ {"$in": ["colorbleed.look"]}
+ },
+ sort=[("name", -1)],
+ projection={"_id": True, "name": True})
+
+ log.debug("Assigning look '{}' ".format(subset,
+ version["name"]))
+
+ assign_look_by_version(asset_nodes, version['_id'])
+
+
+def apply_shaders(relationships, shadernodes, nodes):
+ """Link shadingEngine to the right nodes based on relationship data
+
+    Relationship data is constructed from a collection of `sets` and
+    `attributes`. `sets` corresponds with the shadingEngines found in the
+    lookdev. Each set has the keys `name`, `members` and `uuid`; the
+    `members` hold a collection of node information (`name` and `uuid`).
+
+ Args:
+ relationships (dict): relationship data
+ shadernodes (list): list of nodes of the shading objectSets (includes
+ VRayObjectProperties and shadingEngines)
+ nodes (list): list of nodes to apply shader to
+
+ Returns:
+ None
+ """
+
+ attributes = relationships.get("attributes", [])
+ shader_data = relationships.get("relationships", {})
+
+ shading_engines = cmds.ls(shadernodes, type="objectSet", long=True)
+ assert shading_engines, "Error in retrieving objectSets from reference"
+
+ # region compute lookup
+ nodes_by_id = defaultdict(list)
+ for node in nodes:
+ nodes_by_id[get_id(node)].append(node)
+
+ shading_engines_by_id = defaultdict(list)
+ for shad in shading_engines:
+ shading_engines_by_id[get_id(shad)].append(shad)
+ # endregion
+
+ # region assign shading engines and other sets
+ for data in shader_data.values():
+ # collect all unique IDs of the set members
+ shader_uuid = data["uuid"]
+ member_uuids = [member["uuid"] for member in data["members"]]
+
+ filtered_nodes = list()
+ for uuid in member_uuids:
+ filtered_nodes.extend(nodes_by_id[uuid])
+
+ id_shading_engines = shading_engines_by_id[shader_uuid]
+ if not id_shading_engines:
+ log.error("No shader found with cbId "
+ "'{}'".format(shader_uuid))
+ continue
+ elif len(id_shading_engines) > 1:
+ log.error("Skipping shader assignment. "
+ "More than one shader found with cbId "
+ "'{}'. (found: {})".format(shader_uuid,
+ id_shading_engines))
+ continue
+
+ if not filtered_nodes:
+ log.warning("No nodes found for shading engine "
+ "'{0}'".format(id_shading_engines[0]))
+ continue
+
+ cmds.sets(filtered_nodes, forceElement=id_shading_engines[0])
+ # endregion
+
+ apply_attributes(attributes, nodes_by_id)
+
+
+# endregion LOOKDEV
+def get_isolate_view_sets():
+ """Return isolate view sets of all modelPanels.
+
+ Returns:
+ list: all sets related to isolate view
+
+ """
+
+ view_sets = set()
+ for panel in cmds.getPanel(type="modelPanel") or []:
+ view_set = cmds.modelEditor(panel, query=True, viewObjects=True)
+ if view_set:
+ view_sets.add(view_set)
+
+ return view_sets
+
+
+def get_related_sets(node):
+ """Return objectSets that are relationships for a look for `node`.
+
+ Filters out based on:
+    - id attribute is NOT `pyblish.avalon.instance` or
+      `pyblish.avalon.container`
+    - shapes and deformer shapes (alembic creates meshShapeDeformed)
+    - set name ends with any from a predefined list
+    - set is not in a viewport set (isolate selected, for example)
+
+ Args:
+ node (str): name of the current node to check
+
+ Returns:
+ list: The related sets
+
+ """
+
+ # Ignore specific suffices
+ ignore_suffices = ["out_SET", "controls_SET", "_INST", "_CON"]
+
+ # Default nodes to ignore
+ defaults = {"defaultLightSet", "defaultObjectSet"}
+
+ # Ids to ignore
+ ignored = {"pyblish.avalon.instance", "pyblish.avalon.container"}
+
+ view_sets = get_isolate_view_sets()
+
+ sets = cmds.listSets(object=node, extendToShape=False)
+ if not sets:
+ return []
+
+ # Fix 'no object matches name' errors on nodes returned by listSets.
+ # In rare cases it can happen that a node is added to an internal maya
+ # set inaccessible by maya commands, for example check some nodes
+ # returned by `cmds.listSets(allSets=True)`
+ sets = cmds.ls(sets)
+
+ # Ignore `avalon.container`
+ sets = [s for s in sets if
+ not cmds.attributeQuery("id", node=s, exists=True) or
+ not cmds.getAttr("%s.id" % s) in ignored]
+
+ # Exclude deformer sets (`type=2` for `maya.cmds.listSets`)
+ deformer_sets = cmds.listSets(object=node,
+ extendToShape=False,
+ type=2) or []
+ deformer_sets = set(deformer_sets) # optimize lookup
+ sets = [s for s in sets if s not in deformer_sets]
+
+ # Ignore when the set has a specific suffix
+ sets = [s for s in sets if not any(s.endswith(x) for x in ignore_suffices)]
+
+ # Ignore viewport filter view sets (from isolate select and
+ # viewports)
+ sets = [s for s in sets if s not in view_sets]
+ sets = [s for s in sets if s not in defaults]
+
+ return sets
+
+
+def get_container_transforms(container, members=None, root=False):
+ """Retrieve the root node of the container content
+
+ When a container is created through a Loader the content
+ of the file will be grouped under a transform. The name of the root
+ transform is stored in the container information
+
+ Args:
+ container (dict): the container
+ members (list): optional and convenience argument
+        root (bool): return the highest node in the hierarchy if True
+
+ Returns:
+ root (list / str):
+ """
+
+ if not members:
+ members = cmds.sets(container["objectName"], query=True)
+
+ results = cmds.ls(members, type="transform", long=True)
+ if root:
+ root = get_highest_in_hierarchy(results)
+ if root:
+ results = root[0]
+
+ return results
+
+
+def get_highest_in_hierarchy(nodes):
+ """Return highest nodes in the hierarchy that are in the `nodes` list.
+
+ The "highest in hierarchy" are the nodes closest to world: top-most level.
+
+ Args:
+ nodes (list): The nodes in which find the highest in hierarchies.
+
+ Returns:
+ list: The highest nodes from the input nodes.
+
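+    Example:
+        >>> # Sketch, assuming these DAG paths exist in the scene:
+        >>> get_highest_in_hierarchy(["|char_GRP|body_GEO", "|char_GRP"])
+        ['|char_GRP']
+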
+ """
+
+ # Ensure we use long names
+ nodes = cmds.ls(nodes, long=True)
+ lookup = set(nodes)
+
+ highest = []
+ for node in nodes:
+ # If no parents are within the nodes input list
+ # then this is a highest node
+ if not any(n in lookup for n in iter_parents(node)):
+ highest.append(node)
+
+ return highest
+
+
+def iter_parents(node):
+ """Iter parents of node from its long name.
+
+ Note: The `node` *must* be the long node name.
+
+ Args:
+ node (str): Node long name.
+
+ Yields:
+ str: All parent node names (long names)
+
+ """
+ while True:
+ split = node.rsplit("|", 1)
+ if len(split) == 1:
+ return
+
+ node = split[0]
+ yield node
+
+
+def remove_other_uv_sets(mesh):
+ """Remove all other UV sets than the current UV set.
+
+    Keep only the current UV set and ensure it is renamed to the default
+    'map1'.
+
+ """
+
+ uvSets = cmds.polyUVSet(mesh, query=True, allUVSets=True)
+ current = cmds.polyUVSet(mesh, query=True, currentUVSet=True)[0]
+
+ # Copy over to map1
+ if current != 'map1':
+ cmds.polyUVSet(mesh, uvSet=current, newUVSet='map1', copy=True)
+ cmds.polyUVSet(mesh, currentUVSet=True, uvSet='map1')
+ current = 'map1'
+
+ # Delete all non-current UV sets
+ deleteUVSets = [uvSet for uvSet in uvSets if uvSet != current]
+ uvSet = None
+
+ # Maya Bug (tested in 2015/2016):
+ # In some cases the API's MFnMesh will report less UV sets than
+ # maya.cmds.polyUVSet. This seems to happen when the deletion of UV sets
+ # has not triggered a cleanup of the UVSet array attribute on the mesh
+ # node. It will still have extra entries in the attribute, though it will
+ # not show up in API or UI. Nevertheless it does show up in
+ # maya.cmds.polyUVSet. To ensure we clean up the array we'll force delete
+ # the extra remaining 'indices' that we don't want.
+
+ # TODO: Implement a better fix
+ # The best way to fix would be to get the UVSet indices from api with
+ # MFnMesh (to ensure we keep correct ones) and then only force delete the
+ # other entries in the array attribute on the node. But for now we're
+ # deleting all entries except first one. Note that the first entry could
+ # never be removed (the default 'map1' always exists and is supposed to
+ # be undeletable.)
+ try:
+ for uvSet in deleteUVSets:
+ cmds.polyUVSet(mesh, delete=True, uvSet=uvSet)
+ except RuntimeError as exc:
+ log.warning('Error uvSet: %s - %s', uvSet, exc)
+ indices = cmds.getAttr('{0}.uvSet'.format(mesh),
+ multiIndices=True)
+ if not indices:
+ log.warning("No uv set found indices for: %s", mesh)
+ return
+
+ # Delete from end to avoid shifting indices
+ # and remove the indices in the attribute
+ indices = reversed(indices[1:])
+ for i in indices:
+ attr = '{0}.uvSet[{1}]'.format(mesh, i)
+ cmds.removeMultiInstance(attr, b=True)
+
+
+def get_id_from_history(node):
+ """Return first node id in the history chain that matches this node.
+
+ The nodes in history must be of the exact same node type and must be
+ parented under the same parent.
+
+ Args:
+        node (str): node to retrieve the id for
+
+ Returns:
+ str or None: The id from the node in history or None when no id found
+ on any valid nodes in the history.
+
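+    Example:
+        Recover the id of, e.g., a duplicated mesh (hypothetical node):
+
+        >>> _id = get_id_from_history("|asset_GRP|bodyShape1")
+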
+ """
+
+ def _get_parent(node):
+ """Return full path name for parent of node"""
+ return cmds.listRelatives(node, parent=True, fullPath=True)
+
+ node = cmds.ls(node, long=True)[0]
+
+ # Find all similar nodes in history
+ history = cmds.listHistory(node)
+ node_type = cmds.nodeType(node)
+ similar_nodes = cmds.ls(history, exactType=node_type, long=True)
+
+ # Exclude itself
+ similar_nodes = [x for x in similar_nodes if x != node]
+
+ # The node *must be* under the same parent
+ parent = _get_parent(node)
+ similar_nodes = [i for i in similar_nodes if _get_parent(i) == parent]
+
+ # Check all of the remaining similar nodes and take the first one
+ # with an id and assume it's the original.
+ for similar_node in similar_nodes:
+ _id = get_id(similar_node)
+ if _id:
+ return _id
+
+
+def set_scene_fps(fps, update=True):
+ """Set FPS from project configuration
+
+ Args:
+ fps (int, float): desired FPS
+ update(bool): toggle update animation, default is True
+
+ Returns:
+ None
+
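+    Example:
+        >>> set_scene_fps(25)      # sets the time unit to "25fps"
+        >>> set_scene_fps(23.976)  # assuming 23.976 is listed in FLOAT_FPS
+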
+ """
+
+ if fps in FLOAT_FPS:
+ unit = "{:f}fps".format(fps)
+
+ elif fps in INT_FPS:
+ unit = "{:d}fps".format(int(fps))
+
+ else:
+ raise ValueError("Unsupported FPS value: `%s`" % fps)
+
+ log.info("Updating FPS to '{}'".format(unit))
+ cmds.currentUnit(time=unit, updateAnimation=update)
+
+    # Force file state to 'modified'
+ cmds.file(modified=True)
+
+
+def validate_fps():
+ """Validate current scene FPS and show pop-up when it is incorrect
+
+ Returns:
+ bool
+
+ """
+
+ fps = lib.get_project_fps() # can be int or float
+ current_fps = mel.eval('currentTimeUnitToFPS()') # returns float
+
+ if current_fps != fps:
+
+ from avalon.vendor.Qt import QtWidgets
+ from ..widgets import popup
+
+ # Find maya main window
+ top_level_widgets = {w.objectName(): w for w in
+ QtWidgets.QApplication.topLevelWidgets()}
+
+ parent = top_level_widgets.get("MayaWindow", None)
+        if parent is not None:
+ dialog = popup.Popup2(parent=parent)
+ dialog.setModal(True)
+ dialog.setWindowTitle("Maya scene not in line with project")
+ dialog.setMessage("The FPS is out of sync, please fix")
+
+            # Read the toggle state to decide whether animation keys
+            # should be updated along with the FPS change
+ toggle = dialog.widgets["toggle"]
+ update = toggle.isChecked()
+ dialog.on_show.connect(lambda: set_scene_fps(fps, update))
+
+ dialog.show()
+
+ return False
+
+ return True
diff --git a/config/apps/maya/menu.json b/config/apps/maya/menu.json
new file mode 100644
index 0000000000..d394f5dba1
--- /dev/null
+++ b/config/apps/maya/menu.json
@@ -0,0 +1,1959 @@
+[
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\save_scene_incremental.py",
+ "sourcetype": "file",
+ "title": "Version Up",
+ "tooltip": "Incremental save with a specific format"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\show_current_scene_in_explorer.py",
+ "sourcetype": "file",
+ "title": "Explore current scene..",
+ "tooltip": "Show current scene in Explorer"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\avalon\\launch_manager.py",
+ "sourcetype": "file",
+ "title": "Project Manager",
+ "tooltip": "Add assets to the project"
+ },
+ {
+ "type": "separator"
+ },
+ {
+ "type": "menu",
+ "title": "Modeling",
+ "items": [
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\modeling\\duplicate_normalized.py",
+ "sourcetype": "file",
+ "tags": [
+ "modeling",
+ "duplicate",
+ "normalized"
+ ],
+ "title": "Duplicate Normalized",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\modeling\\transferUVs.py",
+ "sourcetype": "file",
+ "tags": [
+ "modeling",
+ "transfer",
+ "uv"
+ ],
+ "title": "Transfer UVs",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\modeling\\mirrorSymmetry.py",
+ "sourcetype": "file",
+ "tags": [
+ "modeling",
+ "mirror",
+ "symmetry"
+ ],
+ "title": "Mirror Symmetry",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\modeling\\selectOutlineUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "modeling",
+ "select",
+ "outline",
+ "ui"
+ ],
+ "title": "Select Outline UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\modeling\\polyDeleteOtherUVSets.py",
+ "sourcetype": "file",
+ "tags": [
+ "modeling",
+ "polygon",
+ "uvset",
+ "delete"
+ ],
+ "title": "Polygon Delete Other UV Sets",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\modeling\\polyCombineQuick.py",
+ "sourcetype": "file",
+ "tags": [
+ "modeling",
+ "combine",
+ "polygon",
+ "quick"
+ ],
+ "title": "Polygon Combine Quick",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\modeling\\separateMeshPerShader.py",
+ "sourcetype": "file",
+ "tags": [
+ "modeling",
+ "separateMeshPerShader"
+ ],
+ "title": "Separate Mesh Per Shader",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\modeling\\polyDetachSeparate.py",
+ "sourcetype": "file",
+ "tags": [
+ "modeling",
+ "poly",
+ "detach",
+ "separate"
+ ],
+ "title": "Polygon Detach and Separate",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\modeling\\polyRelaxVerts.py",
+ "sourcetype": "file",
+ "tags": [
+ "modeling",
+ "relax",
+ "verts"
+ ],
+ "title": "Polygon Relax Vertices",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\modeling\\polySelectEveryNthEdgeUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "modeling",
+ "select",
+ "nth",
+ "edge",
+ "ui"
+ ],
+ "title": "Select Every Nth Edge"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\modeling\\djPFXUVs.py",
+ "sourcetype": "file",
+ "tags": [
+ "modeling",
+ "djPFX",
+ "UVs"
+ ],
+ "title": "dj PFX UVs",
+ "tooltip": ""
+ }
+ ]
+ },
+ {
+ "type": "menu",
+ "title": "Rigging",
+ "items": [
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\addCurveBetween.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "addCurveBetween",
+ "file"
+ ],
+ "title": "Add Curve Between"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\averageSkinWeights.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "average",
+ "skin weights",
+ "file"
+ ],
+ "title": "Average Skin Weights"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\cbSmoothSkinWeightUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "cbSmoothSkinWeightUI",
+ "file"
+ ],
+ "title": "CB Smooth Skin Weight UI"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\channelBoxManagerUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "channelBoxManagerUI",
+ "file"
+ ],
+ "title": "Channel Box Manager UI"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\characterAutorigger.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "characterAutorigger",
+ "file"
+ ],
+ "title": "Character Auto Rigger"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\connectUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "connectUI",
+ "file"
+ ],
+ "title": "Connect UI"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\copySkinWeightsLocal.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "copySkinWeightsLocal",
+ "file"
+ ],
+ "title": "Copy Skin Weights Local"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\createCenterLocator.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "createCenterLocator",
+ "file"
+ ],
+ "title": "Create Center Locator"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\freezeTransformToGroup.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "freezeTransformToGroup",
+ "file"
+ ],
+ "title": "Freeze Transform To Group"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\groupSelected.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "groupSelected",
+ "file"
+ ],
+ "title": "Group Selected"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\ikHandlePoleVectorLocator.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "ikHandlePoleVectorLocator",
+ "file"
+ ],
+ "title": "IK Handle Pole Vector Locator"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\jointOrientUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "jointOrientUI",
+ "file"
+ ],
+ "title": "Joint Orient UI"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\jointsOnCurve.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "jointsOnCurve",
+ "file"
+ ],
+ "title": "Joints On Curve"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\resetBindSelectedSkinJoints.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "resetBindSelectedSkinJoints",
+ "file"
+ ],
+ "title": "Reset Bind Selected Skin Joints"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\selectSkinclusterJointsFromSelectedComponents.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "selectSkinclusterJointsFromSelectedComponents",
+ "file"
+ ],
+ "title": "Select Skincluster Joints From Selected Components"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\selectSkinclusterJointsFromSelectedMesh.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "selectSkinclusterJointsFromSelectedMesh",
+ "file"
+ ],
+ "title": "Select Skincluster Joints From Selected Mesh"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\setJointLabels.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "setJointLabels",
+ "file"
+ ],
+ "title": "Set Joint Labels"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\setJointOrientationFromCurrentRotation.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "setJointOrientationFromCurrentRotation",
+ "file"
+ ],
+ "title": "Set Joint Orientation From Current Rotation"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\setSelectedJointsOrientationZero.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "setSelectedJointsOrientationZero",
+ "file"
+ ],
+ "title": "Set Selected Joints Orientation Zero"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\mirrorCurveShape.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "mirrorCurveShape",
+ "file"
+ ],
+ "title": "Mirror Curve Shape"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\setRotationOrderUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "setRotationOrderUI",
+ "file"
+ ],
+ "title": "Set Rotation Order UI"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\paintItNowUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "paintItNowUI",
+ "file"
+ ],
+ "title": "Paint It Now UI"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\parentScaleConstraint.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "parentScaleConstraint",
+ "file"
+ ],
+ "title": "Parent Scale Constraint"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\quickSetWeightsUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "quickSetWeightsUI",
+ "file"
+ ],
+ "title": "Quick Set Weights UI"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\rapidRig.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "rapidRig",
+ "file"
+ ],
+ "title": "Rapid Rig"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\regenerate_blendshape_targets.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "regenerate_blendshape_targets",
+ "file"
+ ],
+ "title": "Regenerate Blendshape Targets"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\removeRotationAxis.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "removeRotationAxis",
+ "file"
+ ],
+ "title": "Remove Rotation Axis"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\resetBindSelectedMeshes.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "resetBindSelectedMeshes",
+ "file"
+ ],
+ "title": "Reset Bind Selected Meshes"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\simpleControllerOnSelection.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "simpleControllerOnSelection",
+ "file"
+ ],
+ "title": "Simple Controller On Selection"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\simpleControllerOnSelectionHierarchy.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "simpleControllerOnSelectionHierarchy",
+ "file"
+ ],
+ "title": "Simple Controller On Selection Hierarchy"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\superRelativeCluster.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "superRelativeCluster",
+ "file"
+ ],
+ "title": "Super Relative Cluster"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\tfSmoothSkinWeight.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "tfSmoothSkinWeight",
+ "file"
+ ],
+ "title": "TF Smooth Skin Weight"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\toggleIntermediates.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "toggleIntermediates",
+ "file"
+ ],
+ "title": "Toggle Intermediates"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\toggleSegmentScaleCompensate.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "toggleSegmentScaleCompensate",
+ "file"
+ ],
+ "title": "Toggle Segment Scale Compensate"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\rigging\\toggleSkinclusterDeformNormals.py",
+ "sourcetype": "file",
+ "tags": [
+ "rigging",
+ "toggleSkinclusterDeformNormals",
+ "file"
+ ],
+ "title": "Toggle Skincluster Deform Normals"
+ }
+ ]
+ },
+ {
+ "type": "menu",
+ "title": "Shading",
+ "items": [
+ {
+ "type": "menu",
+ "title": "VRay",
+ "items": [
+ {
+ "type": "action",
+ "title": "Import Proxies",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\vrayImportProxies.py",
+ "sourcetype": "file",
+ "tags": ["shading", "vray", "import", "proxies"],
+ "tooltip": ""
+ },
+ {
+ "type": "separator"
+ },
+ {
+ "type": "action",
+ "title": "Select All GES",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\selectAllGES.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": [
+ "shading",
+ "vray",
+ "select All GES"
+ ]
+ },
+ {
+ "type": "action",
+ "title": "Select All GES Under Selection",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\selectAllGESUnderSelection.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": ["shading", "vray", "select", "all", "GES"]
+ },
+ {
+ "type": "separator"
+ },
+ {
+ "type": "action",
+ "title": "Selection To VRay Mesh",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\selectionToVrayMesh.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": ["shading", "vray", "selection", "vraymesh"]
+ },
+ {
+ "type": "action",
+ "title": "Add VRay Round Edges Attribute",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\addVrayRoundEdgesAttribute.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": ["shading", "vray", "round edges", "attribute"]
+ },
+ {
+ "type": "action",
+ "title": "Add Gamma",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\vrayAddGamma.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": ["shading", "vray", "add gamma"]
+ },
+ {
+ "type": "separator"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\select_vraymesh_materials_with_unconnected_shader_slots.py",
+ "sourcetype": "file",
+ "title": "Select Unconnected Shader Materials",
+ "tags": [
+ "shading",
+ "vray",
+ "select",
+ "vraymesh",
+ "materials",
+ "unconnected shader slots"
+ ],
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\vrayMergeSimilarVRayMeshMaterials.py",
+ "sourcetype": "file",
+ "title": "Merge Similar VRay Mesh Materials",
+ "tags": [
+ "shading",
+ "vray",
+ "Merge",
+ "VRayMesh",
+ "Materials"
+ ],
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "title": "Create Two Sided Material",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\vrayCreate2SidedMtlForSelectedMtlRenamed.py",
+ "sourcetype": "file",
+ "tooltip": "Creates two sided material for selected material and renames it",
+ "tags": [
+ "shading",
+ "vray",
+ "two sided",
+ "material"
+ ]
+ },
+ {
+ "type": "action",
+ "title": "Create Two Sided Material For Selected",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\vrayCreate2SidedMtlForSelectedMtl.py",
+ "sourcetype": "file",
+ "tooltip": "Select material to create a two sided version from it",
+ "tags": [
+ "shading",
+ "vray",
+ "Create2SidedMtlForSelectedMtl.py"
+ ]
+ },
+ {
+ "type": "separator"
+ },
+ {
+ "type": "action",
+ "title": "Add OpenSubdiv Attribute",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\addVrayOpenSubdivAttribute.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": [
+ "shading",
+ "vray",
+ "add",
+ "open subdiv",
+ "attribute"
+ ]
+ },
+ {
+ "type": "action",
+ "title": "Remove OpenSubdiv Attribute",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\removeVrayOpenSubdivAttribute.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": [
+ "shading",
+ "vray",
+ "remove",
+ "opensubdiv",
+ "attributee"
+ ]
+ },
+ {
+ "type": "separator"
+ },
+ {
+ "type": "action",
+ "title": "Add Subdivision Attribute",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\addVraySubdivisionAttribute.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": [
+ "shading",
+ "vray",
+ "addVraySubdivisionAttribute"
+ ]
+ },
+ {
+ "type": "action",
+ "title": "Remove Subdivision Attribute.py",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\removeVraySubdivisionAttribute.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": [
+ "shading",
+ "vray",
+ "remove",
+ "subdivision",
+ "attribute"
+ ]
+ },
+ {
+ "type": "separator"
+ },
+ {
+ "type": "action",
+ "title": "Add Vray Object Ids",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\addVrayObjectIds.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": [
+ "shading",
+ "vray",
+ "add",
+ "object id"
+ ]
+ },
+ {
+ "type": "action",
+ "title": "Add Vray Material Ids",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\addVrayMaterialIds.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": [
+ "shading",
+ "vray",
+ "addVrayMaterialIds.py"
+ ]
+ },
+ {
+ "type": "separator"
+ },
+ {
+ "type": "action",
+ "title": "Set Physical DOF Depth",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\vrayPhysicalDOFSetDepth.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": [
+ "shading",
+ "vray",
+ "physical",
+ "DOF ",
+ "Depth"
+ ]
+ },
+ {
+ "type": "action",
+ "title": "Magic Vray Proxy UI",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vray\\magicVrayProxyUI.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": [
+ "shading",
+ "vray",
+ "magicVrayProxyUI"
+ ]
+ }
+ ]
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\pyblish\\lighting\\set_filename_prefix.py",
+ "sourcetype": "file",
+ "tags": [
+ "shading",
+ "lookdev",
+ "assign",
+ "shaders",
+ "prefix",
+ "filename",
+ "render"
+ ],
+ "title": "Set filename prefix",
+ "tooltip": "Set the render file name prefix."
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\look_manager_ui.py",
+ "sourcetype": "file",
+ "tags": [
+ "shading",
+ "look",
+ "assign",
+ "shaders",
+ "auto"
+ ],
+ "title": "Look Manager",
+ "tooltip": "Open the Look Manager UI for look assignment"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\LightLinkUi.py",
+ "sourcetype": "file",
+ "tags": [
+ "shading",
+ "light",
+ "link",
+ "ui"
+ ],
+ "title": "Light Link UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\vdviewer_ui.py",
+ "sourcetype": "file",
+ "tags": [
+ "shading",
+ "look",
+ "vray",
+ "displacement",
+ "shaders",
+ "auto"
+ ],
+ "title": "VRay Displ Viewer",
+ "tooltip": "Open the VRay Displacement Viewer, select and control the content of the set"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\setTexturePreviewToCLRImage.py",
+ "sourcetype": "file",
+ "tags": [
+ "shading",
+ "CLRImage",
+ "textures",
+ "preview"
+ ],
+ "title": "Set Texture Preview To CLRImage",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\fixDefaultShaderSetBehavior.py",
+ "sourcetype": "file",
+ "tags": [
+ "shading",
+ "fix",
+ "DefaultShaderSet",
+ "Behavior"
+ ],
+ "title": "Fix Default Shader Set Behavior",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\fixSelectedShapesReferenceAssignments.py",
+ "sourcetype": "file",
+ "tags": [
+ "shading",
+ "fix",
+ "Selected",
+ "Shapes",
+ "Reference",
+ "Assignments"
+ ],
+ "title": "Fix Shapes Reference Assignments",
+ "tooltip": "Select shapes to fix the reference assignments"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\selectLambert1Members.py",
+ "sourcetype": "file",
+ "tags": [
+ "shading",
+ "selectLambert1Members"
+ ],
+ "title": "Select Lambert1 Members",
+ "tooltip": "Selects all objects which have the Lambert1 shader assigned"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\selectShapesWithoutShader.py",
+ "sourcetype": "file",
+ "tags": [
+ "shading",
+ "selectShapesWithoutShader"
+ ],
+ "title": "Select Shapes Without Shader",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\fixRenderLayerOutAdjustmentErrors.py",
+ "sourcetype": "file",
+ "tags": [
+ "shading",
+ "fixRenderLayerOutAdjustmentErrors"
+ ],
+ "title": "Fix RenderLayer Out Adjustment Errors",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\fix_renderlayer_missing_node_override.py",
+ "sourcetype": "file",
+ "tags": [
+ "shading",
+ "renderlayer",
+ "missing",
+ "reference",
+ "switch",
+ "layer"
+ ],
+ "title": "Fix RenderLayer Missing Referenced Nodes Overrides",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "title": "Image 2 Tiled EXR",
+ "command": "$COLORBLEED_SCRIPTS\\shading\\open_img2exr.py",
+ "sourcetype": "file",
+ "tooltip": "",
+ "tags": [
+ "shading",
+ "vray",
+ "exr"
+ ]
+ }
+ ]
+ },
+ {
+ "type": "menu",
+ "title": "Rendering",
+ "items": [
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\pyblish\\open_deadline_submission_settings.py",
+ "sourcetype": "file",
+ "tags": [
+ "settings",
+ "deadline",
+ "globals",
+ "render"
+ ],
+ "title": "DL Submission Settings UI",
+ "tooltip": "Open the Deadline Submission Settings UI"
+ }
+ ]
+ },
+ {
+ "type": "menu",
+ "title": "Animation",
+ "items": [
+ {
+ "type": "menu",
+ "title": "Attributes",
+ "tooltip": "",
+ "items": [
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\attributes\\copyValues.py",
+ "sourcetype": "file",
+ "tags": [
+ "animation",
+ "copy",
+ "attributes"
+ ],
+ "title": "Copy Values",
+ "tooltip": "Copy attribute values"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\attributes\\copyInConnections.py",
+ "sourcetype": "file",
+ "tags": [
+ "animation",
+ "copy",
+ "attributes",
+ "connections",
+ "incoming"
+ ],
+ "title": "Copy In Connections",
+ "tooltip": "Copy incoming connections"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\attributes\\copyOutConnections.py",
+ "sourcetype": "file",
+ "tags": [
+ "animation",
+ "copy",
+ "attributes",
+ "connections",
+ "out"
+ ],
+ "title": "Copy Out Connections",
+ "tooltip": "Copy outcoming connections"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\attributes\\copyTransformLocal.py",
+ "sourcetype": "file",
+ "tags": [
+ "animation",
+ "copy",
+ "attributes",
+ "transforms",
+ "local"
+ ],
+ "title": "Copy Local Transfroms",
+ "tooltip": "Copy local transfroms"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\attributes\\copyTransformMatrix.py",
+ "sourcetype": "file",
+ "tags": [
+ "animation",
+ "copy",
+ "attributes",
+ "transforms",
+ "matrix"
+ ],
+ "title": "Copy Matrix Transfroms",
+ "tooltip": "Copy Matrix transfroms"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\attributes\\copyTransformUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "animation",
+ "copy",
+ "attributes",
+ "transforms",
+ "UI"
+ ],
+ "title": "Copy Transforms UI",
+ "tooltip": "Open the Copy Transforms UI"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\attributes\\simpleCopyUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "animation",
+ "copy",
+ "attributes",
+ "transforms",
+ "UI",
+ "simple"
+ ],
+ "title": "Simple Copy UI",
+ "tooltip": "Open the simple Copy Transforms UI"
+ }
+ ]
+ },
+ {
+ "type": "menu",
+ "title": "Optimize",
+ "tooltip": "Optimization scripts",
+ "items": [
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\optimize\\toggleFreezeHierarchy.py",
+ "sourcetype": "file",
+ "tags": [
+ "animation",
+ "hierarchy",
+ "toggle",
+ "freeze"
+ ],
+ "title": "Toggle Freeze Hierarchy",
+ "tooltip": "Freeze and unfreeze hierarchy"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\optimize\\toggleParallelNucleus.py",
+ "sourcetype": "file",
+ "tags": [
+ "animation",
+ "nucleus",
+ "toggle",
+ "parallel"
+ ],
+ "title": "Toggle Parallel Nucleus",
+ "tooltip": "Toggle parallel nucleus"
+ }
+ ]
+ },
+ {
+ "sourcetype": "file",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\bakeSelectedToWorldSpace.py",
+ "tags": ["animation", "bake","selection", "worldspace.py"],
+ "title": "Bake Selected To Worldspace",
+ "type": "action"
+ },
+ {
+ "sourcetype": "file",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\timeStepper.py",
+ "tags": ["animation", "time","stepper"],
+ "title": "Time Stepper",
+ "type": "action"
+ },
+ {
+ "sourcetype": "file",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\capture_ui.py",
+ "tags": ["animation", "capture", "ui", "screen", "movie", "image"],
+ "title": "Capture UI",
+ "type": "action"
+ },
+ {
+ "sourcetype": "file",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\simplePlayblastUI.py",
+ "tags": ["animation", "simple", "playblast", "ui"],
+ "title": "Simple Playblast UI",
+ "type": "action"
+ },
+ {
+ "sourcetype": "file",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\tweenMachineUI.py",
+ "tags": ["animation", "tween", "machine"],
+ "title": "Tween Machine UI",
+ "type": "action"
+ },
+ {
+ "sourcetype": "file",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\selectAllAnimationCurves.py",
+ "tags": ["animation", "select", "curves"],
+ "title": "Select All Animation Curves",
+ "type": "action"
+ },
+ {
+ "sourcetype": "file",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\pathAnimation.py",
+ "tags": ["animation", "path", "along"],
+ "title": "Path Animation",
+ "type": "action"
+ },
+ {
+ "sourcetype": "file",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\offsetSelectedObjectsUI.py",
+ "tags": [
+ "animation",
+ "offsetSelectedObjectsUI.py"
+ ],
+ "title": "Offset Selected Objects UI",
+ "type": "action"
+ },
+ {
+ "sourcetype": "file",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\key_amplifier_ui.py",
+ "tags": [
+ "animation",
+ "key", "amplifier"
+ ],
+ "title": "Key Amplifier UI",
+ "type": "action"
+ },
+ {
+ "sourcetype": "file",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\anim_scene_optimizer.py",
+ "tags": [
+ "animation",
+ "anim_scene_optimizer.py"
+ ],
+ "title": "Anim_Scene_Optimizer",
+ "type": "action"
+ },
+ {
+ "sourcetype": "file",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\zvParentMaster.py",
+ "tags": [
+ "animation",
+ "zvParentMaster.py"
+ ],
+ "title": "ZV Parent Master",
+ "type": "action"
+ },
+ {
+ "sourcetype": "file",
+ "command": "$COLORBLEED_SCRIPTS\\animation\\poseLibrary.py",
+ "tags": [
+ "animation",
+ "poseLibrary.py"
+ ],
+ "title": "Pose Library",
+ "type": "action"
+ }
+ ]
+ },
+ {
+ "type": "menu",
+ "title": "Layout",
+ "items": [
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\layout\\alignDistributeUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "layout",
+ "align",
+ "Distribute",
+ "UI"
+ ],
+ "title": "Align Distribute UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\layout\\alignSimpleUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "layout",
+ "align",
+ "UI",
+ "Simple"
+ ],
+ "title": "Align Simple UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\layout\\center_locator.py",
+ "sourcetype": "file",
+ "tags": [
+ "layout",
+ "center",
+ "locator"
+ ],
+ "title": "Center Locator",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\layout\\average_locator.py",
+ "sourcetype": "file",
+ "tags": [
+ "layout",
+ "average",
+ "locator"
+ ],
+ "title": "Average Locator",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\layout\\selectWithinProximityUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "layout",
+ "select",
+ "proximity",
+ "ui"
+ ],
+ "title": "Select Within Proximity UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\layout\\dupCurveUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "layout",
+ "Duplicate",
+ "Curve",
+ "UI"
+ ],
+ "title": "Duplicate Curve UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\layout\\randomDeselectUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "layout",
+ "random",
+ "Deselect",
+ "UI"
+ ],
+ "title": "Random Deselect UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\layout\\multiReferencerUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "layout",
+ "multi",
+ "reference"
+ ],
+ "title": "Multi Referencer UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\layout\\duplicateOffsetUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "layout",
+ "duplicate",
+ "offset",
+ "UI"
+ ],
+ "title": "Duplicate Offset UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\layout\\spPaint3d.py",
+ "sourcetype": "file",
+ "tags": [
+ "layout",
+ "spPaint3d",
+ "paint",
+ "tool"
+ ],
+ "title": "SP Paint 3d",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\layout\\randomizeUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "layout",
+ "randomize",
+ "UI"
+ ],
+ "title": "Randomize UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\layout\\distributeWithinObjectUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "layout",
+ "distribute",
+ "ObjectUI",
+ "within"
+ ],
+ "title": "Distribute Within Object UI",
+ "tooltip": ""
+ }
+ ]
+ },
+ {
+ "type": "menu",
+ "title": "Particles",
+ "items": [
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\instancerToObjects.py",
+ "sourcetype": "file",
+ "tags": [
+ "particles",
+ "instancerToObjects"
+ ],
+ "title": "Instancer To Objects",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\instancerToObjectsInstances.py",
+ "sourcetype": "file",
+ "tags": [
+ "particles",
+ "instancerToObjectsInstances"
+ ],
+ "title": "Instancer To Objects Instances",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\objectsToParticlesAndInstancerCleanSource.py",
+ "sourcetype": "file",
+ "tags": ["particles", "objects", "Particles", "Instancer", "Clean", "Source"],
+ "title": "Objects To Particles & Instancer - Clean Source",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\particleComponentsToLocators.py",
+ "sourcetype": "file",
+ "tags": ["particles", "components", "locators"],
+ "title": "Particle Components To Locators",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\objectsToParticlesAndInstancer.py",
+ "sourcetype": "file",
+ "tags": [
+ "particles", "objects", "particles", "instancer"],
+ "title": "Objects To Particles And Instancer",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\spawnParticlesOnMesh.py",
+ "sourcetype": "file",
+ "tags": ["particles", "spawn","on","mesh"],
+ "title": "Spawn Particles On Mesh",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\instancerToObjectsInstancesWithAnimation.py",
+ "sourcetype": "file",
+ "tags": [
+ "particles",
+ "instancerToObjectsInstancesWithAnimation"
+ ],
+ "title": "Instancer To Objects Instances With Animation",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\objectsToParticles.py",
+ "sourcetype": "file",
+ "tags": [
+ "particles",
+ "objectsToParticles"
+ ],
+ "title": "Objects To Particles",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\add_particle_cacheFile_attrs.py",
+ "sourcetype": "file",
+ "tags": [
+ "particles",
+ "add_particle_cacheFile_attrs"
+ ],
+ "title": "Add Particle CacheFile Attributes",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\mergeParticleSystems.py",
+ "sourcetype": "file",
+ "tags": [
+ "particles",
+ "mergeParticleSystems"
+ ],
+ "title": "Merge Particle Systems",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\particlesToLocators.py",
+ "sourcetype": "file",
+ "tags": [
+ "particles",
+ "particlesToLocators"
+ ],
+ "title": "Particles To Locators",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\instancerToObjectsWithAnimation.py",
+ "sourcetype": "file",
+ "tags": [
+ "particles",
+ "instancerToObjectsWithAnimation"
+ ],
+ "title": "Instancer To Objects With Animation",
+ "tooltip": ""
+ },
+ {"type": "separator"},
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\mayaReplicateHoudiniTool.py",
+ "sourcetype": "file",
+ "tags": [
+ "particles",
+ "houdini", "houdiniTool", "houdiniEngine"
+ ],
+ "title": "Replicate Houdini Tool",
+ "tooltip": ""
+ },
+ {"type": "separator"},
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\clearInitialState.py",
+ "sourcetype": "file",
+ "tags": [
+ "particles",
+ "clearInitialState"
+ ],
+ "title": "Clear Initial State",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\particles\\killSelectedParticles.py",
+ "sourcetype": "file",
+ "tags": [
+ "particles",
+ "killSelectedParticles"
+ ],
+ "title": "Kill Selected Particles",
+ "tooltip": ""
+ }
+ ]
+ },
+ {
+ "type": "menu",
+ "title": "Cleanup",
+ "items": [
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\repair_faulty_containers.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "repair", "containers"
+ ],
+ "title": "Find and Repair Containers",
+ "tooltip": ""
+      },
+      {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\selectByType.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "selectByType"
+ ],
+ "title": "Select By Type",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\selectIntermediateObjects.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "selectIntermediateObjects"
+ ],
+ "title": "Select Intermediate Objects",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\selectNonUniqueNames.py",
+ "sourcetype": "file",
+ "tags": ["cleanup", "select", "non unique", "names"],
+ "title": "Select Non Unique Names",
+ "tooltip": ""
+ },
+ {"type": "separator"},
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\removeNamespaces.py",
+ "sourcetype": "file",
+ "tags": ["cleanup", "remove", "namespaces"],
+ "title": "Remove Namespaces",
+ "tooltip": "Remove all namespaces"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\remove_user_defined_attributes.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "remove_user_defined_attributes"
+ ],
+ "title": "Remove User Defined Attributes",
+ "tooltip": "Remove all user-defined attributs from all nodes"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\removeUnknownNodes.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "removeUnknownNodes"
+ ],
+ "title": "Remove Unknown Nodes",
+ "tooltip": "Remove all unknown nodes"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\removeUnloadedReferences.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "removeUnloadedReferences"
+ ],
+ "title": "Remove Unloaded References",
+ "tooltip": "Remove all unloaded references"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\removeReferencesFailedEdits.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "removeReferencesFailedEdits"
+ ],
+ "title": "Remove References Failed Edits",
+ "tooltip": "Remove failed edits for all references"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\remove_unused_looks.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "removeUnusedLooks"
+ ],
+ "title": "Remove Unused Looks",
+ "tooltip": "Remove all loaded yet unused Avalon look containers"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\deleteGhostIntermediateObjects.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "deleteGhostIntermediateObjects"
+ ],
+ "title": "Delete Ghost Intermediate Objects",
+ "tooltip": ""
+ },
+ {"type": "separator"},
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\resetViewportCache.py",
+ "sourcetype": "file",
+ "tags": ["cleanup", "reset","viewport", "cache"],
+ "title": "Reset Viewport Cache",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\uniqifyNodeNames.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "uniqifyNodeNames"
+ ],
+ "title": "Uniqify Node Names",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\autoRenameFileNodes.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "auto", "rename","filenodes"
+ ],
+ "title": "Auto Rename File Nodes",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\update_asset_id.py",
+ "sourcetype": "file",
+ "tags":["cleanup", "update", "database", "asset", "id"],
+ "title": "Update Asset ID",
+ "tooltip": "Will replace the Colorbleed ID with a new one (asset ID : Unique number)"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\colorbleedRename.py",
+ "sourcetype": "file",
+ "tags": ["cleanup", "rename", "ui"],
+ "title": "Colorbleed Renamer",
+ "tooltip": "Colorbleed Rename UI"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\renameShapesToTransform.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "renameShapesToTransform"
+ ],
+ "title": "Rename Shapes To Transform",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\reorderUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "reorderUI"
+ ],
+ "title": "Reorder UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\cleanup\\pastedCleaner.py",
+ "sourcetype": "file",
+ "tags": [
+ "cleanup",
+ "pastedCleaner"
+ ],
+ "title": "Pasted Cleaner",
+ "tooltip": ""
+ }
+ ]
+ },
+ {
+ "type": "menu",
+ "title": "Others",
+ "items": [
+ {
+ "type": "menu",
+ "sourcetype": "file",
+ "title": "Yeti",
+ "items": [
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\yeti\\cache_selected_yeti_nodes.py",
+ "sourcetype": "file",
+ "tags": ["others", "yeti", "cache", "selected"],
+ "title": "Cache Selected Yeti Nodes",
+ "tooltip": ""
+ }
+ ]
+ },
+ {
+ "type": "menu",
+ "title": "Hair",
+ "tooltip": "",
+ "items": [
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\hair\\recolorHairCurrentCurve",
+ "sourcetype": "file",
+ "tags": ["others", "selectSoftSelection"],
+ "title": "Select Soft Selection",
+ "tooltip": ""
+ }
+ ]
+ },
+ {
+ "type": "menu",
+ "command": "$COLORBLEED_SCRIPTS\\others\\display",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "display"
+ ],
+ "title": "Display",
+ "items": [
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\display\\wireframeSelectedObjects.py",
+ "sourcetype": "file",
+ "tags": ["others", "wireframe","selected","objects"],
+ "title": "Wireframe Selected Objects",
+ "tooltip": ""
+ }
+ ]
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\archiveSceneUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "archiveSceneUI"
+ ],
+ "title": "Archive Scene UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\getSimilarMeshes.py",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "getSimilarMeshes"
+ ],
+ "title": "Get Similar Meshes",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\createBoundingBoxEachSelected.py",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "createBoundingBoxEachSelected"
+ ],
+ "title": "Create BoundingBox Each Selected",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\curveFromPositionEveryFrame.py",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "curveFromPositionEveryFrame"
+ ],
+ "title": "Curve From Position",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\instanceLeafSmartTransform.py",
+ "sourcetype": "file",
+ "tags": ["others", "instance","leaf", "smart", "transform"],
+ "title": "Instance Leaf Smart Transform",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\instanceSmartTransform.py",
+ "sourcetype": "file",
+ "tags": ["others", "instance", "smart", "transform"],
+ "title": "Instance Smart Transform",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\randomizeUVShellsSelectedObjects.py",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "randomizeUVShellsSelectedObjects"
+ ],
+ "title": "Randomize UV Shells",
+ "tooltip": "Select objects before running action"
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\centerPivotGroup.py",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "centerPivotGroup"
+ ],
+ "title": "Center Pivot Group",
+ "tooltip": ""
+ },
+ {"type": "separator"},
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\locatorsOnSelectedFaces.py",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "locatorsOnSelectedFaces"
+ ],
+ "title": "Locators On Selected Faces",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\locatorsOnEdgeSelectionPrompt.py",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "locatorsOnEdgeSelectionPrompt"
+ ],
+ "title": "Locators On Edge Selection Prompt",
+ "tooltip": ""
+ },
+ {"type": "separator"},
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\copyDeformers.py",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "copyDeformers"
+ ],
+ "title": "Copy Deformers",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\selectInReferenceEditor.py",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "selectInReferenceEditor"
+ ],
+ "title": "Select In Reference Editor",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\selectConstrainingObject.py",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "selectConstrainingObject"
+ ],
+ "title": "Select Constraining Object",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\deformerSetRelationsUI.py",
+ "sourcetype": "file",
+ "tags": [
+ "others",
+ "deformerSetRelationsUI"
+ ],
+ "title": "Deformer Set Relations UI",
+ "tooltip": ""
+ },
+ {
+ "type": "action",
+ "command": "$COLORBLEED_SCRIPTS\\others\\recreateBaseNodesForAllLatticeNodes.py",
+ "sourcetype": "file",
+ "tags": ["others", "recreate","base", "nodes", "lattice"],
+ "title": "Recreate Base Nodes For Lattice Nodes",
+ "tooltip": ""
+ }
+ ]
+ }
+]
\ No newline at end of file
diff --git a/config/apps/maya/menu.py b/config/apps/maya/menu.py
new file mode 100644
index 0000000000..1e411e1c78
--- /dev/null
+++ b/config/apps/maya/menu.py
@@ -0,0 +1,73 @@
+import sys
+import os
+import logging
+
+from avalon.vendor.Qt import QtWidgets, QtGui
+
+import maya.cmds as cmds
+
+self = sys.modules[__name__]
+self._menu = "colorbleed"
+
+log = logging.getLogger(__name__)
+
+
+def _get_menu():
+ """Return the menu instance if it currently exists in Maya"""
+
+ app = QtWidgets.QApplication.instance()
+ widgets = dict((w.objectName(), w) for w in app.allWidgets())
+ menu = widgets.get(self._menu)
+ return menu
+
+
+def deferred():
+
+ import scriptsmenu.launchformaya as launchformaya
+ import scriptsmenu.scriptsmenu as scriptsmenu
+
+ log.info("Attempting to install ...")
+
+ # load configuration of custom menu
+ config_path = os.path.join(os.path.dirname(__file__), "menu.json")
+ config = scriptsmenu.load_configuration(config_path)
+
+ # run the launcher for Maya menu
+ cb_menu = launchformaya.main(title=self._menu.title(),
+ objectName=self._menu)
+
+ # apply configuration
+ cb_menu.build_from_configuration(cb_menu, config)
+
+
+def uninstall():
+
+ menu = _get_menu()
+ if menu:
+ log.info("Attempting to uninstall ..")
+
+ try:
+ menu.deleteLater()
+ del menu
+ except Exception as e:
+ log.error(e)
+
+
+def install():
+
+ if cmds.about(batch=True):
+ print("Skipping colorbleed.menu initialization in batch mode..")
+ return
+
+ uninstall()
+ # Allow time for uninstallation to finish.
+ cmds.evalDeferred(deferred)
+
+
+def popup():
+ """Pop-up the existing menu near the mouse cursor"""
+    menu = _get_menu()
+    if menu is None:
+        log.warning("Can't pop up menu '%s'; it is not installed",
+                    self._menu)
+        return
+
+    cursor = QtGui.QCursor()
+    point = cursor.pos()
+    menu.exec_(point)
diff --git a/config/apps/maya/plugin.py b/config/apps/maya/plugin.py
new file mode 100644
index 0000000000..21a074e874
--- /dev/null
+++ b/config/apps/maya/plugin.py
@@ -0,0 +1,218 @@
+from avalon import api
+
+
+def get_reference_node_parents(ref):
+ """Return all parent reference nodes of reference node
+
+ Args:
+ ref (str): reference node.
+
+ Returns:
+ list: The upstream parent reference nodes.
+
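+    Example:
+        With hypothetical nested references A -> B -> C:
+
+        >>> get_reference_node_parents("C_RN")
+        ['B_RN', 'A_RN']
+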
+ """
+ from maya import cmds
+
+ parent = cmds.referenceQuery(ref,
+ referenceNode=True,
+ parent=True)
+ parents = []
+ while parent:
+ parents.append(parent)
+ parent = cmds.referenceQuery(parent,
+ referenceNode=True,
+ parent=True)
+ return parents
+
+
+class ReferenceLoader(api.Loader):
+ """A basic ReferenceLoader for Maya
+
+    This implements the basic behavior for a loader to inherit from:
+    it containerizes the reference and implements the `remove` and
+    `update` logic.
+
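+    Example:
+        A subclass only needs to implement `process_reference`; the
+        values below are illustrative::
+
+            class ModelLoader(ReferenceLoader):
+                families = ["colorbleed.model"]
+                representations = ["ma"]
+
+                def process_reference(self, context, name,
+                                      namespace, data):
+                    from maya import cmds
+                    nodes = cmds.file(self.fname,
+                                      namespace=namespace,
+                                      reference=True,
+                                      returnNewNodes=True)
+                    self[:] = nodes
+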
+ """
+ def load(self,
+ context,
+ name=None,
+ namespace=None,
+ data=None):
+
+ import os
+ from avalon.maya import lib
+ from avalon.maya.pipeline import containerise
+
+ assert os.path.exists(self.fname), "%s does not exist." % self.fname
+
+ asset = context['asset']
+
+ namespace = namespace or lib.unique_namespace(
+ asset["name"] + "_",
+ prefix="_" if asset["name"][0].isdigit() else "",
+ suffix="_",
+ )
+
+ self.process_reference(context=context,
+ name=name,
+ namespace=namespace,
+ data=data)
+
+ # Only containerize if any nodes were loaded by the Loader
+ nodes = self[:]
+ if not nodes:
+ return
+
+ return containerise(
+ name=name,
+ namespace=namespace,
+ nodes=nodes,
+ context=context,
+ loader=self.__class__.__name__)
+
+ def process_reference(self, context, name, namespace, data):
+ """To be implemented by subclass"""
+ raise NotImplementedError("Must be implemented by subclass")
+
+ def _get_reference_node(self, members):
+ """Get the reference node from the container members
+ Args:
+ members: list of node names
+
+ Returns:
+ str: Reference node name.
+
+ """
+
+ from maya import cmds
+
+ # Collect the references without .placeHolderList[] attributes as
+ # unique entries (objects only) and skipping the sharedReferenceNode.
+ references = set()
+ for ref in cmds.ls(members, exactType="reference", objectsOnly=True):
+
+ # Ignore any `:sharedReferenceNode`
+ if ref.rsplit(":", 1)[-1].startswith("sharedReferenceNode"):
+ continue
+
+ references.add(ref)
+
+ assert references, "No reference node found in container"
+
+ # Get highest reference node (least parents)
+ highest = min(references,
+ key=lambda x: len(get_reference_node_parents(x)))
+
+ # Warn the user when we're taking the highest reference node
+ if len(references) > 1:
+ self.log.warning("More than one reference node found in "
+ "container, using highest reference node: "
+ "%s (in: %s)", highest, list(references))
+
+ return highest
+
+ def update(self, container, representation):
+
+ import os
+ from maya import cmds
+
+ node = container["objectName"]
+
+ path = api.get_representation_path(representation)
+
+ # Get reference node from container members
+ members = cmds.sets(node, query=True, nodesOnly=True)
+ reference_node = self._get_reference_node(members)
+
+ file_type = {
+ "ma": "mayaAscii",
+ "mb": "mayaBinary",
+ "abc": "Alembic"
+ }.get(representation["name"])
+
+ assert file_type, "Unsupported representation: %s" % representation
+
+ assert os.path.exists(path), "%s does not exist." % path
+
+ try:
+ content = cmds.file(path,
+ loadReference=reference_node,
+ type=file_type,
+ returnNewNodes=True)
+ except RuntimeError as exc:
+ # When changing a reference to a file that has load errors the
+ # command will raise an error even if the file is still loaded
+ # correctly (e.g. when raising errors on Arnold attributes)
+ # When the file is loaded and has content, we consider it's fine.
+ if not cmds.referenceQuery(reference_node, isLoaded=True):
+ raise
+
+ content = cmds.referenceQuery(reference_node,
+ nodes=True,
+ dagPath=True)
+ if not content:
+ raise
+
+ self.log.warning("Ignoring file read error:\n%s", exc)
+
+ # Fix PLN-40 for older containers created with Avalon that had the
+ # `.verticesOnlySet` set to True.
+ if cmds.getAttr("{}.verticesOnlySet".format(node)):
+ self.log.info("Setting %s.verticesOnlySet to False", node)
+ cmds.setAttr("{}.verticesOnlySet".format(node), False)
+
+ # Add new nodes of the reference to the container
+ cmds.sets(content, forceElement=node)
+
+ # Remove any placeHolderList attribute entries from the set that
+ # are remaining from nodes being removed from the referenced file.
+ members = cmds.sets(node, query=True)
+ invalid = [x for x in members if ".placeHolderList" in x]
+ if invalid:
+ cmds.sets(invalid, remove=node)
+
+ # Update metadata
+ cmds.setAttr("{}.representation".format(node),
+ str(representation["_id"]),
+ type="string")
+
+ def remove(self, container):
+ """Remove an existing `container` from Maya scene
+
+ Deprecated; this functionality is replaced by `api.remove()`
+
+ Arguments:
+ container (avalon-core:container-1.0): Which container
+ to remove from scene.
+
+ """
+
+ from maya import cmds
+
+ node = container["objectName"]
+
+ # Assume asset has been referenced
+ members = cmds.sets(node, query=True)
+ reference_node = self._get_reference_node(members)
+
+ assert reference_node, ("Imported container not supported; "
+ "container must be referenced.")
+
+ self.log.info("Removing '%s' from Maya.." % container["name"])
+
+ namespace = cmds.referenceQuery(reference_node, namespace=True)
+ fname = cmds.referenceQuery(reference_node, filename=True)
+ cmds.file(fname, removeReference=True)
+
+ try:
+ cmds.delete(node)
+ except ValueError:
+ # Already implicitly deleted by Maya upon removing reference
+ pass
+
+ try:
+            # If container is not automatically cleaned up by Maya (issue #118)
+ cmds.namespace(removeNamespace=namespace,
+ deleteNamespaceContent=True)
+ except RuntimeError:
+ pass
diff --git a/config/launcher_actions.py b/config/launcher_actions.py
new file mode 100644
index 0000000000..7d72cb2b38
--- /dev/null
+++ b/config/launcher_actions.py
@@ -0,0 +1,86 @@
+import os
+from avalon import api, lib, pipeline
+
+
+class FusionRenderNode(api.Action):
+
+ name = "fusionrendernode9"
+ label = "F9 Render Node"
+ icon = "object-group"
+ order = 997
+
+ def is_compatible(self, session):
+ """Return whether the action is compatible with the session"""
+ if "AVALON_PROJECT" in session:
+ return False
+ return True
+
+ def process(self, session, **kwargs):
+ """Implement the behavior for when the action is triggered
+
+ Args:
+ session (dict): environment dictionary
+
+ Returns:
+ Popen instance of newly spawned process
+
+ """
+
+ # Update environment with session
+ env = os.environ.copy()
+ env.update(session)
+
+ # Get executable by name
+ app = lib.get_application(self.name)
+ env.update(app["environment"])
+ executable = lib.which(app["executable"])
+
+ return lib.launch(executable=executable, args=[], environment=env)
+
+
+class VrayRenderSlave(api.Action):
+
+ name = "vrayrenderslave"
+ label = "V-Ray Slave"
+ icon = "object-group"
+ order = 996
+
+ def is_compatible(self, session):
+ """Return whether the action is compatible with the session"""
+ if "AVALON_PROJECT" in session:
+ return False
+ return True
+
+ def process(self, session, **kwargs):
+ """Implement the behavior for when the action is triggered
+
+ Args:
+ session (dict): environment dictionary
+
+ Returns:
+ Popen instance of newly spawned process
+
+ """
+
+ # Update environment with session
+ env = os.environ.copy()
+ env.update(session)
+
+ # Get executable by name
+ app = lib.get_application(self.name)
+ env.update(app["environment"])
+ executable = lib.which(app["executable"])
+
+ # Run as server
+ arguments = ["-server", "-portNumber=20207"]
+
+ return lib.launch(executable=executable,
+ args=arguments,
+ environment=env)
+
+
+def register_launcher_actions():
+ """Register specific actions which should be accessible in the launcher"""
+
+ pipeline.register_plugin(api.Action, FusionRenderNode)
+ pipeline.register_plugin(api.Action, VrayRenderSlave)
diff --git a/config/lib.py b/config/lib.py
new file mode 100644
index 0000000000..1297aba606
--- /dev/null
+++ b/config/lib.py
@@ -0,0 +1,272 @@
+import os
+import re
+import logging
+import importlib
+
+from .vendor import pather
+from .vendor.pather.error import ParseError
+
+import avalon.io as io
+import avalon.api
+
+log = logging.getLogger(__name__)
+
+
+def is_latest(representation):
+ """Return whether the representation is from latest version
+
+ Args:
+ representation (dict): The representation document from the database.
+
+ Returns:
+        bool: Whether the representation is of the latest version.
+
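+    Example:
+        >>> # with `doc` a representation document from the database;
+        >>> # returns True only when no newer version exists
+        >>> is_latest(doc)
+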
+ """
+
+ version = io.find_one({"_id": representation['parent']})
+
+ # Get highest version under the parent
+ highest_version = io.find_one({
+ "type": "version",
+ "parent": version["parent"]
+ }, sort=[("name", -1)], projection={"name": True})
+
+    return version['name'] == highest_version['name']
+
+
+def any_outdated():
+ """Return whether the current scene has any outdated content"""
+
+ checked = set()
+ host = avalon.api.registered_host()
+ for container in host.ls():
+ representation = container['representation']
+ if representation in checked:
+ continue
+
+ representation_doc = io.find_one({"_id": io.ObjectId(representation),
+ "type": "representation"},
+ projection={"parent": True})
+ if representation_doc and not is_latest(representation_doc):
+ return True
+ elif not representation_doc:
+ log.debug("Container '{objectName}' has an invalid "
+ "representation, it is missing in the "
+ "database".format(**container))
+
+ checked.add(representation)
+ return False
+
+
+def update_task_from_path(path):
+ """Update the context using the current scene state.
+
+    When there are no changes to the context, no update is triggered.
+    When the context for a file cannot be parsed, an error is logged
+    but not raised.
+
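+    Example:
+        With a work template like "{root}/{project}/{asset}/{task}/...",
+        this would switch the Session to asset "hero", task "modeling":
+
+        >>> update_task_from_path("/projects/show/hero/modeling/scene.ma")
+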
+ """
+ if not path:
+ log.warning("Can't update the current task. Scene is not saved.")
+ return
+
+ # Find the current context from the filename
+ project = io.find_one({"type": "project"},
+ projection={"config.template.work": True})
+ template = project['config']['template']['work']
+    # Force the use of the registered root to avoid using wrong paths
+ template = pather.format(template, {"root": avalon.api.registered_root()})
+ try:
+ context = pather.parse(template, path)
+ except ParseError:
+ log.error("Can't update the current task. Unable to parse the "
+ "task for: %s (pattern: %s)", path, template)
+ return
+
+ # Find the changes between current Session and the path's context.
+ current = {
+ "asset": avalon.api.Session["AVALON_ASSET"],
+ "task": avalon.api.Session["AVALON_TASK"],
+ "app": avalon.api.Session["AVALON_APP"]
+ }
+ changes = {key: context[key] for key, current_value in current.items()
+ if context[key] != current_value}
+
+ if changes:
+ log.info("Updating work task to: %s", context)
+ avalon.api.update_current_task(**changes)
+
+
+def _rreplace(s, a, b, n=1):
+ """Replace a with b in string s from right side n times"""
+ return b.join(s.rsplit(a, n))
+
+
+def version_up(filepath):
+ """Version up filepath to a new non-existing version.
+
+    Parses for a version identifier like `_v001` or `.v001`.
+    When no version is present, `_v001` is appended as a suffix.
+
+ Returns:
+ str: filepath with increased version number
+
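+    Example:
+        `shot_v001.comp` becomes `shot_v002.comp`,
+        `shot.v012.comp` becomes `shot.v013.comp` and a file without a
+        version like `shot.comp` becomes `shot_v001.comp`.
+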
+ """
+
+ dirname = os.path.dirname(filepath)
+ basename, ext = os.path.splitext(os.path.basename(filepath))
+
+ regex = "[._]v\d+"
+ matches = re.findall(regex, str(basename), re.IGNORECASE)
+ if not matches:
+ log.info("Creating version...")
+ new_label = "_v{version:03d}".format(version=1)
+ new_basename = "{}{}".format(basename, new_label)
+ else:
+ label = matches[-1]
+ version = re.search("\d+", label).group()
+ padding = len(version)
+
+ new_version = int(version) + 1
+ new_version = '{version:0{padding}d}'.format(version=new_version,
+ padding=padding)
+ new_label = label.replace(version, new_version, 1)
+ new_basename = _rreplace(basename, label, new_label)
+
+ new_filename = "{}{}".format(new_basename, ext)
+ new_filename = os.path.join(dirname, new_filename)
+ new_filename = os.path.normpath(new_filename)
+
+ if new_filename == filepath:
+ raise RuntimeError("Created path is the same as current file,"
+ "this is a bug")
+
+ if os.path.exists(new_filename):
+ log.info("Skipping existing version %s" % new_label)
+ return version_up(new_filename)
+
+ log.info("New version %s" % new_label)
+ return new_filename
+
+
+def switch_item(container,
+ asset_name=None,
+ subset_name=None,
+ representation_name=None):
+ """Switch container asset, subset or representation of a container by name.
+
+ It'll always switch to the latest version - of course a different
+ approach could be implemented.
+
+ Args:
+ container (dict): data of the item to switch with
+ asset_name (str): name of the asset
+ subset_name (str): name of the subset
+ representation_name (str): name of the representation
+
+ Returns:
+        dict: The new representation that was switched to
+
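+    Example:
+        Switch a loaded container to another representation, e.g. switch
+        only the representation name::
+
+            switch_item(container, representation_name="exr")
+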
+ """
+
+ if all(not x for x in [asset_name, subset_name, representation_name]):
+ raise ValueError("Must have at least one change provided to switch.")
+
+ # Collect any of current asset, subset and representation if not provided
+ # so we can use the original name from those.
+ if any(not x for x in [asset_name, subset_name, representation_name]):
+ _id = io.ObjectId(container["representation"])
+ representation = io.find_one({"type": "representation", "_id": _id})
+ version, subset, asset, project = io.parenthood(representation)
+
+ if asset_name is None:
+ asset_name = asset["name"]
+
+ if subset_name is None:
+ subset_name = subset["name"]
+
+ if representation_name is None:
+ representation_name = representation["name"]
+
+ # Find the new one
+ asset = io.find_one({"name": asset_name, "type": "asset"})
+ assert asset, ("Could not find asset in the database with the name "
+ "'%s'" % asset_name)
+
+ subset = io.find_one({"name": subset_name,
+ "type": "subset",
+ "parent": asset["_id"]})
+ assert subset, ("Could not find subset in the database with the name "
+ "'%s'" % subset_name)
+
+ version = io.find_one({"type": "version",
+ "parent": subset["_id"]},
+ sort=[('name', -1)])
+
+ assert version, "Could not find a version for {}.{}".format(
+ asset_name, subset_name
+ )
+
+ representation = io.find_one({"name": representation_name,
+ "type": "representation",
+ "parent": version["_id"]})
+
+ assert representation, ("Could not find representation in the database with"
+ " the name '%s'" % representation_name)
+
+ avalon.api.switch(container, representation)
+
+ return representation
+
+
+def _get_host_name():
+
+ _host = avalon.api.registered_host()
+ # This covers nested module name like avalon.maya
+ return _host.__name__.rsplit(".", 1)[-1]
+
+
+def collect_container_metadata(container):
+ """Add additional data based on the current host
+
+    If the host application's lib module does not have a function to inject
+    additional data it will return an empty dict.
+
+    Args:
+        container (dict): collection of representation data in the host
+
+    Returns:
+        dict: additional metadata for the container, or an empty dict
+ """
+ # TODO: Improve method of getting the host lib module
+ host_name = _get_host_name()
+ package_name = "colorbleed.{}.lib".format(host_name)
+ hostlib = importlib.import_module(package_name)
+
+ if not hasattr(hostlib, "get_additional_data"):
+ return {}
+
+ return hostlib.get_additional_data(container)
+
+
+def get_project_fps():
+ """Returns project's FPS, if not found will return 25 by default
+
+ Returns:
+ int, float
+ """
+
+ project_name = io.active_project()
+ project = io.find_one({"name": project_name,
+ "type": "project"},
+ projection={"config": True})
+
+ config = project.get("config", None)
+ assert config, "This is a bug"
+
+ fps = config.get("fps", 25.0)
+
+ return fps
diff --git a/config/plugin.py b/config/plugin.py
new file mode 100644
index 0000000000..0ba1fe5ded
--- /dev/null
+++ b/config/plugin.py
@@ -0,0 +1,34 @@
+import tempfile
+import pyblish.api
+
+ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05
+ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1
+ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2
+ValidateMeshOrder = pyblish.api.ValidatorOrder + 0.3
+
+
+class Extractor(pyblish.api.InstancePlugin):
+ """Extractor base class.
+
+ The extractor base class implements a "staging_dir" function used to
+ generate a temporary directory for an instance to extract to.
+
+ This temporary directory is generated through `tempfile.mkdtemp()`
+
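+    Example:
+        A subclass would typically write its output into the staging
+        directory, for instance::
+
+            class ExtractThing(Extractor):
+                def process(self, instance):
+                    dir_path = self.staging_dir(instance)
+                    # ... write extracted files into dir_path
+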
+ """
+
+ order = 2.0
+
+ def staging_dir(self, instance):
+ """Provide a temporary directory in which to store extracted files
+
+ Upon calling this method the staging directory is stored inside
+ the instance.data['stagingDir']
+ """
+ staging_dir = instance.data.get('stagingDir', None)
+
+ if not staging_dir:
+ staging_dir = tempfile.mkdtemp(prefix="pyblish_tmp_")
+ instance.data['stagingDir'] = staging_dir
+
+ return staging_dir
diff --git a/config/plugins/fusion/create/create_tiff_saver.py b/config/plugins/fusion/create/create_tiff_saver.py
new file mode 100644
index 0000000000..b313ca994f
--- /dev/null
+++ b/config/plugins/fusion/create/create_tiff_saver.py
@@ -0,0 +1,46 @@
+import os
+
+import avalon.api
+from avalon import fusion
+
+
+class CreateTiffSaver(avalon.api.Creator):
+
+ name = "tiffDefault"
+ label = "Create Tiff Saver"
+ hosts = ["fusion"]
+ family = "colorbleed.saver"
+
+ def process(self):
+
+ file_format = "TiffFormat"
+
+ comp = fusion.get_current_comp()
+
+ # todo: improve method of getting current environment
+        # todo: prefer avalon.Session over os.environ
+
+ workdir = os.path.normpath(os.environ["AVALON_WORKDIR"])
+
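+        # The double dot is deliberate: Fusion inserts the frame number
+        # before the extension, resulting in e.g. "tiffDefault.0000.tiff"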
+ filename = "{}..tiff".format(self.name)
+ filepath = os.path.join(workdir, "render", "preview", filename)
+
+ with fusion.comp_lock_and_undo_chunk(comp):
+ args = (-32768, -32768) # Magical position numbers
+ saver = comp.AddTool("Saver", *args)
+ saver.SetAttrs({"TOOLS_Name": self.name})
+
+            # Setting input attributes is different from basic attributes;
+            # not to be confused with "MainInputAttributes"
+ saver["Clip"] = filepath
+ saver["OutputFormat"] = file_format
+
+            # Set standard TIFF settings
+ if saver[file_format] is None:
+ raise RuntimeError("File format is not set to TiffFormat, "
+ "this is a bug")
+
+ # Set file format attributes
+ saver[file_format]["Depth"] = 1 # int8 | int16 | float32 | other
+ saver[file_format]["SaveAlpha"] = 0
+
diff --git a/config/plugins/fusion/inventory/select_containers.py b/config/plugins/fusion/inventory/select_containers.py
new file mode 100644
index 0000000000..2f7b3e5809
--- /dev/null
+++ b/config/plugins/fusion/inventory/select_containers.py
@@ -0,0 +1,25 @@
+from avalon import api
+
+
+class FusionSelectContainers(api.InventoryAction):
+
+ label = "Select Containers"
+ icon = "mouse-pointer"
+ color = "#d8d8d8"
+
+ def process(self, containers):
+
+ import avalon.fusion
+
+ tools = [i["_tool"] for i in containers]
+
+ comp = avalon.fusion.get_current_comp()
+ flow = comp.CurrentFrame.FlowView
+
+ with avalon.fusion.comp_lock_and_undo_chunk(comp, self.label):
+ # Clear selection
+ flow.Select()
+
+ # Select tool
+ for tool in tools:
+ flow.Select(tool)
diff --git a/config/plugins/fusion/inventory/set_tool_color.py b/config/plugins/fusion/inventory/set_tool_color.py
new file mode 100644
index 0000000000..940a0e9941
--- /dev/null
+++ b/config/plugins/fusion/inventory/set_tool_color.py
@@ -0,0 +1,68 @@
+from avalon import api, style
+from avalon.vendor.Qt import QtGui, QtWidgets
+
+import avalon.fusion
+
+
+class FusionSetToolColor(api.InventoryAction):
+ """Update the color of the selected tools"""
+
+ label = "Set Tool Color"
+ icon = "plus"
+ color = "#d8d8d8"
+ _fallback_color = QtGui.QColor(1.0, 1.0, 1.0)
+
+ def process(self, containers):
+ """Color all selected tools the selected colors"""
+
+ result = []
+ comp = avalon.fusion.get_current_comp()
+
+ # Get tool color
+ first = containers[0]
+ tool = first["_tool"]
+ color = tool.TileColor
+
+ if color is not None:
+ qcolor = QtGui.QColor().fromRgbF(color["R"], color["G"], color["B"])
+ else:
+ qcolor = self._fallback_color
+
+ # Launch pick color
+ picked_color = self.get_color_picker(qcolor)
+ if not picked_color:
+ return
+
+ with avalon.fusion.comp_lock_and_undo_chunk(comp):
+ for container in containers:
+ # Convert color to RGB 0-1 floats
+ rgb_f = picked_color.getRgbF()
+ rgb_f_table = {"R": rgb_f[0], "G": rgb_f[1], "B": rgb_f[2]}
+
+ # Update tool
+ tool = container["_tool"]
+ tool.TileColor = rgb_f_table
+
+ result.append(container)
+
+ return result
+
+ def get_color_picker(self, color):
+ """Launch color picker and return chosen color
+
+ Args:
+ color(QtGui.QColor): Start color to display
+
+ Returns:
+ QtGui.QColor
+
+ """
+
+ color_dialog = QtWidgets.QColorDialog(color)
+ color_dialog.setStyleSheet(style.load_stylesheet())
+
+ accepted = color_dialog.exec_()
+ if not accepted:
+ return
+
+ return color_dialog.selectedColor()
diff --git a/config/plugins/fusion/load/actions.py b/config/plugins/fusion/load/actions.py
new file mode 100644
index 0000000000..19474076c2
--- /dev/null
+++ b/config/plugins/fusion/load/actions.py
@@ -0,0 +1,76 @@
+"""A module containing generic loader actions that will display in the Loader.
+
+"""
+
+from avalon import api
+
+
+class FusionSetFrameRangeLoader(api.Loader):
+ """Specific loader of Alembic for the avalon.animation family"""
+
+ families = ["colorbleed.animation",
+ "colorbleed.camera",
+ "colorbleed.imagesequence",
+ "colorbleed.yeticache",
+ "colorbleed.pointcache"]
+ representations = ["*"]
+
+ label = "Set frame range"
+ order = 11
+ icon = "clock-o"
+ color = "white"
+
+ def load(self, context, name, namespace, data):
+
+ from config.apps.fusion import lib
+
+ version = context['version']
+ version_data = version.get("data", {})
+
+ start = version_data.get("startFrame", None)
+ end = version_data.get("endFrame", None)
+
+ if start is None or end is None:
+ print("Skipping setting frame range because start or "
+ "end frame data is missing..")
+ return
+
+ lib.update_frame_range(start, end)
+
+
+class FusionSetFrameRangeWithHandlesLoader(api.Loader):
+ """Specific loader of Alembic for the avalon.animation family"""
+
+ families = ["colorbleed.animation",
+ "colorbleed.camera",
+ "colorbleed.imagesequence",
+ "colorbleed.yeticache",
+ "colorbleed.pointcache"]
+ representations = ["*"]
+
+ label = "Set frame range (with handles)"
+ order = 12
+ icon = "clock-o"
+ color = "white"
+
+ def load(self, context, name, namespace, data):
+
+ from config.apps.fusion import lib
+
+ version = context['version']
+ version_data = version.get("data", {})
+
+ start = version_data.get("startFrame", None)
+ end = version_data.get("endFrame", None)
+
+ if start is None or end is None:
+ print("Skipping setting frame range because start or "
+ "end frame data is missing..")
+ return
+
+ # Include handles
+ handles = version_data.get("handles", 0)
+ start -= handles
+ end += handles
+
+ lib.update_frame_range(start, end)
diff --git a/config/plugins/fusion/load/load_sequence.py b/config/plugins/fusion/load/load_sequence.py
new file mode 100644
index 0000000000..81313992ed
--- /dev/null
+++ b/config/plugins/fusion/load/load_sequence.py
@@ -0,0 +1,259 @@
+import os
+import contextlib
+
+from avalon import api
+import avalon.io as io
+
+
+@contextlib.contextmanager
+def preserve_inputs(tool, inputs):
+ """Preserve the tool's inputs after context"""
+
+ comp = tool.Comp()
+
+ values = {}
+ for name in inputs:
+ tool_input = getattr(tool, name)
+ value = tool_input[comp.TIME_UNDEFINED]
+ values[name] = value
+
+ try:
+ yield
+ finally:
+ for name, value in values.items():
+ tool_input = getattr(tool, name)
+ tool_input[comp.TIME_UNDEFINED] = value
+
+
+@contextlib.contextmanager
+def preserve_trim(loader, log=None):
+ """Preserve the relative trim of the Loader tool.
+
+ This tries to preserve the loader's trim (trim in and trim out) after
+ the context by reapplying the "amount" it trims on the clip's length at
+ start and end.
+
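+    Example:
+        A typical use is wrapping a clip change::
+
+            with preserve_trim(loader, log=log):
+                loader["Clip"] = new_path
+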
+ """
+
+ # Get original trim as amount of "trimming" from length
+ time = loader.Comp().TIME_UNDEFINED
+ length = loader.GetAttrs()["TOOLIT_Clip_Length"][1] - 1
+ trim_from_start = loader["ClipTimeStart"][time]
+ trim_from_end = length - loader["ClipTimeEnd"][time]
+
+ try:
+ yield
+ finally:
+
+ length = loader.GetAttrs()["TOOLIT_Clip_Length"][1] - 1
+ if trim_from_start > length:
+ trim_from_start = length
+ if log:
+ log.warning("Reducing trim in to %d "
+ "(because of less frames)" % trim_from_start)
+
+ remainder = length - trim_from_start
+ if trim_from_end > remainder:
+ trim_from_end = remainder
+ if log:
+ log.warning("Reducing trim in to %d "
+ "(because of less frames)" % trim_from_end)
+
+ loader["ClipTimeStart"][time] = trim_from_start
+ loader["ClipTimeEnd"][time] = length - trim_from_end
+
+
+def loader_shift(loader, frame, relative=True):
+ """Shift global in time by i preserving duration
+
+ This moves the loader by i frames preserving global duration. When relative
+ is False it will shift the global in to the start frame.
+
+ Args:
+ loader (tool): The fusion loader tool.
+ frame (int): The amount of frames to move.
+ relative (bool): When True the shift is relative, else the shift will
+ change the global in to frame.
+
+ Returns:
+ int: The resulting relative frame change (how much it moved)
+
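+    Example:
+        loader_shift(loader, 10)                    # shift 10 frames forward
+        loader_shift(loader, 1001, relative=False)  # move global in to 1001
+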
+ """
+ comp = loader.Comp()
+ time = comp.TIME_UNDEFINED
+
+ old_in = loader["GlobalIn"][time]
+ old_out = loader["GlobalOut"][time]
+
+ if relative:
+ shift = frame
+ else:
+ shift = frame - old_in
+
+ # Shifting global in will try to automatically compensate for the change
+ # in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those
+ # input values to "just shift" the clip
+ with preserve_inputs(loader, inputs=["ClipTimeStart",
+ "ClipTimeEnd",
+ "HoldFirstFrame",
+ "HoldLastFrame"]):
+
+ # GlobalIn cannot be set past GlobalOut or vice versa
+ # so we must apply them in the order of the shift.
+ if shift > 0:
+ loader["GlobalOut"][time] = old_out + shift
+ loader["GlobalIn"][time] = old_in + shift
+ else:
+ loader["GlobalIn"][time] = old_in + shift
+ loader["GlobalOut"][time] = old_out + shift
+
+ return int(shift)
+
+
+class FusionLoadSequence(api.Loader):
+ """Load image sequence into Fusion"""
+
+ families = ["colorbleed.imagesequence"]
+ representations = ["*"]
+
+ label = "Load sequence"
+ order = -10
+ icon = "code-fork"
+ color = "orange"
+
+ def load(self, context, name, namespace, data):
+
+ from avalon.fusion import (
+ imprint_container,
+ get_current_comp,
+ comp_lock_and_undo_chunk
+ )
+
+ # Fallback to asset name when namespace is None
+ if namespace is None:
+ namespace = context['asset']['name']
+
+ # Use the first file for now
+ path = self._get_first_image(self.fname)
+
+ # Create the Loader with the filename path set
+ comp = get_current_comp()
+ with comp_lock_and_undo_chunk(comp, "Create Loader"):
+
+ args = (-32768, -32768)
+ tool = comp.AddTool("Loader", *args)
+ tool["Clip"] = path
+
+ # Set global in point to start frame (if in version.data)
+ start = context["version"]["data"].get("startFrame", None)
+ if start is not None:
+ loader_shift(tool, start, relative=False)
+
+ imprint_container(tool,
+ name=name,
+ namespace=namespace,
+ context=context,
+ loader=self.__class__.__name__)
+
+ def switch(self, container, representation):
+ self.update(container, representation)
+
+ def update(self, container, representation):
+ """Update the Loader's path
+
+ Fusion automatically tries to reset some variables when changing
+ the loader's path to a new file. These automatic changes are to its
+ inputs:
+        - ClipTimeStart: Fusion resets to 0 if duration changes
+            - We keep the trim in as close as possible to the previous value.
+              When there are fewer frames than the amount of trim we reduce
+              it accordingly.
+
+        - ClipTimeEnd: Fusion resets to 0 if duration changes
+            - We keep the trim out as close as possible to the previous value
+              within the new amount of frames after trim in (ClipTimeStart)
+              has been set.
+
+        - GlobalIn: Fusion resets to comp's global in if duration changes
+            - We change it to the "startFrame"
+
+        - GlobalOut: Fusion resets to global in + length if duration changes
+            - We do the same as Fusion and allow it to take control.
+
+        - HoldFirstFrame: Fusion resets this to 0
+            - We preserve the value.
+
+        - HoldLastFrame: Fusion resets this to 0
+            - We preserve the value.
+
+        - Reverse: Fusion resets to disabled if "Loop" is not enabled.
+            - We preserve the value.
+
+        - Depth: Fusion resets to "Format"
+            - We preserve the value.
+
+        - KeyCode: Fusion resets to ""
+            - We preserve the value.
+
+        - TimeCodeOffset: Fusion resets to 0
+            - We preserve the value.
+
+ """
+
+ from avalon.fusion import comp_lock_and_undo_chunk
+
+ tool = container["_tool"]
+ assert tool.ID == "Loader", "Must be Loader"
+ comp = tool.Comp()
+
+ root = api.get_representation_path(representation)
+ path = self._get_first_image(root)
+
+ # Get start frame from version data
+ version = io.find_one({"type": "version",
+ "_id": representation["parent"]})
+ start = version["data"].get("startFrame")
+ if start is None:
+ self.log.warning("Missing start frame for updated version"
+ "assuming starts at frame 0 for: "
+ "{} ({})".format(tool.Name, representation))
+ start = 0
+
+ with comp_lock_and_undo_chunk(comp, "Update Loader"):
+
+ # Update the loader's path whilst preserving some values
+ with preserve_trim(tool, log=self.log):
+ with preserve_inputs(tool,
+ inputs=("HoldFirstFrame",
+ "HoldLastFrame",
+ "Reverse",
+ "Depth",
+ "KeyCode",
+ "TimeCodeOffset")):
+ tool["Clip"] = path
+
+ # Set the global in to the start frame of the sequence
+ global_in_changed = loader_shift(tool, start, relative=False)
+ if global_in_changed:
+ # Log this change to the user
+ self.log.debug("Changed '%s' global in: %d" % (tool.Name,
+ start))
+
+ # Update the imprinted representation
+ tool.SetData("avalon.representation", str(representation["_id"]))
+
+ def remove(self, container):
+
+ from avalon.fusion import comp_lock_and_undo_chunk
+
+ tool = container["_tool"]
+ assert tool.ID == "Loader", "Must be Loader"
+ comp = tool.Comp()
+
+ with comp_lock_and_undo_chunk(comp, "Remove Loader"):
+ tool.Delete()
+
+ def _get_first_image(self, root):
+ """Get first file in representation root"""
+ files = sorted(os.listdir(root))
+ return os.path.join(root, files[0])
diff --git a/config/plugins/fusion/publish/collect_comp.py b/config/plugins/fusion/publish/collect_comp.py
new file mode 100644
index 0000000000..1cf182c8ca
--- /dev/null
+++ b/config/plugins/fusion/publish/collect_comp.py
@@ -0,0 +1,24 @@
+import os
+
+import pyblish.api
+
+from avalon import fusion
+
+
+class CollectCurrentCompFusion(pyblish.api.ContextPlugin):
+ """Collect current comp"""
+
+ order = pyblish.api.CollectorOrder - 0.4
+ label = "Collect Current Comp"
+ hosts = ["fusion"]
+
+ def process(self, context):
+ """Collect all image sequence tools"""
+
+ current_comp = fusion.get_current_comp()
+ assert current_comp, "Must have active Fusion composition"
+ context.data["currentComp"] = current_comp
+
+ # Store path to current file
+ filepath = current_comp.GetAttrs().get("COMPS_FileName", "")
+ context.data['currentFile'] = filepath
diff --git a/config/plugins/fusion/publish/collect_fusion_version.py b/config/plugins/fusion/publish/collect_fusion_version.py
new file mode 100644
index 0000000000..65d8386f33
--- /dev/null
+++ b/config/plugins/fusion/publish/collect_fusion_version.py
@@ -0,0 +1,22 @@
+import pyblish.api
+
+
+class CollectFusionVersion(pyblish.api.ContextPlugin):
+ """Collect current comp"""
+
+ order = pyblish.api.CollectorOrder
+ label = "Collect Fusion Version"
+ hosts = ["fusion"]
+
+ def process(self, context):
+ """Collect all image sequence tools"""
+
+ comp = context.data.get("currentComp")
+ if not comp:
+ raise RuntimeError("No comp previously collected, unable to "
+ "retrieve Fusion version.")
+
+ version = comp.GetApp().Version
+ context.data["fusionVersion"] = version
+
+ self.log.info("Fusion version: %s" % version)
diff --git a/config/plugins/fusion/publish/collect_instances.py b/config/plugins/fusion/publish/collect_instances.py
new file mode 100644
index 0000000000..322197fc87
--- /dev/null
+++ b/config/plugins/fusion/publish/collect_instances.py
@@ -0,0 +1,96 @@
+import os
+
+import pyblish.api
+
+
+def get_comp_render_range(comp):
+ """Return comp's start and end render range."""
+ comp_attrs = comp.GetAttrs()
+ start = comp_attrs["COMPN_RenderStart"]
+ end = comp_attrs["COMPN_RenderEnd"]
+
+ # Whenever render ranges are undefined fall back
+ # to the comp's global start and end
+    if start == -1000000000:
+        start = comp_attrs["COMPN_GlobalStart"]
+    if end == -1000000000:
+        end = comp_attrs["COMPN_GlobalEnd"]
+
+ return start, end
+
+
+class CollectInstances(pyblish.api.ContextPlugin):
+ """Collect Fusion saver instances
+
+ This additionally stores the Comp start and end render range in the
+ current context's data as "startFrame" and "endFrame".
+
+ """
+
+ order = pyblish.api.CollectorOrder
+ label = "Collect Instances"
+ hosts = ["fusion"]
+
+ def process(self, context):
+ """Collect all image sequence tools"""
+
+ from avalon.fusion.lib import get_frame_path
+
+ comp = context.data["currentComp"]
+
+ # Get all savers in the comp
+ tools = comp.GetToolList(False).values()
+ savers = [tool for tool in tools if tool.ID == "Saver"]
+
+ start, end = get_comp_render_range(comp)
+ context.data["startFrame"] = start
+ context.data["endFrame"] = end
+
+ for tool in savers:
+ path = tool["Clip"][comp.TIME_UNDEFINED]
+
+ tool_attrs = tool.GetAttrs()
+ active = not tool_attrs["TOOLB_PassThrough"]
+
+ if not path:
+ self.log.warning("Skipping saver because it "
+ "has no path set: {}".format(tool.Name))
+ continue
+
+ filename = os.path.basename(path)
+ head, padding, tail = get_frame_path(filename)
+ ext = os.path.splitext(path)[1]
+ assert tail == ext, ("Tail does not match %s" % ext)
+ subset = head.rstrip("_. ") # subset is head of the filename
+
+ # Include start and end render frame in label
+ label = "{subset} ({start}-{end})".format(subset=subset,
+ start=int(start),
+ end=int(end))
+
+ instance = context.create_instance(subset)
+ instance.data.update({
+ "asset": os.environ["AVALON_ASSET"], # todo: not a constant
+ "subset": subset,
+ "path": path,
+ "outputDir": os.path.dirname(path),
+ "ext": ext, # todo: should be redundant
+ "label": label,
+ "families": ["colorbleed.saver"],
+ "family": "colorbleed.saver",
+ "active": active,
+ "publish": active # backwards compatibility
+ })
+
+ instance.append(tool)
+
+ self.log.info("Found: \"%s\" " % path)
+
+ # Sort/grouped by family (preserving local index)
+ context[:] = sorted(context, key=self.sort_by_family)
+
+ return context
+
+ def sort_by_family(self, instance):
+ """Sort by family"""
+ return instance.data.get("families", instance.data.get("family"))
diff --git a/config/plugins/fusion/publish/collect_render_target.py b/config/plugins/fusion/publish/collect_render_target.py
new file mode 100644
index 0000000000..a9193eaee5
--- /dev/null
+++ b/config/plugins/fusion/publish/collect_render_target.py
@@ -0,0 +1,44 @@
+import pyblish.api
+
+
+class CollectFusionRenderMode(pyblish.api.InstancePlugin):
+ """Collect current comp's render Mode
+
+ Options:
+ renderlocal
+ deadline
+
+ Note that this value is set for each comp separately. When you save the
+ comp this information will be stored in that file. If for some reason the
+ available tool does not visualize which render mode is set for the
+ current comp, please run the following line in the console (Py2)
+
+ comp.GetData("colorbleed.rendermode")
+
+ This will return the name of the current render mode as seen above under
+ Options.
+
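+    Similarly, one could switch the comp to render on Deadline by setting
+    the comp data from the console, for example:
+
+        comp.SetData("colorbleed.rendermode", "deadline")
+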
+ """
+
+ order = pyblish.api.CollectorOrder + 0.4
+ label = "Collect Render Mode"
+ hosts = ["fusion"]
+ families = ["colorbleed.saver"]
+
+ def process(self, instance):
+ """Collect all image sequence tools"""
+ options = ["renderlocal", "deadline"]
+
+ comp = instance.context.data.get("currentComp")
+ if not comp:
+ raise RuntimeError("No comp previously collected, unable to "
+ "retrieve Fusion version.")
+
+ rendermode = comp.GetData("colorbleed.rendermode") or "renderlocal"
+ assert rendermode in options, "Must be supported render mode"
+
+ self.log.info("Render mode: {0}".format(rendermode))
+
+ # Append family
+ family = "colorbleed.saver.{0}".format(rendermode)
+ instance.data["families"].append(family)
diff --git a/config/plugins/fusion/publish/increment_current_file_deadline.py b/config/plugins/fusion/publish/increment_current_file_deadline.py
new file mode 100644
index 0000000000..9d766c426c
--- /dev/null
+++ b/config/plugins/fusion/publish/increment_current_file_deadline.py
@@ -0,0 +1,34 @@
+import pyblish.api
+
+
+class FusionIncrementCurrentFile(pyblish.api.ContextPlugin):
+ """Increment the current file.
+
+ Saves the current file with an increased version number.
+
+ """
+
+ label = "Increment current file"
+ order = pyblish.api.IntegratorOrder + 9.0
+ hosts = ["fusion"]
+ families = ["colorbleed.saver.deadline"]
+ optional = True
+
+ def process(self, context):
+
+ from colorbleed.lib import version_up
+ from colorbleed.action import get_errored_plugins_from_data
+
+ errored_plugins = get_errored_plugins_from_data(context)
+ if any(plugin.__name__ == "FusionSubmitDeadline"
+ for plugin in errored_plugins):
+ raise RuntimeError("Skipping incrementing current file because "
+ "submission to deadline failed.")
+
+ comp = context.data.get("currentComp")
+ assert comp, "Must have comp"
+
+ current_filepath = context.data["currentFile"]
+ new_filepath = version_up(current_filepath)
+
+ comp.Save(new_filepath)
diff --git a/config/plugins/fusion/publish/publish_image_sequences.py b/config/plugins/fusion/publish/publish_image_sequences.py
new file mode 100644
index 0000000000..33d6d2773d
--- /dev/null
+++ b/config/plugins/fusion/publish/publish_image_sequences.py
@@ -0,0 +1,98 @@
+import re
+import os
+import json
+import subprocess
+
+import pyblish.api
+
+from colorbleed.action import get_errored_plugins_from_data
+
+
+def _get_script():
+ """Get path to the image sequence script"""
+
+ # todo: use a more elegant way to get the python script
+
+ try:
+ from colorbleed.scripts import publish_filesequence
+ except Exception:
+ raise RuntimeError("Expected module 'publish_imagesequence'"
+ "to be available")
+
+ module_path = publish_filesequence.__file__
+ if module_path.endswith(".pyc"):
+ module_path = module_path[:-len(".pyc")] + ".py"
+
+ return module_path
+
+
+class PublishImageSequence(pyblish.api.InstancePlugin):
+ """Publish the generated local image sequences."""
+
+ order = pyblish.api.IntegratorOrder
+ label = "Publish Rendered Image Sequence(s)"
+ hosts = ["fusion"]
+ families = ["colorbleed.saver.renderlocal"]
+
+ def process(self, instance):
+
+ # Skip this plug-in if the ExtractImageSequence failed
+ errored_plugins = get_errored_plugins_from_data(instance.context)
+ if any(plugin.__name__ == "FusionRenderLocal" for plugin in
+ errored_plugins):
+ raise RuntimeError("Fusion local render failed, "
+ "publishing images skipped.")
+
+ subset = instance.data["subset"]
+ ext = instance.data["ext"]
+
+ # Regex to match resulting renders
+ regex = "^{subset}.*[0-9]+{ext}+$".format(subset=re.escape(subset),
+ ext=re.escape(ext))
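+        # e.g. subset "beauty" with ext ".exr" matches "beauty.0001.exr"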
+
+ # The instance has most of the information already stored
+ metadata = {
+ "regex": regex,
+ "startFrame": instance.context.data["startFrame"],
+ "endFrame": instance.context.data["endFrame"],
+ "families": ["colorbleed.imagesequence"],
+ }
+
+ # Write metadata and store the path in the instance
+ output_directory = instance.data["outputDir"]
+ path = os.path.join(output_directory,
+ "{}_metadata.json".format(subset))
+ with open(path, "w") as f:
+ json.dump(metadata, f)
+
+ assert os.path.isfile(path), ("Stored path is not a file for %s"
+ % instance.data["name"])
+
+ # Suppress any subprocess console
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+ startupinfo.wShowWindow = subprocess.SW_HIDE
+
+ process = subprocess.Popen(["python", _get_script(),
+ "--paths", path],
+ bufsize=1,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ startupinfo=startupinfo)
+
+ while True:
+ output = process.stdout.readline()
+ # Break when there is no output or a return code has been given
+ if output == '' and process.poll() is not None:
+ process.stdout.close()
+ break
+ if output:
+ line = output.strip()
+ if line.startswith("ERROR"):
+ self.log.error(line)
+ else:
+ self.log.info(line)
+
+ if process.returncode != 0:
+ raise RuntimeError("Process quit with non-zero "
+ "return code: {}".format(process.returncode))
diff --git a/config/plugins/fusion/publish/render_local.py b/config/plugins/fusion/publish/render_local.py
new file mode 100644
index 0000000000..39ce0d45dc
--- /dev/null
+++ b/config/plugins/fusion/publish/render_local.py
@@ -0,0 +1,42 @@
+import pyblish.api
+
+import avalon.fusion as fusion
+
+
+class FusionRenderLocal(pyblish.api.InstancePlugin):
+ """Render the current Fusion composition locally.
+
+ Extract the result of savers by starting a comp render
+ This will run the local render of Fusion.
+
+ """
+
+ order = pyblish.api.ExtractorOrder
+ label = "Render Local"
+ hosts = ["fusion"]
+ families = ["colorbleed.saver.renderlocal"]
+
+ def process(self, instance):
+
+ # This should be a ContextPlugin, but this is a workaround
+ # for a bug in pyblish to run once for a family: issue #250
+ context = instance.context
+ key = "__hasRun{}".format(self.__class__.__name__)
+ if context.data.get(key, False):
+ return
+ else:
+ context.data[key] = True
+
+ current_comp = context.data["currentComp"]
+ start_frame = current_comp.GetAttrs("COMPN_RenderStart")
+ end_frame = current_comp.GetAttrs("COMPN_RenderEnd")
+
+ self.log.info("Starting render")
+ self.log.info("Start frame: {}".format(start_frame))
+ self.log.info("End frame: {}".format(end_frame))
+
+ with fusion.comp_lock_and_undo_chunk(current_comp):
+ result = current_comp.Render()
+
+ if not result:
+ raise RuntimeError("Comp render failed")
diff --git a/config/plugins/fusion/publish/save_scene.py b/config/plugins/fusion/publish/save_scene.py
new file mode 100644
index 0000000000..e88b8ba769
--- /dev/null
+++ b/config/plugins/fusion/publish/save_scene.py
@@ -0,0 +1,21 @@
+import pyblish.api
+
+
+class FusionSaveComp(pyblish.api.ContextPlugin):
+ """Save current comp"""
+
+ label = "Save current file"
+ order = pyblish.api.ExtractorOrder - 0.49
+ hosts = ["fusion"]
+ families = ["colorbleed.saver"]
+
+ def process(self, context):
+
+ comp = context.data.get("currentComp")
+ assert comp, "Must have comp"
+
+ current = comp.GetAttrs().get("COMPS_FileName", "")
+ assert context.data['currentFile'] == current
+
+ self.log.info("Saving current file..")
+ comp.Save()
diff --git a/config/plugins/fusion/publish/submit_deadline.py b/config/plugins/fusion/publish/submit_deadline.py
new file mode 100644
index 0000000000..9fbf9b4003
--- /dev/null
+++ b/config/plugins/fusion/publish/submit_deadline.py
@@ -0,0 +1,149 @@
+import os
+import json
+import getpass
+
+from avalon import api
+from avalon.vendor import requests
+
+import pyblish.api
+
+
+class FusionSubmitDeadline(pyblish.api.InstancePlugin):
+ """Submit current Comp to Deadline
+
+ Renders are submitted to a Deadline Web Service as
+ supplied via the environment variable AVALON_DEADLINE
+
+ """
+
+ label = "Submit to Deadline"
+ order = pyblish.api.IntegratorOrder
+ hosts = ["fusion"]
+ families = ["colorbleed.saver.deadline"]
+
+ def process(self, instance):
+
+ context = instance.context
+
+ key = "__hasRun{}".format(self.__class__.__name__)
+ if context.data.get(key, False):
+ return
+ else:
+ context.data[key] = True
+
+ from avalon.fusion.lib import get_frame_path
+
+ AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+ "http://localhost:8082")
+ assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+
+ # Collect all saver instances in context that are to be rendered
+ saver_instances = []
+ for instance in context[:]:
+            if self.families[0] not in instance.data.get("families"):
+ # Allow only saver family instances
+ continue
+
+ if not instance.data.get("publish", True):
+ # Skip inactive instances
+ continue
+ self.log.debug(instance.data["name"])
+ saver_instances.append(instance)
+
+ if not saver_instances:
+ raise RuntimeError("No instances found for Deadline submittion")
+
+ fusion_version = int(context.data["fusionVersion"])
+ filepath = context.data["currentFile"]
+ filename = os.path.basename(filepath)
+ comment = context.data.get("comment", "")
+ deadline_user = context.data.get("deadlineUser", getpass.getuser())
+
+ # Documentation for keys available at:
+ # https://docs.thinkboxsoftware.com
+ # /products/deadline/8.0/1_User%20Manual/manual
+ # /manual-submission.html#job-info-file-options
+ payload = {
+ "JobInfo": {
+ # Top-level group name
+ "BatchName": filename,
+
+ # Job name, as seen in Monitor
+ "Name": filename,
+
+ # User, as seen in Monitor
+ "UserName": deadline_user,
+
+ # Use a default submission pool for Fusion
+ "Pool": "fusion",
+
+ "Plugin": "Fusion",
+ "Frames": "{start}-{end}".format(
+ start=int(context.data["startFrame"]),
+ end=int(context.data["endFrame"])
+ ),
+
+ "Comment": comment,
+ },
+ "PluginInfo": {
+ # Input
+ "FlowFile": filepath,
+
+ # Mandatory for Deadline
+ "Version": str(fusion_version),
+
+ # Render in high quality
+ "HighQuality": True,
+
+ # Whether saver output should be checked after rendering
+ # is complete
+ "CheckOutput": True,
+
+ # Proxy: higher numbers smaller images for faster test renders
+ # 1 = no proxy quality
+ "Proxy": 1,
+ },
+
+ # Mandatory for Deadline, may be empty
+ "AuxFiles": []
+ }
+
+ # Enable going to rendered frames from Deadline Monitor
+ for index, instance in enumerate(saver_instances):
+ head, padding, tail = get_frame_path(instance.data["path"])
+ path = "{}{}{}".format(head, "#" * padding, tail)
+ folder, filename = os.path.split(path)
+ payload["JobInfo"]["OutputDirectory%d" % index] = folder
+ payload["JobInfo"]["OutputFilename%d" % index] = filename
+
+ # Include critical variables with submission
+ keys = [
+            # TODO: This won't work if the slaves don't have access to
+ # these paths, such as if slaves are running Linux and the
+ # submitter is on Windows.
+ "PYTHONPATH",
+ "OFX_PLUGIN_PATH",
+ "FUSION9_MasterPrefs"
+ ]
+ environment = dict({key: os.environ[key] for key in keys
+ if key in os.environ}, **api.Session)
+
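+        # Deadline expects enumerated keys, resulting in entries like
+        # "EnvironmentKeyValue0": "PYTHONPATH=...", one per variable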
+ payload["JobInfo"].update({
+ "EnvironmentKeyValue%d" % index: "{key}={value}".format(
+ key=key,
+ value=environment[key]
+ ) for index, key in enumerate(environment)
+ })
+
+ self.log.info("Submitting..")
+ self.log.info(json.dumps(payload, indent=4, sort_keys=True))
+
+ # E.g. http://192.168.0.1:8082/api/jobs
+ url = "{}/api/jobs".format(AVALON_DEADLINE)
+ response = requests.post(url, json=payload)
+ if not response.ok:
+ raise Exception(response.text)
+
+ # Store the response for dependent job submission plug-ins
+ for instance in saver_instances:
+ instance.data["deadlineSubmissionJob"] = response.json()
diff --git a/config/plugins/fusion/publish/validate_background_depth.py b/config/plugins/fusion/publish/validate_background_depth.py
new file mode 100644
index 0000000000..abf5dd248e
--- /dev/null
+++ b/config/plugins/fusion/publish/validate_background_depth.py
@@ -0,0 +1,40 @@
+import pyblish.api
+
+from colorbleed import action
+
+
+class ValidateBackgroundDepth(pyblish.api.InstancePlugin):
+ """Validate if all Background tool are set to float32 bit"""
+
+ order = pyblish.api.ValidatorOrder
+ label = "Validate Background Depth 32 bit"
+ actions = [action.RepairAction]
+ hosts = ["fusion"]
+ families = ["colorbleed.saver"]
+ optional = True
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ context = instance.context
+ comp = context.data.get("currentComp")
+ assert comp, "Must have Comp object"
+
+ backgrounds = comp.GetToolList(False, "Background").values()
+ if not backgrounds:
+ return []
+
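+        # A "Depth" input of 4.0 corresponds to 32-bit float on the
+        # Background tool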
+ return [i for i in backgrounds if i.GetInput("Depth") != 4.0]
+
+ def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Found %i nodes which are not set to float32"
+ % len(invalid))
+
+ @classmethod
+ def repair(cls, instance):
+ comp = instance.context.data.get("currentComp")
+ invalid = cls.get_invalid(instance)
+ for i in invalid:
+ i.SetInput("Depth", 4.0, comp.TIME_UNDEFINED)
diff --git a/config/plugins/fusion/publish/validate_comp_saved.py b/config/plugins/fusion/publish/validate_comp_saved.py
new file mode 100644
index 0000000000..18621ceb4f
--- /dev/null
+++ b/config/plugins/fusion/publish/validate_comp_saved.py
@@ -0,0 +1,29 @@
+import os
+
+import pyblish.api
+
+
+class ValidateFusionCompSaved(pyblish.api.ContextPlugin):
+ """Ensure current comp is saved"""
+
+ order = pyblish.api.ValidatorOrder
+ label = "Validate Comp Saved"
+ families = ["colorbleed.saver"]
+ hosts = ["fusion"]
+
+ def process(self, context):
+
+ comp = context.data.get("currentComp")
+ assert comp, "Must have Comp object"
+ attrs = comp.GetAttrs()
+
+ filename = attrs["COMPS_FileName"]
+ if not filename:
+ raise RuntimeError("Comp is not saved.")
+
+ if not os.path.exists(filename):
+ raise RuntimeError("Comp file does not exist: %s" % filename)
+
+ if attrs["COMPB_Modified"]:
+ self.log.warning("Comp is modified. Save your comp to ensure your "
+ "changes propagate correctly.")
diff --git a/config/plugins/fusion/publish/validate_create_folder_checked.py b/config/plugins/fusion/publish/validate_create_folder_checked.py
new file mode 100644
index 0000000000..7a10b1e538
--- /dev/null
+++ b/config/plugins/fusion/publish/validate_create_folder_checked.py
@@ -0,0 +1,41 @@
+import pyblish.api
+
+from colorbleed import action
+
+
+class ValidateCreateFolderChecked(pyblish.api.InstancePlugin):
+ """Valid if all savers have the input attribute CreateDir checked on
+
+ This attribute ensures that the folders to which the saver will write
+ will be created.
+ """
+
+ order = pyblish.api.ValidatorOrder
+ actions = [action.RepairAction]
+ label = "Validate Create Folder Checked"
+ families = ["colorbleed.saver"]
+ hosts = ["fusion"]
+
+ @classmethod
+ def get_invalid(cls, instance):
+ active = instance.data.get("active", instance.data.get("publish"))
+ if not active:
+ return []
+
+ tool = instance[0]
+ create_dir = tool.GetInput("CreateDir")
+        if create_dir == 0.0:
+            cls.log.error("%s has Create Folder turned off" % tool.Name)
+            return [tool]
+
+        return []
+
+ def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Found Saver with Create Folder During "
+ "Render checked off")
+
+ @classmethod
+ def repair(cls, instance):
+ invalid = cls.get_invalid(instance)
+ for tool in invalid:
+ tool.SetInput("CreateDir", 1.0)
diff --git a/config/plugins/fusion/publish/validate_filename_has_extension.py b/config/plugins/fusion/publish/validate_filename_has_extension.py
new file mode 100644
index 0000000000..b7fe1f4c31
--- /dev/null
+++ b/config/plugins/fusion/publish/validate_filename_has_extension.py
@@ -0,0 +1,36 @@
+import os
+
+import pyblish.api
+
+
+class ValidateFilenameHasExtension(pyblish.api.InstancePlugin):
+ """Ensure the Saver has an extension in the filename path
+
+ This disallows files written as `filename` instead of `filename.frame.ext`.
+ Fusion does not always set an extension for your filename when
+ changing the file format of the saver.
+
+ """
+
+ order = pyblish.api.ValidatorOrder
+ label = "Validate Filename Has Extension"
+ families = ["colorbleed.saver"]
+ hosts = ["fusion"]
+
+ def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Found Saver without an extension")
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ path = instance.data["path"]
+ fname, ext = os.path.splitext(path)
+
+ if not ext:
+ tool = instance[0]
+ cls.log.error("%s has no extension specified" % tool.Name)
+ return [tool]
+
+ return []
diff --git a/config/plugins/fusion/publish/validate_saver_has_input.py b/config/plugins/fusion/publish/validate_saver_has_input.py
new file mode 100644
index 0000000000..27ed77d9d9
--- /dev/null
+++ b/config/plugins/fusion/publish/validate_saver_has_input.py
@@ -0,0 +1,29 @@
+import pyblish.api
+
+
+class ValidateSaverHasInput(pyblish.api.InstancePlugin):
+ """Validate saver has incoming connection
+
+ This ensures a Saver has at least an input connection.
+
+ """
+
+ order = pyblish.api.ValidatorOrder
+ label = "Validate Saver Has Input"
+ families = ["colorbleed.saver"]
+ hosts = ["fusion"]
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ saver = instance[0]
+ if not saver.Input.GetConnectedOutput():
+ return [saver]
+
+ return []
+
+ def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Saver has no incoming connection: "
+ "{} ({})".format(instance, invalid[0].Name))
diff --git a/config/plugins/fusion/publish/validate_saver_passthrough.py b/config/plugins/fusion/publish/validate_saver_passthrough.py
new file mode 100644
index 0000000000..ca58eba5bd
--- /dev/null
+++ b/config/plugins/fusion/publish/validate_saver_passthrough.py
@@ -0,0 +1,44 @@
+import pyblish.api
+
+
+class ValidateSaverPassthrough(pyblish.api.ContextPlugin):
+ """Validate saver passthrough is similar to Pyblish publish state"""
+
+ order = pyblish.api.ValidatorOrder
+ label = "Validate Saver Passthrough"
+ families = ["colorbleed.saver"]
+ hosts = ["fusion"]
+
+ def process(self, context):
+
+ # Workaround for ContextPlugin always running, even if no instance
+ # is present with the family
+ instances = pyblish.api.instances_by_plugin(instances=list(context),
+ plugin=self)
+ if not instances:
+ self.log.debug("Ignoring plugin.. (bugfix)")
+
+ invalid_instances = []
+ for instance in instances:
+ invalid = self.is_invalid(instance)
+ if invalid:
+ invalid_instances.append(instance)
+
+ if invalid_instances:
+ self.log.info("Reset pyblish to collect your current scene state, "
+ "that should fix error.")
+ raise RuntimeError("Invalid instances: "
+ "{0}".format(invalid_instances))
+
+ def is_invalid(self, instance):
+
+ saver = instance[0]
+ attr = saver.GetAttrs()
+ active = not attr["TOOLB_PassThrough"]
+
+ if active != instance.data["publish"]:
+ self.log.info("Saver has different passthrough state than "
+ "Pyblish: {} ({})".format(instance, saver.Name))
+ return [saver]
+
+ return []
diff --git a/config/plugins/fusion/publish/validate_unique_subsets.py b/config/plugins/fusion/publish/validate_unique_subsets.py
new file mode 100644
index 0000000000..527b4acc69
--- /dev/null
+++ b/config/plugins/fusion/publish/validate_unique_subsets.py
@@ -0,0 +1,29 @@
+import pyblish.api
+
+
+class ValidateUniqueSubsets(pyblish.api.InstancePlugin):
+ """Ensure all instances have a unique subset name"""
+
+ order = pyblish.api.ValidatorOrder
+ label = "Validate Unique Subsets"
+ families = ["colorbleed.saver"]
+ hosts = ["fusion"]
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ context = instance.context
+ subset = instance.data["subset"]
+ for other_instance in context[:]:
+ if other_instance == instance:
+ continue
+
+ if other_instance.data["subset"] == subset:
+ return [instance] # current instance is invalid
+
+ return []
+
+ def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Animation content is invalid. See log.")
diff --git a/config/plugins/global/load/copy_file.py b/config/plugins/global/load/copy_file.py
new file mode 100644
index 0000000000..bbb8e1d6f7
--- /dev/null
+++ b/config/plugins/global/load/copy_file.py
@@ -0,0 +1,33 @@
+from avalon import api, style
+
+
+class CopyFile(api.Loader):
+ """Copy the published file to be pasted at the desired location"""
+
+ representations = ["*"]
+ families = ["*"]
+
+ label = "Copy File"
+ order = 10
+ icon = "copy"
+ color = style.colors.default
+
+ def load(self, context, name=None, namespace=None, data=None):
+ self.log.info("Added copy to clipboard: {0}".format(self.fname))
+ self.copy_file_to_clipboard(self.fname)
+
+ @staticmethod
+ def copy_file_to_clipboard(path):
+ from avalon.vendor.Qt import QtCore, QtWidgets
+
+ app = QtWidgets.QApplication.instance()
+ assert app, "Must have running QApplication instance"
+
+ # Build mime data for clipboard
+ data = QtCore.QMimeData()
+ url = QtCore.QUrl.fromLocalFile(path)
+ data.setUrls([url])
+
+ # Set to Clipboard
+ clipboard = app.clipboard()
+ clipboard.setMimeData(data)
diff --git a/config/plugins/global/load/copy_file_path.py b/config/plugins/global/load/copy_file_path.py
new file mode 100644
index 0000000000..cfda9dc271
--- /dev/null
+++ b/config/plugins/global/load/copy_file_path.py
@@ -0,0 +1,29 @@
+import os
+
+from avalon import api
+
+
+class CopyFilePath(api.Loader):
+ """Copy published file path to clipboard"""
+ representations = ["*"]
+ families = ["*"]
+
+ label = "Copy File Path"
+ order = 20
+ icon = "clipboard"
+ color = "#999999"
+
+ def load(self, context, name=None, namespace=None, data=None):
+ self.log.info("Added file path to clipboard: {0}".format(self.fname))
+ self.copy_path_to_clipboard(self.fname)
+
+ @staticmethod
+ def copy_path_to_clipboard(path):
+ from avalon.vendor.Qt import QtCore, QtWidgets
+
+ app = QtWidgets.QApplication.instance()
+ assert app, "Must have running QApplication instance"
+
+ # Set to Clipboard
+ clipboard = app.clipboard()
+ clipboard.setText(os.path.normpath(path))
diff --git a/config/plugins/global/load/open_imagesequence.py b/config/plugins/global/load/open_imagesequence.py
new file mode 100644
index 0000000000..12fb9a0226
--- /dev/null
+++ b/config/plugins/global/load/open_imagesequence.py
@@ -0,0 +1,49 @@
+import sys
+import os
+import subprocess
+
+from avalon import api
+
+
+def open_file(filepath):
+ """Open file with system default executable"""
+ if sys.platform.startswith('darwin'):
+ subprocess.call(('open', filepath))
+ elif os.name == 'nt':
+ os.startfile(filepath)
+ elif os.name == 'posix':
+ subprocess.call(('xdg-open', filepath))
+
+
+class PlayImageSequence(api.Loader):
+ """Open Image Sequence with system default"""
+
+ families = ["colorbleed.imagesequence"]
+ representations = ["*"]
+
+ label = "Play sequence"
+ order = -10
+ icon = "play-circle"
+ color = "orange"
+
+ def load(self, context, name, namespace, data):
+
+ directory = self.fname
+ from avalon.vendor import clique
+
+ pattern = clique.PATTERNS["frames"]
+ files = os.listdir(directory)
+ collections, remainder = clique.assemble(files,
+ patterns=[pattern],
+ minimum_items=1)
+
+ assert not remainder, ("There shouldn't have been a remainder for "
+ "'%s': %s" % (directory, remainder))
+
+        sequence = collections[0]
+        first_image = list(sequence)[0]
+ filepath = os.path.normpath(os.path.join(directory, first_image))
+
+ self.log.info("Opening : {}".format(filepath))
+
+        open_file(filepath)
diff --git a/config/plugins/global/publish/cleanup.py b/config/plugins/global/publish/cleanup.py
new file mode 100644
index 0000000000..43f8385592
--- /dev/null
+++ b/config/plugins/global/publish/cleanup.py
@@ -0,0 +1,33 @@
+import os
+import shutil
+import pyblish.api
+
+
+class CleanUp(pyblish.api.InstancePlugin):
+ """Cleans up the staging directory after a successful publish.
+
+ The removal will only happen for staging directories which are inside the
+ temporary folder, otherwise the folder is ignored.
+
+ """
+
+ order = pyblish.api.IntegratorOrder + 10
+ label = "Clean Up"
+
+ def process(self, instance):
+
+ import tempfile
+
+ staging_dir = instance.data.get("stagingDir", None)
+ if not staging_dir or not os.path.exists(staging_dir):
+ self.log.info("No staging directory found: %s" % staging_dir)
+ return
+
+ temp_root = tempfile.gettempdir()
+ if not os.path.normpath(staging_dir).startswith(temp_root):
+ self.log.info("Skipping cleanup. Staging directory is not in the "
+ "temp folder: %s" % staging_dir)
+ return
+
+ self.log.info("Removing temporary folder ...")
+ shutil.rmtree(staging_dir)
diff --git a/config/plugins/global/publish/collect_assumed_destination.py b/config/plugins/global/publish/collect_assumed_destination.py
new file mode 100644
index 0000000000..00e56cd2bf
--- /dev/null
+++ b/config/plugins/global/publish/collect_assumed_destination.py
@@ -0,0 +1,108 @@
+import pyblish.api
+import os
+
+from avalon import io, api
+
+
+class CollectAssumedDestination(pyblish.api.InstancePlugin):
+ """Generate the assumed destination path where the file will be stored"""
+
+ label = "Collect Assumed Destination"
+ order = pyblish.api.CollectorOrder + 0.499
+
+ def process(self, instance):
+
+ self.create_destination_template(instance)
+
+ template_data = instance.data["assumedTemplateData"]
+ template = instance.data["template"]
+
+ mock_template = template.format(**template_data)
+
+ # For now assume resources end up in a "resources" folder in the
+ # published folder
+ mock_destination = os.path.join(os.path.dirname(mock_template),
+ "resources")
+
+ # Clean the path
+ mock_destination = os.path.abspath(os.path.normpath(mock_destination))
+
+ # Define resource destination and transfers
+ resources = instance.data.get("resources", list())
+ transfers = instance.data.get("transfers", list())
+ for resource in resources:
+
+ # Add destination to the resource
+ source_filename = os.path.basename(resource["source"])
+ destination = os.path.join(mock_destination, source_filename)
+ resource['destination'] = destination
+
+ # Collect transfers for the individual files of the resource
+ # e.g. all individual files of a cache or UDIM textures.
+ files = resource['files']
+ for fsrc in files:
+ fname = os.path.basename(fsrc)
+ fdest = os.path.join(mock_destination, fname)
+ transfers.append([fsrc, fdest])
+
+ instance.data["resources"] = resources
+ instance.data["transfers"] = transfers
+
+ def create_destination_template(self, instance):
+ """Create a filepath based on the current data available
+
+ Example template:
+ {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
+ {subset}.{representation}
+ Args:
+ instance: the instance to publish
+
+ Returns:
+ file path (str)
+ """
+
+ # get all the stuff from the database
+ subset_name = instance.data["subset"]
+ asset_name = instance.data["asset"]
+ project_name = api.Session["AVALON_PROJECT"]
+
+ project = io.find_one({"type": "project",
+ "name": project_name},
+ projection={"config": True})
+
+ template = project["config"]["template"]["publish"]
+
+ asset = io.find_one({"type": "asset",
+ "name": asset_name,
+ "parent": project["_id"]})
+
+ assert asset, ("No asset found by the name '{}' "
+ "in project '{}'".format(asset_name, project_name))
+ silo = asset['silo']
+
+ subset = io.find_one({"type": "subset",
+ "name": subset_name,
+ "parent": asset["_id"]})
+
+ # assume there is no version yet, we start at `1`
+ version = None
+ version_number = 1
+ if subset is not None:
+ version = io.find_one({"type": "version",
+ "parent": subset["_id"]},
+ sort=[("name", -1)])
+
+            # if there is a subset there ought to be a version
+ if version is not None:
+ version_number += version["name"]
+
+ template_data = {"root": api.Session["AVALON_PROJECTS"],
+ "project": project_name,
+ "silo": silo,
+ "asset": asset_name,
+ "subset": subset_name,
+ "version": version_number,
+ "representation": "TEMP"}
+
+ instance.data["assumedTemplateData"] = template_data
+ instance.data["template"] = template
diff --git a/config/plugins/global/publish/collect_comment.py b/config/plugins/global/publish/collect_comment.py
new file mode 100644
index 0000000000..a246b7eaba
--- /dev/null
+++ b/config/plugins/global/publish/collect_comment.py
@@ -0,0 +1,11 @@
+import pyblish.api
+
+
+class CollectColorbleedComment(pyblish.api.ContextPlugin):
+ """This plug-ins displays the comment dialog box per default"""
+
+ label = "Collect Comment"
+ order = pyblish.api.CollectorOrder
+
+ def process(self, context):
+ context.data["comment"] = ""
diff --git a/config/plugins/global/publish/collect_context_label.py b/config/plugins/global/publish/collect_context_label.py
new file mode 100644
index 0000000000..ec8e0f7cdc
--- /dev/null
+++ b/config/plugins/global/publish/collect_context_label.py
@@ -0,0 +1,22 @@
+import os
+import pyblish.api
+
+
+class CollectContextLabel(pyblish.api.ContextPlugin):
+ """Labelize context using the registered host and current file"""
+
+ order = pyblish.api.CollectorOrder + 0.25
+ label = "Context Label"
+
+ def process(self, context):
+
+ # Get last registered host
+ host = pyblish.api.registered_hosts()[-1]
+
+ # Get scene name from "currentFile"
+ path = context.data.get("currentFile") or ""
+ base = os.path.basename(path)
+
+ # Set label
+ label = "{host} - {scene}".format(host=host.title(), scene=base)
+ context.data["label"] = label
diff --git a/config/plugins/global/publish/collect_current_shell_file.py b/config/plugins/global/publish/collect_current_shell_file.py
new file mode 100644
index 0000000000..a467459bc8
--- /dev/null
+++ b/config/plugins/global/publish/collect_current_shell_file.py
@@ -0,0 +1,14 @@
+import os
+import pyblish.api
+
+
+class CollectCurrentShellFile(pyblish.api.ContextPlugin):
+ """Inject the current working file into context"""
+
+ order = pyblish.api.CollectorOrder - 0.5
+ label = "Current File"
+ hosts = ["shell"]
+
+ def process(self, context):
+ """Inject the current working file"""
+ context.data["currentFile"] = os.path.join(os.getcwd(), "")
diff --git a/config/plugins/global/publish/collect_deadline_user.py b/config/plugins/global/publish/collect_deadline_user.py
new file mode 100644
index 0000000000..4f7af94419
--- /dev/null
+++ b/config/plugins/global/publish/collect_deadline_user.py
@@ -0,0 +1,52 @@
+import os
+import subprocess
+
+import pyblish.api
+
+CREATE_NO_WINDOW = 0x08000000
+
+
+def deadline_command(cmd):
+ # Find Deadline
+ path = os.environ.get("DEADLINE_PATH", None)
+ assert path is not None, "Variable 'DEADLINE_PATH' must be set"
+
+ executable = os.path.join(path, "deadlinecommand")
+ if os.name == "nt":
+ executable += ".exe"
+ assert os.path.exists(
+ executable), "Deadline executable not found at %s" % executable
+ assert cmd, "Must have a command"
+
+ query = (executable, cmd)
+
+ process = subprocess.Popen(query, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ universal_newlines=True,
+ creationflags=CREATE_NO_WINDOW)
+ out, err = process.communicate()
+
+ return out
+
+
+class CollectDeadlineUser(pyblish.api.ContextPlugin):
+ """Retrieve the local active Deadline user"""
+
+ order = pyblish.api.CollectorOrder + 0.499
+ label = "Deadline User"
+ hosts = ['maya', 'fusion']
+ families = ["colorbleed.renderlayer", "colorbleed.saver.deadline"]
+
+ def process(self, context):
+ """Inject the current working file"""
+
+ user = deadline_command("GetCurrentUserName").strip()
+
+ if not user:
+ self.log.warning("No Deadline user found. "
+ "Do you have Deadline installed?")
+ return
+
+ self.log.info("Found Deadline user: {}".format(user))
+ context.data['deadlineUser'] = user
+
diff --git a/config/plugins/global/publish/collect_filesequences.py b/config/plugins/global/publish/collect_filesequences.py
new file mode 100644
index 0000000000..9c2390d3dc
--- /dev/null
+++ b/config/plugins/global/publish/collect_filesequences.py
@@ -0,0 +1,184 @@
+import os
+import re
+import copy
+import json
+import pprint
+
+import pyblish.api
+from avalon import api
+
+
+def collect(root,
+ regex=None,
+ exclude_regex=None,
+ startFrame=None,
+ endFrame=None):
+ """Collect sequence collections in root"""
+
+ from avalon.vendor import clique
+
+ files = list()
+ for filename in os.listdir(root):
+
+ # Must have extension
+ ext = os.path.splitext(filename)[1]
+ if not ext:
+ continue
+
+ # Only files
+ if not os.path.isfile(os.path.join(root, filename)):
+ continue
+
+ # Include and exclude regex
+ if regex and not re.search(regex, filename):
+ continue
+ if exclude_regex and re.search(exclude_regex, filename):
+ continue
+
+ files.append(filename)
+
+ # Match collections
+ # Support filenames like: projectX_shot01_0010.tiff with this regex
+ pattern = r"(?P(?P0*)\d+)\.\D+\d?$"
+ collections, remainder = clique.assemble(files,
+ patterns=[pattern],
+ minimum_items=1)
+
+ # Ignore any remainders
+ if remainder:
+ print("Skipping remainder {}".format(remainder))
+
+ # Exclude any frames outside start and end frame.
+ for collection in collections:
+ for index in list(collection.indexes):
+ if startFrame is not None and index < startFrame:
+ collection.indexes.discard(index)
+ continue
+ if endFrame is not None and index > endFrame:
+ collection.indexes.discard(index)
+ continue
+
+ # Keep only collections that have at least a single frame
+ collections = [c for c in collections if c.indexes]
+
+ return collections
+
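+# A minimal usage sketch of collect() (hypothetical paths and filenames):
+#
+#   collections = collect("/renders/shot01", regex=r"\.exr$", startFrame=1001)
+#   for collection in collections:
+#       print(collection)  # prints roughly "beauty.%04d.exr [1001-1100]"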
+
+class CollectFileSequences(pyblish.api.ContextPlugin):
+ """Gather file sequences from working directory
+
+ When "FILESEQUENCE" environment variable is set these paths (folders or
+ .json files) are parsed for image sequences. Otherwise the current
+ working directory is searched for file sequences.
+
+ The json configuration may have the optional keys:
+        asset (str): The asset to publish to. If not provided, falls back to
+            api.Session["AVALON_ASSET"]
+        subset (str): The subset to publish to. If not provided, the
+            sequence's head (up to the frame number) will be used.
+ startFrame (int): The start frame for the sequence
+ endFrame (int): The end frame for the sequence
+ root (str): The path to collect from (can be relative to the .json)
+ regex (str): A regex for the sequence filename
+ exclude_regex (str): A regex for filename to exclude from collection
+ metadata (dict): Custom metadata for instance.data["metadata"]
+
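+    Example .json (hypothetical values, shown only to illustrate the keys):
+
+        {
+            "asset": "hero",
+            "subset": "renderMain",
+            "startFrame": 1001,
+            "endFrame": 1100,
+            "root": "renders",
+            "regex": "\\.exr$"
+        }
+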
+ """
+
+ order = pyblish.api.CollectorOrder
+ targets = ["filesequence"]
+ label = "File Sequences"
+
+ def process(self, context):
+
+ if os.environ.get("FILESEQUENCE"):
+ paths = os.environ["FILESEQUENCE"].split(os.pathsep)
+ else:
+            cwd = context.data.get("workspaceDir", os.getcwd())
+ paths = [cwd]
+
+ for path in paths:
+
+ self.log.info("Loading: {}".format(path))
+
+ if path.endswith(".json"):
+ # Search using .json configuration
+ with open(path, "r") as f:
+ try:
+ data = json.load(f)
+ except Exception as exc:
+ self.log.error("Error loading json: "
+ "{} - Exception: {}".format(path, exc))
+ raise
+
+ cwd = os.path.dirname(path)
+ root_override = data.get("root")
+ if root_override:
+ if os.path.isabs(root_override):
+ root = root_override
+ else:
+ root = os.path.join(cwd, root_override)
+ else:
+ root = cwd
+
+ else:
+ # Search in directory
+ data = dict()
+ root = path
+
+ self.log.info("Collecting: {}".format(root))
+ regex = data.get("regex")
+ if regex:
+ self.log.info("Using regex: {}".format(regex))
+
+ collections = collect(root=root,
+ regex=regex,
+ exclude_regex=data.get("exclude_regex"),
+ startFrame=data.get("startFrame"),
+ endFrame=data.get("endFrame"))
+
+ self.log.info("Found collections: {}".format(collections))
+
+ if data.get("subset"):
+ # If subset is provided for this json then it must be a single
+ # collection.
+ if len(collections) > 1:
+ self.log.error("Forced subset can only work with a single "
+ "found sequence")
+ raise RuntimeError("Invalid sequence")
+
+ # Get family from the data
+ families = data.get("families", ["colorbleed.imagesequence"])
+ assert isinstance(families, (list, tuple)), "Must be iterable"
+ assert families, "Must have at least a single family"
+
+ for collection in collections:
+ instance = context.create_instance(str(collection))
+ self.log.info("Collection: %s" % list(collection))
+
+ # Ensure each instance gets a unique reference to the data
+ data = copy.deepcopy(data)
+
+ # If no subset provided, get it from collection's head
+ subset = data.get("subset", collection.head.rstrip("_. "))
+
+ # If no start or end frame provided, get it from collection
+ indices = list(collection.indexes)
+ start = data.get("startFrame", indices[0])
+ end = data.get("endFrame", indices[-1])
+
+ instance.data.update({
+ "name": str(collection),
+ "family": families[0], # backwards compatibility / pyblish
+ "families": list(families),
+ "subset": subset,
+ "asset": data.get("asset", api.Session["AVALON_ASSET"]),
+ "stagingDir": root,
+ "files": [list(collection)],
+ "startFrame": start,
+ "endFrame": end
+ })
+ instance.append(collection)
+
+ self.log.debug("Collected instance:\n"
+ "{}".format(pprint.pformat(instance.data)))
diff --git a/config/plugins/global/publish/collect_shell_workspace.py b/config/plugins/global/publish/collect_shell_workspace.py
new file mode 100644
index 0000000000..566b348ab9
--- /dev/null
+++ b/config/plugins/global/publish/collect_shell_workspace.py
@@ -0,0 +1,14 @@
+import os
+import pyblish.api
+
+
+class CollectShellWorkspace(pyblish.api.ContextPlugin):
+ """Inject the current workspace into context"""
+
+ order = pyblish.api.CollectorOrder - 0.5
+ label = "Shell Workspace"
+
+ hosts = ["shell"]
+
+ def process(self, context):
+ context.data["workspaceDir"] = os.getcwd()
diff --git a/config/plugins/global/publish/collect_time.py b/config/plugins/global/publish/collect_time.py
new file mode 100644
index 0000000000..d4fa658425
--- /dev/null
+++ b/config/plugins/global/publish/collect_time.py
@@ -0,0 +1,12 @@
+import pyblish.api
+from avalon import api
+
+
+class CollectMindbenderTime(pyblish.api.ContextPlugin):
+ """Store global time at the time of publish"""
+
+ label = "Collect Current Time"
+ order = pyblish.api.CollectorOrder
+
+ def process(self, context):
+ context.data["time"] = api.time()
diff --git a/config/plugins/global/publish/integrate.py b/config/plugins/global/publish/integrate.py
new file mode 100644
index 0000000000..be9c46fbfe
--- /dev/null
+++ b/config/plugins/global/publish/integrate.py
@@ -0,0 +1,349 @@
+import os
+import logging
+import shutil
+
+import errno
+import pyblish.api
+from avalon import api, io
+
+
+log = logging.getLogger(__name__)
+
+
+class IntegrateAsset(pyblish.api.InstancePlugin):
+ """Resolve any dependency issies
+
+ This plug-in resolves any paths which, if not updated might break
+ the published file.
+
+ The order of families is important, when working with lookdev you want to
+ first publish the texture, update the texture paths in the nodes and then
+ publish the shading network. Same goes for file dependent assets.
+ """
+
+ label = "Integrate Asset"
+ order = pyblish.api.IntegratorOrder
+ families = ["colorbleed.animation",
+ "colorbleed.camera",
+ "colorbleed.imagesequence",
+ "colorbleed.look",
+ "config.apps.mayaAscii",
+ "colorbleed.model",
+ "colorbleed.pointcache",
+ "colorbleed.setdress",
+ "colorbleed.rig",
+ "colorbleed.vrayproxy",
+ "colorbleed.yetiRig",
+ "colorbleed.yeticache"]
+
+ def process(self, instance):
+
+ self.register(instance)
+
+ self.log.info("Integrating Asset in to the database ...")
+ self.integrate(instance)
+
+ def register(self, instance):
+
+ # Required environment variables
+ PROJECT = api.Session["AVALON_PROJECT"]
+ ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
+ LOCATION = api.Session["AVALON_LOCATION"]
+
+ context = instance.context
+ # Atomicity
+ #
+ # Guarantee atomic publishes - each asset contains
+ # an identical set of members.
+ # __
+ # / o
+ # / \
+ # | o |
+ # \ /
+ # o __/
+ #
+ assert all(result["success"] for result in context.data["results"]), (
+ "Atomicity not held, aborting.")
+
+ # Assemble
+ #
+ # |
+ # v
+ # ---> <----
+ # ^
+ # |
+ #
+ stagingdir = instance.data.get("stagingDir")
+ assert stagingdir, ("Incomplete instance \"%s\": "
+ "Missing reference to staging area." % instance)
+
+        # TODO: check that stagingDir actually exists and is available
+
+ self.log.debug("Establishing staging directory @ %s" % stagingdir)
+
+ project = io.find_one({"type": "project"},
+ projection={"config.template.publish": True})
+
+ asset = io.find_one({"type": "asset",
+ "name": ASSET,
+ "parent": project["_id"]})
+
+ assert all([project, asset]), ("Could not find current project or "
+ "asset '%s'" % ASSET)
+
+ subset = self.get_subset(asset, instance)
+
+ # get next version
+ latest_version = io.find_one({"type": "version",
+ "parent": subset["_id"]},
+ {"name": True},
+ sort=[("name", -1)])
+
+ next_version = 1
+ if latest_version is not None:
+ next_version += latest_version["name"]
+
+ self.log.info("Verifying version from assumed destination")
+
+ assumed_data = instance.data["assumedTemplateData"]
+ assumed_version = assumed_data["version"]
+ if assumed_version != next_version:
+ raise AttributeError("Assumed version 'v{0:03d}' does not match"
+ "next version in database "
+ "('v{1:03d}')".format(assumed_version,
+ next_version))
+
+ self.log.debug("Next version: v{0:03d}".format(next_version))
+
+ version_data = self.create_version_data(context, instance)
+ version = self.create_version(subset=subset,
+ version_number=next_version,
+ locations=[LOCATION],
+ data=version_data)
+
+ self.log.debug("Creating version ...")
+ version_id = io.insert_one(version).inserted_id
+
+ # Write to disk
+ # _
+ # | |
+ # _| |_
+ # ____\ /
+ # |\ \ / \
+ # \ \ v \
+ # \ \________.
+ # \|________|
+ #
+ root = api.registered_root()
+ template_data = {"root": root,
+ "project": PROJECT,
+ "silo": asset['silo'],
+ "asset": ASSET,
+ "subset": subset["name"],
+ "version": version["name"]}
+
+ template_publish = project["config"]["template"]["publish"]
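+        # With a hypothetical publish template such as
+        #   "{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/{representation}"
+        # template_publish.format(**template_data) resolves to the target
+        # folder once a "representation" key is added per file below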
+
+ # Find the representations to transfer amongst the files
+ # Each should be a single representation (as such, a single extension)
+ representations = []
+
+ for files in instance.data["files"]:
+
+ # Collection
+ # _______
+ # |______|\
+ # | |\|
+ # | ||
+ # | ||
+ # | ||
+ # |_______|
+ #
+ if isinstance(files, list):
+ collection = files
+ # Assert that each member has identical suffix
+ _, ext = os.path.splitext(collection[0])
+ assert all(ext == os.path.splitext(name)[1]
+ for name in collection), (
+ "Files had varying suffixes, this is a bug"
+ )
+
+ assert not any(os.path.isabs(name) for name in collection)
+
+ template_data["representation"] = ext[1:]
+
+ for fname in collection:
+
+ src = os.path.join(stagingdir, fname)
+ dst = os.path.join(
+ template_publish.format(**template_data),
+ fname
+ )
+
+ instance.data["transfers"].append([src, dst])
+
+ else:
+ # Single file
+ # _______
+ # | |\
+ # | |
+ # | |
+ # | |
+ # |_______|
+ #
+ fname = files
+ assert not os.path.isabs(fname), (
+ "Given file name is a full path"
+ )
+ _, ext = os.path.splitext(fname)
+
+ template_data["representation"] = ext[1:]
+
+ src = os.path.join(stagingdir, fname)
+ dst = template_publish.format(**template_data)
+
+ instance.data["transfers"].append([src, dst])
+
+ representation = {
+ "schema": "avalon-core:representation-2.0",
+ "type": "representation",
+ "parent": version_id,
+ "name": ext[1:],
+ "data": {},
+ "dependencies": instance.data.get("dependencies", "").split(),
+
+ # Imprint shortcut to context
+ # for performance reasons.
+ "context": {
+ "project": PROJECT,
+ "asset": ASSET,
+ "silo": asset['silo'],
+ "subset": subset["name"],
+ "version": version["name"],
+ "representation": ext[1:]
+ }
+ }
+ representations.append(representation)
+
+ self.log.info("Registering {} items".format(len(representations)))
+
+ io.insert_many(representations)
+
+ def integrate(self, instance):
+ """Move the files
+
+ Through `instance.data["transfers"]`
+
+ Args:
+ instance: the instance to integrate
+ """
+
+ transfers = instance.data["transfers"]
+
+ for src, dest in transfers:
+ self.log.info("Copying file .. {} -> {}".format(src, dest))
+ self.copy_file(src, dest)
+
+ def copy_file(self, src, dst):
+ """ Copy given source to destination
+
+ Arguments:
+ src (str): the source file which needs to be copied
+ dst (str): the destination of the sourc file
+ Returns:
+ None
+ """
+
+ dirname = os.path.dirname(dst)
+ try:
+ os.makedirs(dirname)
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ pass
+ else:
+ self.log.critical("An unexpected error occurred.")
+ raise
+
+ shutil.copy(src, dst)
+
+ def get_subset(self, asset, instance):
+
+ subset = io.find_one({"type": "subset",
+ "parent": asset["_id"],
+ "name": instance.data["subset"]})
+
+ if subset is None:
+ subset_name = instance.data["subset"]
+ self.log.info("Subset '%s' not found, creating.." % subset_name)
+
+ _id = io.insert_one({
+ "schema": "avalon-core:subset-2.0",
+ "type": "subset",
+ "name": subset_name,
+ "data": {},
+ "parent": asset["_id"]
+ }).inserted_id
+
+ subset = io.find_one({"_id": _id})
+
+ return subset
+
+ def create_version(self, subset, version_number, locations, data=None):
+ """ Copy given source to destination
+
+ Args:
+ subset (dict): the registered subset of the asset
+ version_number (int): the version number
+ locations (list): the currently registered locations
+
+ Returns:
+ dict: collection of data to create a version
+ """
+ # Imprint currently registered location
+ version_locations = [location for location in locations if
+ location is not None]
+
+ return {"schema": "avalon-core:version-2.0",
+ "type": "version",
+ "parent": subset["_id"],
+ "name": version_number,
+ "locations": version_locations,
+ "data": data}
+
+ def create_version_data(self, context, instance):
+ """Create the data collection for the version
+
+ Args:
+ context: the current context
+ instance: the current instance being published
+
+ Returns:
+            dict: the version data collected from the context and instance
+ """
+
+ families = []
+ current_families = instance.data.get("families", list())
+ instance_family = instance.data.get("family", None)
+
+ if instance_family is not None:
+ families.append(instance_family)
+ families += current_families
+
+ # create relative source path for DB
+ relative_path = os.path.relpath(context.data["currentFile"],
+ api.registered_root())
+ source = os.path.join("{root}", relative_path).replace("\\", "/")
+
+ version_data = {"families": families,
+ "time": context.data["time"],
+ "author": context.data["user"],
+ "source": source,
+ "comment": context.data.get("comment")}
+
+ # Include optional data if present in
+ optionals = ["startFrame", "endFrame", "step", "handles"]
+ for key in optionals:
+ if key in instance.data:
+ version_data[key] = instance.data[key]
+
+ return version_data
diff --git a/config/plugins/global/publish/submit_publish_job.py b/config/plugins/global/publish/submit_publish_job.py
new file mode 100644
index 0000000000..34a09c9b81
--- /dev/null
+++ b/config/plugins/global/publish/submit_publish_job.py
@@ -0,0 +1,316 @@
+import os
+import json
+import pprint
+import re
+
+from avalon import api, io
+from avalon.vendor import requests, clique
+
+import pyblish.api
+
+
+def _get_script():
+ """Get path to the image sequence script"""
+ try:
+ from colorbleed.scripts import publish_filesequence
+    except ImportError:
+        raise RuntimeError("Expected module 'publish_filesequence' "
+                           "to be available")
+
+ module_path = publish_filesequence.__file__
+ if module_path.endswith(".pyc"):
+ module_path = module_path[:-len(".pyc")] + ".py"
+
+ return module_path
+
+
+# Logic to retrieve latest files concerning extendFrames
+def get_latest_version(asset_name, subset_name, family):
+ # Get asset
+    asset = io.find_one({"type": "asset",
+                         "name": asset_name},
+                        projection={"name": True})
+
+    subset = io.find_one({"type": "subset",
+                          "name": subset_name,
+                          "parent": asset["_id"]},
+ projection={"_id": True, "name": True})
+
+    # Check if the subset actually exists (pre-run check)
+    assert subset, "No subset found, please publish with `extendFrames` off"
+
+ # Get version
+ version_projection = {"name": True,
+ "data.startFrame": True,
+ "data.endFrame": True,
+ "parent": True}
+
+ version = io.find_one({"type": "version",
+ "parent": subset["_id"],
+ "data.families": family},
+ projection=version_projection,
+ sort=[("name", -1)])
+
+ assert version, "No version found, this is a bug"
+
+ return version
+
+
+def get_resources(version, extension=None):
+    """Get the files from the given version"""
+ query = {"type": "representation", "parent": version["_id"]}
+ if extension:
+ query["name"] = extension
+
+ representation = io.find_one(query)
+ assert representation, "This is a bug"
+
+ directory = api.get_representation_path(representation)
+ print("Source: ", directory)
+ resources = sorted([os.path.normpath(os.path.join(directory, fname))
+ for fname in os.listdir(directory)])
+
+ return resources
+
+
+def get_resource_files(resources, frame_range, override=True):
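+    """Collect the files from an existing version to extend the frame range
+
+    Args:
+        resources (list): file paths that are assumed to form one sequence
+        frame_range (iterable): frame numbers that will be re-rendered
+        override (bool): when True, frames in `frame_range` are discarded
+            from the collection so the new render can replace them
+
+    Returns:
+        list: the remaining file paths of the collection
+    """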
+
+ res_collections, _ = clique.assemble(resources)
+ assert len(res_collections) == 1, "Multiple collections found"
+ res_collection = res_collections[0]
+
+    # When overriding, drop the frames that will be re-rendered so they
+    # are not copied from the previous version
+ if override:
+ for frame in frame_range:
+ if frame not in res_collection.indexes:
+ continue
+ res_collection.indexes.remove(frame)
+
+ return list(res_collection)
+
+
+class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
+ """Submit image sequence publish jobs to Deadline.
+
+ These jobs are dependent on a deadline job submission prior to this
+ plug-in.
+
+ Renders are submitted to a Deadline Web Service as
+ supplied via the environment variable AVALON_DEADLINE
+
+ Options in instance.data:
+ - deadlineSubmission (dict, Required): The returned .json
+ data from the job submission to deadline.
+
+ - outputDir (str, Required): The output directory where the metadata
+ file should be generated. It's assumed that this will also be
+            the final folder containing the output files.
+
+ - ext (str, Optional): The extension (including `.`) that is required
+ in the output filename to be picked up for image sequence
+ publishing.
+
+ - publishJobState (str, Optional): "Active" or "Suspended"
+ This defaults to "Suspended"
+
+ This requires a "startFrame" and "endFrame" to be present in instance.data
+ or in context.data.
+
+ """
+
+ label = "Submit image sequence jobs to Deadline"
+ order = pyblish.api.IntegratorOrder + 0.1
+ hosts = ["fusion", "maya"]
+ families = ["colorbleed.saver.deadline", "colorbleed.renderlayer"]
+
+ def process(self, instance):
+
+ AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+ "http://localhost:8082")
+ assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+
+ # Get a submission job
+ job = instance.data.get("deadlineSubmissionJob")
+ if not job:
+ raise RuntimeError("Can't continue without valid deadline "
+ "submission prior to this plug-in.")
+
+ data = instance.data.copy()
+ subset = data["subset"]
+ state = data.get("publishJobState", "Suspended")
+ job_name = "{batch} - {subset} [publish image sequence]".format(
+ batch=job["Props"]["Name"],
+ subset=subset
+ )
+
+ # Add in start/end frame
+ context = instance.context
+ start = instance.data.get("startFrame", context.data["startFrame"])
+ end = instance.data.get("endFrame", context.data["endFrame"])
+ resources = []
+
+ # Add in regex for sequence filename
+        # This assumes the output files start with the subset name and end
+        # with a file extension.
+ if "ext" in instance.data:
+ ext = re.escape(instance.data["ext"])
+ else:
+ ext = "\.\D+"
+
+ regex = "^{subset}.*\d+{ext}$".format(subset=re.escape(subset),
+ ext=ext)
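+        # With subset "renderMain" and ext ".exr" (hypothetical values) this
+        # becomes "^renderMain.*\d+\.exr$", matching e.g. "renderMain.1001.exr"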
+
+ # Write metadata for publish job
+ render_job = data.pop("deadlineSubmissionJob")
+ metadata = {
+ "regex": regex,
+ "startFrame": start,
+ "endFrame": end,
+ "families": ["colorbleed.imagesequence"],
+
+ # Optional metadata (for debugging)
+ "metadata": {
+ "instance": data,
+ "job": job,
+ "session": api.Session.copy()
+ }
+ }
+
+ # Ensure output dir exists
+ output_dir = instance.data["outputDir"]
+ if not os.path.isdir(output_dir):
+ os.makedirs(output_dir)
+
+ if data.get("extendFrames", False):
+
+ family = "colorbleed.imagesequence"
+ override = data["overrideExistingFrame"]
+
+ out_file = render_job.get("OutFile")
+ if not out_file:
+ raise RuntimeError("OutFile not found in render job!")
+
+ extension = os.path.splitext(out_file[0])[1]
+ _ext = extension[1:]
+
+ # Frame comparison
+ prev_start = None
+ prev_end = None
+ resource_range = range(int(start), int(end)+1)
+
+ # Gather all the subset files (one subset per render pass!)
+ subset_names = [data["subset"]]
+ subset_names.extend(data.get("renderPasses", []))
+
+ for subset_name in subset_names:
+ version = get_latest_version(asset_name=data["asset"],
+ subset_name=subset_name,
+ family=family)
+
+ # Set prev start / end frames for comparison
+                if prev_start is None and prev_end is None:
+ prev_start = version["data"]["startFrame"]
+ prev_end = version["data"]["endFrame"]
+
+ subset_resources = get_resources(version, _ext)
+ resource_files = get_resource_files(subset_resources,
+ resource_range,
+ override)
+
+ resources.extend(resource_files)
+
+ updated_start = min(start, prev_start)
+ updated_end = max(end, prev_end)
+
+ # Update metadata and instance start / end frame
+ self.log.info("Updating start / end frame : "
+ "{} - {}".format(updated_start, updated_end))
+
+ # TODO : Improve logic to get new frame range for the
+ # publish job (publish_filesequence.py)
+ # The current approach is not following Pyblish logic which is based
+ # on Collect / Validate / Extract.
+
+ # ---- Collect Plugins ---
+ # Collect Extend Frames - Only run if extendFrames is toggled
+ # # # Store in instance:
+ # # # Previous rendered files per subset based on frames
+ # # # --> Add to instance.data[resources]
+ # # # Update publish frame range
+
+ # ---- Validate Plugins ---
+ # Validate Extend Frames
+ # # # Check if instance has the requirements to extend frames
+        # There might be more requirements worth adding to this list;
+        # please do so when fixing this.
+
+ # Start frame
+ metadata["startFrame"] = updated_start
+ metadata["metadata"]["instance"]["startFrame"] = updated_start
+
+ # End frame
+ metadata["endFrame"] = updated_end
+ metadata["metadata"]["instance"]["endFrame"] = updated_end
+
+ metadata_filename = "{}_metadata.json".format(subset)
+ metadata_path = os.path.join(output_dir, metadata_filename)
+ with open(metadata_path, "w") as f:
+ json.dump(metadata, f, indent=4, sort_keys=True)
+
+ # Generate the payload for Deadline submission
+ payload = {
+ "JobInfo": {
+ "Plugin": "Python",
+ "BatchName": job["Props"]["Batch"],
+ "Name": job_name,
+ "JobType": "Normal",
+ "JobDependency0": job["_id"],
+ "UserName": job["Props"]["User"],
+ "Comment": instance.context.data.get("comment", ""),
+ "InitialStatus": state
+ },
+ "PluginInfo": {
+ "Version": "3.6",
+ "ScriptFile": _get_script(),
+ "Arguments": '--path "{}"'.format(metadata_path),
+ "SingleFrameOnly": "True"
+ },
+
+ # Mandatory for Deadline, may be empty
+ "AuxFiles": []
+ }
+
+ # Transfer the environment from the original job to this dependent
+ # job so they use the same environment
+ environment = job["Props"].get("Env", {})
+ payload["JobInfo"].update({
+ "EnvironmentKeyValue%d" % index: "{key}={value}".format(
+ key=key,
+ value=environment[key]
+ ) for index, key in enumerate(environment)
+ })
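+        # This produces entries such as (hypothetical):
+        #   "EnvironmentKeyValue0": "AVALON_PROJECT=myproject"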
+
+ self.log.info("Submitting..")
+ self.log.info(json.dumps(payload, indent=4, sort_keys=True))
+
+ url = "{}/api/jobs".format(AVALON_DEADLINE)
+ response = requests.post(url, json=payload)
+ if not response.ok:
+ raise Exception(response.text)
+
+ # Copy files from previous render if extendFrame is True
+ if data.get("extendFrames", False):
+
+ self.log.info("Preparing to copy ..")
+ import shutil
+
+ dest_path = data["outputDir"]
+ for source in resources:
+ src_file = os.path.basename(source)
+ dest = os.path.join(dest_path, src_file)
+ shutil.copy(source, dest)
+
+ self.log.info("Finished copying %i files" % len(resources))
diff --git a/config/plugins/global/publish/validate_file_saved.py b/config/plugins/global/publish/validate_file_saved.py
new file mode 100644
index 0000000000..33d8ce0457
--- /dev/null
+++ b/config/plugins/global/publish/validate_file_saved.py
@@ -0,0 +1,15 @@
+import pyblish.api
+
+
+class ValidateCurrentSaveFile(pyblish.api.ContextPlugin):
+ """File must be saved before publishing"""
+
+ label = "Validate File Saved"
+ order = pyblish.api.ValidatorOrder - 0.1
+ hosts = ["maya", "houdini"]
+
+ def process(self, context):
+
+ current_file = context.data["currentFile"]
+ if not current_file:
+ raise RuntimeError("File not saved")
diff --git a/config/plugins/global/publish/validate_sequence_frames.py b/config/plugins/global/publish/validate_sequence_frames.py
new file mode 100644
index 0000000000..bd14b4c0c3
--- /dev/null
+++ b/config/plugins/global/publish/validate_sequence_frames.py
@@ -0,0 +1,34 @@
+import pyblish.api
+
+
+class ValidateSequenceFrames(pyblish.api.InstancePlugin):
+ """Ensure the sequence of frames is complete
+
+ The files found in the folder are checked against the startFrame and
+    endFrame of the instance. If the first or last file does not correspond
+    to the first or last frame, the instance is flagged as invalid.
+ """
+
+ order = pyblish.api.ValidatorOrder
+ label = "Validate Sequence Frames"
+ families = ["colorbleed.imagesequence"]
+ hosts = ["shell"]
+
+ def process(self, instance):
+
+ collection = instance[0]
+ self.log.info(collection)
+
+ frames = list(collection.indexes)
+
+ current_range = (frames[0], frames[-1])
+ required_range = (instance.data["startFrame"],
+ instance.data["endFrame"])
+
+ if current_range != required_range:
+ raise ValueError("Invalid frame range: {0} - "
+ "expected: {1}".format(current_range,
+ required_range))
+
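+        # clique's holes() returns the indexes missing from the collection,
+        # e.g. frames {1, 2, 4} would report {3} as missing (hypothetical)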
+ missing = collection.holes().indexes
+ assert not missing, "Missing frames: %s" % (missing,)
diff --git a/config/plugins/maya/create/colorbleed_animation.py b/config/plugins/maya/create/colorbleed_animation.py
new file mode 100644
index 0000000000..1759343858
--- /dev/null
+++ b/config/plugins/maya/create/colorbleed_animation.py
@@ -0,0 +1,36 @@
+from collections import OrderedDict
+
+import avalon.maya
+from config.apps.maya import lib
+
+
+class CreateAnimation(avalon.maya.Creator):
+ """Animation output for character rigs"""
+
+ name = "animationDefault"
+ label = "Animation"
+ family = "colorbleed.animation"
+ icon = "male"
+
+ def __init__(self, *args, **kwargs):
+ super(CreateAnimation, self).__init__(*args, **kwargs)
+
+ # create an ordered dict with the existing data first
+ data = OrderedDict(**self.data)
+
+ # get basic animation data : start / end / handles / steps
+ for key, value in lib.collect_animation_data().items():
+ data[key] = value
+
+ # Write vertex colors with the geometry.
+ data["writeColorSets"] = False
+
+ # Include only renderable visible shapes.
+ # Skips locators and empty transforms
+ data["renderableOnly"] = False
+
+ # Include only nodes that are visible at least once during the
+ # frame range.
+ data["visibleOnly"] = False
+
+ self.data = data
\ No newline at end of file
diff --git a/config/plugins/maya/create/colorbleed_camera.py b/config/plugins/maya/create/colorbleed_camera.py
new file mode 100644
index 0000000000..31f14cf272
--- /dev/null
+++ b/config/plugins/maya/create/colorbleed_camera.py
@@ -0,0 +1,27 @@
+from collections import OrderedDict
+import avalon.maya
+from config.apps.maya import lib
+
+
+class CreateCamera(avalon.maya.Creator):
+ """Single baked camera"""
+
+ name = "cameraDefault"
+ label = "Camera"
+ family = "colorbleed.camera"
+ icon = "video-camera"
+
+ def __init__(self, *args, **kwargs):
+ super(CreateCamera, self).__init__(*args, **kwargs)
+
+ # get basic animation data : start / end / handles / steps
+ data = OrderedDict(**self.data)
+ animation_data = lib.collect_animation_data()
+ for key, value in animation_data.items():
+ data[key] = value
+
+ # Bake to world space by default, when this is False it will also
+ # include the parent hierarchy in the baked results
+ data['bakeToWorldSpace'] = True
+
+ self.data = data
diff --git a/config/plugins/maya/create/colorbleed_look.py b/config/plugins/maya/create/colorbleed_look.py
new file mode 100644
index 0000000000..7a14694fc5
--- /dev/null
+++ b/config/plugins/maya/create/colorbleed_look.py
@@ -0,0 +1,20 @@
+from collections import OrderedDict
+import avalon.maya
+from config.apps.maya import lib
+
+
+class CreateLook(avalon.maya.Creator):
+ """Shader connections defining shape look"""
+
+ name = "look"
+ label = "Look"
+ family = "colorbleed.look"
+ icon = "paint-brush"
+
+ def __init__(self, *args, **kwargs):
+ super(CreateLook, self).__init__(*args, **kwargs)
+
+ data = OrderedDict(**self.data)
+ data["renderlayer"] = lib.get_current_renderlayer()
+
+ self.data = data
diff --git a/config/plugins/maya/create/colorbleed_mayaascii.py b/config/plugins/maya/create/colorbleed_mayaascii.py
new file mode 100644
index 0000000000..57738af769
--- /dev/null
+++ b/config/plugins/maya/create/colorbleed_mayaascii.py
@@ -0,0 +1,10 @@
+import avalon.maya
+
+
+class CreateMayaAscii(avalon.maya.Creator):
+ """Raw Maya Ascii file export"""
+
+ name = "mayaAscii"
+ label = "Maya Ascii"
+ family = "config.apps.mayaAscii"
+ icon = "file-archive-o"
diff --git a/config/plugins/maya/create/colorbleed_model.py b/config/plugins/maya/create/colorbleed_model.py
new file mode 100644
index 0000000000..b55b3dc3dd
--- /dev/null
+++ b/config/plugins/maya/create/colorbleed_model.py
@@ -0,0 +1,10 @@
+import avalon.maya
+
+
+class CreateModel(avalon.maya.Creator):
+ """Polygonal static geometry"""
+
+ name = "modelDefault"
+ label = "Model"
+ family = "colorbleed.model"
+ icon = "cube"
diff --git a/config/plugins/maya/create/colorbleed_pointcache.py b/config/plugins/maya/create/colorbleed_pointcache.py
new file mode 100644
index 0000000000..87e5abb214
--- /dev/null
+++ b/config/plugins/maya/create/colorbleed_pointcache.py
@@ -0,0 +1,36 @@
+from collections import OrderedDict
+
+import avalon.maya
+from config.apps.maya import lib
+
+
+class CreatePointCache(avalon.maya.Creator):
+ """Alembic pointcache for animated data"""
+
+ name = "pointcache"
+ label = "Point Cache"
+ family = "colorbleed.pointcache"
+ icon = "gears"
+
+ def __init__(self, *args, **kwargs):
+ super(CreatePointCache, self).__init__(*args, **kwargs)
+
+ # create an ordered dict with the existing data first
+ data = OrderedDict(**self.data)
+
+ # get basic animation data : start / end / handles / steps
+ for key, value in lib.collect_animation_data().items():
+ data[key] = value
+
+ # Write vertex colors with the geometry.
+ data["writeColorSets"] = False
+
+ # Include only renderable visible shapes.
+ # Skips locators and empty transforms
+ data["renderableOnly"] = False
+
+ # Include only nodes that are visible at least once during the
+ # frame range.
+ data["visibleOnly"] = False
+
+ self.data = data
\ No newline at end of file
diff --git a/config/plugins/maya/create/colorbleed_renderglobals.py b/config/plugins/maya/create/colorbleed_renderglobals.py
new file mode 100644
index 0000000000..1d12d9fe9d
--- /dev/null
+++ b/config/plugins/maya/create/colorbleed_renderglobals.py
@@ -0,0 +1,66 @@
+from collections import OrderedDict
+
+from maya import cmds
+
+from avalon.vendor import requests
+import avalon.maya
+from avalon import api
+
+
+class CreateRenderGlobals(avalon.maya.Creator):
+
+ label = "Render Globals"
+ family = "colorbleed.renderglobals"
+ icon = "gears"
+
+ def __init__(self, *args, **kwargs):
+ super(CreateRenderGlobals, self).__init__(*args, **kwargs)
+
+ # We won't be publishing this one
+ self.data["id"] = "avalon.renderglobals"
+
+ # get pools
+ AVALON_DEADLINE = api.Session["AVALON_DEADLINE"]
+ argument = "{}/api/pools?NamesOnly=true".format(AVALON_DEADLINE)
+ response = requests.get(argument)
+ if not response.ok:
+ self.log.warning("No pools retrieved")
+ pools = []
+ else:
+ pools = response.json()
+
+ # We don't need subset or asset attributes
+ self.data.pop("subset", None)
+ self.data.pop("asset", None)
+ self.data.pop("active", None)
+
+ data = OrderedDict(**self.data)
+
+ data["suspendPublishJob"] = False
+ data["extendFrames"] = False
+ data["overrideExistingFrame"] = True
+ data["useLegacyRenderLayers"] = True
+ data["priority"] = 50
+ data["framesPerTask"] = 1
+ data["whitelist"] = False
+ data["machineList"] = ""
+ data["useMayaBatch"] = True
+ data["primaryPool"] = pools
+ # We add a string "-" to allow the user to not set any secondary pools
+ data["secondaryPool"] = ["-"] + pools
+
+ self.data = data
+ self.options = {"useSelection": False} # Force no content
+
+ def process(self):
+
+ exists = cmds.ls(self.name)
+ assert len(exists) <= 1, (
+ "More than one renderglobal exists, this is a bug")
+
+ if exists:
+ return cmds.warning("%s already exists." % exists[0])
+
+ super(CreateRenderGlobals, self).process()
+
+ cmds.setAttr("{}.machineList".format(self.name), lock=True)
diff --git a/config/plugins/maya/create/colorbleed_rig.py b/config/plugins/maya/create/colorbleed_rig.py
new file mode 100644
index 0000000000..6947aaac31
--- /dev/null
+++ b/config/plugins/maya/create/colorbleed_rig.py
@@ -0,0 +1,21 @@
+from maya import cmds
+
+import avalon.maya
+
+
+class CreateRig(avalon.maya.Creator):
+ """Artist-friendly rig with controls to direct motion"""
+
+ name = "rigDefault"
+ label = "Rig"
+ family = "colorbleed.rig"
+ icon = "wheelchair"
+
+ def process(self):
+ instance = super(CreateRig, self).process()
+
+ self.log.info("Creating Rig instance set up ...")
+
+ controls = cmds.sets(name="controls_SET", empty=True)
+ pointcache = cmds.sets(name="out_SET", empty=True)
+ cmds.sets([controls, pointcache], forceElement=instance)
diff --git a/config/plugins/maya/create/colorbleed_setdress.py b/config/plugins/maya/create/colorbleed_setdress.py
new file mode 100644
index 0000000000..47089bea21
--- /dev/null
+++ b/config/plugins/maya/create/colorbleed_setdress.py
@@ -0,0 +1,10 @@
+import avalon.maya
+
+
+class CreateSetDress(avalon.maya.Creator):
+ """A grouped package of loaded content"""
+
+ name = "setdress"
+ label = "Set Dress"
+ family = "colorbleed.setdress"
+ icon = "cubes"
\ No newline at end of file
diff --git a/config/plugins/maya/create/colorbleed_vrayproxy.py b/config/plugins/maya/create/colorbleed_vrayproxy.py
new file mode 100644
index 0000000000..e100c31bd0
--- /dev/null
+++ b/config/plugins/maya/create/colorbleed_vrayproxy.py
@@ -0,0 +1,23 @@
+from collections import OrderedDict
+
+import avalon.maya
+
+
+class CreateVrayProxy(avalon.maya.Creator):
+ """Alembic pointcache for animated data"""
+
+ name = "vrayproxy"
+ label = "VRay Proxy"
+ family = "colorbleed.vrayproxy"
+ icon = "gears"
+
+ def __init__(self, *args, **kwargs):
+ super(CreateVrayProxy, self).__init__(*args, **kwargs)
+
+ data = OrderedDict(**self.data)
+
+ data["animation"] = False
+ data["startFrame"] = 1
+ data["endFrame"] = 1
+
+ self.data.update(data)
diff --git a/config/plugins/maya/create/colorbleed_yeti_cache.py b/config/plugins/maya/create/colorbleed_yeti_cache.py
new file mode 100644
index 0000000000..3bafb7a5c7
--- /dev/null
+++ b/config/plugins/maya/create/colorbleed_yeti_cache.py
@@ -0,0 +1,25 @@
+from collections import OrderedDict
+
+import avalon.maya
+from config.apps.maya import lib
+
+
+class CreateYetiCache(avalon.maya.Creator):
+ """Output for procedural plugin nodes of Yeti """
+
+ name = "yetiDefault"
+ label = "Yeti Cache"
+ family = "colorbleed.yeticache"
+ icon = "pagelines"
+
+ def __init__(self, *args, **kwargs):
+ super(CreateYetiCache, self).__init__(*args, **kwargs)
+
+ data = OrderedDict(self.data)
+ data["peroll"] = 0
+
+ anim_data = lib.collect_animation_data()
+ data.update({"startFrame": anim_data["startFrame"],
+ "endFrame": anim_data["endFrame"]})
+
+ self.data = data
diff --git a/config/plugins/maya/create/colorbleed_yeti_rig.py b/config/plugins/maya/create/colorbleed_yeti_rig.py
new file mode 100644
index 0000000000..3b21c586a7
--- /dev/null
+++ b/config/plugins/maya/create/colorbleed_yeti_rig.py
@@ -0,0 +1,20 @@
+from maya import cmds
+
+import avalon.maya
+
+
+class CreateYetiRig(avalon.maya.Creator):
+ """Output for procedural plugin nodes ( Yeti / XGen / etc)"""
+
+ label = "Yeti Rig"
+ family = "colorbleed.yetiRig"
+ icon = "usb"
+
+ def process(self):
+
+ instance = super(CreateYetiRig, self).process()
+
+ self.log.info("Creating Rig instance set up ...")
+
+ input_meshes = cmds.sets(name="input_SET", empty=True)
+ cmds.sets(input_meshes, forceElement=instance)
diff --git a/config/plugins/maya/load/_load_animation.py b/config/plugins/maya/load/_load_animation.py
new file mode 100644
index 0000000000..35fd2e647a
--- /dev/null
+++ b/config/plugins/maya/load/_load_animation.py
@@ -0,0 +1,48 @@
+import config.apps.maya.plugin
+
+
+class AbcLoader(config.apps.maya.plugin.ReferenceLoader):
+ """Specific loader of Alembic for the avalon.animation family"""
+
+ families = ["colorbleed.animation",
+ "colorbleed.camera",
+ "colorbleed.pointcache"]
+ representations = ["abc"]
+
+ label = "Reference animation"
+ order = -10
+ icon = "code-fork"
+ color = "orange"
+
+ def process_reference(self, context, name, namespace, data):
+
+ import maya.cmds as cmds
+ from avalon import maya
+
+ cmds.loadPlugin("AbcImport.mll", quiet=True)
+ # Prevent identical alembic nodes from being shared
+ # Create unique namespace for the cameras
+
+ # Get name from asset being loaded
+ # Assuming name is subset name from the animation, we split the number
+ # suffix from the name to ensure the namespace is unique
+ name = name.split("_")[0]
+ namespace = maya.unique_namespace("{}_".format(name),
+ format="%03d",
+ suffix="_abc")
+
+ # hero_001 (abc)
+ # asset_counter{optional}
+
+ nodes = cmds.file(self.fname,
+ namespace=namespace,
+ sharedReferenceFile=False,
+ groupReference=True,
+ groupName="{}:{}".format(namespace, name),
+ reference=True,
+ returnNewNodes=True)
+
+        # Store the loaded nodes on the loader for containerization
+ self[:] = nodes
+
+ return nodes
diff --git a/config/plugins/maya/load/actions.py b/config/plugins/maya/load/actions.py
new file mode 100644
index 0000000000..440fabf124
--- /dev/null
+++ b/config/plugins/maya/load/actions.py
@@ -0,0 +1,147 @@
+"""A module containing generic loader actions that will display in the Loader.
+
+"""
+
+from avalon import api
+
+
+class SetFrameRangeLoader(api.Loader):
+ """Specific loader of Alembic for the avalon.animation family"""
+
+ families = ["colorbleed.animation",
+ "colorbleed.camera",
+ "colorbleed.pointcache"]
+ representations = ["abc"]
+
+ label = "Set frame range"
+ order = 11
+ icon = "clock-o"
+ color = "white"
+
+ def load(self, context, name, namespace, data):
+
+ import maya.cmds as cmds
+
+ version = context['version']
+ version_data = version.get("data", {})
+
+ start = version_data.get("startFrame", None)
+ end = version_data.get("endFrame", None)
+
+ if start is None or end is None:
+ print("Skipping setting frame range because start or "
+ "end frame data is missing..")
+ return
+
+ cmds.playbackOptions(minTime=start,
+ maxTime=end,
+ animationStartTime=start,
+ animationEndTime=end)
+
+
+class SetFrameRangeWithHandlesLoader(api.Loader):
+ """Specific loader of Alembic for the avalon.animation family"""
+
+ families = ["colorbleed.animation",
+ "colorbleed.camera",
+ "colorbleed.pointcache"]
+ representations = ["abc"]
+
+ label = "Set frame range (with handles)"
+ order = 12
+ icon = "clock-o"
+ color = "white"
+
+ def load(self, context, name, namespace, data):
+
+ import maya.cmds as cmds
+
+ version = context['version']
+ version_data = version.get("data", {})
+
+ start = version_data.get("startFrame", None)
+ end = version_data.get("endFrame", None)
+
+ if start is None or end is None:
+ print("Skipping setting frame range because start or "
+ "end frame data is missing..")
+ return
+
+ # Include handles
+ handles = version_data.get("handles", 0)
+ start -= handles
+ end += handles
+
+ cmds.playbackOptions(minTime=start,
+ maxTime=end,
+ animationStartTime=start,
+ animationEndTime=end)
+
+
+class ImportMayaLoader(api.Loader):
+ """Import action for Maya (unmanaged)
+
+ Warning:
+ The loaded content will be unmanaged and is *not* visible in the
+ scene inventory. It's purely intended to merge content into your scene
+ so you could also use it as a new base.
+
+ """
+ representations = ["ma"]
+ families = ["*"]
+
+ label = "Import"
+ order = 10
+ icon = "arrow-circle-down"
+ color = "#775555"
+
+ def load(self, context, name=None, namespace=None, data=None):
+ import maya.cmds as cmds
+
+ from avalon import maya
+ from avalon.maya import lib
+
+ choice = self.display_warning()
+ if choice is False:
+ return
+
+ asset = context['asset']
+
+ namespace = namespace or lib.unique_namespace(
+ asset["name"] + "_",
+ prefix="_" if asset["name"][0].isdigit() else "",
+ suffix="_",
+ )
+
+ with maya.maintained_selection():
+ cmds.file(self.fname,
+ i=True,
+ namespace=namespace,
+ returnNewNodes=True,
+ groupReference=True,
+ groupName="{}:{}".format(namespace, name))
+
+ # We do not containerize imported content, it remains unmanaged
+ return
+
+ def display_warning(self):
+ """Show warning to ensure the user can't import models by accident
+
+ Returns:
+ bool
+
+ """
+
+ from avalon.vendor.Qt import QtWidgets
+
+ accept = QtWidgets.QMessageBox.Ok
+ buttons = accept | QtWidgets.QMessageBox.Cancel
+
+ message = "Are you sure you want import this"
+ state = QtWidgets.QMessageBox.warning(None,
+ "Are you sure?",
+ message,
+ buttons=buttons,
+ defaultButton=accept)
+
+ return state == accept
diff --git a/config/plugins/maya/load/load_alembic.py b/config/plugins/maya/load/load_alembic.py
new file mode 100644
index 0000000000..4daa1fada6
--- /dev/null
+++ b/config/plugins/maya/load/load_alembic.py
@@ -0,0 +1,33 @@
+import config.apps.maya.plugin
+
+
+class AbcLoader(config.apps.maya.plugin.ReferenceLoader):
+ """Specific loader of Alembic for the avalon.animation family"""
+
+ families = ["colorbleed.animation",
+ "colorbleed.pointcache"]
+ label = "Reference animation"
+ representations = ["abc"]
+ order = -10
+ icon = "code-fork"
+ color = "orange"
+
+ def process_reference(self, context, name, namespace, data):
+
+ import maya.cmds as cmds
+
+ cmds.loadPlugin("AbcImport.mll", quiet=True)
+ nodes = cmds.file(self.fname,
+ namespace=namespace,
+ sharedReferenceFile=False,
+ groupReference=True,
+ groupName="{}:{}".format(namespace, name),
+ reference=True,
+ returnNewNodes=True)
+
+ self[:] = nodes
+
+ return nodes
+
+ def switch(self, container, representation):
+ self.update(container, representation)
diff --git a/config/plugins/maya/load/load_camera.py b/config/plugins/maya/load/load_camera.py
new file mode 100644
index 0000000000..5aca36a934
--- /dev/null
+++ b/config/plugins/maya/load/load_camera.py
@@ -0,0 +1,45 @@
+import config.apps.maya.plugin
+
+
+class CameraLoader(config.apps.maya.plugin.ReferenceLoader):
+ """Specific loader of Alembic for the avalon.animation family"""
+
+ families = ["colorbleed.camera"]
+ label = "Reference camera"
+ representations = ["abc", "ma"]
+ order = -10
+ icon = "code-fork"
+ color = "orange"
+
+ def process_reference(self, context, name, namespace, data):
+
+ import maya.cmds as cmds
+ # Get family type from the context
+
+ cmds.loadPlugin("AbcImport.mll", quiet=True)
+ nodes = cmds.file(self.fname,
+ namespace=namespace,
+ sharedReferenceFile=False,
+ groupReference=True,
+ groupName="{}:{}".format(namespace, name),
+ reference=True,
+ returnNewNodes=True)
+
+ cameras = cmds.ls(nodes, type="camera")
+
+ # Check the Maya version, lockTransform has been introduced since
+ # Maya 2016.5 Ext 2
+ version = int(cmds.about(version=True))
+ if version >= 2016:
+ for camera in cameras:
+ cmds.camera(camera, edit=True, lockTransform=True)
+ else:
+ self.log.warning("This version of Maya does not support locking of"
+ " transforms of cameras.")
+
+ self[:] = nodes
+
+ return nodes
+
+ def switch(self, container, representation):
+ self.update(container, representation)
diff --git a/config/plugins/maya/load/load_look.py b/config/plugins/maya/load/load_look.py
new file mode 100644
index 0000000000..bcac5a7499
--- /dev/null
+++ b/config/plugins/maya/load/load_look.py
@@ -0,0 +1,40 @@
+import config.apps.maya.plugin
+
+
+class LookLoader(config.apps.maya.plugin.ReferenceLoader):
+ """Specific loader for lookdev"""
+
+ families = ["colorbleed.look"]
+ representations = ["ma"]
+
+ label = "Reference look"
+ order = -10
+ icon = "code-fork"
+ color = "orange"
+
+ def process_reference(self, context, name, namespace, data):
+ """
+ Load and try to ssign Lookdev to nodes based on relationship data
+ Args:
+ name:
+ namespace:
+ context:
+ data:
+
+ Returns:
+
+ """
+
+ import maya.cmds as cmds
+ from avalon import maya
+
+ with maya.maintained_selection():
+ nodes = cmds.file(self.fname,
+ namespace=namespace,
+ reference=True,
+ returnNewNodes=True)
+
+ self[:] = nodes
+
+ def switch(self, container, representation):
+ self.update(container, representation)
diff --git a/config/plugins/maya/load/load_mayaascii.py b/config/plugins/maya/load/load_mayaascii.py
new file mode 100644
index 0000000000..c48d3370b5
--- /dev/null
+++ b/config/plugins/maya/load/load_mayaascii.py
@@ -0,0 +1,33 @@
+import config.apps.maya.plugin
+
+
+class MayaAsciiLoader(config.apps.maya.plugin.ReferenceLoader):
+ """Load the model"""
+
+ families = ["config.apps.mayaAscii"]
+ representations = ["ma"]
+
+ label = "Reference Maya Ascii"
+ order = -10
+ icon = "code-fork"
+ color = "orange"
+
+ def process_reference(self, context, name, namespace, data):
+
+ import maya.cmds as cmds
+ from avalon import maya
+
+ with maya.maintained_selection():
+ nodes = cmds.file(self.fname,
+ namespace=namespace,
+ reference=True,
+ returnNewNodes=True,
+ groupReference=True,
+ groupName="{}:{}".format(namespace, name))
+
+ self[:] = nodes
+
+ return nodes
+
+ def switch(self, container, representation):
+ self.update(container, representation)
diff --git a/config/plugins/maya/load/load_model.py b/config/plugins/maya/load/load_model.py
new file mode 100644
index 0000000000..3eb2e53d33
--- /dev/null
+++ b/config/plugins/maya/load/load_model.py
@@ -0,0 +1,125 @@
+from avalon import api
+import config.apps.maya.plugin
+
+
+class ModelLoader(config.apps.maya.plugin.ReferenceLoader):
+ """Load the model"""
+
+ families = ["colorbleed.model"]
+ representations = ["ma"]
+
+ label = "Reference Model"
+ order = -10
+ icon = "code-fork"
+ color = "orange"
+
+ def process_reference(self, context, name, namespace, data):
+
+ import maya.cmds as cmds
+ from avalon import maya
+
+ with maya.maintained_selection():
+ nodes = cmds.file(self.fname,
+ namespace=namespace,
+ reference=True,
+ returnNewNodes=True,
+ groupReference=True,
+ groupName="{}:{}".format(namespace, name))
+
+ self[:] = nodes
+
+ return nodes
+
+ def switch(self, container, representation):
+ self.update(container, representation)
+
+
+class GpuCacheLoader(api.Loader):
+ """Load model Alembic as gpuCache"""
+
+ families = ["colorbleed.model"]
+ representations = ["abc"]
+
+ label = "Import Gpu Cache"
+ order = -5
+ icon = "code-fork"
+ color = "orange"
+
+ def load(self, context, name, namespace, data):
+
+ import maya.cmds as cmds
+ import avalon.maya.lib as lib
+ from avalon.maya.pipeline import containerise
+
+ asset = context['asset']['name']
+ namespace = namespace or lib.unique_namespace(
+ asset + "_",
+ prefix="_" if asset[0].isdigit() else "",
+ suffix="_",
+ )
+
+ cmds.loadPlugin("gpuCache", quiet=True)
+
+ # Root group
+ label = "{}:{}".format(namespace, name)
+ root = cmds.group(name=label, empty=True)
+
+ # Create transform with shape
+ transform_name = label + "_GPU"
+ transform = cmds.createNode("transform", name=transform_name,
+ parent=root)
+ cache = cmds.createNode("gpuCache",
+ parent=transform,
+ name="{0}Shape".format(transform_name))
+
+ # Set the cache filepath
+ cmds.setAttr(cache + '.cacheFileName', self.fname, type="string")
+ cmds.setAttr(cache + '.cacheGeomPath', "|", type="string") # root
+
+ # Lock parenting of the transform and cache
+ cmds.lockNode([transform, cache], lock=True)
+
+ nodes = [root, transform, cache]
+ self[:] = nodes
+
+ return containerise(
+ name=name,
+ namespace=namespace,
+ nodes=nodes,
+ context=context,
+ loader=self.__class__.__name__)
+
+ def update(self, container, representation):
+
+ import maya.cmds as cmds
+
+ path = api.get_representation_path(representation)
+
+ # Update the cache
+ members = cmds.sets(container['objectName'], query=True)
+ caches = cmds.ls(members, type="gpuCache", long=True)
+
+ assert len(caches) == 1, "This is a bug"
+
+ for cache in caches:
+ cmds.setAttr(cache + ".cacheFileName", path, type="string")
+
+ cmds.setAttr(container["objectName"] + ".representation",
+ str(representation["_id"]),
+ type="string")
+
+ def switch(self, container, representation):
+ self.update(container, representation)
+
+ def remove(self, container):
+ import maya.cmds as cmds
+ members = cmds.sets(container['objectName'], query=True)
+ cmds.lockNode(members, lock=False)
+ cmds.delete([container['objectName']] + members)
+
+ # Clean up the namespace
+ try:
+ cmds.namespace(removeNamespace=container['namespace'],
+ deleteNamespaceContent=True)
+ except RuntimeError:
+ pass
diff --git a/config/plugins/maya/load/load_rig.py b/config/plugins/maya/load/load_rig.py
new file mode 100644
index 0000000000..8b7dbe1532
--- /dev/null
+++ b/config/plugins/maya/load/load_rig.py
@@ -0,0 +1,70 @@
+from maya import cmds
+
+import config.apps.maya.plugin
+from avalon import api, maya
+
+
+class RigLoader(config.apps.maya.plugin.ReferenceLoader):
+ """Specific loader for rigs
+
+ This automatically creates an instance for animators upon load.
+
+ """
+
+ families = ["colorbleed.rig"]
+ representations = ["ma"]
+
+ label = "Reference rig"
+ order = -10
+ icon = "code-fork"
+ color = "orange"
+
+ def process_reference(self, context, name, namespace, data):
+
+ nodes = cmds.file(self.fname,
+ namespace=namespace,
+ reference=True,
+ returnNewNodes=True,
+ groupReference=True,
+ groupName="{}:{}".format(namespace, name))
+
+ # Store for post-process
+ self[:] = nodes
+ if data.get("post_process", True):
+ self._post_process(name, namespace, context, data)
+
+ return nodes
+
+ def _post_process(self, name, namespace, context, data):
+
+ # TODO(marcus): We are hardcoding the name "out_SET" here.
+ # Better register this keyword, so that it can be used
+ # elsewhere, such as in the Integrator plug-in,
+ # without duplication.
+
+ output = next((node for node in self if
+ node.endswith("out_SET")), None)
+ controls = next((node for node in self if
+ node.endswith("controls_SET")), None)
+
+ assert output, "No out_SET in rig, this is a bug."
+ assert controls, "No controls_SET in rig, this is a bug."
+
+ # Find the roots amongst the loaded nodes
+ roots = cmds.ls(self[:], assemblies=True, long=True)
+ assert roots, "No root nodes in rig, this is a bug."
+
+ asset = api.Session["AVALON_ASSET"]
+ dependency = str(context["representation"]["_id"])
+
+ # Create the animation instance
+ with maya.maintained_selection():
+ cmds.select([output, controls] + roots, noExpand=True)
+ api.create(name=namespace,
+ asset=asset,
+ family="colorbleed.animation",
+ options={"useSelection": True},
+ data={"dependencies": dependency})
+
+ def switch(self, container, representation):
+ self.update(container, representation)
diff --git a/config/plugins/maya/load/load_setdress.py b/config/plugins/maya/load/load_setdress.py
new file mode 100644
index 0000000000..8cc857fc3f
--- /dev/null
+++ b/config/plugins/maya/load/load_setdress.py
@@ -0,0 +1,80 @@
+from avalon import api
+
+
+class SetDressLoader(api.Loader):
+
+ families = ["colorbleed.setdress"]
+ representations = ["json"]
+
+ label = "Load Set Dress"
+ order = -9
+ icon = "code-fork"
+ color = "orange"
+
+ def load(self, context, name, namespace, data):
+
+ from avalon.maya.pipeline import containerise
+ from avalon.maya import lib
+
+ asset = context['asset']['name']
+ namespace = namespace or lib.unique_namespace(
+ asset + "_",
+ prefix="_" if asset[0].isdigit() else "",
+ suffix="_",
+ )
+
+ from colorbleed import setdress_api
+
+ containers = setdress_api.load_package(filepath=self.fname,
+ name=name,
+ namespace=namespace)
+
+ self[:] = containers
+
+ # Only containerize if any nodes were loaded by the Loader
+ nodes = self[:]
+ if not nodes:
+ return
+
+ return containerise(
+ name=name,
+ namespace=namespace,
+ nodes=nodes,
+ context=context,
+ loader=self.__class__.__name__)
+
+ def update(self, container, representation):
+
+ from colorbleed import setdress_api
+ return setdress_api.update_package(container,
+ representation)
+
+ def remove(self, container):
+ """Remove all sub containers"""
+
+ from avalon import api
+ from colorbleed import setdress_api
+ import maya.cmds as cmds
+
+ # Remove all members
+ member_containers = setdress_api.get_contained_containers(container)
+ for member_container in member_containers:
+ self.log.info("Removing container %s",
+ member_container['objectName'])
+ api.remove(member_container)
+
+ # Remove alembic hierarchy reference
+ # TODO: Check whether removing all contained references is safe enough
+ members = cmds.sets(container['objectName'], query=True) or []
+ references = cmds.ls(members, type="reference")
+ for reference in references:
+ self.log.info("Removing %s", reference)
+ fname = cmds.referenceQuery(reference, filename=True)
+ cmds.file(fname, removeReference=True)
+
+ # Delete container and its contents
+ if cmds.objExists(container['objectName']):
+ members = cmds.sets(container['objectName'], query=True) or []
+ cmds.delete([container['objectName']] + members)
+
+ # TODO: Ensure namespace is gone
\ No newline at end of file
diff --git a/config/plugins/maya/load/load_vrayproxy.py b/config/plugins/maya/load/load_vrayproxy.py
new file mode 100644
index 0000000000..cbfe45cb46
--- /dev/null
+++ b/config/plugins/maya/load/load_vrayproxy.py
@@ -0,0 +1,144 @@
+from avalon.maya import lib
+from avalon import api
+
+import maya.cmds as cmds
+
+
+class VRayProxyLoader(api.Loader):
+ """Load VRayMesh proxy"""
+
+ families = ["colorbleed.vrayproxy"]
+ representations = ["vrmesh"]
+
+ label = "Import VRay Proxy"
+ order = -10
+ icon = "code-fork"
+ color = "orange"
+
+ def load(self, context, name, namespace, data):
+
+ from avalon.maya.pipeline import containerise
+ from config.apps.maya.lib import namespaced
+
+ asset_name = context['asset']["name"]
+ namespace = namespace or lib.unique_namespace(
+ asset_name + "_",
+ prefix="_" if asset_name[0].isdigit() else "",
+ suffix="_",
+ )
+
+ # Ensure V-Ray for Maya is loaded.
+ cmds.loadPlugin("vrayformaya", quiet=True)
+
+ with lib.maintained_selection():
+ cmds.namespace(addNamespace=namespace)
+ with namespaced(namespace, new=False):
+ nodes = self.create_vray_proxy(name,
+ filename=self.fname)
+
+ self[:] = nodes
+ if not nodes:
+ return
+
+ return containerise(
+ name=name,
+ namespace=namespace,
+ nodes=nodes,
+ context=context,
+ loader=self.__class__.__name__)
+
+ def update(self, container, representation):
+
+ node = container['objectName']
+ assert cmds.objExists(node), "Missing container"
+
+ members = cmds.sets(node, query=True) or []
+ vraymeshes = cmds.ls(members, type="VRayMesh")
+ assert vraymeshes, "Cannot find VRayMesh in container"
+
+ filename = api.get_representation_path(representation)
+
+ for vray_mesh in vraymeshes:
+ cmds.setAttr("{}.fileName".format(vray_mesh),
+ filename,
+ type="string")
+
+ # Update metadata
+ cmds.setAttr("{}.representation".format(node),
+ str(representation["_id"]),
+ type="string")
+
+ def remove(self, container):
+
+ # Delete container and its contents
+ if cmds.objExists(container['objectName']):
+ members = cmds.sets(container['objectName'], query=True) or []
+ cmds.delete([container['objectName']] + members)
+
+ # Remove the namespace, if empty
+ namespace = container['namespace']
+ if cmds.namespace(exists=namespace):
+ members = cmds.namespaceInfo(namespace, listNamespace=True)
+ if not members:
+ cmds.namespace(removeNamespace=namespace)
+ else:
+ self.log.warning("Namespace not deleted because it "
+ "still has members: %s", namespace)
+
+ def switch(self, container, representation):
+ self.update(container, representation)
+
+ def create_vray_proxy(self, name, filename):
+ """Re-create the structure created by VRay to support vrmeshes
+
+ Args:
+ name(str): name of the asset
+
+ Returns:
+ nodes(list)
+ """
+
+ # Create nodes
+ vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name))
+ mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name))
+ vray_mat = cmds.createNode("VRayMeshMaterial",
+ name="{}_VRMM".format(name))
+ vray_mat_sg = cmds.createNode("shadingEngine",
+ name="{}_VRSG".format(name))
+
+ cmds.setAttr("{}.fileName".format(vray_mesh),
+ filename,
+ type="string")
+
+ # Create important connections
+ cmds.connectAttr("time1.outTime",
+ "{0}.currentFrame".format(vray_mesh))
+ cmds.connectAttr("{}.fileName2".format(vray_mesh),
+ "{}.fileName".format(vray_mat))
+ cmds.connectAttr("{}.instancing".format(vray_mesh),
+ "{}.instancing".format(vray_mat))
+ cmds.connectAttr("{}.output".format(vray_mesh),
+ "{}.inMesh".format(mesh_shape))
+ cmds.connectAttr("{}.overrideFileName".format(vray_mesh),
+ "{}.overrideFileName".format(vray_mat))
+ cmds.connectAttr("{}.currentFrame".format(vray_mesh),
+ "{}.currentFrame".format(vray_mat))
+
+ # Set surface shader input
+ cmds.connectAttr("{}.outColor".format(vray_mat),
+ "{}.surfaceShader".format(vray_mat_sg))
+
+ # Connect mesh to shader
+ cmds.sets([mesh_shape], addElement=vray_mat_sg)
+
+ group_node = cmds.group(empty=True, name="{}_GRP".format(name))
+ mesh_transform = cmds.listRelatives(mesh_shape,
+ parent=True, fullPath=True)
+ cmds.parent(mesh_transform, group_node)
+ nodes = [vray_mesh, mesh_shape, vray_mat, vray_mat_sg, group_node]
+
+ # Fix: Force refresh so the mesh shows correctly after creation
+ cmds.refresh()
+ cmds.setAttr("{}.geomType".format(vray_mesh), 2)
+
+ return nodes
diff --git a/config/plugins/maya/load/load_yeti_cache.py b/config/plugins/maya/load/load_yeti_cache.py
new file mode 100644
index 0000000000..e3c4a149d0
--- /dev/null
+++ b/config/plugins/maya/load/load_yeti_cache.py
@@ -0,0 +1,298 @@
+import os
+import json
+import re
+import glob
+from collections import defaultdict
+
+from maya import cmds
+
+from avalon import api
+from avalon.maya import lib as avalon_lib, pipeline
+from config.apps.maya import lib
+
+
+class YetiCacheLoader(api.Loader):
+
+ families = ["colorbleed.yeticache", "colorbleed.yetiRig"]
+ representations = ["fur"]
+
+ label = "Load Yeti Cache"
+ order = -9
+ icon = "code-fork"
+ color = "orange"
+
+ def load(self, context, name=None, namespace=None, data=None):
+
+ # Build namespace
+ asset = context["asset"]
+ if namespace is None:
+ namespace = self.create_namespace(asset["name"])
+
+ # Ensure Yeti is loaded
+ if not cmds.pluginInfo("pgYetiMaya", query=True, loaded=True):
+ cmds.loadPlugin("pgYetiMaya", quiet=True)
+
+ # Get JSON
+ fname, ext = os.path.splitext(self.fname)
+ settings_fname = "{}.fursettings".format(fname)
+ with open(settings_fname, "r") as fp:
+ fursettings = json.load(fp)
+
+ # Check if resources map exists
+ # Get node name from JSON
+ if "nodes" not in fursettings:
+ raise RuntimeError("Encountered invalid data, expect 'nodes' in "
+ "fursettings.")
+
+ node_data = fursettings["nodes"]
+ nodes = self.create_nodes(namespace, node_data)
+
+ group_name = "{}:{}".format(namespace, name)
+ group_node = cmds.group(nodes, name=group_name)
+
+ nodes.append(group_node)
+
+ self[:] = nodes
+
+ return pipeline.containerise(name=name,
+ namespace=namespace,
+ nodes=nodes,
+ context=context,
+ loader=self.__class__.__name__)
+
+ def remove(self, container):
+
+ from maya import cmds
+
+ namespace = container["namespace"]
+ container_name = container["objectName"]
+
+ self.log.info("Removing '%s' from Maya.." % container["name"])
+
+ container_content = cmds.sets(container_name, query=True)
+ nodes = cmds.ls(container_content, long=True)
+
+ nodes.append(container_name)
+
+ try:
+ cmds.delete(nodes)
+ except ValueError:
+ # Already implicitly deleted by Maya upon removing reference
+ pass
+
+ cmds.namespace(removeNamespace=namespace, deleteNamespaceContent=True)
+
+ def update(self, container, representation):
+
+ namespace = container["namespace"]
+ container_node = container["objectName"]
+ path = api.get_representation_path(representation)
+
+ # Get all node data
+ fname, ext = os.path.splitext(path)
+ settings_fname = "{}.fursettings".format(fname)
+ with open(settings_fname, "r") as fp:
+ settings = json.load(fp)
+
+ # Collect scene information of asset
+ set_members = cmds.sets(container["objectName"], query=True)
+ container_root = lib.get_container_transforms(container,
+ members=set_members,
+ root=True)
+ scene_nodes = cmds.ls(set_members, type="pgYetiMaya", long=True)
+
+ # Build lookup with cbId as keys
+ scene_lookup = defaultdict(list)
+ for node in scene_nodes:
+ cb_id = lib.get_id(node)
+ scene_lookup[cb_id].append(node)
+
+ # Re-assemble metadata with cbId as keys
+ meta_data_lookup = {n["cbId"]: n for n in settings["nodes"]}
+
+        # Compare lookups and get the nodes which are no longer relevant
+ to_delete_lookup = {cb_id for cb_id in scene_lookup.keys() if
+ cb_id not in meta_data_lookup}
+ if to_delete_lookup:
+
+ # Get nodes and remove entry from lookup
+ to_remove = []
+ for _id in to_delete_lookup:
+ # Get all related nodes
+ shapes = scene_lookup[_id]
+ # Get the parents of all shapes under the ID
+ transforms = cmds.listRelatives(shapes,
+ parent=True,
+ fullPath=True) or []
+ to_remove.extend(shapes + transforms)
+
+                # Remove id from lookup
+ scene_lookup.pop(_id, None)
+
+ cmds.delete(to_remove)
+
+ for cb_id, data in meta_data_lookup.items():
+
+ # Update cache file name
+ file_name = data["name"].replace(":", "_")
+ cache_file_path = "{}.%04d.fur".format(file_name)
+ data["attrs"]["cacheFileName"] = os.path.join(path, cache_file_path)
+
+ if cb_id not in scene_lookup:
+
+ self.log.info("Creating new nodes ..")
+
+ new_nodes = self.create_nodes(namespace, [data])
+ cmds.sets(new_nodes, addElement=container_node)
+ cmds.parent(new_nodes, container_root)
+
+ else:
+ # Update the matching nodes
+ scene_nodes = scene_lookup[cb_id]
+ lookup_result = meta_data_lookup[cb_id]["name"]
+
+ # Remove namespace if any (e.g.: "character_01_:head_YNShape")
+ node_name = lookup_result.rsplit(":", 1)[-1]
+
+ for scene_node in scene_nodes:
+
+ # Get transform node, this makes renaming easier
+ transforms = cmds.listRelatives(scene_node,
+ parent=True,
+ fullPath=True) or []
+ assert len(transforms) == 1, "This is a bug!"
+
+ # Get scene node's namespace and rename the transform node
+ lead = scene_node.rsplit(":", 1)[0]
+ namespace = ":{}".format(lead.rsplit("|")[-1])
+
+ new_shape_name = "{}:{}".format(namespace, node_name)
+ new_trans_name = new_shape_name.rsplit("Shape", 1)[0]
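+                    # Example (hypothetical names): a scene node
+                    # "|grp|char_01_:head_YNShape" yields namespace
+                    # ":char_01_", shape ":char_01_:head_YNShape" and
+                    # transform ":char_01_:head_YN".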
+
+ transform_node = transforms[0]
+ cmds.rename(transform_node,
+ new_trans_name,
+ ignoreShape=False)
+
+ # Get the newly named shape node
+ yeti_nodes = cmds.listRelatives(new_trans_name,
+ children=True)
+ yeti_node = yeti_nodes[0]
+
+ for attr, value in data["attrs"].items():
+ lib.set_attribute(attr, value, yeti_node)
+
+ cmds.setAttr("{}.representation".format(container_node),
+ str(representation["_id"]),
+ typ="string")
+
+ def switch(self, container, representation):
+ self.update(container, representation)
+
+ # helper functions
+
+ def create_namespace(self, asset):
+ """Create a unique namespace
+ Args:
+ asset (dict): asset information
+
+ """
+
+ asset_name = "{}_".format(asset)
+ prefix = "_" if asset_name[0].isdigit()else ""
+ namespace = avalon_lib.unique_namespace(asset_name,
+ prefix=prefix,
+ suffix="_")
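+        # Example (hypothetical): an asset "hero" may yield a namespace like
+        # "hero_01_", while a digit-leading asset such as "01_car" gets a
+        # leading underscore to keep the namespace valid in Maya.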
+
+ return namespace
+
+ def validate_cache(self, filename, pattern="%04d"):
+ """Check if the cache has more than 1 frame
+
+ All caches with more than 1 frame need to be called with `%04d`
+ If the cache has only one frame we return that file name as we assume
+ it is a snapshot.
+
+ Args:
+ filename(str)
+ pattern(str)
+
+ Returns:
+ str
+
+ """
+
+ glob_pattern = filename.replace(pattern, "*")
+
+ escaped = re.escape(filename)
+ re_pattern = escaped.replace(pattern, "-?[0-9]+")
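+        # Example (hypothetical): for "cache.%04d.fur" the glob pattern
+        # becomes "cache.*.fur" and the regex matches frame tokens such as
+        # "0001" or "-001".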
+
+ files = glob.glob(glob_pattern)
+ files = [str(f) for f in files if re.match(re_pattern, f)]
+
+ if len(files) == 1:
+ return files[0]
+ elif len(files) == 0:
+ self.log.error("Could not find cache files for '%s'" % filename)
+
+ return filename
+
+ def create_nodes(self, namespace, settings):
+ """Create nodes with the correct namespace and settings
+
+ Args:
+ namespace(str): namespace
+ settings(list): list of dictionaries
+
+ Returns:
+ list
+
+ """
+
+ nodes = []
+ for node_settings in settings:
+
+ # Create pgYetiMaya node
+ original_node = node_settings["name"]
+ node_name = "{}:{}".format(namespace, original_node)
+ yeti_node = cmds.createNode("pgYetiMaya", name=node_name)
+
+            # Derive the transform name by stripping the "Shape" suffix
+            # (str.rstrip strips a character set, not a suffix)
+            transform_node = (node_name[:-len("Shape")]
+                              if node_name.endswith("Shape") else node_name)
+
+ lib.set_id(transform_node, node_settings["transform"]["cbId"])
+ lib.set_id(yeti_node, node_settings["cbId"])
+
+ nodes.extend([transform_node, yeti_node])
+
+            # Get the stored attribute settings for this node
+            attributes = node_settings["attrs"]
+
+ # Check if cache file name is stored
+ if "cacheFileName" not in attributes:
+ file_name = original_node.replace(":", "_")
+ cache_name = "{}.%04d.fur".format(file_name)
+ cache = os.path.join(self.fname, cache_name)
+
+ self.validate_cache(cache)
+ attributes["cacheFileName"] = cache
+
+ # Update attributes with requirements
+ attributes.update({"viewportDensity": 0.1,
+ "verbosity": 2,
+ "fileMode": 1})
+
+ # Apply attributes to pgYetiMaya node
+ for attr, value in attributes.items():
+ lib.set_attribute(attr, value, yeti_node)
+
+            # Fix for: YETI-6
+            # Fixes the render stats (this is literally taken from Peregrine
+            # Labs' ../scripts/pgYetiNode.mel script)
+ cmds.setAttr("{}.visibleInReflections".format(yeti_node), True)
+ cmds.setAttr("{}.visibleInRefractions".format(yeti_node), True)
+
+ # Connect to the time node
+ cmds.connectAttr("time1.outTime", "%s.currentTime" % yeti_node)
+
+ return nodes
diff --git a/config/plugins/maya/load/load_yeti_rig.py b/config/plugins/maya/load/load_yeti_rig.py
new file mode 100644
index 0000000000..6e5c1d0014
--- /dev/null
+++ b/config/plugins/maya/load/load_yeti_rig.py
@@ -0,0 +1,31 @@
+import config.apps.maya.plugin
+
+
+class YetiRigLoader(config.apps.maya.plugin.ReferenceLoader):
+
+ families = ["colorbleed.yetiRig"]
+ representations = ["ma"]
+
+ label = "Load Yeti Rig"
+ order = -9
+ icon = "code-fork"
+ color = "orange"
+
+ def process_reference(self, context, name=None, namespace=None, data=None):
+
+ import maya.cmds as cmds
+ from avalon import maya
+
+ with maya.maintained_selection():
+ nodes = cmds.file(self.fname,
+ namespace=namespace,
+ reference=True,
+ returnNewNodes=True,
+ groupReference=True,
+ groupName="{}:{}".format(namespace, name))
+
+ self[:] = nodes
+
+ self.log.info("Yeti Rig Connection Manager will be available soon")
+
+ return nodes
diff --git a/config/plugins/maya/publish/collect_animation.py b/config/plugins/maya/publish/collect_animation.py
new file mode 100644
index 0000000000..53251cadc3
--- /dev/null
+++ b/config/plugins/maya/publish/collect_animation.py
@@ -0,0 +1,53 @@
+import pyblish.api
+
+import maya.cmds as cmds
+
+
+class CollectAnimationOutputGeometry(pyblish.api.InstancePlugin):
+ """Collect out hierarchy data for instance.
+
+ Collect all hierarchy nodes which reside in the out_SET of the animation
+ instance or point cache instance. This is to unify the logic of retrieving
+ that specific data. This eliminates the need to write two separate pieces
+ of logic to fetch all hierarchy nodes.
+
+    Results in a list of nodes from the content of the instance.
+
+ """
+
+ order = pyblish.api.CollectorOrder + 0.4
+ families = ["colorbleed.animation"]
+ label = "Collect Animation Output Geometry"
+ hosts = ["maya"]
+
+ ignore_type = ["constraints"]
+
+ def process(self, instance):
+ """Collect the hierarchy nodes"""
+
+ family = instance.data["family"]
+ out_set = next((i for i in instance.data["setMembers"] if
+ i.endswith("out_SET")), None)
+
+ assert out_set, ("Expecting out_SET for instance of family"
+ " '%s'" % family)
+ members = cmds.ls(cmds.sets(out_set, query=True), long=True)
+
+ # Get all the relatives of the members
+ descendants = cmds.listRelatives(members,
+ allDescendents=True,
+ fullPath=True) or []
+ descendants = cmds.ls(descendants, noIntermediate=True, long=True)
+
+ # Add members and descendants together for a complete overview
+ hierarchy = members + descendants
+
+ # Ignore certain node types (e.g. constraints)
+ ignore = cmds.ls(hierarchy, type=self.ignore_type, long=True)
+ if ignore:
+ ignore = set(ignore)
+ hierarchy = [node for node in hierarchy if node not in ignore]
+
+ # Store data in the instance for the validator
+ instance.data["out_hierarchy"] = hierarchy
+
diff --git a/config/plugins/maya/publish/collect_current_file.py b/config/plugins/maya/publish/collect_current_file.py
new file mode 100644
index 0000000000..0b38ebcf3d
--- /dev/null
+++ b/config/plugins/maya/publish/collect_current_file.py
@@ -0,0 +1,16 @@
+from maya import cmds
+
+import pyblish.api
+
+
+class CollectMayaCurrentFile(pyblish.api.ContextPlugin):
+ """Inject the current working file into context"""
+
+ order = pyblish.api.CollectorOrder - 0.5
+ label = "Maya Current File"
+ hosts = ['maya']
+
+ def process(self, context):
+ """Inject the current working file"""
+ current_file = cmds.file(query=True, sceneName=True)
+ context.data['currentFile'] = current_file
diff --git a/config/plugins/maya/publish/collect_history.py b/config/plugins/maya/publish/collect_history.py
new file mode 100644
index 0000000000..840e916133
--- /dev/null
+++ b/config/plugins/maya/publish/collect_history.py
@@ -0,0 +1,40 @@
+from maya import cmds
+
+import pyblish.api
+
+
+class CollectMayaHistory(pyblish.api.InstancePlugin):
+ """Collect history for instances from the Maya scene
+
+ Note:
+ This removes render layers collected in the history
+
+ This is separate from Collect Instances so we can target it towards only
+ specific family types.
+
+ """
+
+ order = pyblish.api.CollectorOrder + 0.1
+ hosts = ["maya"]
+ label = "Maya History"
+ families = ["colorbleed.rig"]
+ verbose = False
+
+ def process(self, instance):
+
+ # Collect the history with long names
+ history = cmds.listHistory(instance, leaf=False) or []
+ history = cmds.ls(history, long=True)
+
+ # Remove invalid node types (like renderlayers)
+ invalid = cmds.ls(history, type="renderLayer", long=True)
+ if invalid:
+ invalid = set(invalid) # optimize lookup
+ history = [x for x in history if x not in invalid]
+
+ # Combine members with history
+ members = instance[:] + history
+ members = list(set(members)) # ensure unique
+
+ # Update the instance
+ instance[:] = members
diff --git a/config/plugins/maya/publish/collect_instances.py b/config/plugins/maya/publish/collect_instances.py
new file mode 100644
index 0000000000..e40893c71c
--- /dev/null
+++ b/config/plugins/maya/publish/collect_instances.py
@@ -0,0 +1,141 @@
+from maya import cmds
+
+import pyblish.api
+
+
+class CollectInstances(pyblish.api.ContextPlugin):
+ """Gather instances by objectSet and pre-defined attribute
+
+ This collector takes into account assets that are associated with
+ an objectSet and marked with a unique identifier;
+
+ Identifier:
+ id (str): "pyblish.avalon.instance"
+
+ Supported Families:
+ avalon.model: Geometric representation of artwork
+ avalon.rig: An articulated model for animators.
+ A rig may contain a series of sets in which to identify
+ its contents.
+
+ - cache_SEL: Should contain cachable polygonal meshes
+ - controls_SEL: Should contain animatable controllers for animators
+ - resources_SEL: Should contain nodes that reference external files
+
+ Limitations:
+ - Only Maya is supported
+ - One (1) rig per scene file
+ - Unmanaged history, it is up to the TD to ensure
+ history is up to par.
+ avalon.animation: Pointcache of `avalon.rig`
+
+ Limitations:
+ - Does not take into account nodes connected to those
+ within an objectSet. Extractors are assumed to export
+ with history preserved, but this limits what they will
+ be able to achieve and the amount of data available
+ to validators.
+
+ """
+
+ label = "Collect Instances"
+ order = pyblish.api.CollectorOrder
+ hosts = ["maya"]
+
+ def process(self, context):
+
+ objectset = cmds.ls("*.id", long=True, type="objectSet",
+ recursive=True, objectsOnly=True)
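+        # `cmds.ls("*.id", ...)` returns the objectSets carrying an `id`
+        # attribute; `recursive=True` also matches sets inside namespaces
+        # (e.g. a hypothetical "char01:modelDefault" set).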
+ for objset in objectset:
+
+ if not cmds.attributeQuery("id", node=objset, exists=True):
+ continue
+
+ id_attr = "{}.id".format(objset)
+ if cmds.getAttr(id_attr) != "pyblish.avalon.instance":
+ continue
+
+ # The developer is responsible for specifying
+ # the family of each instance.
+ has_family = cmds.attributeQuery("family",
+ node=objset,
+ exists=True)
+ assert has_family, "\"%s\" was missing a family" % objset
+
+ members = cmds.sets(objset, query=True)
+ if members is None:
+ self.log.warning("Skipped empty instance: \"%s\" " % objset)
+ continue
+
+ self.log.info("Creating instance for {}".format(objset))
+
+ data = dict()
+
+ # Apply each user defined attribute as data
+ for attr in cmds.listAttr(objset, userDefined=True) or list():
+ try:
+ value = cmds.getAttr("%s.%s" % (objset, attr))
+ except Exception:
+ # Some attributes cannot be read directly,
+ # such as mesh and color attributes. These
+ # are considered non-essential to this
+ # particular publishing pipeline.
+ value = None
+ data[attr] = value
+
+            # Temporary translation of `active` to `publish` until the issue
+            # has been resolved: https://github.com/pyblish/pyblish-base/issues/307
+ if "active" in data:
+ data["publish"] = data["active"]
+
+ # Collect members
+ members = cmds.ls(members, long=True) or []
+
+ # `maya.cmds.listRelatives(noIntermediate=True)` only works when
+ # `shapes=True` argument is passed, since we also want to include
+ # transforms we filter afterwards.
+ children = cmds.listRelatives(members,
+ allDescendents=True,
+ fullPath=True) or []
+ children = cmds.ls(children, noIntermediate=True, long=True)
+
+ parents = self.get_all_parents(members)
+ members_hierarchy = list(set(members + children + parents))
+
+ # Create the instance
+ name = cmds.ls(objset, long=False)[0] # use short name
+ instance = context.create_instance(data.get("name", name))
+ instance[:] = members_hierarchy
+ instance.data["setMembers"] = members
+ instance.data.update(data)
+
+ # Produce diagnostic message for any graphical
+ # user interface interested in visualising it.
+ self.log.info("Found: \"%s\" " % instance.data["name"])
+
+ def sort_by_family(instance):
+ """Sort by family"""
+ return instance.data.get("families", instance.data.get("family"))
+
+ # Sort/grouped by family (preserving local index)
+ context[:] = sorted(context, key=sort_by_family)
+
+ return context
+
+ def get_all_parents(self, nodes):
+ """Get all parents by using string operations (optimization)
+
+ Args:
+ nodes (list): the nodes which are found in the objectSet
+
+ Returns:
+ list
+ """
+
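+        # Example: "|root|grp|mesh" splits into ["", "root", "grp", "mesh"]
+        # and contributes the parents "|root" and "|root|grp"; the range
+        # starts at 2 to skip the empty first element and ends before the
+        # node itself.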
+ parents = []
+ for node in nodes:
+ splitted = node.split("|")
+ items = ["|".join(splitted[0:i]) for i in range(2, len(splitted))]
+ parents.extend(items)
+
+ return list(set(parents))
diff --git a/config/plugins/maya/publish/collect_look.py b/config/plugins/maya/publish/collect_look.py
new file mode 100644
index 0000000000..5241040e95
--- /dev/null
+++ b/config/plugins/maya/publish/collect_look.py
@@ -0,0 +1,279 @@
+from maya import cmds
+import pyblish.api
+import config.apps.maya.lib as lib
+from cb.utils.maya import context, shaders
+
+SHAPE_ATTRS = ["castsShadows",
+ "receiveShadows",
+ "motionBlur",
+ "primaryVisibility",
+ "smoothShading",
+ "visibleInReflections",
+ "visibleInRefractions",
+ "doubleSided",
+ "opposite"]
+
+SHAPE_ATTRS = set(SHAPE_ATTRS)
+
+
+def get_look_attrs(node):
+ """Returns attributes of a node that are important for the look.
+
+ These are the "changed" attributes (those that have edits applied
+ in the current scene).
+
+ Returns:
+ list: Attribute names to extract
+
+ """
+
+ # When referenced get only attributes that are "changed since file open"
+ # which includes any reference edits, otherwise take *all* user defined
+ # attributes
+ is_referenced = cmds.referenceQuery(node, isNodeReferenced=True)
+ result = cmds.listAttr(node, userDefined=True,
+ changedSinceFileOpen=is_referenced) or []
+
+ # `cbId` is added when a scene is saved, ignore by default
+ if "cbId" in result:
+ result.remove("cbId")
+
+ # For shapes allow render stat changes
+ if cmds.objectType(node, isAType="shape"):
+ attrs = cmds.listAttr(node, changedSinceFileOpen=True) or []
+ for attr in attrs:
+ if attr in SHAPE_ATTRS:
+ result.append(attr)
+
+ return result
+
+
+class CollectLook(pyblish.api.InstancePlugin):
+ """Collect look data for instance.
+
+    For the shapes/transforms of the referenced object to collect the look
+    for, retrieve the user-defined attributes (like V-Ray attributes) and
+    their values as they were created in the current scene.
+
+ For the members of the instance collect the sets (shadingEngines and
+ other sets, e.g. VRayDisplacement) they are in along with the exact
+ membership relations.
+
+ Collects:
+        lookAttributes (list): Nodes in instance with their altered attributes
+ lookSetRelations (list): Sets and their memberships
+ lookSets (list): List of set names included in the look
+
+ """
+
+ order = pyblish.api.CollectorOrder + 0.4
+ families = ["colorbleed.look"]
+ label = "Collect Look"
+ hosts = ["maya"]
+
+ def process(self, instance):
+ """Collect the Look in the instance with the correct layer settings"""
+
+ with context.renderlayer(instance.data["renderlayer"]):
+ self.collect(instance)
+
+ def collect(self, instance):
+
+ self.log.info("Looking for look associations "
+ "for %s" % instance.data['name'])
+
+ # Discover related object sets
+ self.log.info("Gathering sets..")
+ sets = self.collect_sets(instance)
+
+ # Lookup set (optimization)
+ instance_lookup = set(cmds.ls(instance, long=True))
+
+ self.log.info("Gathering set relations..")
+        # Iterate over a list copy of the keys so we can remove entries
+        # from the dict within the loop
+ for objset in list(sets):
+ self.log.debug("From %s.." % objset)
+
+ # Get all nodes of the current objectSet (shadingEngine)
+ for member in cmds.ls(cmds.sets(objset, query=True), long=True):
+ member_data = self.collect_member_data(member,
+ instance_lookup)
+ if not member_data:
+ continue
+
+ # Add information of the node to the members list
+ sets[objset]["members"].append(member_data)
+
+            # Remove sets that ended up without any members assigned, so the
+            # data is limited to only what we need.
+ if not sets[objset]["members"]:
+ self.log.info("Removing redundant set information: "
+ "%s" % objset)
+ sets.pop(objset, None)
+
+ self.log.info("Gathering attribute changes to instance members..")
+ attributes = self.collect_attributes_changed(instance)
+
+ # Store data on the instance
+ instance.data["lookData"] = {"attributes": attributes,
+ "relationships": sets}
+
+ # Collect file nodes used by shading engines (if we have any)
+ files = list()
+ looksets = sets.keys()
+ if looksets:
+ self.log.info("Found the following sets:\n{}".format(looksets))
+ # Get the entire node chain of the look sets
+ history = cmds.listHistory(looksets)
+ files = cmds.ls(history, type="file", long=True)
+
+ # Collect textures if any file nodes are found
+ instance.data["resources"] = [self.collect_resource(n)
+ for n in files]
+
+ # Log a warning when no relevant sets were retrieved for the look.
+ if not instance.data["lookData"]["relationships"]:
+ self.log.warning("No sets found for the nodes in the instance: "
+ "%s" % instance[:])
+
+ # Ensure unique shader sets
+        # Add shader sets to the instance so they are included in the ID validation
+ instance.extend(shader for shader in looksets if shader
+ not in instance_lookup)
+
+ self.log.info("Collected look for %s" % instance)
+
+ def collect_sets(self, instance):
+ """Collect all objectSets which are of importance for publishing
+
+        It checks whether the nodes in the instance are related to any
+        objectSets which need to be included in the publish.
+
+ Args:
+ instance (list): all nodes to be published
+
+ Returns:
+ dict
+ """
+
+ sets = dict()
+ for node in instance:
+ related_sets = lib.get_related_sets(node)
+ if not related_sets:
+ continue
+
+ for objset in related_sets:
+ if objset in sets:
+ continue
+
+ sets[objset] = {"uuid": lib.get_id(objset), "members": list()}
+
+ return sets
+
+ def collect_member_data(self, member, instance_members):
+ """Get all information of the node
+ Args:
+ member (str): the name of the node to check
+ instance_members (set): the collected instance members
+
+ Returns:
+ dict
+
+ """
+
+ node, components = (member.rsplit(".", 1) + [None])[:2]
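+        # Example: a component assignment "pCube1.f[0:121]" yields
+        # ("pCube1", "f[0:121]"); a plain node "pCube1" yields
+        # ("pCube1", None).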
+
+ # Only include valid members of the instance
+ if node not in instance_members:
+ return
+
+ node_id = lib.get_id(node)
+ if not node_id:
+ self.log.error("Member '{}' has no attribute 'cbId'".format(node))
+ return
+
+ member_data = {"name": node, "uuid": node_id}
+ if components:
+ member_data["components"] = components
+
+ return member_data
+
+ def collect_attributes_changed(self, instance):
+ """Collect all userDefined attributes which have changed
+
+ Each node gets checked for user defined attributes which have been
+        altered during development. Each change gets logged in a dictionary:
+
+ [{name: node,
+ uuid: uuid,
+ attributes: {attribute: value}}]
+
+ Args:
+ instance (list): all nodes which will be published
+
+ Returns:
+ list
+ """
+
+ attributes = []
+ for node in instance:
+
+ # Collect changes to "custom" attributes
+ node_attrs = get_look_attrs(node)
+
+ # Only include if there are any properties we care about
+ if not node_attrs:
+ continue
+
+ node_attributes = {}
+ for attr in node_attrs:
+ if not cmds.attributeQuery(attr, node=node, exists=True):
+ continue
+ attribute = "{}.{}".format(node, attr)
+ node_attributes[attr] = cmds.getAttr(attribute)
+
+ attributes.append({"name": node,
+ "uuid": lib.get_id(node),
+ "attributes": node_attributes})
+
+ return attributes
+
+ def collect_resource(self, node):
+ """Collect the link to the file(s) used (resource)
+ Args:
+ node (str): name of the node
+
+ Returns:
+ dict
+ """
+
+ attribute = "{}.fileTextureName".format(node)
+ source = cmds.getAttr(attribute)
+
+ # Compare with the computed file path, e.g. the one with the
+ # pattern in it, to generate some logging information about this
+ # difference
+ computed_attribute = "{}.computedFileTextureNamePattern".format(node)
+ computed_source = cmds.getAttr(computed_attribute)
+ if source != computed_source:
+ self.log.debug("Detected computed file pattern difference "
+ "from original pattern: {0} "
+ "({1} -> {2})".format(node,
+ source,
+ computed_source))
+
+ # We replace backslashes with forward slashes because V-Ray
+ # can't handle the UDIM files with the backslashes in the
+ # paths as the computed patterns
+ source = source.replace("\\", "/")
+
+ files = shaders.get_file_node_files(node)
+ if len(files) == 0:
+ self.log.error("No valid files found from node `%s`" % node)
+
+ # Define the resource
+ return {"node": node,
+ "attribute": attribute,
+ "source": source, # required for resources
+ "files": files} # required for resources
diff --git a/config/plugins/maya/publish/collect_maya_units.py b/config/plugins/maya/publish/collect_maya_units.py
new file mode 100644
index 0000000000..2421641d26
--- /dev/null
+++ b/config/plugins/maya/publish/collect_maya_units.py
@@ -0,0 +1,30 @@
+import maya.cmds as cmds
+import maya.mel as mel
+
+import pyblish.api
+
+
+class CollectMayaUnits(pyblish.api.ContextPlugin):
+ """Collect Maya's scene units."""
+
+ label = "Maya Units"
+ order = pyblish.api.CollectorOrder
+ hosts = ["maya"]
+
+ def process(self, context):
+
+ # Get the current linear units
+ units = cmds.currentUnit(query=True, linear=True)
+
+ # Get the current angular units ('deg' or 'rad')
+ units_angle = cmds.currentUnit(query=True, angle=True)
+
+ # Get the current time units
+ # Using the mel command is simpler than using
+ # `cmds.currentUnit(q=1, time=1)`. Otherwise we
+ # have to parse the returned string value to FPS
+ fps = mel.eval('currentTimeUnitToFPS()')
+
+ context.data['linearUnits'] = units
+ context.data['angularUnits'] = units_angle
+ context.data['fps'] = fps
diff --git a/config/plugins/maya/publish/collect_maya_workspace.py b/config/plugins/maya/publish/collect_maya_workspace.py
new file mode 100644
index 0000000000..1250ea438f
--- /dev/null
+++ b/config/plugins/maya/publish/collect_maya_workspace.py
@@ -0,0 +1,27 @@
+import os
+
+import pyblish.api
+
+from maya import cmds
+
+
+class CollectMayaWorkspace(pyblish.api.ContextPlugin):
+ """Inject the current workspace into context"""
+
+ order = pyblish.api.CollectorOrder - 0.5
+ label = "Maya Workspace"
+
+ hosts = ['maya']
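+        # The network below mirrors what V-Ray's own proxy import is assumed
+        # to build: VRayMesh drives a mesh shape for the viewport preview and
+        # a VRayMeshMaterial -> shadingEngine pair for rendering, with
+        # time1.outTime driving the current frame.
+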
+ version = (0, 1, 0)
+
+ def process(self, context):
+ workspace = cmds.workspace(rootDirectory=True, query=True)
+ if not workspace:
+ # Project has not been set. Files will
+ # instead end up next to the working file.
+ workspace = cmds.workspace(dir=True, query=True)
+
+ # Maya returns forward-slashes by default
+ normalised = os.path.normpath(workspace)
+
+ context.set_data('workspaceDir', value=normalised)
diff --git a/config/plugins/maya/publish/collect_model.py b/config/plugins/maya/publish/collect_model.py
new file mode 100644
index 0000000000..cd780acc7f
--- /dev/null
+++ b/config/plugins/maya/publish/collect_model.py
@@ -0,0 +1,26 @@
+from maya import cmds
+
+import pyblish.api
+
+
+class CollectModelData(pyblish.api.InstancePlugin):
+ """Collect model data
+
+ Ensures always only a single frame is extracted (current frame).
+
+ Note:
+ This is a workaround so that the `colorbleed.model` family can use the
+ same pointcache extractor implementation as animation and pointcaches.
+ This always enforces the "current" frame to be published.
+
+ """
+
+ order = pyblish.api.CollectorOrder + 0.499
+ label = 'Collect Model Data'
+ families = ["colorbleed.model"]
+
+ def process(self, instance):
+ # Extract only current frame (override)
+ frame = cmds.currentTime(query=True)
+ instance.data['startFrame'] = frame
+ instance.data['endFrame'] = frame
diff --git a/config/plugins/maya/publish/collect_render_layer_aovs.py b/config/plugins/maya/publish/collect_render_layer_aovs.py
new file mode 100644
index 0000000000..51e9adbd57
--- /dev/null
+++ b/config/plugins/maya/publish/collect_render_layer_aovs.py
@@ -0,0 +1,95 @@
+from maya import cmds
+
+import pyblish.api
+
+import config.apps.maya.lib as lib
+
+
+class CollectRenderLayerAOVS(pyblish.api.InstancePlugin):
+ """Validate all render layer's AOVs / Render Elements are registered in
+ the database
+
+ This validator is important to be able to Extend Frames
+
+ Technical information:
+ Each renderer uses different logic to work with render passes.
+ VRay - RenderElement
+ Simple node connection to the actual renderLayer node
+
+ Arnold - AOV:
+ Uses its own render settings node and connects an aiOAV to it
+
+ Redshift - AOV:
+ Uses its own render settings node and RedshiftAOV node. It is not
+ connected but all AOVs are enabled for all render layers by default.
+
+ """
+
+ order = pyblish.api.CollectorOrder + 0.01
+ label = "Render Elements / AOVs"
+ hosts = ["maya"]
+ families = ["colorbleed.renderlayer"]
+
+ def process(self, instance):
+
+ # Check if Extend Frames is toggled
+ if not instance.data("extendFrames", False):
+ return
+
+ # Get renderer
+ renderer = cmds.getAttr("defaultRenderGlobals.currentRenderer")
+
+ self.log.info("Renderer found: {}".format(renderer))
+
+ rp_node_types = {"vray": "VRayRenderElement",
+ "arnold": "aiAOV",
+ "redshift": "RedshiftAOV"}
+
+ if renderer not in rp_node_types.keys():
+ self.log.error("Unsupported renderer found: '{}'".format(renderer))
+ return
+
+ result = []
+
+ # Collect all AOVs / Render Elements
+ with lib.renderlayer(instance.name):
+
+ node_type = rp_node_types[renderer]
+ render_elements = cmds.ls(type=node_type)
+
+ # Check if AOVs / Render Elements are enabled
+ for element in render_elements:
+ enabled = cmds.getAttr("{}.enabled".format(element))
+ if not enabled:
+ continue
+
+ pass_name = self.get_pass_name(renderer, element)
+ render_pass = "%s.%s" % (instance.name, pass_name)
+
+ result.append(render_pass)
+
+ self.log.info("Found {} render elements / AOVs for "
+ "'{}'".format(len(result), instance.name))
+
+ instance.data["renderPasses"] = result
+
+ def get_pass_name(self, renderer, node):
+
+ if renderer == "vray":
+ vray_node_attr = next(attr for attr in cmds.listAttr(node)
+ if attr.startswith("vray_name"))
+
+ pass_type = vray_node_attr.rsplit("_", 1)[-1]
+ if pass_type == "extratex":
+ vray_node_attr = "vray_explicit_name_extratex"
+
+            # The node type is in the attribute name, but we need to check
+            # the value of the attribute as it can be changed
+ pass_name = cmds.getAttr("{}.{}".format(node, vray_node_attr))
+
+ elif renderer in ["arnold", "redshift"]:
+ pass_name = cmds.getAttr("{}.name".format(node))
+ else:
+ raise RuntimeError("Unsupported renderer: '{}'".format(renderer))
+
+ return pass_name
\ No newline at end of file
diff --git a/config/plugins/maya/publish/collect_renderlayers.py b/config/plugins/maya/publish/collect_renderlayers.py
new file mode 100644
index 0000000000..fa4ae51593
--- /dev/null
+++ b/config/plugins/maya/publish/collect_renderlayers.py
@@ -0,0 +1,191 @@
+from maya import cmds
+
+import pyblish.api
+
+from avalon import maya, api
+import config.apps.maya.lib as lib
+
+
+class CollectMayaRenderlayers(pyblish.api.ContextPlugin):
+ """Gather instances by active render layers"""
+
+ order = pyblish.api.CollectorOrder
+ hosts = ["maya"]
+ label = "Render Layers"
+
+ def process(self, context):
+
+ asset = api.Session["AVALON_ASSET"]
+ filepath = context.data["currentFile"].replace("\\", "/")
+
+ # Get render globals node
+ try:
+ render_globals = cmds.ls("renderglobalsDefault")[0]
+ except IndexError:
+ self.log.error("Cannot collect renderlayers without "
+ "renderGlobals node")
+ return
+
+ # Get start and end frame
+ start_frame = self.get_render_attribute("startFrame")
+ end_frame = self.get_render_attribute("endFrame")
+ context.data["startFrame"] = start_frame
+ context.data["endFrame"] = end_frame
+
+ # Get all valid renderlayers
+ # This is how Maya populates the renderlayer display
+ rlm_attribute = "renderLayerManager.renderLayerId"
+ connected_layers = cmds.listConnections(rlm_attribute) or []
+ valid_layers = set(connected_layers)
+
+ # Get all renderlayers and check their state
+ renderlayers = [i for i in cmds.ls(type="renderLayer") if
+ cmds.getAttr("{}.renderable".format(i)) and not
+ cmds.referenceQuery(i, isNodeReferenced=True)]
+
+ # Sort by displayOrder
+ def sort_by_display_order(layer):
+ return cmds.getAttr("%s.displayOrder" % layer)
+
+ renderlayers = sorted(renderlayers, key=sort_by_display_order)
+
+ for layer in renderlayers:
+
+ # Check if layer is in valid (linked) layers
+ if layer not in valid_layers:
+ self.log.warning("%s is invalid, skipping" % layer)
+ continue
+
+ if layer.endswith("defaultRenderLayer"):
+ layername = "masterLayer"
+ else:
+ layername = layer.split("rs_", 1)[-1]
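+                # Example (hypothetical): a Render Setup layer "rs_beauty"
+                # is published under the subset name "beauty".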
+
+ # Get layer specific settings, might be overrides
+ with lib.renderlayer(layer):
+ data = {
+ "subset": layername,
+ "setMembers": layer,
+ "publish": True,
+ "startFrame": self.get_render_attribute("startFrame"),
+ "endFrame": self.get_render_attribute("endFrame"),
+ "byFrameStep": self.get_render_attribute("byFrameStep"),
+ "renderer": self.get_render_attribute("currentRenderer"),
+
+ # instance subset
+ "family": "Render Layers",
+ "families": ["colorbleed.renderlayer"],
+ "asset": asset,
+ "time": api.time(),
+ "author": context.data["user"],
+
+ # Add source to allow tracing back to the scene from
+ # which was submitted originally
+ "source": filepath
+ }
+
+ # Apply each user defined attribute as data
+ for attr in cmds.listAttr(layer, userDefined=True) or list():
+ try:
+ value = cmds.getAttr("{}.{}".format(layer, attr))
+ except Exception:
+ # Some attributes cannot be read directly,
+ # such as mesh and color attributes. These
+ # are considered non-essential to this
+ # particular publishing pipeline.
+ value = None
+
+ data[attr] = value
+
+ # Include (optional) global settings
+ # TODO(marcus): Take into account layer overrides
+ # Get global overrides and translate to Deadline values
+ overrides = self.parse_options(render_globals)
+ data.update(**overrides)
+
+ instance = context.create_instance(layername)
+ instance.data.update(data)
+
+ def get_render_attribute(self, attr):
+ return cmds.getAttr("defaultRenderGlobals.{}".format(attr))
+
+ def parse_options(self, render_globals):
+ """Get all overrides with a value, skip those without
+
+ Here's the kicker. These globals override defaults in the submission
+        integrator, but an empty value means no override is applied.
+ Otherwise, Frames would override the default frames set under globals.
+
+ Args:
+ render_globals (str): collection of render globals
+
+ Returns:
+ dict: only overrides with values
+ """
+
+ attributes = maya.read(render_globals)
+
+ options = {"renderGlobals": {}}
+ options["renderGlobals"]["Priority"] = attributes["priority"]
+
+ # Check for specific pools
+ pool_a, pool_b = self._discover_pools(attributes)
+ options["renderGlobals"].update({"Pool": pool_a})
+ if pool_b:
+ options["renderGlobals"].update({"SecondaryPool": pool_b})
+
+ legacy = attributes["useLegacyRenderLayers"]
+ options["renderGlobals"]["UseLegacyRenderLayers"] = legacy
+
+ # Machine list
+ machine_list = attributes["machineList"]
+ if machine_list:
+ key = "Whitelist" if attributes["whitelist"] else "Blacklist"
+ options['renderGlobals'][key] = machine_list
+
+ # Suspend publish job
+ state = "Suspended" if attributes["suspendPublishJob"] else "Active"
+ options["publishJobState"] = state
+
+ chunksize = attributes.get("framesPerTask", 1)
+ options["renderGlobals"]["ChunkSize"] = chunksize
+
+ # Override frames should be False if extendFrames is False. This is
+ # to ensure it doesn't go off doing crazy unpredictable things
+ override_frames = False
+ extend_frames = attributes.get("extendFrames", False)
+ if extend_frames:
+ override_frames = attributes.get("overrideExistingFrame", False)
+
+ options["extendFrames"] = extend_frames
+ options["overrideExistingFrame"] = override_frames
+
+ maya_render_plugin = "MayaBatch"
+ if not attributes.get("useMayaBatch", True):
+ maya_render_plugin = "MayaCmd"
+
+ options["mayaRenderPlugin"] = maya_render_plugin
+
+ return options
+
+ def _discover_pools(self, attributes):
+
+ pool_a = None
+ pool_b = None
+
+ # Check for specific pools
+ if "primaryPool" in attributes:
+ pool_a = attributes["primaryPool"]
+ pool_b = attributes["secondaryPool"]
+
+ else:
+ # Backwards compatibility
+ pool_str = attributes.get("pools", None)
+ if pool_str:
+ pool_a, pool_b = pool_str.split(";")
+
+ # Ensure empty entry token is caught
+ if pool_b == "-":
+ pool_b = None
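+            # Example (hypothetical): a legacy "pools" value of
+            # "renderfarm;-" yields ("renderfarm", None), while
+            # "renderfarm;gpu" yields ("renderfarm", "gpu").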
+
+ return pool_a, pool_b
diff --git a/config/plugins/maya/publish/collect_setdress.py b/config/plugins/maya/publish/collect_setdress.py
new file mode 100644
index 0000000000..0d17f70020
--- /dev/null
+++ b/config/plugins/maya/publish/collect_setdress.py
@@ -0,0 +1,91 @@
+from collections import defaultdict
+import pyblish.api
+
+from maya import cmds, mel
+from avalon import maya as avalon
+from config.apps.maya import lib
+
+# TODO : Publish of setdress: -unique namespace for all assets, VALIDATOR!
+
+
+class CollectSetDress(pyblish.api.InstancePlugin):
+ """Collect all relevant setdress items
+
+ Collected data:
+
+ * File name
+ * Compatible loader
+ * Matrix per instance
+ * Namespace
+
+ Note: GPU caches are currently not supported in the pipeline. There is no
+ logic yet which supports the swapping of GPU cache to renderable objects.
+
+ """
+
+ order = pyblish.api.CollectorOrder + 0.49
+ label = "Set Dress"
+ families = ["colorbleed.setdress"]
+
+ def process(self, instance):
+
+ # Find containers
+ containers = avalon.ls()
+
+ # Get all content from the instance
+ instance_lookup = set(cmds.ls(instance, type="transform", long=True))
+ data = defaultdict(list)
+
+ hierarchy_nodes = []
+ for container in containers:
+
+ root = lib.get_container_transforms(container, root=True)
+ if not root or root not in instance_lookup:
+ continue
+
+ # Retrieve the hierarchy
+ parent = cmds.listRelatives(root, parent=True, fullPath=True)[0]
+ hierarchy_nodes.append(parent)
+
+            # Temporary warning for GPU caches, which are not supported yet
+            loader = container["loader"]
+            if loader == "GpuCacheLoader":
+                self.log.warning("GPU Cache Loader is currently not supported "
+                                 "in the pipeline, but we will export it anyway")
+
+ # Gather info for new data entry
+ representation_id = container["representation"]
+ instance_data = {"loader": loader,
+ "parent": parent,
+ "namespace": container["namespace"]}
+
+ # Check if matrix differs from default and store changes
+ matrix_data = self.get_matrix_data(root)
+ if matrix_data:
+ instance_data["matrix"] = matrix_data
+
+ data[representation_id].append(instance_data)
+
+ instance.data["scenedata"] = dict(data)
+ instance.data["hierarchy"] = list(set(hierarchy_nodes))
+
+ def get_file_rule(self, rule):
+ return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule))
+
+ def get_matrix_data(self, node):
+ """Get the matrix of all members when they are not default
+
+ Each matrix which differs from the default will be stored in a
+ dictionary
+
+ Args:
+ members (list): list of transform nmodes
+ Returns:
+ dict
+ """
+
+ matrix = cmds.xform(node, query=True, matrix=True)
+ if matrix == lib.DEFAULT_MATRIX:
+ return
+
+ return matrix
diff --git a/config/plugins/maya/publish/collect_yeti_cache.py b/config/plugins/maya/publish/collect_yeti_cache.py
new file mode 100644
index 0000000000..266c2c48da
--- /dev/null
+++ b/config/plugins/maya/publish/collect_yeti_cache.py
@@ -0,0 +1,64 @@
+from maya import cmds
+
+import pyblish.api
+
+from config.apps.maya import lib
+
+SETTINGS = {"renderDensity",
+ "renderWidth",
+ "renderLength",
+ "increaseRenderBounds",
+ "imageSearchPath",
+ "cbId"}
+
+
+class CollectYetiCache(pyblish.api.InstancePlugin):
+ """Collect all information of the Yeti caches
+
+ The information contains the following attributes per Yeti node
+
+ - "renderDensity"
+ - "renderWidth"
+ - "renderLength"
+ - "increaseRenderBounds"
+ - "imageSearchPath"
+
+    Other information is the name of the transform and its Colorbleed ID
+ """
+
+ order = pyblish.api.CollectorOrder + 0.45
+ label = "Collect Yeti Cache"
+ families = ["colorbleed.yetiRig", "colorbleed.yeticache"]
+ hosts = ["maya"]
+ tasks = ["animation", "fx"]
+
+ def process(self, instance):
+
+ # Collect fur settings
+ settings = {"nodes": []}
+
+ # Get yeti nodes and their transforms
+ yeti_shapes = cmds.ls(instance, type="pgYetiMaya")
+ for shape in yeti_shapes:
+ shape_data = {"transform": None,
+ "name": shape,
+ "cbId": lib.get_id(shape),
+ "attrs": None}
+
+ # Get specific node attributes
+ attr_data = {}
+ for attr in SETTINGS:
+ current = cmds.getAttr("%s.%s" % (shape, attr))
+ attr_data[attr] = current
+
+ # Get transform data
+ parent = cmds.listRelatives(shape, parent=True)[0]
+ transform_data = {"name": parent, "cbId": lib.get_id(parent)}
+
+ # Store collected data
+ shape_data["attrs"] = attr_data
+ shape_data["transform"] = transform_data
+
+ settings["nodes"].append(shape_data)
+
+ instance.data["fursettings"] = settings
diff --git a/config/plugins/maya/publish/collect_yeti_rig.py b/config/plugins/maya/publish/collect_yeti_rig.py
new file mode 100644
index 0000000000..ec9fffe693
--- /dev/null
+++ b/config/plugins/maya/publish/collect_yeti_rig.py
@@ -0,0 +1,156 @@
+import os
+import re
+
+from maya import cmds
+
+import pyblish.api
+
+from config.apps.maya import lib
+
+
+SETTINGS = {"renderDensity",
+ "renderWidth",
+ "renderLength",
+ "increaseRenderBounds",
+ "imageSearchPath",
+ "cbId"}
+
+
+class CollectYetiRig(pyblish.api.InstancePlugin):
+ """Collect all information of the Yeti Rig"""
+
+ order = pyblish.api.CollectorOrder + 0.4
+ label = "Collect Yeti Rig"
+ families = ["colorbleed.yetiRig"]
+ hosts = ["maya"]
+
+ def process(self, instance):
+
+ assert "input_SET" in cmds.sets(instance.name, query=True), (
+ "Yeti Rig must have an input_SET")
+
+ # Get the input meshes information
+ input_content = cmds.sets("input_SET", query=True)
+ input_nodes = cmds.listRelatives(input_content,
+ allDescendents=True,
+ fullPath=True) or input_content
+
+ # Get all the shapes
+ input_shapes = cmds.ls(input_nodes, long=True, noIntermediate=True)
+
+ # Store all connections
+ connections = cmds.listConnections(input_shapes,
+ source=True,
+ destination=False,
+ connections=True,
+ plugs=True) or []
+
+        # Group into (source, destination) pairs. The pairs are reversed
+        # because the list starts with the queried shape's plug, which is
+        # the destination of the connection.
+ grouped = [(connections[i+1], item) for i, item in
+ enumerate(connections) if i % 2 == 0]
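+        # Example (hypothetical plugs): ["|body_GEOShape.inMesh",
+        # "skinCluster1.outputGeometry[0]", ...] pairs up as
+        # [("skinCluster1.outputGeometry[0]", "|body_GEOShape.inMesh"), ...]
+        # so each tuple reads (source plug, destination plug).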
+
+ inputs = []
+ for src, dest in grouped:
+ source_node, source_attr = src.split(".", 1)
+ dest_node, dest_attr = dest.split(".", 1)
+
+ inputs.append({"connections": [source_attr, dest_attr],
+ "sourceID": lib.get_id(source_node),
+ "destinationID": lib.get_id(dest_node)})
+
+ # Collect any textures if used
+ yeti_resources = []
+ yeti_nodes = cmds.ls(instance[:], type="pgYetiMaya")
+ for node in yeti_nodes:
+ # Get Yeti resources (textures)
+ # TODO: referenced files in Yeti Graph
+ resources = self.get_yeti_resources(node)
+ yeti_resources.extend(resources)
+
+ instance.data["rigsettings"] = {"inputs": inputs}
+
+ instance.data["resources"] = yeti_resources
+
+ # Force frame range for export
+ instance.data["startFrame"] = 1
+ instance.data["endFrame"] = 1
+
+ def get_yeti_resources(self, node):
+ """Get all texture file paths
+
+ If a texture is a sequence it gathers all sibling files to ensure
+ the texture sequence is complete.
+
+ Args:
+ node (str): node name of the pgYetiMaya node
+
+ Returns:
+ list
+ """
+ resources = []
+ image_search_path = cmds.getAttr("{}.imageSearchPath".format(node))
+ texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
+
+ if texture_filenames and not image_search_path:
+ raise ValueError("pgYetiMaya node '%s' is missing the path to the "
+ "files in the 'imageSearchPath "
+ "atttribute'" % node)
+
+ for texture in texture_filenames:
+ node_resources = {"files": [], "source": texture, "node": node}
+ texture_filepath = os.path.join(image_search_path, texture)
+ if len(texture.split(".")) > 2:
+
+ # For UDIM based textures (tiles)
+ if "" in texture:
+ sequences = self.get_sequence(texture_filepath,
+ pattern="")
+ node_resources["files"].extend(sequences)
+
+ # Based textures (animated masks f.e)
+ elif "%04d" in texture:
+ sequences = self.get_sequence(texture_filepath,
+ pattern="%04d")
+ node_resources["files"].extend(sequences)
+ # Assuming it is a fixed name
+ else:
+ node_resources["files"].append(texture_filepath)
+ else:
+ node_resources["files"].append(texture_filepath)
+
+ resources.append(node_resources)
+
+ return resources
+
+ def get_sequence(self, filename, pattern="%04d"):
+ """Get sequence from filename
+
+ Supports negative frame ranges like -001, 0000, 0001 and -0001,
+ 0000, 0001.
+
+ Arguments:
+ filename (str): The full path to filename containing the given
+ pattern.
+ pattern (str): The pattern to swap with the variable frame number.
+
+ Returns:
+ list: file sequence.
+
+ """
+
+ from avalon.vendor import clique
+
+ escaped = re.escape(filename)
+ re_pattern = escaped.replace(pattern, "-?[0-9]+")
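+        # Example (hypothetical): "mask.%04d.tif" becomes a regex that
+        # matches "mask.0001.tif" as well as negative frames like
+        # "mask.-001.tif".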
+
+ source_dir = os.path.dirname(filename)
+ files = [f for f in os.listdir(source_dir)
+ if re.match(re_pattern, f)]
+
+ pattern = [clique.PATTERNS["frames"]]
+ collection, remainder = clique.assemble(files,
+ patterns=pattern)
+
+ return collection
diff --git a/config/plugins/maya/publish/extract_animation.py b/config/plugins/maya/publish/extract_animation.py
new file mode 100644
index 0000000000..df1e2bbf97
--- /dev/null
+++ b/config/plugins/maya/publish/extract_animation.py
@@ -0,0 +1,78 @@
+import os
+
+from maya import cmds
+
+import avalon.maya
+import colorbleed.api
+from config.apps.maya.lib import extract_alembic
+
+
+class ExtractColorbleedAnimation(colorbleed.api.Extractor):
+ """Produce an alembic of just point positions and normals.
+
+ Positions and normals, uvs, creases are preserved, but nothing more,
+ for plain and predictable point caches.
+
+ """
+
+ label = "Extract Animation"
+ hosts = ["maya"]
+ families = ["colorbleed.animation"]
+
+ def process(self, instance):
+
+ # Collect the out set nodes
+ out_sets = [node for node in instance if node.endswith("out_SET")]
+ if len(out_sets) != 1:
+ raise RuntimeError("Couldn't find exactly one out_SET: "
+ "{0}".format(out_sets))
+ out_set = out_sets[0]
+ nodes = cmds.sets(out_set, query=True)
+
+ # Include all descendants
+ nodes += cmds.listRelatives(nodes,
+ allDescendents=True,
+ fullPath=True) or []
+
+ # Collect the start and end including handles
+ start = instance.data["startFrame"]
+ end = instance.data["endFrame"]
+ handles = instance.data.get("handles", 0)
+ if handles:
+ start -= handles
+ end += handles
+
+ self.log.info("Extracting animation..")
+        dirname = self.staging_dir(instance)
+        filename = "{name}.abc".format(**instance.data)
+        path = os.path.join(dirname, filename)
+
+ options = {
+ "step": instance.data.get("step", 1.0),
+ "attr": ["cbId"],
+ "writeVisibility": True,
+ "writeCreases": True,
+ "uvWrite": True,
+ "selection": True
+ }
+
+ if int(cmds.about(version=True)) >= 2017:
+ # Since Maya 2017 alembic supports multiple uv sets - write them.
+ options["writeUVSets"] = True
+
+ with avalon.maya.suspended_refresh():
+ with avalon.maya.maintained_selection():
+ cmds.select(nodes, noExpand=True)
+ extract_alembic(file=path,
+ startFrame=start,
+ endFrame=end,
+ **options)
+
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+
+ instance.data["files"].append(filename)
+
+ self.log.info("Extracted {} to {}".format(instance, dirname))
diff --git a/config/plugins/maya/publish/extract_camera_alembic.py b/config/plugins/maya/publish/extract_camera_alembic.py
new file mode 100644
index 0000000000..2b18ced96f
--- /dev/null
+++ b/config/plugins/maya/publish/extract_camera_alembic.py
@@ -0,0 +1,79 @@
+import os
+
+from maya import cmds
+
+import avalon.maya
+import colorbleed.api
+
+import cb.utils.maya.context as context
+
+
+class ExtractCameraAlembic(colorbleed.api.Extractor):
+ """Extract a Camera as Alembic.
+
+    The camera gets baked to world space by default. Only when the instance's
+    `bakeToWorldSpace` is set to False will it include its full hierarchy.
+
+ """
+
+ label = "Camera (Alembic)"
+ hosts = ["maya"]
+ families = ["colorbleed.camera"]
+
+ def process(self, instance):
+
+ # get settings
+ framerange = [instance.data.get("startFrame", 1),
+ instance.data.get("endFrame", 1)]
+ handles = instance.data.get("handles", 0)
+ step = instance.data.get("step", 1.0)
+ bake_to_worldspace = instance.data("bakeToWorldSpace", True)
+
+ # get cameras
+ members = instance.data['setMembers']
+ cameras = cmds.ls(members, leaf=True, shapes=True, long=True,
+ dag=True, type="camera")
+
+ # validate required settings
+        assert len(cameras) == 1, "Expected exactly one camera in extraction"
+ assert isinstance(step, float), "Step must be a float value"
+ camera = cameras[0]
+
+ # Define extract output file path
+ dir_path = self.staging_dir(instance)
+ filename = "{0}.abc".format(instance.name)
+ path = os.path.join(dir_path, filename)
+
+ # Perform alembic extraction
+ with avalon.maya.maintained_selection():
+ cmds.select(camera, replace=True, noExpand=True)
+
+ # Enforce forward slashes for AbcExport because we're
+ # embedding it into a job string
+ path = path.replace("\\", "/")
+
+ job_str = ' -selection -dataFormat "ogawa" '
+ job_str += ' -attrPrefix cb'
+ job_str += ' -frameRange {0} {1} '.format(framerange[0] - handles,
+ framerange[1] + handles)
+ job_str += ' -step {0} '.format(step)
+
+ if bake_to_worldspace:
+ transform = cmds.listRelatives(camera,
+ parent=True,
+ fullPath=True)[0]
+ job_str += ' -worldSpace -root {0}'.format(transform)
+
+ job_str += ' -file "{0}"'.format(path)
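+
+            # Example (hypothetical) of a resulting job string:
+            #   -selection -dataFormat "ogawa" -attrPrefix cb
+            #   -frameRange 1001 1100 -step 1.0 -worldSpace
+            #   -root |shot_cam -file "C:/stage/shot_cam.abc"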
+
+ with context.evaluation("off"):
+ with context.no_refresh():
+ cmds.AbcExport(j=job_str, verbose=False)
+
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+
+ instance.data["files"].append(filename)
+
+ self.log.info("Extracted instance '{0}' to: {1}".format(
+ instance.name, path))
diff --git a/config/plugins/maya/publish/extract_camera_mayaAscii.py b/config/plugins/maya/publish/extract_camera_mayaAscii.py
new file mode 100644
index 0000000000..c2c1f32b01
--- /dev/null
+++ b/config/plugins/maya/publish/extract_camera_mayaAscii.py
@@ -0,0 +1,134 @@
+import os
+
+from maya import cmds
+
+import avalon.maya
+import colorbleed.api
+
+import cb.utils.maya.context as context
+from cb.utils.maya.animation import bakeToWorldSpace
+
+
+def massage_ma_file(path):
+ """Clean up .ma file for backwards compatibility.
+
+ Massage the .ma of baked camera to stay
+ backwards compatible with older versions
+ of Fusion (6.4)
+
+ """
+ # Get open file's lines
+ f = open(path, "r+")
+ lines = f.readlines()
+ f.seek(0) # reset to start of file
+
+ # Rewrite the file
+ for line in lines:
+ # Skip all 'rename -uid' lines
+ stripped = line.strip()
+ if stripped.startswith("rename -uid "):
+ continue
+
+ f.write(line)
+
+ f.truncate() # remove remainder
+ f.close()
+
+
+class ExtractCameraMayaAscii(colorbleed.api.Extractor):
+ """Extract a Camera as Maya Ascii.
+
+ This will create a duplicate of the camera that will be baked *with*
+ substeps and handles for the required frames. This temporary duplicate
+ will be published.
+
+    The camera gets baked to world space by default. Only when the instance's
+    `bakeToWorldSpace` is set to False will it include its full hierarchy.
+
+ Note:
+ The extracted Maya ascii file gets "massaged" removing the uuid values
+ so they are valid for older versions of Fusion (e.g. 6.4)
+
+ """
+
+ label = "Camera (Maya Ascii)"
+ hosts = ["maya"]
+ families = ["colorbleed.camera"]
+
+ def process(self, instance):
+
+ # get settings
+ framerange = [instance.data.get("startFrame", 1),
+ instance.data.get("endFrame", 1)]
+ handles = instance.data.get("handles", 0)
+ step = instance.data.get("step", 1.0)
+ bake_to_worldspace = instance.data("bakeToWorldSpace", True)
+
+ # TODO: Implement a bake to non-world space
+ # Currently it will always bake the resulting camera to world-space
+ # and it does not allow to include the parent hierarchy, even though
+ # with `bakeToWorldSpace` set to False it should include its hierarchy
+ # to be correct with the family implementation.
+ if not bake_to_worldspace:
+ self.log.warning("Camera (Maya Ascii) export only supports world"
+ "space baked camera extractions. The disabled "
+ "bake to world space is ignored...")
+
+ # get cameras
+ members = instance.data['setMembers']
+ cameras = cmds.ls(members, leaf=True, shapes=True, long=True,
+ dag=True, type="camera")
+
+ range_with_handles = [framerange[0] - handles,
+ framerange[1] + handles]
+
+ # validate required settings
+        assert len(cameras) == 1, "Expected exactly one camera in extraction"
+ assert isinstance(step, float), "Step must be a float value"
+ camera = cameras[0]
+ transform = cmds.listRelatives(camera, parent=True, fullPath=True)
+
+ # Define extract output file path
+ dir_path = self.staging_dir(instance)
+ filename = "{0}.ma".format(instance.name)
+ path = os.path.join(dir_path, filename)
+
+ # Perform extraction
+ self.log.info("Performing camera bakes for: {0}".format(transform))
+ with avalon.maya.maintained_selection():
+ with context.evaluation("off"):
+ with context.no_refresh():
+ baked = bakeToWorldSpace(transform,
+ frameRange=range_with_handles,
+ step=step)
+ baked_shapes = cmds.ls(baked,
+ type="camera",
+ dag=True,
+ shapes=True,
+ long=True)
+
+ self.log.info("Performing extraction..")
+ cmds.select(baked_shapes, noExpand=True)
+ cmds.file(path,
+ force=True,
+ typ="mayaAscii",
+ exportSelected=True,
+ preserveReferences=False,
+ constructionHistory=False,
+ channels=True, # allow animation
+ constraints=False,
+ shader=False,
+ expressions=False)
+
+ # Delete the baked hierarchy
+ cmds.delete(baked)
+
+ massage_ma_file(path)
+
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+
+ instance.data["files"].append(filename)
+
+ self.log.info("Extracted instance '{0}' to: {1}".format(
+ instance.name, path))
diff --git a/config/plugins/maya/publish/extract_look.py b/config/plugins/maya/publish/extract_look.py
new file mode 100644
index 0000000000..6308ca1fec
--- /dev/null
+++ b/config/plugins/maya/publish/extract_look.py
@@ -0,0 +1,97 @@
+import os
+import json
+from collections import OrderedDict
+
+from maya import cmds
+
+import pyblish.api
+import avalon.maya
+import colorbleed.api
+import config.apps.maya.lib as maya
+
+from cb.utils.maya import context
+
+
+class ExtractLook(colorbleed.api.Extractor):
+ """Extract Look (Maya Ascii + JSON)
+
+    Only extracts the sets (shadingEngines and the like) alongside a .json
+    file that stores the relationships for the sets and the "attribute"
+    data for the instance members.
+ instance members.
+
+ """
+
+ label = "Extract Look (Maya ASCII + JSON)"
+ hosts = ["maya"]
+ families = ["colorbleed.look"]
+ order = pyblish.api.ExtractorOrder + 0.2
+
+ def process(self, instance):
+
+ # Define extract output file path
+ dir_path = self.staging_dir(instance)
+ maya_fname = "{0}.ma".format(instance.name)
+ json_fname = "{0}.json".format(instance.name)
+
+ # Make texture dump folder
+ maya_path = os.path.join(dir_path, maya_fname)
+ json_path = os.path.join(dir_path, json_fname)
+
+ self.log.info("Performing extraction..")
+
+ # Remove all members of the sets so they are not included in the
+ # exported file by accident
+ self.log.info("Extract sets (Maya ASCII) ...")
+ lookdata = instance.data["lookData"]
+ relationships = lookdata["relationships"]
+ sets = relationships.keys()
+
+ resources = instance.data["resources"]
+
+ remap = OrderedDict() # needs to be ordered, see color space values
+ for resource in resources:
+ attr = resource['attribute']
+ remap[attr] = resource['destination']
+
+            # Preserve color space values (force value after filepath change)
+            # The attributes are restored in the same order at the end of the
+            # context, so the original values remain after extraction.
+ color_space_attr = resource['node'] + ".colorSpace"
+ remap[color_space_attr] = cmds.getAttr(color_space_attr)
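+
+        # The remap now interleaves entries per resource, for example
+        # (attribute names and values illustrative):
+        #   file1.fileTextureName -> "<destination>/diffuse.png"
+        #   file1.colorSpace      -> "sRGB"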
+
+ self.log.info("Finished remapping destinations ...")
+
+ # Extract in correct render layer
+ layer = instance.data.get("renderlayer", "defaultRenderLayer")
+ with context.renderlayer(layer):
+ # TODO: Ensure membership edits don't become renderlayer overrides
+ with context.empty_sets(sets, force=True):
+ with maya.attribute_values(remap):
+ with avalon.maya.maintained_selection():
+ cmds.select(sets, noExpand=True)
+ cmds.file(maya_path,
+ force=True,
+ typ="mayaAscii",
+ exportSelected=True,
+ preserveReferences=False,
+ channels=True,
+ constraints=True,
+ expressions=True,
+ constructionHistory=True)
+
+ # Write the JSON data
+ self.log.info("Extract json..")
+ data = {"attributes": lookdata["attributes"],
+ "relationships": relationships}
+
+ with open(json_path, "w") as f:
+ json.dump(data, f)
+
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+
+ instance.data["files"].append(maya_fname)
+ instance.data["files"].append(json_fname)
+
+ self.log.info("Extracted instance '%s' to: %s" % (instance.name,
+ maya_path))
diff --git a/config/plugins/maya/publish/extract_maya_ascii_raw.py b/config/plugins/maya/publish/extract_maya_ascii_raw.py
new file mode 100644
index 0000000000..f447f25851
--- /dev/null
+++ b/config/plugins/maya/publish/extract_maya_ascii_raw.py
@@ -0,0 +1,54 @@
+import os
+
+from maya import cmds
+
+import avalon.maya
+import colorbleed.api
+
+
+class ExtractMayaAsciiRaw(colorbleed.api.Extractor):
+ """Extract as Maya Ascii (raw)
+
+ This will preserve all references, construction history, etc.
+
+ """
+
+ label = "Maya ASCII (Raw)"
+ hosts = ["maya"]
+ families = ["config.apps.mayaAscii"]
+
+ def process(self, instance):
+
+ # Define extract output file path
+ dir_path = self.staging_dir(instance)
+ filename = "{0}.ma".format(instance.name)
+ path = os.path.join(dir_path, filename)
+
+ # Whether to include all nodes in the instance (including those from
+ # history) or only use the exact set members
+ members_only = instance.data.get("exactSetMembersOnly", False)
+ if members_only:
+ members = instance.data.get("setMembers", list())
+ if not members:
+ raise RuntimeError("Can't export 'exact set members only' "
+ "when set is empty.")
+ else:
+ members = instance[:]
+
+ # Perform extraction
+ self.log.info("Performing extraction..")
+ with avalon.maya.maintained_selection():
+ cmds.select(members, noExpand=True)
+ cmds.file(path,
+ force=True,
+ typ="mayaAscii",
+ exportSelected=True,
+ preserveReferences=True,
+ constructionHistory=True)
+
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+
+ instance.data["files"].append(filename)
+
+ self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))
diff --git a/config/plugins/maya/publish/extract_model.py b/config/plugins/maya/publish/extract_model.py
new file mode 100644
index 0000000000..e1be53d59a
--- /dev/null
+++ b/config/plugins/maya/publish/extract_model.py
@@ -0,0 +1,78 @@
+import os
+
+from maya import cmds
+
+import avalon.maya
+import colorbleed.api
+
+from cb.utils.maya import context
+
+
+class ExtractModel(colorbleed.api.Extractor):
+ """Extract as Model (Maya Ascii)
+
+    Only extracts contents based on the original "setMembers" data to ensure
+    that only the required shapes are published. From those members it takes
+    only the shapes that are not intermediateObjects.
+
+ During export it sets a temporary context to perform a clean extraction.
+ The context ensures:
+ - Smooth preview is turned off for the geometry
+ - Default shader is assigned (no materials are exported)
+        - Display layers are removed
+
+ """
+
+ label = "Model (Maya ASCII)"
+ hosts = ["maya"]
+ families = ["colorbleed.model"]
+
+ def process(self, instance):
+
+ # Define extract output file path
+ stagingdir = self.staging_dir(instance)
+ filename = "{0}.ma".format(instance.name)
+ path = os.path.join(stagingdir, filename)
+
+ # Perform extraction
+ self.log.info("Performing extraction..")
+
+ # Get only the shape contents we need in such a way that we avoid
+ # taking along intermediateObjects
+        members = instance.data["setMembers"]
+ members = cmds.ls(members,
+ dag=True,
+ shapes=True,
+ type=("mesh", "nurbsCurve"),
+ noIntermediate=True,
+ long=True)
+
+ with context.no_display_layers(instance):
+ with context.displaySmoothness(members,
+ divisionsU=0,
+ divisionsV=0,
+ pointsWire=4,
+ pointsShaded=1,
+ polygonObject=1):
+ with context.shader(members,
+ shadingEngine="initialShadingGroup"):
+ with avalon.maya.maintained_selection():
+ cmds.select(members, noExpand=True)
+ cmds.file(path,
+ force=True,
+ typ="mayaAscii",
+ exportSelected=True,
+ preserveReferences=False,
+ channels=False,
+ constraints=False,
+ expressions=False,
+ constructionHistory=False)
+
+ # Store reference for integration
+
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+
+ instance.data["files"].append(filename)
+
+ self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))
diff --git a/config/plugins/maya/publish/extract_pointcache.py b/config/plugins/maya/publish/extract_pointcache.py
new file mode 100644
index 0000000000..d9dee9e0e7
--- /dev/null
+++ b/config/plugins/maya/publish/extract_pointcache.py
@@ -0,0 +1,74 @@
+import os
+
+from maya import cmds
+
+import avalon.maya
+import colorbleed.api
+from config.apps.maya.lib import extract_alembic
+
+
+class ExtractColorbleedAlembic(colorbleed.api.Extractor):
+ """Produce an alembic of just point positions and normals.
+
+ Positions and normals, uvs, creases are preserved, but nothing more,
+ for plain and predictable point caches.
+
+ """
+
+ label = "Extract Pointcache (Alembic)"
+ hosts = ["maya"]
+ families = ["colorbleed.pointcache",
+ "colorbleed.model"]
+
+ def process(self, instance):
+
+ nodes = instance[:]
+
+ # Collect the start and end including handles
+ start = instance.data.get("startFrame", 1)
+ end = instance.data.get("endFrame", 1)
+ handles = instance.data.get("handles", 0)
+ if handles:
+ start -= handles
+ end += handles
+
+ # Get extra export arguments
+ writeColorSets = instance.data.get("writeColorSets", False)
+
+ self.log.info("Extracting animation..")
+ dirname = self.staging_dir(instance)
+
+ self.log.info("nodes: %s" % str(nodes))
+
+ parent_dir = self.staging_dir(instance)
+ filename = "{name}.abc".format(**instance.data)
+ path = os.path.join(parent_dir, filename)
+
+ options = {
+ "step": instance.data.get("step", 1.0),
+ "attr": ["cbId"],
+ "writeVisibility": True,
+ "writeCreases": True,
+ "writeColorSets": writeColorSets,
+ "uvWrite": True,
+ "selection": True
+ }
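+        # Assuming `extract_alembic` forwards these options to Maya's
+        # AbcExport job string, the options above would roughly map to
+        # flags like: -step 1.0 -attr cbId -writeVisibility -writeCreases
+        # -uvWrite -sel (a sketch of the mapping, not verified here)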
+
+ if int(cmds.about(version=True)) >= 2017:
+ # Since Maya 2017 alembic supports multiple uv sets - write them.
+ options["writeUVSets"] = True
+
+ with avalon.maya.suspended_refresh():
+ with avalon.maya.maintained_selection():
+ cmds.select(nodes, noExpand=True)
+ extract_alembic(file=path,
+ startFrame=start,
+ endFrame=end,
+ **options)
+
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+
+ instance.data["files"].append(filename)
+
+ self.log.info("Extracted {} to {}".format(instance, dirname))
diff --git a/config/plugins/maya/publish/extract_rig.py b/config/plugins/maya/publish/extract_rig.py
new file mode 100644
index 0000000000..7db6adfe43
--- /dev/null
+++ b/config/plugins/maya/publish/extract_rig.py
@@ -0,0 +1,42 @@
+import os
+
+from maya import cmds
+
+import avalon.maya
+import colorbleed.api
+
+
+class ExtractColorbleedRig(colorbleed.api.Extractor):
+ """Extract rig as Maya Ascii"""
+
+ label = "Extract Rig (Maya ASCII)"
+ hosts = ["maya"]
+ families = ["colorbleed.rig"]
+
+ def process(self, instance):
+
+ # Define extract output file path
+ dir_path = self.staging_dir(instance)
+ filename = "{0}.ma".format(instance.name)
+ path = os.path.join(dir_path, filename)
+
+ # Perform extraction
+ self.log.info("Performing extraction..")
+ with avalon.maya.maintained_selection():
+ cmds.select(instance, noExpand=True)
+ cmds.file(path,
+ force=True,
+ typ="mayaAscii",
+ exportSelected=True,
+ preserveReferences=False,
+ channels=True,
+ constraints=True,
+ expressions=True,
+ constructionHistory=True)
+
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+
+ instance.data["files"].append(filename)
+
+ self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))
diff --git a/config/plugins/maya/publish/extract_setdress.py b/config/plugins/maya/publish/extract_setdress.py
new file mode 100644
index 0000000000..4d791c8055
--- /dev/null
+++ b/config/plugins/maya/publish/extract_setdress.py
@@ -0,0 +1,54 @@
+import json
+
+import os
+
+import colorbleed.api
+from config.apps.maya.lib import extract_alembic
+
+from maya import cmds
+
+
+class ExtractSetDress(colorbleed.api.Extractor):
+    """Extract the set dress hierarchy as Alembic with scene data (JSON).
+
+    The Alembic file holds the root hierarchy of the set dress while the
+    JSON file stores the scene data required to rebuild its contents.
+
+    """
+
+ label = "Extract Set Dress"
+ hosts = ["maya"]
+ families = ["colorbleed.setdress"]
+
+ def process(self, instance):
+
+ parent_dir = self.staging_dir(instance)
+ hierarchy_filename = "{}.abc".format(instance.name)
+ hierarchy_path = os.path.join(parent_dir, hierarchy_filename)
+ json_filename = "{}.json".format(instance.name)
+ json_path = os.path.join(parent_dir, json_filename)
+
+ self.log.info("Dumping scene data for debugging ..")
+ with open(json_path, "w") as filepath:
+ json.dump(instance.data["scenedata"], filepath, ensure_ascii=False)
+
+ self.log.info("Extracting point cache ..")
+ cmds.select(instance.data["hierarchy"])
+
+ # Run basic alembic exporter
+ extract_alembic(file=hierarchy_path,
+ startFrame=1.0,
+ endFrame=1.0,
+ **{"step": 1.0,
+ "attr": ["cbId"],
+ "writeVisibility": True,
+ "writeCreases": True,
+ "uvWrite": True,
+ "selection": True})
+
+ instance.data["files"] = [json_filename, hierarchy_filename]
+
+ # Remove data
+ instance.data.pop("scenedata", None)
+
+ cmds.select(clear=True)
diff --git a/config/plugins/maya/publish/extract_vrayproxy.py b/config/plugins/maya/publish/extract_vrayproxy.py
new file mode 100644
index 0000000000..f1190d52ab
--- /dev/null
+++ b/config/plugins/maya/publish/extract_vrayproxy.py
@@ -0,0 +1,60 @@
+import os
+
+import avalon.maya
+import colorbleed.api
+
+from maya import cmds
+
+
+class ExtractVRayProxy(colorbleed.api.Extractor):
+ """Extract the content of the instance to a vrmesh file
+
+ Things to pay attention to:
+        - If animation is toggled, verify the exported frames are correct
+ """
+
+ label = "VRay Proxy (.vrmesh)"
+ hosts = ["maya"]
+ families = ["colorbleed.vrayproxy"]
+
+ def process(self, instance):
+
+ staging_dir = self.staging_dir(instance)
+ file_name = "{}.vrmesh".format(instance.name)
+ file_path = os.path.join(staging_dir, file_name)
+
+ anim_on = instance.data["animation"]
+ if not anim_on:
+ # Remove animation information because it is not required for
+ # non-animated subsets
+ instance.data.pop("startFrame", None)
+ instance.data.pop("endFrame", None)
+
+ start_frame = 1
+ end_frame = 1
+ else:
+ start_frame = instance.data["startFrame"]
+ end_frame = instance.data["endFrame"]
+
+ # Write out vrmesh file
+ self.log.info("Writing: '%s'" % file_path)
+ with avalon.maya.maintained_selection():
+ cmds.select(instance.data["setMembers"], noExpand=True)
+ cmds.vrayCreateProxy(exportType=1,
+ dir=staging_dir,
+ fname=file_name,
+ animOn=anim_on,
+ animType=3,
+ startFrame=start_frame,
+ endFrame=end_frame,
+ ignoreHiddenObjects=True,
+ createProxyNode=False)
+
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+
+ instance.data["files"].append(file_name)
+
+ self.log.info("Extracted instance '%s' to: %s"
+ % (instance.name, staging_dir))
diff --git a/config/plugins/maya/publish/extract_yeti_cache.py b/config/plugins/maya/publish/extract_yeti_cache.py
new file mode 100644
index 0000000000..479bf52813
--- /dev/null
+++ b/config/plugins/maya/publish/extract_yeti_cache.py
@@ -0,0 +1,65 @@
+import os
+import json
+
+from maya import cmds
+
+import colorbleed.api
+
+
+class ExtractYetiCache(colorbleed.api.Extractor):
+    """Extract Yeti fur caches (.fur) with the fur settings (JSON).
+
+    A cache sequence is written for the pgYetiMaya nodes in the instance,
+    alongside a `yeti.fursettings` metadata file.
+
+    """
+
+ label = "Extract Yeti Cache"
+ hosts = ["maya"]
+ families = ["colorbleed.yetiRig", "colorbleed.yeticache"]
+
+ def process(self, instance):
+
+ yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
+ if not yeti_nodes:
+ raise RuntimeError("No pgYetiMaya nodes found in the instance")
+
+ # Define extract output file path
+ dirname = self.staging_dir(instance)
+
+ # Yeti related staging dirs
+ data_file = os.path.join(dirname, "yeti.fursettings")
+
+ # Collect information for writing cache
+ start_frame = instance.data.get("startFrame")
+ end_frame = instance.data.get("endFrame")
+ preroll = instance.data.get("preroll")
+ if preroll > 0:
+ start_frame -= preroll
+
+ self.log.info("Writing out cache")
+        # Start writing out the cache files; the empty basename is prefixed
+        # with the Yeti node name and "%04d" is the frame padding
+ path = os.path.join(dirname, ".%04d.fur")
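+        # e.g. resulting in "<staging>/pgYetiMaya1Shape.0010.fur"
+        # (node and frame names illustrative)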
+ cmds.pgYetiCommand(yeti_nodes,
+ writeCache=path,
+ range=(start_frame, end_frame),
+ sampleTimes="0.0 1.0",
+ updateViewport=False,
+ generatePreview=False)
+
+ cache_files = [x for x in os.listdir(dirname) if x.endswith(".fur")]
+
+ self.log.info("Writing metadata file")
+ settings = instance.data.get("fursettings", None)
+ if settings is not None:
+ with open(data_file, "w") as fp:
+ json.dump(settings, fp, ensure_ascii=False)
+
+ # Ensure files can be stored
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+
+        instance.data["files"].extend(cache_files)
+        instance.data["files"].append("yeti.fursettings")
+
+ self.log.info("Extracted {} to {}".format(instance, dirname))
diff --git a/config/plugins/maya/publish/extract_yeti_rig.py b/config/plugins/maya/publish/extract_yeti_rig.py
new file mode 100644
index 0000000000..ad53fb7962
--- /dev/null
+++ b/config/plugins/maya/publish/extract_yeti_rig.py
@@ -0,0 +1,133 @@
+import os
+import json
+import contextlib
+
+from maya import cmds
+
+import avalon.maya.lib as lib
+import colorbleed.api
+import config.apps.maya.lib as maya
+
+
+@contextlib.contextmanager
+def disconnected_attributes(settings, members):
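+    """Temporarily disconnect the rig's input connections during export.
+
+    The source/destination pairs are looked up by cbId from
+    `settings["inputs"]` and any broken connections are restored on exit.
+
+    """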
+
+ members = cmds.ls(members, long=True)
+ original_connections = []
+ try:
+        for input_ in settings["inputs"]:
+
+            # Get source shapes
+            source_nodes = lib.lsattr("cbId", input_["sourceID"])
+            sources = [i for i in source_nodes if
+                       not cmds.referenceQuery(i, isNodeReferenced=True)
+                       and i in members]
+            try:
+                source = sources[0]
+            except IndexError:
+                print("No source node found for source_id: "
+                      "%s" % input_["sourceID"])
+                continue
+
+            # Get destination shapes (the shapes used as hook up)
+            destination_nodes = lib.lsattr("cbId", input_["destinationID"])
+            destinations = [i for i in destination_nodes if i not in members
+                            and i not in sources]
+            destination = destinations[0]
+
+            # Break connection
+            connections = input_["connections"]
+            src_attribute = "%s.%s" % (source, connections[0])
+            dst_attribute = "%s.%s" % (destination, connections[1])
+
+ # store connection pair
+ if not cmds.isConnected(src_attribute, dst_attribute):
+ continue
+
+ cmds.disconnectAttr(src_attribute, dst_attribute)
+ original_connections.append([src_attribute, dst_attribute])
+ yield
+ finally:
+ # restore connections
+ for connection in original_connections:
+ try:
+ cmds.connectAttr(connection[0], connection[1])
+ except Exception as e:
+ print(e)
+ continue
+
+
+class ExtractYetiRig(colorbleed.api.Extractor):
+    """Extract the Yeti rig as Maya Ascii with its rig settings (JSON).
+
+    The rig is exported with the `imageSearchPath` of the Yeti nodes
+    remapped to the assumed published resources folder, with the input
+    connections temporarily disconnected during the export.
+
+    """
+
+ label = "Extract Yeti Rig"
+ hosts = ["maya"]
+ families = ["colorbleed.yetiRig"]
+
+ def process(self, instance):
+
+ yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
+ if not yeti_nodes:
+ raise RuntimeError("No pgYetiMaya nodes found in the instance")
+
+ # Define extract output file path
+ dirname = self.staging_dir(instance)
+ settings_path = os.path.join(dirname, "yeti.rigsettings")
+
+ # Yeti related staging dirs
+ maya_path = os.path.join(dirname, "yeti_rig.ma")
+
+ self.log.info("Writing metadata file")
+
+ image_search_path = ""
+ settings = instance.data.get("rigsettings", None)
+ if settings is not None:
+
+ # Create assumed destination folder for imageSearchPath
+ assumed_temp_data = instance.data["assumedTemplateData"]
+ template = instance.data["template"]
+ template_formatted = template.format(**assumed_temp_data)
+
+ destination_folder = os.path.dirname(template_formatted)
+ image_search_path = os.path.join(destination_folder, "resources")
+ image_search_path = os.path.normpath(image_search_path)
+
+ settings["imageSearchPath"] = image_search_path
+ with open(settings_path, "w") as fp:
+ json.dump(settings, fp, ensure_ascii=False)
+
+ attr_value = {"%s.imageSearchPath" % n: str(image_search_path) for
+ n in yeti_nodes}
+
+ # Get input_SET members
+ input_set = [i for i in instance if i == "input_SET"]
+ # Get all items
+ set_members = cmds.sets(input_set[0], query=True)
+ members = cmds.listRelatives(set_members, ad=True, fullPath=True) or []
+ members += cmds.ls(set_members, long=True)
+
+ nodes = instance.data["setMembers"]
+ with disconnected_attributes(settings, members):
+ with maya.attribute_values(attr_value):
+ cmds.select(nodes, noExpand=True)
+ cmds.file(maya_path,
+ force=True,
+ exportSelected=True,
+ typ="mayaAscii",
+ preserveReferences=False,
+ constructionHistory=True,
+ shader=False)
+
+ # Ensure files can be stored
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+
+ instance.data["files"].extend(["yeti_rig.ma", "yeti.rigsettings"])
+
+ self.log.info("Extracted {} to {}".format(instance, dirname))
+
+ cmds.select(clear=True)
diff --git a/config/plugins/maya/publish/increment_current_file_deadline.py b/config/plugins/maya/publish/increment_current_file_deadline.py
new file mode 100644
index 0000000000..cb8374a7e1
--- /dev/null
+++ b/config/plugins/maya/publish/increment_current_file_deadline.py
@@ -0,0 +1,39 @@
+import pyblish.api
+
+
+class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin):
+ """Increment the current file.
+
+    Saves the current Maya scene with an increased version number.
+
+ """
+
+ label = "Increment current file"
+ order = pyblish.api.IntegratorOrder + 9.0
+ hosts = ["maya"]
+ families = ["colorbleed.renderlayer"]
+ optional = True
+
+ def process(self, context):
+
+ import os
+ from maya import cmds
+ from colorbleed.lib import version_up
+ from colorbleed.action import get_errored_plugins_from_data
+
+ errored_plugins = get_errored_plugins_from_data(context)
+ if any(plugin.__name__ == "MayaSubmitDeadline"
+ for plugin in errored_plugins):
+ raise RuntimeError("Skipping incrementing current file because "
+ "submission to deadline failed.")
+
+ current_filepath = context.data["currentFile"]
+ new_filepath = version_up(current_filepath)
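+        # e.g. "shot010_v001.ma" -> "shot010_v002.ma" (illustrative;
+        # exact behavior depends on colorbleed.lib.version_up)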
+
+ # Ensure the suffix is .ma because we're saving to `mayaAscii` type
+ if not new_filepath.endswith(".ma"):
+ self.log.warning("Refactoring scene to .ma extension")
+ new_filepath = os.path.splitext(new_filepath)[0] + ".ma"
+
+ cmds.file(rename=new_filepath)
+ cmds.file(save=True, force=True, type="mayaAscii")
diff --git a/config/plugins/maya/publish/save_scene.py b/config/plugins/maya/publish/save_scene.py
new file mode 100644
index 0000000000..bf80bc9699
--- /dev/null
+++ b/config/plugins/maya/publish/save_scene.py
@@ -0,0 +1,21 @@
+import pyblish.api
+
+
+class SaveCurrentScene(pyblish.api.ContextPlugin):
+ """Save current scene
+
+ """
+
+ label = "Save current file"
+ order = pyblish.api.IntegratorOrder - 0.49
+ hosts = ["maya"]
+ families = ["colorbleed.renderlayer"]
+
+ def process(self, context):
+ import maya.cmds as cmds
+
+ current = cmds.file(query=True, sceneName=True)
+        assert context.data['currentFile'] == current, (
+            "Collected 'currentFile' does not match the current scene name")
+
+ self.log.info("Saving current file..")
+ cmds.file(save=True, force=True)
diff --git a/config/plugins/maya/publish/submit_deadline.py b/config/plugins/maya/publish/submit_deadline.py
new file mode 100644
index 0000000000..43cdc22c4d
--- /dev/null
+++ b/config/plugins/maya/publish/submit_deadline.py
@@ -0,0 +1,262 @@
+import os
+import json
+import getpass
+
+from maya import cmds
+
+from avalon import api
+from avalon.vendor import requests
+
+import pyblish.api
+
+import config.apps.maya.lib as lib
+
+
+def get_renderer_variables(renderlayer=None):
+ """Retrieve the extension which has been set in the VRay settings
+
+ Will return None if the current renderer is not VRay
+ For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which
+ start with `rs`. Use the actual node name, do NOT use the `nice name`
+
+ Args:
+ renderlayer (str): the node name of the renderlayer.
+
+ Returns:
+ dict
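+
+    Example:
+        A typical return value looks like this (values illustrative)::
+
+            {"ext": "exr",
+             "filename_prefix": "/_/",
+             "padding": 4,
+             "filename_0": "/project/renders/_/beauty.0001.exr"}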
+ """
+
+ renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer())
+ render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"])
+
+ padding = cmds.getAttr("{}.{}".format(render_attrs["node"],
+ render_attrs["padding"]))
+
+ filename_0 = cmds.renderSettings(fullPath=True, firstImageName=True)[0]
+
+ if renderer == "vray":
+ # Maya's renderSettings function does not return V-Ray file extension
+ # so we get the extension from vraySettings
+ extension = cmds.getAttr("vraySettings.imageFormatStr")
+
+ # When V-Ray image format has not been switched once from default .png
+ # the getAttr command above returns None. As such we explicitly set
+ # it to `.png`
+ if extension is None:
+ extension = "png"
+
+ filename_prefix = "/_/"
+ else:
+ # Get the extension, getAttr defaultRenderGlobals.imageFormat
+ # returns an index number.
+ filename_base = os.path.basename(filename_0)
+ extension = os.path.splitext(filename_base)[-1].strip(".")
+ filename_prefix = "/_/"
+
+ return {"ext": extension,
+ "filename_prefix": filename_prefix,
+ "padding": padding,
+ "filename_0": filename_0}
+
+
+def preview_fname(folder, scene, layer, padding, ext):
+ """Return output file path with #### for padding.
+
+ Deadline requires the path to be formatted with # in place of numbers.
+ For example `/path/to/render.####.png`
+
+ Args:
+ folder (str): The root output folder (image path)
+ scene (str): The scene name
+ layer (str): The layer name to be rendered
+ padding (int): The padding length
+ ext(str): The output file extension
+
+ Returns:
+ str
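+
+    Example:
+        Illustrative values (POSIX-style path join shown):
+
+        >>> preview_fname("/renders", "shot010", "beauty", 4, "exr")
+        '/renders/shot010/shot010_beauty/beauty.####.exr'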
+
+ """
+
+ # Following hardcoded "/_/"
+ output = "{scene}/{scene}_{layer}/{layer}.{number}.{ext}".format(
+ scene=scene,
+ layer=layer,
+ number="#" * padding,
+ ext=ext
+ )
+
+ return os.path.join(folder, output)
+
+
+class MayaSubmitDeadline(pyblish.api.InstancePlugin):
+ """Submit available render layers to Deadline
+
+ Renders are submitted to a Deadline Web Service as
+ supplied via the environment variable AVALON_DEADLINE
+
+ """
+
+ label = "Submit to Deadline"
+ order = pyblish.api.IntegratorOrder
+ hosts = ["maya"]
+ families = ["colorbleed.renderlayer"]
+
+ def process(self, instance):
+
+ AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+ "http://localhost:8082")
+ assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+
+ context = instance.context
+ workspace = context.data["workspaceDir"]
+ filepath = context.data["currentFile"]
+ filename = os.path.basename(filepath)
+ comment = context.data.get("comment", "")
+ scene = os.path.splitext(filename)[0]
+ dirname = os.path.join(workspace, "renders")
+ renderlayer = instance.data['setMembers'] # rs_beauty
+ renderlayer_name = instance.name # beauty
+ renderlayer_globals = instance.data["renderGlobals"]
+ legacy_layers = renderlayer_globals["UseLegacyRenderLayers"]
+ deadline_user = context.data.get("deadlineUser", getpass.getuser())
+ jobname = "%s - %s" % (filename, instance.name)
+
+ # Get the variables depending on the renderer
+ render_variables = get_renderer_variables(renderlayer)
+ output_filename_0 = preview_fname(folder=dirname,
+ scene=scene,
+ layer=renderlayer_name,
+ padding=render_variables["padding"],
+ ext=render_variables["ext"])
+
+ try:
+ # Ensure render folder exists
+ os.makedirs(dirname)
+ except OSError:
+ pass
+
+ # Documentation for keys available at:
+ # https://docs.thinkboxsoftware.com
+ # /products/deadline/8.0/1_User%20Manual/manual
+ # /manual-submission.html#job-info-file-options
+ payload = {
+ "JobInfo": {
+ # Top-level group name
+ "BatchName": filename,
+
+ # Job name, as seen in Monitor
+ "Name": jobname,
+
+ # Arbitrary username, for visualisation in Monitor
+ "UserName": deadline_user,
+
+ "Plugin": instance.data.get("mayaRenderPlugin", "MayaBatch"),
+ "Frames": "{start}-{end}x{step}".format(
+ start=int(instance.data["startFrame"]),
+ end=int(instance.data["endFrame"]),
+ step=int(instance.data["byFrameStep"]),
+ ),
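+                # e.g. "Frames": "1001-1100x1" (illustrative values)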
+
+ "Comment": comment,
+
+ # Optional, enable double-click to preview rendered
+ # frames from Deadline Monitor
+ "OutputFilename0": output_filename_0.replace("\\", "/"),
+ },
+ "PluginInfo": {
+ # Input
+ "SceneFile": filepath,
+
+ # Output directory and filename
+ "OutputFilePath": dirname.replace("\\", "/"),
+ "OutputFilePrefix": render_variables["filename_prefix"],
+
+ # Mandatory for Deadline
+ "Version": cmds.about(version=True),
+
+ # Only render layers are considered renderable in this pipeline
+ "UsingRenderLayers": True,
+
+ # Use legacy Render Layer system
+ "UseLegacyRenderLayers": legacy_layers,
+
+ # Render only this layer
+ "RenderLayer": renderlayer,
+
+ # Determine which renderer to use from the file itself
+ "Renderer": instance.data["renderer"],
+
+ # Resolve relative references
+ "ProjectPath": workspace,
+ },
+
+ # Mandatory for Deadline, may be empty
+ "AuxFiles": []
+ }
+
+ # Include critical environment variables with submission
+ keys = [
+ # This will trigger `userSetup.py` on the slave
+ # such that proper initialisation happens the same
+ # way as it does on a local machine.
+ # TODO(marcus): This won't work if the slaves don't
+            # have access to these paths, such as if slaves are
+ # running Linux and the submitter is on Windows.
+ "PYTHONPATH",
+
+ # todo: This is a temporary fix for yeti variables
+ "PEREGRINEL_LICENSE",
+ "REDSHIFT_MAYAEXTENSIONSPATH",
+ "VRAY_FOR_MAYA2018_PLUGINS_X64",
+ "VRAY_PLUGINS_X64",
+ "VRAY_USE_THREAD_AFFINITY",
+ "MAYA_MODULE_PATH"
+ ]
+ environment = dict({key: os.environ[key] for key in keys
+ if key in os.environ}, **api.Session)
+
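+        # Only pass along paths on the studio share (assumes a Windows-style
+        # ";" separator and a "P:" network drive)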
+ PATHS = os.environ["PATH"].split(";")
+ environment["PATH"] = ";".join([p for p in PATHS
+ if p.startswith("P:")])
+
+ payload["JobInfo"].update({
+ "EnvironmentKeyValue%d" % index: "{key}={value}".format(
+ key=key,
+ value=environment[key]
+ ) for index, key in enumerate(environment)
+ })
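+        # Each entry becomes e.g. "EnvironmentKeyValue0": "PYTHONPATH=..."
+        # (key order and values are illustrative)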
+
+ # Include optional render globals
+ render_globals = instance.data.get("renderGlobals", {})
+ payload["JobInfo"].update(render_globals)
+
+ self.log.info("using render plugin : {}".format(payload["JobInfo"]["Plugin"]))
+
+ self.preflight_check(instance)
+
+ self.log.info("Submitting..")
+ self.log.info(json.dumps(payload, indent=4, sort_keys=True))
+
+ # E.g. http://192.168.0.1:8082/api/jobs
+ url = "{}/api/jobs".format(AVALON_DEADLINE)
+ response = requests.post(url, json=payload)
+ if not response.ok:
+ raise Exception(response.text)
+
+ # Store output dir for unified publisher (filesequence)
+ instance.data["outputDir"] = os.path.dirname(output_filename_0)
+ instance.data["deadlineSubmissionJob"] = response.json()
+
+ def preflight_check(self, instance):
+ """Ensure the startFrame, endFrame and byFrameStep are integers"""
+
+ for key in ("startFrame", "endFrame", "byFrameStep"):
+ value = instance.data[key]
+
+ if int(value) == value:
+ continue
+
+            self.log.warning(
+                "%s: %f will be submitted as the integer %d"
+                % (key, value, int(value))
+            )
diff --git a/config/plugins/maya/publish/validate_animation_content.py b/config/plugins/maya/publish/validate_animation_content.py
new file mode 100644
index 0000000000..0725281705
--- /dev/null
+++ b/config/plugins/maya/publish/validate_animation_content.py
@@ -0,0 +1,36 @@
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateAnimationContent(pyblish.api.InstancePlugin):
+ """Adheres to the content of 'animation' family
+
+ - Must have collected `out_hierarchy` data.
+ - All nodes in `out_hierarchy` must be in the instance.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ["maya"]
+ families = ["colorbleed.animation"]
+ label = "Animation Content"
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ @classmethod
+ def get_invalid(cls, instance):
+ assert 'out_hierarchy' in instance.data, "Missing `out_hierarchy` data"
+
+ # All nodes in the `out_hierarchy` must be among the nodes that are
+ # in the instance. The nodes in the instance are found from the top
+ # group, as such this tests whether all nodes are under that top group.
+
+ lookup = set(instance[:])
+ invalid = [node for node in instance.data['out_hierarchy'] if
+ node not in lookup]
+
+ return invalid
+
+ def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Animation content is invalid. See log.")
diff --git a/config/plugins/maya/publish/validate_animation_out_set_related_node_ids.py b/config/plugins/maya/publish/validate_animation_out_set_related_node_ids.py
new file mode 100644
index 0000000000..b67dcbf3f8
--- /dev/null
+++ b/config/plugins/maya/publish/validate_animation_out_set_related_node_ids.py
@@ -0,0 +1,80 @@
+import maya.cmds as cmds
+
+import pyblish.api
+import colorbleed.api
+import config.apps.maya.lib as lib
+
+
+class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin):
+ """Validate if deformed shapes have related IDs to the original shapes
+
+ When a deformer is applied in the scene on a referenced mesh that already
+ had deformers then Maya will create a new shape node for the mesh that
+ does not have the original id. This validator checks whether the ids are
+ valid on all the shape nodes in the instance.
+
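+    Example:
+        A referenced shape `body_GESShape` that locally receives a new
+        deformer gets a duplicated shape such as `body_GESShapeDeformed`
+        (names illustrative); that new shape must carry the cbId of the
+        original shape, which the repair action copies from history.
+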
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ families = ['colorbleed.animation', "colorbleed.pointcache"]
+ hosts = ['maya']
+ label = 'Animation Out Set Related Node Ids'
+ actions = [colorbleed.api.SelectInvalidAction, colorbleed.api.RepairAction]
+
+ def process(self, instance):
+ """Process all meshes"""
+
+ # Ensure all nodes have a cbId and a related ID to the original shapes
+ # if a deformer has been created on the shape
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Nodes found with non-related "
+ "asset IDs: {0}".format(invalid))
+
+ @classmethod
+ def get_invalid(cls, instance):
+ """Get all nodes which do not match the criteria"""
+
+ invalid = []
+ types_to_skip = ["locator"]
+
+ # get asset id
+ nodes = instance.data.get("out_hierarchy", instance[:])
+ for node in nodes:
+
+ # We only check when the node is *not* referenced
+ if cmds.referenceQuery(node, isNodeReferenced=True):
+ continue
+
+ # Check if node is a shape as deformers only work on shapes
+ obj_type = cmds.objectType(node, isAType="shape")
+ if not obj_type:
+ continue
+
+ # Skip specific types
+ if cmds.objectType(node) in types_to_skip:
+ continue
+
+ # Get the current id of the node
+ node_id = lib.get_id(node)
+ if not node_id:
+ invalid.append(node)
+ continue
+
+ history_id = lib.get_id_from_history(node)
+ if history_id is not None and node_id != history_id:
+ invalid.append(node)
+
+ return invalid
+
+ @classmethod
+ def repair(cls, instance):
+
+ for node in cls.get_invalid(instance):
+ # Get the original id from history
+ history_id = lib.get_id_from_history(node)
+ if not history_id:
+ cls.log.error("Could not find ID in history for '%s'", node)
+ continue
+
+ lib.set_id(node, history_id, overwrite=True)
diff --git a/config/plugins/maya/publish/validate_camera_attributes.py b/config/plugins/maya/publish/validate_camera_attributes.py
new file mode 100644
index 0000000000..46b9de7ecd
--- /dev/null
+++ b/config/plugins/maya/publish/validate_camera_attributes.py
@@ -0,0 +1,67 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateCameraAttributes(pyblish.api.InstancePlugin):
+ """Validates Camera has no invalid attribute keys or values.
+
+ The Alembic file format does not a specifc subset of attributes as such
+ we validate that no values are set there as the output will not match the
+ current scene. For example the preScale, film offsets and film roll.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ families = ['colorbleed.camera']
+ hosts = ['maya']
+ label = 'Camera Attributes'
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ DEFAULTS = [
+ ("filmFitOffset", 0.0),
+ ("horizontalFilmOffset", 0.0),
+ ("verticalFilmOffset", 0.0),
+ ("preScale", 1.0),
+ ("filmTranslateH", 0.0),
+ ("filmTranslateV", 0.0),
+ ("filmRollValue", 0.0)
+ ]
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ # get cameras
+ members = instance.data['setMembers']
+ shapes = cmds.ls(members, dag=True, shapes=True, long=True)
+ cameras = cmds.ls(shapes, type='camera', long=True)
+
+ invalid = set()
+ for cam in cameras:
+
+ for attr, default_value in cls.DEFAULTS:
+ plug = "{}.{}".format(cam, attr)
+ value = cmds.getAttr(plug)
+
+ # Check if is default value
+ if value != default_value:
+ cls.log.warning("Invalid attribute value: {0} "
+ "(should be: {1}))".format(plug,
+ default_value))
+ invalid.add(cam)
+
+ if cmds.listConnections(plug, source=True, destination=False):
+ # TODO: Validate correctly whether value always correct
+ cls.log.warning("%s has incoming connections, validation "
+ "is unpredictable." % plug)
+
+ return list(invalid)
+
+ def process(self, instance):
+ """Process all the nodes in the instance"""
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise RuntimeError("Invalid camera attributes: %s" % invalid)
diff --git a/config/plugins/maya/publish/validate_camera_contents.py b/config/plugins/maya/publish/validate_camera_contents.py
new file mode 100644
index 0000000000..09f5d5392b
--- /dev/null
+++ b/config/plugins/maya/publish/validate_camera_contents.py
@@ -0,0 +1,63 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateCameraContents(pyblish.api.InstancePlugin):
+ """Validates Camera instance contents.
+
+ A Camera instance may only hold a SINGLE camera's transform, nothing else.
+
+    It may hold a "locator" as a shape, but no other shape types may be
+    present anywhere down the hierarchy.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ families = ['colorbleed.camera']
+ hosts = ['maya']
+ label = 'Camera Contents'
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ # get cameras
+ members = instance.data['setMembers']
+ shapes = cmds.ls(members, dag=True, shapes=True, long=True)
+
+ # single camera
+ invalid = []
+ cameras = cmds.ls(shapes, type='camera', long=True)
+ if len(cameras) != 1:
+ cls.log.warning("Camera instance must have a single camera. "
+ "Found {0}: {1}".format(len(cameras), cameras))
+ invalid.extend(cameras)
+
+        # We need to check this edge case separately because when there
+        # are no cameras at all the extended 'invalid' list would still
+        # be empty and the validation would wrongly pass
+ if len(cameras) < 1:
+ raise RuntimeError("No cameras in instance.")
+
+ # non-camera shapes
+ valid_shapes = cmds.ls(shapes, type=('camera', 'locator'), long=True)
+ shapes = set(shapes) - set(valid_shapes)
+ if shapes:
+ shapes = list(shapes)
+ cls.log.warning("Camera instance should only contain camera "
+ "shapes. Found: {0}".format(shapes))
+ invalid.extend(shapes)
+
+ invalid = list(set(invalid))
+
+ return invalid
+
+ def process(self, instance):
+ """Process all the nodes in the instance"""
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Invalid camera contents: "
+ "{0}".format(invalid))
diff --git a/config/plugins/maya/publish/validate_deadline_connection.py b/config/plugins/maya/publish/validate_deadline_connection.py
new file mode 100644
index 0000000000..7f57e5b96a
--- /dev/null
+++ b/config/plugins/maya/publish/validate_deadline_connection.py
@@ -0,0 +1,26 @@
+import pyblish.api
+
+import avalon.api as api
+from avalon.vendor import requests
+
+
+class ValidateDeadlineConnection(pyblish.api.ContextPlugin):
+ """Validate Deadline Web Service is running"""
+
+ label = "Validate Deadline Web Service"
+ order = pyblish.api.ValidatorOrder
+ hosts = ["maya"]
+ families = ["colorbleed.renderlayer"]
+
+    def process(self, context):
+
+ AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+ "http://localhost:8082")
+
+ assert AVALON_DEADLINE is not None, "Requires AVALON_DEADLINE"
+
+ # Check response
+ response = requests.get(AVALON_DEADLINE)
+ assert response.ok, "Response must be ok"
+ assert response.text.startswith("Deadline Web Service "), \
+ "Web service did not respond with 'Deadline Web Service'"
\ No newline at end of file
diff --git a/config/plugins/maya/publish/validate_frame_range.py b/config/plugins/maya/publish/validate_frame_range.py
new file mode 100644
index 0000000000..010422df38
--- /dev/null
+++ b/config/plugins/maya/publish/validate_frame_range.py
@@ -0,0 +1,44 @@
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateFrameRange(pyblish.api.InstancePlugin):
+ """Valides the frame ranges.
+
+ Checks the `startFrame`, `endFrame` and `handles` data.
+ This does NOT ensure there's actual data present.
+
+ This validates:
+ - `startFrame` is lower than or equal to the `endFrame`.
+ - must have both the `startFrame` and `endFrame` data.
+ - The `handles` value is not lower than zero.
+
+ """
+
+ label = "Validate Frame Range"
+ order = colorbleed.api.ValidateContentsOrder
+ families = ["colorbleed.animation",
+ "colorbleed.pointcache",
+ "colorbleed.camera",
+ "colorbleed.renderlayer"]
+
+ def process(self, instance):
+
+ start = instance.data.get("startFrame", None)
+ end = instance.data.get("endFrame", None)
+ handles = instance.data.get("handles", None)
+
+        # Check that the required frame values are present
+ if any(value is None for value in [start, end]):
+ raise ValueError("No time values for this instance. "
+ "(Missing `startFrame` or `endFrame`)")
+
+ self.log.info("Comparing start (%s) and end (%s)" % (start, end))
+ if start > end:
+ raise RuntimeError("The start frame is a higher value "
+ "than the end frame: "
+ "{0}>{1}".format(start, end))
+
+ if handles is not None:
+ if handles < 0.0:
+ raise RuntimeError("Handles are set to a negative value")
diff --git a/config/plugins/maya/publish/validate_instance_has_members.py b/config/plugins/maya/publish/validate_instance_has_members.py
new file mode 100644
index 0000000000..d209505378
--- /dev/null
+++ b/config/plugins/maya/publish/validate_instance_has_members.py
@@ -0,0 +1,27 @@
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateInstanceHasMembers(pyblish.api.InstancePlugin):
+ """Validates instance objectSet has *any* members."""
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ["maya"]
+ label = 'Instance has members'
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ invalid = list()
+ if not instance.data["setMembers"]:
+ objectset_name = instance.data['name']
+ invalid.append(objectset_name)
+
+ return invalid
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Empty instances found: {0}".format(invalid))
diff --git a/config/plugins/maya/publish/validate_instance_subset.py b/config/plugins/maya/publish/validate_instance_subset.py
new file mode 100644
index 0000000000..6a26d09594
--- /dev/null
+++ b/config/plugins/maya/publish/validate_instance_subset.py
@@ -0,0 +1,44 @@
+import pyblish.api
+import colorbleed.api
+import string
+
+# Allow only letters, numbers and underscore
+allowed = set(string.ascii_lowercase +
+ string.ascii_uppercase +
+ string.digits +
+ '_')
+
+
+def validate_name(subset):
+ return all(x in allowed for x in subset)
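+
+
+# Illustrative behavior (subset names are examples):
+#   validate_name("modelMain_01")   -> True
+#   validate_name("model main-01")  -> False (space and dash not allowed)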
+
+
+class ValidateSubsetName(pyblish.api.InstancePlugin):
+ """Validates subset name has only valid characters"""
+
+ order = colorbleed.api.ValidateContentsOrder
+ families = ["*"]
+ label = "Subset Name"
+
+ def process(self, instance):
+
+ subset = instance.data.get("subset", None)
+
+ # Ensure subset data
+ if subset is None:
+ raise RuntimeError("Instance is missing subset "
+ "name: {0}".format(subset))
+
+ if not isinstance(subset, basestring):
+ raise TypeError("Instance subset name must be string, "
+ "got: {0} ({1})".format(subset, type(subset)))
+
+ # Ensure is not empty subset
+ if not subset:
+ raise ValueError("Instance subset name is "
+ "empty: {0}".format(subset))
+
+ # Validate subset characters
+ if not validate_name(subset):
+ raise ValueError("Instance subset name contains invalid "
+ "characters: {0}".format(subset))
diff --git a/config/plugins/maya/publish/validate_instancer_content.py b/config/plugins/maya/publish/validate_instancer_content.py
new file mode 100644
index 0000000000..ed97410fc6
--- /dev/null
+++ b/config/plugins/maya/publish/validate_instancer_content.py
@@ -0,0 +1,73 @@
+import maya.cmds as cmds
+
+import pyblish.api
+import config.apps.maya.lib as lib
+
+
+class ValidateInstancerContent(pyblish.api.InstancePlugin):
+ """Validates that all meshes in the instance have object IDs.
+
+ This skips a check on intermediate objects because we consider them
+ not important.
+ """
+ order = pyblish.api.ValidatorOrder
+ label = 'Instancer Content'
+ families = ['colorbleed.instancer']
+
+ def process(self, instance):
+
+ error = False
+ members = instance.data['setMembers']
+ export_members = instance.data['exactExportMembers']
+
+ self.log.info("Contents {0}".format(members))
+
+        if len(members) != len(cmds.ls(members, type="instancer")):
+ self.log.error("Instancer can only contain instancers")
+ error = True
+
+ # TODO: Implement better check for particles are cached
+ if not cmds.ls(export_members, type="nucleus"):
+ self.log.error("Instancer must have a connected nucleus")
+ error = True
+
+ if not cmds.ls(export_members, type="cacheFile"):
+ self.log.error("Instancer must be cached")
+ error = True
+
+        hidden = self.check_geometry_hidden(export_members)
+        if not hidden:
+            error = True
+            self.log.error("Instancer input geometry must be hidden "
+                           "in the scene.")
+
+ # Ensure all in one group
+ parents = cmds.listRelatives(members,
+ allParents=True,
+ fullPath=True) or []
+ roots = list(set(cmds.ls(parents, assemblies=True, long=True)))
+ if len(roots) > 1:
+ self.log.error("Instancer should all be contained in a single "
+ "group. Current roots: {0}".format(roots))
+ error = True
+
+ if error:
+ raise RuntimeError("Instancer Content is invalid. See log.")
+
+ def check_geometry_hidden(self, export_members):
+
+ # Ensure all instanced geometry is hidden
+ shapes = cmds.ls(export_members,
+ dag=True,
+ shapes=True,
+ noIntermediate=True)
+ meshes = cmds.ls(shapes, type="mesh")
+
+ visible = [node for node in meshes
+ if lib.is_visible(node,
+ displayLayer=False,
+ intermediateObject=False)]
+ if visible:
+ return False
+
+ return True
diff --git a/config/plugins/maya/publish/validate_instancer_frame_ranges.py b/config/plugins/maya/publish/validate_instancer_frame_ranges.py
new file mode 100644
index 0000000000..c7307c5a2a
--- /dev/null
+++ b/config/plugins/maya/publish/validate_instancer_frame_ranges.py
@@ -0,0 +1,168 @@
+import os
+import re
+import pyblish.api
+
+VERBOSE = False
+
+
+def is_cache_resource(resource):
+ """Return whether resource is a cacheFile resource"""
+ required = set(["maya", "node", "cacheFile"])
+ tags = resource.get("tags", [])
+ return required.issubset(tags)
+
+
+def validate_files(files):
+ for f in files:
+ assert os.path.exists(f)
+ assert f.endswith(".mcx") or f.endswith(".mcc")
+
+ return True
+
+
+def filter_ticks(files):
+ tick_files = set()
+ ticks = set()
+ for path in files:
+ match = re.match(".+Tick([0-9]+).mcx$", os.path.basename(path))
+ if match:
+ tick_files.add(path)
+ num = match.group(1)
+ ticks.add(int(num))
+
+ return tick_files, ticks
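+
+
+# For example (filenames illustrative), "nParticleShape1Frame10Tick60.mcx"
+# is collected as a tick file with substep number 60, while
+# "nParticleShape1Frame10.mcx" stays in the regular frame sequence.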
+
+
+class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
+ """Validates all instancer particle systems are cached correctly.
+
+ This means they should have the files/frames as required by the start-end
+ frame (including handles).
+
+ This also checks the files exist and checks the "ticks" (substeps) files.
+
+ """
+ order = pyblish.api.ValidatorOrder
+ label = 'Instancer Cache Frame Ranges'
+ families = ['colorbleed.instancer']
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ import pyseq
+
+ start_frame = instance.data.get("startFrame", 0)
+ end_frame = instance.data.get("endFrame", 0)
+ required = range(int(start_frame), int(end_frame) + 1)
+
+ invalid = list()
+ resources = instance.data.get("resources", [])
+
+ for resource in resources:
+ if not is_cache_resource(resource):
+ continue
+
+ node = resource['node']
+ all_files = resource['files'][:]
+ all_lookup = set(all_files)
+
+ # The first file is usually the .xml description file.
+ xml = all_files.pop(0)
+ assert xml.endswith(".xml")
+
+ if VERBOSE:
+ cls.log.info("Checking: {0}".format(all_files))
+
+ # Ensure all files exist (including ticks)
+ # The remainder file paths should be the .mcx or .mcc files
+            validate_files(all_files)
+
+ # Maya particle caches support substeps by saving out additional
+ # files that end with a Tick60.mcx, Tick120.mcx, etc. suffix.
+ # To avoid `pyseq` getting confused we filter those out and then
+ # for each file (except the last frame) check that at least all
+ # ticks exist.
+
+ tick_files, ticks = filter_ticks(all_files)
+ if tick_files:
+ files = [f for f in all_files if f not in tick_files]
+ else:
+ files = all_files
+
+ sequences = pyseq.get_sequences(files)
+ if len(sequences) != 1:
+ invalid.append(node)
+ cls.log.warning("More than one sequence found? "
+ "{0} {1}".format(node, files))
+ cls.log.warning("Found caches: {0}".format(sequences))
+ continue
+
+ sequence = sequences[0]
+ cls.log.debug("Found sequence: {0}".format(sequence))
+
+ start = sequence.start()
+ end = sequence.end()
+
+ if start > start_frame or end < end_frame:
+ invalid.append(node)
+ cls.log.warning("Sequence does not have enough "
+ "frames: {0}-{1} (requires: {2}-{3})"
+ "".format(start, end,
+ start_frame,
+ end_frame))
+ continue
+
+ # Ensure all frames are present
+ missing = set(sequence.missing())
+ if missing:
+ required_missing = [x for x in required if x in missing]
+ if required_missing:
+ invalid.append(node)
+ cls.log.warning("Sequence is missing required frames: "
+ "{0}".format(required_missing))
+ continue
+
+ # Ensure all tick files (substep) exist for the files in the folder
+ # for the frames required by the time range.
+ if ticks:
+ ticks = list(sorted(ticks))
+ cls.log.info("Found ticks: {0} "
+ "(substeps: {1})".format(ticks, len(ticks)))
+
+ # Check all frames except the last since we don't
+ # require subframes after our time range.
+ tick_check_frames = set(required[:-1])
+
+ # Check all frames
+ for item in sequence:
+ frame = item.frame
+ if not frame:
+ invalid.append(node)
+ cls.log.error("Path is not a frame in sequence: "
+ "{0}".format(item))
+ continue
+
+ # Not required for our time range
+ if frame not in tick_check_frames:
+ continue
+
+ path = item.path
+ for num in ticks:
+ base, ext = os.path.splitext(path)
+ tick_file = base + "Tick{0}".format(num) + ext
+ if tick_file not in all_lookup:
+ invalid.append(node)
+ cls.log.warning("Tick file found that is not "
+ "in cache query filenames: "
+ "{0}".format(tick_file))
+
+ return invalid
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ self.log.error("Invalid nodes: {0}".format(invalid))
+ raise RuntimeError("Invalid particle caches in instance. "
+ "See logs for details.")
diff --git a/config/plugins/maya/publish/validate_joints_hidden.py b/config/plugins/maya/publish/validate_joints_hidden.py
new file mode 100644
index 0000000000..70c6a81690
--- /dev/null
+++ b/config/plugins/maya/publish/validate_joints_hidden.py
@@ -0,0 +1,43 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+import config.apps.maya.lib as lib
+
+
+class ValidateJointsHidden(pyblish.api.InstancePlugin):
+ """Validate all joints are hidden visually.
+
+ This includes being hidden:
+ - visibility off,
+ - in a display layer that has visibility off,
+ - having hidden parents or
+ - being an intermediate object.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ['maya']
+ families = ['colorbleed.rig']
+ category = 'rig'
+ version = (0, 1, 0)
+ label = "Joints Hidden"
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.RepairAction]
+
+ @staticmethod
+ def get_invalid(instance):
+ joints = cmds.ls(instance, type='joint', long=True)
+ return [j for j in joints if lib.is_visible(j, displayLayer=True)]
+
+ def process(self, instance):
+ """Process all the nodes in the instance 'objectSet'"""
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise ValueError("Visible joints found: {0}".format(invalid))
+
+ @classmethod
+ def repair(cls, instance):
+ import maya.mel as mel
+ mel.eval("HideJoints")
diff --git a/config/plugins/maya/publish/validate_look_contents.py b/config/plugins/maya/publish/validate_look_contents.py
new file mode 100644
index 0000000000..5f47ea39cc
--- /dev/null
+++ b/config/plugins/maya/publish/validate_look_contents.py
@@ -0,0 +1,114 @@
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateLookContents(pyblish.api.InstancePlugin):
+ """Validate look instance contents
+
+ Rules:
+ * Look data must have `relationships` and `attributes` keys.
+    * At least one relationship must be collected.
+    * All relationship object sets must at least have an ID value.
+
+ Tip:
+ * When no node IDs are found on shadingEngines please save your scene
+ and try again.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ families = ['colorbleed.look']
+ hosts = ['maya']
+ label = 'Look Data Contents'
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ def process(self, instance):
+ """Process all the nodes in the instance"""
+
+ if not instance[:]:
+ raise RuntimeError("Instance is empty")
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("'{}' has invalid look "
+ "content".format(instance.name))
+
+ @classmethod
+ def get_invalid(cls, instance):
+ """Get all invalid nodes"""
+
+ cls.log.info("Validating look content for "
+ "'{}'".format(instance.name))
+
+ # check if data has the right attributes and content
+ attributes = cls.validate_lookdata_attributes(instance)
+ # check the looks for ID
+ looks = cls.validate_looks(instance)
+ # check if file nodes have valid files
+ files = cls.validate_files(instance)
+
+ invalid = looks + attributes + files
+
+ return invalid
+
+ @classmethod
+ def validate_lookdata_attributes(cls, instance):
+ """Check if the lookData has the required attributes
+
+ Args:
+ instance
+
+ """
+
+ invalid = set()
+
+ keys = ["relationships", "attributes"]
+ lookdata = instance.data["lookData"]
+ for key in keys:
+ if key not in lookdata:
+ cls.log.error("Look Data has no key "
+ "'{}'".format(key))
+ invalid.add(instance.name)
+
+ # Validate at least one single relationship is collected
+ if not lookdata["relationships"]:
+ cls.log.error("Look '%s' has no "
+ "`relationships`" % instance.name)
+ invalid.add(instance.name)
+
+ # Check if attributes are on a node with an ID, crucial for rebuild!
+ for attr_changes in lookdata["attributes"]:
+ if not attr_changes["uuid"]:
+ cls.log.error("Node '%s' has no cbId, please set the "
+ "attributes to its children if it has any"
+ % attr_changes["name"])
+ invalid.add(instance.name)
+
+ return list(invalid)
+
+ @classmethod
+ def validate_looks(cls, instance):
+
+ looks = instance.data["lookData"]["relationships"]
+ invalid = []
+ for name, data in looks.items():
+ if not data["uuid"]:
+ cls.log.error("Look '{}' has no UUID".format(name))
+ invalid.append(name)
+
+ return invalid
+
+ @classmethod
+ def validate_files(cls, instance):
+
+ invalid = []
+
+ resources = instance.data.get("resources", [])
+ for resource in resources:
+ files = resource["files"]
+ if len(files) == 0:
+ node = resource["node"]
+ cls.log.error("File node '%s' uses no or non-existing "
+ "files" % node)
+ invalid.append(node)
+
+ return invalid
\ No newline at end of file
diff --git a/config/plugins/maya/publish/validate_look_default_shaders_connections.py b/config/plugins/maya/publish/validate_look_default_shaders_connections.py
new file mode 100644
index 0000000000..dc8e7446c9
--- /dev/null
+++ b/config/plugins/maya/publish/validate_look_default_shaders_connections.py
@@ -0,0 +1,59 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateLookDefaultShadersConnections(pyblish.api.InstancePlugin):
+ """Validate default shaders in the scene have their default connections.
+
+    For example, lambert1 could potentially be disconnected from the
+    initialShadingGroup. In that case lambert1 will not be identified as
+    the default shader, which can have unpredictable results.
+
+    To fix this, the default connections need to be made again. See the
+    logs for more details on which connections are missing.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ families = ['colorbleed.look']
+ hosts = ['maya']
+ label = 'Look Default Shader Connections'
+
+ # The default connections to check
+ DEFAULTS = [("initialShadingGroup.surfaceShader", "lambert1"),
+ ("initialParticleSE.surfaceShader", "lambert1"),
+ ("initialParticleSE.volumeShader", "particleCloud1")
+ ]
+
+ def process(self, instance):
+
+ # Ensure check is run only once. We don't use ContextPlugin because
+ # of a bug where the ContextPlugin will always be visible. Even when
+ # the family is not present in an instance.
+ key = "__validate_look_default_shaders_connections_checked"
+ context = instance.context
+ is_run = context.data.get(key, False)
+ if is_run:
+ return
+ else:
+ context.data[key] = True
+
+ # Process as usual
+ invalid = list()
+ for plug, input_node in self.DEFAULTS:
+ inputs = cmds.listConnections(plug,
+ source=True,
+ destination=False) or None
+
+ if not inputs or inputs[0] != input_node:
+ self.log.error("{0} is not connected to {1}. "
+ "This can result in unexpected behavior. "
+ "Please reconnect to continue.".format(
+ plug,
+ input_node))
+ invalid.append(plug)
+
+ if invalid:
+ raise RuntimeError("Invalid connections.")
diff --git a/config/plugins/maya/publish/validate_look_members_unique.py b/config/plugins/maya/publish/validate_look_members_unique.py
new file mode 100644
index 0000000000..a42410c123
--- /dev/null
+++ b/config/plugins/maya/publish/validate_look_members_unique.py
@@ -0,0 +1,73 @@
+from collections import defaultdict
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateUniqueRelationshipMembers(pyblish.api.InstancePlugin):
+ """Validate the relational nodes of the look data to ensure every node is
+ unique.
+
+    This ensures that all member ids are unique: every node id must come
+    from a single node in the scene.
+
+    That means there is only ever one of a specific node inside the look to
+    be published. For example, if you loaded the same tree three times and
+    accidentally tried to publish them all together in a single look, that
+    would be invalid because they are the same tree; it should be included
+    in the look instance only once.
+
+ """
+
+ order = colorbleed.api.ValidatePipelineOrder
+ label = 'Look members unique'
+ hosts = ['maya']
+ families = ['colorbleed.look']
+
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.GenerateUUIDsOnInvalidAction]
+
+ def process(self, instance):
+ """Process all meshes"""
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+            raise RuntimeError("Members found with non-unique IDs: "
+ "{0}".format(invalid))
+
+ @staticmethod
+ def get_invalid(instance):
+        """Check all the relationship members of the objectSets.
+
+        Example of the lookData relationships:
+            {"uuid": "59b2bb27bda2cb2776206dd8:79ab0a63ffdf",
+             "members": [{"uuid": "59b2bb27bda2cb2776206dd8:1b158cc7496e",
+                          "name": "|model_GRP|body_GES|body_GESShape"},
+                         ...]}
+
+        Args:
+            instance (pyblish.api.Instance): The instance to validate.
+
+        Returns:
+            list: Member nodes that share their id with another node.
+
+        """
+
+ # Get all members from the sets
+ id_nodes = defaultdict(set)
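+        # Maps each id to the node names that carry it, e.g.
+        # {"<assetId>:<uniqueId>": {"|model_GRP|body_GES|body_GESShape"}}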
+ relationships = instance.data["lookData"]["relationships"]
+
+ for relationship in relationships.values():
+ for member in relationship['members']:
+ node_id = member["uuid"]
+ node = member["name"]
+ id_nodes[node_id].add(node)
+
+ # Check if any id has more than 1 node
+ invalid = []
+ for nodes in id_nodes.values():
+ if len(nodes) > 1:
+ invalid.extend(nodes)
+
+ return invalid
diff --git a/config/plugins/maya/publish/validate_look_no_default_shaders.py b/config/plugins/maya/publish/validate_look_no_default_shaders.py
new file mode 100644
index 0000000000..9315359184
--- /dev/null
+++ b/config/plugins/maya/publish/validate_look_no_default_shaders.py
@@ -0,0 +1,61 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateLookNoDefaultShaders(pyblish.api.InstancePlugin):
+ """Validate if any node has a connection to a default shader.
+
+ This checks whether the look has any members of:
+ - lambert1
+ - initialShadingGroup
+ - initialParticleSE
+ - particleCloud1
+
+ If any of those is present it will raise an error. A look is not allowed
+ to have any of the "default" shaders present in a scene as they can
+ introduce problems when referenced (overriding local scene shaders).
+
+    To fix this, none of the shape nodes in the look may have any of the
+    default shaders applied.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder + 0.01
+ families = ['colorbleed.look']
+ hosts = ['maya']
+ label = 'Look No Default Shaders'
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ DEFAULT_SHADERS = {"lambert1", "initialShadingGroup",
+ "initialParticleSE", "particleCloud1"}
+
+ def process(self, instance):
+ """Process all the nodes in the instance"""
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Invalid node relationships found: "
+ "{0}".format(invalid))
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ invalid = set()
+ for node in instance:
+ # Get shading engine connections
+ shaders = cmds.listConnections(node, type="shadingEngine") or []
+
+ # Check for any disallowed connections on *all* nodes
+ if any(s in cls.DEFAULT_SHADERS for s in shaders):
+
+ # Explicitly log each individual "wrong" connection.
+ for s in shaders:
+ if s in cls.DEFAULT_SHADERS:
+                        cls.log.error("Node has disallowed connection to "
+                                      "'{}': {}".format(s, node))
+
+ invalid.add(node)
+
+ return list(invalid)
diff --git a/config/plugins/maya/publish/validate_look_sets.py b/config/plugins/maya/publish/validate_look_sets.py
new file mode 100644
index 0000000000..c7a54c4cd8
--- /dev/null
+++ b/config/plugins/maya/publish/validate_look_sets.py
@@ -0,0 +1,97 @@
+from config.apps.maya import lib
+
+import pyblish.api
+import colorbleed.api
+
+from cb.utils.maya import context
+
+
+class ValidateLookSets(pyblish.api.InstancePlugin):
+ """Validate if any sets are missing from the instance and look data
+
+    A node might have a relationship with a shader but no Colorbleed ID.
+    Because it is missing the ID it has not been collected in the instance.
+    When the relationship needs to be maintained the artist might need to
+    create a different* relationship or ensure the node has the Colorbleed ID.
+
+    * The relationship might be too broad (assigned to the top node of a
+    hierarchy). This can be countered by creating the relationship on the
+    shape or its transform.
+    In essence: ensure the item the shader is assigned to has the
+    Colorbleed ID!
+
+    Displacement shaders:
+        Ensure all geometry is added to the displacement objectSet.
+        It is best practice to add the transform group of the shape to the
+        displacement objectSet.
+ Example content:
+ [asset_GRP|geometry_GRP|body_GES,
+ asset_GRP|geometry_GRP|L_eye_GES,
+ asset_GRP|geometry_GRP|R_eye_GES,
+ asset_GRP|geometry_GRP|wings_GEO]
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ families = ['colorbleed.look']
+ hosts = ['maya']
+ label = 'Look Sets'
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ def process(self, instance):
+ """Process all the nodes in the instance"""
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("'{}' has invalid look "
+ "content".format(instance.name))
+
+ @classmethod
+ def get_invalid(cls, instance):
+ """Get all invalid nodes"""
+
+ cls.log.info("Validating look content for "
+ "'{}'".format(instance.name))
+
+ relationships = instance.data["lookData"]["relationships"]
+ invalid = []
+
+ renderlayer = instance.data.get("renderlayer", "defaultRenderLayer")
+ with context.renderlayer(renderlayer):
+ for node in instance:
+ # get the connected objectSets of the node
+ sets = lib.get_related_sets(node)
+ if not sets:
+ continue
+
+                # check if any objectSets are not present in the relationships
+ missing_sets = [s for s in sets if s not in relationships]
+ if missing_sets:
+ # A set of this node is not coming along, this is wrong!
+ cls.log.error("Missing sets '{}' for node "
+ "'{}'".format(missing_sets, node))
+ invalid.append(node)
+ continue
+
+ # Ensure the node is in the sets that are collected
+ for shaderset, data in relationships.items():
+ if shaderset not in sets:
+ # no need to check for a set if the node
+ # isn't in it anyway
+ continue
+
+ member_nodes = [member['name'] for member in
+ data['members']]
+ if node not in member_nodes:
+ # The node is not found in the collected set
+ # relationships
+                        cls.log.error("Node '{}' missing from collected "
+                                      "objectSet '{}'".format(node,
+                                                              shaderset))
+ invalid.append(node)
+
+ return invalid
+
+ @classmethod
+ def repair(cls, context, instance):
+ pass
diff --git a/config/plugins/maya/publish/validate_look_single_shader.py b/config/plugins/maya/publish/validate_look_single_shader.py
new file mode 100644
index 0000000000..f56f8eb64e
--- /dev/null
+++ b/config/plugins/maya/publish/validate_look_single_shader.py
@@ -0,0 +1,53 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateSingleShader(pyblish.api.InstancePlugin):
+ """Validate all nurbsSurfaces and meshes have exactly one shader assigned.
+
+ This will error if a shape has no shaders or more than one shader.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ families = ['colorbleed.look']
+ hosts = ['maya']
+ label = 'Look Single Shader Per Shape'
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ # The default connections to check
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Found shapes which don't have a single shader "
+ "assigned: "
+ "\n{}".format(invalid))
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ # Get all shapes from the instance
+ shapes = cmds.ls(instance, type=["nurbsSurface", "mesh"], long=True)
+
+ # Check the number of connected shadingEngines per shape
+ no_shaders = []
+ more_than_one_shaders = []
+ for shape in shapes:
+ shading_engines = cmds.listConnections(shape,
+ destination=True,
+ type="shadingEngine") or []
+ if not shading_engines:
+ no_shaders.append(shape)
+ elif len(shading_engines) > 1:
+ more_than_one_shaders.append(shape)
+
+ if no_shaders:
+ cls.log.error("No shaders found on: {}".format(no_shaders))
+ if more_than_one_shaders:
+ cls.log.error("More than one shader found on: "
+ "{}".format(more_than_one_shaders))
+
+ return no_shaders + more_than_one_shaders
diff --git a/config/plugins/maya/publish/validate_maya_units.py b/config/plugins/maya/publish/validate_maya_units.py
new file mode 100644
index 0000000000..9904936f6b
--- /dev/null
+++ b/config/plugins/maya/publish/validate_maya_units.py
@@ -0,0 +1,53 @@
+import maya.cmds as cmds
+
+import pyblish.api
+import colorbleed.api
+from colorbleed import lib
+import config.apps.maya.lib as mayalib
+
+
+class ValidateMayaUnits(pyblish.api.ContextPlugin):
+    """Check if the Maya units are set correctly"""
+
+ order = colorbleed.api.ValidateSceneOrder
+ label = "Maya Units"
+ hosts = ['maya']
+ actions = [colorbleed.api.RepairContextAction]
+
+ def process(self, context):
+
+        linearunits = context.data.get('linearUnits')
+        angularunits = context.data.get('angularUnits')
+
+ fps = context.data['fps']
+ project_fps = lib.get_project_fps()
+
+ self.log.info('Units (linear): {0}'.format(linearunits))
+ self.log.info('Units (angular): {0}'.format(angularunits))
+ self.log.info('Units (time): {0} FPS'.format(fps))
+
+ # Check if units are correct
+ assert linearunits and linearunits == 'cm', ("Scene linear units must "
+ "be centimeters")
+
+ assert angularunits and angularunits == 'deg', ("Scene angular units "
+ "must be degrees")
+ assert fps and fps == project_fps, "Scene must be %s FPS" % project_fps
+
+ @classmethod
+ def repair(cls):
+        """Fix the scene units.
+
+        Sets the angular unit to degrees, the linear unit to centimeters
+        and the time unit to match the project FPS.
+        """
+
+ cls.log.info("Setting angular unit to 'degrees'")
+ cmds.currentUnit(angle="degree")
+ current_angle = cmds.currentUnit(query=True, angle=True)
+ cls.log.debug(current_angle)
+
+ cls.log.info("Setting linear unit to 'centimeter'")
+ cmds.currentUnit(linear="centimeter")
+ current_linear = cmds.currentUnit(query=True, linear=True)
+ cls.log.debug(current_linear)
+
+ cls.log.info("Setting time unit to match project")
+ project_fps = lib.get_project_fps()
+ mayalib.set_scene_fps(project_fps)
diff --git a/config/plugins/maya/publish/validate_mesh_has_uv.py b/config/plugins/maya/publish/validate_mesh_has_uv.py
new file mode 100644
index 0000000000..bc7c19e5ad
--- /dev/null
+++ b/config/plugins/maya/publish/validate_mesh_has_uv.py
@@ -0,0 +1,94 @@
+import re
+
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+def len_flattened(components):
+ """Return the length of the list as if it was flattened.
+
+ Maya will return consecutive components as a single entry
+ when requesting with `maya.cmds.ls` without the `flatten`
+    flag. However, enabling `flatten` on a large list (e.g. millions of
+    components) is slow. This function returns the amount of entries in
+    a non-flattened list by parsing the result with regex.
+
+ Args:
+ components (list): The non-flattened components.
+
+ Returns:
+ int: The amount of entries.
+
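+    Example (illustrative):
+        >>> len_flattened(["pCube1.vtx[0:7]", "pCube1.vtx[9]"])
+        9
+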
+ """
+ assert isinstance(components, (list, tuple))
+ n = 0
+ for c in components:
+        match = re.search(r"\[([0-9]+):([0-9]+)\]", c)
+ if match:
+ start, end = match.groups()
+ n += int(end) - int(start) + 1
+ else:
+ n += 1
+ return n
+
+
+class ValidateMeshHasUVs(pyblish.api.InstancePlugin):
+ """Validate the current mesh has UVs.
+
+    It validates whether the current UV set has non-zero UVs and at least
+    as many UVs as vertices. It's not bulletproof, but it is a quick
+    validation to check whether there are likely UVs for every face.
+ """
+
+ order = colorbleed.api.ValidateMeshOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ category = 'geometry'
+ label = 'Mesh Has UVs'
+ actions = [colorbleed.api.SelectInvalidAction]
+ optional = True
+
+ @classmethod
+ def get_invalid(cls, instance):
+ invalid = []
+
+ for node in cmds.ls(instance, type='mesh'):
+ uv = cmds.polyEvaluate(node, uv=True)
+
+ if uv == 0:
+ invalid.append(node)
+ continue
+
+ vertex = cmds.polyEvaluate(node, vertex=True)
+ if uv < vertex:
+ # Workaround:
+ # Maya can have instanced UVs in a single mesh, for example
+ # imported from an Alembic. With instanced UVs the UV count
+ # from `maya.cmds.polyEvaluate(uv=True)` will only result in
+ # the unique UV count instead of for all vertices.
+ #
+ # Note: Maya can save instanced UVs to `mayaAscii` but cannot
+ # load this as instanced. So saving, opening and saving
+ # again will lose this information.
+ map_attr = "{}.map[*]".format(node)
+ uv_to_vertex = cmds.polyListComponentConversion(map_attr,
+ toVertex=True)
+ uv_vertex_count = len_flattened(uv_to_vertex)
+ if uv_vertex_count < vertex:
+ invalid.append(node)
+ else:
+ cls.log.warning("Node has instanced UV points: "
+ "{0}".format(node))
+
+ return invalid
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Meshes found in instance without "
+ "valid UVs: {0}".format(invalid))
diff --git a/config/plugins/maya/publish/validate_mesh_lamina_faces.py b/config/plugins/maya/publish/validate_mesh_lamina_faces.py
new file mode 100644
index 0000000000..80a6968e55
--- /dev/null
+++ b/config/plugins/maya/publish/validate_mesh_lamina_faces.py
@@ -0,0 +1,37 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin):
+ """Validate meshes don't have lamina faces.
+
+ Lamina faces share all of their edges.
+
+ """
+
+ order = colorbleed.api.ValidateMeshOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ category = 'geometry'
+ version = (0, 1, 0)
+ label = 'Mesh Lamina Faces'
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ @staticmethod
+ def get_invalid(instance):
+ meshes = cmds.ls(instance, type='mesh', long=True)
+ invalid = [mesh for mesh in meshes if
+ cmds.polyInfo(mesh, laminaFaces=True)]
+
+ return invalid
+
+ def process(self, instance):
+ """Process all the nodes in the instance 'objectSet'"""
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise ValueError("Meshes found with lamina faces: "
+ "{0}".format(invalid))
diff --git a/config/plugins/maya/publish/validate_mesh_no_negative_scale.py b/config/plugins/maya/publish/validate_mesh_no_negative_scale.py
new file mode 100644
index 0000000000..91fc720cbe
--- /dev/null
+++ b/config/plugins/maya/publish/validate_mesh_no_negative_scale.py
@@ -0,0 +1,49 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateMeshNoNegativeScale(pyblish.api.Validator):
+ """Ensure that meshes don't have a negative scale.
+
+ Using negatively scaled proxies in a VRayMesh results in inverted
+ normals. As such we want to avoid this.
+
+    We also avoid this on rigs and models because these are often the steps
+    prior to caching to proxies, so we can catch this issue early.
+
+ """
+
+ order = colorbleed.api.ValidateMeshOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ label = 'Mesh No Negative Scale'
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ @staticmethod
+ def get_invalid(instance):
+ meshes = cmds.ls(instance,
+ type='mesh',
+ long=True,
+ noIntermediate=True)
+
+ invalid = []
+ for mesh in meshes:
+ transform = cmds.listRelatives(mesh, parent=True, fullPath=True)[0]
+ scale = cmds.getAttr("{0}.scale".format(transform))[0]
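+            # `scale` is an (x, y, z) tuple, e.g. (1.0, -1.0, 1.0) would be
+            # flagged as invalid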
+
+ if any(x < 0 for x in scale):
+ invalid.append(mesh)
+
+ return invalid
+
+ def process(self, instance):
+ """Process all the nodes in the instance 'objectSet'"""
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise ValueError("Meshes found with negative "
+ "scale: {0}".format(invalid))
diff --git a/config/plugins/maya/publish/validate_mesh_non_manifold.py b/config/plugins/maya/publish/validate_mesh_non_manifold.py
new file mode 100644
index 0000000000..c1185cf587
--- /dev/null
+++ b/config/plugins/maya/publish/validate_mesh_non_manifold.py
@@ -0,0 +1,41 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateMeshNonManifold(pyblish.api.Validator):
+ """Ensure that meshes don't have non-manifold edges or vertices
+
+ To debug the problem on the meshes you can use Maya's modeling
+ tool: "Mesh > Cleanup..."
+
+ """
+
+ order = colorbleed.api.ValidateMeshOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ label = 'Mesh Non-Manifold Vertices/Edges'
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ @staticmethod
+ def get_invalid(instance):
+
+ meshes = cmds.ls(instance, type='mesh', long=True)
+
+ invalid = []
+ for mesh in meshes:
+ if (cmds.polyInfo(mesh, nonManifoldVertices=True) or
+ cmds.polyInfo(mesh, nonManifoldEdges=True)):
+ invalid.append(mesh)
+
+ return invalid
+
+ def process(self, instance):
+ """Process all the nodes in the instance 'objectSet'"""
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise ValueError("Meshes found with non-manifold "
+ "edges/vertices: {0}".format(invalid))
diff --git a/config/plugins/maya/publish/validate_mesh_non_zero_edge.py b/config/plugins/maya/publish/validate_mesh_non_zero_edge.py
new file mode 100644
index 0000000000..bf7ab0ce21
--- /dev/null
+++ b/config/plugins/maya/publish/validate_mesh_non_zero_edge.py
@@ -0,0 +1,55 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+import config.apps.maya.lib as lib
+
+
+class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
+ """Validate meshes don't have edges with a zero length.
+
+ Based on Maya's polyCleanup 'Edges with zero length'.
+
+ Note:
+ This can be slow for high-res meshes.
+
+ """
+
+ order = colorbleed.api.ValidateMeshOrder
+ families = ['colorbleed.model']
+ hosts = ['maya']
+ category = 'geometry'
+ version = (0, 1, 0)
+ label = 'Mesh Edge Length Non Zero'
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ __tolerance = 1e-5
+
+ @classmethod
+ def get_invalid(cls, instance):
+ """Return the invalid edges.
+ Also see: http://help.autodesk.com/view/MAYAUL/2015/ENU/?guid=Mesh__Cleanup
+
+ """
+
+ meshes = cmds.ls(instance, type='mesh', long=True)
+ if not meshes:
+ return list()
+
+ # Get all edges
+ edges = ['{0}.e[*]'.format(node) for node in meshes]
+
+ # Filter by constraint on edge length
+ invalid = lib.polyConstraint(edges,
+ t=0x8000, # type=edge
+ length=1,
+ lengthbound=(0, cls.__tolerance))
+
+ return invalid
+
+ def process(self, instance):
+ """Process all meshes"""
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Meshes found with zero "
+ "edge length: {0}".format(invalid))
diff --git a/config/plugins/maya/publish/validate_mesh_normals_unlocked.py b/config/plugins/maya/publish/validate_mesh_normals_unlocked.py
new file mode 100644
index 0000000000..68049fd60a
--- /dev/null
+++ b/config/plugins/maya/publish/validate_mesh_normals_unlocked.py
@@ -0,0 +1,53 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateMeshNormalsUnlocked(pyblish.api.Validator):
+ """Validate all meshes in the instance have unlocked normals
+
+ These can be unlocked manually through:
+ Modeling > Mesh Display > Unlock Normals
+
+ """
+
+ order = colorbleed.api.ValidateMeshOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ category = 'geometry'
+ version = (0, 1, 0)
+ label = 'Mesh Normals Unlocked'
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.RepairAction]
+ optional = True
+
+ @staticmethod
+ def has_locked_normals(mesh):
+ """Return whether a mesh node has locked normals"""
+ return any(cmds.polyNormalPerVertex("{}.vtxFace[*][*]".format(mesh),
+ query=True,
+ freezeNormal=True))
+
+ @classmethod
+ def get_invalid(cls, instance):
+ """Return the meshes with locked normals in instance"""
+
+ meshes = cmds.ls(instance, type='mesh', long=True)
+ return [mesh for mesh in meshes if cls.has_locked_normals(mesh)]
+
+ def process(self, instance):
+ """Raise invalid when any of the meshes have locked normals"""
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise ValueError("Meshes found with "
+ "locked normals: {0}".format(invalid))
+
+ @classmethod
+ def repair(cls, instance):
+ """Unlocks all normals on the meshes in this instance."""
+ invalid = cls.get_invalid(instance)
+ for mesh in invalid:
+ cmds.polyNormalPerVertex(mesh, unFreezeNormal=True)
diff --git a/config/plugins/maya/publish/validate_mesh_shader_connections.py b/config/plugins/maya/publish/validate_mesh_shader_connections.py
new file mode 100644
index 0000000000..5f44bf1f9f
--- /dev/null
+++ b/config/plugins/maya/publish/validate_mesh_shader_connections.py
@@ -0,0 +1,111 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+def pairs(iterable):
+    """Iterate over iterable per group of two.
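+
+    Example (illustrative):
+        >>> list(pairs(["a.out", "b.in", "c.out", "d.in"]))
+        [('a.out', 'b.in'), ('c.out', 'd.in')]
+
+    """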
+ a = iter(iterable)
+ for i, y in zip(a, a):
+ yield i, y
+
+
+def get_invalid_sets(shape):
+ """Get sets that are considered related but do not contain the shape.
+
+ In some scenarios Maya keeps connections to multiple shaders
+ even if just a single one is assigned on the full object.
+
+ These are related sets returned by `maya.cmds.listSets` that don't
+ actually have the shape as member.
+
+ """
+
+ invalid = []
+    sets = cmds.listSets(object=shape, t=1, extendToShape=False) or []
+ for s in sets:
+ members = cmds.sets(s, query=True, nodesOnly=True)
+ if not members:
+ invalid.append(s)
+ continue
+
+ members = set(cmds.ls(members, long=True))
+ if shape not in members:
+ invalid.append(s)
+
+ return invalid
+
+
+def disconnect(node_a, node_b):
+ """Remove all connections between node a and b."""
+
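+    # `listConnections` with connections=True returns a flat list of plug
+    # pairs: [plug_on_node_a, connected_plug, plug_on_node_a, ...], which
+    # `pairs` groups back into 2-tuples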
+ # Disconnect outputs
+    outputs = cmds.listConnections(node_a,
+                                   plugs=True,
+                                   connections=True,
+                                   source=False,
+                                   destination=True) or []
+ for output, destination in pairs(outputs):
+ if destination.split(".", 1)[0] == node_b:
+ cmds.disconnectAttr(output, destination)
+
+ # Disconnect inputs
+    inputs = cmds.listConnections(node_a,
+                                  plugs=True,
+                                  connections=True,
+                                  source=True,
+                                  destination=False) or []
+    for destination, source in pairs(inputs):
+        if source.split(".", 1)[0] == node_b:
+            cmds.disconnectAttr(source, destination)
+
+
+class ValidateMeshShaderConnections(pyblish.api.InstancePlugin):
+ """Ensure mesh shading engine connections are valid.
+
+ In some scenarios Maya keeps connections to multiple shaders even if just
+ a single one is assigned on the shape.
+
+ These are related sets returned by `maya.cmds.listSets` that don't
+ actually have the shape as member.
+
+ """
+
+ order = colorbleed.api.ValidateMeshOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ label = "Mesh Shader Connections"
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.RepairAction]
+
+ def process(self, instance):
+ """Process all the nodes in the instance 'objectSet'"""
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise RuntimeError("Shapes found with invalid shader "
+ "connections: {0}".format(invalid))
+
+ @staticmethod
+ def get_invalid(instance):
+
+ shapes = cmds.ls(instance[:], dag=1, leaf=1, shapes=1, long=True)
+ shapes = cmds.ls(shapes, shapes=True, noIntermediate=True, long=True)
+
+ invalid = []
+ for shape in shapes:
+ if get_invalid_sets(shape):
+ invalid.append(shape)
+
+ return invalid
+
+ @classmethod
+ def repair(cls, instance):
+
+ shapes = cls.get_invalid(instance)
+ for shape in shapes:
+ invalid_sets = get_invalid_sets(shape)
+ for set_node in invalid_sets:
+ disconnect(shape, set_node)
diff --git a/config/plugins/maya/publish/validate_mesh_single_uv_set.py b/config/plugins/maya/publish/validate_mesh_single_uv_set.py
new file mode 100644
index 0000000000..db964d932f
--- /dev/null
+++ b/config/plugins/maya/publish/validate_mesh_single_uv_set.py
@@ -0,0 +1,68 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+import config.apps.maya.lib as lib
+
+
+class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin):
+ """Warn on multiple UV sets existing for each polygon mesh.
+
+    On versions prior to Maya 2017 this will disallow multiple UV sets,
+    because the Alembic exports in Maya prior to 2017 don't support writing
+    multiple UV sets.
+
+ """
+
+ order = colorbleed.api.ValidateMeshOrder
+ hosts = ['maya']
+ families = ['colorbleed.model', 'colorbleed.pointcache']
+ category = 'uv'
+ optional = True
+ version = (0, 1, 0)
+ label = "Mesh Single UV Set"
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.RepairAction]
+
+ @staticmethod
+ def get_invalid(instance):
+
+ meshes = cmds.ls(instance, type='mesh', long=True)
+
+ invalid = []
+ for mesh in meshes:
+ uvSets = cmds.polyUVSet(mesh,
+ query=True,
+ allUVSets=True) or []
+
+ # ensure unique (sometimes maya will list 'map1' twice)
+ uvSets = set(uvSets)
+
+ if len(uvSets) != 1:
+ invalid.append(mesh)
+
+ return invalid
+
+ def process(self, instance):
+ """Process all the nodes in the instance 'objectSet'"""
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+
+ message = "Nodes found with multiple UV sets: {0}".format(invalid)
+
+ # Maya 2017 and up allows multiple UV sets in Alembic exports
+ # so we allow it, yet just warn the user to ensure they know about
+ # the other UV sets.
+ allowed = int(cmds.about(version=True)) >= 2017
+
+ if allowed:
+ self.log.warning(message)
+ else:
+ raise ValueError(message)
+
+ @classmethod
+ def repair(cls, instance):
+ for mesh in cls.get_invalid(instance):
+ lib.remove_other_uv_sets(mesh)
diff --git a/config/plugins/maya/publish/validate_mesh_vertices_have_edges.py b/config/plugins/maya/publish/validate_mesh_vertices_have_edges.py
new file mode 100644
index 0000000000..90eca6e6f4
--- /dev/null
+++ b/config/plugins/maya/publish/validate_mesh_vertices_have_edges.py
@@ -0,0 +1,102 @@
+import re
+
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+def len_flattened(components):
+ """Return the length of the list as if it was flattened.
+
+ Maya will return consecutive components as a single entry
+ when requesting with `maya.cmds.ls` without the `flatten`
+    flag. However, enabling `flatten` on a large list (e.g. millions of
+    components) is slow. This function returns the amount of entries in
+    a non-flattened list by parsing the result with regex.
+
+ Args:
+ components (list): The non-flattened components.
+
+ Returns:
+ int: The amount of entries.
+
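+    Example (illustrative):
+        >>> len_flattened(["pCube1.vtx[0:7]", "pCube1.vtx[9]"])
+        9
+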
+ """
+ assert isinstance(components, (list, tuple))
+ n = 0
+
+ pattern = re.compile(r"\[(\d+):(\d+)\]")
+ for c in components:
+ match = pattern.search(c)
+ if match:
+ start, end = match.groups()
+ n += int(end) - int(start) + 1
+ else:
+ n += 1
+ return n
+
+
+class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin):
+ """Validate meshes have only vertices that are connected to edges.
+
+ Maya can have invalid geometry with vertices that have no edges or
+ faces connected to them.
+
+ In Maya 2016 EXT 2 and later there's a command to fix this:
+ `maya.cmds.polyClean(mesh, cleanVertices=True)`
+
+ In older versions of Maya it works to select the invalid vertices
+ and merge the components.
+
+ To find these invalid vertices select all vertices of the mesh
+ that are visible in the viewport (drag to select), afterwards
+ invert your selection (Ctrl + Shift + I). The remaining selection
+ contains the invalid vertices.
+
+ """
+
+ order = colorbleed.api.ValidateMeshOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ category = 'geometry'
+ label = 'Mesh Vertices Have Edges'
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.RepairAction]
+
+ @classmethod
+ def repair(cls, instance):
+
+ # This fix only works in Maya 2016 EXT2 and newer
+ if float(cmds.about(version=True)) <= 2016.0:
+ raise RuntimeError("Repair not supported in Maya version below "
+ "2016 EXT 2")
+
+ invalid = cls.get_invalid(instance)
+ for node in invalid:
+ cmds.polyClean(node, cleanVertices=True)
+
+ @classmethod
+ def get_invalid(cls, instance):
+ invalid = []
+
+ meshes = cmds.ls(instance, type="mesh", long=True)
+ for mesh in meshes:
+ num_vertices = cmds.polyEvaluate(mesh, vertex=True)
+
+ # Vertices from all edges
+ edges = "%s.e[*]" % mesh
+ vertices = cmds.polyListComponentConversion(edges, toVertex=True)
+ num_vertices_from_edges = len_flattened(vertices)
+
+ if num_vertices != num_vertices_from_edges:
+ invalid.append(mesh)
+
+ return invalid
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Meshes found in instance with vertices that "
+ "have no edges: %s" % invalid)
diff --git a/config/plugins/maya/publish/validate_model_content.py b/config/plugins/maya/publish/validate_model_content.py
new file mode 100644
index 0000000000..eb56804fa1
--- /dev/null
+++ b/config/plugins/maya/publish/validate_model_content.py
@@ -0,0 +1,95 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+import config.apps.maya.lib as lib
+
+
+class ValidateModelContent(pyblish.api.InstancePlugin):
+    """Validate the content of the 'model' family
+
+ - Must have one top group.
+ - Must only contain: transforms, meshes and groups
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ["maya"]
+ families = ["colorbleed.model"]
+ label = "Model Content"
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ content_instance = instance.data.get("setMembers", None)
+ if not content_instance:
+ cls.log.error("Instance has no nodes!")
+ return True
+
+ # All children will be included in the extracted export so we also
+ # validate *all* descendents of the set members and we skip any
+ # intermediate shapes
+ descendants = cmds.listRelatives(content_instance,
+ allDescendents=True,
+ fullPath=True) or []
+ descendants = cmds.ls(descendants, noIntermediate=True, long=True)
+ content_instance = list(set(content_instance + descendants))
+
+ # Ensure only valid node types
+ allowed = ('mesh', 'transform', 'nurbsCurve', 'nurbsSurface')
+ nodes = cmds.ls(content_instance, long=True)
+ valid = cmds.ls(content_instance, long=True, type=allowed)
+ invalid = set(nodes) - set(valid)
+
+ if invalid:
+ cls.log.error("These nodes are not allowed: %s" % invalid)
+ return list(invalid)
+
+ if not valid:
+ cls.log.error("No valid nodes in the instance")
+ return True
+
+ # Ensure it has shapes
+ shapes = cmds.ls(valid, long=True, shapes=True)
+ if not shapes:
+ cls.log.error("No shapes in the model instance")
+ return True
+
+ # Top group
+ assemblies = cmds.ls(content_instance, assemblies=True, long=True)
+ if len(assemblies) != 1:
+ cls.log.error("Must have exactly one top group")
+ if len(assemblies) == 0:
+ cls.log.warning("No top group found. "
+ "(Are there objects in the instance?)")
+ return assemblies or True
+
+ def _is_visible(node):
+ """Return whether node is visible"""
+ return lib.is_visible(node,
+ displayLayer=False,
+ intermediateObject=True,
+ parentHidden=True,
+ visibility=True)
+
+ # The roots must be visible (the assemblies)
+ for assembly in assemblies:
+ if not _is_visible(assembly):
+ cls.log.error("Invisible assembly (root node) is not "
+ "allowed: {0}".format(assembly))
+ invalid.add(assembly)
+
+ # Ensure at least one shape is visible
+ if not any(_is_visible(shape) for shape in shapes):
+ cls.log.error("No visible shapes in the model instance")
+ invalid.update(shapes)
+
+ return list(invalid)
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise RuntimeError("Model content is invalid. See log.")
diff --git a/config/plugins/maya/publish/validate_no_animation.py b/config/plugins/maya/publish/validate_no_animation.py
new file mode 100644
index 0000000000..8b0da47d89
--- /dev/null
+++ b/config/plugins/maya/publish/validate_no_animation.py
@@ -0,0 +1,40 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateNoAnimation(pyblish.api.Validator):
+ """Ensure no keyframes on nodes in the Instance.
+
+    Even though a model would extract correctly without animCurves, this
+    avoids getting different output from a model when it is extracted from a
+    different frame than the first frame. (Might be overly restrictive though)
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ label = "No Animation"
+ hosts = ["maya"]
+ families = ["colorbleed.model"]
+ optional = True
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Keyframes found: {0}".format(invalid))
+
+ @staticmethod
+ def get_invalid(instance):
+
+ nodes = instance[:]
+ if not nodes:
+ return []
+
+ curves = cmds.keyframe(nodes, query=True, name=True)
+ if curves:
+ return list(set(cmds.listConnections(curves)))
+
+ return []
diff --git a/config/plugins/maya/publish/validate_no_default_camera.py b/config/plugins/maya/publish/validate_no_default_camera.py
new file mode 100644
index 0000000000..6bdb830f0b
--- /dev/null
+++ b/config/plugins/maya/publish/validate_no_default_camera.py
@@ -0,0 +1,31 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateNoDefaultCameras(pyblish.api.InstancePlugin):
+ """Ensure no default (startup) cameras are in the instance.
+
+    This might be unnecessary. In the past there were some issues with
+    referencing/importing files that contained the startup cameras: they
+    would override settings when loaded and would sometimes be skipped.
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ['maya']
+ families = ['colorbleed.camera']
+ version = (0, 1, 0)
+ label = "No Default Cameras"
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ @staticmethod
+ def get_invalid(instance):
+ cameras = cmds.ls(instance, type='camera', long=True)
+ return [cam for cam in cameras if
+ cmds.camera(cam, query=True, startupCamera=True)]
+
+ def process(self, instance):
+ """Process all the cameras in the instance"""
+ invalid = self.get_invalid(instance)
+ assert not invalid, "Default cameras found: {0}".format(invalid)
diff --git a/config/plugins/maya/publish/validate_no_namespace.py b/config/plugins/maya/publish/validate_no_namespace.py
new file mode 100644
index 0000000000..0f0bbad1c1
--- /dev/null
+++ b/config/plugins/maya/publish/validate_no_namespace.py
@@ -0,0 +1,54 @@
+import pymel.core as pm
+import maya.cmds as cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+def get_namespace(node_name):
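+    """Return the namespace from a node name.
+
+    Example (illustrative):
+        >>> get_namespace("|root|char:body_GEO")
+        'char'
+
+    """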
+ # ensure only node's name (not parent path)
+ node_name = node_name.rsplit("|")[-1]
+ # ensure only namespace
+ return node_name.rpartition(":")[0]
+
+
+class ValidateNoNamespace(pyblish.api.InstancePlugin):
+ """Ensure the nodes don't have a namespace"""
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ category = 'cleanup'
+ version = (0, 1, 0)
+ label = 'No Namespaces'
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.RepairAction]
+
+ @staticmethod
+ def get_invalid(instance):
+ nodes = cmds.ls(instance, long=True)
+ return [node for node in nodes if get_namespace(node)]
+
+ def process(self, instance):
+ """Process all the nodes in the instance"""
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise ValueError("Namespaces found: {0}".format(invalid))
+
+ @classmethod
+ def repair(cls, instance):
+ """Remove all namespaces from the nodes in the instance"""
+
+ invalid = cls.get_invalid(instance)
+
+ # Get nodes with pymel since we'll be renaming them
+ # Since we don't want to keep checking the hierarchy
+ # or full paths
+ nodes = pm.ls(invalid)
+
+ for node in nodes:
+ namespace = node.namespace()
+ if namespace:
+ name = node.nodeName()
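+                # Strip the namespace prefix, e.g. a nodeName of
+                # "char:body_GEO" with namespace "char:" becomes "body_GEO"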
+ node.rename(name[len(namespace):])
diff --git a/config/plugins/maya/publish/validate_no_null_transforms.py b/config/plugins/maya/publish/validate_no_null_transforms.py
new file mode 100644
index 0000000000..d94938493b
--- /dev/null
+++ b/config/plugins/maya/publish/validate_no_null_transforms.py
@@ -0,0 +1,76 @@
+import maya.cmds as cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+def has_shape_children(node):
+ # Check if any descendants
+ allDescendents = cmds.listRelatives(node,
+ allDescendents=True,
+ fullPath=True)
+ if not allDescendents:
+ return False
+
+ # Check if there are any shapes at all
+ shapes = cmds.ls(allDescendents, shapes=True)
+ if not shapes:
+ return False
+
+ # Check if all descendent shapes are intermediateObjects;
+ # if so we consider this node a null node and return False.
+ if all(cmds.getAttr('{0}.intermediateObject'.format(x)) for x in shapes):
+ return False
+
+ return True
+
+
+class ValidateNoNullTransforms(pyblish.api.InstancePlugin):
+ """Ensure no null transforms are in the scene.
+
+ Warning:
+ Transforms with only intermediate shapes are also considered null
+ transforms. These transform nodes could potentially be used in your
+ construction history, so take care when automatically fixing this or
+ when deleting the empty transforms manually.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ category = 'cleanup'
+ version = (0, 1, 0)
+ label = 'No Empty/Null Transforms'
+ actions = [colorbleed.api.RepairAction, colorbleed.api.SelectInvalidAction]
+
+ @staticmethod
+ def get_invalid(instance):
+ """Return invalid transforms in instance"""
+
+ transforms = cmds.ls(instance, type='transform', long=True)
+
+ invalid = []
+ for transform in transforms:
+ if not has_shape_children(transform):
+ invalid.append(transform)
+
+ return invalid
+
+ def process(self, instance):
+ """Process all the transform nodes in the instance """
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise ValueError("Empty transforms found: {0}".format(invalid))
+
+ @classmethod
+ def repair(cls, instance):
+ """Delete all null transforms.
+
+ Note: If the node is used elsewhere (eg. connection to attributes or
+ in history) deletion might mess up things.
+
+ """
+ invalid = cls.get_invalid(instance)
+ if invalid:
+ cmds.delete(invalid)
diff --git a/config/plugins/maya/publish/validate_no_unknown_nodes.py b/config/plugins/maya/publish/validate_no_unknown_nodes.py
new file mode 100644
index 0000000000..221e8f8b61
--- /dev/null
+++ b/config/plugins/maya/publish/validate_no_unknown_nodes.py
@@ -0,0 +1,34 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateNoUnknownNodes(pyblish.api.InstancePlugin):
+ """Checks to see if there are any unknown nodes in the instance.
+
+ This often happens if nodes from plug-ins are used but are not available
+ on this machine.
+
+ Note: Some studios use unknown nodes to store data on (as attributes)
+ because it's a lightweight node.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ['maya']
+ families = ['colorbleed.model', 'colorbleed.rig']
+ optional = True
+ label = "Unknown Nodes"
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ @staticmethod
+ def get_invalid(instance):
+ return cmds.ls(instance, type='unknown')
+
+ def process(self, instance):
+ """Process all the nodes in the instance"""
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise ValueError("Unknown nodes found: {0}".format(invalid))
diff --git a/config/plugins/maya/publish/validate_no_vraymesh.py b/config/plugins/maya/publish/validate_no_vraymesh.py
new file mode 100644
index 0000000000..575ad7e549
--- /dev/null
+++ b/config/plugins/maya/publish/validate_no_vraymesh.py
@@ -0,0 +1,24 @@
+import pyblish.api
+from maya import cmds
+
+
+class ValidateNoVRayMesh(pyblish.api.InstancePlugin):
+ """Validate there are no VRayMesh objects in the instance"""
+
+ order = pyblish.api.ValidatorOrder
+ label = 'No V-Ray Proxies (VRayMesh)'
+ families = ["colorbleed.pointcache"]
+
+ def process(self, instance):
+
+ shapes = cmds.ls(instance,
+ shapes=True,
+ type="mesh")
+
+ inputs = cmds.listConnections(shapes,
+ destination=False,
+ source=True) or []
+ vray_meshes = cmds.ls(inputs, type='VRayMesh')
+ if vray_meshes:
+ raise RuntimeError("Meshes that are VRayMeshes shouldn't "
+ "be pointcached: {0}".format(vray_meshes))
diff --git a/config/plugins/maya/publish/validate_node_ids.py b/config/plugins/maya/publish/validate_node_ids.py
new file mode 100644
index 0000000000..ea0ec18f85
--- /dev/null
+++ b/config/plugins/maya/publish/validate_node_ids.py
@@ -0,0 +1,48 @@
+import pyblish.api
+import colorbleed.api
+
+from config.apps.maya import lib
+
+
+class ValidateNodeIDs(pyblish.api.InstancePlugin):
+ """Validate nodes have a Colorbleed Id.
+
+    When IDs are missing from nodes, *save your scene* and they should be
+    generated automatically, because IDs are created on non-referenced nodes
+    in Maya upon scene save.
+
+ """
+
+ order = colorbleed.api.ValidatePipelineOrder
+ label = 'Instance Nodes Have ID'
+ hosts = ['maya']
+ families = ["colorbleed.model",
+ "colorbleed.look",
+ "colorbleed.rig",
+ "colorbleed.pointcache",
+ "colorbleed.animation",
+ "colorbleed.setdress"]
+
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.GenerateUUIDsOnInvalidAction]
+
+ def process(self, instance):
+ """Process all meshes"""
+
+ # Ensure all nodes have a cbId
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Nodes found without "
+ "IDs: {0}".format(invalid))
+
+ @classmethod
+ def get_invalid(cls, instance):
+ """Return the member nodes that are invalid"""
+
+ # We do want to check the referenced nodes as it might be
+ # part of the end product.
+ id_nodes = lib.get_id_required_nodes(referenced_nodes=True,
+ nodes=instance[:])
+ invalid = [n for n in id_nodes if not lib.get_id(n)]
+
+ return invalid
diff --git a/config/plugins/maya/publish/validate_node_ids_deformed_shapes.py b/config/plugins/maya/publish/validate_node_ids_deformed_shapes.py
new file mode 100644
index 0000000000..2a4b6ce8c5
--- /dev/null
+++ b/config/plugins/maya/publish/validate_node_ids_deformed_shapes.py
@@ -0,0 +1,66 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+import config.apps.maya.lib as lib
+
+
+class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin):
+ """Validate if deformed shapes have related IDs to the original shapes.
+
+ When a deformer is applied in the scene on a referenced mesh that already
+ had deformers then Maya will create a new shape node for the mesh that
+ does not have the original id. This validator checks whether the ids are
+ valid on all the shape nodes in the instance.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ families = ['colorbleed.look']
+ hosts = ['maya']
+ label = 'Deformed shape ids'
+ actions = [colorbleed.api.SelectInvalidAction, colorbleed.api.RepairAction]
+
+ def process(self, instance):
+ """Process all the nodes in the instance"""
+
+ # Ensure all nodes have a cbId and a related ID to the original shapes
+ # if a deformer has been created on the shape
+ invalid = self.get_invalid(instance)
+ if invalid:
+            raise RuntimeError("Shapes found that are considered 'Deformed' "
+                               "without object ids: {0}".format(invalid))
+
+ @classmethod
+ def get_invalid(cls, instance):
+ """Get all nodes which do not match the criteria"""
+
+ shapes = cmds.ls(instance[:],
+ dag=True,
+ leaf=True,
+ shapes=True,
+ long=True,
+ noIntermediate=True)
+
+ invalid = []
+ for shape in shapes:
+ history_id = lib.get_id_from_history(shape)
+ if history_id:
+ current_id = lib.get_id(shape)
+ if current_id != history_id:
+ invalid.append(shape)
+
+ return invalid
+
+ @classmethod
+ def repair(cls, instance):
+
+ for node in cls.get_invalid(instance):
+ # Get the original id from history
+ history_id = lib.get_id_from_history(node)
+ if not history_id:
+ cls.log.error("Could not find ID in history for '%s'", node)
+ continue
+
+ lib.set_id(node, history_id, overwrite=True)
+
diff --git a/config/plugins/maya/publish/validate_node_ids_in_database.py b/config/plugins/maya/publish/validate_node_ids_in_database.py
new file mode 100644
index 0000000000..788baf994d
--- /dev/null
+++ b/config/plugins/maya/publish/validate_node_ids_in_database.py
@@ -0,0 +1,60 @@
+import pyblish.api
+
+import avalon.io as io
+
+import colorbleed.api
+from config.apps.maya import lib
+
+
+class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin):
+ """Validate if the CB Id is related to an asset in the database
+
+ All nodes with the `cbId` attribute will be validated to ensure that
+ the loaded asset in the scene is related to the current project.
+
+    Tip: If there is an asset which is being reused from a different project,
+    please ensure the asset is republished in the new project.
+
+ """
+
+ order = colorbleed.api.ValidatePipelineOrder
+ label = 'Node Ids in Database'
+ hosts = ['maya']
+ families = ["*"]
+
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Found asset IDs which are not related to "
+ "current project in instance: "
+ "`%s`" % instance.name)
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ invalid = []
+
+ # Get all id required nodes
+ id_required_nodes = lib.get_id_required_nodes(referenced_nodes=True,
+ nodes=instance[:])
+
+ # check ids against database ids
+ db_asset_ids = io.find({"type": "asset"}).distinct("_id")
+ db_asset_ids = set(str(i) for i in db_asset_ids)
+
+ # Get all asset IDs
+ for node in id_required_nodes:
+ cb_id = lib.get_id(node)
+
+ # Ignore nodes without id, those are validated elsewhere
+ if not cb_id:
+ continue
+
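+            # A cbId is of the form "<assetId>:<uniqueId>"; the part before
+            # the ":" is the asset id to look up in the database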
+ asset_id = cb_id.split(":", 1)[0]
+ if asset_id not in db_asset_ids:
+ cls.log.error("`%s` has unassociated asset ID" % node)
+ invalid.append(node)
+
+ return invalid
diff --git a/config/plugins/maya/publish/validate_node_ids_related.py b/config/plugins/maya/publish/validate_node_ids_related.py
new file mode 100644
index 0000000000..67d10d3059
--- /dev/null
+++ b/config/plugins/maya/publish/validate_node_ids_related.py
@@ -0,0 +1,60 @@
+import pyblish.api
+import colorbleed.api
+
+import avalon.io as io
+
+from config.apps.maya import lib
+
+
+class ValidateNodeIDsRelated(pyblish.api.InstancePlugin):
+    """Validate nodes have a Colorbleed Id related to instance.data['asset']
+
+ """
+
+ order = colorbleed.api.ValidatePipelineOrder
+ label = 'Node Ids Related (ID)'
+ hosts = ['maya']
+ families = ["colorbleed.model",
+ "colorbleed.look",
+ "colorbleed.rig"]
+ optional = True
+
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.GenerateUUIDsOnInvalidAction]
+
+ def process(self, instance):
+ """Process all nodes in instance (including hierarchy)"""
+ # Ensure all nodes have a cbId
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Nodes IDs found that are not related to asset "
+ "'{}' : {}".format(instance.data['asset'],
+ invalid))
+
+ @classmethod
+ def get_invalid(cls, instance):
+ """Return the member nodes that are invalid"""
+ invalid = list()
+
+ asset = instance.data['asset']
+ asset_data = io.find_one({"name": asset,
+ "type": "asset"},
+ projection={"_id": True})
+ asset_id = str(asset_data['_id'])
+
+        # We do want to check the referenced nodes as they might be
+        # part of the end product
+ for node in instance:
+
+ _id = lib.get_id(node)
+ if not _id:
+ continue
+
+ node_asset_id = _id.split(":", 1)[0]
+ if node_asset_id != asset_id:
+ invalid.append(node)
+
+ return invalid
diff --git a/config/plugins/maya/publish/validate_node_ids_unique.py b/config/plugins/maya/publish/validate_node_ids_unique.py
new file mode 100644
index 0000000000..0de2defdf3
--- /dev/null
+++ b/config/plugins/maya/publish/validate_node_ids_unique.py
@@ -0,0 +1,58 @@
+from collections import defaultdict
+
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+import config.apps.maya.lib as lib
+
+
+class ValidateNodeIdsUnique(pyblish.api.InstancePlugin):
+ """Validate the nodes in the instance have a unique Colorbleed Id
+
+ Here we ensure that what has been added to the instance is unique
+ """
+
+ order = colorbleed.api.ValidatePipelineOrder
+ label = 'Non Duplicate Instance Members (ID)'
+ hosts = ['maya']
+ families = ["colorbleed.model",
+ "colorbleed.look",
+ "colorbleed.rig"]
+
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.GenerateUUIDsOnInvalidAction]
+
+ def process(self, instance):
+ """Process all meshes"""
+
+ # Ensure all nodes have a cbId
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Nodes found with non-unique "
+ "asset IDs: {0}".format(invalid))
+
+ @classmethod
+ def get_invalid(cls, instance):
+ """Return the member nodes that are invalid"""
+
+ # Check only non intermediate shapes
+ # todo: must the instance itself ensure to have no intermediates?
+ # todo: how come there are intermediates?
+        instance_members = cmds.ls(instance, noIntermediate=True, long=True)
+
+ # Collect each id with their members
+ ids = defaultdict(list)
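+        # e.g. {"<assetId>:<uniqueId>": ["|grp|node1", "|grp|node2"], ...}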
+ for member in instance_members:
+ object_id = lib.get_id(member)
+ if not object_id:
+ continue
+ ids[object_id].append(member)
+
+ # Take only the ids with more than one member
+ invalid = list()
+ for _ids, members in ids.iteritems():
+ if len(members) > 1:
+ cls.log.error("ID found on multiple nodes: '%s'" % members)
+ invalid.extend(members)
+
+ return invalid
diff --git a/config/plugins/maya/publish/validate_node_no_ghosting.py b/config/plugins/maya/publish/validate_node_no_ghosting.py
new file mode 100644
index 0000000000..ca5c4a1edc
--- /dev/null
+++ b/config/plugins/maya/publish/validate_node_no_ghosting.py
@@ -0,0 +1,49 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateNodeNoGhosting(pyblish.api.InstancePlugin):
+ """Ensure nodes do not have ghosting enabled.
+
+    If one would publish towards a non-Maya format it's likely that state
+    like ghosting won't be exported, e.g. when exporting to Alembic.
+
+ Instead of creating many micro-managing checks (like this one) to ensure
+ attributes have not been changed from their default it could be more
+ efficient to export to a format that will never hold such data anyway.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ['maya']
+ families = ['colorbleed.model', 'colorbleed.rig']
+ label = "No Ghosting"
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ _attributes = {'ghosting': 0}
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ # Transforms and shapes seem to have ghosting
+ nodes = cmds.ls(instance, long=True, type=['transform', 'shape'])
+ invalid = []
+ for node in nodes:
+ for attr, required_value in cls._attributes.iteritems():
+ if cmds.attributeQuery(attr, node=node, exists=True):
+
+ value = cmds.getAttr('{0}.{1}'.format(node, attr))
+ if value != required_value:
+ invalid.append(node)
+
+ return invalid
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise ValueError("Nodes with ghosting enabled found: "
+ "{0}".format(invalid))
diff --git a/config/plugins/maya/publish/validate_render_image_rule.py b/config/plugins/maya/publish/validate_render_image_rule.py
new file mode 100644
index 0000000000..c16b5ecc3a
--- /dev/null
+++ b/config/plugins/maya/publish/validate_render_image_rule.py
@@ -0,0 +1,26 @@
+import maya.mel as mel
+
+import pyblish.api
+import colorbleed.api
+
+
+def get_file_rule(rule):
+ """Workaround for a bug in python with cmds.workspace"""
+ return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule))
+
+
+class ValidateRenderImageRule(pyblish.api.ContextPlugin):
+    """Validates the "images" file rule is set to "renders"
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ label = "Images File Rule (Workspace)"
+ hosts = ["maya"]
+ families = ["colorbleed.renderlayer"]
+
+ def process(self, context):
+
+ assert get_file_rule("images") == "renders", (
+ "Workspace's `images` file rule must be set to: renders"
+ )
diff --git a/config/plugins/maya/publish/validate_renderlayer_aovs.py b/config/plugins/maya/publish/validate_renderlayer_aovs.py
new file mode 100644
index 0000000000..387f4b4881
--- /dev/null
+++ b/config/plugins/maya/publish/validate_renderlayer_aovs.py
@@ -0,0 +1,56 @@
+import pyblish.api
+
+from avalon import io
+import colorbleed.api
+
+
+class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin):
+    """Validate created AOVs / RenderElements are registered in the database
+
+    Each render element is registered as a subset which is formatted based on
+    the render layer and the render element, example:
+
+        <renderlayer>.<renderelement>
+
+    This translates to something like this:
+
+        CHAR.diffuse
+
+    This check is needed to ensure the render output is still complete
+
+ """
+
+ order = pyblish.api.ValidatorOrder + 0.1
+ label = "Render Passes / AOVs Are Registered"
+ hosts = ["maya"]
+ families = ["colorbleed.renderlayer"]
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Found unregistered subsets: {}".format(invalid))
+
+ def get_invalid(self, instance):
+
+ invalid = []
+
+ asset_name = instance.data["asset"]
+        render_passes = instance.data.get("renderPasses", [])
+        for render_pass in render_passes:
+ is_valid = self.validate_subset_registered(asset_name, render_pass)
+ if not is_valid:
+ invalid.append(render_pass)
+
+ return invalid
+
+ def validate_subset_registered(self, asset_name, subset_name):
+ """Check if subset is registered in the database under the asset"""
+
+ asset = io.find_one({"type": "asset", "name": asset_name})
+ is_valid = io.find_one({"type": "subset",
+ "name": subset_name,
+ "parent": asset["_id"]})
+
+ return is_valid
+
diff --git a/config/plugins/maya/publish/validate_rendersettings.py b/config/plugins/maya/publish/validate_rendersettings.py
new file mode 100644
index 0000000000..5782a327f8
--- /dev/null
+++ b/config/plugins/maya/publish/validate_rendersettings.py
@@ -0,0 +1,108 @@
+import maya.cmds as cmds
+
+import pyblish.api
+import colorbleed.api
+import config.apps.maya.lib as lib
+
+
+class ValidateRenderSettings(pyblish.api.InstancePlugin):
+ """Validates the global render settings
+
+    * File Name Prefix must be as follows:
+        * vray: <Scene>/<Layer>/<Layer>
+        * arnold: <Scene>/<RenderLayer>/<RenderLayer>
+        * default: <Scene>/<RenderLayer>/<RenderLayer>
+
+    * Frame Padding must be:
+        * default: 4
+
+    * Animation must be toggled on, in Render Settings - Common tab:
+        * vray: Animation set to Standard or Specific
+        * arnold: Frame / Animation ext: Any choice without "(Single Frame)"
+        * redshift: Animation toggled on
+
+ NOTE:
+ The repair function of this plugin does not repair the animation
+ setting of the render settings due to multiple possibilities.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ label = "Render Settings"
+ hosts = ["maya"]
+ families = ["colorbleed.renderlayer"]
+ actions = [colorbleed.api.RepairAction]
+
+ DEFAULT_PADDING = 4
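+    # The expected prefixes use Maya's file name prefix tokens
+    # (e.g. <Scene>, <Layer>, <RenderLayer>) which the renderer
+    # resolves at render time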
+    RENDERER_PREFIX = {"vray": "<Scene>/<Scene>_<Layer>/<Layer>"}
+    DEFAULT_PREFIX = "<Scene>/<Scene>_<RenderLayer>/<RenderLayer>"
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise ValueError("Invalid render settings found for '%s'!"
+ % instance.name)
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ invalid = False
+
+ renderer = instance.data['renderer']
+ layer_node = instance.data['setMembers']
+
+ # Collect the filename prefix in the render layer
+ with lib.renderlayer(layer_node):
+
+ render_attrs = lib.RENDER_ATTRS.get(renderer,
+ lib.RENDER_ATTRS['default'])
+ node = render_attrs["node"]
+ padding_attr = render_attrs["padding"]
+ prefix_attr = render_attrs["prefix"]
+
+ prefix = cmds.getAttr("{}.{}".format(node, prefix_attr))
+ padding = cmds.getAttr("{}.{}".format(node, padding_attr))
+
+ anim_override = cmds.getAttr("defaultRenderGlobals.animation")
+ if not anim_override:
+ invalid = True
+            cls.log.error("Animation needs to be enabled. Use the same "
+                          "frame for start and end to render a single frame")
+
+ fname_prefix = cls.RENDERER_PREFIX.get(renderer,
+ cls.DEFAULT_PREFIX)
+ if prefix != fname_prefix:
+ invalid = True
+ cls.log.error("Wrong file name prefix, expecting %s"
+ % fname_prefix)
+
+ if padding != cls.DEFAULT_PADDING:
+ invalid = True
+ cls.log.error("Expecting padding of {} ( {} )".format(
+ cls.DEFAULT_PADDING, "0" * cls.DEFAULT_PADDING))
+
+ return invalid
+
+ @classmethod
+ def repair(cls, instance):
+
+ renderer = instance.data['renderer']
+ layer_node = instance.data['setMembers']
+
+ with lib.renderlayer(layer_node):
+ default = lib.RENDER_ATTRS['default']
+ render_attrs = lib.RENDER_ATTRS.get(renderer, default)
+
+ # Repair prefix
+ node = render_attrs["node"]
+ prefix_attr = render_attrs["prefix"]
+
+ fname_prefix = cls.RENDERER_PREFIX.get(renderer, cls.DEFAULT_PREFIX)
+ cmds.setAttr("{}.{}".format(node, prefix_attr),
+ fname_prefix, type="string")
+
+ # Repair padding
+ padding_attr = render_attrs["padding"]
+ cmds.setAttr("{}.{}".format(node, padding_attr),
+ cls.DEFAULT_PADDING)
diff --git a/config/plugins/maya/publish/validate_resources.py b/config/plugins/maya/publish/validate_resources.py
new file mode 100644
index 0000000000..2dc6f8c313
--- /dev/null
+++ b/config/plugins/maya/publish/validate_resources.py
@@ -0,0 +1,29 @@
+import pyblish.api
+import colorbleed.api
+
+import os
+
+
+class ValidateResources(pyblish.api.InstancePlugin):
+ """Validates mapped resources.
+
+ These are external files to the current application, for example
+ these could be textures, image planes, cache files or other linked
+ media.
+
+ This validates:
+ - The resources are existing files.
+ - The resources have correctly collected the data.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ label = "Resources"
+
+ def process(self, instance):
+
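+        # Assumed resource layout (illustrative), e.g.:
+        #   {"source": "<source node or file>",
+        #    "files": ["/abs/path/texture.1001.exr"]}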
+ for resource in instance.data.get('resources', []):
+ # Required data
+ assert "source" in resource, "No source found"
+ assert "files" in resource, "No files from source"
+        assert all(os.path.exists(f) for f in resource['files']), (
+            "Resource contains files that do not exist on disk")
diff --git a/config/plugins/maya/publish/validate_rig_contents.py b/config/plugins/maya/publish/validate_rig_contents.py
new file mode 100644
index 0000000000..7530936a66
--- /dev/null
+++ b/config/plugins/maya/publish/validate_rig_contents.py
@@ -0,0 +1,131 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateRigContents(pyblish.api.InstancePlugin):
+ """Ensure rig contains pipeline-critical content
+
+ Every rig must contain at least two object sets:
+ "controls_SET" - Set of all animatable controls
+ "out_SET" - Set of all cachable meshes
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ label = "Rig Contents"
+ hosts = ["maya"]
+ families = ["colorbleed.rig"]
+
+ accepted_output = ["mesh", "transform"]
+ accepted_controllers = ["transform"]
+
+ def process(self, instance):
+
+ objectsets = ("controls_SET", "out_SET")
+ missing = [obj for obj in objectsets if obj not in instance]
+ assert not missing, ("%s is missing %s" % (instance, missing))
+
+ # Ensure there are at least some transforms or dag nodes
+ # in the rig instance
+ set_members = instance.data['setMembers']
+ if not cmds.ls(set_members, type="dagNode", long=True):
+ raise RuntimeError("No dag nodes in the pointcache instance. "
+ "(Empty instance?)")
+
+ # Ensure contents in sets and retrieve long path for all objects
+ output_content = cmds.sets("out_SET", query=True) or []
+ assert output_content, "Must have members in rig out_SET"
+ output_content = cmds.ls(output_content, long=True)
+
+ controls_content = cmds.sets("controls_SET", query=True) or []
+ assert controls_content, "Must have members in rig controls_SET"
+ controls_content = cmds.ls(controls_content, long=True)
+
+ # Validate members are inside the hierarchy from root node
+ root_node = cmds.ls(set_members, assemblies=True)
+ hierarchy = cmds.listRelatives(root_node, allDescendents=True,
+ fullPath=True)
+ hierarchy = set(hierarchy)
+
+ invalid_hierarchy = []
+ for node in output_content:
+ if node not in hierarchy:
+ invalid_hierarchy.append(node)
+ for node in controls_content:
+ if node not in hierarchy:
+ invalid_hierarchy.append(node)
+
+ # Additional validations
+ invalid_geometry = self.validate_geometry(output_content)
+ invalid_controls = self.validate_controls(controls_content)
+
+ error = False
+ if invalid_hierarchy:
+ self.log.error("Found nodes which reside outside of root group "
+ "while they are set up for publishing."
+ "\n%s" % invalid_hierarchy)
+ error = True
+
+ if invalid_controls:
+ self.log.error("Only transforms can be part of the controls_SET."
+ "\n%s" % invalid_controls)
+ error = True
+
+ if invalid_geometry:
+ self.log.error("Only meshes can be part of the out_SET\n%s"
+ % invalid_geometry)
+ error = True
+
+ if error:
+ raise RuntimeError("Invalid rig content. See log for details.")
+
+ def validate_geometry(self, set_members):
+ """Check if the out set passes the validations
+
+ Checks if all its set members are within the hierarchy of the root
+ Checks if the node types of the set members valid
+
+ Args:
+ set_members: list of nodes of the controls_set
+ hierarchy: list of nodes which reside under the root node
+
+ Returns:
+ errors (list)
+ """
+
+ # Validate all shape types
+ invalid = []
+ shapes = cmds.listRelatives(set_members,
+ allDescendents=True,
+ shapes=True,
+ fullPath=True) or []
+ all_shapes = cmds.ls(set_members + shapes, long=True, shapes=True)
+ for shape in all_shapes:
+ if cmds.nodeType(shape) not in self.accepted_output:
+ invalid.append(shape)
+
+ return invalid
+
+ def validate_controls(self, set_members):
+ """Check if the controller set passes the validations
+
+ Checks if all its set members are within the hierarchy of the root
+ Checks if the node types of the set members valid
+
+ Args:
+ set_members: list of nodes of the controls_set
+ hierarchy: list of nodes which reside under the root node
+
+ Returns:
+ errors (list)
+ """
+
+ # Validate control types
+ invalid = []
+ for node in set_members:
+ if cmds.nodeType(node) not in self.accepted_controllers:
+ invalid.append(node)
+
+ return invalid
diff --git a/config/plugins/maya/publish/validate_rig_controllers.py b/config/plugins/maya/publish/validate_rig_controllers.py
new file mode 100644
index 0000000000..2ae799fa71
--- /dev/null
+++ b/config/plugins/maya/publish/validate_rig_controllers.py
@@ -0,0 +1,202 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+from cb.utils.maya.context import undo_chunk
+
+
+class ValidateRigControllers(pyblish.api.InstancePlugin):
+ """Validate rig controllers.
+
+    Controls must have their transformation attributes at the default
+    values (translate zero, rotate zero, scale one) whenever those
+    attributes are unlocked.
+
+ Unlocked keyable attributes may not have any incoming connections. If
+ these connections are required for the rig then lock the attributes.
+
+ The visibility attribute must be locked.
+
+ Note that `repair` will:
+ - Lock all visibility attributes
+ - Reset all default values for translate, rotate, scale
+ - Break all incoming connections to keyable attributes
+
+ """
+ order = colorbleed.api.ValidateContentsOrder + 0.05
+ label = "Rig Controllers"
+ hosts = ["maya"]
+ families = ["colorbleed.rig"]
+ actions = [colorbleed.api.RepairAction,
+ colorbleed.api.SelectInvalidAction]
+
+ # Default controller values
+ CONTROLLER_DEFAULTS = {
+ "translateX": 0,
+ "translateY": 0,
+ "translateZ": 0,
+ "rotateX": 0,
+ "rotateY": 0,
+ "rotateZ": 0,
+ "scaleX": 1,
+ "scaleY": 1,
+ "scaleZ": 1
+ }
+
+ def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError('{} failed, see log '
+ 'information'.format(self.label))
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ controllers_sets = [i for i in instance if i == "controls_SET"]
+ controls = cmds.sets(controllers_sets, query=True)
+ assert controls, "Must have 'controls_SET' in rig instance"
+
+ # Ensure all controls are within the top group
+ lookup = set(instance[:])
+ assert all(control in lookup for control in cmds.ls(controls,
+ long=True)), (
+ "All controls must be inside the rig's group."
+ )
+
+ # Validate all controls
+ has_connections = list()
+ has_unlocked_visibility = list()
+ has_non_default_values = list()
+ for control in controls:
+ if cls.get_connected_attributes(control):
+ has_connections.append(control)
+
+ # check if visibility is locked
+ attribute = "{}.visibility".format(control)
+ locked = cmds.getAttr(attribute, lock=True)
+ if not locked:
+ has_unlocked_visibility.append(control)
+
+ if cls.get_non_default_attributes(control):
+ has_non_default_values.append(control)
+
+ if has_connections:
+ cls.log.error("Controls have input connections: "
+ "%s" % has_connections)
+
+ if has_non_default_values:
+ cls.log.error("Controls have non-default values: "
+ "%s" % has_non_default_values)
+
+ if has_unlocked_visibility:
+ cls.log.error("Controls have unlocked visibility "
+ "attribute: %s" % has_unlocked_visibility)
+
+ invalid = []
+ if (has_connections or
+ has_unlocked_visibility or
+ has_non_default_values):
+ invalid = set()
+ invalid.update(has_connections)
+ invalid.update(has_non_default_values)
+ invalid.update(has_unlocked_visibility)
+ invalid = list(invalid)
+ cls.log.error("Invalid rig controllers. See log for details.")
+
+ return invalid
+
+ @classmethod
+ def get_non_default_attributes(cls, control):
+ """Return attribute plugs with non-default values
+
+ Args:
+ control (str): Name of control node.
+
+ Returns:
+ list: The invalid plugs
+
+ """
+
+ invalid = []
+ for attr, default in cls.CONTROLLER_DEFAULTS.items():
+ if cmds.attributeQuery(attr, node=control, exists=True):
+ plug = "{}.{}".format(control, attr)
+
+ # Ignore locked attributes
+ locked = cmds.getAttr(plug, lock=True)
+ if locked:
+ continue
+
+ value = cmds.getAttr(plug)
+ if value != default:
+ cls.log.warning("Control non-default value: "
+ "%s = %s" % (plug, value))
+ invalid.append(plug)
+
+ return invalid
+
+ @staticmethod
+ def get_connected_attributes(control):
+ """Return attribute plugs with incoming connections.
+
+ This will also ensure no (driven) keys on unlocked keyable attributes.
+
+ Args:
+ control (str): Name of control node.
+
+ Returns:
+ list: The invalid plugs
+
+ """
+        attributes = cmds.listAttr(control, keyable=True, scalar=True) or []
+ invalid = []
+ for attr in attributes:
+ plug = "{}.{}".format(control, attr)
+
+ # Ignore locked attributes
+ locked = cmds.getAttr(plug, lock=True)
+ if locked:
+ continue
+
+ # Check for incoming connections
+ if cmds.listConnections(plug, source=True, destination=False):
+ invalid.append(plug)
+
+ return invalid
+
+ @classmethod
+ def repair(cls, instance):
+
+ # Use a single undo chunk
+ with undo_chunk():
+ controls = cmds.sets("controls_SET", query=True)
+ for control in controls:
+
+ # Lock visibility
+ attr = "{}.visibility".format(control)
+ locked = cmds.getAttr(attr, lock=True)
+ if not locked:
+ cls.log.info("Locking visibility for %s" % control)
+ cmds.setAttr(attr, lock=True)
+
+ # Remove incoming connections
+ invalid_plugs = cls.get_connected_attributes(control)
+ if invalid_plugs:
+ for plug in invalid_plugs:
+ cls.log.info("Breaking input connection to %s" % plug)
+ source = cmds.listConnections(plug,
+ source=True,
+ destination=False,
+ plugs=True)[0]
+ cmds.disconnectAttr(source, plug)
+
+ # Reset non-default values
+ invalid_plugs = cls.get_non_default_attributes(control)
+ if invalid_plugs:
+ for plug in invalid_plugs:
+ attr = plug.split(".")[-1]
+ default = cls.CONTROLLER_DEFAULTS[attr]
+ cls.log.info("Setting %s to %s" % (plug, default))
+ cmds.setAttr(plug, default)
diff --git a/config/plugins/maya/publish/validate_rig_controllers_arnold_attributes.py b/config/plugins/maya/publish/validate_rig_controllers_arnold_attributes.py
new file mode 100644
index 0000000000..b6a1191006
--- /dev/null
+++ b/config/plugins/maya/publish/validate_rig_controllers_arnold_attributes.py
@@ -0,0 +1,89 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+from cb.utils.maya.context import undo_chunk
+
+
+class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin):
+ """Validate rig control curves have no keyable arnold attributes.
+
+ The Arnold plug-in will create curve attributes like:
+ - aiRenderCurve
+ - aiCurveWidth
+ - aiSampleRate
+ - aiCurveShaderR
+ - aiCurveShaderG
+ - aiCurveShaderB
+
+    Unfortunately these attributes are *keyable* and visible in the
+    channelBox by default. As such pressing a regular "S" set key
+    shortcut will set keys on these attributes too, thus cluttering
+ the animator's scene.
+
+ This validator will ensure they are hidden or unkeyable attributes.
+
+ """
+ order = colorbleed.api.ValidateContentsOrder + 0.05
+ label = "Rig Controllers (Arnold Attributes)"
+ hosts = ["maya"]
+ families = ["colorbleed.rig"]
+ actions = [colorbleed.api.RepairAction,
+ colorbleed.api.SelectInvalidAction]
+
+ attributes = [
+ "rcurve",
+ "cwdth",
+ "srate",
+ "ai_curve_shaderr",
+ "ai_curve_shaderg",
+ "ai_curve_shaderb"
+ ]
+
+ def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError('{} failed, see log '
+ 'information'.format(self.label))
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ controllers_sets = [i for i in instance if i == "controls_SET"]
+ if not controllers_sets:
+ return []
+
+ controls = cmds.sets(controllers_sets, query=True) or []
+ if not controls:
+ return []
+
+ shapes = cmds.ls(controls,
+ dag=True,
+ leaf=True,
+ long=True,
+ shapes=True,
+ noIntermediate=True)
+ curves = cmds.ls(shapes, type="nurbsCurve", long=True)
+
+ invalid = list()
+ for node in curves:
+
+ for attribute in cls.attributes:
+ if cmds.attributeQuery(attribute, node=node, exists=True):
+ plug = "{}.{}".format(node, attribute)
+ if cmds.getAttr(plug, keyable=True):
+ invalid.append(node)
+ break
+
+ return invalid
+
+ @classmethod
+ def repair(cls, instance):
+
+ invalid = cls.get_invalid(instance)
+ with undo_chunk():
+ for node in invalid:
+ for attribute in cls.attributes:
+ if cmds.attributeQuery(attribute, node=node, exists=True):
+ plug = "{}.{}".format(node, attribute)
+ cmds.setAttr(plug, channelBox=False, keyable=False)
diff --git a/config/plugins/maya/publish/validate_rig_out_set_node_ids.py b/config/plugins/maya/publish/validate_rig_out_set_node_ids.py
new file mode 100644
index 0000000000..2d8c06ab4a
--- /dev/null
+++ b/config/plugins/maya/publish/validate_rig_out_set_node_ids.py
@@ -0,0 +1,68 @@
+import maya.cmds as cmds
+
+import pyblish.api
+import colorbleed.api
+import config.apps.maya.lib as lib
+
+
+class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin):
+ """Validate if deformed shapes have related IDs to the original shapes.
+
+ When a deformer is applied in the scene on a referenced mesh that already
+ had deformers then Maya will create a new shape node for the mesh that
+ does not have the original id. This validator checks whether the ids are
+ valid on all the shape nodes in the instance.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ families = ["colorbleed.rig"]
+ hosts = ['maya']
+ label = 'Rig Out Set Node Ids'
+ actions = [colorbleed.api.SelectInvalidAction, colorbleed.api.RepairAction]
+
+ def process(self, instance):
+ """Process all meshes"""
+
+ # Ensure all nodes have a cbId and a related ID to the original shapes
+ # if a deformer has been created on the shape
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Nodes found with non-related "
+ "asset IDs: {0}".format(invalid))
+
+ @classmethod
+ def get_invalid(cls, instance):
+ """Get all nodes which do not match the criteria"""
+
+ invalid = []
+
+ out_set = next(x for x in instance if x.endswith("out_SET"))
+ members = cmds.sets(out_set, query=True)
+ shapes = cmds.ls(members,
+ dag=True,
+ leaf=True,
+ shapes=True,
+ long=True,
+ noIntermediate=True)
+
+ for shape in shapes:
+ history_id = lib.get_id_from_history(shape)
+ if history_id:
+ current_id = lib.get_id(shape)
+ if current_id != history_id:
+ invalid.append(shape)
+
+ return invalid
+
+ @classmethod
+ def repair(cls, instance):
+
+ for node in cls.get_invalid(instance):
+ # Get the original id from history
+ history_id = lib.get_id_from_history(node)
+ if not history_id:
+ cls.log.error("Could not find ID in history for '%s'", node)
+ continue
+
+ lib.set_id(node, history_id, overwrite=True)
diff --git a/config/plugins/maya/publish/validate_scene_set_workspace.py b/config/plugins/maya/publish/validate_scene_set_workspace.py
new file mode 100644
index 0000000000..3f4f631897
--- /dev/null
+++ b/config/plugins/maya/publish/validate_scene_set_workspace.py
@@ -0,0 +1,48 @@
+import os
+
+import maya.cmds as cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+def is_subdir(path, root_dir):
+ """ Returns whether path is a subdirectory (or file) within root_dir """
+ path = os.path.realpath(path)
+ root_dir = os.path.realpath(root_dir)
+
+ # If not on same drive
+ if os.path.splitdrive(path)[0] != os.path.splitdrive(root_dir)[0]:
+ return False
+
+ # Get 'relative path' (can contain ../ which means going up)
+ relative = os.path.relpath(path, root_dir)
+
+ # Check if the path starts by going up, if so it's not a subdirectory. :)
+ if relative.startswith(os.pardir) or relative == os.curdir:
+ return False
+ else:
+ return True
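+
+# For example (illustrative paths):
+#   is_subdir("P:/proj/scenes/a.ma", "P:/proj")  -> True
+#   is_subdir("D:/other/a.ma", "P:/proj")        -> False (different drive)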
+
+
+class ValidateSceneSetWorkspace(pyblish.api.ContextPlugin):
+ """Validate the scene is inside the currently set Maya workspace"""
+
+ order = colorbleed.api.ValidatePipelineOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ category = 'scene'
+ version = (0, 1, 0)
+ label = 'Maya Workspace Set'
+
+ def process(self, context):
+
+ scene_name = cmds.file(query=True, sceneName=True)
+ if not scene_name:
+ raise RuntimeError("Scene hasn't been saved. Workspace can't be "
+ "validated.")
+
+ root_dir = cmds.workspace(query=True, rootDirectory=True)
+
+ if not is_subdir(scene_name, root_dir):
+ raise RuntimeError("Maya workspace is not set correctly.")
diff --git a/config/plugins/maya/publish/validate_setdress_namespaces.py b/config/plugins/maya/publish/validate_setdress_namespaces.py
new file mode 100644
index 0000000000..1eda02cf74
--- /dev/null
+++ b/config/plugins/maya/publish/validate_setdress_namespaces.py
@@ -0,0 +1,39 @@
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateSetdressNamespaces(pyblish.api.InstancePlugin):
+ """Ensure namespaces are not nested
+
+    In the outliner an item in a normal namespace looks as follows:
+ props_desk_01_:modelDefault
+
+    Any namespace which deviates from that is illegal. Example of an
+    illegal namespace:
+ room_study_01_:props_desk_01_:modelDefault
+
+ """
+
+ label = "Validate Setdress Namespaces"
+ order = pyblish.api.ValidatorOrder
+ families = ["colorbleed.setdress"]
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ def process(self, instance):
+
+ self.log.info("Checking namespace for %s" % instance.name)
+ if self.get_invalid(instance):
+ raise RuntimeError("Nested namespaces found")
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ from maya import cmds
+
+ invalid = []
+ for item in cmds.ls(instance):
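+            # e.g. "room_study_01_:props_desk_01_:modelDefault" splits into
+            # ["room_study_01_", "props_desk_01_", "modelDefault"]; more than
+            # one namespace part means the namespace is nested.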
+ item_parts = item.split("|", 1)[0].rsplit(":")
+ if len(item_parts[:-1]) > 1:
+ invalid.append(item)
+
+ return invalid
diff --git a/config/plugins/maya/publish/validate_setdress_transforms.py b/config/plugins/maya/publish/validate_setdress_transforms.py
new file mode 100644
index 0000000000..d2caa03ca2
--- /dev/null
+++ b/config/plugins/maya/publish/validate_setdress_transforms.py
@@ -0,0 +1,107 @@
+import pyblish.api
+import colorbleed.api
+
+from maya import cmds
+
+
+class ValidateSetDressModelTransforms(pyblish.api.InstancePlugin):
+ """Verify only root nodes of the loaded asset have transformations.
+
+ Note: This check is temporary and is subject to change.
+
+ Example outliner:
+ <> means referenced
+ ===================================================================
+
+ setdress_GRP|
+ props_GRP|
+ barrel_01_:modelDefault| [can have transforms]
+ <> barrel_01_:barrel_GRP [CAN'T have transforms]
+
+ fence_01_:modelDefault| [can have transforms]
+ <> fence_01_:fence_GRP [CAN'T have transforms]
+
+ """
+
+ order = pyblish.api.ValidatorOrder + 0.49
+ label = "Setdress Model Transforms"
+ families = ["colorbleed.setdress"]
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.RepairAction]
+
+ prompt_message = ("You are about to reset the matrix to the default values."
+ " This can alter the look of your scene. "
+ "Are you sure you want to continue?")
+
+ def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Found {} invalid transforms of setdress "
+ "items".format(len(invalid)))
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ import config.apps.maya.lib as lib
+
+ # Get all transforms in the loaded containers
+ container_roots = cmds.listRelatives(instance.data["hierarchy"],
+ children=True,
+ type="transform",
+ fullPath=True)
+
+ transforms_in_container = cmds.listRelatives(container_roots,
+ allDescendents=True,
+ type="transform",
+ fullPath=True)
+
+ # Extra check due to the container roots still being passed through
+ transforms_in_container = [i for i in transforms_in_container if i
+ not in container_roots]
+
+ # Ensure all are identity matrix
+ invalid = []
+ for transform in transforms_in_container:
+ node_matrix = cmds.xform(transform,
+ query=True,
+ matrix=True,
+ objectSpace=True)
+ if not lib.matrix_equals(node_matrix, lib.DEFAULT_MATRIX):
+ invalid.append(transform)
+
+ return invalid
+
+ @classmethod
+ def repair(cls, instance):
+ """Reset matrix for illegally transformed nodes
+
+ We want to ensure the user knows the reset will alter the look of
+ the current scene because the transformations were done on asset
+ nodes instead of the asset top node.
+
+ Args:
+ instance:
+
+ Returns:
+ None
+
+ """
+
+ import config.apps.maya.lib as lib
+ from avalon.vendor.Qt import QtWidgets
+
+        # Short alias for readability
+ messagebox = QtWidgets.QMessageBox
+ mode = messagebox.StandardButton.Ok | messagebox.StandardButton.Cancel
+ choice = messagebox.warning(None,
+ "Matrix reset",
+ cls.prompt_message,
+ mode)
+
+ invalid = cls.get_invalid(instance)
+ if not invalid:
+ cls.log.info("No invalid nodes")
+ return
+
+        if choice == messagebox.StandardButton.Ok:
+ cmds.xform(invalid, matrix=lib.DEFAULT_MATRIX, objectSpace=True)
diff --git a/config/plugins/maya/publish/validate_shape_default_names.py b/config/plugins/maya/publish/validate_shape_default_names.py
new file mode 100644
index 0000000000..75fd8f3f1e
--- /dev/null
+++ b/config/plugins/maya/publish/validate_shape_default_names.py
@@ -0,0 +1,88 @@
+import re
+
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+def short_name(node):
+ return node.rsplit("|", 1)[-1].rsplit(":", 1)[-1]
+
+
+class ValidateShapeDefaultNames(pyblish.api.InstancePlugin):
+ """Validates that Shape names are using Maya's default format.
+
+ When you create a new polygon cube Maya will name the transform
+ and shape respectively:
+ - ['pCube1', 'pCubeShape1']
+ If you rename it to `bar1` it will become:
+ - ['bar1', 'barShape1']
+ Then if you rename it to `bar` it will become:
+ - ['bar', 'barShape']
+    Rename it again to `bar1` and it will differ from before:
+ - ['bar1', 'bar1Shape']
+ Note that bar1Shape != barShape1
+    Thus the numeric suffix can sit either before or after "Shape".
+    This makes it harder to define which number belongs where when a
+    node contains multiple shapes, for example rigs with many controls
+    consisting of multiple curves.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ category = 'cleanup'
+ optional = True
+ version = (0, 1, 0)
+ label = "Shape Default Naming"
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.RepairAction]
+
+ @staticmethod
+ def _define_default_name(shape):
+ parent = cmds.listRelatives(shape, parent=True, fullPath=True)[0]
+ transform = short_name(parent)
+ return '{0}Shape'.format(transform)
+
+ @staticmethod
+ def _is_valid(shape):
+ """ Return whether the shape's name is similar to Maya's default. """
+ transform = cmds.listRelatives(shape, parent=True, fullPath=True)[0]
+
+ transform_name = short_name(transform)
+ shape_name = short_name(shape)
+
+ # A Shape's name can be either {transform}{numSuffix}
+ # Shape or {transform}Shape{numSuffix}
+ # Upon renaming nodes in Maya that is
+ # the pattern Maya will act towards.
+ transform_no_num = transform_name.rstrip("0123456789")
+ pattern = '^{transform}[0-9]*Shape[0-9]*$'.format(
+ transform=transform_no_num)
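+
+        # e.g. transform "bar1" yields pattern "^bar[0-9]*Shape[0-9]*$",
+        # which accepts both "barShape1" and "bar1Shape".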
+
+ if re.match(pattern, shape_name):
+ return True
+ else:
+ return False
+
+ @classmethod
+ def get_invalid(cls, instance):
+ shapes = cmds.ls(instance, shapes=True, long=True)
+ return [shape for shape in shapes if not cls._is_valid(shape)]
+
+ def process(self, instance):
+ """Process all the shape nodes in the instance"""
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise ValueError("Incorrectly named shapes "
+ "found: {0}".format(invalid))
+
+ @classmethod
+ def repair(cls, instance):
+ """Process all the shape nodes in the instance"""
+ for shape in cls.get_invalid(instance):
+ correct_shape_name = cls._define_default_name(shape)
+ cmds.rename(shape, correct_shape_name)
diff --git a/config/plugins/maya/publish/validate_shape_render_stats.py b/config/plugins/maya/publish/validate_shape_render_stats.py
new file mode 100644
index 0000000000..9a3067badb
--- /dev/null
+++ b/config/plugins/maya/publish/validate_shape_render_stats.py
@@ -0,0 +1,59 @@
+import pyblish.api
+import colorbleed.api
+
+from maya import cmds
+
+
+class ValidateShapeRenderStats(pyblish.api.Validator):
+ """Ensure all render stats are set to the default values."""
+
+ order = colorbleed.api.ValidateMeshOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ label = 'Shape Default Render Stats'
+ actions = [colorbleed.api.SelectInvalidAction,
+ colorbleed.api.RepairAction]
+
+ defaults = {'castsShadows': 1,
+ 'receiveShadows': 1,
+ 'motionBlur': 1,
+ 'primaryVisibility': 1,
+ 'smoothShading': 1,
+ 'visibleInReflections': 1,
+ 'visibleInRefractions': 1,
+ 'doubleSided': 1,
+ 'opposite': 0}
+
+ @classmethod
+ def get_invalid(cls, instance):
+ # It seems the "surfaceShape" and those derived from it have
+ # `renderStat` attributes.
+ shapes = cmds.ls(instance, long=True, type='surfaceShape')
+ invalid = []
+ for shape in shapes:
+ for attr, default_value in cls.defaults.iteritems():
+ if cmds.attributeQuery(attr, node=shape, exists=True):
+ value = cmds.getAttr('{}.{}'.format(shape, attr))
+ if value != default_value:
+ invalid.append(shape)
+
+ return invalid
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise ValueError("Shapes with non-default renderStats "
+ "found: {0}".format(invalid))
+
+ @classmethod
+ def repair(cls, instance):
+ for shape in cls.get_invalid(instance):
+ for attr, default_value in cls.defaults.iteritems():
+
+ if cmds.attributeQuery(attr, node=shape, exists=True):
+ plug = '{0}.{1}'.format(shape, attr)
+ value = cmds.getAttr(plug)
+ if value != default_value:
+ cmds.setAttr(plug, default_value)
diff --git a/config/plugins/maya/publish/validate_single_assembly.py b/config/plugins/maya/publish/validate_single_assembly.py
new file mode 100644
index 0000000000..886e88ada3
--- /dev/null
+++ b/config/plugins/maya/publish/validate_single_assembly.py
@@ -0,0 +1,36 @@
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateSingleAssembly(pyblish.api.InstancePlugin):
+ """Ensure the content of the instance is grouped in a single hierarchy
+
+ The instance must have a single root node containing all the content.
+ This root node *must* be a top group in the outliner.
+
+ Example outliner:
+ root_GRP
+ -- geometry_GRP
+ -- mesh_GEO
+ -- controls_GRP
+ -- control_CTL
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ['maya']
+ families = ['colorbleed.rig', 'colorbleed.animation']
+ label = 'Single Assembly'
+
+ def process(self, instance):
+ from maya import cmds
+
+ assemblies = cmds.ls(instance, assemblies=True)
+
+        # ensure unique (maya.cmds.ls does not guarantee uniqueness)
+ assemblies = set(assemblies)
+
+ assert len(assemblies) > 0, (
+ "One assembly required for: %s (currently empty?)" % instance)
+ assert len(assemblies) < 2, (
+ 'Multiple assemblies found: %s' % assemblies)
diff --git a/config/plugins/maya/publish/validate_step_size.py b/config/plugins/maya/publish/validate_step_size.py
new file mode 100644
index 0000000000..7267d99a35
--- /dev/null
+++ b/config/plugins/maya/publish/validate_step_size.py
@@ -0,0 +1,41 @@
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateStepSize(pyblish.api.InstancePlugin):
+ """Validates the step size for the instance is in a valid range.
+
+    For example, the `step` size should never be lower than or equal
+    to zero.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ label = 'Step size'
+ families = ['colorbleed.camera',
+ 'colorbleed.pointcache',
+ 'colorbleed.animation']
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ MIN = 0.01
+ MAX = 1.0
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ objset = instance.data['name']
+ step = instance.data.get("step", 1.0)
+
+ if step < cls.MIN or step > cls.MAX:
+ cls.log.warning("Step size is outside of valid range: {0} "
+ "(valid: {1} to {2})".format(step,
+ cls.MIN,
+ cls.MAX))
+ return objset
+
+ return []
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Invalid instances found: {0}".format(invalid))
diff --git a/config/plugins/maya/publish/validate_transfers.py b/config/plugins/maya/publish/validate_transfers.py
new file mode 100644
index 0000000000..41544b63cf
--- /dev/null
+++ b/config/plugins/maya/publish/validate_transfers.py
@@ -0,0 +1,54 @@
+import pyblish.api
+import colorbleed.api
+import os
+
+from collections import defaultdict
+
+
+class ValidateTransfers(pyblish.api.InstancePlugin):
+ """Validates mapped resources.
+
+ This validates:
+ - The resources all transfer to a unique destination.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ label = "Transfers"
+
+ def process(self, instance):
+
+ transfers = instance.data.get("transfers", [])
+ if not transfers:
+ return
+
+ verbose = instance.data.get('verbose', False)
+
+ # Collect all destination with its sources
+ collected = defaultdict(set)
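+        # e.g. (illustrative paths) two sources headed to one destination:
+        #   {"x:/publish/tex.exr": {"c:/work/a/tex.exr", "c:/work/b/tex.exr"}}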
+ for source, destination in transfers:
+
+ # Use normalized paths in comparison and ignore case sensitivity
+ source = os.path.normpath(source).lower()
+ destination = os.path.normpath(destination).lower()
+
+ collected[destination].add(source)
+
+ invalid_destinations = list()
+ for destination, sources in collected.items():
+ if len(sources) > 1:
+ invalid_destinations.append(destination)
+
+ if verbose:
+ self.log.error("Non-unique file transfer for resources: "
+ "{0} (sources: {1})".format(destination,
+ sources))
+
+ if invalid_destinations:
+ if not verbose:
+ # If not verbose then still log the resource destination as
+ # opposed to every individual file transfer
+ self.log.error("Non-unique file transfers to destinations: "
+ "%s" % "\n".join(invalid_destinations))
+
+ raise RuntimeError("Invalid transfers in queue.")
diff --git a/config/plugins/maya/publish/validate_transform_naming_suffix.py b/config/plugins/maya/publish/validate_transform_naming_suffix.py
new file mode 100644
index 0000000000..51d63ad505
--- /dev/null
+++ b/config/plugins/maya/publish/validate_transform_naming_suffix.py
@@ -0,0 +1,87 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+SUFFIX_NAMING_TABLE = {'mesh': ["_GEO", "_GES", "_GEP", "_OSD"],
+ 'nurbsCurve': ["_CRV"],
+ 'nurbsSurface': ["_NRB"],
+ None: ['_GRP']}
+
+ALLOW_IF_NOT_IN_SUFFIX_TABLE = True
+
+
+class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
+ """Validates transform suffix based on the type of its children shapes.
+
+    Suffixes must be:
+ - mesh:
+ _GEO (regular geometry)
+ _GES (geometry to be smoothed at render)
+ _GEP (proxy geometry; usually not to be rendered)
+ _OSD (open subdiv smooth at rendertime)
+ - nurbsCurve: _CRV
+ - nurbsSurface: _NRB
+ - null/group: _GRP
+
+ .. warning::
+ This grabs the first child shape as a reference and doesn't use the
+ others in the check.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ['maya']
+ families = ['colorbleed.model']
+ category = 'cleanup'
+ optional = True
+ version = (0, 1, 0)
+ label = 'Suffix Naming Conventions'
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ @staticmethod
+ def is_valid_name(node_name, shape_type):
+ """Return whether node's name is correct.
+
+ The correctness for a transform's suffix is dependent on what
+        `shape_type` it holds. E.g. a transform with a mesh might need a
+        `_GEO` suffix.
+
+ When `shape_type` is None the transform doesn't have any direct
+ children shapes.
+
+ """
+ if shape_type not in SUFFIX_NAMING_TABLE:
+ return ALLOW_IF_NOT_IN_SUFFIX_TABLE
+ else:
+ suffices = SUFFIX_NAMING_TABLE[shape_type]
+ for suffix in suffices:
+ if node_name.endswith(suffix):
+ return True
+ return False
+
+ @classmethod
+ def get_invalid(cls, instance):
+ transforms = cmds.ls(instance, type='transform', long=True)
+
+ invalid = []
+ for transform in transforms:
+ shapes = cmds.listRelatives(transform,
+ shapes=True,
+ fullPath=True,
+ noIntermediate=True)
+
+ shape_type = cmds.nodeType(shapes[0]) if shapes else None
+ if not cls.is_valid_name(transform, shape_type):
+ invalid.append(transform)
+
+ return invalid
+
+ def process(self, instance):
+ """Process all the nodes in the instance"""
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise ValueError("Incorrectly named geometry "
+ "transforms: {0}".format(invalid))
diff --git a/config/plugins/maya/publish/validate_transform_zero.py b/config/plugins/maya/publish/validate_transform_zero.py
new file mode 100644
index 0000000000..fa87539d96
--- /dev/null
+++ b/config/plugins/maya/publish/validate_transform_zero.py
@@ -0,0 +1,64 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateTransformZero(pyblish.api.Validator):
+ """Transforms can't have any values
+
+ To solve this issue, try freezing the transforms. So long
+    as the translate and rotate values are zero and scale is one,
+ you're all good.
+
+ """
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ["maya"]
+ families = ["colorbleed.model"]
+ category = "geometry"
+ version = (0, 1, 0)
+ label = "Transform Zero (Freeze)"
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ _identity = [1.0, 0.0, 0.0, 0.0,
+ 0.0, 1.0, 0.0, 0.0,
+ 0.0, 0.0, 1.0, 0.0,
+ 0.0, 0.0, 0.0, 1.0]
+ _tolerance = 1e-30
+
+ @classmethod
+ def get_invalid(cls, instance):
+ """Returns the invalid transforms in the instance.
+
+ This is the same as checking:
+ - translate == [0, 0, 0] and rotate == [0, 0, 0] and
+ scale == [1, 1, 1] and shear == [0, 0, 0]
+
+ .. note::
+ This will also catch camera transforms if those
+ are in the instances.
+
+ Returns:
+ list: Transforms that are not identity matrix
+
+ """
+
+ transforms = cmds.ls(instance, type="transform")
+
+ invalid = []
+ for transform in transforms:
+ mat = cmds.xform(transform, q=1, matrix=True, objectSpace=True)
+ if not all(abs(x-y) < cls._tolerance
+ for x, y in zip(cls._identity, mat)):
+ invalid.append(transform)
+
+ return invalid
+
+ def process(self, instance):
+ """Process all the nodes in the instance "objectSet"""
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise ValueError("Nodes found with transform "
+ "values: {0}".format(invalid))
diff --git a/config/plugins/maya/publish/validate_vrayproxy.py b/config/plugins/maya/publish/validate_vrayproxy.py
new file mode 100644
index 0000000000..40c45094db
--- /dev/null
+++ b/config/plugins/maya/publish/validate_vrayproxy.py
@@ -0,0 +1,27 @@
+import pyblish.api
+
+
+class ValidateVrayProxy(pyblish.api.InstancePlugin):
+
+ order = pyblish.api.ValidatorOrder
+ label = 'VRay Proxy Settings'
+ hosts = ['maya']
+ families = ['colorbleed.vrayproxy']
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("'%s' has invalid settings for VRay Proxy "
+ "export!" % instance.name)
+
+ @classmethod
+ def get_invalid(cls, instance):
+        data = instance.data
+
+        invalid = False
+
+        if not data["setMembers"]:
+            cls.log.error("'%s' is empty! This is a bug" % instance.name)
+            invalid = True
+
+        if data["animation"]:
+            if data["endFrame"] < data["startFrame"]:
+                cls.log.error("End frame is smaller than start frame")
+                invalid = True
+
+        return invalid
diff --git a/config/plugins/maya/publish/validate_yeti_rig_input_in_instance.py b/config/plugins/maya/publish/validate_yeti_rig_input_in_instance.py
new file mode 100644
index 0000000000..58d9834617
--- /dev/null
+++ b/config/plugins/maya/publish/validate_yeti_rig_input_in_instance.py
@@ -0,0 +1,43 @@
+from maya import cmds
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateYetiRigInputShapesInInstance(pyblish.api.Validator):
+ """Validate if all input nodes are part of the instance's hierarchy"""
+
+ order = colorbleed.api.ValidateContentsOrder
+ hosts = ["maya"]
+ families = ["colorbleed.yetiRig"]
+ label = "Yeti Rig Input Shapes In Instance"
+ actions = [colorbleed.api.SelectInvalidAction]
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Yeti Rig has invalid input meshes")
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ input_set = next((i for i in instance if i == "input_SET"), None)
+ assert input_set, "Current %s instance has no `input_SET`" % instance
+
+ # Get all children, we do not care about intermediates
+ input_nodes = cmds.ls(cmds.sets(input_set, query=True), long=True)
+ dag = cmds.ls(input_nodes, dag=True, long=True)
+ shapes = cmds.ls(dag, long=True, shapes=True, noIntermediate=True)
+
+ # Allow publish without input meshes.
+ if not shapes:
+ cls.log.info("Found no input meshes for %s, skipping ..."
+ % instance)
+ return []
+
+        # check if the input nodes are part of the yetiRig instance
+ instance_lookup = set(instance[:])
+ invalid = [s for s in shapes if s not in instance_lookup]
+
+ return invalid
diff --git a/config/plugins/maya/publish/validate_yetirig_cache_state.py b/config/plugins/maya/publish/validate_yetirig_cache_state.py
new file mode 100644
index 0000000000..2c4d4dbc72
--- /dev/null
+++ b/config/plugins/maya/publish/validate_yetirig_cache_state.py
@@ -0,0 +1,61 @@
+import pyblish.api
+
+import colorbleed.action
+
+import maya.cmds as cmds
+
+
+class ValidateYetiRigCacheState(pyblish.api.InstancePlugin):
+ """Validate the I/O attributes of the node
+
+ Every pgYetiMaya cache node per instance should have:
+ 1. Input Mode is set to `None`
+ 2. Input Cache File Name is empty
+
+ """
+
+ order = pyblish.api.ValidatorOrder
+ label = "Yeti Rig Cache State"
+ hosts = ["maya"]
+ families = ["colorbleed.yetiRig"]
+ actions = [colorbleed.action.RepairAction,
+ colorbleed.action.SelectInvalidAction]
+
+ def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Nodes have incorrect I/O settings")
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ invalid = []
+
+ yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
+ for node in yeti_nodes:
+ # Check reading state
+ state = cmds.getAttr("%s.fileMode" % node)
+ if state == 1:
+ cls.log.error("Node `%s` is set to mode `cache`" % node)
+ invalid.append(node)
+ continue
+
+            # Check that no input cache file is set
+ has_cache = cmds.getAttr("%s.cacheFileName" % node)
+ if has_cache:
+ cls.log.error("Node `%s` has a cache file set" % node)
+ invalid.append(node)
+ continue
+
+ return invalid
+
+ @classmethod
+ def repair(cls, instance):
+ """Repair all errors"""
+
+        # Reset the I/O settings on all invalid nodes
+ invalid = cls.get_invalid(instance)
+ for node in invalid:
+ cmds.setAttr("%s.fileMode" % node, 0)
+ cmds.setAttr("%s.cacheFileName" % node, "", type="string")
+
diff --git a/config/scripts/__init__.py b/config/scripts/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/config/scripts/fusion_switch_shot.py b/config/scripts/fusion_switch_shot.py
new file mode 100644
index 0000000000..656b5a9d85
--- /dev/null
+++ b/config/scripts/fusion_switch_shot.py
@@ -0,0 +1,246 @@
+import os
+import re
+import sys
+import logging
+
+# Pipeline imports
+from avalon import api, io, pipeline
+import avalon.fusion
+
+# Config imports
+import colorbleed.lib as colorbleed
+import config.apps.fusion.lib as fusion_lib
+
+log = logging.getLogger("Update Slap Comp")
+
+self = sys.modules[__name__]
+self._project = None
+
+
+def _format_version_folder(folder):
+ """Format a version folder based on the filepath
+
+ Assumption here is made that, if the path does not exists the folder
+ will be "v001"
+
+ Args:
+ folder: file path to a folder
+
+ Returns:
+ str: new version folder name
+ """
+
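+    # e.g. a folder containing ["v001", "v002"] yields "v003";
+    # a missing folder yields "v001".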
+ new_version = 1
+ if os.path.isdir(folder):
+        re_version = re.compile(r"v\d+$")
+        versions = [i for i in os.listdir(folder)
+                    if os.path.isdir(os.path.join(folder, i))
+                    and re_version.match(i)]
+ if versions:
+ # ensure the "v" is not included
+ new_version = int(max(versions)[1:]) + 1
+
+ version_folder = "v{:03d}".format(new_version)
+
+ return version_folder
+
+
+def _get_work_folder(session):
+ """Convenience function to get the work folder path of the current asset"""
+
+ # Get new filename, create path based on asset and work template
+ template_work = self._project["config"]["template"]["work"]
+ work_path = pipeline._format_work_template(template_work, session)
+
+ return os.path.normpath(work_path)
+
+
+def _get_fusion_instance():
+ fusion = getattr(sys.modules["__main__"], "fusion", None)
+ if fusion is None:
+ try:
+ # Support for FuScript.exe, BlackmagicFusion module for py2 only
+ import BlackmagicFusion as bmf
+ fusion = bmf.scriptapp("Fusion")
+ except ImportError:
+ raise RuntimeError("Could not find a Fusion instance")
+ return fusion
+
+
+def _format_filepath(session):
+
+ project = session["AVALON_PROJECT"]
+ asset = session["AVALON_ASSET"]
+
+ # Save updated slap comp
+ work_path = _get_work_folder(session)
+ walk_to_dir = os.path.join(work_path, "scenes", "slapcomp")
+ slapcomp_dir = os.path.abspath(walk_to_dir)
+
+ # Ensure destination exists
+ if not os.path.isdir(slapcomp_dir):
+ log.warning("Folder did not exist, creating folder structure")
+ os.makedirs(slapcomp_dir)
+
+ # Compute output path
+ new_filename = "{}_{}_slapcomp_v001.comp".format(project, asset)
+ new_filepath = os.path.join(slapcomp_dir, new_filename)
+
+    # Create new unique filepath
+ if os.path.exists(new_filepath):
+ new_filepath = colorbleed.version_up(new_filepath)
+
+ return new_filepath
+
+
+def _update_savers(comp, session):
+ """Update all savers of the current comp to ensure the output is correct
+
+ Args:
+ comp (object): current comp instance
+ session (dict): the current Avalon session
+
+ Returns:
+ None
+ """
+
+ new_work = _get_work_folder(session)
+ renders = os.path.join(new_work, "renders")
+ version_folder = _format_version_folder(renders)
+ renders_version = os.path.join(renders, version_folder)
+
+ comp.Print("New renders to: %s\n" % renders)
+
+ with avalon.fusion.comp_lock_and_undo_chunk(comp):
+ savers = comp.GetToolList(False, "Saver").values()
+ for saver in savers:
+ filepath = saver.GetAttrs("TOOLST_Clip_Name")[1.0]
+ filename = os.path.basename(filepath)
+ new_path = os.path.join(renders_version, filename)
+ saver["Clip"] = new_path
+
+
+def update_frame_range(comp, representations):
+ """Update the frame range of the comp and render length
+
+ The start and end frame are based on the lowest start frame and the highest
+ end frame
+
+ Args:
+ comp (object): current focused comp
+        representations (list): collection of dicts
+
+ Returns:
+ None
+
+ """
+
+ version_ids = [r["parent"] for r in representations]
+ versions = io.find({"type": "version", "_id": {"$in": version_ids}})
+ versions = list(versions)
+
+ start = min(v["data"]["startFrame"] for v in versions)
+ end = max(v["data"]["endFrame"] for v in versions)
+
+ fusion_lib.update_frame_range(start, end, comp=comp)
+
+
+def switch(asset_name, filepath=None, new=True):
+ """Switch the current containers of the file to the other asset (shot)
+
+ Args:
+ filepath (str): file path of the comp file
+ asset_name (str): name of the asset (shot)
+ new (bool): Save updated comp under a different name
+
+ Returns:
+ comp path (str): new filepath of the updated comp
+
+ """
+
+ # If filepath provided, ensure it is valid absolute path
+ if filepath is not None:
+ if not os.path.isabs(filepath):
+ filepath = os.path.abspath(filepath)
+
+ assert os.path.exists(filepath), "%s must exist " % filepath
+
+ # Assert asset name exists
+    # It is better to do this here than to wait until switch_shot does it
+ asset = io.find_one({"type": "asset", "name": asset_name})
+ assert asset, "Could not find '%s' in the database" % asset_name
+
+ # Get current project
+ self._project = io.find_one({"type": "project",
+ "name": api.Session["AVALON_PROJECT"]})
+
+ # Go to comp
+ if not filepath:
+ current_comp = avalon.fusion.get_current_comp()
+ assert current_comp is not None, "Could not find current comp"
+ else:
+ fusion = _get_fusion_instance()
+ current_comp = fusion.LoadComp(filepath, quiet=True)
+ assert current_comp is not None, "Fusion could not load '%s'" % filepath
+
+ host = api.registered_host()
+ containers = list(host.ls())
+ assert containers, "Nothing to update"
+
+ representations = []
+ for container in containers:
+ try:
+ representation = colorbleed.switch_item(container,
+ asset_name=asset_name)
+ representations.append(representation)
+ except Exception as e:
+ current_comp.Print("Error in switching! %s\n" % e.message)
+
+ message = "Switched %i Loaders of the %i\n" % (len(representations),
+ len(containers))
+ current_comp.Print(message)
+
+ # Build the session to switch to
+ switch_to_session = api.Session.copy()
+ switch_to_session["AVALON_ASSET"] = asset['name']
+
+ if new:
+ comp_path = _format_filepath(switch_to_session)
+
+ # Update savers output based on new session
+ _update_savers(current_comp, switch_to_session)
+ else:
+ comp_path = colorbleed.version_up(filepath)
+
+ current_comp.Print(comp_path)
+
+ current_comp.Print("\nUpdating frame range")
+ update_frame_range(current_comp, representations)
+
+ current_comp.Save(comp_path)
+
+ return comp_path
+
+
+if __name__ == '__main__':
+
+ import argparse
+
+    parser = argparse.ArgumentParser(description="Switch to a shot within "
+                                                 "an existing comp file")
+
+ parser.add_argument("--file_path",
+ type=str,
+                        default=None,
+ help="File path of the comp to use")
+
+ parser.add_argument("--asset_name",
+ type=str,
+                        required=True,
+ help="Name of the asset (shot) to switch")
+
+    args, unknown = parser.parse_known_args()
+
+ api.install(avalon.fusion)
+ switch(args.asset_name, args.file_path)
+
+ sys.exit(0)
diff --git a/config/scripts/publish_filesequence.py b/config/scripts/publish_filesequence.py
new file mode 100644
index 0000000000..c37ceee07c
--- /dev/null
+++ b/config/scripts/publish_filesequence.py
@@ -0,0 +1,87 @@
+"""This module is used for command line publishing of image sequences."""
+
+import os
+import sys
+import logging
+
+logging.basicConfig()
+log = logging.getLogger("Publish Image Sequences")
+log.setLevel(logging.DEBUG)
+
+error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
+
+
+def publish(paths, gui=False):
+ """Publish rendered image sequences based on the job data
+
+ Args:
+ paths (list): a list of paths where to publish from
+ gui (bool, Optional): Choose to show Pyblish GUI, default is False
+
+ Returns:
+ None
+
+ """
+
+ assert isinstance(paths, (list, tuple)), "Must be list of paths"
+ log.info(paths)
+ assert any(paths), "No paths found in the list"
+    # Set the paths to publish for the collector
+    os.environ["FILESEQUENCE"] = os.pathsep.join(paths)
+
+ # Install Avalon with shell as current host
+ from avalon import api, shell
+ api.install(shell)
+
+ # Register target and host
+ import pyblish.api
+ pyblish.api.register_target("filesequence")
+ pyblish.api.register_host("shell")
+
+ # Publish items
+ if gui:
+ import pyblish_qml
+ pyblish_qml.show(modal=True)
+ else:
+
+ import pyblish.util
+ context = pyblish.util.publish()
+
+ if not context:
+ log.warning("Nothing collected.")
+ sys.exit(1)
+
+ # Collect errors, {plugin name: error}
+ error_results = [r for r in context.data["results"] if r["error"]]
+
+ if error_results:
+ log.error(" Errors occurred ...")
+ for result in error_results:
+ log.error(error_format.format(**result))
+ sys.exit(2)
+
+
+def __main__():
+ import argparse
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--paths",
+ nargs="*",
+ default=[],
+ help="The filepaths to publish. This can be a "
+ "directory or a path to a .json publish "
+ "configuration.")
+ parser.add_argument("--gui",
+ default=False,
+ action="store_true",
+ help="Whether to run Pyblish in GUI mode.")
+
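+    # Example invocation (illustrative paths):
+    #   python publish_filesequence.py --paths /renders/shotA --gui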
+ kwargs, args = parser.parse_known_args()
+
+ print("Running publish imagesequence...")
+ print("Paths: {}".format(kwargs.paths or [os.getcwd()]))
+    publish(kwargs.paths or [os.getcwd()], gui=kwargs.gui)
+
+
+if __name__ == '__main__':
+ __main__()
diff --git a/config/setdress_api.py b/config/setdress_api.py
new file mode 100644
index 0000000000..70d8054357
--- /dev/null
+++ b/config/setdress_api.py
@@ -0,0 +1,548 @@
+import logging
+import json
+import os
+
+import contextlib
+import copy
+
+from maya import cmds
+
+from avalon import api
+import avalon.io as io
+from avalon.maya.lib import unique_namespace
+from config.apps.maya.lib import matrix_equals
+
+log = logging.getLogger("PackageLoader")
+
+
+def to_namespace(node, namespace):
+ """Return node name as if it's inside the namespace.
+
+ Args:
+ node (str): Node name
+ namespace (str): Namespace
+
+ Returns:
+ str: The node in the namespace.
+
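+    Example:
+        >>> to_namespace("|char|body", "ns")
+        '|ns:char|ns:body'
+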
+ """
+ namespace_prefix = "|{}:".format(namespace)
+ node = namespace_prefix.join(node.split("|"))
+ return node
+
+
+@contextlib.contextmanager
+def namespaced(namespace, new=True):
+ """Work inside namespace during context
+
+ Args:
+ new (bool): When enabled this will rename the namespace to a unique
+ namespace if the input namespace already exists.
+
+ Yields:
+ str: The namespace that is used during the context
+
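+    Example:
+        >>> with namespaced("foo_") as ns:
+        ...     pass  # nodes created here land in "foo_" (or a unique copy)
+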
+ """
+ original = cmds.namespaceInfo(cur=True)
+ if new:
+ namespace = unique_namespace(namespace)
+ cmds.namespace(add=namespace)
+
+ try:
+ cmds.namespace(set=namespace)
+ yield namespace
+ finally:
+ cmds.namespace(set=original)
+
+
+@contextlib.contextmanager
+def unlocked(nodes):
+
+ # Get node state by Maya's uuid
+ nodes = cmds.ls(nodes, long=True)
+ uuids = cmds.ls(nodes, uuid=True)
+ states = cmds.lockNode(nodes, query=True, lock=True)
+ states = {uuid: state for uuid, state in zip(uuids, states)}
+ originals = {uuid: node for uuid, node in zip(uuids, nodes)}
+
+ try:
+ cmds.lockNode(nodes, lock=False)
+ yield
+ finally:
+ # Reapply original states
+ for uuid, state in states.iteritems():
+ nodes_from_id = cmds.ls(uuid, long=True)
+ if nodes_from_id:
+ node = nodes_from_id[0]
+ else:
+                node = originals[uuid]
+                log.debug("Falling back to node name: %s", node)
+ if not cmds.objExists(node):
+ log.warning("Unable to find: %s", node)
+ continue
+ cmds.lockNode(node, lock=state)
+
+
+def load_package(filepath, name, namespace=None):
+ """Load a package that was gathered elsewhere.
+
+ A package is a group of published instances, possibly with additional data
+ in a hierarchy.
+
+ """
+
+ if namespace is None:
+ # Define a unique namespace for the package
+ namespace = os.path.basename(filepath).split(".")[0]
+        namespace = unique_namespace(namespace)
+ assert isinstance(namespace, basestring)
+
+ # Load the setdress package data
+ with open(filepath, "r") as fp:
+ data = json.load(fp)
+
+ # Load the setdress alembic hierarchy
+ # We import this into the namespace in which we'll load the package's
+ # instances into afterwards.
+ alembic = filepath.replace(".json", ".abc")
+ hierarchy = cmds.file(alembic,
+ reference=True,
+ namespace=namespace,
+ returnNewNodes=True,
+ groupReference=True,
+ groupName="{}:{}".format(namespace, name),
+ typ="Alembic")
+
+ # Get the top root node (the reference group)
+ root = "{}:{}".format(namespace, name)
+
+ containers = []
+ all_loaders = api.discover(api.Loader)
+ for representation_id, instances in data.items():
+
+ # Find the compatible loaders
+ loaders = api.loaders_from_representation(all_loaders,
+ representation_id)
+
+ for instance in instances:
+ container = _add(instance=instance,
+ representation_id=representation_id,
+ loaders=loaders,
+ namespace=namespace,
+ root=root)
+ containers.append(container)
+
+ # TODO: Do we want to cripple? Or do we want to add a 'parent' parameter?
+ # Cripple the original avalon containers so they don't show up in the
+ # manager
+ # for container in containers:
+ # cmds.setAttr("%s.id" % container,
+ # "colorbleed.setdress.container",
+ # type="string")
+
+ # TODO: Lock all loaded nodes
+ # This is to ensure the hierarchy remains unaltered by the artists
+ # for node in nodes:
+ # cmds.lockNode(node, lock=True)
+
+ return containers + hierarchy
+
+
+def _add(instance, representation_id, loaders, namespace, root="|"):
+ """Add an item from the package
+
+ Args:
+ instance (dict):
+ representation_id (str):
+ loaders (list):
+ namespace (str):
+
+ Returns:
+ str: The created Avalon container.
+
+ """
+
+ from config.apps.maya.lib import get_container_transforms
+
+ # Process within the namespace
+ with namespaced(namespace, new=False) as namespace:
+
+ # Get the used loader
+ Loader = next((x for x in loaders if
+ x.__name__ == instance['loader']),
+ None)
+
+ if Loader is None:
+ log.warning("Loader is missing: %s. Skipping %s",
+ instance['loader'], instance)
+ raise RuntimeError("Loader is missing.")
+
+ container = api.load(Loader,
+ representation_id,
+ namespace=instance['namespace'])
+
+ # Get the root from the loaded container
+ loaded_root = get_container_transforms({"objectName": container},
+ root=True)
+
+ # Apply matrix to root node (if any matrix edits)
+ matrix = instance.get("matrix", None)
+ if matrix:
+ cmds.xform(loaded_root, objectSpace=True, matrix=matrix)
+
+ # Parent into the setdress hierarchy
+ # Namespace is missing from parent node(s), add namespace
+ # manually
+ parent = root + to_namespace(instance["parent"], namespace)
+ cmds.parent(loaded_root, parent, relative=True)
+
+ return container
+
+
+# Store root nodes based on representation and namespace
+def _instances_by_namespace(data):
+ """Rebuild instance data so we can look it up by namespace.
+
+ Note that the `representation` is added into the instance's
+ data with a `representation` key.
+
+ Args:
+ data (dict): scene build data
+
+ Returns:
+ dict
+
+ """
+ result = {}
+ # Add new assets
+ for representation_id, instances in data.items():
+
+ # Ensure we leave the source data unaltered
+ instances = copy.deepcopy(instances)
+ for instance in instances:
+ instance['representation'] = representation_id
+ result[instance['namespace']] = instance
+
+ return result
+
+
+def get_contained_containers(container):
+ """Get the Avalon containers in this container
+
+ Args:
+ container (dict): The container dict.
+
+ Returns:
+ list: A list of member container dictionaries.
+
+ """
+
+ import avalon.schema
+ from avalon.maya.pipeline import parse_container
+
+ # Get avalon containers in this package setdress container
+ containers = []
+ members = cmds.sets(container['objectName'], query=True)
+ for node in cmds.ls(members, type="objectSet"):
+ try:
+ member_container = parse_container(node)
+ containers.append(member_container)
+ except avalon.schema.ValidationError:
+ pass
+
+ return containers
+
+
+def update_package_version(container, version):
+ """
+ Update package by version number
+
+ Args:
+ container (dict): container data of the container node
+ version (int): the new version number of the package
+
+ Returns:
+ None
+
+ """
+
+ # Versioning (from `core.maya.pipeline`)
+ current_representation = io.find_one({
+ "_id": io.ObjectId(container["representation"])
+ })
+
+ assert current_representation is not None, "This is a bug"
+
+ version_, subset, asset, project = io.parenthood(current_representation)
+
+ if version == -1:
+ new_version = io.find_one({
+ "type": "version",
+ "parent": subset["_id"]
+ }, sort=[("name", -1)])
+ else:
+ new_version = io.find_one({
+ "type": "version",
+ "parent": subset["_id"],
+ "name": version,
+ })
+
+ assert new_version is not None, "This is a bug"
+
+ # Get the new representation (new file)
+ new_representation = io.find_one({
+ "type": "representation",
+ "parent": new_version["_id"],
+ "name": current_representation["name"]
+ })
+
+ update_package(container, new_representation)
+
+
+def update_package(set_container, representation):
+ """Update any matrix changes in the scene based on the new data
+
+ Args:
+ set_container (dict): container data from `ls()`
+ representation (dict): the representation document from the database
+
+ Returns:
+ None
+
+ """
+
+ # Load the original package data
+ current_representation = io.find_one({
+ "_id": io.ObjectId(set_container['representation']),
+ "type": "representation"
+ })
+
+ current_file = api.get_representation_path(current_representation)
+ assert current_file.endswith(".json")
+ with open(current_file, "r") as fp:
+ current_data = json.load(fp)
+
+ # Load the new package data
+ new_file = api.get_representation_path(representation)
+ assert new_file.endswith(".json")
+ with open(new_file, "r") as fp:
+ new_data = json.load(fp)
+
+ # Update scene content
+ containers = get_contained_containers(set_container)
+ update_scene(set_container, containers, current_data, new_data, new_file)
+
+ # TODO: This should be handled by the pipeline itself
+ cmds.setAttr(set_container['objectName'] + ".representation",
+ str(representation['_id']), type="string")
+
+
+def update_scene(set_container, containers, current_data, new_data, new_file):
+ """Updates the hierarchy, assets and their matrix
+
+    Updates the following within the scene:
+    * Setdress hierarchy alembic
+    * Matrix
+    * Parenting
+    * Representations
+
+    It removes any assets which are not present in the new build data
+
+    Args:
+        set_container (dict): the setdress container of the scene
+        containers (list): the list of containers under the setdress container
+        current_data (dict): the current build data of the setdress
+        new_data (dict): the new build data of the setdress
+        new_file (str): file path to the new build data (.json)
+
+ Returns:
+ processed_containers (list): all new and updated containers
+
+ """
+
+ from config.apps.maya.lib import DEFAULT_MATRIX, get_container_transforms
+
+ set_namespace = set_container['namespace']
+
+ # Update the setdress hierarchy alembic
+ set_root = get_container_transforms(set_container, root=True)
+ set_hierarchy_root = cmds.listRelatives(set_root, fullPath=True)[0]
+ set_hierarchy_reference = cmds.referenceQuery(set_hierarchy_root,
+ referenceNode=True)
+ new_alembic = new_file.replace(".json", ".abc")
+ assert os.path.exists(new_alembic), "%s does not exist." % new_alembic
+ with unlocked(cmds.listRelatives(set_root, ad=True, fullPath=True)):
+ cmds.file(new_alembic,
+ loadReference=set_hierarchy_reference,
+ type="Alembic")
+
+ identity = DEFAULT_MATRIX[:]
+
+ processed_namespaces = set()
+ processed_containers = list()
+
+ new_lookup = _instances_by_namespace(new_data)
+ old_lookup = _instances_by_namespace(current_data)
+ for container in containers:
+ container_ns = container['namespace']
+
+        # Consider it processed here; even if it fails we want to store
+        # that the namespace was already present.
+ processed_namespaces.add(container_ns)
+ processed_containers.append(container['objectName'])
+
+ if container_ns in new_lookup:
+ root = get_container_transforms(container, root=True)
+ if not root:
+ log.error("Can't find root for %s", container['objectName'])
+ continue
+
+ old_instance = old_lookup.get(container_ns, {})
+ new_instance = new_lookup[container_ns]
+
+ # Update the matrix
+ # check matrix against old_data matrix to find local overrides
+ current_matrix = cmds.xform(root,
+ query=True,
+ matrix=True,
+ objectSpace=True)
+
+ original_matrix = old_instance.get("matrix", identity)
+ has_matrix_override = not matrix_equals(current_matrix,
+ original_matrix)
+
+ if has_matrix_override:
+ log.warning("Matrix override preserved on %s", container_ns)
+ else:
+ new_matrix = new_instance.get("matrix", identity)
+ cmds.xform(root, matrix=new_matrix, objectSpace=True)
+
+ # Update the parenting
+ if old_instance.get("parent", None) != new_instance["parent"]:
+
+ parent = to_namespace(new_instance['parent'], set_namespace)
+ if not cmds.objExists(parent):
+ log.error("Can't find parent %s", parent)
+ continue
+
+ # Set the new parent
+ cmds.lockNode(root, lock=False)
+ root = cmds.parent(root, parent, relative=True)
+ cmds.lockNode(root, lock=True)
+
+ # Update the representation
+ representation_current = container['representation']
+ representation_old = old_instance['representation']
+ representation_new = new_instance['representation']
+ has_representation_override = (representation_current !=
+ representation_old)
+
+ if representation_new != representation_current:
+
+ if has_representation_override:
+ log.warning("Your scene had local representation "
+ "overrides within the set. New "
+ "representations not loaded for %s.",
+ container_ns)
+ continue
+
+            # We check against the current 'loader' in the scene instead
+            # of the original data of the loaded package because an
+            # artist might have made local overrides in the scene
+ if new_instance['loader'] != container['loader']:
+ log.warning("Loader is switched - local edits will be "
+ "lost. Removing: %s",
+ container_ns)
+
+                # Remove this from the "has been processed" list so it's
+                # considered a new element and added afterwards.
+ processed_containers.pop()
+ processed_namespaces.remove(container_ns)
+ api.remove(container)
+ continue
+
+ # Check whether the conversion can be done by the Loader.
+ # They *must* use the same asset, subset and Loader for
+ # `api.update` to make sense.
+ old = io.find_one({"_id": io.ObjectId(representation_current)})
+ new = io.find_one({"_id": io.ObjectId(representation_new)})
+ is_valid = compare_representations(old=old, new=new)
+ if not is_valid:
+ log.error("Skipping: %s. See log for details.",
+ container_ns)
+ continue
+
+ new_version = new["context"]["version"]
+ api.update(container, version=new_version)
+
+ else:
+ # Remove this container because it's not in the new data
+ log.warning("Removing content: %s", container_ns)
+ api.remove(container)
+
+ # Add new assets
+ all_loaders = api.discover(api.Loader)
+ for representation_id, instances in new_data.items():
+
+ # Find the compatible loaders
+ loaders = api.loaders_from_representation(all_loaders,
+ representation_id)
+ for instance in instances:
+
+ # Already processed in update functionality
+ if instance['namespace'] in processed_namespaces:
+ continue
+
+ container = _add(instance=instance,
+ representation_id=representation_id,
+ loaders=loaders,
+ namespace=set_container['namespace'],
+ root=set_root)
+
+ # Add to the setdress container
+ cmds.sets(container,
+ addElement=set_container['objectName'])
+
+ processed_containers.append(container)
+
+ return processed_containers
+
+
+def compare_representations(old, new):
+ """Check if the old representation given can be updated
+
+ Due to limitations of the `api.update` function we cannot allow
+ differences in the following data:
+
+ * Representation name (extension)
+ * Asset name
+ * Subset name (variation)
+
+    If any of those data values differ, an error is logged and the
+    update is considered invalid
+
+    Args:
+        old (dict): representation data from the database
+        new (dict): representation data from the database
+
+    Returns:
+        bool: True if the representations are compatible, False otherwise
+ """
+
+ if new["name"] != old["name"]:
+ log.error("Cannot switch extensions")
+ return False
+
+ new_context = new["context"]
+ old_context = old["context"]
+
+ if new_context["asset"] != old_context["asset"]:
+ log.error("Changing assets between updates is "
+ "not supported.")
+ return False
+
+ if new_context["subset"] != old_context["subset"]:
+ log.error("Changing subsets between updates is "
+ "not supported.")
+ return False
+
+ return True
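+
+
+# Usage sketch (illustrative): update a loaded package to its latest
+# version from the Script Editor. The loader name below is hypothetical;
+# use whatever `ls()` reports for your setdress container.
+#
+#   >>> from avalon.maya.pipeline import ls
+#   >>> container = next(c for c in ls()
+#   ...                  if c["loader"] == "SetDressLoader")
+#   >>> update_package_version(container, version=-1)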
diff --git a/config/vendor/__init__.py b/config/vendor/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/config/vendor/pather/__init__.py b/config/vendor/pather/__init__.py
new file mode 100644
index 0000000000..91094e45a8
--- /dev/null
+++ b/config/vendor/pather/__init__.py
@@ -0,0 +1,5 @@
+__author__ = 'Roy Nieterau'
+
+
+from .core import *
+from .version import *
diff --git a/config/vendor/pather/core.py b/config/vendor/pather/core.py
new file mode 100644
index 0000000000..f2a469dc8b
--- /dev/null
+++ b/config/vendor/pather/core.py
@@ -0,0 +1,168 @@
+
+__all__ = ['parse', 'ls', 'ls_iter', 'format']
+
+import os
+import re
+import string
+import glob
+
+from .error import ParseError
+
+# Regex pattern that matches valid filename characters
+# TODO: Implement complete pattern if required
+RE_FILENAME = r'[-\w.,; \[\]]'
+
+
+def format(pattern, data, allow_partial=True):
+ """Format a pattern with a set of data
+
+ Examples:
+
+ Full formatting
+ >>> format("{a}/{b}/{c}", {"a": "foo", "b": "bar", "c": "nugget"})
+ 'foo/bar/nugget'
+
+ Partial formatting
+ >>> format("{asset}/{character}", {"asset": "hero"})
+ 'hero/{character}'
+
+ Disallow partial formatting
+ >>> format("{asset}/{character}", {"asset": "hero"},
+ ... allow_partial=False)
+ Traceback (most recent call last):
+ ...
+ KeyError: 'character'
+
+ Args:
+ pattern (str): The pattern to format.
+ data (dict): The key, value pairs used for formatting.
+ allow_partial (bool): Whether to raise error on partial format.
+
+ Returns:
+ str: The formatted result
+ """
+
+ assert isinstance(data, dict)
+
+ if not all(isinstance(value, basestring) for value in data.values()):
+ raise TypeError("The values in the data "
+ "dictionary must be strings")
+
+ if allow_partial:
+ return _partial_format(pattern, data)
+ else:
+ return pattern.format(**data)
+
+
+def parse(pattern, path):
+ """Parse data from a path based on a pattern
+
+ Example:
+ >>> pattern = "root/{task}/{version}/data/"
+ >>> path = "root/modeling/v001/data/"
+ >>> parse(pattern, path)
+ {'task': 'modeling', 'version': 'v001'}
+
+ Returns:
+ dict: The data retrieved from path using pattern.
+ """
+
+ pattern = os.path.normpath(pattern)
+ path = os.path.normpath(path)
+
+ # Force forward slashes
+ path = path.replace('\\', '/')
+ pattern = pattern.replace('\\', '/')
+
+ # Escape characters in path that are regex patterns so they are
+ # excluded by the regex searches. Exclude '{' and '}' in escaping.
+ pattern = re.escape(pattern)
+    pattern = pattern.replace(r'\{', '{').replace(r'\}', '}')
+
+ keys = re.findall(r'{(%s+)}' % RE_FILENAME,
+ pattern)
+    if not keys:
+        return {}
+
+ # Find the corresponding values
+ value_pattern = re.sub(r'{(%s+)}' % RE_FILENAME,
+ r'(%s+)' % RE_FILENAME,
+ pattern)
+ match_values = re.match(value_pattern, path)
+
+ if not match_values:
+ raise ParseError("Path doesn't match with pattern. No values parsed")
+
+ values = match_values.groups()
+
+ return dict(zip(keys, values))
+
+
+def ls_iter(pattern, include=None, with_matches=False):
+ """Yield all matches for the given pattern.
+
+    If the pattern starts with a relative path (or a dynamic key) the search
+    starts from the current working directory, as the pattern is resolved
+    with `os.path.realpath`.
+
+    Arguments:
+        pattern (str): The pattern to match and search against.
+        include (dict): A dictionary used to target the search with the pattern
+            to include only those key-value pairs within the pattern. With this
+            you can reduce the filesystem query to a specified subset.
+        with_matches (bool): Whether to also yield the data parsed from each
+            matched path.
+
+    Example:
+        >>> data = {"root": "foobar", "content": "nugget"}
+        >>> for path in ls_iter("{root}/{project}/data/{content}/",
+        ...                     include=data):
+        ...     print path
+
+    Yields:
+        str or tuple: The matched path, or a 2-tuple of (path, data) when
+            `with_matches` is True.
+    """
+
+ # format rule by data already provided to reduce query
+ if include is not None:
+ pattern = format(pattern, include, allow_partial=True)
+
+ pattern = os.path.expandvars(pattern)
+ pattern = os.path.realpath(pattern)
+
+ glob_pattern = re.sub(r'([/\\]{\w+}[/\\])', '/*/', pattern) # folder
+ glob_pattern = re.sub(r'({\w+})', '*', glob_pattern) # filename
+
+ for path in glob.iglob(glob_pattern):
+ path = os.path.realpath(path)
+ if with_matches:
+ data = parse(pattern, path)
+ yield path, data
+ else:
+ yield path
+
+
+def ls(pattern, include=None, with_matches=False):
+    """Return all matches for the given pattern as a list.
+
+    See `ls_iter` for the full argument documentation.
+    """
+    return list(ls_iter(pattern, include, with_matches=with_matches))
+
+
+def _partial_format(s, data):
+ """Return string `s` formatted by `data` allowing a partial format
+
+ Arguments:
+ s (str): The string that will be formatted
+ data (dict): The dictionary used to format with.
+
+ Example:
+ >>> _partial_format("{d} {a} {b} {c} {d}", {'b': "and", 'd': "left"})
+ 'left {a} and {c} left'
+ """
+
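+    # Leave unknown fields in place as literal {key} tokens so that
+    # string.Formatter only substitutes the keys present in `data`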
+ class FormatDict(dict):
+ def __missing__(self, key):
+ return "{" + key + "}"
+
+ formatter = string.Formatter()
+ mapping = FormatDict(**data)
+ return formatter.vformat(s, (), mapping)
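+
+
+# Round-trip sketch (values are illustrative): `parse` recovers the data
+# that `format` needs to rebuild the same path.
+#
+#   >>> parse("{root}/{task}/v{version}", "shots/anim/v001") == {
+#   ...     "root": "shots", "task": "anim", "version": "001"}
+#   True
+#   >>> format("{root}/{task}/v{version}",
+#   ...        {"root": "shots", "task": "anim", "version": "001"})
+#   'shots/anim/v001'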
diff --git a/config/vendor/pather/error.py b/config/vendor/pather/error.py
new file mode 100644
index 0000000000..92006534d4
--- /dev/null
+++ b/config/vendor/pather/error.py
@@ -0,0 +1,5 @@
+
+
+class ParseError(ValueError):
+ """Error raised when parsing a path with a pattern fails"""
+ pass
diff --git a/config/vendor/pather/version.py b/config/vendor/pather/version.py
new file mode 100644
index 0000000000..85f96b1e3f
--- /dev/null
+++ b/config/vendor/pather/version.py
@@ -0,0 +1,10 @@
+
+VERSION_MAJOR = 0
+VERSION_MINOR = 1
+VERSION_PATCH = 0
+
+version_info = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
+version = '%i.%i.%i' % version_info
+__version__ = version
+
+__all__ = ['version', 'version_info', '__version__']
diff --git a/config/widgets/__init__.py b/config/widgets/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/config/widgets/popup.py b/config/widgets/popup.py
new file mode 100644
index 0000000000..8f28dc5269
--- /dev/null
+++ b/config/widgets/popup.py
@@ -0,0 +1,139 @@
+import sys
+import logging
+import contextlib
+
+
+from avalon.vendor.Qt import QtCore, QtWidgets, QtGui
+
+log = logging.getLogger(__name__)
+
+
+class Popup(QtWidgets.QDialog):
+
+ on_show = QtCore.Signal()
+
+ def __init__(self, parent=None, *args, **kwargs):
+ super(Popup, self).__init__(parent=parent, *args, **kwargs)
+ self.setContentsMargins(0, 0, 0, 0)
+
+ # Layout
+ layout = QtWidgets.QHBoxLayout(self)
+ layout.setContentsMargins(10, 5, 10, 10)
+ message = QtWidgets.QLabel("")
+ message.setStyleSheet("""
+ QLabel {
+ font-size: 12px;
+ }
+ """)
+ show = QtWidgets.QPushButton("Show")
+ show.setSizePolicy(QtWidgets.QSizePolicy.Maximum,
+ QtWidgets.QSizePolicy.Maximum)
+ show.setStyleSheet("""QPushButton { background-color: #BB0000 }""")
+
+ layout.addWidget(message)
+ layout.addWidget(show)
+
+ # Size
+ self.resize(400, 40)
+ geometry = self.calculate_window_geometry()
+ self.setGeometry(geometry)
+
+ self.widgets = {
+ "message": message,
+ "show": show,
+ }
+
+ # Signals
+ show.clicked.connect(self._on_show_clicked)
+
+ # Set default title
+ self.setWindowTitle("Popup")
+
+ def setMessage(self, message):
+ self.widgets['message'].setText(message)
+
+ def _on_show_clicked(self):
+ """Callback for when the 'show' button is clicked.
+
+ Raises the parent (if any)
+
+ """
+
+ parent = self.parent()
+ self.close()
+
+ # Trigger the signal
+ self.on_show.emit()
+
+ if parent:
+ parent.raise_()
+
+ def calculate_window_geometry(self):
+        """Calculate a geometry that aligns the window with the bottom
+        right of the screen.
+
+ """
+
+ window = self
+
+ width = window.width()
+ width = max(width, window.minimumWidth())
+
+ height = window.height()
+ height = max(height, window.sizeHint().height())
+
+ desktop_geometry = QtWidgets.QDesktopWidget().availableGeometry()
+        screen_geometry = QtWidgets.QDesktopWidget().screenGeometry()
+
+ screen_width = screen_geometry.width()
+ screen_height = screen_geometry.height()
+
+ # Calculate width and height of system tray
+ systray_width = screen_geometry.width() - desktop_geometry.width()
+ systray_height = screen_geometry.height() - desktop_geometry.height()
+
+ padding = 10
+
+ x = screen_width - width
+ y = screen_height - height
+
+ x -= systray_width + padding
+ y -= systray_height + padding
+
+ return QtCore.QRect(x, y, width, height)
+
+
+class Popup2(Popup):
+
+ on_show = QtCore.Signal()
+
+ def __init__(self, parent=None, *args, **kwargs):
+ Popup.__init__(self, parent=parent, *args, **kwargs)
+
+ layout = self.layout()
+
+ # Add toggle
+ toggle = QtWidgets.QCheckBox("Update Keys")
+ layout.insertWidget(1, toggle)
+ self.widgets["toggle"] = toggle
+
+ layout.insertStretch(1, 1)
+
+ # Update button text
+ fix = self.widgets["show"]
+ fix.setText("Fix")
+
+
+@contextlib.contextmanager
+def application():
+ app = QtWidgets.QApplication(sys.argv)
+ yield
+ app.exec_()
+
+
+if __name__ == "__main__":
+ with application():
+ dialog = Popup()
+ dialog.setMessage("There are outdated containers in your Maya scene.")
+ dialog.show()
diff --git a/res/workspace.mel b/res/workspace.mel
new file mode 100644
index 0000000000..60fda895d5
--- /dev/null
+++ b/res/workspace.mel
@@ -0,0 +1,74 @@
+//Maya 2018 Project Definition
+
+workspace -fr "fluidCache" "cache/nCache/fluid";
+workspace -fr "JT_ATF" "data";
+workspace -fr "images" "renders";
+workspace -fr "offlineEdit" "scenes/edits";
+workspace -fr "STEP_ATF Export" "data";
+workspace -fr "furShadowMap" "renderData/fur/furShadowMap";
+workspace -fr "INVENTOR_ATF Export" "data";
+workspace -fr "SVG" "data";
+workspace -fr "scripts" "scripts";
+workspace -fr "STL_ATF" "data";
+workspace -fr "DAE_FBX" "data";
+workspace -fr "shaders" "renderData/shaders";
+workspace -fr "NX_ATF" "data";
+workspace -fr "furFiles" "renderData/fur/furFiles";
+workspace -fr "CATIAV5_ATF Export" "data";
+workspace -fr "OBJ" "data";
+workspace -fr "PARASOLID_ATF Export" "data";
+workspace -fr "FBX export" "data";
+workspace -fr "furEqualMap" "renderData/fur/furEqualMap";
+workspace -fr "BIF" "data";
+workspace -fr "DAE_FBX export" "data";
+workspace -fr "CATIAV5_ATF" "data";
+workspace -fr "SAT_ATF Export" "data";
+workspace -fr "movie" "movies";
+workspace -fr "ASS Export" "data";
+workspace -fr "move" "data";
+workspace -fr "mayaAscii" "scenes";
+workspace -fr "autoSave" "autosave";
+workspace -fr "NX_ATF Export" "data";
+workspace -fr "sound" "sound";
+workspace -fr "mayaBinary" "scenes";
+workspace -fr "timeEditor" "Time Editor";
+workspace -fr "DWG_ATF" "data";
+workspace -fr "JT_ATF Export" "data";
+workspace -fr "iprImages" "renderData/iprImages";
+workspace -fr "FBX" "data";
+workspace -fr "renderData" "renderData";
+workspace -fr "CATIAV4_ATF" "data";
+workspace -fr "fileCache" "cache/nCache";
+workspace -fr "eps" "data";
+workspace -fr "3dPaintTextures" "sourceimages/3dPaintTextures";
+workspace -fr "STL_ATF Export" "data";
+workspace -fr "mel" "scripts";
+workspace -fr "translatorData" "data";
+workspace -fr "particles" "cache/particles";
+workspace -fr "scene" "scenes";
+workspace -fr "SAT_ATF" "data";
+workspace -fr "PROE_ATF" "data";
+workspace -fr "WIRE_ATF Export" "data";
+workspace -fr "sourceImages" "sourceimages";
+workspace -fr "clips" "clips";
+workspace -fr "furImages" "renderData/fur/furImages";
+workspace -fr "INVENTOR_ATF" "data";
+workspace -fr "STEP_ATF" "data";
+workspace -fr "DWG_ATF Export" "data";
+workspace -fr "depth" "renderData/depth";
+workspace -fr "sceneAssembly" "sceneAssembly";
+workspace -fr "IGES_ATF Export" "data";
+workspace -fr "PARASOLID_ATF" "data";
+workspace -fr "IGES_ATF" "data";
+workspace -fr "teClipExports" "Time Editor/Clip Exports";
+workspace -fr "ASS" "data";
+workspace -fr "audio" "sound";
+workspace -fr "bifrostCache" "cache/bifrost";
+workspace -fr "Alembic" "data";
+workspace -fr "illustrator" "data";
+workspace -fr "diskCache" "data";
+workspace -fr "WIRE_ATF" "data";
+workspace -fr "templates" "assets";
+workspace -fr "OBJexport" "data";
+workspace -fr "furAttrMap" "renderData/fur/furAttrMap";
+workspace -fr "alembicCache" "cache/alembic";
\ No newline at end of file
diff --git a/setup/fusion/scripts/Comp/colorbleed/32bit/backgrounds_selected_to32bit.py b/setup/fusion/scripts/Comp/colorbleed/32bit/backgrounds_selected_to32bit.py
new file mode 100644
index 0000000000..c0dcef5410
--- /dev/null
+++ b/setup/fusion/scripts/Comp/colorbleed/32bit/backgrounds_selected_to32bit.py
@@ -0,0 +1,12 @@
+from avalon.fusion import comp_lock_and_undo_chunk
+
+
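+# Note: `comp` is a global injected by Fusion when this script is run
+# from within the application.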
+def main():
+ """Set all selected backgrounds to 32 bit"""
+ with comp_lock_and_undo_chunk(comp, 'Selected Backgrounds to 32bit'):
+ tools = comp.GetToolList(True, "Background").values()
+ for tool in tools:
+ tool.Depth = 5
+
+
+main()
diff --git a/setup/fusion/scripts/Comp/colorbleed/32bit/backgrounds_to32bit.py b/setup/fusion/scripts/Comp/colorbleed/32bit/backgrounds_to32bit.py
new file mode 100644
index 0000000000..92ca18a82d
--- /dev/null
+++ b/setup/fusion/scripts/Comp/colorbleed/32bit/backgrounds_to32bit.py
@@ -0,0 +1,12 @@
+from avalon.fusion import comp_lock_and_undo_chunk
+
+
+def main():
+ """Set all backgrounds to 32 bit"""
+ with comp_lock_and_undo_chunk(comp, 'Backgrounds to 32bit'):
+ tools = comp.GetToolList(False, "Background").values()
+ for tool in tools:
+ tool.Depth = 5
+
+
+main()
diff --git a/setup/fusion/scripts/Comp/colorbleed/32bit/loaders_selected_to32bit.py b/setup/fusion/scripts/Comp/colorbleed/32bit/loaders_selected_to32bit.py
new file mode 100644
index 0000000000..6e3802d9ff
--- /dev/null
+++ b/setup/fusion/scripts/Comp/colorbleed/32bit/loaders_selected_to32bit.py
@@ -0,0 +1,12 @@
+from avalon.fusion import comp_lock_and_undo_chunk
+
+
+def main():
+ """Set all selected loaders to 32 bit"""
+ with comp_lock_and_undo_chunk(comp, 'Selected Loaders to 32bit'):
+ tools = comp.GetToolList(True, "Loader").values()
+ for tool in tools:
+ tool.Depth = 5
+
+
+main()
diff --git a/setup/fusion/scripts/Comp/colorbleed/32bit/loaders_to32bit.py b/setup/fusion/scripts/Comp/colorbleed/32bit/loaders_to32bit.py
new file mode 100644
index 0000000000..d86bef35f3
--- /dev/null
+++ b/setup/fusion/scripts/Comp/colorbleed/32bit/loaders_to32bit.py
@@ -0,0 +1,12 @@
+from avalon.fusion import comp_lock_and_undo_chunk
+
+
+def main():
+ """Set all loaders to 32 bit"""
+ with comp_lock_and_undo_chunk(comp, 'Loaders to 32bit'):
+ tools = comp.GetToolList(False, "Loader").values()
+ for tool in tools:
+ tool.Depth = 5
+
+
+main()
diff --git a/setup/fusion/scripts/Comp/colorbleed/duplicate_with_input_connections.py b/setup/fusion/scripts/Comp/colorbleed/duplicate_with_input_connections.py
new file mode 100644
index 0000000000..9f4f4a8f0a
--- /dev/null
+++ b/setup/fusion/scripts/Comp/colorbleed/duplicate_with_input_connections.py
@@ -0,0 +1,43 @@
+from avalon.fusion import comp_lock_and_undo_chunk
+
+
+def is_connected(input):
+ """Return whether an input has incoming connection"""
+ return input.GetAttrs()["INPB_Connected"]
+
+
+def duplicate_with_input_connections():
+ """Duplicate selected tools with incoming connections."""
+
+ original_tools = comp.GetToolList(True).values()
+ if not original_tools:
+ return # nothing selected
+
+ with comp_lock_and_undo_chunk(comp, "Duplicate With Input Connections"):
+
+ # Generate duplicates
+ comp.Copy()
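+        # Clear the active tool so the paste does not connect to it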
+ comp.SetActiveTool()
+ comp.Paste()
+ duplicate_tools = comp.GetToolList(True).values()
+
+ # Copy connections
+ for original, new in zip(original_tools, duplicate_tools):
+
+ original_inputs = original.GetInputList().values()
+ new_inputs = new.GetInputList().values()
+ assert len(original_inputs) == len(new_inputs)
+
+ for original_input, new_input in zip(original_inputs, new_inputs):
+
+ if is_connected(original_input):
+
+ if is_connected(new_input):
+ # Already connected if it is between the copied tools
+ continue
+
+ new_input.ConnectTo(original_input.GetConnectedOutput())
+ assert is_connected(new_input), "Must be connected now"
+
+
+duplicate_with_input_connections()
diff --git a/setup/fusion/scripts/Comp/colorbleed/set_rendermode.py b/setup/fusion/scripts/Comp/colorbleed/set_rendermode.py
new file mode 100644
index 0000000000..0fbcf1bf86
--- /dev/null
+++ b/setup/fusion/scripts/Comp/colorbleed/set_rendermode.py
@@ -0,0 +1,122 @@
+from avalon.vendor.Qt import QtCore, QtWidgets
+from avalon.vendor import qtawesome
+import avalon.fusion as avalon
+from avalon import style
+
+
+_help = {"renderlocal": "Render the comp on your own machine and publish "
+                        "it from the destination folder",
+         "deadline": "Submit a Fusion render job to Deadline to use all "
+                     "other computers and add a publish job"}
+
+
+class SetRenderMode(QtWidgets.QWidget):
+
+ def __init__(self, parent=None):
+ QtWidgets.QWidget.__init__(self, parent)
+
+ self._comp = avalon.get_current_comp()
+ self._comp_name = self._get_comp_name()
+
+ self.setWindowTitle("Set Render Mode")
+ self.setFixedSize(300, 175)
+
+ layout = QtWidgets.QVBoxLayout()
+
+ # region comp info
+ comp_info_layout = QtWidgets.QHBoxLayout()
+
+ update_btn = QtWidgets.QPushButton(qtawesome.icon("fa.refresh",
+ color="white"), "")
+ update_btn.setFixedWidth(25)
+ update_btn.setFixedHeight(25)
+
+ comp_information = QtWidgets.QLineEdit()
+ comp_information.setEnabled(False)
+
+ comp_info_layout.addWidget(comp_information)
+ comp_info_layout.addWidget(update_btn)
+ # endregion comp info
+
+ # region modes
+ mode_options = QtWidgets.QComboBox()
+ mode_options.addItems(_help.keys())
+
+ mode_information = QtWidgets.QTextEdit()
+ mode_information.setReadOnly(True)
+ # endregion modes
+
+ accept_btn = QtWidgets.QPushButton("Accept")
+
+ layout.addLayout(comp_info_layout)
+ layout.addWidget(mode_options)
+ layout.addWidget(mode_information)
+ layout.addWidget(accept_btn)
+
+ self.setLayout(layout)
+
+ self.comp_information = comp_information
+ self.update_btn = update_btn
+
+ self.mode_options = mode_options
+ self.mode_information = mode_information
+
+ self.accept_btn = accept_btn
+
+ self.connections()
+ self.update()
+
+ # Force updated render mode help text
+ self._update_rendermode_info()
+
+ def connections(self):
+ """Build connections between code and buttons"""
+
+ self.update_btn.clicked.connect(self.update)
+ self.accept_btn.clicked.connect(self._set_comp_rendermode)
+ self.mode_options.currentIndexChanged.connect(
+ self._update_rendermode_info)
+
+ def update(self):
+ """Update all information in the UI"""
+
+ self._comp = avalon.get_current_comp()
+ self._comp_name = self._get_comp_name()
+ self.comp_information.setText(self._comp_name)
+
+ # Update current comp settings
+ mode = self._get_comp_rendermode()
+ index = self.mode_options.findText(mode)
+ self.mode_options.setCurrentIndex(index)
+
+ def _update_rendermode_info(self):
+ rendermode = self.mode_options.currentText()
+ self.mode_information.setText(_help[rendermode])
+
+ def _get_comp_name(self):
+ return self._comp.GetAttrs("COMPS_Name")
+
+ def _get_comp_rendermode(self):
+ return self._comp.GetData("colorbleed.rendermode") or "renderlocal"
+
+ def _set_comp_rendermode(self):
+ rendermode = self.mode_options.currentText()
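+        # SetData stores the mode as persistent custom data with the comp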
+ self._comp.SetData("colorbleed.rendermode", rendermode)
+
+ self._comp.Print("Updated render mode to '%s'\n" % rendermode)
+
+ def _validation(self):
+ ui_mode = self.mode_options.currentText()
+ comp_mode = self._get_comp_rendermode()
+
+ return comp_mode == ui_mode
+
+
+if __name__ == '__main__':
+
+ import sys
+ app = QtWidgets.QApplication(sys.argv)
+ window = SetRenderMode()
+ window.setStyleSheet(style.load_stylesheet())
+ window.show()
+ sys.exit(app.exec_())
diff --git a/setup/fusion/scripts/Comp/colorbleed/switch_ui.py b/setup/fusion/scripts/Comp/colorbleed/switch_ui.py
new file mode 100644
index 0000000000..8f1466abe0
--- /dev/null
+++ b/setup/fusion/scripts/Comp/colorbleed/switch_ui.py
@@ -0,0 +1,201 @@
+import os
+import glob
+import logging
+
+import avalon.io as io
+import avalon.api as api
+import avalon.pipeline as pipeline
+import avalon.fusion
+import avalon.style as style
+from avalon.vendor.Qt import QtWidgets, QtCore
+from avalon.vendor import qtawesome as qta
+
+
+log = logging.getLogger("Fusion Switch Shot")
+
+
+class App(QtWidgets.QWidget):
+
+ def __init__(self, parent=None):
+
+ ################################################
+ # |---------------------| |------------------| #
+ # |Comp | |Asset | #
+ # |[..][ v]| |[ v]| #
+ # |---------------------| |------------------| #
+ # | Update existing comp [ ] | #
+ # |------------------------------------------| #
+ # | Switch | #
+ # |------------------------------------------| #
+ ################################################
+
+ QtWidgets.QWidget.__init__(self, parent)
+
+ layout = QtWidgets.QVBoxLayout()
+
+ # Comp related input
+ comp_hlayout = QtWidgets.QHBoxLayout()
+ comp_label = QtWidgets.QLabel("Comp file")
+ comp_label.setFixedWidth(50)
+ comp_box = QtWidgets.QComboBox()
+
+ button_icon = qta.icon("fa.folder", color="white")
+ open_from_dir = QtWidgets.QPushButton()
+ open_from_dir.setIcon(button_icon)
+
+ comp_box.setFixedHeight(25)
+ open_from_dir.setFixedWidth(25)
+ open_from_dir.setFixedHeight(25)
+
+ comp_hlayout.addWidget(comp_label)
+ comp_hlayout.addWidget(comp_box)
+ comp_hlayout.addWidget(open_from_dir)
+
+ # Asset related input
+ asset_hlayout = QtWidgets.QHBoxLayout()
+ asset_label = QtWidgets.QLabel("Shot")
+ asset_label.setFixedWidth(50)
+
+ asset_box = QtWidgets.QComboBox()
+ asset_box.setLineEdit(QtWidgets.QLineEdit())
+ asset_box.setFixedHeight(25)
+
+ refresh_icon = qta.icon("fa.refresh", color="white")
+ refresh_btn = QtWidgets.QPushButton()
+ refresh_btn.setIcon(refresh_icon)
+
+ asset_box.setFixedHeight(25)
+ refresh_btn.setFixedWidth(25)
+ refresh_btn.setFixedHeight(25)
+
+ asset_hlayout.addWidget(asset_label)
+ asset_hlayout.addWidget(asset_box)
+ asset_hlayout.addWidget(refresh_btn)
+
+ # Options
+ options = QtWidgets.QHBoxLayout()
+ options.setAlignment(QtCore.Qt.AlignLeft)
+
+ current_comp_check = QtWidgets.QCheckBox()
+ current_comp_check.setChecked(True)
+ current_comp_label = QtWidgets.QLabel("Use current comp")
+
+ options.addWidget(current_comp_label)
+ options.addWidget(current_comp_check)
+
+ accept_btn = QtWidgets.QPushButton("Switch")
+
+ layout.addLayout(options)
+ layout.addLayout(comp_hlayout)
+ layout.addLayout(asset_hlayout)
+ layout.addWidget(accept_btn)
+
+ self._open_from_dir = open_from_dir
+ self._comps = comp_box
+ self._assets = asset_box
+ self._use_current = current_comp_check
+ self._accept_btn = accept_btn
+ self._refresh_btn = refresh_btn
+
+ self.setWindowTitle("Fusion Switch Shot")
+ self.setLayout(layout)
+
+ self.resize(260, 140)
+ self.setMinimumWidth(260)
+ self.setFixedHeight(140)
+
+ self.connections()
+
+ # Update ui to correct state
+ self._on_use_current_comp()
+ self._refresh()
+
+ def connections(self):
+ self._use_current.clicked.connect(self._on_use_current_comp)
+ self._open_from_dir.clicked.connect(self._on_open_from_dir)
+ self._refresh_btn.clicked.connect(self._refresh)
+ self._accept_btn.clicked.connect(self._on_switch)
+
+ def _on_use_current_comp(self):
+ state = self._use_current.isChecked()
+ self._open_from_dir.setEnabled(not state)
+ self._comps.setEnabled(not state)
+
+ def _on_open_from_dir(self):
+
+ start_dir = self._get_context_directory()
+ comp_file, _ = QtWidgets.QFileDialog.getOpenFileName(
+ self, "Choose comp", start_dir)
+
+ if not comp_file:
+ return
+
+        # Add the chosen comp file and refresh the asset completer
+ self.populate_comp_box([comp_file])
+ self._refresh()
+
+ def _refresh(self):
+ # Clear any existing items
+ self._assets.clear()
+
+ asset_names = [a["name"] for a in self.collect_assets()]
+ completer = QtWidgets.QCompleter(asset_names)
+
+ self._assets.setCompleter(completer)
+ self._assets.addItems(asset_names)
+
+ def _on_switch(self):
+
+ if not self._use_current.isChecked():
+ file_name = self._comps.itemData(self._comps.currentIndex())
+ else:
+ comp = avalon.fusion.get_current_comp()
+ file_name = comp.GetAttrs("COMPS_FileName")
+
+ asset = self._assets.currentText()
+
+ import colorbleed.scripts.fusion_switch_shot as switch_shot
+ switch_shot.switch(asset_name=asset, filepath=file_name, new=True)
+
+ def _get_context_directory(self):
+
+ project = io.find_one({"type": "project",
+ "name": api.Session["AVALON_PROJECT"]},
+ projection={"config": True})
+
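+        # The project's work template is a path pattern; a typical avalon
+        # setup uses something like (illustrative):
+        #   {root}/{project}/{silo}/{asset}/work/{task}/{app}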
+ template = project["config"]["template"]["work"]
+        work_dir = pipeline._format_work_template(template, api.Session)
+
+        return work_dir
+
+ def collect_slap_comps(self, directory):
+ items = glob.glob("{}/*.comp".format(directory))
+ return items
+
+ def collect_assets(self):
+ return list(io.find({"type": "asset", "silo": "film"}))
+
+ def populate_comp_box(self, files):
+        """Display only the file name while storing the full path as data
+
+        Args:
+            files (list): list of full file paths, e.g. [path/to/item.ext]
+
+ Returns:
+ None
+ """
+
+ for f in files:
+ filename = os.path.basename(f)
+ self._comps.addItem(filename, userData=f)
+
+
+if __name__ == '__main__':
+ import sys
+ api.install(avalon.fusion)
+
+ app = QtWidgets.QApplication(sys.argv)
+ window = App()
+ window.setStyleSheet(style.load_stylesheet())
+ window.show()
+ sys.exit(app.exec_())
diff --git a/setup/fusion/scripts/Comp/colorbleed/update_selected_loader_ranges.py b/setup/fusion/scripts/Comp/colorbleed/update_selected_loader_ranges.py
new file mode 100644
index 0000000000..f42a032e84
--- /dev/null
+++ b/setup/fusion/scripts/Comp/colorbleed/update_selected_loader_ranges.py
@@ -0,0 +1,32 @@
+"""Forces Fusion to 'retrigger' the Loader to update.
+
+Warning:
+ This might change settings like 'Reverse', 'Loop', trims and other
+ settings of the Loader. So use this at your own risk.
+
+"""
+
+from avalon.fusion import comp_lock_and_undo_chunk
+
+
+with comp_lock_and_undo_chunk(comp, "Reload clip time ranges"):
+ tools = comp.GetToolList(True, "Loader").values()
+ for tool in tools:
+
+        # Get tool attributes
+        tool_attrs = tool.GetAttrs()
+        clip_table = tool_attrs['TOOLST_Clip_Name']
+        alt_clip_table = tool_attrs['TOOLST_AltClip_Name']
+        start_time = tool_attrs['TOOLNT_Clip_Start']
+        old_global_in = tool.GlobalIn[comp.CurrentTime]
+
+        # Re-assign the clips to themselves to force Fusion to reload them
+        for index, _ in clip_table.items():
+            time = start_time[index]
+            tool.Clip[time] = tool.Clip[time]
+
+        for index, _ in alt_clip_table.items():
+            time = start_time[index]
+            tool.ProxyFilename[time] = tool.ProxyFilename[time]
+
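+        # Restore the original global-in point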
+ tool.GlobalIn[comp.CurrentTime] = old_global_in