From 6d7262d26b9b6b6fab3fd7498d0b22b7614cbded Mon Sep 17 00:00:00 2001 From: aardschok Date: Thu, 27 Jul 2017 11:46:01 +0200 Subject: [PATCH 01/28] fixed assignement to alembic mesh, code improvement --- colorbleed/maya/lib.py | 64 ++++++++++------------- colorbleed/plugins/maya/load/load_look.py | 39 +++++++------- 2 files changed, 47 insertions(+), 56 deletions(-) diff --git a/colorbleed/maya/lib.py b/colorbleed/maya/lib.py index a2715863ee..4a8fd2238c 100644 --- a/colorbleed/maya/lib.py +++ b/colorbleed/maya/lib.py @@ -5,6 +5,7 @@ import os import bson import json import logging +import pprint import contextlib from collections import OrderedDict, defaultdict @@ -635,13 +636,9 @@ def filter_by_id(nodes, uuids): if node is None: continue - if not cmds.attributeQuery("cbId", node=node, exists=True): - continue + attribute_value = _get_id(node) - # Deformed shaped - attr = "{}.cbId".format(node) - attribute_value = cmds.getAttr(attr) - if attribute_value not in uuids: + if attribute_value not in uuids or attribute_value is None: continue filtered_nodes.append(node) @@ -792,48 +789,45 @@ def assign_look(nodes, subset="lookDefault"): assign_look_by_version(asset_nodes, version['_id']) -def apply_shaders(relationships, shader_nodes, nodes): - """Apply all shaders to the nodes based on the relationship data +def apply_shaders(relationships, shadernodes, nodes): + """Link shadingEngine to the right nodes based on relationship data + + Relationship data is constructed of a collection of `sets` and `attributes` + `sets` corresponds with the shaderEngines found in the lookdev. + Each set has the keys `name`, `members` and `uuid`, the `members` + hold a collection of node information `name` and `uuid`. Args: - relationships (dict): shader to node relationships - shader_nodes (list): shader network nodes - nodes (list): nodes to assign to + relationships (dict): relationship data + shadernodes (list): list of nodes of the shading engine + nodes (list): list of nodes to apply shader to Returns: None """ + # attributes = relationships.get("attributes", []) shader_sets = relationships.get("sets", []) - shading_engines = cmds.ls(shader_nodes, type="shadingEngine", long=True) + + if isinstance(nodes, set): + nodes = list(nodes) + + shading_engines = cmds.ls(shadernodes, type="shadingEngine", long=True) assert len(shading_engines) > 0, ("Error in retrieving shading engine " "from reference") - # Pre-filter nodes and shader nodes - nodes_by_id = defaultdict(list) - shader_nodes_by_id = defaultdict(list) - for node in nodes: - _id = _get_id(node) - nodes_by_id[_id].append(node) - - for shader_node in shader_nodes: - _id = _get_id(shader_node) - shader_nodes_by_id[_id].append(shader_node) - - # get all nodes which we need to link per shader + # get all nodes which we need to link + ns_nodes = cmds.ls(nodes, long=True) for shader_set in shader_sets: - # collect shading engine - uuid = shader_set["uuid"] - shading_engine = shader_nodes_by_id.get(uuid, []) + # collect all unique IDs of the set members + shader_uuid = shader_set["uuid"] + member_uuids = [member["uuid"] for member in shader_set["members"]] + + filtered_nodes = filter_by_id(ns_nodes, member_uuids) + shading_engine = filter_by_id(shading_engines, [shader_uuid]) + assert len(shading_engine) == 1, ("Could not find the correct " "shading engine with cbId " - "'{}'".format(uuid)) - - # collect members - filtered_nodes = list() - for member in shader_set["members"]: - member_uuid = member["uuid"] - members = nodes_by_id.get(member_uuid, []) - 
filtered_nodes.extend(members) + "'{}'".format(shader_uuid)) cmds.sets(filtered_nodes, forceElement=shading_engine[0]) diff --git a/colorbleed/plugins/maya/load/load_look.py b/colorbleed/plugins/maya/load/load_look.py index 557d18a3c2..567cdf4bf5 100644 --- a/colorbleed/plugins/maya/load/load_look.py +++ b/colorbleed/plugins/maya/load/load_look.py @@ -30,8 +30,6 @@ class LookLoader(api.Loader): """ - - # improve readability of the namespace assetname = context["asset"]["name"] ns_assetname = "{}_".format(assetname) @@ -88,29 +86,25 @@ class LookLoader(api.Loader): """ - # types = ["transform", "mesh"] list_nodes = [] - namespaces = cmds.namespaceInfo(listOnlyNamespaces=True) - # remove basic namespaces - namespaces.remove("UI") - namespaces.remove("shared") + namespaces = [ns for ns in cmds.namespaceInfo(listOnlyNamespaces=True) + if ns not in ["UI", "shared"] or not ns.endswith("look")] - for ns in namespaces: - if not ns.startswith(assetname): + for namespace in namespaces: + if not namespace.startswith(assetname): continue + + ns_nodes = cmds.namespaceInfo(namespace, + listOnlyDependencyNodes=True) # get reference nodes - ns_nodes = cmds.namespaceInfo(ns, listOnlyDependencyNodes=True) - # TODO: might need to extend the types - # check if any nodes are connected to something else than lambert1 - list_nodes = cmds.ls(ns_nodes, long=True) - unassigned_nodes = [self.has_default_shader(n) for n in list_nodes] - nodes = [n for n in unassigned_nodes if n is not None] + list_nodes.extend([self.has_default_shader(n) for n in ns_nodes]) - list_nodes.extend(nodes) + # ensure unique nodes and kick out any None types + result = [node for node in list_nodes if node is not None] - return set(list_nodes) + return result def has_default_shader(self, node): """Check if the nodes have `initialShadingGroup` shader assigned @@ -122,12 +116,15 @@ class LookLoader(api.Loader): str """ - shaders = cmds.listConnections(node, type="shadingEngine") or [] - if "initialShadingGroup" in shaders: + shaders = cmds.listConnections(node, type="shadingEngine") + if shaders is None or "initialShadingGroup" in shaders: # return transform node - transform = cmds.listRelatives(node, parent=True, type="transform", + transform = cmds.listRelatives(node, + parent=True, + type="transform", fullPath=True) + if not transform: - return [] + return return transform[0] From 62b92dfbb3ca94f497f105499c916409df62ec5e Mon Sep 17 00:00:00 2001 From: aardschok Date: Wed, 2 Aug 2017 10:12:54 +0200 Subject: [PATCH 02/28] added previous pipeline environment paths --- .../maya/publish/_validate_node_ids.py | 46 +++ .../publish/validate_filename_convention.py | 37 +++ .../publish/collect_resource_destination.py | 94 ++++++ colorbleed/plugins/publish/integrate.py | 302 ++++++++++++++++++ maya_environment.bat | 70 ++++ python_environment.bat | 30 ++ set_environment.bat | 33 ++ 7 files changed, 612 insertions(+) create mode 100644 colorbleed/plugins/maya/publish/_validate_node_ids.py create mode 100644 colorbleed/plugins/maya/publish/validate_filename_convention.py create mode 100644 colorbleed/plugins/publish/collect_resource_destination.py create mode 100644 colorbleed/plugins/publish/integrate.py create mode 100644 maya_environment.bat create mode 100644 python_environment.bat create mode 100644 set_environment.bat diff --git a/colorbleed/plugins/maya/publish/_validate_node_ids.py b/colorbleed/plugins/maya/publish/_validate_node_ids.py new file mode 100644 index 0000000000..bfb47abe33 --- /dev/null +++ 
b/colorbleed/plugins/maya/publish/_validate_node_ids.py
@@ -0,0 +1,46 @@
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateNodeIds(pyblish.api.InstancePlugin):
+    """Validate nodes have colorbleed id attributes
+
+    All look sets should have id attributes.
+
+    """
+
+    label = 'Node Id Attributes'
+    families = ['colorbleed.look', 'colorbleed.model']
+    hosts = ['maya']
+    order = colorbleed.api.ValidatePipelineOrder
+    actions = [colorbleed.api.SelectInvalidAction,
+               colorbleed.api.GenerateUUIDsOnInvalidAction]
+
+    @staticmethod
+    def get_invalid(instance):
+        import maya.cmds as cmds
+
+        nodes = instance.data["setMembers"]
+
+        # Ensure all nodes have a unique cbId
+        data_id = {}
+        invalid = []
+        for node in nodes:
+            try:
+                uuid = cmds.getAttr("{}.cbId".format(node))
+                if uuid in data_id:
+                    invalid.append(node)
+                data_id[uuid] = node
+            except RuntimeError:
+                pass
+
+        return invalid
+
+    def process(self, instance):
+        """Process all meshes"""
+
+        invalid = self.get_invalid(instance)
+
+        if invalid:
+            raise RuntimeError("Nodes found with invalid "
+                               "asset IDs: {0}".format(invalid))
diff --git a/colorbleed/plugins/maya/publish/validate_filename_convention.py b/colorbleed/plugins/maya/publish/validate_filename_convention.py
new file mode 100644
index 0000000000..7a9a44e02f
--- /dev/null
+++ b/colorbleed/plugins/maya/publish/validate_filename_convention.py
@@ -0,0 +1,37 @@
+import re
+
+import pyblish.api
+import colorbleed.api
+
+
+class ValidateFileNameConvention(pyblish.api.InstancePlugin):
+
+    label = "Filename Convention"
+    families = ["colorbleed.lookdev"]
+    hosts = ["maya"]
+    optional = True
+
+    order = pyblish.api.ValidatorOrder
+    actions = [colorbleed.api.SelectInvalidAction]
+
+    @staticmethod
+    def get_invalid(instance):
+
+        invalid = []
+        # todo: change pattern to company standard
+        pattern = re.compile("[a-zA-Z]+_[A-Z]{3}")
+
+        nodes = list(instance)
+        for node in nodes:
+            match = pattern.match(node)
+            if not match:
+                invalid.append(node)
+
+        return invalid
+
+    def process(self, instance):
+
+        invalid = self.get_invalid(instance)
+        if invalid:
+            self.log.error("Found invalid naming convention. 
Failed noted :\n" + "%s" % invalid) diff --git a/colorbleed/plugins/publish/collect_resource_destination.py b/colorbleed/plugins/publish/collect_resource_destination.py new file mode 100644 index 0000000000..814f6fb612 --- /dev/null +++ b/colorbleed/plugins/publish/collect_resource_destination.py @@ -0,0 +1,94 @@ +import pyblish.api +import os + +import avalon.io as io + + +class CollectResourceDestination(pyblish.api.InstancePlugin): + """This plug-ins displays the comment dialog box per default""" + + label = "Collect Resource Destination" + order = pyblish.api.CollectorOrder + 0.499 + + def process(self, instance): + + self.create_destination_template(instance) + + template_data = instance.data["assumedTemplateData"] + template = instance.data["template"] + + mock_template = template.format(**template_data) + + # For now assume resources end up in a "resources" folder in the + # published folder + mock_destination = os.path.join(os.path.dirname(mock_template), + "resources") + + # Clean the path + mock_destination = os.path.abspath(os.path.normpath(mock_destination)) + + # Define resource destination and transfers + resources = instance.data.get("resources", list()) + transfers = instance.data.get("transfers", list()) + for resource in resources: + + # Add destination to the resource + source_filename = os.path.basename(resource["source"]) + destination = os.path.join(mock_destination, source_filename) + resource['destination'] = destination + + # Collect transfers for the individual files of the resource + # e.g. all individual files of a cache or UDIM textures. + files = resource['files'] + for fsrc in files: + fname = os.path.basename(fsrc) + fdest = os.path.join(mock_destination, fname) + transfers.append([fsrc, fdest]) + + instance.data["resources"] = resources + instance.data["transfers"] = transfers + + def create_destination_template(self, instance): + """Create a filepath based on the current data available + + Example template: + {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/ + {subset}.{representation} + Args: + instance: the instance to publish + + Returns: + file path (str) + """ + + # get all the stuff from the database + subset_name = instance.data["subset"] + project_name = os.environ["AVALON_PROJECT"] + + project = io.find_one({"type": "project", + "name": project_name}, + projection={"config": True}) + template = project["config"]["template"]["publish"] + + subset = io.find_one({"type": "subset", + "name": subset_name}) + + # assume there is no version yet, we start at `1` + version_number = 1 + if subset is not None: + version = io.find_one({"type": "version", + "parent": subset["_id"]}, + sort=[("name", -1)]) + # if there is a subset there ought to be version + version_number += version["name"] + + template_data = {"root": os.environ["AVALON_ROOT"], + "project": project_name, + "silo": os.environ["AVALON_SILO"], + "asset": instance.data["asset"], + "subset": subset_name, + "version": version_number, + "representation": "TEMP"} + + instance.data["assumedTemplateData"] = template_data + instance.data["template"] = template diff --git a/colorbleed/plugins/publish/integrate.py b/colorbleed/plugins/publish/integrate.py new file mode 100644 index 0000000000..2c21866514 --- /dev/null +++ b/colorbleed/plugins/publish/integrate.py @@ -0,0 +1,302 @@ +import os +import logging +import shutil + +import errno +import pyblish.api +from avalon import api, io + + +log = logging.getLogger(__name__) + + +class IntegrateAsset(pyblish.api.InstancePlugin): + """Resolve 
any dependency issies + + This plug-in resolves any paths which, if not updated might break + the published file. + + The order of families is important, when working with lookdev you want to + first publish the texture, update the texture paths in the nodes and then + publish the shading network. Same goes for file dependent assets. + """ + + label = "Intergrate Asset" + order = pyblish.api.IntegratorOrder + families = ["colorbleed.model", + "colorbleed.rig", + "colorbleed.animation", + "colorbleed.camera", + "colorbleed.lookdev", + "colorbleed.texture", + "colorbleed.historyLookdev", + "colorbleed.group"] + + def process(self, instance): + + self.log.info("Integrating Asset in to the database ...") + + self.register(instance) + self.intergrate(instance) + + self.log.info("Removing temporary files and folders ...") + stagingdir = instance.data["stagingDir"] + shutil.rmtree(stagingdir) + + def register(self, instance): + + # Required environment variables + PROJECT = os.environ["AVALON_PROJECT"] + ASSET = instance.data.get("asset") or os.environ["AVALON_ASSET"] + SILO = os.environ["AVALON_SILO"] + LOCATION = os.getenv("AVALON_LOCATION") + + # todo(marcus): avoid hardcoding labels in the integrator + representation_labels = {".ma": "Maya Ascii", + ".source": "Original source file", + ".abc": "Alembic"} + + context = instance.context + # Atomicity + # + # Guarantee atomic publishes - each asset contains + # an identical set of members. + # __ + # / o + # / \ + # | o | + # \ / + # o __/ + # + assert all(result["success"] for result in context.data["results"]), ( + "Atomicity not held, aborting.") + + # Assemble + # + # | + # v + # ---> <---- + # ^ + # | + # + stagingdir = instance.data.get("stagingDir") + assert stagingdir, ("Incomplete instance \"%s\": " + "Missing reference to staging area." % instance) + + # extra check if stagingDir actually exists and is available + + self.log.debug("Establishing staging directory @ %s" % stagingdir) + + project = io.find_one({"type": "project"}, + projection={"config.template.publish": True}) + + asset = io.find_one({"type": "asset", + "name": ASSET, + "parent": project["_id"]}) + + assert all([project, asset]), ("Could not find current project or " + "asset '%s'" % ASSET) + + subset = self.get_subset(asset, instance) + + # get next version + latest_version = io.find_one({"type": "version", + "parent": subset["_id"]}, + {"name": True}, + sort=[("name", -1)]) + + next_version = 1 + if latest_version is not None: + next_version += latest_version["name"] + + self.log.debug("Next version: %i" % next_version) + + version_data = self.create_version_data(context, instance) + version = self.create_version(subset=subset, + version_number=next_version, + locations=[LOCATION], + data=version_data) + + self.log.debug("Creating version ...") + version_id = io.insert_one(version).inserted_id + + # Write to disk + # _ + # | | + # _| |_ + # ____\ / + # |\ \ / \ + # \ \ v \ + # \ \________. 
+ # \|________| + # + root = api.registered_root() + template_data = {"root": root, + "project": PROJECT, + "silo": SILO, + "asset": ASSET, + "subset": subset["name"], + "version": version["name"]} + + template_publish = project["config"]["template"]["publish"] + + representations = [] + staging_content = os.listdir(stagingdir) + for v, fname in enumerate(staging_content): + + name, ext = os.path.splitext(fname) + template_data["representation"] = ext[1:] + + src = os.path.join(stagingdir, fname) + dst = template_publish.format(**template_data) + + # Backwards compatibility + if fname == ".metadata.json": + dirname = os.path.dirname(dst) + dst = os.path.join(dirname, fname) + + # copy source to destination (library) + instance.data["transfers"].append([src, dst]) + + representation = { + "schema": "avalon-core:representation-2.0", + "type": "representation", + "parent": version_id, + "name": ext[1:], + "data": {"label": representation_labels.get(ext)}, + "dependencies": instance.data.get("dependencies", "").split(), + + # Imprint shortcut to context + # for performance reasons. + "context": { + "project": PROJECT, + "asset": ASSET, + "silo": SILO, + "subset": subset["name"], + "version": version["name"], + "representation": ext[1:] + } + } + representations.append(representation) + + # store data for database and source / destinations + instance.data["representations"] = representations + + return representations + + def intergrate(self, instance): + """Register the representations and move the files + + Through the stored `representations` and `transfers` + + Args: + instance: the instance to integrate + """ + + # get needed data + traffic = instance.data["transfers"] + representations = instance.data["representations"] + + self.log.info("Registering {} items".format(len(representations))) + io.insert_many(representations) + + # moving files + for src, dest in traffic: + self.log.info("Copying file .. {} -> {}".format(src, dest)) + self.copy_file(src, dest) + + + def copy_file(self, src, dst): + """ Copy given source to destination + + Arguments: + src (str): the source file which needs to be copied + dst (str): the destination of the sourc file + Returns: + None + """ + + dirname = os.path.dirname(dst) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + self.log.critical("An unexpected error occurred.") + raise + + shutil.copy(src, dst) + + def get_subset(self, asset, instance): + + subset = io.find_one({"type": "subset", + "parent": asset["_id"], + "name": instance.data["subset"]}) + + if subset is None: + subset_name = instance.data["subset"] + self.log.info("Subset '%s' not found, creating.." 
% subset_name) + + _id = io.insert_one({ + "schema": "avalon-core:subset-2.0", + "type": "subset", + "name": subset_name, + "data": {}, + "parent": asset["_id"] + }).inserted_id + + subset = io.find_one({"_id": _id}) + + return subset + + def create_version(self, subset, version_number, locations, data=None): + """ Copy given source to destination + + Arguments: + subset (dict): the registered subset of the asset + version_number (int): the version number + locations (list): the currently registered locations + """ + # Imprint currently registered location + version_locations = [location for location in locations if + location is not None] + + return {"schema": "avalon-core:version-2.0", + "type": "version", + "parent": subset["_id"], + "name": version_number, + "locations": version_locations, + "data": data} + + def create_version_data(self, context, instance): + """Create the data collection for th version + + Args: + context: the current context + instance: the current instance being published + + Returns: + dict: the required information with instance.data as key + """ + + families = [] + current_families = instance.data.get("families", list()) + instance_family = instance.data.get("family", None) + + families += current_families + if instance_family is not None: + families.append(instance_family) + + # create relative source path for DB + relative_path = os.path.relpath(context.data["currentFile"], + api.registered_root()) + source = os.path.join("{root}", relative_path).replace("\\", "/") + + version_data = {"families": families, + "time": context.data["time"], + "author": context.data["user"], + "source": source, + "comment": context.data.get("comment")} + + return dict(instance.data, **version_data) diff --git a/maya_environment.bat b/maya_environment.bat new file mode 100644 index 0000000000..efeb2d4063 --- /dev/null +++ b/maya_environment.bat @@ -0,0 +1,70 @@ +@echo OFF + +echo Entering Maya2016 environment... 
+ +:: Environment: Maya +set CB_MAYA_VERSION=2016 +set CB_MAYA_SHARED=%CB_APP_SHARED%\maya_shared\%CB_MAYA_VERSION% + +if "%CB_MAYA_SHARED%" == "" ( + echo Error: "CB_MAYA_SHARED" not set + goto :eof +) + + +:: Colorbleed Maya +set PYTHONPATH=%CB_PIPELINE%\git\cbMayaScripts;%PYTHONPATH% +set PYTHONPATH=%CB_PIPELINE%\git\inventory\python;%PYTHONPATH% + +:: Maya shared +set MAYA_PLUG_IN_PATH=%CB_MAYA_SHARED%\plugins;%MAYA_PLUGIN_PATH% +set MAYA_SHELF_PATH=%CB_MAYA_SHARED%\prefs\shelves;%MAYA_SHELF_PATH% +set MAYA_SCRIPT_PATH=%CB_MAYA_SHARED%\scripts;%MAYA_SCRIPT_PATH% +set XBMLANGPATH=%CB_MAYA_SHARED%\prefs\icons;%XBMLANGPATH% +set MAYA_PRESET_PATH=%CB_MAYA_SHARED%\prefs\attrPresets;%MAYA_PRESET_PATH% +set PYTHONPATH=%CB_MAYA_SHARED%\scripts;%PYTHONPATH% +set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules;%MAYA_MODULE_PATH% + +:: Additional modules +set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\mGear_2016;%MAYA_MODULE_PATH% +set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\SOuP;%MAYA_MODULE_PATH% +set MAYA_SHELF_PATH=%CB_MAYA_SHARED%\modules\SOuP\shelves;%MAYA_SHELF_PATH% +set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\pdipro35c_Maya2016x64;%MAYA_MODULE_PATH% +set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\ovdb\maya\maya2016;%MAYA_MODULE_PATH% +set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\cvshapeinverter;%MAYA_MODULE_PATH% +set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Toolchefs;%MAYA_MODULE_PATH% +set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Exocortex;%MAYA_MODULE_PATH% + +:: Miarmy +set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Basefount\Miarmy;%MAYA_MODULE_PATH% +set PATH=%CB_MAYA_SHARED%\modules\Basefount\Miarmy\bin;%PATH% +set VRAY_PLUGINS_x64=%CB_MAYA_SHARED%\modules\Basefount\Miarmy\bin\vray\vray_3.1_3.3_3.4\Maya2015and2016;%VRAY_PLUGINS_x64%; + +:: Yeti +set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64;%MAYA_MODULE_PATH% +set PATH=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\bin;%PATH%; +set VRAY_PLUGINS_x64=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\bin;%VRAY_PLUGINS_x64%; +set VRAY_FOR_MAYA2016_PLUGINS_x64=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\bin;%VRAY_FOR_MAYA2016_PLUGINS_x64%; +set REDSHIFT_MAYAEXTENSIONSPATH=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\plug-ins;%REDSHIFT_MAYAEXTENSIONSPATH% +set peregrinel_LICENSE=5053@CBserver + +:: maya-capture +set PYTHONPATH=%CB_PIPELINE%\git\maya-capture;%PYTHONPATH% +set PYTHONPATH=%CB_PIPELINE%\git\maya-capture-gui;%PYTHONPATH% +set PYTHONPATH=%CB_PIPELINE%\git\maya-capture-gui-cb;%PYTHONPATH% + +:: maya-matrix-deform +set PYTHONPATH=%CB_PIPELINE%\git\maya-matrix-deformers;%PYTHONPATH% +set MAYA_PLUG_IN_PATH=%CB_PIPELINE%\git\maya-matrix-deformers\plugin;%MAYA_PLUG_IN_PATH% + +:: rapid-rig +set XBMLANGPATH=%CB_MAYA_SHARED%\scripts\RapidRig_Modular_V02;%XBMLANGPATH% +set MAYA_SCRIPT_PATH=%CB_MAYA_SHARED%\scripts\RapidRig_Modular_V02;%MAYA_SCRIPT_PATH% + + +:: Fix Maya Playblast Color Management depth +set MAYA_FLOATING_POINT_RT_PLAYBLAST=1 + + +:: Fix V-ray forcing affinity to 100% +set VRAY_USE_THREAD_AFFINITY=0 \ No newline at end of file diff --git a/python_environment.bat b/python_environment.bat new file mode 100644 index 0000000000..50fafd391c --- /dev/null +++ b/python_environment.bat @@ -0,0 +1,30 @@ +@echo OFF +echo Entering Python environment... 
+ +set CB_PYTHON_VERSION=2.7 + +where /Q python.exe +if ERRORLEVEL 1 ( + if EXIST C:\Python27\python.exe ( + echo Adding C:\Python27 to PATH + set "PATH=%PATH%;C:\Python27" + goto:has-python + ) else ( + echo Adding embedded python (pipeline) + set "PATH=%PATH%;%CB_APP_SHARED%\python\standalone\%CB_PYTHON_VERSION%\bin" + goto:has-python + ) +) +:has-python + +:: Python universal (non-compiled) +set PYTHONPATH=%PYTHONPATH%;%CB_APP_SHARED%\python\universal\site-packages + +:: Python version/windows-specific +:: set PYTHONPATH=%PYTHONPATH%;%CB_APP_SHARED%\python\win\%CB_PYTHON_VERSION% + +:: Python standalone (compiled to version) +if NOT "%CB_PYTHON_STANDALONE%" == "0" ( + echo Entering Python Standalone environment... + set PYTHONPATH=%PYTHONPATH%;%CB_APP_SHARED%\python\standalone\%CB_PYTHON_VERSION%\site-packages +) diff --git a/set_environment.bat b/set_environment.bat new file mode 100644 index 0000000000..832d1de1d5 --- /dev/null +++ b/set_environment.bat @@ -0,0 +1,33 @@ +@echo off +echo Entering pipeline (raw development) environment... + +:: Initialize environment +set CB_PIPELINE=P:\pipeline\dev + +set CB_APP_SHARED=%CB_PIPELINE%\apps + +if "%CB_APP_SHARED%" == "" ( + echo Error: "CB_APP_SHARED" not set + goto :eof +) + +echo setting STORAGE.. +set STORAGE=P: + +set LAUNCHER_ROOT=%~dp0/launchers + +:: Core +echo Add cb core.. +set PYTHONPATH=%CB_PIPELINE%\git\cb;%PYTHONPATH% +set PYTHONPATH=%CB_PIPELINE%\git\cbra;%PYTHONPATH% + +:: Extra +set PYTHONPATH=%CB_PIPELINE%\git\pyseq;%PYTHONPATH% +set PYTHONPATH=%CB_PIPELINE%\git\Qt.py;%PYTHONPATH% + + +:: Ftrack-connect +::set PYTHONPATH=%CB_PIPELINE%\git\ftrack-connect\source;%PYTHONPATH% + +:: FFMPEG +set FFMPEG_PATH=%CB_APP_SHARED%\ffmpeg\bin\ffmpeg.exe \ No newline at end of file From c8821cbe05ab37eedbce667ebece5de79a3b5d56 Mon Sep 17 00:00:00 2001 From: aardschok Date: Wed, 2 Aug 2017 10:27:17 +0200 Subject: [PATCH 03/28] updated collect look and model, improved extract look to ensure textures can be publish with lookdev --- .../plugins/maya/publish/collect_look.py | 14 ++++++-- .../plugins/maya/publish/collect_model.py | 2 +- .../plugins/maya/publish/extract_look.py | 33 ++++++++++++------- 3 files changed, 34 insertions(+), 15 deletions(-) diff --git a/colorbleed/plugins/maya/publish/collect_look.py b/colorbleed/plugins/maya/publish/collect_look.py index 8511914313..f11699dd8f 100644 --- a/colorbleed/plugins/maya/publish/collect_look.py +++ b/colorbleed/plugins/maya/publish/collect_look.py @@ -1,6 +1,10 @@ +import os +import pprint + from maya import cmds import pyblish.api +import avalon.io as io from cb.utils.maya import context, shaders import cbra.utils.maya.node_uuid as id_utils @@ -120,9 +124,15 @@ class CollectLook(pyblish.api.InstancePlugin): instance.data["lookData"] = {"attributes": attributes, "relationships": sets.values(), "sets": looksets} - # Collect textures - resources = [self.collect_resource(n) for n in cmds.ls(type="file")] + + # Collect file nodes used by shading engines + history = cmds.listHistory(looksets) + files = cmds.ls(history, type="file", long=True) + + # Collect textures, + resources = [self.collect_resource(n) for n in files] instance.data["resources"] = resources + # pprint.pprint(resources) # Log a warning when no relevant sets were retrieved for the look. 
if not instance.data["lookData"]["sets"]: diff --git a/colorbleed/plugins/maya/publish/collect_model.py b/colorbleed/plugins/maya/publish/collect_model.py index b1294c7824..c8324a8297 100644 --- a/colorbleed/plugins/maya/publish/collect_model.py +++ b/colorbleed/plugins/maya/publish/collect_model.py @@ -11,7 +11,7 @@ class CollectModelData(pyblish.api.InstancePlugin): """ order = pyblish.api.CollectorOrder + 0.499 - label = 'Model Data' + label = 'Collect Model Data' families = ["colorbleed.model"] def process(self, instance): diff --git a/colorbleed/plugins/maya/publish/extract_look.py b/colorbleed/plugins/maya/publish/extract_look.py index 04c632e2d9..46d45400a4 100644 --- a/colorbleed/plugins/maya/publish/extract_look.py +++ b/colorbleed/plugins/maya/publish/extract_look.py @@ -39,26 +39,35 @@ class ExtractLook(colorbleed.api.Extractor): # Remove all members of the sets so they are not included in the # exported file by accident - self.log.info("Extract sets (Maya ASCII)..") + self.log.info("Extract sets (Maya ASCII) ...") lookdata = instance.data["lookData"] sets = lookdata["sets"] + resources = instance.data["resources"] + remap = {} + for resource in resources: + attr = resource['attribute'] + remap[attr] = resource['destination'] + + self.log.info("Finished remapping destinations ...") + # Extract in correct render layer layer = instance.data.get("renderlayer", "defaultRenderLayer") with context.renderlayer(layer): # TODO: Ensure membership edits don't become renderlayer overrides with context.empty_sets(sets): - with avalon.maya.maintained_selection(): - cmds.select(sets, noExpand=True) - cmds.file(maya_path, - force=True, - typ="mayaAscii", - exportSelected=True, - preserveReferences=False, - channels=True, - constraints=True, - expressions=True, - constructionHistory=True) + with context.attribute_values(remap): + with avalon.maya.maintained_selection(): + cmds.select(sets, noExpand=True) + cmds.file(maya_path, + force=True, + typ="mayaAscii", + exportSelected=True, + preserveReferences=False, + channels=True, + constraints=True, + expressions=True, + constructionHistory=True) # Write the JSON data self.log.info("Extract json..") From e4214bc40dd079843f09f8f4e72447ce00ca552b Mon Sep 17 00:00:00 2001 From: aardschok Date: Wed, 2 Aug 2017 10:32:27 +0200 Subject: [PATCH 04/28] added colorbleed scripts menu --- colorbleed/maya/__init__.py | 32 ++--- colorbleed/maya/lib.py | 20 +++- colorbleed/maya/menu.json | 232 +++++++++++++++--------------------- colorbleed/maya/menu.py | 2 +- 4 files changed, 129 insertions(+), 157 deletions(-) diff --git a/colorbleed/maya/__init__.py b/colorbleed/maya/__init__.py index 3097683464..4dc6833923 100644 --- a/colorbleed/maya/__init__.py +++ b/colorbleed/maya/__init__.py @@ -88,7 +88,7 @@ def on_new(): maya.commands.reset_resolution() -def on_save(): +def on_save(nodes=None): """Automatically add IDs to new nodes Any transform of a mesh, without an existing ID, is given one automatically on file save. 
@@ -102,28 +102,28 @@ def on_save(): types = ["mesh", "shadingEngine", "file", "nurbsCurve"] # the items which need to pass the id to their parent - nodes = (set(cmds.ls(type=types, long=True)) - - set(cmds.ls(long=True, readOnly=True)) - - set(cmds.ls(long=True, lockedNodes=True))) + if not nodes: + nodes = (set(cmds.ls(type=types, long=True)) - + set(cmds.ls(long=True, readOnly=True)) - + set(cmds.ls(long=True, lockedNodes=True))) - transforms = set() - for n in cmds.ls(type=types, long=True): - # pass id to parent of node if in subtypes - relatives = cmds.listRelatives(n, parent=True, fullPath=True) - if not relatives: - continue + transforms = set() + for n in cmds.ls(type=types, long=True): + # pass id to parent of node if in subtypes + relatives = cmds.listRelatives(n, parent=True, fullPath=True) + if not relatives: + continue - for r in cmds.listRelatives(n, parent=True, fullPath=True): - transforms.add(r) + for r in cmds.listRelatives(n, parent=True, fullPath=True): + transforms.add(r) - # merge transforms and nodes in one set to make sure every item - # is unique - nodes |= transforms + # merge transforms and nodes in one set to make sure every item + # is unique + nodes |= transforms # Lead with asset ID from the database asset = os.environ["AVALON_ASSET"] asset_id = io.find_one({"type": "asset", "name": asset}) - for node in nodes: if node in defaults: continue diff --git a/colorbleed/maya/lib.py b/colorbleed/maya/lib.py index 4a8fd2238c..722662cc6d 100644 --- a/colorbleed/maya/lib.py +++ b/colorbleed/maya/lib.py @@ -5,7 +5,6 @@ import os import bson import json import logging -import pprint import contextlib from collections import OrderedDict, defaultdict @@ -671,7 +670,12 @@ def get_reference_node(path): Returns: node (str): name of the reference node in question """ - node = cmds.file(path, query=True, referenceNode=True) + try: + node = cmds.file(path, query=True, referenceNode=True) + except RuntimeError: + log.debug('Received file not loaded : "{}"'.format(path)) + return + reference_path = cmds.referenceQuery(path, filename=True) if os.path.normpath(path) == os.path.normpath(reference_path): return node @@ -760,9 +764,15 @@ def assign_look(nodes, subset="lookDefault"): # Group all nodes per asset id grouped = defaultdict(list) for node in nodes: - colorbleed_id = cmds.getAttr("{}.cbId".format(node)) - asset_id = colorbleed_id.split(":")[0] - grouped[asset_id].append(node) + colorbleed_id = _get_id(node) + if not colorbleed_id: + continue + + parts = colorbleed_id.split(":") + if len(parts) != 2: + continue + + grouped[parts[0]].append(node) for asset_id, asset_nodes in grouped.items(): # create objectId for database diff --git a/colorbleed/maya/menu.json b/colorbleed/maya/menu.json index 4931063ee7..0b14de274b 100644 --- a/colorbleed/maya/menu.json +++ b/colorbleed/maya/menu.json @@ -58,7 +58,7 @@ "tooltip": "" }, { - "command": "$COLORBLEED_SCRIPTS\\modeling\\selectOutlineUI", + "command": "$COLORBLEED_SCRIPTS\\modeling\\selectOutlineUI.py", "sourcetype": "file", "tags": [ "modeling", @@ -405,351 +405,351 @@ "Rigging": [ { "command": "$COLORBLEED_SCRIPTS\\rigging\\addCurveBetween.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "addCurveBetween", - "python" + "file" ], "title": "Add Curve Between" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\averageSkinWeights.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "averageSkinWeights", - "python" + "file" ], "title": "Average Skin Weights" }, { "command": 
"$COLORBLEED_SCRIPTS\\rigging\\cbSmoothSkinWeightUI.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "cbSmoothSkinWeightUI", - "python" + "file" ], "title": "CB Smooth Skin Weight UI" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\channelBoxManagerUI.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "channelBoxManagerUI", - "python" + "file" ], "title": "Channel Box Manager UI" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\characterAutorigger.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "characterAutorigger", - "python" + "file" ], "title": "Character Auto Rigger" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\connectUI.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "connectUI", - "python" + "file" ], "title": "Connect UI" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\copySkinWeightsLocal.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "copySkinWeightsLocal", - "python" + "file" ], "title": "Copy Skin Weights Local" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\createCenterLocator.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "createCenterLocator", - "python" + "file" ], "title": "Create Center Locator" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\freezeTransformToGroup.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "freezeTransformToGroup", - "python" + "file" ], "title": "Freeze Transform To Group" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\groupSelected.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "groupSelected", - "python" + "file" ], "title": "Group Selected" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\ikHandlePoleVectorLocator.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "ikHandlePoleVectorLocator", - "python" + "file" ], "title": "IK Handle Pole Vector Locator" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\jointOrientUI.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "jointOrientUI", - "python" + "file" ], "title": "Joint Orient UI" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\jointsOnCurve.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "jointsOnCurve", - "python" + "file" ], "title": "Joints On Curve" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\resetBindSelectedSkinJoints.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "resetBindSelectedSkinJoints", - "python" + "file" ], "title": "Reset Bind Selected Skin Joints" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\selectSkinclusterJointsFromSelectedComponents.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "selectSkinclusterJointsFromSelectedComponents", - "python" + "file" ], "title": "Select Skincluster Joints From Selected Components" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\selectSkinclusterJointsFromSelectedMesh.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "selectSkinclusterJointsFromSelectedMesh", - "python" + "file" ], "title": "Select Skincluster Joints From Selected Mesh" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\setJointLabels.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "setJointLabels", - "python" + "file" ], "title": "Set Joint Labels" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\setJointOrientationFromCurrentRotation.py", - "sourcetype": "python", + 
"sourcetype": "file", "tags": [ "rigging", "setJointOrientationFromCurrentRotation", - "python" + "file" ], "title": "Set Joint Orientation From Current Rotation" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\setSelectedJointsOrientationZero.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "setSelectedJointsOrientationZero", - "python" + "file" ], "title": "Set Selected Joints Orientation Zero" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\mirrorCurveShape.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "mirrorCurveShape", - "python" + "file" ], "title": "Mirror Curve Shape" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\setRotationOrderUI.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "setRotationOrderUI", - "python" + "file" ], "title": "Set Rotation Order UI" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\paintItNowUI.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "paintItNowUI", - "python" + "file" ], "title": "Paint It Now UI" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\parentScaleConstraint.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "parentScaleConstraint", - "python" + "file" ], "title": "Parent Scale Constraint" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\quickSetWeightsUI.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "quickSetWeightsUI", - "python" + "file" ], "title": "Quick Set Weights UI" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\rapidRig.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "rapidRig", - "python" + "file" ], "title": "Rapid Rig" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\regenerate_blendshape_targets.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "regenerate_blendshape_targets", - "python" + "file" ], "title": "Regenerate Blendshape Targets" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\removeRotationAxis.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "removeRotationAxis", - "python" + "file" ], "title": "Remove Rotation Axis" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\resetBindSelectedMeshes.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "resetBindSelectedMeshes", - "python" + "file" ], "title": "Reset Bind Selected Meshes" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\simpleControllerOnSelection.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "simpleControllerOnSelection", - "python" + "file" ], "title": "Simple Controller On Selection" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\simpleControllerOnSelectionHierarchy.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "simpleControllerOnSelectionHierarchy", - "python" + "file" ], "title": "Simple Controller On Selection Hierarchy" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\superRelativeCluster.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "superRelativeCluster", - "python" + "file" ], "title": "Super Relative Cluster" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\tfSmoothSkinWeight.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "tfSmoothSkinWeight", - "python" + "file" ], "title": "TF Smooth Skin Weight" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\toggleIntermediates.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "toggleIntermediates", - "python" + "file" ], 
"title": "Toggle Intermediates" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\toggleSegmentScaleCompensate.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "toggleSegmentScaleCompensate", - "python" + "file" ], "title": "Toggle Segment Scale Compensate" }, { "command": "$COLORBLEED_SCRIPTS\\rigging\\toggleSkinclusterDeformNormals.py", - "sourcetype": "python", + "sourcetype": "file", "tags": [ "rigging", "toggleSkinclusterDeformNormals", - "python" + "file" ], "title": "Toggle Skincluster Deform Normals" } @@ -1007,12 +1007,16 @@ ] }, { - "command": "$COLORBLEED_SCRIPTS\\shading\\LightLinkUI.py", + "command": "$COLORBLEED_SCRIPTS\\shading\\autoLookdevAssignment.py", "sourcetype": "file", - "tags": [ - "shading", - "LightLinkUI" - ], + "tags": ["shading", "lookdev", "assign", "shaders", "auto"], + "title": "Assign lookDefault Shader", + "tooltip": "Assign the latest 'lookDefault' to assets without any lookdev in the scene" + }, + { + "command": "$COLORBLEED_SCRIPTS\\shading\\LightLinkUi.py", + "sourcetype": "file", + "tags": ["shading", "light", "link", "ui"], "title": "Light Link UI", "tooltip": "" }, @@ -1029,7 +1033,7 @@ "tooltip": "" }, { - "command": "$COLORBLEED_SCRIPTS\\shading\\fixDefaultShaderSetBehavior", + "command": "$COLORBLEED_SCRIPTS\\shading\\fixDefaultShaderSetBehavior.py", "sourcetype": "file", "tags": [ "shading", @@ -1037,7 +1041,7 @@ "DefaultShaderSet", "Behavior" ], - "title": "fixDefaultShaderSetBehavior", + "title": "Fix Default Shader Set Behavior", "tooltip": "" }, { @@ -1147,7 +1151,7 @@ "tooltip": "" }, { - "command": "", + "command": "$COLORBLEED_SCRIPTS\\layout\\spPaint3d.py", "sourcetype": "file", "tags": ["layout", "spPaint3d", "paint", "tool"], "title": "SP Paint 3d", @@ -1232,60 +1236,42 @@ { "command": "", "sourcetype": "file", - "tags": [ - "particles", - "instancerToObjectsInstancesWithAnimation" - ], + "tags": ["particles", "instancerToObjectsInstancesWithAnimation"], "title": "instancerToObjectsInstancesWithAnimation", "tooltip": "" }, { "command": "", "sourcetype": "file", - "tags": [ - "particles", - "objectsToParticles" - ], + "tags": ["particles", "objectsToParticles"], "title": "objectsToParticles", "tooltip": "" }, { "command": "", "sourcetype": "file", - "tags": [ - "particles", - "add_particle_cacheFile_attrs" - ], + "tags": ["particles", "add_particle_cacheFile_attrs"], "title": "add_particle_cacheFile_attrs", "tooltip": "" }, { "command": "", "sourcetype": "file", - "tags": [ - "particles", - "mergeParticleSystems" - ], + "tags": ["particles", "mergeParticleSystems"], "title": "mergeParticleSystems", "tooltip": "" }, { "command": "", "sourcetype": "file", - "tags": [ - "particles", - "particlesToLocators" - ], + "tags": ["particles", "particlesToLocators"], "title": "particlesToLocators", "tooltip": "" }, { "command": "", "sourcetype": "file", - "tags": [ - "particles", - "instancerToObjectsWithAnimation" - ], + "tags": ["particles", "instancerToObjectsWithAnimation"], "title": "instancerToObjectsWithAnimation", "tooltip": "" }, @@ -1314,80 +1300,56 @@ { "command": "", "sourcetype": "file", - "tags": [ - "cleanup", - "selectIntermediateObjects" - ], + "tags": ["cleanup", "selectIntermediateObjects"], "title": "selectIntermediateObjects", "tooltip": "" }, { "command": "", "sourcetype": "file", - "tags": [ - "cleanup", - "resetViewportCache" - ], + "tags": ["cleanup", "resetViewportCache"], "title": "resetViewportCache", "tooltip": "" }, { "command": "", "sourcetype": "file", - "tags": [ - "cleanup", - 
"selectNonUniqueNames" - ], + "tags": ["cleanup", "selectNonUniqueNames"], "title": "selectNonUniqueNames", "tooltip": "" }, { "command": "", "sourcetype": "file", - "tags": [ - "cleanup", - "uniqifyNodeNames" - ], + "tags": ["cleanup", "uniqifyNodeNames"], "title": "uniqifyNodeNames", "tooltip": "" }, { "command": "", "sourcetype": "file", - "tags": [ - "cleanup", - "selectByType" - ], + "tags": ["cleanup", "selectByType"], "title": "selectByType", "tooltip": "" }, { "command": "", "sourcetype": "file", - "tags": [ - "cleanup", - "removeNamespaces" - ], + "tags": ["cleanup", "removeNamespaces"], "title": "removeNamespaces", "tooltip": "" }, { "command": "", "sourcetype": "file", - "tags": [ - "cleanup", - "autoRenameFileNodes" - ], + "tags": ["cleanup", "autoRenameFileNodes"], "title": "autoRenameFileNodes", "tooltip": "" }, { "command": "", "sourcetype": "file", - "tags": [ - "cleanup", - "remove_user_defined_attributes" - ], + "tags": ["cleanup", "remove_user_defined_attributes"], "title": "remove_user_defined_attributes", "tooltip": "" }, diff --git a/colorbleed/maya/menu.py b/colorbleed/maya/menu.py index 003c303097..a59d0310c7 100644 --- a/colorbleed/maya/menu.py +++ b/colorbleed/maya/menu.py @@ -11,7 +11,7 @@ self = sys.modules[__name__] self._menu = "colorbleed" # set colorbleed scripts path in environment keys -os.environ["COLORBLEED_SCRIPTS"] = r"P:\pipeline\dev\git\cbMayaScripts\cbMayaScripts" +os.environ["COLORBLEED_SCRIPTS"] = "P:\pipeline\dev\git\cbMayaScripts\cbMayaScripts" log = logging.getLogger(__name__) From 2c8a4eca64a81705b5d06cb60fd10436010a666d Mon Sep 17 00:00:00 2001 From: aardschok Date: Wed, 2 Aug 2017 10:37:19 +0200 Subject: [PATCH 05/28] updated loader to create a nice namespace --- .../plugins/maya/load/load_animation.py | 1 - colorbleed/plugins/maya/load/load_model.py | 40 ++++++++++--------- colorbleed/plugins/maya/load/load_rig.py | 1 + 3 files changed, 22 insertions(+), 20 deletions(-) diff --git a/colorbleed/plugins/maya/load/load_animation.py b/colorbleed/plugins/maya/load/load_animation.py index 6ef3d07110..e48b29ef62 100644 --- a/colorbleed/plugins/maya/load/load_animation.py +++ b/colorbleed/plugins/maya/load/load_animation.py @@ -23,7 +23,6 @@ class AbcLoader(api.Loader): # Create unique namespace for the cameras # Get name from asset being loaded - assert "_" in name, "Naming convention not followed" assetname = "{}_".format(name.split("_")[0]) namespace = maya.unique_namespace(assetname, format="%03d", diff --git a/colorbleed/plugins/maya/load/load_model.py b/colorbleed/plugins/maya/load/load_model.py index ad70ad3440..f81174c14e 100644 --- a/colorbleed/plugins/maya/load/load_model.py +++ b/colorbleed/plugins/maya/load/load_model.py @@ -1,7 +1,7 @@ -from maya import cmds +import maya.cmds as cmds from avalon import api -from avalon import maya +import avalon.maya class ModelLoader(api.Loader): @@ -10,22 +10,26 @@ class ModelLoader(api.Loader): families = ["colorbleed.model"] representations = ["ma"] - label = "Reference model" + label = "Reference Model" order = -10 icon = "code-fork" color = "orange" def process(self, name, namespace, context, data): - with maya.maintained_selection(): - nodes = cmds.file( - self.fname, - namespace=namespace, - reference=True, - returnNewNodes=True, - groupReference=True, - groupName="{}:{}".format(namespace, name) - ) + # Create a readable namespace + # Namespace should contain asset name and counter + # TEST_001{_descriptor} where `descriptor` can be `_abc` for example + assetname = 
"{}_".format(namespace.split("_")[0]) + namespace = avalon.maya.unique_namespace(assetname, format="%03d") + + with avalon.maya.maintained_selection(): + nodes = cmds.file(self.fname, + namespace=namespace, + reference=True, + returnNewNodes=True, + groupReference=True, + groupName="{}:{}".format(namespace, name)) self[:] = nodes @@ -54,15 +58,13 @@ class ModelGPUCacheLoader(api.Loader): cmds.loadPlugin("gpuCache", quiet=True) # Create transform with shape - transform = cmds.createNode("transform", - name=name) - cache = cmds.createNode("gpuCache", - parent=transform, - name="{0}Shape".format(name)) + node_name = "{0}Shape".format(name) + transform = cmds.createNode("transform", name=name) + cache = cmds.createNode("gpuCache", parent=transform, name=node_name) # Set the cache filepath - cmds.setAttr(cache + '.cacheFileName', path, type="string") - cmds.setAttr(cache + '.cacheGeomPath', "|", type="string") # root + cmds.setAttr('{}.cacheFileName'.format(cache), path, type="string") + cmds.setAttr('{}.cacheGeomPath'.format(cache), "|", type="string") # root # Select the transform cmds.select(transform, r=1) diff --git a/colorbleed/plugins/maya/load/load_rig.py b/colorbleed/plugins/maya/load/load_rig.py index 5604ec22c5..0dffefd84b 100644 --- a/colorbleed/plugins/maya/load/load_rig.py +++ b/colorbleed/plugins/maya/load/load_rig.py @@ -22,6 +22,7 @@ class RigLoader(api.Loader): def process(self, name, namespace, context, data): + assert "_" in name, "Naming convention not followed" assetname = "{}_".format(context["asset"]["name"]) unique_namespace = maya.unique_namespace(assetname, format="%03d") nodes = cmds.file(self.fname, From 816b9923982c094095ba857ced0e6a4f04051075 Mon Sep 17 00:00:00 2001 From: aardschok Date: Wed, 2 Aug 2017 10:44:38 +0200 Subject: [PATCH 06/28] removed redundant modules --- colorbleed/plugins/publish/integrate_asset.py | 80 ------ .../plugins/publish/post_look_integrate.py | 40 --- colorbleed/plugins/publish/pre_integrate.py | 251 ------------------ 3 files changed, 371 deletions(-) delete mode 100644 colorbleed/plugins/publish/integrate_asset.py delete mode 100644 colorbleed/plugins/publish/post_look_integrate.py delete mode 100644 colorbleed/plugins/publish/pre_integrate.py diff --git a/colorbleed/plugins/publish/integrate_asset.py b/colorbleed/plugins/publish/integrate_asset.py deleted file mode 100644 index fd4469f871..0000000000 --- a/colorbleed/plugins/publish/integrate_asset.py +++ /dev/null @@ -1,80 +0,0 @@ -import os -import errno -import shutil - -import pyblish.api -from avalon import io - - -class IntegrateAsset(pyblish.api.InstancePlugin): - """Write to files and metadata - - This plug-in exposes your data to others by encapsulating it - into a new version. - - Schema: - Data is written in the following format. - ____________________ - | | - | version | - | ________________ | - | | | | - | | representation | | - | |________________| | - | | | | - | | ... 
| | - | |________________| | - |____________________| - - """ - - label = "Integrate Asset" - order = pyblish.api.IntegratorOrder + 0.1 - families = ["colorbleed.model", - "colorbleed.rig", - "colorbleed.animation", - "colorbleed.camera", - "colorbleed.lookdev", - "colorbleed.texture", - "colorbleed.historyLookdev", - "colorbleed.group"] - - def process(self, instance): - - # get needed data - traffic = instance.data["traffic"] - representations = instance.data["representations"] - - self.log.info("Registering {} items".format(len(representations))) - io.insert_many(representations) - - # moving files - for src, dest in traffic: - self.log.info("Copying file .. {} -> {}".format(src, dest)) - self.copy_file(src, dest) - - self.log.info("Removing temporary files and folders ...") - stagingdir = instance.data["stagingDir"] - shutil.rmtree(stagingdir) - - def copy_file(self, src, dst): - """ Copy given source to destination - - Arguments: - src (str): the source file which needs to be copied - dst (str): the destination of the sourc file - Returns: - None - """ - - dirname = os.path.dirname(dst) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - raise - - shutil.copy(src, dst) diff --git a/colorbleed/plugins/publish/post_look_integrate.py b/colorbleed/plugins/publish/post_look_integrate.py deleted file mode 100644 index e66828b3c8..0000000000 --- a/colorbleed/plugins/publish/post_look_integrate.py +++ /dev/null @@ -1,40 +0,0 @@ -import json -import os - - -import colorbleed.maya.lib as lib - -import pyblish.api - - -class IntegrateAsset(pyblish.api.InstancePlugin): - """Remap source paths for lookdev and textures - - """ - - label = "Remap source paths" - order = pyblish.api.IntegratorOrder + 0.15 - families = ["colorbleed.lookdev", - "colorbleed.texture"] - - def process(self, instance): - - family = instance.data['family'] - resources = instance.data['resources'] - version_folder = instance.data['versionFolder'] - - if family == "colorbleed.texture": - try: - lib.remap_resource_nodes(resources, folder=version_folder) - except Exception as e: - self.log.error(e) - - if family == "colorbleed.lookdev": - try: - tmp_dir = lib.maya_temp_folder() - resource_file = os.path.join(tmp_dir, "resources.json") - with open(resource_file, "r") as f: - resources = json.load(f) - lib.remap_resource_nodes(resources) - except Exception as e: - self.log.error(e) diff --git a/colorbleed/plugins/publish/pre_integrate.py b/colorbleed/plugins/publish/pre_integrate.py deleted file mode 100644 index a6c798914d..0000000000 --- a/colorbleed/plugins/publish/pre_integrate.py +++ /dev/null @@ -1,251 +0,0 @@ -import os -import logging - -import pyblish.api -from avalon import api, io -import colorbleed.filetypes as filetypes - - -log = logging.getLogger(__name__) - - -class PreIntegrateAsset(pyblish.api.InstancePlugin): - """Resolve any dependency issies - - This plug-in resolves any paths which, if not updated might break - the published file. - - The order of families is important, when working with lookdev you want to - first publish the texture, update the texture paths in the nodes and then - publish the shading network. Same goes for file dependent assets. 
- """ - - label = "Pre Intergrate Asset" - order = pyblish.api.IntegratorOrder - families = ["colorbleed.model", - "colorbleed.rig", - "colorbleed.animation", - "colorbleed.camera", - "colorbleed.lookdev", - "colorbleed.texture", - "colorbleed.historyLookdev", - "colorbleed.group"] - - def process(self, instance): - - # Required environment variables - PROJECT = os.environ["AVALON_PROJECT"] - ASSET = instance.data.get("asset") or os.environ["AVALON_ASSET"] - SILO = os.environ["AVALON_SILO"] - LOCATION = os.getenv("AVALON_LOCATION") - - # todo(marcus): avoid hardcoding labels in the integrator - representation_labels = {".ma": "Maya Ascii", - ".source": "Original source file", - ".abc": "Alembic"} - - context = instance.context - # Atomicity - # - # Guarantee atomic publishes - each asset contains - # an identical set of members. - # __ - # / o - # / \ - # | o | - # \ / - # o __/ - # - assert all(result["success"] for result in context.data["results"]), ( - "Atomicity not held, aborting.") - - # Assemble - # - # | - # v - # ---> <---- - # ^ - # | - # - stagingdir = instance.data.get("stagingDir") - assert stagingdir, ("Incomplete instance \"%s\": " - "Missing reference to staging area." % instance) - - # extra check if stagingDir actually exists and is available - - self.log.debug("Establishing staging directory @ %s" % stagingdir) - - project = io.find_one({"type": "project"}) - asset = io.find_one({"name": ASSET}) - - assert all([project, asset]), ("Could not find current project or " - "asset '%s'" % ASSET) - - subset = self.get_subset(asset, instance) - - # get next version - latest_version = io.find_one({"type": "version", - "parent": subset["_id"]}, - {"name": True}, - sort=[("name", -1)]) - - next_version = 1 - if latest_version is not None: - next_version += latest_version["name"] - - self.log.debug("Next version: %i" % next_version) - - version_data = self.create_version_data(context, instance) - version = self.create_version(subset=subset, - version_number=next_version, - locations=[LOCATION], - data=version_data) - - self.log.debug("Creating version ...") - version_id = io.insert_one(version).inserted_id - - # Write to disk - # _ - # | | - # _| |_ - # ____\ / - # |\ \ / \ - # \ \ v \ - # \ \________. 
- # \|________| - # - root = api.registered_root() - template_data = {"root": root, - "project": PROJECT, - "silo": SILO, - "asset": ASSET, - "subset": subset["name"], - "version": version["name"]} - - template_publish = project["config"]["template"]["publish"] - - representations = [] - traffic = [] - staging_content = os.listdir(stagingdir) - for v, fname in enumerate(staging_content): - - name, ext = os.path.splitext(fname) - template_data["representation"] = ext[1:] - - src = os.path.join(stagingdir, fname) - dst = template_publish.format(**template_data) - if v == 0: - instance.data["versionFolder"] = os.path.dirname(dst) - - # Files to copy as if or to specific folder - if ext in filetypes.accepted_images_types: - dirname = os.path.dirname(dst) - dst = os.path.join(dirname, fname) - - # Backwards compatibility - if fname == ".metadata.json": - dirname = os.path.dirname(dst) - dst = os.path.join(dirname, fname) - - # copy source to destination (library) - traffic.append([src, dst]) - - representation = { - "schema": "avalon-core:representation-2.0", - "type": "representation", - "parent": version_id, - "name": ext[1:], - "data": {"label": representation_labels.get(ext)}, - "dependencies": instance.data.get("dependencies", "").split(), - - # Imprint shortcut to context - # for performance reasons. - "context": { - "project": PROJECT, - "asset": ASSET, - "silo": SILO, - "subset": subset["name"], - "version": version["name"], - "representation": ext[1:] - } - } - representations.append(representation) - - # store data for database and source / destinations - instance.data["representations"] = representations - instance.data["traffic"] = traffic - - return representations - - def get_subset(self, asset, instance): - - subset = io.find_one({"type": "subset", - "parent": asset["_id"], - "name": instance.data["subset"]}) - - if subset is None: - subset_name = instance.data["subset"] - self.log.info("Subset '%s' not found, creating.." 
% subset_name) - - _id = io.insert_one({ - "schema": "avalon-core:subset-2.0", - "type": "subset", - "name": subset_name, - "data": {}, - "parent": asset["_id"] - }).inserted_id - - subset = io.find_one({"_id": _id}) - - return subset - - def create_version(self, subset, version_number, locations, data=None): - """ Copy given source to destination - - Arguments: - subset (dict): the registered subset of the asset - version_number (int): the version number - locations (list): the currently registered locations - """ - # Imprint currently registered location - version_locations = [location for location in locations if - location is not None] - - return {"schema": "avalon-core:version-2.0", - "type": "version", - "parent": subset["_id"], - "name": version_number, - "locations": version_locations, - "data": data} - - def create_version_data(self, context, instance): - """Create the data collection for th version - - Args: - context: the current context - instance: the current instance being published - - Returns: - dict: the required information with instance.data as key - """ - - families = [] - current_families = instance.data.get("families", list()) - instance_family = instance.data.get("family", None) - - families += current_families - if instance_family is not None: - families.append(instance_family) - - # create relative source path for DB - relative_path = os.path.relpath(context.data["currentFile"], - api.registered_root()) - source = os.path.join("{root}", relative_path).replace("\\", "/") - - version_data = {"families": families, - "time": context.data["time"], - "author": context.data["user"], - "source": source, - "comment": context.data.get("comment")} - - return dict(instance.data, **version_data) From 0e7999adb15beab441ab7a4ab68c9e8753e7cc64 Mon Sep 17 00:00:00 2001 From: aardschok Date: Wed, 2 Aug 2017 11:44:43 +0200 Subject: [PATCH 07/28] removed redudant modues, improved validate_unique_node_ids --- .../maya/publish/_validate_look_node_ids.py | 41 ------------------- .../publish/validate_naming_convention.py | 34 --------------- .../maya/publish/validate_unique_node_ids.py | 26 ++++++++---- 3 files changed, 17 insertions(+), 84 deletions(-) delete mode 100644 colorbleed/plugins/maya/publish/_validate_look_node_ids.py delete mode 100644 colorbleed/plugins/maya/publish/validate_naming_convention.py diff --git a/colorbleed/plugins/maya/publish/_validate_look_node_ids.py b/colorbleed/plugins/maya/publish/_validate_look_node_ids.py deleted file mode 100644 index fcb91701e7..0000000000 --- a/colorbleed/plugins/maya/publish/_validate_look_node_ids.py +++ /dev/null @@ -1,41 +0,0 @@ -import pyblish.api -import colorbleed.api - - -class ValidateLookNodeIds(pyblish.api.InstancePlugin): - """Validate nodes have colorbleed id attributes - - All look sets should have id attributes. 
- - """ - - order = colorbleed.api.ValidatePipelineOrder - families = ['colorbleed.look'] - hosts = ['maya'] - label = 'Look Id Attributes' - actions = [colorbleed.api.SelectInvalidAction, - colorbleed.api.GenerateUUIDsOnInvalidAction] - - @staticmethod - def get_invalid(instance): - import maya.cmds as cmds - - nodes = instance.data["lookSets"] - - # Ensure all nodes have a cbId - invalid = list() - for node in nodes: - uuid = cmds.attributeQuery("mbId", node=node, exists=True) - if not uuid: - invalid.append(node) - - return invalid - - def process(self, instance): - """Process all meshes""" - - invalid = self.get_invalid(instance) - - if invalid: - raise RuntimeError("Nodes found without " - "asset IDs: {0}".format(invalid)) diff --git a/colorbleed/plugins/maya/publish/validate_naming_convention.py b/colorbleed/plugins/maya/publish/validate_naming_convention.py deleted file mode 100644 index 7dbf9ad3f7..0000000000 --- a/colorbleed/plugins/maya/publish/validate_naming_convention.py +++ /dev/null @@ -1,34 +0,0 @@ -import re - -import pyblish.api -import colorbleed.api - - -class ValidateNamingConvention(pyblish.api.InstancePlugin): - - label = "" - families = ["colorbleed.model"] - host = ["maya"] - actions = [colorbleed.api.SelectInvalidAction] - - @staticmethod - def get_invalid(instance): - - invalid = [] - # todo: change pattern to company standard - pattern = re.compile("[a-zA-Z]+_[A-Z]{3}") - - nodes = list(instance) - for node in nodes: - match = pattern.match(node) - if not match: - invalid.append(node) - - return invalid - - def process(self, instance): - - invalid = self.get_invalid(instance) - if invalid: - self.log.error("Found invalid naming convention. Failed noted :\n" - "%s" % invalid) diff --git a/colorbleed/plugins/maya/publish/validate_unique_node_ids.py b/colorbleed/plugins/maya/publish/validate_unique_node_ids.py index 8e3ccb8a43..9dbd62e7ff 100644 --- a/colorbleed/plugins/maya/publish/validate_unique_node_ids.py +++ b/colorbleed/plugins/maya/publish/validate_unique_node_ids.py @@ -10,14 +10,17 @@ class ValidateUniqueNodeIds(pyblish.api.InstancePlugin): """Validate nodes have colorbleed id attributes""" order = colorbleed.api.ValidatePipelineOrder - families = ['colorbleed.model'] - hosts = ['maya'] label = 'Unique Id Attributes' + hosts = ['maya'] + families = ['colorbleed.model', + 'colorbleed.lookdev', + 'colorbleed.rig'] + actions = [colorbleed.api.SelectInvalidAction, colorbleed.api.GenerateUUIDsOnInvalidAction] - @staticmethod - def get_invalid_dict(instance): + @classmethod + def get_invalid_dict(cls, instance): """Return a dictionary mapping of id key to list of member nodes""" uuid_attr = "cbId" @@ -25,18 +28,21 @@ class ValidateUniqueNodeIds(pyblish.api.InstancePlugin): # Collect each id with their members ids = defaultdict(list) for member in instance: - has_attr = cmds.attributeQuery(uuid_attr, node=member, exists=True) - if not has_attr: + try: + object_id = cmds.getAttr("{}.{}".format(member, uuid_attr)) + except Exception as exception: + # Object will node have the attribute so skip + cls.log.debug(exception) continue - mbid = cmds.getAttr("{}.{}".format(member, uuid_attr)) - ids[mbid].append(member) + + ids[object_id].append(member) # Skip those without IDs (if everything should have an ID that should # be another validation) ids.pop(None, None) # Take only the ids with more than one member - invalid = dict((id, members) for id, members in ids.iteritems() if + invalid = dict((_id, members) for _id, members in ids.iteritems() if len(members) > 1) return 
invalid @@ -61,3 +67,5 @@ class ValidateUniqueNodeIds(pyblish.api.InstancePlugin): if invalid: raise RuntimeError("Nodes found with non-unique " "asset IDs: {0}".format(invalid)) + + From 63deb2a652d8831858649e775f1000e2570b6ae5 Mon Sep 17 00:00:00 2001 From: aardschok Date: Wed, 2 Aug 2017 11:45:46 +0200 Subject: [PATCH 08/28] improved Generate UUID action --- colorbleed/action.py | 88 +++++++++++--------------------------------- 1 file changed, 21 insertions(+), 67 deletions(-) diff --git a/colorbleed/action.py b/colorbleed/action.py index 382699035e..360b17ccc7 100644 --- a/colorbleed/action.py +++ b/colorbleed/action.py @@ -1,11 +1,9 @@ # absolute_import is needed to counter the `module has no cmds error` in Maya from __future__ import absolute_import -import os import uuid from maya import cmds - import pyblish.api @@ -164,7 +162,7 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action): instance = result["instance"] errored_instances.append(instance) - # Apply pyblish.logic to get the instances for the plug-in + # Apply pyblish logic to get the instances for the plug-in instances = pyblish.api.instances_by_plugin(errored_instances, plugin) # Get the nodes from the all instances that ran through this plug-in @@ -178,78 +176,34 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action): self.log.info("No invalid nodes found.") return - # Ensure unique (process each node only once) + # Ensure unique ( process each node only once ) invalid = list(set(invalid)) # Parse context from current file - self.log.info("Parsing current context..") - print(">>> DEBUG CONTEXT :", context) - print(">>> DEBUG CONTEXT DATA:", context.data) + self.log.info("Updating node IDs ...") + # Update the attributes + self._update_id_attribute(invalid) - # # Generate and add the ids to the nodes - node_ids = self.generate_ids(context, invalid) - self.apply_ids(node_ids) self.log.info("Generated ids on nodes: {0}".format(invalid)) - def get_context(self, instance=None): + def _update_id_attribute(self, nodes): + """Delete the id attribute - PROJECT = os.environ["AVALON_PROJECT"] - ASSET = instance.data.get("asset") or os.environ["AVALON_ASSET"] - SILO = os.environ["AVALON_SILO"] - LOCATION = os.getenv("AVALON_LOCATION") - - return {"project": PROJECT, - "asset": ASSET, - "silo": SILO, - "location": LOCATION} - - def generate_ids(self, context, nodes): - """Generate cb UUIDs for nodes. 
- - The identifiers are formatted like: - assets:character/test:bluey:46D221D9-4150-8E49-6B17-43B04BFC26B6 - - This is a concatenation of: - - entity (shots or assets) - - folders (parent hierarchy) - - asset (the name of the asset) - - uuid (unique id for node in the scene) - - Raises: - RuntimeError: When context can't be parsed of the current asset - - Returns: - dict: node, uuid dictionary - - """ - - # Make a copy of the context - data = context.copy() - - # Define folders - - node_ids = dict() - for node in nodes: - # Generate a unique ID per node - data['uuid'] = uuid.uuid4() - unique_id = "{asset}:{item}:{uuid}".format(**data) - node_ids[node] = unique_id - - return node_ids - - def apply_ids(self, node_ids): - """Apply the created unique IDs to the node Args: - node_ids (dict): each node with a unique id - - Returns: - None + nodes (list): all nodes to remove the attribute from """ - attribute = "mbId" - for node, id in node_ids.items(): - # check if node has attribute - if not cmds.attributeQuery(attribute, node=node, exists=True): - cmds.addAttr(node, longName=attribute, dataType="string") + for node in nodes: + + # get the database asset id + attr = "{}.cbId".format(node) + id_attr = cmds.getAttr(attr) + asset_id = id_attr.split(":")[0] + + # create a new unique id + _, uid = str(uuid.uuid4()).rsplit("-", 1) + cb_uid = "{}:{}".format(asset_id, uid) + + # set the new id + cmds.setAttr(attr, cb_uid, type="string") - cmds.setAttr("{}.{}".format(node, attribute), id) From c2cb08df8387ad3ddc359c1c5d1f23faeae2ead5 Mon Sep 17 00:00:00 2001 From: aardschok Date: Wed, 2 Aug 2017 12:05:03 +0200 Subject: [PATCH 09/28] updated families to have abbreviation attribute --- colorbleed/plugin.py | 6 ++++++ colorbleed/plugins/maya/create/colorbleed_animation.py | 6 ++++-- colorbleed/plugins/maya/create/colorbleed_camera.py | 5 +++-- colorbleed/plugins/maya/create/colorbleed_groom.py | 5 +++-- colorbleed/plugins/maya/create/colorbleed_instancer.py | 5 +++-- colorbleed/plugins/maya/create/colorbleed_mayaascii.py | 5 +++-- 6 files changed, 22 insertions(+), 10 deletions(-) diff --git a/colorbleed/plugin.py b/colorbleed/plugin.py index 63a8b0e278..6c7a7870b0 100644 --- a/colorbleed/plugin.py +++ b/colorbleed/plugin.py @@ -1,5 +1,6 @@ import tempfile import pyblish.api +import avalon.maya ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05 @@ -8,6 +9,11 @@ ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2 ValidateMeshOrder = pyblish.api.ValidatorOrder + 0.3 +class Creator(avalon.maya.Creator): + + abbreviation = "" + + class Extractor(pyblish.api.InstancePlugin): """Extractor base class. 
diff --git a/colorbleed/plugins/maya/create/colorbleed_animation.py b/colorbleed/plugins/maya/create/colorbleed_animation.py index 2030042a01..d9bbfb3d7a 100644 --- a/colorbleed/plugins/maya/create/colorbleed_animation.py +++ b/colorbleed/plugins/maya/create/colorbleed_animation.py @@ -1,14 +1,16 @@ from collections import OrderedDict -import avalon.maya + +import colorbleed.plugin from colorbleed.maya import lib -class CreateAnimation(avalon.maya.Creator): +class CreateAnimation(colorbleed.plugin.Creator): """THe animated objects in the scene""" name = "animationDefault" label = "Animation" family = "colorbleed.animation" + abbreviation = "anim" def __init__(self, *args, **kwargs): super(CreateAnimation, self).__init__(*args, **kwargs) diff --git a/colorbleed/plugins/maya/create/colorbleed_camera.py b/colorbleed/plugins/maya/create/colorbleed_camera.py index 749945caf3..50fd85949f 100644 --- a/colorbleed/plugins/maya/create/colorbleed_camera.py +++ b/colorbleed/plugins/maya/create/colorbleed_camera.py @@ -1,14 +1,15 @@ from collections import OrderedDict -import avalon.maya +import colorbleed.plugin from colorbleed.maya import lib -class CreateCamera(avalon.maya.Creator): +class CreateCamera(colorbleed.plugin.Creator): """Single baked camera extraction""" name = "cameraDefault" label = "Camera" family = "colorbleed.camera" + abbreviation = "cam" def __init__(self, *args, **kwargs): super(CreateCamera, self).__init__(*args, **kwargs) diff --git a/colorbleed/plugins/maya/create/colorbleed_groom.py b/colorbleed/plugins/maya/create/colorbleed_groom.py index 6f8d0f72f7..82815c7dc7 100644 --- a/colorbleed/plugins/maya/create/colorbleed_groom.py +++ b/colorbleed/plugins/maya/create/colorbleed_groom.py @@ -1,9 +1,10 @@ -import avalon.maya +import colorbleed.plugin -class CreateGroom(avalon.maya.Creator): +class CreateGroom(colorbleed.plugin.Creator): """Hair / fur definition for an asset""" name = "groomDefault" label = "Groom" family = "colorbleed.groom" + abbreviation = "groom" diff --git a/colorbleed/plugins/maya/create/colorbleed_instancer.py b/colorbleed/plugins/maya/create/colorbleed_instancer.py index 4721253752..4ed159d626 100644 --- a/colorbleed/plugins/maya/create/colorbleed_instancer.py +++ b/colorbleed/plugins/maya/create/colorbleed_instancer.py @@ -1,9 +1,10 @@ -import avalon.maya +import colorbleed.plugin -class CreateInstance(avalon.maya.Creator): +class CreateInstance(colorbleed.plugin.Creator): """Maya instancer using cached particles""" name = "instanceDefault" label = "Instance" family = "colorbleed.instance" + abbreviation = "inst" diff --git a/colorbleed/plugins/maya/create/colorbleed_mayaascii.py b/colorbleed/plugins/maya/create/colorbleed_mayaascii.py index 2c19e13711..4a29c4dac1 100644 --- a/colorbleed/plugins/maya/create/colorbleed_mayaascii.py +++ b/colorbleed/plugins/maya/create/colorbleed_mayaascii.py @@ -1,9 +1,10 @@ -import avalon.maya +import colorbleed.plugin -class CreateMayaAscii(avalon.maya.Creator): +class CreateMayaAscii(colorbleed.plugin.Creator): """Raw Maya Ascii file of the item(s)""" name = "mayaAscii" label = "Maya Ascii" family = "colorbleed.mayaAscii" + abbreviation = "ascii" From f35818264b30d6391cd817a62e4d19cfa9f18c21 Mon Sep 17 00:00:00 2001 From: aardschok Date: Wed, 2 Aug 2017 14:53:27 +0200 Subject: [PATCH 10/28] removed colorbleed creator override, restored families --- colorbleed/plugin.py | 5 ----- colorbleed/plugins/maya/create/colorbleed_animation.py | 8 ++++---- colorbleed/plugins/maya/create/colorbleed_camera.py | 5 ++--- 
colorbleed/plugins/maya/create/colorbleed_groom.py | 5 ++--- colorbleed/plugins/maya/create/colorbleed_instancer.py | 5 ++--- colorbleed/plugins/maya/create/colorbleed_mayaascii.py | 5 ++--- 6 files changed, 12 insertions(+), 21 deletions(-) diff --git a/colorbleed/plugin.py b/colorbleed/plugin.py index 6c7a7870b0..70b2c76c6f 100644 --- a/colorbleed/plugin.py +++ b/colorbleed/plugin.py @@ -9,11 +9,6 @@ ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2 ValidateMeshOrder = pyblish.api.ValidatorOrder + 0.3 -class Creator(avalon.maya.Creator): - - abbreviation = "" - - class Extractor(pyblish.api.InstancePlugin): """Extractor base class. diff --git a/colorbleed/plugins/maya/create/colorbleed_animation.py b/colorbleed/plugins/maya/create/colorbleed_animation.py index d9bbfb3d7a..1703cfe59f 100644 --- a/colorbleed/plugins/maya/create/colorbleed_animation.py +++ b/colorbleed/plugins/maya/create/colorbleed_animation.py @@ -1,16 +1,15 @@ from collections import OrderedDict -import colorbleed.plugin +import avalon.maya from colorbleed.maya import lib -class CreateAnimation(colorbleed.plugin.Creator): +class CreateAnimation(avalon.maya.Creator): """THe animated objects in the scene""" name = "animationDefault" label = "Animation" - family = "colorbleed.animation" - abbreviation = "anim" + family = "colorbleed.anim" def __init__(self, *args, **kwargs): super(CreateAnimation, self).__init__(*args, **kwargs) @@ -34,3 +33,4 @@ class CreateAnimation(colorbleed.plugin.Creator): data["visibleOnly"] = False self.data = data + self.options["abbreviation"] = self.abbreviation \ No newline at end of file diff --git a/colorbleed/plugins/maya/create/colorbleed_camera.py b/colorbleed/plugins/maya/create/colorbleed_camera.py index 50fd85949f..749945caf3 100644 --- a/colorbleed/plugins/maya/create/colorbleed_camera.py +++ b/colorbleed/plugins/maya/create/colorbleed_camera.py @@ -1,15 +1,14 @@ from collections import OrderedDict -import colorbleed.plugin +import avalon.maya from colorbleed.maya import lib -class CreateCamera(colorbleed.plugin.Creator): +class CreateCamera(avalon.maya.Creator): """Single baked camera extraction""" name = "cameraDefault" label = "Camera" family = "colorbleed.camera" - abbreviation = "cam" def __init__(self, *args, **kwargs): super(CreateCamera, self).__init__(*args, **kwargs) diff --git a/colorbleed/plugins/maya/create/colorbleed_groom.py b/colorbleed/plugins/maya/create/colorbleed_groom.py index 82815c7dc7..6f8d0f72f7 100644 --- a/colorbleed/plugins/maya/create/colorbleed_groom.py +++ b/colorbleed/plugins/maya/create/colorbleed_groom.py @@ -1,10 +1,9 @@ -import colorbleed.plugin +import avalon.maya -class CreateGroom(colorbleed.plugin.Creator): +class CreateGroom(avalon.maya.Creator): """Hair / fur definition for an asset""" name = "groomDefault" label = "Groom" family = "colorbleed.groom" - abbreviation = "groom" diff --git a/colorbleed/plugins/maya/create/colorbleed_instancer.py b/colorbleed/plugins/maya/create/colorbleed_instancer.py index 4ed159d626..4721253752 100644 --- a/colorbleed/plugins/maya/create/colorbleed_instancer.py +++ b/colorbleed/plugins/maya/create/colorbleed_instancer.py @@ -1,10 +1,9 @@ -import colorbleed.plugin +import avalon.maya -class CreateInstance(colorbleed.plugin.Creator): +class CreateInstance(avalon.maya.Creator): """Maya instancer using cached particles""" name = "instanceDefault" label = "Instance" family = "colorbleed.instance" - abbreviation = "inst" diff --git a/colorbleed/plugins/maya/create/colorbleed_mayaascii.py 
b/colorbleed/plugins/maya/create/colorbleed_mayaascii.py index 4a29c4dac1..2c19e13711 100644 --- a/colorbleed/plugins/maya/create/colorbleed_mayaascii.py +++ b/colorbleed/plugins/maya/create/colorbleed_mayaascii.py @@ -1,10 +1,9 @@ -import colorbleed.plugin +import avalon.maya -class CreateMayaAscii(colorbleed.plugin.Creator): +class CreateMayaAscii(avalon.maya.Creator): """Raw Maya Ascii file of the item(s)""" name = "mayaAscii" label = "Maya Ascii" family = "colorbleed.mayaAscii" - abbreviation = "ascii" From 8becf73791d560325914b4afd2a94f62cb434f69 Mon Sep 17 00:00:00 2001 From: aardschok Date: Thu, 3 Aug 2017 15:36:53 +0200 Subject: [PATCH 11/28] improved functions and ready for look assignment tool --- colorbleed/maya/__init__.py | 3 +- colorbleed/maya/lib.py | 54 ++++++++++------- colorbleed/plugins/maya/load/load_look.py | 58 +------------------ .../plugins/maya/publish/collect_look.py | 5 -- 4 files changed, 36 insertions(+), 84 deletions(-) diff --git a/colorbleed/maya/__init__.py b/colorbleed/maya/__init__.py index 4dc6833923..300688cc7b 100644 --- a/colorbleed/maya/__init__.py +++ b/colorbleed/maya/__init__.py @@ -99,7 +99,8 @@ def on_save(nodes=None): defaults = ["initialShadingGroup", "initialParticleSE"] # the default items which always want to have an ID - types = ["mesh", "shadingEngine", "file", "nurbsCurve"] + # objectSets include: shading engines, vray object properties + types = ["mesh", "objectSet", "file", "nurbsCurve"] # the items which need to pass the id to their parent if not nodes: diff --git a/colorbleed/maya/lib.py b/colorbleed/maya/lib.py index 722662cc6d..7f6a21a66e 100644 --- a/colorbleed/maya/lib.py +++ b/colorbleed/maya/lib.py @@ -740,7 +740,7 @@ def assign_look_by_version(nodes, version_id): reference=True, returnNewNodes=True) else: - log.info("Reusing existing lookdev..") + log.info("Reusing existing lookdev '{}'".format(reference_node)) shader_nodes = cmds.referenceQuery(reference_node, nodes=True) # Assign relationships @@ -768,26 +768,29 @@ def assign_look(nodes, subset="lookDefault"): if not colorbleed_id: continue - parts = colorbleed_id.split(":") - if len(parts) != 2: - continue - + parts = colorbleed_id.split(":", 1) grouped[parts[0]].append(node) for asset_id, asset_nodes in grouped.items(): # create objectId for database - asset_id = bson.ObjectId(asset_id) - subset = io.find_one({"type": "subset", - "name": subset, - "parent": asset_id}) + try: + asset_id = bson.ObjectId(asset_id) + except Exception: + log.warning("Asset ID is not compatible with bson") + continue + subset_data = io.find_one({"type": "subset", + "name": subset, + "parent": asset_id}) - assert subset, "No subset found for {}".format(asset_id) + if not subset_data: + log.warning("No subset '{}' found for {}".format(subset, asset_id)) + continue # get last version - version = io.find_one({"parent": subset['_id'], + version = io.find_one({"parent": subset_data['_id'], "type": "version", "data.families": - {"$in":["colorbleed.lookdev"]} + {"$in": ["colorbleed.lookdev"]} }, sort=[("name", -1)], projection={"_id": True}) @@ -816,28 +819,37 @@ def apply_shaders(relationships, shadernodes, nodes): None """ - # attributes = relationships.get("attributes", []) shader_sets = relationships.get("sets", []) - if isinstance(nodes, set): - nodes = list(nodes) - shading_engines = cmds.ls(shadernodes, type="shadingEngine", long=True) - assert len(shading_engines) > 0, ("Error in retrieving shading engine " + assert len(shading_engines) > 0, ("Error in retrieving shading engines " "from 
reference") - # get all nodes which we need to link - ns_nodes = cmds.ls(nodes, long=True) + # region compute lookup + ns_nodes_by_id = defaultdict(list) + for node in nodes: + ns_nodes_by_id[_get_id(node)].append(node) + + shading_engines_by_id = defaultdict(list) + for shad in shading_engines: + shading_engines_by_id[_get_id(shad)].append(shad) + # endregion + + # region assign for shader_set in shader_sets: # collect all unique IDs of the set members shader_uuid = shader_set["uuid"] member_uuids = [member["uuid"] for member in shader_set["members"]] - filtered_nodes = filter_by_id(ns_nodes, member_uuids) - shading_engine = filter_by_id(shading_engines, [shader_uuid]) + filtered_nodes = list() + for uuid in member_uuids: + filtered_nodes.extend(ns_nodes_by_id[uuid]) + shading_engine = shading_engines_by_id[shader_uuid] assert len(shading_engine) == 1, ("Could not find the correct " "shading engine with cbId " "'{}'".format(shader_uuid)) cmds.sets(filtered_nodes, forceElement=shading_engine[0]) + + # endregion diff --git a/colorbleed/plugins/maya/load/load_look.py b/colorbleed/plugins/maya/load/load_look.py index 567cdf4bf5..d0b05d1252 100644 --- a/colorbleed/plugins/maya/load/load_look.py +++ b/colorbleed/plugins/maya/load/load_look.py @@ -69,62 +69,6 @@ class LookLoader(api.Loader): # Get all nodes which belong to a matching name space # Currently this is the safest way to get all the nodes - namespace_nodes = self.get_namespace_nodes(assetname) - lib.apply_shaders(relationships, nodes, namespace_nodes) + lib.apply_shaders(relationships, nodes) self[:] = nodes - - def get_namespace_nodes(self, assetname): - """ - Get all nodes of namespace `asset_*` and check if they have a shader - assigned, if not add to list - Args: - context (dict): current context of asset - - Returns: - list - - """ - - list_nodes = [] - - # remove basic namespaces - namespaces = [ns for ns in cmds.namespaceInfo(listOnlyNamespaces=True) - if ns not in ["UI", "shared"] or not ns.endswith("look")] - - for namespace in namespaces: - if not namespace.startswith(assetname): - continue - - ns_nodes = cmds.namespaceInfo(namespace, - listOnlyDependencyNodes=True) - # get reference nodes - list_nodes.extend([self.has_default_shader(n) for n in ns_nodes]) - - # ensure unique nodes and kick out any None types - result = [node for node in list_nodes if node is not None] - - return result - - def has_default_shader(self, node): - """Check if the nodes have `initialShadingGroup` shader assigned - - Args: - node (str): node to check - - Returns: - str - """ - - shaders = cmds.listConnections(node, type="shadingEngine") - if shaders is None or "initialShadingGroup" in shaders: - # return transform node - transform = cmds.listRelatives(node, - parent=True, - type="transform", - fullPath=True) - - if not transform: - return - - return transform[0] diff --git a/colorbleed/plugins/maya/publish/collect_look.py b/colorbleed/plugins/maya/publish/collect_look.py index f11699dd8f..73deec968b 100644 --- a/colorbleed/plugins/maya/publish/collect_look.py +++ b/colorbleed/plugins/maya/publish/collect_look.py @@ -1,11 +1,6 @@ -import os -import pprint - from maya import cmds import pyblish.api -import avalon.io as io - from cb.utils.maya import context, shaders import cbra.utils.maya.node_uuid as id_utils From afd490ce33c2204d189b66f6f66918cc44fdd4dc Mon Sep 17 00:00:00 2001 From: aardschok Date: Thu, 3 Aug 2017 15:51:53 +0200 Subject: [PATCH 12/28] addes explicit bson error catching in assign_look --- colorbleed/maya/lib.py | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/colorbleed/maya/lib.py b/colorbleed/maya/lib.py index 7f6a21a66e..547335cf6a 100644 --- a/colorbleed/maya/lib.py +++ b/colorbleed/maya/lib.py @@ -775,7 +775,7 @@ def assign_look(nodes, subset="lookDefault"): # create objectId for database try: asset_id = bson.ObjectId(asset_id) - except Exception: + except bson.errors.InvalidId: log.warning("Asset ID is not compatible with bson") continue subset_data = io.find_one({"type": "subset", From e95d5507d2f80bbbe5a73151f960350160d80537 Mon Sep 17 00:00:00 2001 From: aardschok Date: Fri, 4 Aug 2017 12:06:41 +0200 Subject: [PATCH 13/28] Added create attribute function --- colorbleed/maya/lib.py | 69 +++++++++++++++++++++++++++++++++++++++++- 1 file changed, 68 insertions(+), 1 deletion(-) diff --git a/colorbleed/maya/lib.py b/colorbleed/maya/lib.py index 547335cf6a..fcd144182b 100644 --- a/colorbleed/maya/lib.py +++ b/colorbleed/maya/lib.py @@ -21,6 +21,24 @@ project = io.find_one({"type": "project", "_id": False}) TEMPLATE = project["config"]["template"]["publish"] +ATTRIBUTE_DICT = {"int": {"attributeType": "long"}, + "str": {"dataType": "string"}, + "unicode": {"dataType": "string"}, + "float": {"attributeType": "double"}, + "bool": {"attributeType": "bool"}} + +SHAPE_ATTRS = ["castsShadows", + "receiveShadows", + "motionBlur", + "primaryVisibility", + "smoothShading", + "visibleInReflections", + "visibleInRefractions", + "doubleSided", + "opposite"] + +SHAPE_ATTRS = set(SHAPE_ATTRS) + def maintained_selection(arg=None): if arg is not None: @@ -681,6 +699,53 @@ def get_reference_node(path): return node +def process_attribute_change(attribute_data, node): + """Adjust attributes based on the value from the attribute data + + Args: + attribute_data (dict): attribute as key with value as value + node (str): name of the node + + Returns: + None + """ + + for attribute, value in attribute_data.items(): + value_type = type(value).__name__ + kwargs = ATTRIBUTE_DICT[value_type] + if not cmds.attributeQuery(attribute, node=node, exists=True): + log.debug("Creating attribute '{}' on " + "'{}'".format(attribute, node)) + cmds.addAttr(node, longName=attribute, **kwargs) + + node_attr = "{}.{}".format(node, attribute) + if "dataType" in kwargs: + attr_type = kwargs["dataType"] + cmds.setAttr(node_attr, value, type=attr_type) + else: + cmds.setAttr(node_attr, value) + + +def apply_attributes(attributes, nodes_by_id): + """Alter the attributes to match the state when publishing + + Apply attribute settings from the publish to the node in the scene based + on the UUID which is stored in the cbId attribute. 
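For illustration of the data this function consumes, here is a minimal sketch; the cbId value and node path are invented placeholders rather than values from this repository, and the attribute names are taken from the SHAPE_ATTRS list introduced above:

    # Hypothetical relationship data as stored with a published look.
    attributes = [
        {"uuid": "598c1f2ab3d4e5f60718293a:4b02a2f3a7c9",
         "attributes": {"castsShadows": False, "motionBlur": True}},
    ]

    # Scene nodes grouped by the value of their cbId attribute.
    nodes_by_id = {
        "598c1f2ab3d4e5f60718293a:4b02a2f3a7c9": [
            "|char_001:body_GEO|char_001:body_GEOShape",
        ],
    }

    apply_attributes(attributes, nodes_by_id)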
+ + Args: + attributes (list): list of dictionaries + nodes_by_id (dict): collection of nodes based on UUID + {uuid: [node, node]} + + """ + + for attr_data in attributes: + nodes = nodes_by_id[attr_data["uuid"]] + attr_value = attr_data["attributes"] + for node in nodes: + process_attribute_change(attr_value, node) + + def list_looks(asset_id): """Return all look subsets for the given asset @@ -819,6 +884,7 @@ def apply_shaders(relationships, shadernodes, nodes): None """ + attributes = relationships.get("attributes", []) shader_sets = relationships.get("sets", []) shading_engines = cmds.ls(shadernodes, type="shadingEngine", long=True) @@ -851,5 +917,6 @@ def apply_shaders(relationships, shadernodes, nodes): "'{}'".format(shader_uuid)) cmds.sets(filtered_nodes, forceElement=shading_engine[0]) - # endregion + + apply_attributes(attributes, ns_nodes_by_id) From 8b2a129066ee1453338155f0082cc5bc62675b9b Mon Sep 17 00:00:00 2001 From: aardschok Date: Fri, 4 Aug 2017 12:07:57 +0200 Subject: [PATCH 14/28] added version validation for assumed template data --- colorbleed/plugins/maya/publish/collect_look.py | 1 - colorbleed/plugins/publish/integrate.py | 12 +++++++++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/colorbleed/plugins/maya/publish/collect_look.py b/colorbleed/plugins/maya/publish/collect_look.py index 73deec968b..4b1a16b194 100644 --- a/colorbleed/plugins/maya/publish/collect_look.py +++ b/colorbleed/plugins/maya/publish/collect_look.py @@ -127,7 +127,6 @@ class CollectLook(pyblish.api.InstancePlugin): # Collect textures, resources = [self.collect_resource(n) for n in files] instance.data["resources"] = resources - # pprint.pprint(resources) # Log a warning when no relevant sets were retrieved for the look. if not instance.data["lookData"]["sets"]: diff --git a/colorbleed/plugins/publish/integrate.py b/colorbleed/plugins/publish/integrate.py index 2c21866514..9a1c81df68 100644 --- a/colorbleed/plugins/publish/integrate.py +++ b/colorbleed/plugins/publish/integrate.py @@ -109,7 +109,17 @@ class IntegrateAsset(pyblish.api.InstancePlugin): if latest_version is not None: next_version += latest_version["name"] - self.log.debug("Next version: %i" % next_version) + self.log.info("Verifying version from assumed destination") + + assumed_data = instance.data["assumedTemplateData"] + assumed_version = assumed_data["version"] + if assumed_version != next_version: + raise AttributeError("Assumed version 'v{0:03d}' does not match" + "next version in database " + "('v{1:03d}')".format(assumed_version, + next_version)) + + self.log.debug("Next version: v{0:03d}".format(next_version)) version_data = self.create_version_data(context, instance) version = self.create_version(subset=subset, From 52e20f63ff573656ebd5644835faf72f9bdb426c Mon Sep 17 00:00:00 2001 From: aardschok Date: Fri, 4 Aug 2017 13:55:08 +0200 Subject: [PATCH 15/28] removed node argument in on_save, improved function --- colorbleed/maya/__init__.py | 43 +++++++++++++++++++++---------------- 1 file changed, 24 insertions(+), 19 deletions(-) diff --git a/colorbleed/maya/__init__.py b/colorbleed/maya/__init__.py index 300688cc7b..792952eeef 100644 --- a/colorbleed/maya/__init__.py +++ b/colorbleed/maya/__init__.py @@ -88,7 +88,7 @@ def on_new(): maya.commands.reset_resolution() -def on_save(nodes=None): +def on_save(): """Automatically add IDs to new nodes Any transform of a mesh, without an existing ID, is given one automatically on file save. 
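As a hedged sketch of how a handler like on_save comes to run automatically on file save (the registration itself is not shown in this series, so both the wiring and the helper name install_on_save below are assumptions), Maya's scene-message API is one common mechanism. OpenMaya passes a clientData argument to such callbacks, which is consistent with the throwaway `_` parameter these handlers receive in a later patch of this series:

    # Minimal sketch: trigger a callback just before Maya writes the scene.
    from maya import OpenMaya

    def install_on_save(callback):
        # Returns an MCallbackId, removable later with MMessage.removeCallback.
        return OpenMaya.MSceneMessage.addCallback(
            OpenMaya.MSceneMessage.kBeforeSave, callback)

    # Usage: callback_id = install_on_save(on_save)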
@@ -98,29 +98,33 @@ def on_save(nodes=None): defaults = ["initialShadingGroup", "initialParticleSE"] - # the default items which always want to have an ID - # objectSets include: shading engines, vray object properties - types = ["mesh", "objectSet", "file", "nurbsCurve"] + ignore = set(cmds.ls(long=True, readOnly=True)) + locked = set(cmds.ls(long=True, lockedNodes=True)) + ignore |= locked + + types = ["shadingEngine", "file"] + shape_types = ["mesh", "nurbsCurve"] # the items which need to pass the id to their parent - if not nodes: - nodes = (set(cmds.ls(type=types, long=True)) - - set(cmds.ls(long=True, readOnly=True)) - - set(cmds.ls(long=True, lockedNodes=True))) + transforms = set() + nodes = set(cmds.ls(type=types, long=True)) + for n in cmds.ls(type=shape_types, long=True): + if n in defaults: + continue - transforms = set() - for n in cmds.ls(type=types, long=True): - # pass id to parent of node if in subtypes - relatives = cmds.listRelatives(n, parent=True, fullPath=True) - if not relatives: - continue + # generate id on parent of shape nodes + parents = cmds.listRelatives(n, parent=True, fullPath=True) + if not parents: + raise RuntimeError("Bug! Shape has no parent: {0}".format(n)) - for r in cmds.listRelatives(n, parent=True, fullPath=True): - transforms.add(r) + for parent in parents: + transforms.add(parent) - # merge transforms and nodes in one set to make sure every item - # is unique - nodes |= transforms + # Add the collected transform to the nodes + nodes |= transforms + + # Remove the ignored nodes + nodes -= ignore # Lead with asset ID from the database asset = os.environ["AVALON_ASSET"] @@ -129,3 +133,4 @@ def on_save(nodes=None): if node in defaults: continue _set_uuid(str(asset_id["_id"]), node) + From 3e22683eed7a9f2634a0748a57ac6f3e1f8c8472 Mon Sep 17 00:00:00 2001 From: aardschok Date: Fri, 4 Aug 2017 13:58:37 +0200 Subject: [PATCH 16/28] cosmetics --- colorbleed/plugins/publish/integrate.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/colorbleed/plugins/publish/integrate.py b/colorbleed/plugins/publish/integrate.py index 9a1c81df68..07038cdb1b 100644 --- a/colorbleed/plugins/publish/integrate.py +++ b/colorbleed/plugins/publish/integrate.py @@ -21,7 +21,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): publish the shading network. Same goes for file dependent assets. 
""" - label = "Intergrate Asset" + label = "Integrate Asset" order = pyblish.api.IntegratorOrder families = ["colorbleed.model", "colorbleed.rig", @@ -37,7 +37,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.info("Integrating Asset in to the database ...") self.register(instance) - self.intergrate(instance) + self.integrate(instance) self.log.info("Removing temporary files and folders ...") stagingdir = instance.data["stagingDir"] @@ -194,7 +194,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): return representations - def intergrate(self, instance): + def integrate(self, instance): """Register the representations and move the files Through the stored `representations` and `transfers` From 5ef5bd15a44849910af9fb5316886a08e5e4a218 Mon Sep 17 00:00:00 2001 From: aardschok Date: Fri, 4 Aug 2017 15:12:40 +0200 Subject: [PATCH 17/28] both transform and shape get ID for lookdev purposes --- colorbleed/filetypes.py | 2 -- colorbleed/maya/__init__.py | 40 ++++++++++++++----------------------- 2 files changed, 15 insertions(+), 27 deletions(-) delete mode 100644 colorbleed/filetypes.py diff --git a/colorbleed/filetypes.py b/colorbleed/filetypes.py deleted file mode 100644 index fbefeff9be..0000000000 --- a/colorbleed/filetypes.py +++ /dev/null @@ -1,2 +0,0 @@ -accepted_images_types = [".png", ".jpg", ".tga", ".tiff"] -ignored_images_types = [".pds"] diff --git a/colorbleed/maya/__init__.py b/colorbleed/maya/__init__.py index 792952eeef..8bf5764a0a 100644 --- a/colorbleed/maya/__init__.py +++ b/colorbleed/maya/__init__.py @@ -70,14 +70,14 @@ def _copy_uuid(source, target): cmds.setAttr(target_attr, attribute_value, type="string") -def on_init(): +def on_init(_): avalon.logger.info("Running callback on init..") maya.commands.reset_frame_range() maya.commands.reset_resolution() -def on_new(): +def on_new(_): avalon.logger.info("Running callback on new..") # Load dependencies @@ -88,7 +88,7 @@ def on_new(): maya.commands.reset_resolution() -def on_save(): +def on_save(_): """Automatically add IDs to new nodes Any transform of a mesh, without an existing ID, is given one automatically on file save. @@ -96,32 +96,22 @@ def on_save(): avalon.logger.info("Running callback on save..") - defaults = ["initialShadingGroup", "initialParticleSE"] + # establish set of nodes to ignore + ignore = set(["initialShadingGroup", "initialParticleSE"]) + ignore |= set(cmds.ls(long=True, readOnly=True)) + ignore |= set(cmds.ls(long=True, lockedNodes=True)) - ignore = set(cmds.ls(long=True, readOnly=True)) - locked = set(cmds.ls(long=True, lockedNodes=True)) - ignore |= locked - - types = ["shadingEngine", "file"] - shape_types = ["mesh", "nurbsCurve"] + types = ["shadingEngine", "file", "mesh", "nurbsCurve"] # the items which need to pass the id to their parent - transforms = set() nodes = set(cmds.ls(type=types, long=True)) - for n in cmds.ls(type=shape_types, long=True): - if n in defaults: - continue - - # generate id on parent of shape nodes - parents = cmds.listRelatives(n, parent=True, fullPath=True) - if not parents: - raise RuntimeError("Bug! 
Shape has no parent: {0}".format(n)) - - for parent in parents: - transforms.add(parent) # Add the collected transform to the nodes - nodes |= transforms + transforms = cmds.listRelatives(list(nodes), + parent=True, + fullPath=True) or [] + + nodes |= set(transforms) # Remove the ignored nodes nodes -= ignore @@ -129,8 +119,8 @@ def on_save(): # Lead with asset ID from the database asset = os.environ["AVALON_ASSET"] asset_id = io.find_one({"type": "asset", "name": asset}) + + # generate the ids for node in nodes: - if node in defaults: - continue _set_uuid(str(asset_id["_id"]), node) From b1151924ad9be3e56f9216c81ca4f26b0f4f230a Mon Sep 17 00:00:00 2001 From: aardschok Date: Fri, 4 Aug 2017 16:18:58 +0200 Subject: [PATCH 18/28] added explicit attribute check --- .../plugins/maya/publish/validate_unique_node_ids.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/colorbleed/plugins/maya/publish/validate_unique_node_ids.py b/colorbleed/plugins/maya/publish/validate_unique_node_ids.py index 9dbd62e7ff..edae718c02 100644 --- a/colorbleed/plugins/maya/publish/validate_unique_node_ids.py +++ b/colorbleed/plugins/maya/publish/validate_unique_node_ids.py @@ -28,13 +28,10 @@ class ValidateUniqueNodeIds(pyblish.api.InstancePlugin): # Collect each id with their members ids = defaultdict(list) for member in instance: - try: - object_id = cmds.getAttr("{}.{}".format(member, uuid_attr)) - except Exception as exception: - # Object will node have the attribute so skip - cls.log.debug(exception) + if not cmds.attributeQuery(uuid_attr, node=member, exists=True): continue + object_id = cmds.getAttr("{}.{}".format(member, uuid_attr)) ids[object_id].append(member) # Skip those without IDs (if everything should have an ID that should From ac6b2c271e5ff17c20407cc40c068f58ac3cc438 Mon Sep 17 00:00:00 2001 From: aardschok Date: Fri, 4 Aug 2017 16:25:12 +0200 Subject: [PATCH 19/28] removed redundant module --- colorbleed/maya/lib.py | 26 -------------------------- 1 file changed, 26 deletions(-) diff --git a/colorbleed/maya/lib.py b/colorbleed/maya/lib.py index fcd144182b..9d1419fe5a 100644 --- a/colorbleed/maya/lib.py +++ b/colorbleed/maya/lib.py @@ -637,32 +637,6 @@ def _get_id(node): return attribute_value -def filter_by_id(nodes, uuids): - """Filter all nodes which match the UUIDs - - Args: - nodes (list): collection of nodes to check - uuids (list): a list of UUIDs which are linked to the shader - - Returns: - list: matching nodes - """ - - filtered_nodes = [] - for node in nodes: - if node is None: - continue - - attribute_value = _get_id(node) - - if attribute_value not in uuids or attribute_value is None: - continue - - filtered_nodes.append(node) - - return filtered_nodes - - def get_representation_file(representation, template=TEMPLATE): """ Rebuild the filepath of the representation's context From 8731c4f507bd6ff4df6e1e8e42f664a6d427f6a2 Mon Sep 17 00:00:00 2001 From: aardschok Date: Fri, 4 Aug 2017 17:40:44 +0200 Subject: [PATCH 20/28] namespace resolved, fixed name in creator --- colorbleed/plugins/maya/create/colorbleed_animation.py | 5 ++--- colorbleed/plugins/maya/load/load_animation.py | 7 +------ colorbleed/plugins/maya/load/load_rig.py | 3 +-- 3 files changed, 4 insertions(+), 11 deletions(-) diff --git a/colorbleed/plugins/maya/create/colorbleed_animation.py b/colorbleed/plugins/maya/create/colorbleed_animation.py index 1703cfe59f..68f7ec2744 100644 --- a/colorbleed/plugins/maya/create/colorbleed_animation.py +++ b/colorbleed/plugins/maya/create/colorbleed_animation.py @@ 
-9,7 +9,7 @@ class CreateAnimation(avalon.maya.Creator): name = "animationDefault" label = "Animation" - family = "colorbleed.anim" + family = "colorbleed.animation" def __init__(self, *args, **kwargs): super(CreateAnimation, self).__init__(*args, **kwargs) @@ -32,5 +32,4 @@ class CreateAnimation(avalon.maya.Creator): # frame range. data["visibleOnly"] = False - self.data = data - self.options["abbreviation"] = self.abbreviation \ No newline at end of file + self.data = data \ No newline at end of file diff --git a/colorbleed/plugins/maya/load/load_animation.py b/colorbleed/plugins/maya/load/load_animation.py index e48b29ef62..8f05b7ac7b 100644 --- a/colorbleed/plugins/maya/load/load_animation.py +++ b/colorbleed/plugins/maya/load/load_animation.py @@ -23,10 +23,7 @@ class AbcLoader(api.Loader): # Create unique namespace for the cameras # Get name from asset being loaded - assetname = "{}_".format(name.split("_")[0]) - namespace = maya.unique_namespace(assetname, - format="%03d", - suffix="_abc") + namespace = "{name}_abc".format(name=name) nodes = cmds.file(self.fname, namespace=namespace, sharedReferenceFile=False, @@ -51,8 +48,6 @@ class CurvesLoader(api.Loader): icon = "question" def process(self, name, namespace, context, data): - from maya import cmds - from avalon import maya cmds.loadPlugin("atomImportExport.mll", quiet=True) diff --git a/colorbleed/plugins/maya/load/load_rig.py b/colorbleed/plugins/maya/load/load_rig.py index 0dffefd84b..f314468d2c 100644 --- a/colorbleed/plugins/maya/load/load_rig.py +++ b/colorbleed/plugins/maya/load/load_rig.py @@ -22,7 +22,6 @@ class RigLoader(api.Loader): def process(self, name, namespace, context, data): - assert "_" in name, "Naming convention not followed" assetname = "{}_".format(context["asset"]["name"]) unique_namespace = maya.unique_namespace(assetname, format="%03d") nodes = cmds.file(self.fname, @@ -62,8 +61,8 @@ class RigLoader(api.Loader): else: asset = "{}".format(asset_name) + cmds.select([output, controls], noExpand=True) with maya.maintained_selection(): - cmds.select([output, controls], noExpand=True) # TODO(marcus): Hardcoding the family here, better separate this. 
dependencies = [context["representation"]["_id"]] From 4a2f30d56ad36550d3075aa58509d3c1da759868 Mon Sep 17 00:00:00 2001 From: aardschok Date: Fri, 4 Aug 2017 17:51:53 +0200 Subject: [PATCH 21/28] Cosmetics for error messages --- colorbleed/maya/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/colorbleed/maya/lib.py b/colorbleed/maya/lib.py index 9d1419fe5a..5b1972ddf0 100644 --- a/colorbleed/maya/lib.py +++ b/colorbleed/maya/lib.py @@ -665,7 +665,7 @@ def get_reference_node(path): try: node = cmds.file(path, query=True, referenceNode=True) except RuntimeError: - log.debug('Received file not loaded : "{}"'.format(path)) + log.debug('File is not referenced : "{}"'.format(path)) return reference_path = cmds.referenceQuery(path, filename=True) From e71da0000cdbf98d83a271f9143e21728eb4fc69 Mon Sep 17 00:00:00 2001 From: aardschok Date: Fri, 4 Aug 2017 18:02:30 +0200 Subject: [PATCH 22/28] improved apply_attributes function --- colorbleed/maya/lib.py | 39 +++++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/colorbleed/maya/lib.py b/colorbleed/maya/lib.py index 5b1972ddf0..22134c04e5 100644 --- a/colorbleed/maya/lib.py +++ b/colorbleed/maya/lib.py @@ -673,31 +673,34 @@ def get_reference_node(path): return node -def process_attribute_change(attribute_data, node): +def set_attribute(attribute, value, node): """Adjust attributes based on the value from the attribute data + If an attribute does not exists on the target it will be added with + the dataType being controlled by the value type. + Args: - attribute_data (dict): attribute as key with value as value + attribute (str): name of the attribute to change + value: the value to change to attribute to node (str): name of the node Returns: None """ - for attribute, value in attribute_data.items(): - value_type = type(value).__name__ - kwargs = ATTRIBUTE_DICT[value_type] - if not cmds.attributeQuery(attribute, node=node, exists=True): - log.debug("Creating attribute '{}' on " - "'{}'".format(attribute, node)) - cmds.addAttr(node, longName=attribute, **kwargs) + value_type = type(value).__name__ + kwargs = ATTRIBUTE_DICT[value_type] + if not cmds.attributeQuery(attribute, node=node, exists=True): + log.debug("Creating attribute '{}' on " + "'{}'".format(attribute, node)) + cmds.addAttr(node, longName=attribute, **kwargs) - node_attr = "{}.{}".format(node, attribute) - if "dataType" in kwargs: - attr_type = kwargs["dataType"] - cmds.setAttr(node_attr, value, type=attr_type) - else: - cmds.setAttr(node_attr, value) + node_attr = "{}.{}".format(node, attribute) + if "dataType" in kwargs: + attr_type = kwargs["dataType"] + cmds.setAttr(node_attr, value, type=attr_type) + else: + cmds.setAttr(node_attr, value) def apply_attributes(attributes, nodes_by_id): @@ -714,10 +717,10 @@ def apply_attributes(attributes, nodes_by_id): """ for attr_data in attributes: - nodes = nodes_by_id[attr_data["uuid"]] + node = nodes_by_id[attr_data["uuid"]] attr_value = attr_data["attributes"] - for node in nodes: - process_attribute_change(attr_value, node) + for attr, value in attr_value: + set_attribute(attr, value, node) def list_looks(asset_id): From 31e02669aba3255d502ca22465b95e4ecc785b0d Mon Sep 17 00:00:00 2001 From: aardschok Date: Mon, 7 Aug 2017 09:45:07 +0200 Subject: [PATCH 23/28] fixed dict iteration for attr_value --- colorbleed/maya/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/colorbleed/maya/lib.py b/colorbleed/maya/lib.py index 22134c04e5..9469168e79 100644 
--- a/colorbleed/maya/lib.py +++ b/colorbleed/maya/lib.py @@ -719,7 +719,7 @@ def apply_attributes(attributes, nodes_by_id): for attr_data in attributes: node = nodes_by_id[attr_data["uuid"]] attr_value = attr_data["attributes"] - for attr, value in attr_value: + for attr, value in attr_value.items(): set_attribute(attr, value, node) From a712214f4cfd65e9d282da2a90cbff0f95e92582 Mon Sep 17 00:00:00 2001 From: aardschok Date: Mon, 7 Aug 2017 10:20:29 +0200 Subject: [PATCH 24/28] added explanaition for split, ensure unique namespace --- colorbleed/plugins/maya/load/load_animation.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/colorbleed/plugins/maya/load/load_animation.py b/colorbleed/plugins/maya/load/load_animation.py index 8f05b7ac7b..058dc85562 100644 --- a/colorbleed/plugins/maya/load/load_animation.py +++ b/colorbleed/plugins/maya/load/load_animation.py @@ -23,7 +23,16 @@ class AbcLoader(api.Loader): # Create unique namespace for the cameras # Get name from asset being loaded - namespace = "{name}_abc".format(name=name) + # Assuming name is subset name from the animation, we split the number + # suffix from the name to ensure the namespace is unique + name = name.split("_")[0] + namespace = maya.unique_namespace("{}_".format(name), + format="%03d", + suffix="_abc") + + # hero_001 (abc) + # asset_counter{optional} + nodes = cmds.file(self.fname, namespace=namespace, sharedReferenceFile=False, @@ -33,7 +42,6 @@ class AbcLoader(api.Loader): returnNewNodes=True) # load colorbleed ID attribute - self[:] = nodes From 8dd3099fe4cbee9518f2881ac147d54c21618aca Mon Sep 17 00:00:00 2001 From: aardschok Date: Mon, 7 Aug 2017 10:41:35 +0200 Subject: [PATCH 25/28] disabled file name validation --- ...te_filename_convention.py => _validate_filename_convention.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename colorbleed/plugins/maya/publish/{validate_filename_convention.py => _validate_filename_convention.py} (100%) diff --git a/colorbleed/plugins/maya/publish/validate_filename_convention.py b/colorbleed/plugins/maya/publish/_validate_filename_convention.py similarity index 100% rename from colorbleed/plugins/maya/publish/validate_filename_convention.py rename to colorbleed/plugins/maya/publish/_validate_filename_convention.py From b90b169079fbc2c7d799dedc26ca2fcf79a87bae Mon Sep 17 00:00:00 2001 From: aardschok Date: Mon, 7 Aug 2017 12:06:56 +0200 Subject: [PATCH 26/28] added containerise to look loader --- colorbleed/maya/commands.py | 637 --------------------------------- colorbleed/maya/interactive.py | 288 --------------- colorbleed/maya/lib.py | 21 +- 3 files changed, 16 insertions(+), 930 deletions(-) delete mode 100644 colorbleed/maya/commands.py delete mode 100644 colorbleed/maya/interactive.py diff --git a/colorbleed/maya/commands.py b/colorbleed/maya/commands.py deleted file mode 100644 index c2943128a3..0000000000 --- a/colorbleed/maya/commands.py +++ /dev/null @@ -1,637 +0,0 @@ -"""Used for scripting - -These are used in other scripts and mostly require explicit input, -such as which specific nodes they apply to. - -For interactive use, see :mod:`interactive.py` - -""" - -import sys - -from maya import cmds - -from . import lib - -if sys.version_info[0] == 3: - basestring = str - -# Flags -LocalSpace = 1 << 0 -WorldSpace = 1 << 1 - - -def auto_connect2(src, dst): - """Connect to `dst` based on what `dst` is and `src` has available - - TODO: Offer optionbox of choices when multiple inputs are possible. 
- For example, connecting a mesh to a wrap node could either - go to driverMesh, or baseMesh. - - """ - - to_from = { - "mesh": ( - ["mesh", (".outMesh", ".inMesh")], - ), - "nurbsSurface": ( - ["nurbsSurface", (".local", ".create")], - ), - "nurbsCurve": ( - ["nurbsCurve", (".local", ".create")], - ), - "decomposeMatrix": ( - ["transform", (".worldMatrix", ".inputMatrix")], - ), - "transform": ( - [ - "transform", ( - (".translate", ".rotate", ".scale"), - (".translate", ".rotate", ".scale")) - ], - [ - "decomposeMatrix", ( - (".outTranslate", ".outRotate", ".outScale"), - (".translate", ".rotate", ".scale")) - ], - ), - "objectSet": ( - ["dagNode", (".message", ".dagSetMembers")], - ["entity", (".message", ".dnSetMembers")], - ), - } - - support = next( - (to_from[to] for to in to_from - if to in cmds.nodeType(dst, inherited=True)), None - ) - - if not support: - # Guess, based on available inputs, - # what is the closest match - print("Guessing..") - pass - - assert support, "No supported outputs for '%s'" % (cmds.nodeType(src)) - - out_, in_ = next( - (typ for typ in support - if typ[0] in cmds.nodeType(src, inherited=True)), (None, None) - ) - - assert in_ and out_, "No matching attributes found for %s" % src - - if not isinstance(in_, tuple): - in_ = (in_,) - - if not isinstance(out_, tuple): - out_ = (out_,) - - assert len(in_) == len(out_) - - map(lambda io: cmds.connectAttr(src + io[0], - dst + io[1], - force=True), zip(out_, in_)) - - -def auto_connect(src, dst): - """Connect `src` to `dst` via the most likely input and output - - Usage: - >>> # Create cube and transfer mesh into new shape - >>> shape = cmds.createNode("mesh", name="newShape") - >>> transform, generator = cmds.polyCube(name="original") - >>> auto_connect(generator, shape) - >>> cmds.delete(transform) - - """ - - out_ = { - "mesh": ".outMesh", - "nurbsSurface": ".local", - "nurbsCurve": ".local", - "decomposeMatrix": (".outTranslate", - ".outRotate", - ".outScale"), - "transform": (".translate", - ".rotate", - ".scale", - ".visibility") - } - - in_ = { - "mesh": ".inMesh", - "nurbsSurface": ".create", - "nurbsCurve": ".create", - "decomposeMatrix": "inputMatrix", - "transform": (".translate", - ".rotate", - ".scale", - ".visibility"), - "objectSet": ["dnSetMembers", "dgSetMembers"] - } - - try: - in_ = in_[cmds.nodeType(dst)] - except KeyError: - in_ = next((attr for attr in (".input", - ".inputGeometry") - if cmds.objExists(dst + attr)), None) - - try: - out_ = out_[cmds.nodeType(src)] - except KeyError: - out_ = next((attr for attr in (".output", - ".outputGeometry") - if cmds.objExists(src + attr)), None) - - assert in_ and out_, "No matching attributes found for %s" % src - - if not isinstance(in_, tuple): - in_ = (in_,) - - if not isinstance(out_, tuple): - out_ = (out_,) - - assert len(in_) == len(out_) - - map(lambda io: cmds.connectAttr(src + io[0], - dst + io[1], - force=True), zip(out_, in_)) - - -@lib.maintained_selection -def match_transform(src, dst): - """Transform `src` to `dst`, taking worldspace into account - - Arguments: - src (str): Absolute path to source transform - dst (str): Absolute path to destination transform - - """ - - try: - parent = cmds.listRelatives(src, parent=True)[0] - except Exception: - parent = None - - node_decompose = cmds.createNode("decomposeMatrix") - node_multmatrix = cmds.createNode("multMatrix") - - connections = { - dst + ".worldMatrix": node_multmatrix + ".matrixIn[0]", - node_multmatrix + ".matrixSum": node_decompose + ".inputMatrix", - node_decompose + 
".outputTranslate": src + ".translate", - node_decompose + ".outputRotate": src + ".rotate", - node_decompose + ".outputScale": src + ".scale", - } - - if parent: - connections.update({ - parent + ".worldInverseMatrix": node_multmatrix + ".matrixIn[1]" - }) - - for s, d in connections.iteritems(): - cmds.connectAttr(s, d, force=True) - - cmds.refresh() - - cmds.delete([node_decompose, node_multmatrix]) - - -def connect_shapes(src, dst): - """Connect geometry of `src` to source geometry of dst - - Arguments: - src (str): Name of source shape - dst (list): Names of destination nodes - - """ - - out_attr = None - - if cmds.nodeType(src) == "mesh": - out_attr = ".outMesh" - - elif cmds.nodeType(src) in ("nurbsSurface", "nurbsCurve"): - out_attr = ".local" - - else: - for wildcard in (".output",): - if cmds.objExists(src + wildcard): - out_attr = wildcard - break - - if not out_attr: - return cmds.warning("Could not detect output of %s" % src) - - for target in dst: - in_attr = None - - if cmds.nodeType(target) == "mesh": - in_attr = ".inMesh" - - elif cmds.nodeType(target) in ("nurbsSurface", "nurbsCurve"): - in_attr = ".create" - - else: - # Support unspecific nodes with common input attributes - for support, wildcard in (("mesh", ".inputPolymesh"), - ("mesh", ".inputMesh"), - ("mesh", ".inputGeometry")): - if cmds.objExists(target + wildcard): - if not cmds.nodeType(src) == support: - cmds.warning("Could not connect: %s -> %s" % (src, - target)) - break - - in_attr = wildcard - break - - if not in_attr: - cmds.warning("Could not detect input of %s" % target) - continue - - try: - cmds.connectAttr(src + out_attr, - target + in_attr, - force=True) - except Exception as e: - cmds.warning("Could not connect: %s%s -> %s%s (%s)" % ( - src, out_attr, - target, in_attr, e) - ) - - -def connect_transform(driver, driven, source=WorldSpace, compensate=False): - """Connect translation, rotation and scale via decomposeMatrix - - Arguments: - driver (str): Absolute path to driver - driven (str): Absolute path to driven - source (str, optional): Either WorldSpace or LocalSpace, - default WorldSpace - compensate (bool, optional): Whether or not to take into account - the current transform, default False. - - Returns: - output (list): Newly created nodes - - """ - - outputattr = ".matrix" if source == LocalSpace else ".worldMatrix[0]" - - assert cmds.objExists(driver), "%s not found" % driver - assert cmds.objExists(driven), "%s not found" % driven - - decompose = driver + "_decompose" - output = [decompose] - - if not cmds.objExists(decompose): - decompose = cmds.createNode("decomposeMatrix", name=decompose) - - if compensate: - - multMatrix = cmds.createNode( - "multMatrix", name=driver + "_multMatrix") - - # Compensate for drivens parentMatrix. - cmds.connectAttr(driver + outputattr, - multMatrix + ".matrixIn[0]") - cmds.connectAttr(driven + ".parentInverseMatrix", - multMatrix + ".matrixIn[1]") - cmds.connectAttr(multMatrix + ".matrixSum", - decompose + ".inputMatrix") - - output.append(multMatrix) - else: - cmds.connectAttr(driver + outputattr, - decompose + ".inputMatrix") - - # Drive driven with compensated driver. 
- cmds.connectAttr(decompose + ".outputTranslate", driven + ".t") - cmds.connectAttr(decompose + ".outputRotate", driven + ".r") - cmds.connectAttr(decompose + ".outputScale", driven + ".s") - - return output - - -def clone(shape, worldspace=False): - """Clone `shape` - - Arguments: - shape (str): Absolute path to shape - worldspace (bool, optional): Whether or not to consider worldspace - - Returns: - node (str): Newly created clone - - """ - - type = cmds.nodeType(shape) - assert type in ("mesh", "nurbsSurface", "nurbsCurve"), ( - "clone() works on polygonal and nurbs surfaces") - - src, dst = { - "mesh": (".outMesh", ".inMesh"), - "nurbsSurface": (".local", ".create"), - "nurbsCurve": (".local", ".create"), - }[type] - - nodetype = cmds.nodeType(shape) - - name = lib.unique(name=shape.rsplit("|")[-1]) - clone = cmds.createNode(nodetype, name=name) - - cmds.connectAttr(shape + src, clone + dst, force=True) - - if worldspace: - transform = cmds.createNode("transformGeometry", - name=name + "_transformGeometry") - - cmds.connectAttr(shape + src, - transform + ".inputGeometry", force=True) - cmds.connectAttr(shape + ".worldMatrix[0]", - transform + ".transform", force=True) - cmds.connectAttr(transform + ".outputGeometry", - clone + dst, force=True) - - # Assign default shader - cmds.sets(clone, addElement="initialShadingGroup") - - return clone - - -def combine(nodes): - """Produce a new mesh with the contents of `nodes` - - Arguments: - nodes (list): Path to shapes - - """ - - unite = cmds.createNode("polyUnite", n=nodes[0] + "_polyUnite") - - count = 0 - for node in nodes: - # Are we dealing with transforms, or shapes directly? - shapes = cmds.listRelatives(node, shapes=True) or [node] - - for shape in shapes: - try: - cmds.connectAttr(shape + ".outMesh", - unite + ".inputPoly[%s]" % count, force=True) - cmds.connectAttr(shape + ".worldMatrix", - unite + ".inputMat[%s]" % count, force=True) - count += 1 - - except Exception: - cmds.warning("'%s' is not a polygonal mesh" % shape) - - if count: - output = cmds.createNode("mesh", n=nodes[0] + "_combinedShape") - cmds.connectAttr(unite + ".output", output + ".inMesh", force=True) - return output - - else: - cmds.delete(unite) - return None - - -def transfer_outgoing_connections(src, dst): - """Connect outgoing connections from `src` to `dst` - - Connections that cannot be made are ignored. 
- - Arguments: - src (str): Absolute path to source node - dst (str): Absolute path to destination node - - """ - - for destination in cmds.listConnections(src, - source=False, - plugs=True) or []: - for source in cmds.listConnections(destination, - destination=False, - plugs=True) or []: - try: - cmds.connectAttr(source.replace(src, dst), - destination, force=True) - except RuntimeError: - continue - - -def parent_group(source, transferTransform=True): - """Create and transfer transforms to parent group""" - assert cmds.objExists(source), "%s does not exist" % source - assert cmds.nodeType(source) == "transform", ( - "%s must be transform" % source) - - parent = cmds.listRelatives(source, parent=True) - - if transferTransform: - group = cmds.createNode("transform", n="%s_parent" % source) - match_transform(group, source) - - try: - cmds.parent(source, group) - except Exception: - cmds.warning("Failed to parent child under new parent") - cmds.delete(group) - - if parent: - cmds.parent(group, parent[0]) - - else: - cmds.select(source) - group = cmds.group(n="%s_parent" % source) - - return group - - -def _output_node(source, type, suffix): - newname = lib.unique(name=source.rsplit("_", 1)[0] + suffix) - - node = cmds.createNode(type) - node = [cmds.listRelatives(node, parent=True) or node][0] - node = cmds.rename(node, newname) - - try: - cmds.parent(node, source) - match_transform(node, source) - - except Exception: - cmds.warning("Could not create %s" % node) - cmds.delete(node) - - return node - - -def output_locator(source, suffix="_LOC"): - """Create child locator - - Arguments: - source (str): Parent node - suffix (str): Suffix of output - - """ - - return _output_node(source, "locator", suffix) - - -def output_joint(source, suffix="_JNT"): - """Create child joint - - Arguments: - source (str): Parent node - suffix (str): Suffix of output - - """ - - return _output_node(source, "joint", suffix) - - -def follicle(shape, u=0, v=0, name=""): - """Attach follicle to "shape" at specified "u" and "v" values""" - - type = cmds.nodeType(shape) - assert type in ("mesh", "nurbsSurface"), ( - "follicle() works on polygonal meshes and nurbs") - - src, dst = { - "mesh": (".outMesh", ".inputMesh"), - "nurbsSurface": (".local", ".inputSurface") - }[type] - - follicle = cmds.createNode("follicle", name=name + "Shape") - transform = cmds.listRelatives(follicle, parent=True)[0] - - cmds.setAttr(follicle + ".parameterU", u) - cmds.setAttr(follicle + ".parameterV", v) - - cmds.connectAttr(follicle + ".outTranslate", transform + ".translate") - cmds.connectAttr(follicle + ".outRotate", transform + ".rotate") - cmds.connectAttr(shape + ".worldMatrix[0]", follicle + ".inputWorldMatrix") - cmds.connectAttr(shape + src, follicle + dst, force=True) - - return transform - - -def connect_matching_attributes(source, target): - """Connect matching attributes from source to target - - Arguments: - source (str): Absolute path to node from which to connect - target (str): Target node - - Example: - >>> # Select two matching nodes - >>> source = cmds.createNode("transform", name="source") - >>> target = cmds.createNode("transform", name="target") - >>> cmds.select([source, target], replace=True) - >>> source, target = cmds.ls(selection=True) - >>> connect_matching_attributes(source, target) - - """ - - dsts = cmds.listAttr(target, keyable=True) - for src in cmds.listAttr(source, keyable=True): - if src not in dsts: - continue - - try: - src = "." 
+ src - cmds.connectAttr(source + src, - target + src, - force=True) - except RuntimeError as e: - cmds.warning("Could not connect %s: %s" % (src, e)) - - -def create_ncloth(input_mesh): - """Replace Create nCloth menu item - - This performs the identical option of nCloth -> Create nCloth - with the following changes. - - 1. Input mesh not made intermediate - 2. Current mesh and shape named "currentMesh" - - Arguments: - input_mesh (str): Path to shape - - """ - - assert cmds.nodeType(input_mesh) == "mesh", ( - "%s was not of type mesh" % input_mesh) - - nucleus = cmds.createNode("nucleus", name="nucleus1") - ncloth = cmds.createNode("nCloth", name="nClothShape1") - current_mesh = cmds.createNode("mesh", name="currentMesh") - - cmds.connectAttr(input_mesh + ".worldMesh[0]", ncloth + ".inputMesh") - cmds.connectAttr(ncloth + ".outputMesh", current_mesh + ".inMesh") - cmds.connectAttr("time1.outTime", nucleus + ".currentTime") - cmds.connectAttr("time1.outTime", ncloth + ".currentTime") - cmds.connectAttr(ncloth + ".currentState", nucleus + ".inputActive[0]") - cmds.connectAttr(ncloth + ".startState", nucleus + ".inputActiveStart[0]") - cmds.connectAttr(nucleus + ".outputObjects[0]", ncloth + ".nextState") - cmds.connectAttr(nucleus + ".startFrame", ncloth + ".startFrame") - - # Assign default shader - cmds.sets(current_mesh, addElement="initialShadingGroup") - - return current_mesh - - -def enhanced_parent(child, parent): - if "shape" in cmds.nodeType(child, inherited=True): - cmds.parent(relative=True, shape=True) - else: - cmds.parent(child, parent) - - -def auto_connect_assets(src, dst): - """Attempt to automatically two assets - - Arguments: - src (str): Name of source reference node - dst (str): Name of destination reference node - - Raises: - StopIteration on missing in_SET - - """ - - in_set = None - - for node in cmds.referenceQuery(dst, nodes=True): - if node.endswith("in_SET"): - in_set = node - break - - for input_transform in cmds.sets(in_set, query=True): - mbid = cmds.getAttr(input_transform + ".cbId") - input_shape = cmds.listRelatives(input_transform, shapes=True)[0] - - for output_transform in lib.lsattr("cbId", value=mbid): - - ref = cmds.referenceQuery(output_transform, referenceNode=True) - if ref != src: - continue - - print("Connecting %s -> %s" % (output_transform, input_transform)) - output_shape = cmds.listRelatives(output_transform, shapes=True)[0] - - try: - auto_connect(output_transform, input_transform) - except RuntimeError: - # Already connected - pass - - try: - auto_connect(output_shape, input_shape) - except RuntimeError: - # Already connected - pass diff --git a/colorbleed/maya/interactive.py b/colorbleed/maya/interactive.py deleted file mode 100644 index 740870e0d2..0000000000 --- a/colorbleed/maya/interactive.py +++ /dev/null @@ -1,288 +0,0 @@ -"""Interactive functionality - -These depend on user selection in Maya, and may be used as-is. They -implement the functionality in :mod:`commands.py`. - -Each of these functions take `*args` as argument, because when used -in a Maya menu an additional argument is passed with metadata about -what state the button was pressed in. None of this data is used here. - -""" - -from maya import cmds, mel -from . 
import commands, lib - - -def connect_shapes(*args): - """Connect the first selection to the last selection(s)""" - selection = cmds.ls(selection=True) - - src = selection.pop(0) - commands.connect_shapes(src, dst=selection) - - -def combine(*args): - """Combine currently selected meshes - - This differs from the default Maya combine in that it - retains the original mesh and produces a new mesh with the result. - - """ - - commands.combine(cmds.ls(sl=1)) - - -def read_selected_channels(*args): - """Return a list of selected channels in the Channel Box""" - channelbox = mel.eval("global string $gChannelBoxName; " - "$temp=$gChannelBoxName;") - return cmds.channelBox(channelbox, - query=True, - selectedMainAttributes=True) or [] - - -def set_defaults(*args): - """Set currently selected values from channel box to their default value - - If no channel is selected, default all keyable attributes. - - """ - - for node in cmds.ls(selection=True): - selected_channels = read_selected_channels() - for channel in (selected_channels or - cmds.listAttr(node, keyable=True)): - try: - default = cmds.attributeQuery(channel, - node=node, - listDefault=True)[0] - except Exception: - continue - - else: - cmds.setAttr(node + "." + channel, default) - - -def transfer_outgoing_connections(*args): - """Connect outgoing connections from first to second selected node""" - - try: - src, dst = cmds.ls(selection=True) - except ValueError: - return cmds.warning("Select source and destination nodes") - - commands.transfer_outgoing_connections(src, dst) - - -def clone_special(*args): - """Clone in localspace, and preserve user-defined attributes""" - - for transform in cmds.ls(selection=True, long=True): - if cmds.nodeType(transform) != "transform": - cmds.warning("Skipping '%s', not a `transform`" % transform) - continue - - shape = _find_shape(transform) - type = cmds.nodeType(shape) - - if type not in ("mesh", "nurbsSurface", "nurbsCurve"): - cmds.warning("Skipping '{transform}': cannot clone nodes " - "of type '{type}'".format(**locals())) - continue - - cloned = commands.clone(shape, worldspace=False) - new_transform = cmds.listRelatives(cloned, - parent=True, - fullPath=True)[0] - - new_transform = cmds.rename(new_transform, - new_transform.rsplit(":", 1)[-1]) - - for attr in cmds.listAttr(transform, - userDefined=True) or list(): - try: - cmds.addAttr(new_transform, longName=attr, dataType="string") - except Exception: - continue - - value = cmds.getAttr(transform + "." + attr) - cmds.setAttr(new_transform + "." + attr, value, type="string") - - # Connect visibility - cmds.connectAttr(transform + ".visibility", - new_transform + ".visibility") - - -def clone_worldspace(*args): - return _clone(worldspace=True) - - -def clone_localspace(*args): - return _clone(worldspace=False) - - -def _clone(worldspace=False): - """Clone selected objects in viewport - - Arguments: - worldspace (bool): Whether or not to append a transformGeometry to - resulting clone. 
- - """ - - clones = list() - - for node in cmds.ls(selection=True, long=True): - shape = _find_shape(node) - type = cmds.nodeType(shape) - - if type not in ("mesh", "nurbsSurface", "nurbsCurve"): - cmds.warning("Skipping '{node}': cannot clone nodes " - "of type '{type}'".format(**locals())) - continue - - cloned = commands.clone(shape, worldspace=worldspace) - clones.append(cloned) - - if not clones: - return - - # Select newly created transform nodes in the viewport - transforms = list() - - for clone in clones: - transform = cmds.listRelatives(clone, parent=True, fullPath=True)[0] - transforms.append(transform) - - cmds.select(transforms, replace=True) - - -def _find_shape(element): - """Return shape of given 'element' - - Supports components, meshes, and surfaces - - Arguments: - element (str): Path to component, mesh or surface - - Returns: - str of path if found, None otherwise - - """ - - # Get either shape or transform, based on element-type - node = cmds.ls(element, objectsOnly=True, long=True)[0] - - if cmds.nodeType(node) == "transform": - try: - return cmds.listRelatives(node, shapes=True, fullPath=True)[0] - except IndexError: - return cmds.warning("Could not find shape in %s" % element) - else: - return node - - -def connect_matching_attributes_from_selection(*args): - try: - source, target = cmds.ls(sl=True) - except ValueError: - raise ValueError("Select (1) source and (2) target nodes only.") - - return commands.connect_matching_attributes(source, target) - - -def auto_connect(*args): - """Connect `src` to `dst` via the most likely input and output""" - try: - commands.auto_connect(*cmds.ls(selection=True)) - except TypeError: - cmds.warning("Select only source and destination nodes.") - - -def create_ncloth(): - selection = cmds.ls(selection=True)[0] - - input_mesh = cmds.listRelatives(selection, shapes=True)[0] - current_mesh = commands.create_ncloth(input_mesh) - - # Optionally append suffix - comp = selection.rsplit("_", 1) - suffix = ("_" + comp[-1]) if len(comp) > 1 else "" - - cmds.rename(current_mesh, "currentMesh%sShape" % suffix) - - # Mimic default nCloth command - cmds.hide(selection) - - -def follicle(*args): - supported = ["mesh", "nurbsSurface"] - selection = cmds.ls(sl=1) - - new_follicles = [] - for sel in selection: - uv = lib.uv_from_element(sel) - - geometry_shape = lib.shape_from_element(sel) - geometry_transform = cmds.listRelatives(geometry_shape, parent=True)[0] - - # Figure out output connection - inputs = [".inputMesh", ".inputSurface"] - outputs = [".outMesh", ".local"] - - failed = False - type = cmds.nodeType(geometry_shape) - if type not in supported: - failed = True - shapes = cmds.listRelatives(geometry_shape, shapes=True) - - if shapes: - geometry_shape = shapes[0] - type = cmds.nodeType(geometry_shape) - if type in supported: - failed = False - - if failed: - cmds.error("Skipping '%s': Type not accepted" % type) - return - - input = inputs[supported.index(type)] - output = outputs[supported.index(type)] - - # Make follicle - follicle = cmds.createNode("follicle", - name=geometry_transform + "_follicleShape1") - follicle_transform = cmds.listRelatives(follicle, parent=True)[0] - follicle_transform = cmds.rename(follicle_transform, - geometry_transform + "_follicle1") - - # Set U and V value - cmds.setAttr(follicle + ".parameterU", uv[0]) - cmds.setAttr(follicle + ".parameterV", uv[1]) - - # Make the connections - cmds.connectAttr(follicle + ".outTranslate", - follicle_transform + ".translate") - cmds.connectAttr(follicle + ".outRotate", - 
follicle_transform + ".rotate") - cmds.connectAttr(geometry_shape + output, - follicle + input) - - # Select last - new_follicles.append(follicle_transform) - - # Select newly created follicles - if new_follicles: - cmds.select(new_follicles, r=1) - - return new_follicles - - -def auto_connect_assets(*args): - references = cmds.ls(selection=True, type="reference") - - if not len(references) == 2: - raise RuntimeError("Select source and destination " - "reference nodes, in that order.") - - return commands.auto_connect_assets(*references) diff --git a/colorbleed/maya/lib.py b/colorbleed/maya/lib.py index 9469168e79..9583cce86b 100644 --- a/colorbleed/maya/lib.py +++ b/colorbleed/maya/lib.py @@ -8,10 +8,10 @@ import logging import contextlib from collections import OrderedDict, defaultdict -from avalon import maya, io - from maya import cmds, mel +from avalon import maya, io + log = logging.getLogger(__name__) @@ -717,10 +717,11 @@ def apply_attributes(attributes, nodes_by_id): """ for attr_data in attributes: - node = nodes_by_id[attr_data["uuid"]] + nodes = nodes_by_id[attr_data["uuid"]] attr_value = attr_data["attributes"] - for attr, value in attr_value.items(): - set_attribute(attr, value, node) + for node in nodes: + for attr, value in attr_value.items(): + set_attribute(attr, value, node) def list_looks(asset_id): @@ -784,6 +785,16 @@ def assign_look_by_version(nodes, version_id): else: log.info("Reusing existing lookdev '{}'".format(reference_node)) shader_nodes = cmds.referenceQuery(reference_node, nodes=True) + namespace = cmds.referenceQuery(reference_node, namespace=True) + + # containerise like avalon (for manager) + # give re + context = {"representation": shader_file} + subset_name = shader_file["context"]["subset"] + maya.containerise(name=subset_name, + namespace=namespace, + nodes=shader_nodes, + context=context) # Assign relationships with open(shader_relation, "r") as f: From ac3ed4dd7128bfcfc0a8fb3886f6c4e740f393bf Mon Sep 17 00:00:00 2001 From: aardschok Date: Mon, 7 Aug 2017 12:10:12 +0200 Subject: [PATCH 27/28] fixed missing args in assign_look --- colorbleed/plugins/maya/load/load_look.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/colorbleed/plugins/maya/load/load_look.py b/colorbleed/plugins/maya/load/load_look.py index d0b05d1252..12edc082a7 100644 --- a/colorbleed/plugins/maya/load/load_look.py +++ b/colorbleed/plugins/maya/load/load_look.py @@ -69,6 +69,7 @@ class LookLoader(api.Loader): # Get all nodes which belong to a matching name space # Currently this is the safest way to get all the nodes - lib.apply_shaders(relationships, nodes) + # Pass empty list as nodes to assign to in order to only load + lib.apply_shaders(relationships, nodes, []) self[:] = nodes From 394054e6d55ad54ea7a8147a72d6edff7d938c39 Mon Sep 17 00:00:00 2001 From: aardschok Date: Mon, 7 Aug 2017 12:10:37 +0200 Subject: [PATCH 28/28] finishing code for future testing --- .../maya/publish/_validate_filename_convention.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/colorbleed/plugins/maya/publish/_validate_filename_convention.py b/colorbleed/plugins/maya/publish/_validate_filename_convention.py index 7a9a44e02f..74c4592bce 100644 --- a/colorbleed/plugins/maya/publish/_validate_filename_convention.py +++ b/colorbleed/plugins/maya/publish/_validate_filename_convention.py @@ -1,4 +1,7 @@ import re +import os + +import maya.cmds as cmds import pyblish.api import colorbleed.api @@ -21,9 +24,15 @@ class 
ValidateFileNameConvention(pyblish.api.InstancePlugin):
 
         # todo: change pattern to company standard
         pattern = re.compile("[a-zA-Z]+_[A-Z]{3}")
 
-        nodes = list(instance)
+        nodes = cmds.ls(instance, type="file")
         for node in nodes:
-            match = pattern.match(node)
+            # get texture path
+            texture = cmds.getAttr("{}.fileTextureName".format(node))
+            if not texture:
+                self.log.error("%s has no file texture name set" % node)
+                invalid.append(node)
+                continue
+            filename = os.path.splitext(os.path.basename(texture))[0]
+            match = pattern.match(filename)
             if not match:
                 invalid.append(node)
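
The pattern above is still a placeholder (see the todo in the hunk) and only accepts basenames of the form letters, underscore, three capitals (for example "wood_DIF"), checked after the directory and extension have been stripped. Below is a minimal standalone sketch of that check, runnable outside Maya; the sample texture paths are invented purely for illustration and are not part of the pipeline.

    import os
    import re

    # same placeholder convention the validator compiles
    pattern = re.compile("[a-zA-Z]+_[A-Z]{3}")

    samples = [
        "/projects/asset/textures/wood_DIF.exr",      # letters + three capitals: passes
        "/projects/asset/textures/wood_diffuse.exr",  # lowercase suffix: fails
        "/projects/asset/textures/01_brick_SPC.png",  # leading digits: fails
    ]

    for path in samples:
        # mirror the validator: basename without extension, matched from the start
        name = os.path.splitext(os.path.basename(path))[0]
        print("{0}: {1}".format(name, "ok" if pattern.match(name) else "invalid"))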