From 6c9ad4e238c35179b6dc23eb386d9962f64b0c7e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 19 Jan 2018 11:46:20 +0100 Subject: [PATCH 01/11] Remove debugging print statement --- .../plugins/maya/publish/validate_rig_out_set_node_ids.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/colorbleed/plugins/maya/publish/validate_rig_out_set_node_ids.py b/colorbleed/plugins/maya/publish/validate_rig_out_set_node_ids.py index 32909e6e3e..a109c39171 100644 --- a/colorbleed/plugins/maya/publish/validate_rig_out_set_node_ids.py +++ b/colorbleed/plugins/maya/publish/validate_rig_out_set_node_ids.py @@ -35,8 +35,6 @@ def get_id_from_history(node): similar_nodes = [i for i in similar_nodes if get_parent(i) == parent] - print similar_nodes - # Check all of the remaining similar nodes and take the first one # with an id and assume it's the original. for similar_node in similar_nodes: From ccb97fe1234c7464748eb2a8e9fc6c6233cef57d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 19 Jan 2018 11:47:57 +0100 Subject: [PATCH 02/11] Remove redundant info logging in plug-in --- colorbleed/plugins/maya/publish/validate_rig_contents.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/colorbleed/plugins/maya/publish/validate_rig_contents.py b/colorbleed/plugins/maya/publish/validate_rig_contents.py index d07fafe362..7111d679cb 100644 --- a/colorbleed/plugins/maya/publish/validate_rig_contents.py +++ b/colorbleed/plugins/maya/publish/validate_rig_contents.py @@ -38,8 +38,6 @@ class ValidateRigContents(pyblish.api.InstancePlugin): # in the rig instance set_members = self.check_set_members(instance) - self.log.info("Evaluating contents of object sets..") - # Ensure contents in sets and retrieve long path for all objects output_content = cmds.sets("out_SET", query=True) or [] assert output_content, "Must have members in rig out_SET" From c143f164d0ad06fd7b115a3e8cc381208d5b0795 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 19 Jan 2018 12:06:14 +0100 Subject: [PATCH 03/11] Simplify validate rig contents --- .../maya/publish/validate_rig_contents.py | 153 +++++++----------- 1 file changed, 57 insertions(+), 96 deletions(-) diff --git a/colorbleed/plugins/maya/publish/validate_rig_contents.py b/colorbleed/plugins/maya/publish/validate_rig_contents.py index 7111d679cb..824e298646 100644 --- a/colorbleed/plugins/maya/publish/validate_rig_contents.py +++ b/colorbleed/plugins/maya/publish/validate_rig_contents.py @@ -20,87 +20,68 @@ class ValidateRigContents(pyblish.api.InstancePlugin): accepted_output = ["mesh", "transform"] accepted_controllers = ["transform"] - ignore_nodes = [] - - invalid_hierarchy = [] - invalid_controls = [] - invalid_geometry = [] def process(self, instance): - error = False - objectsets = ("controls_SET", "out_SET") missing = [obj for obj in objectsets if obj not in instance] assert not missing, ("%s is missing %s" % (instance, missing)) # Ensure there are at least some transforms or dag nodes # in the rig instance - set_members = self.check_set_members(instance) - - # Ensure contents in sets and retrieve long path for all objects - output_content = cmds.sets("out_SET", query=True) or [] - assert output_content, "Must have members in rig out_SET" - - controls_content = cmds.sets("controls_SET", query=True) or [] - assert controls_content, "Must have members in rig controls_SET" - - root_node = cmds.ls(set_members, assemblies=True) - hierarchy = cmds.listRelatives(root_node, allDescendents=True, - fullPath=True) - - self.invalid_geometry = 
self.validate_geometry(output_content, - hierarchy) - self.invalid_controls = self.validate_controls(controls_content, - hierarchy) - - if self.invalid_hierarchy: - self.log.error("Found nodes which reside outside of root group " - "while they are set up for publishing." - "\n%s" % self.invalid_hierarchy) - error = True - - if self.invalid_controls: - self.log.error("Only transforms can be part of the controls_SET." - "\n%s" % self.invalid_controls) - error = True - - if self.invalid_geometry: - self.log.error("Only meshes can be part of the out_SET\n%s" - % self.invalid_geometry) - error = True - - if error: - raise RuntimeError("Invalid rig content. See log for details.") - - def check_set_members(self, instance): - """Check if the instance has any dagNodes - Args: - instance: the instance which needs to be published - Returns: - set_members (list): all dagNodes from instance - """ - set_members = instance.data['setMembers'] if not cmds.ls(set_members, type="dagNode", long=True): raise RuntimeError("No dag nodes in the pointcache instance. " "(Empty instance?)") - return set_members - def validate_hierarchy(self, hierarchy, nodes): - """Collect all nodes which are NOT within the hierarchy - Args: - hierarchy (list): nodes within the root node - nodes (list): nodes to check + # Ensure contents in sets and retrieve long path for all objects + output_content = cmds.sets("out_SET", query=True) or [] + assert output_content, "Must have members in rig out_SET" + output_content = cmds.ls(output_content, long=True) - Returns: - errors (list): list of nodes - """ - errors = [] - for node in nodes: + controls_content = cmds.sets("controls_SET", query=True) or [] + assert controls_content, "Must have members in rig controls_SET" + controls_content = cmds.ls(controls_content, long=True) + + # Validate members are inside the hierarchy from root node + root_node = cmds.ls(set_members, assemblies=True) + hierarchy = cmds.listRelatives(root_node, allDescendents=True, + fullPath=True) + hierarchy = set(hierarchy) + + invalid_hierarchy = [] + for node in output_content: if node not in hierarchy: - errors.append(node) - return errors + invalid_hierarchy.append(node) + for node in controls_content: + if node not in hierarchy: + invalid_hierarchy.append(node) + + # Additional validations + invalid_geometry = self.validate_geometry(output_content, + hierarchy) + invalid_controls = self.validate_controls(controls_content, + hierarchy) + + error = False + if invalid_hierarchy: + self.log.error("Found nodes which reside outside of root group " + "while they are set up for publishing." + "\n%s" % invalid_hierarchy) + error = True + + if invalid_controls: + self.log.error("Only transforms can be part of the controls_SET." + "\n%s" % invalid_controls) + error = True + + if invalid_geometry: + self.log.error("Only meshes can be part of the out_SET\n%s" + % invalid_geometry) + error = True + + if error: + raise RuntimeError("Invalid rig content. 
See log for details.") def validate_geometry(self, set_members, hierarchy): """Check if the out set passes the validations @@ -116,31 +97,18 @@ class ValidateRigContents(pyblish.api.InstancePlugin): errors (list) """ - errors = [] - # Validate the contents further + # Validate all shape types + invalid = [] shapes = cmds.listRelatives(set_members, allDescendents=True, shapes=True, fullPath=True) or [] - - # The user can add the shape node to the out_set, this will result - # in none when querying allDescendents - all_shapes = set_members + shapes - all_long_names = [cmds.ls(i, long=True)[0] for i in all_shapes] - - # geometry - invalid_shapes = self.validate_hierarchy(hierarchy, - all_long_names) - self.invalid_hierarchy.extend(invalid_shapes) + all_shapes = cmds.ls(set_members + shapes, long=True, shapes=True) for shape in all_shapes: - nodetype = cmds.nodeType(shape) - if nodetype in self.ignore_nodes: - continue + if cmds.nodeType(shape) not in self.accepted_output: + invalid.append(shape) - if nodetype not in self.accepted_output: - errors.append(shape) - - return errors + return invalid def validate_controls(self, set_members, hierarchy): """Check if the controller set passes the validations @@ -156,17 +124,10 @@ class ValidateRigContents(pyblish.api.InstancePlugin): errors (list) """ - errors = [] - all_long_names = [cmds.ls(i, long=True)[0] for i in set_members] - invalid_controllers = self.validate_hierarchy(hierarchy, - all_long_names) - self.invalid_hierarchy.extend(invalid_controllers) + # Validate control types + invalid = [] for node in set_members: - nodetype = cmds.nodeType(node) - if nodetype in self.ignore_nodes: - continue + if cmds.nodeType(node) not in self.accepted_controllers: + invalid.append(node) - if nodetype not in self.accepted_controllers: - errors.append(node) - - return errors + return invalid From 9a702cbd25429469421d5eef07920cf55de6ec97 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 19 Jan 2018 12:28:34 +0100 Subject: [PATCH 04/11] Remove redundant hierarchy argument for methods --- .../plugins/maya/publish/validate_rig_contents.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/colorbleed/plugins/maya/publish/validate_rig_contents.py b/colorbleed/plugins/maya/publish/validate_rig_contents.py index 824e298646..7530936a66 100644 --- a/colorbleed/plugins/maya/publish/validate_rig_contents.py +++ b/colorbleed/plugins/maya/publish/validate_rig_contents.py @@ -58,10 +58,8 @@ class ValidateRigContents(pyblish.api.InstancePlugin): invalid_hierarchy.append(node) # Additional validations - invalid_geometry = self.validate_geometry(output_content, - hierarchy) - invalid_controls = self.validate_controls(controls_content, - hierarchy) + invalid_geometry = self.validate_geometry(output_content) + invalid_controls = self.validate_controls(controls_content) error = False if invalid_hierarchy: @@ -83,7 +81,7 @@ class ValidateRigContents(pyblish.api.InstancePlugin): if error: raise RuntimeError("Invalid rig content. 
See log for details.") - def validate_geometry(self, set_members, hierarchy): + def validate_geometry(self, set_members): """Check if the out set passes the validations Checks if all its set members are within the hierarchy of the root @@ -110,7 +108,7 @@ class ValidateRigContents(pyblish.api.InstancePlugin): return invalid - def validate_controls(self, set_members, hierarchy): + def validate_controls(self, set_members): """Check if the controller set passes the validations Checks if all its set members are within the hierarchy of the root From ea43cdd868643a26c4e00677d3b67e8f8659b775 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 22 Jan 2018 08:27:56 +0100 Subject: [PATCH 05/11] Remove cb dependency for getting highest in hierarchy - embed in lib.py --- colorbleed/maya/lib.py | 50 +++++++++++++++++++++++++++++++++++++++++- 1 file changed, 49 insertions(+), 1 deletion(-) diff --git a/colorbleed/maya/lib.py b/colorbleed/maya/lib.py index 82ab58e5f4..8eb4d5799a 100644 --- a/colorbleed/maya/lib.py +++ b/colorbleed/maya/lib.py @@ -1126,8 +1126,56 @@ def get_container_transforms(container, members=None, root=False): results = cmds.ls(members, type="transform", long=True) if root: - root = core.getHighestInHierarchy(results) + root = get_highest_in_hierarchy(results) if root: results = root[0] return results + + +def get_highest_in_hierarchy(nodes): + """Return highest nodes in the hierarchy that are in the `nodes` list. + + The "highest in hierarchy" are the nodes closest to world: top-most level. + + Args: + nodes (list): The nodes in which find the highest in hierarchies. + + Returns: + list: The highest nodes from the input nodes. + + """ + + # Ensure we use long names + nodes = cmds.ls(nodes, long=True) + lookup = set(nodes) + + highest = [] + for node in nodes: + # If no parents are within the nodes input list + # then this is a highest node + if not any(n in lookup for n in iter_parents(node)): + highest.append(node) + + return highest + + +def iter_parents(node): + """Iter parents of node from its long name. + + Note: The `node` *must* be the long node name. + + Args: + node (str): Node long name. + + Yields: + str: All parent node names (long names) + + """ + while True: + split = node.rsplit("|", 1) + if len(split) == 1: + return + + node = split[0] + yield node From f26c78b7820c511bd771f97af6bb2c600d1da14e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 23 Jan 2018 11:56:23 +0100 Subject: [PATCH 06/11] Add validate rig controllers for Arnold attributes --- ...idate_rig_controllers_arnold_attributes.py | 91 +++++++++++++++++++ 1 file changed, 91 insertions(+) create mode 100644 colorbleed/plugins/maya/publish/validate_rig_controllers_arnold_attributes.py diff --git a/colorbleed/plugins/maya/publish/validate_rig_controllers_arnold_attributes.py b/colorbleed/plugins/maya/publish/validate_rig_controllers_arnold_attributes.py new file mode 100644 index 0000000000..f7e0b43681 --- /dev/null +++ b/colorbleed/plugins/maya/publish/validate_rig_controllers_arnold_attributes.py @@ -0,0 +1,91 @@ +from maya import cmds + +import pyblish.api +import colorbleed.api +from cb.utils.maya.context import undo_chunk + + +class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin): + """Validate rig control curves have no keyable arnold attributes. 
+ + The Arnold plug-in will create curve attributes like: + - aiRenderCurve + - aiCurveWidth + - aiSampleRate + - aiCurveShaderR + - aiCurveShaderG + - aiCurveShaderB + + Unfortunately these attributes visible in the channelBox are *keyable* + by default and visible in the channelBox. As such pressing a regular "S" + set key shortcut will set keys on these attributes too, thus cluttering + the animator's scene. + + This validator will ensure they are hidden or unkeyable attributes. + + """ + order = colorbleed.api.ValidateContentsOrder + 0.05 + label = "Rig Controllers (Arnold Attributes)" + hosts = ["maya"] + families = ["colorbleed.rig"] + actions = [colorbleed.api.RepairAction, + colorbleed.api.SelectInvalidAction] + + attributes = [ + "rcurve", + "cwdth", + "srate", + "ai_curve_shaderr", + "ai_curve_shaderg", + "ai_curve_shaderb" + ] + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError('{} failed, see log ' + 'information'.format(self.label)) + + @classmethod + def get_invalid(cls, instance): + + controllers_sets = [i for i in instance if i == "controls_SET"] + if not controllers_sets: + return [] + + controls = cmds.sets(controllers_sets, query=True) or [] + if not controls: + return [] + + shapes = cmds.ls(controls, + dag=True, + leaf=True, + long=True, + shapes=True, + noIntermediate=True) + curves = cmds.ls(shapes, type="nurbsCurve", long=True) + + print curves + + invalid = list() + for node in curves: + + for attribute in cls.attributes: + if cmds.attributeQuery(attribute, node=node, exists=True): + plug = "{}.{}".format(node, attribute) + if cmds.getAttr(plug, keyable=True): + invalid.append(node) + break + + return invalid + + @classmethod + def repair(cls, instance): + + invalid = cls.get_invalid(instance) + with undo_chunk(): + for node in invalid: + for attribute in cls.attributes: + if cmds.attributeQuery(attribute, node=node, exists=True): + plug = "{}.{}".format(node, attribute) + cmds.setAttr(plug, channelBox=False, keyable=False) From 7b34e1b28bf805ad179c845aea6d7dcce6c3aced Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 23 Jan 2018 12:55:27 +0100 Subject: [PATCH 07/11] Remove debugging print statement --- .../maya/publish/validate_rig_controllers_arnold_attributes.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/colorbleed/plugins/maya/publish/validate_rig_controllers_arnold_attributes.py b/colorbleed/plugins/maya/publish/validate_rig_controllers_arnold_attributes.py index f7e0b43681..b6a1191006 100644 --- a/colorbleed/plugins/maya/publish/validate_rig_controllers_arnold_attributes.py +++ b/colorbleed/plugins/maya/publish/validate_rig_controllers_arnold_attributes.py @@ -65,8 +65,6 @@ class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin): noIntermediate=True) curves = cmds.ls(shapes, type="nurbsCurve", long=True) - print curves - invalid = list() for node in curves: From e97fa88a121ccc13d1dfe091e9bdd5b6048af903 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Jan 2018 12:41:33 +0100 Subject: [PATCH 08/11] Support multiple UV sets in Alembic (Maya 2017+) --- colorbleed/maya/lib.py | 55 +++++++++++ .../plugins/maya/publish/extract_animation.py | 22 +++-- .../maya/publish/extract_pointcache.py | 24 +++-- .../publish/validate_mesh_single_uv_set.py | 98 ++++--------------- 4 files changed, 107 insertions(+), 92 deletions(-) diff --git a/colorbleed/maya/lib.py b/colorbleed/maya/lib.py index 8eb4d5799a..2be9174864 100644 --- a/colorbleed/maya/lib.py +++ b/colorbleed/maya/lib.py @@ 
-1179,3 +1179,58 @@ def iter_parents(node): node = split[0] yield node + + +def remove_other_uv_sets(mesh): + """Remove all other UV sets than the current UV set. + + Keep only current UV set and ensure it's the renamed to default 'map1'. + + """ + + uvSets = cmds.polyUVSet(mesh, query=True, allUVSets=True) + current = cmds.polyUVSet(mesh, query=True, currentUVSet=True)[0] + + # Copy over to map1 + if current != 'map1': + cmds.polyUVSet(mesh, uvSet=current, newUVSet='map1', copy=True) + cmds.polyUVSet(mesh, currentUVSet=True, uvSet='map1') + current = 'map1' + + # Delete all non-current UV sets + deleteUVSets = [uvSet for uvSet in uvSets if uvSet != current] + uvSet = None + + # Maya Bug (tested in 2015/2016): + # In some cases the API's MFnMesh will report less UV sets than + # maya.cmds.polyUVSet. This seems to happen when the deletion of UV sets + # has not triggered a cleanup of the UVSet array attribute on the mesh + # node. It will still have extra entries in the attribute, though it will + # not show up in API or UI. Nevertheless it does show up in + # maya.cmds.polyUVSet. To ensure we clean up the array we'll force delete + # the extra remaining 'indices' that we don't want. + + # TODO: Implement a better fix + # The best way to fix would be to get the UVSet indices from api with + # MFnMesh (to ensure we keep correct ones) and then only force delete the + # other entries in the array attribute on the node. But for now we're + # deleting all entries except first one. Note that the first entry could + # never be removed (the default 'map1' always exists and is supposed to + # be undeletable.) + try: + for uvSet in deleteUVSets: + cmds.polyUVSet(mesh, delete=True, uvSet=uvSet) + except RuntimeError as exc: + log.warning('Error uvSet: %s - %s', uvSet, exc) + indices = cmds.getAttr('{0}.uvSet'.format(mesh), + multiIndices=True) + if not indices: + log.warning("No uv set found indices for: %s", mesh) + return + + # Delete from end to avoid shifting indices + # and remove the indices in the attribute + indices = reversed(indices[1:]) + for i in indices: + attr = '{0}.uvSet[{1}]'.format(mesh, i) + cmds.removeMultiInstance(attr, b=True) diff --git a/colorbleed/plugins/maya/publish/extract_animation.py b/colorbleed/plugins/maya/publish/extract_animation.py index abc9be046f..d62c34e915 100644 --- a/colorbleed/plugins/maya/publish/extract_animation.py +++ b/colorbleed/plugins/maya/publish/extract_animation.py @@ -10,7 +10,7 @@ from colorbleed.maya.lib import extract_alembic class ExtractColorbleedAnimation(colorbleed.api.Extractor): """Produce an alembic of just point positions and normals. - Positions and normals are preserved, but nothing more, + Positions and normals, uvs, creases are preserved, but nothing more, for plain and predictable point caches. """ @@ -49,18 +49,26 @@ class ExtractColorbleedAnimation(colorbleed.api.Extractor): filename = "{name}.abc".format(**instance.data) path = os.path.join(parent_dir, filename) + options = { + "step": instance.data.get("step", 1.0), + "attr": ["cbId"], + "writeVisibility": True, + "writeCreases": True, + "uvWrite": True, + "selection": True + } + + if int(cmds.about(version=True)) >= 2017: + # Since Maya 2017 alembic supports multiple uv sets - write them. 
+ options["writeUVSets"] = True + with avalon.maya.suspended_refresh(): with avalon.maya.maintained_selection(): cmds.select(nodes, noExpand=True) extract_alembic(file=path, startFrame=start, endFrame=end, - **{"step": instance.data.get("step", 1.0), - "attr": ["cbId"], - "writeVisibility": True, - "writeCreases": True, - "uvWrite": True, - "selection": True}) + **options) if "files" not in instance.data: instance.data["files"] = list() diff --git a/colorbleed/plugins/maya/publish/extract_pointcache.py b/colorbleed/plugins/maya/publish/extract_pointcache.py index 878bb14d6c..405d1e6549 100644 --- a/colorbleed/plugins/maya/publish/extract_pointcache.py +++ b/colorbleed/plugins/maya/publish/extract_pointcache.py @@ -10,7 +10,7 @@ from colorbleed.maya.lib import extract_alembic class ExtractColorbleedAlembic(colorbleed.api.Extractor): """Produce an alembic of just point positions and normals. - Positions and normals are preserved, but nothing more, + Positions and normals, uvs, creases are preserved, but nothing more, for plain and predictable point caches. """ @@ -44,19 +44,27 @@ class ExtractColorbleedAlembic(colorbleed.api.Extractor): filename = "{name}.abc".format(**instance.data) path = os.path.join(parent_dir, filename) + options = { + "step": instance.data.get("step", 1.0), + "attr": ["cbId"], + "writeVisibility": True, + "writeCreases": True, + "writeColorSets": writeColorSets, + "uvWrite": True, + "selection": True + } + + if int(cmds.about(version=True)) >= 2017: + # Since Maya 2017 alembic supports multiple uv sets - write them. + options["writeUVSets"] = True + with avalon.maya.suspended_refresh(): with avalon.maya.maintained_selection(): cmds.select(nodes, noExpand=True) extract_alembic(file=path, startFrame=start, endFrame=end, - **{"step": instance.data.get("step", 1.0), - "attr": ["cbId"], - "writeVisibility": True, - "writeCreases": True, - "writeColorSets": writeColorSets, - "uvWrite": True, - "selection": True}) + **options) if "files" not in instance.data: instance.data["files"] = list() diff --git a/colorbleed/plugins/maya/publish/validate_mesh_single_uv_set.py b/colorbleed/plugins/maya/publish/validate_mesh_single_uv_set.py index 85567631d5..77ec1a0661 100644 --- a/colorbleed/plugins/maya/publish/validate_mesh_single_uv_set.py +++ b/colorbleed/plugins/maya/publish/validate_mesh_single_uv_set.py @@ -2,10 +2,17 @@ from maya import cmds import pyblish.api import colorbleed.api +import colorbleed.maya.lib as lib class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin): - """Ensure no multiple UV sets exist for each polygon mesh""" + """Warn on multiple UV sets existing for each polygon mesh. + + On versions prior to Maya 2017 this will force no multiple uv sets because + the Alembic exports in Maya prior to 2017 don't support writing multiple + UV sets. + + """ order = colorbleed.api.ValidateMeshOrder hosts = ['maya'] @@ -42,83 +49,20 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise ValueError("Nodes found with multiple " - "UV sets: {0}".format(invalid)) + + message = "Nodes found with multiple UV sets: {0}".format(invalid) + + # Maya 2017 and up allows multiple UV sets in Alembic exports + # so we allow it, yet just warn the user to ensure they know about + # the other UV sets. 
+ allowed = int(cmds.about(version=True)) >= 2017 + + if allowed: + self.log.warning(message) + else: + raise ValueError(message) @classmethod def repair(cls, instance): for mesh in cls.get_invalid(instance): - cls._repair_mesh(mesh) - - @classmethod - def _repair_mesh(cls, mesh): - """Process a single mesh, deleting other UV sets than the active one. - - Keep only current UV set and ensure it's the default 'map1' - - """ - from maya import cmds - - uvSets = cmds.polyUVSet(mesh, - query=True, - allUVSets=True) - current = cmds.polyUVSet(mesh, - query=True, - currentUVSet=True)[0] - - # Copy over to map1 - if current != 'map1': - cmds.polyUVSet(mesh, - uvSet=current, - newUVSet='map1', - copy=True) - cmds.polyUVSet(mesh, - currentUVSet=True, - uvSet='map1') - current = 'map1' - - # Delete all non-current UV sets - deleteUVSets = [uvSet for uvSet in uvSets if uvSet != current] - uvSet = None - - # Maya Bug (tested in 2015/2016): - # In some cases the API's MFnMesh will report less UV sets - # than maya.cmds.polyUVSet. - # This seems to happen when the deletion of UV sets has not - # triggered a cleanup of the UVSet array - # attribute on the mesh node. It will still have extra - # entries in the attribute, though it will not - # show up in API or UI. Nevertheless it does show up in - # maya.cmds.polyUVSet. - # To ensure we clean up the array we'll force delete the - # extra remaining 'indices' that we don't want. - - # TODO: Implement a better fix - # The best way to fix would be to get the UVSet - # indices from api with MFnMesh (to ensure we keep - # correct ones) and then only force delete the other - # entries in the array attribute on the node. - # But for now we're deleting all entries except first - # one. Note that the first entry could never - # be removed (the default 'map1' always exists and is - # supposed to be undeletable.) 
- try: - for uvSet in deleteUVSets: - cmds.polyUVSet(mesh, delete=True, uvSet=uvSet) - except RuntimeError, e: - cls.log.warning('uvSet: {0} - ' - 'Error: {1}'.format(uvSet, e)) - - indices = cmds.getAttr('{0}.uvSet'.format(mesh), - multiIndices=True) - if not indices: - cls.log.warning( - "No uv set found indices for: {0}".format(mesh)) - return - - # Delete from end to avoid shifting indices - # and remove the indices in the attribute - indices = reversed(indices[1:]) - for i in indices: - attr = '{0}.uvSet[{1}]'.format(mesh, i) - cmds.removeMultiInstance(attr, b=True) + lib.remove_other_uv_sets(mesh) From 71a6e4bf6fe943fe317c1f3492346e4251abd790 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Jan 2018 14:01:55 +0100 Subject: [PATCH 09/11] Initial draft for Fusion integration (not fully working) --- colorbleed/fusion/__init__.py | 26 ++++++++++++ .../plugins/fusion/load/load_sequence.py | 41 +++++++++++++++++++ 2 files changed, 67 insertions(+) create mode 100644 colorbleed/fusion/__init__.py create mode 100644 colorbleed/plugins/fusion/load/load_sequence.py diff --git a/colorbleed/fusion/__init__.py b/colorbleed/fusion/__init__.py new file mode 100644 index 0000000000..36270980bf --- /dev/null +++ b/colorbleed/fusion/__init__.py @@ -0,0 +1,26 @@ +import os + +from avalon import api as avalon +from pyblish import api as pyblish + +PARENT_DIR = os.path.dirname(__file__) +PACKAGE_DIR = os.path.dirname(PARENT_DIR) +PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins") + +PUBLISH_PATH = os.path.join(PLUGINS_DIR, "fusion", "publish") +LOAD_PATH = os.path.join(PLUGINS_DIR, "fusion", "load") +CREATE_PATH = os.path.join(PLUGINS_DIR, "fusion", "create") + + +def install(): + print("Registering Fusion plug-ins..") + pyblish.register_plugin_path(PUBLISH_PATH) + avalon.register_plugin_path(avalon.Loader, LOAD_PATH) + avalon.register_plugin_path(avalon.Creator, CREATE_PATH) + + +def uninstall(): + print("Deregistering Fusion plug-ins..") + pyblish.deregister_plugin_path(PUBLISH_PATH) + avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) + avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH) diff --git a/colorbleed/plugins/fusion/load/load_sequence.py b/colorbleed/plugins/fusion/load/load_sequence.py new file mode 100644 index 0000000000..2a675eee4c --- /dev/null +++ b/colorbleed/plugins/fusion/load/load_sequence.py @@ -0,0 +1,41 @@ +from avalon import api +import fusionless +import fusionless.context as fuCtx +import os + + +class FusionLoadSequence(api.Loader): + """Load image sequence into Fusion""" + + families = ["colorbleed.imagesequence"] + representations = ["*"] + + label = "Load sequence" + order = -10 + icon = "play-circle" + color = "orange" + + def load(self, context, name, namespace, data): + + from avalon.fusion.pipeline import imprint_container + + # Fallback to asset name when namespace is None + if namespace is None: + namespace = context['asset']['name'] + + # Use the first file for now + root = self.fname + files = os.listdir(root) + path = os.path.join(root, files[0]) + + # Create the Loader with the filename path set + comp = fusionless.Comp() + with fuCtx.lock_and_undo_chunk(comp, "Create Loader"): + tool = comp.create_tool("Loader") + tool.input("Clip").set_value(path) + + imprint_container(tool, + name=name, + namespace=namespace, + context=context, + loader=self.__class__.__name__) From 82748d33071d726a76841d733a7c957a51ab36ae Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 5 Feb 2018 16:18:30 +0100 Subject: [PATCH 10/11] Implement FusionLoadSequence, 
including update and remove - also remove fusionless dependency --- .../plugins/fusion/load/load_sequence.py | 72 ++++++++++++++++--- 1 file changed, 62 insertions(+), 10 deletions(-) diff --git a/colorbleed/plugins/fusion/load/load_sequence.py b/colorbleed/plugins/fusion/load/load_sequence.py index 2a675eee4c..52b869e540 100644 --- a/colorbleed/plugins/fusion/load/load_sequence.py +++ b/colorbleed/plugins/fusion/load/load_sequence.py @@ -1,6 +1,4 @@ from avalon import api -import fusionless -import fusionless.context as fuCtx import os @@ -17,25 +15,79 @@ class FusionLoadSequence(api.Loader): def load(self, context, name, namespace, data): - from avalon.fusion.pipeline import imprint_container + from avalon.fusion import ( + imprint_container, + get_current_comp, + comp_lock_and_undo_chunk + ) # Fallback to asset name when namespace is None if namespace is None: namespace = context['asset']['name'] # Use the first file for now - root = self.fname - files = os.listdir(root) - path = os.path.join(root, files[0]) + path = self._get_first_image(self.fname) # Create the Loader with the filename path set - comp = fusionless.Comp() - with fuCtx.lock_and_undo_chunk(comp, "Create Loader"): - tool = comp.create_tool("Loader") - tool.input("Clip").set_value(path) + comp = get_current_comp() + with comp_lock_and_undo_chunk(comp, "Create Loader"): + + args = (-32768, -32768) + tool = comp.AddTool("Loader", *args) + tool["Clip"] = path imprint_container(tool, name=name, namespace=namespace, context=context, loader=self.__class__.__name__) + + def _get_first_image(self, root): + """Get first file in representation root""" + files = sorted(os.listdir(root)) + return os.path.join(root, files[0]) + + def update(self, container, representation): + """Update the Loader's path + + Fusion automatically tries to reset some variables when changing + the loader's path to a new file. These automatic changes are to its + inputs: + - ClipTimeStart (if duration changes) + - ClipTimeEnd (if duration changes) + - GlobalIn (if duration changes) + - GlobalEnd (if duration changes) + - Reverse (sometimes?) + - Loop (sometimes?) 
+ - Depth (always resets to "Format") + - KeyCode (always resets to "") + - TimeCodeOffset (always resets to 0) + + """ + + from avalon.fusion import comp_lock_and_undo_chunk + + root = api.get_representation_path(representation) + path = self._get_first_image(root) + print representation + print path + + tool = container["_tool"] + assert tool.ID == "Loader", "Must be Loader" + comp = tool.Comp() + + with comp_lock_and_undo_chunk(comp, "Update Loader"): + tool["Clip"] = path + + # Update the imprinted representation + tool.SetData("avalon.representation", str(representation["_id"])) + + def remove(self, container): + + from avalon.fusion import comp_lock_and_undo_chunk + + tool = container["_tool"] + assert tool.ID == "Loader", "Must be Loader" + comp = tool.Comp() + with comp_lock_and_undo_chunk(comp, "Remove Loader"): + tool.Delete() From a37820607a2c03ad5b2bc541ef3ce3be563ec698 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 5 Feb 2018 16:18:53 +0100 Subject: [PATCH 11/11] Add set frame range actions for Fusion --- colorbleed/plugins/fusion/load/actions.py | 103 ++++++++++++++++++++++ 1 file changed, 103 insertions(+) create mode 100644 colorbleed/plugins/fusion/load/actions.py diff --git a/colorbleed/plugins/fusion/load/actions.py b/colorbleed/plugins/fusion/load/actions.py new file mode 100644 index 0000000000..68f66775a8 --- /dev/null +++ b/colorbleed/plugins/fusion/load/actions.py @@ -0,0 +1,103 @@ +"""A module containing generic loader actions that will display in the Loader. + +""" + +from avalon import api + + +def _set_frame_range(start, end, set_render_range=True): + """Set Fusion comp's start and end frame range + + Attrs: + set_render_range (bool, Optional): When True this will also set the + composition's render start and end frame. 
+ + Returns: + None + + """ + + from avalon.fusion import get_current_comp, comp_lock_and_undo_chunk + + comp = get_current_comp() + + attrs = { + "COMPN_GlobalStart": start, + "COMPN_GlobalEnd": end + } + + if set_render_range: + attrs.update({ + "COMPN_RenderStart": start, + "COMPN_RenderEnd": end + }) + + with comp_lock_and_undo_chunk(comp): + comp.SetAttrs(attrs) + + +class FusionSetFrameRangeLoader(api.Loader): + """Specific loader of Alembic for the avalon.animation family""" + + families = ["colorbleed.animation", + "colorbleed.camera", + "colorbleed.imagesequence", + "colorbleed.yeticache", + "colorbleed.pointcache"] + representations = ["*"] + + label = "Set frame range" + order = 11 + icon = "clock-o" + color = "white" + + def load(self, context, name, namespace, data): + + version = context['version'] + version_data = version.get("data", {}) + + start = version_data.get("startFrame", None) + end = version_data.get("endFrame", None) + + if start is None or end is None: + print("Skipping setting frame range because start or " + "end frame data is missing..") + return + + _set_frame_range(start, end) + + +class FusionSetFrameRangeWithHandlesLoader(api.Loader): + """Specific loader of Alembic for the avalon.animation family""" + + families = ["colorbleed.animation", + "colorbleed.camera", + "colorbleed.imagesequence", + "colorbleed.yeticache", + "colorbleed.pointcache"] + representations = ["*"] + + label = "Set frame range (with handles)" + order = 12 + icon = "clock-o" + color = "white" + + def load(self, context, name, namespace, data): + + version = context['version'] + version_data = version.get("data", {}) + + start = version_data.get("startFrame", None) + end = version_data.get("endFrame", None) + + if start is None or end is None: + print("Skipping setting frame range because start or " + "end frame data is missing..") + return + + # Include handles + handles = version_data.get("handles", 0) + start -= handles + end += handles + + _set_frame_range(start, end)
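
# ---------------------------------------------------------------------------
# Editor's note, not part of the patches above: a minimal illustrative sketch
# of the hierarchy helpers introduced in PATCH 05 (iter_parents and
# get_highest_in_hierarchy in colorbleed/maya/lib.py). The real
# get_highest_in_hierarchy first resolves long names via
# maya.cmds.ls(nodes, long=True); this stand-in (here named
# highest_in_hierarchy, with made-up example node names) assumes the long
# names are already resolved so the parent-walking logic can be tried
# outside of Maya.

def iter_parents(node):
    """Yield each parent of a long node name, closest parent first."""
    while True:
        split = node.rsplit("|", 1)
        if len(split) == 1:
            return
        node = split[0]
        yield node


def highest_in_hierarchy(long_names):
    """Return nodes whose parents are not themselves in `long_names`."""
    lookup = set(long_names)
    return [node for node in long_names
            if not any(parent in lookup for parent in iter_parents(node))]


if __name__ == "__main__":
    nodes = ["|rig_GRP",
             "|rig_GRP|controls_GRP|main_ctl",
             "|other_GRP|geo_GRP"]
    # Only "|rig_GRP" and "|other_GRP|geo_GRP" have no parent in the list,
    # so they are the top-most ("highest") nodes of the input.
    print(highest_in_hierarchy(nodes))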