diff --git a/openpype/hosts/maya/api/__init__.py b/openpype/hosts/maya/api/__init__.py index 3412803714..b9c184e370 100644 --- a/openpype/hosts/maya/api/__init__.py +++ b/openpype/hosts/maya/api/__init__.py @@ -98,6 +98,7 @@ def uninstall(): pyblish.deregister_plugin_path(PUBLISH_PATH) avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH) + avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH) menu.uninstall() diff --git a/openpype/hosts/maya/plugins/inventory/import_modelrender.py b/openpype/hosts/maya/plugins/inventory/import_modelrender.py new file mode 100644 index 0000000000..e3cad4cf2e --- /dev/null +++ b/openpype/hosts/maya/plugins/inventory/import_modelrender.py @@ -0,0 +1,92 @@ +from avalon import api, io + + +class ImportModelRender(api.InventoryAction): + + label = "Import Model Render Sets" + icon = "industry" + color = "#55DDAA" + + scene_type_regex = "meta.render.m[ab]" + look_data_type = "meta.render.json" + + @staticmethod + def is_compatible(container): + return ( + container.get("loader") == "ReferenceLoader" + and container.get("name", "").startswith("model") + ) + + def process(self, containers): + from maya import cmds + + for container in containers: + con_name = container["objectName"] + nodes = [] + for n in cmds.sets(con_name, query=True, nodesOnly=True) or []: + if cmds.nodeType(n) == "reference": + nodes += cmds.referenceQuery(n, nodes=True) + else: + nodes.append(n) + + repr_doc = io.find_one({ + "_id": io.ObjectId(container["representation"]), + }) + version_id = repr_doc["parent"] + + print("Importing render sets for model %r" % con_name) + self.assign_model_render_by_version(nodes, version_id) + + def assign_model_render_by_version(self, nodes, version_id): + """Assign nodes a specific published model render data version by id. + + This assumes the nodes correspond with the asset. 
+ + Args: + nodes(list): nodes to assign render data to + version_id (bson.ObjectId): database id of the version of model + + Returns: + None + """ + import json + from maya import cmds + from avalon import maya, io, pipeline + from openpype.hosts.maya.api import lib + + # Get representations of shader file and relationships + look_repr = io.find_one({ + "type": "representation", + "parent": version_id, + "name": {"$regex": self.scene_type_regex}, + }) + if not look_repr: + print("No model render sets for this model version..") + return + + json_repr = io.find_one({ + "type": "representation", + "parent": version_id, + "name": self.look_data_type, + }) + + context = pipeline.get_representation_context(look_repr["_id"]) + maya_file = pipeline.get_representation_path_from_context(context) + + context = pipeline.get_representation_context(json_repr["_id"]) + json_file = pipeline.get_representation_path_from_context(context) + + # Import the look file + with maya.maintained_selection(): + shader_nodes = cmds.file(maya_file, + i=True, # import + returnNewNodes=True) + # imprint context data + + # Load relationships + shader_relation = json_file + with open(shader_relation, "r") as f: + relationships = json.load(f) + + # Assign relationships + lib.apply_shaders(relationships, shader_nodes, nodes) diff --git a/openpype/hosts/maya/plugins/publish/collect_look.py b/openpype/hosts/maya/plugins/publish/collect_look.py index 0dde52447d..9c047b252f 100644 --- a/openpype/hosts/maya/plugins/publish/collect_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_look.py @@ -223,8 +223,8 @@ class CollectLook(pyblish.api.InstancePlugin): def process(self, instance): """Collect the Look in the instance with the correct layer settings""" - - with lib.renderlayer(instance.data["renderlayer"]): + renderlayer = instance.data.get("renderlayer", "defaultRenderLayer") + with lib.renderlayer(renderlayer): self.collect(instance) def collect(self, instance): @@ -357,6 +357,23 @@ class CollectLook(pyblish.api.InstancePlugin): for vray_node in vray_plugin_nodes: history.extend(cmds.listHistory(vray_node)) + # handling render attribute sets + render_set_types = [ + "VRayDisplacement", + "VRayLightMesh", + "VRayObjectProperties", + "RedshiftObjectId", + "RedshiftMeshParameters", + ] + render_sets = cmds.ls(look_sets, type=render_set_types) + if render_sets: + history.extend( + cmds.listHistory(render_sets, + future=False, + pruneDagObjects=True) + or [] + ) + files = cmds.ls(history, type="file", long=True) files.extend(cmds.ls(history, type="aiImage", long=True)) files.extend(cmds.ls(history, type="RedshiftNormalMap", long=True)) @@ -550,3 +567,45 @@ class CollectLook(pyblish.api.InstancePlugin): "source": source, # required for resources "files": files, "color_space": color_space} # required for resources + + +class CollectModelRenderSets(CollectLook): + """Collect render attribute sets for model instance. + + Collects additional render attribute sets so they can be + published with model. 
+ + """ + + order = pyblish.api.CollectorOrder + 0.21 + families = ["model"] + label = "Collect Model Render Sets" + hosts = ["maya"] + maketx = True + + def collect_sets(self, instance): + """Collect all related objectSets except shadingEngines + + Args: + instance (list): all nodes to be published + + Returns: + dict + """ + + sets = {} + for node in instance: + related_sets = lib.get_related_sets(node) + if not related_sets: + continue + + for objset in related_sets: + if objset in sets: + continue + + if "shadingEngine" in cmds.nodeType(objset, inherited=True): + continue + + sets[objset] = {"uuid": lib.get_id(objset), "members": list()} + + return sets diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index f09d50d714..bbf25ebdc7 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -122,7 +122,7 @@ def no_workspace_dir(): class ExtractLook(openpype.api.Extractor): - """Extract Look (Maya Ascii + JSON) + """Extract Look (Maya Scene + JSON) Only extracts the sets (shadingEngines and alike) alongside a .json file that stores it relationships for the sets and "attribute" data for the @@ -130,11 +130,12 @@ class ExtractLook(openpype.api.Extractor): """ - label = "Extract Look (Maya ASCII + JSON)" + label = "Extract Look (Maya Scene + JSON)" hosts = ["maya"] families = ["look"] order = pyblish.api.ExtractorOrder + 0.2 scene_type = "ma" + look_data_type = "json" @staticmethod def get_renderer_name(): @@ -176,6 +177,8 @@ class ExtractLook(openpype.api.Extractor): # no preset found pass + return "mayaAscii" if self.scene_type == "ma" else "mayaBinary" + def process(self, instance): """Plugin entry point. @@ -183,10 +186,12 @@ class ExtractLook(openpype.api.Extractor): instance: Instance to process. """ + _scene_type = self.get_maya_scene_type(instance) + # Define extract output file path dir_path = self.staging_dir(instance) maya_fname = "{0}.{1}".format(instance.name, self.scene_type) - json_fname = "{0}.json".format(instance.name) + json_fname = "{0}.{1}".format(instance.name, self.look_data_type) # Make texture dump folder maya_path = os.path.join(dir_path, maya_fname) @@ -196,11 +201,100 @@ class ExtractLook(openpype.api.Extractor): # Remove all members of the sets so they are not included in the # exported file by accident - self.log.info("Extract sets (Maya ASCII) ...") + self.log.info("Extract sets (%s) ..." % _scene_type) lookdata = instance.data["lookData"] relationships = lookdata["relationships"] sets = relationships.keys() + results = self.process_resources(instance, staging_dir=dir_path) + transfers = results["fileTransfers"] + hardlinks = results["fileHardlinks"] + hashes = results["fileHashes"] + remap = results["attrRemap"] + + # Extract in correct render layer + layer = instance.data.get("renderlayer", "defaultRenderLayer") + with lib.renderlayer(layer): + # TODO: Ensure membership edits don't become renderlayer overrides + with lib.empty_sets(sets, force=True): + # To avoid Maya trying to automatically remap the file + # textures relative to the `workspace -directory` we force + # it to a fake temporary workspace. This fixes textures + # getting incorrectly remapped. 
(LKD-17, PLN-101) + with no_workspace_dir(): + with lib.attribute_values(remap): + with avalon.maya.maintained_selection(): + cmds.select(sets, noExpand=True) + cmds.file( + maya_path, + force=True, + typ=_scene_type, + exportSelected=True, + preserveReferences=False, + channels=True, + constraints=True, + expressions=True, + constructionHistory=True, + ) + + # Write the JSON data + self.log.info("Extract json..") + data = { + "attributes": lookdata["attributes"], + "relationships": relationships + } + + with open(json_path, "w") as f: + json.dump(data, f) + + if "files" not in instance.data: + instance.data["files"] = [] + if "hardlinks" not in instance.data: + instance.data["hardlinks"] = [] + if "transfers" not in instance.data: + instance.data["transfers"] = [] + + instance.data["files"].append(maya_fname) + instance.data["files"].append(json_fname) + + if instance.data.get("representations") is None: + instance.data["representations"] = [] + + instance.data["representations"].append( + { + "name": self.scene_type, + "ext": self.scene_type, + "files": os.path.basename(maya_fname), + "stagingDir": os.path.dirname(maya_fname), + } + ) + instance.data["representations"].append( + { + "name": self.look_data_type, + "ext": self.look_data_type, + "files": os.path.basename(json_fname), + "stagingDir": os.path.dirname(json_fname), + } + ) + + # Set up the resources transfers/links for the integrator + instance.data["transfers"].extend(transfers) + instance.data["hardlinks"].extend(hardlinks) + + # Source hash for the textures + instance.data["sourceHashes"] = hashes + + """ + self.log.info("Returning colorspaces to their original values ...") + for attr, value in remap.items(): + self.log.info(" - {}: {}".format(attr, value)) + cmds.setAttr(attr, value, type="string") + """ + self.log.info("Extracted instance '%s' to: %s" % (instance.name, + maya_path)) + + def process_resources(self, instance, staging_dir): + # Extract the textures to transfer, possibly convert with maketx and # remap the node paths to the destination path. Note that a source # might be included more than once amongst the resources as they could @@ -218,7 +312,6 @@ class ExtractLook(openpype.api.Extractor): color_space = resource.get("color_space") for f in resource["files"]: - files_metadata[os.path.normpath(f)] = { "color_space": color_space} # files.update(os.path.normpath(f)) @@ -244,7 +337,7 @@ class ExtractLook(openpype.api.Extractor): source, mode, texture_hash = self._process_texture( filepath, do_maketx, - staging=dir_path, + staging=staging_dir, linearize=linearize, force=force_copy ) @@ -299,85 +392,13 @@ class ExtractLook(openpype.api.Extractor): self.log.info("Finished remapping destinations ...") - # Extract in correct render layer - layer = instance.data.get("renderlayer", "defaultRenderLayer") - with lib.renderlayer(layer): - # TODO: Ensure membership edits don't become renderlayer overrides - with lib.empty_sets(sets, force=True): - # To avoid Maya trying to automatically remap the file - # textures relative to the `workspace -directory` we force - # it to a fake temporary workspace. This fixes textures - # getting incorrectly remapped. 
(LKD-17, PLN-101) - with no_workspace_dir(): - with lib.attribute_values(remap): - with avalon.maya.maintained_selection(): - cmds.select(sets, noExpand=True) - cmds.file( - maya_path, - force=True, - typ="mayaAscii", - exportSelected=True, - preserveReferences=False, - channels=True, - constraints=True, - expressions=True, - constructionHistory=True, - ) - - # Write the JSON data - self.log.info("Extract json..") - data = { - "attributes": lookdata["attributes"], - "relationships": relationships + return { + "fileTransfers": transfers, + "fileHardlinks": hardlinks, + "fileHashes": hashes, + "attrRemap": remap, } - with open(json_path, "w") as f: - json.dump(data, f) - - if "files" not in instance.data: - instance.data["files"] = [] - if "hardlinks" not in instance.data: - instance.data["hardlinks"] = [] - if "transfers" not in instance.data: - instance.data["transfers"] = [] - - instance.data["files"].append(maya_fname) - instance.data["files"].append(json_fname) - - instance.data["representations"] = [] - instance.data["representations"].append( - { - "name": "ma", - "ext": "ma", - "files": os.path.basename(maya_fname), - "stagingDir": os.path.dirname(maya_fname), - } - ) - instance.data["representations"].append( - { - "name": "json", - "ext": "json", - "files": os.path.basename(json_fname), - "stagingDir": os.path.dirname(json_fname), - } - ) - - # Set up the resources transfers/links for the integrator - instance.data["transfers"].extend(transfers) - instance.data["hardlinks"].extend(hardlinks) - - # Source hash for the textures - instance.data["sourceHashes"] = hashes - - """ - self.log.info("Returning colorspaces to their original values ...") - for attr, value in remap.items(): - self.log.info(" - {}: {}".format(attr, value)) - cmds.setAttr(attr, value, type="string") - """ - self.log.info("Extracted instance '%s' to: %s" % (instance.name, - maya_path)) - def resource_destination(self, instance, filepath, do_maketx): """Get resource destination path. @@ -467,3 +488,26 @@ class ExtractLook(openpype.api.Extractor): return converted, COPY, texture_hash return filepath, COPY, texture_hash + + +class ExtractModelRenderSets(ExtractLook): + """Extract model render attribute sets as model metadata + + Only extracts the render attrib sets (NO shadingEngines) alongside + a .json file that stores it relationships for the sets and "attribute" + data for the instance members. + + """ + + label = "Model Render Sets" + hosts = ["maya"] + families = ["model"] + scene_type_prefix = "meta.render." + look_data_type = "meta.render.json" + + def get_maya_scene_type(self, instance): + typ = super(ExtractModelRenderSets, self).get_maya_scene_type(instance) + # add prefix + self.scene_type = self.scene_type_prefix + self.scene_type + + return typ
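
A few notes on how the new pieces fit together, with illustrative sketches (none of this code is part of the patch). The new `ImportModelRender` inventory action is only offered for containers that pass its `is_compatible()` check: reference-loaded containers whose subset name starts with "model". A minimal sketch of that filter on plain container dicts — the field names mirror what the action reads, and the import path follows the new file's location, assuming the inventory folder is importable as a package:

```python
from openpype.hosts.maya.plugins.inventory.import_modelrender import (
    ImportModelRender,
)

# Container metadata as the Scene Inventory hands it to inventory actions.
# Only "loader" and "name" matter for the compatibility check.
containers = [
    {"loader": "ReferenceLoader", "name": "modelMain"},   # accepted
    {"loader": "ReferenceLoader", "name": "lookMain"},    # wrong subset prefix
    {"loader": "AbcLoader", "name": "modelProxy"},        # wrong loader
]

compatible = [c for c in containers if ImportModelRender.is_compatible(c)]
assert [c["name"] for c in compatible] == ["modelMain"]
```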
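
On import, the render-set scene file is found by representation name with a Mongo `$regex` query against `scene_type_regex` ("meta.render.m[ab]"), while the relationships JSON is looked up by its exact name. A small sketch of which representation names that pattern accepts; `re.match` stands in for the unanchored `$regex` here, and for these names the outcome is the same:

```python
import re

scene_type_regex = "meta.render.m[ab]"   # ImportModelRender.scene_type_regex

names = ["meta.render.ma", "meta.render.mb", "meta.render.json", "ma", "json"]
matched = [n for n in names if re.match(scene_type_regex, n)]

# Both the Maya Ascii and Maya Binary render-set exports match; the JSON
# relationships representation does not, and is queried by exact name instead.
assert matched == ["meta.render.ma", "meta.render.mb"]
```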
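
`CollectModelRenderSets` reuses the look collection but swaps in a `collect_sets()` that keeps every related objectSet except shading engines, so only render attribute sets (VRay/Redshift object properties, displacement, and similar) travel with the model. A rough stand-in for that filter, with hypothetical set names and made-up inherited-type lists in place of `cmds.nodeType(objset, inherited=True)`:

```python
# Hypothetical stand-in for cmds.nodeType(objset, inherited=True):
# each related set name maps to an illustrative inherited-type list.
inherited_types = {
    "body_VRayDisplacement": ["objectSet", "VRayDisplacement"],
    "lambert1SG": ["objectSet", "shadingEngine"],
    "body_RedshiftMeshParameters": ["objectSet", "RedshiftMeshParameters"],
}

# Same rule as CollectModelRenderSets.collect_sets(): anything deriving from
# shadingEngine is skipped, the rest is collected for the model publish.
collected = {
    objset: {"uuid": None, "members": []}   # uuid comes from lib.get_id()
    for objset, types in inherited_types.items()
    if "shadingEngine" not in types
}
assert sorted(collected) == ["body_RedshiftMeshParameters",
                             "body_VRayDisplacement"]
```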
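
`ExtractLook.process()` now registers both outputs as representations instead of only appending to `instance.data["files"]`, and it extends an existing representation list rather than overwriting it. A sketch of the two entries it appends for a hypothetical look instance named "lookMain" with the default `scene_type` and `look_data_type`; the staging directory value is a placeholder for whatever `self.staging_dir(instance)` resolved to:

```python
# Hypothetical instance name; the keys are the ones the extractor fills in
# for the integrator.
instance_name = "lookMain"
scene_type = "ma"        # ExtractLook.scene_type default
look_data_type = "json"  # ExtractLook.look_data_type default

maya_fname = "{0}.{1}".format(instance_name, scene_type)
json_fname = "{0}.{1}".format(instance_name, look_data_type)

representations = [
    {"name": scene_type, "ext": scene_type, "files": maya_fname,
     "stagingDir": "/path/to/staging"},      # placeholder staging directory
    {"name": look_data_type, "ext": look_data_type, "files": json_fname,
     "stagingDir": "/path/to/staging"},
]
assert representations[0]["files"] == "lookMain.ma"
assert representations[1]["files"] == "lookMain.json"
```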
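
`ExtractModelRenderSets` only overrides naming: `get_maya_scene_type()` still resolves the Maya file type, but the plugin's `scene_type` gets the "meta.render." prefix, so the model's render sets publish as extra representations next to the model itself. A short walk-through with a hypothetical instance named "modelMain" and the inherited default `scene_type` of "ma", showing that the resulting representation name is exactly what `ImportModelRender.scene_type_regex` matches on the import side:

```python
import re

scene_type = "ma"                      # inherited ExtractLook default
scene_type_prefix = "meta.render."
look_data_type = "meta.render.json"

# get_maya_scene_type() prefixes the extension used for file and
# representation names (with the default "ma", the Maya file type
# passed to cmds.file() stays "mayaAscii").
scene_type = scene_type_prefix + scene_type

maya_fname = "modelMain.{0}".format(scene_type)
json_fname = "modelMain.{0}".format(look_data_type)

assert maya_fname == "modelMain.meta.render.ma"
assert json_fname == "modelMain.meta.render.json"

# The representation name written by the extractor is scene_type, which is
# what the inventory action's regex looks up later.
assert re.match("meta.render.m[ab]", scene_type)
```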