refactor, publishing render sets as model metadata

Instead of loading the whole render-augmented model from a
Loader, *import* those render sets from the scene inventory
as the model's metadata, which allows a lookDev artist to
modify them and optionally publish a look from there.
This commit is contained in:
David Lai 2021-09-08 05:57:12 +08:00
parent 9f11165e87
commit 7eed3da7c3
5 changed files with 106 additions and 150 deletions

View file

@ -35,6 +35,7 @@ def install():
pyblish.register_plugin_path(PUBLISH_PATH)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
log.info(PUBLISH_PATH)
menu.install()
@ -97,6 +98,7 @@ def uninstall():
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
menu.uninstall()

View file

@ -0,0 +1,85 @@
from avalon import api, io
class ImportModelRender(api.InventoryAction):
    """Inventory action that imports a model's published render sets.

    For each selected model container, finds the sibling
    ``meta.render.ma`` / ``meta.render.json`` representations of the
    container's version, imports the Maya file and re-applies the stored
    set relationships onto the referenced model nodes.
    """

    label = "Import Model Render Sets"
    icon = "industry"
    color = "#55DDAA"

    # Representation names published by the model render-set extractor.
    scene_type = "meta.render.ma"
    look_data_type = "meta.render.json"

    @staticmethod
    def is_compatible(container):
        # Only model subsets loaded via the ReferenceLoader qualify.
        return (container.get("loader") == "ReferenceLoader"
                and container.get("name", "").startswith("model"))

    def process(self, containers):
        from maya import cmds

        for container in containers:
            container_name = container["objectName"]

            # Collect the container's member nodes, expanding reference
            # nodes into the nodes they bring in.
            nodes = []
            for n in cmds.sets(container_name, query=True,
                               nodesOnly=True) or []:
                if cmds.nodeType(n) == "reference":
                    nodes += cmds.referenceQuery(n, nodes=True)
                else:
                    nodes.append(n)

            repr_doc = io.find_one({
                "_id": io.ObjectId(container["representation"]),
            })
            version_id = repr_doc["parent"]

            print("Importing render sets for model %r" % container_name)
            self.assign_model_render_by_version(nodes, version_id)

    def assign_model_render_by_version(self, nodes, version_id):
        """Assign nodes a specific published model render data version by id.

        This assumes the nodes correspond with the asset.

        Args:
            nodes (list): nodes to assign render data to
            version_id (bson.ObjectId): database id of the version of model

        Returns:
            None
        """
        import json
        from maya import cmds
        from avalon import maya, io, pipeline
        from openpype.hosts.maya.api import lib

        # Get representations of shader file and relationships
        look_representation = io.find_one({"type": "representation",
                                           "parent": version_id,
                                           "name": self.scene_type})
        if not look_representation:
            print("No model render sets for this model version..")
            return

        json_representation = io.find_one({"type": "representation",
                                           "parent": version_id,
                                           "name": self.look_data_type})
        # Fix: guard the relationship lookup too — previously a version
        # with a scene file but no JSON crashed with a TypeError on
        # ``json_representation['_id']`` below.
        if not json_representation:
            print("No model render set relationships for this model "
                  "version..")
            return

        context = pipeline.get_representation_context(
            look_representation['_id'])
        maya_file = pipeline.get_representation_path_from_context(context)

        context = pipeline.get_representation_context(
            json_representation['_id'])
        json_file = pipeline.get_representation_path_from_context(context)

        # Import the look file
        with maya.maintained_selection():
            shader_nodes = cmds.file(maya_file,
                                     i=True,  # import
                                     returnNewNodes=True)
            # imprint context data

        # Load relationships
        shader_relation = json_file
        with open(shader_relation, "r") as f:
            relationships = json.load(f)

        # Assign relationships
        lib.apply_shaders(relationships, shader_nodes, nodes)

View file

@ -152,15 +152,3 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
options={"useSelection": True},
data={"dependencies": dependency}
)
class AugmentedModelLoader(ReferenceLoader):
    """Load augmented model via Maya referencing"""

    # Matches published "model" subsets whose representation carries the
    # "fried" (augmented) extension variants.
    families = ["model"]
    representations = ["fried.ma", "fried.mb"]

    # UI presentation of this loader in the loader menu.
    label = "Fried Model"
    # NOTE(review): negative order presumably sorts this loader ahead of
    # the default reference loaders — confirm against the loader registry.
    order = -9
    icon = "code-fork"
    color = "yellow"

View file

@ -223,8 +223,8 @@ class CollectLook(pyblish.api.InstancePlugin):
def process(self, instance):
"""Collect the Look in the instance with the correct layer settings"""
with lib.renderlayer(instance.data["renderlayer"]):
renderlayer = instance.data.get("renderlayer", "defaultRenderLayer")
with lib.renderlayer(renderlayer):
self.collect(instance)
def collect(self, instance):
@ -579,26 +579,6 @@ class CollectModelRenderSets(CollectLook):
hosts = ["maya"]
maketx = True
def process(self, instance):
    """Collect the Look in the instance with the correct layer settings.

    Snapshots the instance members, runs the regular look collection
    under the instance's render layer, then moves anything newly
    collected (the render sets) into ``modelRenderSets`` metadata so the
    instance itself keeps only the original model nodes.

    Args:
        instance: pyblish instance to collect render sets for.

    Returns:
        None
    """
    model_nodes = instance[:]
    # Hoist membership into a set: the filter below was O(n*m) with a
    # list lookup per collected member.
    model_node_lookup = set(model_nodes)

    renderlayer = instance.data.get("renderlayer", "defaultRenderLayer")
    with lib.renderlayer(renderlayer):
        self.collect(instance)

    # Whatever collect() appended beyond the original members are the
    # render sets; restore the instance to just the model nodes.
    set_nodes = [m for m in instance if m not in model_node_lookup]
    instance[:] = model_nodes

    if set_nodes:
        instance.data["modelRenderSets"] = set_nodes
        # future=False + pruneDagObjects keeps only upstream non-DAG
        # history nodes connected to the sets.
        instance.data["modelRenderSetsHistory"] = \
            cmds.listHistory(set_nodes, future=False, pruneDagObjects=True)
        self.log.info("Model render sets collected.")
    else:
        self.log.info("No model render sets.")
def collect_sets(self, instance):
"""Collect all related objectSets except shadingEngines

View file

@ -135,6 +135,7 @@ class ExtractLook(openpype.api.Extractor):
families = ["look"]
order = pyblish.api.ExtractorOrder + 0.2
scene_type = "ma"
look_data_type = "json"
@staticmethod
def get_renderer_name():
@ -186,7 +187,7 @@ class ExtractLook(openpype.api.Extractor):
# Define extract output file path
dir_path = self.staging_dir(instance)
maya_fname = "{0}.{1}".format(instance.name, self.scene_type)
json_fname = "{0}.json".format(instance.name)
json_fname = "{0}.{1}".format(instance.name, self.look_data_type)
# Make texture dump folder
maya_path = os.path.join(dir_path, maya_fname)
@ -252,19 +253,21 @@ class ExtractLook(openpype.api.Extractor):
instance.data["files"].append(maya_fname)
instance.data["files"].append(json_fname)
instance.data["representations"] = []
if instance.data.get("representations") is None:
instance.data["representations"] = []
instance.data["representations"].append(
{
"name": "ma",
"ext": "ma",
"name": self.scene_type,
"ext": self.scene_type,
"files": os.path.basename(maya_fname),
"stagingDir": os.path.dirname(maya_fname),
}
)
instance.data["representations"].append(
{
"name": "json",
"ext": "json",
"name": self.look_data_type,
"ext": self.look_data_type,
"files": os.path.basename(json_fname),
"stagingDir": os.path.dirname(json_fname),
}
@ -483,119 +486,17 @@ class ExtractLook(openpype.api.Extractor):
return filepath, COPY, texture_hash
class ExtractAugmentedModel(ExtractLook):
"""Extract as Augmented Model (Maya Scene).
class ExtractModelRenderSets(ExtractLook):
"""Extract model render attribute sets as model metadata
Rendering attrs augmented model.
Only extracts contents based on the original "setMembers" data to ensure
publishing the least amount of required shapes. From that it only takes
the shapes that are not intermediateObjects
During export it sets a temporary context to perform a clean extraction.
The context ensures:
- Smooth preview is turned off for the geometry
- Default shader is assigned (no materials are exported)
- Remove display layers
Only extracts the render attrib sets (NO shadingEngines) alongside a .json file
that stores it relationships for the sets and "attribute" data for the
instance members.
"""
label = "Augmented Model (Maya Scene)"
label = "Model Render Sets"
hosts = ["maya"]
families = ["model"]
scene_type = "ma"
augmented = "fried"
def process(self, instance):
    """Plugin entry point.

    Exports the instance's model shapes together with their render-set
    history nodes as an "augmented" Maya scene, after dumping textures
    and remapping file attributes via ``process_resources``.

    Args:
        instance: Instance to process.
    """
    # Skip models that have no collected render sets — nothing to augment.
    render_sets = instance.data.get("modelRenderSetsHistory")
    if not render_sets:
        self.log.info("Model is not render augmented, skip extraction.")
        return

    self.get_maya_scene_type(instance)

    if "representations" not in instance.data:
        instance.data["representations"] = []

    # Define extract output file path
    stagingdir = self.staging_dir(instance)
    # Compound extension, e.g. "fried.ma".
    ext = "{0}.{1}".format(self.augmented, self.scene_type)
    filename = "{0}.{1}".format(instance.name, ext)
    path = os.path.join(stagingdir, filename)

    # Perform extraction
    self.log.info("Performing extraction ...")

    # Dump texture resources; gives us transfers/hardlinks for the
    # integrator plus the attribute remapping to apply during export.
    results = self.process_resources(instance, staging_dir=stagingdir)
    transfers = results["fileTransfers"]
    hardlinks = results["fileHardlinks"]
    hashes = results["fileHashes"]
    remap = results["attrRemap"]
    self.log.info(remap)

    # Get only the shape contents we need in such a way that we avoid
    # taking along intermediateObjects
    members = instance.data("setMembers")
    members = cmds.ls(members,
                      dag=True,
                      shapes=True,
                      type=("mesh", "nurbsCurve"),
                      noIntermediate=True,
                      long=True)
    # Include the render-set history nodes in the export selection.
    members += instance.data.get("modelRenderSetsHistory")

    # Export under a clean context: no display layers, default smoothing,
    # default shader (no materials exported).
    with lib.no_display_layers(instance):
        with lib.displaySmoothness(members,
                                   divisionsU=0,
                                   divisionsV=0,
                                   pointsWire=4,
                                   pointsShaded=1,
                                   polygonObject=1):
            with lib.shader(members,
                            shadingEngine="initialShadingGroup"):
                # To avoid Maya trying to automatically remap the file
                # textures relative to the `workspace -directory` we force
                # it to a fake temporary workspace. This fixes textures
                # getting incorrectly remapped. (LKD-17, PLN-101)
                with no_workspace_dir():
                    with lib.attribute_values(remap):
                        with avalon.maya.maintained_selection():
                            cmds.select(members, noExpand=True)
                            cmds.file(path,
                                      force=True,
                                      typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary",  # noqa: E501
                                      exportSelected=True,
                                      preserveReferences=False,
                                      channels=False,
                                      constraints=False,
                                      expressions=False,
                                      constructionHistory=False)

    if "hardlinks" not in instance.data:
        instance.data["hardlinks"] = []
    if "transfers" not in instance.data:
        instance.data["transfers"] = []

    # Set up the resources transfers/links for the integrator
    instance.data["transfers"].extend(transfers)
    instance.data["hardlinks"].extend(hardlinks)

    # Source hash for the textures
    instance.data["sourceHashes"] = hashes

    instance.data["representations"].append({
        'name': ext,
        'ext': ext,
        'files': filename,
        "stagingDir": stagingdir,
    })

    self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))
scene_type = "meta.render.ma"
look_data_type = "meta.render.json"