Ensure content and proxy hierarchy is the same.

This commit is contained in:
Toke Stuart Jepsen 2023-02-09 16:28:55 +00:00
parent 713ede5004
commit d516de4ecd
3 changed files with 145 additions and 16 deletions

View file

@@ -21,10 +21,10 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
self.log.warning("Skipped empty instance: \"%s\" " % objset)
continue
if objset.endswith("content_SET"):
instance.data["setMembers"] = members
instance.data["setMembers"] = cmds.ls(members, long=True)
self.log.debug("content members: {}".format(members))
elif objset.endswith("proxy_SET"):
instance.data["proxy"] = members
instance.data["proxy"] = cmds.ls(members, long=True)
self.log.debug("proxy members: {}".format(members))
# Use camera in object set if present else default to render globals

View file

@@ -5,14 +5,16 @@ from maya import cmds
import arnold
from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import maintained_selection, attribute_values
from openpype.hosts.maya.api.lib import (
maintained_selection, attribute_values, delete_after
)
from openpype.lib import StringTemplate
class ExtractArnoldSceneSource(publish.Extractor):
"""Extract the content of the instance to an Arnold Scene Source file."""
label = "Arnold Scene Source"
label = "Extract Arnold Scene Source"
hosts = ["maya"]
families = ["ass"]
asciiAss = False
@@ -124,22 +126,43 @@ class ExtractArnoldSceneSource(publish.Extractor):
def _extract(self, nodes, attribute_data, kwargs):
    """Export *nodes* to Arnold Scene Source file(s) and return the file names.

    Args:
        nodes (list): Transform nodes to export.
        attribute_data (dict): Attribute/value pairs applied (and restored)
            around the export via ``attribute_values``.
        kwargs (dict): Keyword arguments forwarded to
            ``cmds.arnoldExportAss``.

    Returns:
        list: Base names (no directory) of every exported file.
    """
    filenames = []
    # Duplicating nodes so they are direct children of the world. This
    # makes the hierarchy of any exported ass file the same.
    with delete_after() as delete_bin:
        duplicate_nodes = []
        for node in nodes:
            duplicate_transform = cmds.duplicate(node)[0]
            delete_bin.append(duplicate_transform)

            # Discard the children but keep the shapes of the duplicated
            # transform itself. listRelatives returns None (not []) when
            # there are no matches, so guard both queries.
            shapes = cmds.listRelatives(
                duplicate_transform, shapes=True
            ) or []
            children = cmds.listRelatives(
                duplicate_transform, children=True
            ) or []
            cmds.delete(set(children) - set(shapes))

            # Re-parent to world so every exported node shares the same
            # (flat) hierarchy; cmds.parent returns the new name.
            duplicate_transform = cmds.parent(
                duplicate_transform, world=True
            )[0]
            duplicate_nodes.append(duplicate_transform)

        with attribute_values(attribute_data):
            with maintained_selection():
                self.log.info(
                    "Writing: {}".format(duplicate_nodes)
                )
                cmds.select(duplicate_nodes, noExpand=True)
                self.log.info(
                    "Extracting ass sequence with: {}".format(kwargs)
                )
                exported_files = cmds.arnoldExportAss(**kwargs)
                for file in exported_files:
                    # Only the base name is reported; the directory is
                    # implied by kwargs["filename"].
                    filenames.append(os.path.split(file)[1])
                self.log.info("Exported: {}".format(filenames))
    return filenames

View file

@@ -0,0 +1,106 @@
import os
import types
import maya.cmds as cmds
from mtoa.core import createOptions
import pyblish.api
from openpype.pipeline.publish import (
ValidateContentsOrder, PublishValidationError
)
class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
    """Validate Arnold Scene Source.

    If using proxies we need the nodes to share the same names and not be
    parent to the world. This ends up needing at least two groups with content
    nodes and proxy nodes in another.
    """

    order = ValidateContentsOrder
    hosts = ["maya"]
    families = ["ass"]
    label = "Validate Arnold Scene Source"

    def _get_nodes_data(self, nodes):
        """Collect hierarchy info for *nodes* (full/long node paths).

        Args:
            nodes (list): Long (|-delimited) node names.

        Returns:
            tuple: (ungrouped_nodes, nodes_by_name, parents) where
                ungrouped_nodes are nodes parented directly to the world,
                nodes_by_name maps short name -> long name (transforms and
                their shapes), and parents are the long parent paths seen.
        """
        ungrouped_nodes = []
        nodes_by_name = {}
        parents = []
        for node in nodes:
            node_split = node.split("|")
            # A long name like "|node" splits into ["", "node"]; length 2
            # means the node is a direct child of the world.
            if len(node_split) == 2:
                ungrouped_nodes.append(node)

            parent = "|".join(node_split[:-1])
            if parent:
                parents.append(parent)

            nodes_by_name[node_split[-1]] = node
            # listRelatives returns None (not []) when there are no shapes,
            # e.g. for group transforms, so guard the iteration.
            for shape in cmds.listRelatives(node, shapes=True) or []:
                nodes_by_name[shape.split("|")[-1]] = shape

        return ungrouped_nodes, nodes_by_name, parents

    def process(self, instance):
        """Validate content/proxy hierarchies match; raise on violation.

        Raises:
            PublishValidationError: When nodes are parented to the world,
                content/proxy node counts differ, content and proxy share a
                parent, or content and proxy names do not match.
        """
        # Proxies are optional; nothing to validate without them.
        if not instance.data["proxy"]:
            return

        ungrouped_nodes = []

        nodes, content_nodes_by_name, content_parents = self._get_nodes_data(
            instance.data["setMembers"]
        )
        ungrouped_nodes.extend(nodes)

        nodes, proxy_nodes_by_name, proxy_parents = self._get_nodes_data(
            instance.data["proxy"]
        )
        ungrouped_nodes.extend(nodes)

        # Validate against nodes directly parented to world.
        if ungrouped_nodes:
            raise PublishValidationError(
                "Found nodes parented to the world: {}\n"
                "All nodes need to be grouped.".format(ungrouped_nodes)
            )

        # Validate for content and proxy nodes amount being the same.
        if len(instance.data["setMembers"]) != len(instance.data["proxy"]):
            raise PublishValidationError(
                "Amount of content nodes ({}) and proxy nodes ({}) needs to "
                "be the same.".format(
                    len(instance.data["setMembers"]),
                    len(instance.data["proxy"])
                )
            )

        # Validate against content and proxy nodes sharing same parent.
        if set(content_parents) & set(proxy_parents):
            raise PublishValidationError(
                "Content and proxy nodes cannot share the same parent."
            )

        # Validate for content and proxy nodes sharing same names.
        sorted_content_names = sorted(content_nodes_by_name.keys())
        sorted_proxy_names = sorted(proxy_nodes_by_name.keys())

        odd_content_names = list(
            set(sorted_content_names) - set(sorted_proxy_names)
        )
        odd_content_nodes = [
            content_nodes_by_name[x] for x in odd_content_names
        ]
        odd_proxy_names = list(
            set(sorted_proxy_names) - set(sorted_content_names)
        )
        odd_proxy_nodes = [
            proxy_nodes_by_name[x] for x in odd_proxy_names
        ]
        if sorted_content_names != sorted_proxy_names:
            raise PublishValidationError(
                "Content and proxy nodes need to share the same names.\n"
                "Content nodes not matching: {}\n"
                "Proxy nodes not matching: {}".format(
                    odd_content_nodes, odd_proxy_nodes
                )
            )