Merge pull request #5573 from BigRoy/feature/maya_usd_native_support

This commit is contained in:
Ondřej Samohel 2023-09-19 17:15:36 +02:00 committed by GitHub
commit e6ce9fd9e1
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
12 changed files with 654 additions and 34 deletions

View file

@ -129,18 +129,49 @@ class MayaCreatorBase(object):
shared_data["maya_cached_legacy_subsets"] = cache_legacy
return shared_data
def get_publish_families(self):
    """Return families for the instances of this creator.

    Allow a Creator to define multiple families so that a creator can
    e.g. specify `usd` and `usdMaya` and another USD creator can also
    specify `usd` but apply different extractors like `usdMultiverse`.

    There is no need to override this method if you only have the
    primary family defined by the `family` property as that will always
    be set.

    Returns:
        list: families for instances of this creator

    """
    # Default: no extra families beyond the `family` property.
    return []
def imprint_instance_node(self, node, data):
# We never store the instance_node as value on the node since
# it's the node name itself
data.pop("instance_node", None)
# Don't store `families` since it's up to the creator itself
# to define the initial publish families - not a stored attribute of
# `families`
data.pop("families", None)
# We store creator attributes at the root level and assume they
# will not clash in names with `subset`, `task`, etc. and other
# default names. This is just so these attributes in many cases
# are still editable in the maya UI by artists.
# note: pop to move to end of dict to sort attributes last on the node
creator_attributes = data.pop("creator_attributes", {})
# We only flatten value types which `imprint` function supports
json_creator_attributes = {}
for key, value in dict(creator_attributes).items():
if isinstance(value, (list, tuple, dict)):
creator_attributes.pop(key)
json_creator_attributes[key] = value
# Flatten remaining creator attributes to the node itself
data.update(creator_attributes)
# We know the "publish_attributes" will be complex data of
@ -150,6 +181,10 @@ class MayaCreatorBase(object):
data.pop("publish_attributes", {})
)
# Persist the non-flattened creator attributes (special value types,
# like multiselection EnumDef)
data["creator_attributes"] = json.dumps(json_creator_attributes)
# Since we flattened the data structure for creator attributes we want
# to correctly detect which flattened attributes should end back in the
# creator attributes when reading the data from the node, so we store
@ -170,15 +205,22 @@ class MayaCreatorBase(object):
# being read as 'data'
node_data.pop("cbId", None)
# Make sure we convert any creator attributes from the json string
creator_attributes = node_data.get("creator_attributes")
if creator_attributes:
node_data["creator_attributes"] = json.loads(creator_attributes)
else:
node_data["creator_attributes"] = {}
# Move the relevant attributes into "creator_attributes" that
# we flattened originally
node_data["creator_attributes"] = {}
creator_attribute_keys = node_data.pop("__creator_attributes_keys",
"").split(",")
for key in creator_attribute_keys:
if key in node_data:
node_data["creator_attributes"][key] = node_data.pop(key)
# Make sure we convert any publish attributes from the json string
publish_attributes = node_data.get("publish_attributes")
if publish_attributes:
node_data["publish_attributes"] = json.loads(publish_attributes)
@ -186,6 +228,11 @@ class MayaCreatorBase(object):
# Explicitly re-parse the node name
node_data["instance_node"] = node
# If the creator plug-in specifies
families = self.get_publish_families()
if families:
node_data["families"] = families
return node_data
def _default_collect_instances(self):
@ -230,6 +277,14 @@ class MayaCreator(NewCreator, MayaCreatorBase):
if pre_create_data.get("use_selection"):
members = cmds.ls(selection=True)
# Allow a Creator to define multiple families
publish_families = self.get_publish_families()
if publish_families:
families = instance_data.setdefault("families", [])
for family in self.get_publish_families():
if family not in families:
families.append(family)
with lib.undo_chunk():
instance_node = cmds.sets(members, name=subset_name)
instance_data["instance_node"] = instance_node

View file

@ -0,0 +1,102 @@
from openpype.hosts.maya.api import plugin, lib
from openpype.lib import (
BoolDef,
EnumDef,
TextDef
)
from maya import cmds
class CreateMayaUsd(plugin.MayaCreator):
    """Create Maya USD Export"""

    identifier = "io.openpype.creators.maya.mayausd"
    label = "Maya USD"
    family = "usd"
    icon = "cubes"
    description = "Create Maya USD Export"

    # Class-level cache so the job-context plug-in query below runs only
    # once per session instead of once per created instance.
    cache = {}

    def get_publish_families(self):
        """Return publish families: generic `usd` plus `mayaUsd` so the
        Maya USD extractor can target instances of this creator."""
        return ["usd", "mayaUsd"]

    def get_instance_attr_defs(self):
        """Return attribute definitions for instances of this creator.

        Queries the available `mayaUsdPlugin` export job contexts once
        and caches them on the class.

        Returns:
            list: Attribute definitions (animation defs plus USD export
                options).
        """

        if "jobContextItems" not in self.cache:
            # Query once instead of per instance
            job_context_items = {}
            try:
                cmds.loadPlugin("mayaUsdPlugin", quiet=True)
                job_context_items = {
                    cmds.mayaUSDListJobContexts(jobContext=name): name
                    for name in cmds.mayaUSDListJobContexts(export=True) or []
                }
            except RuntimeError:
                # Likely `mayaUsdPlugin` plug-in not available
                self.log.warning("Unable to retrieve available job "
                                 "contexts for `mayaUsdPlugin` exports")

            if not job_context_items:
                # enumdef multiselection may not be empty
                job_context_items = ["<placeholder; do not use>"]

            self.cache["jobContextItems"] = job_context_items

        defs = lib.collect_animation_defs()
        defs.extend([
            EnumDef("defaultUSDFormat",
                    label="File format",
                    items={
                        "usdc": "Binary",
                        "usda": "ASCII"
                    },
                    default="usdc"),
            BoolDef("stripNamespaces",
                    label="Strip Namespaces",
                    tooltip=(
                        "Remove namespaces during export. By default, "
                        "namespaces are exported to the USD file in the "
                        "following format: nameSpaceExample_pPlatonic1"
                    ),
                    default=True),
            BoolDef("mergeTransformAndShape",
                    label="Merge Transform and Shape",
                    tooltip=(
                        # Fix: trailing space added so the concatenated
                        # tooltip reads "USD prim" instead of "USDprim".
                        "Combine Maya transform and shape into a single USD "
                        "prim that has transform and geometry, for all"
                        " \"geometric primitives\" (gprims).\n"
                        "This results in smaller and faster scenes. Gprims "
                        "will be \"unpacked\" back into transform and shape "
                        "nodes when imported into Maya from USD."
                    ),
                    default=True),
            BoolDef("includeUserDefinedAttributes",
                    label="Include User Defined Attributes",
                    tooltip=(
                        "Whether to include all custom maya attributes found "
                        "on nodes as metadata (userProperties) in USD."
                    ),
                    default=False),
            TextDef("attr",
                    label="Custom Attributes",
                    default="",
                    placeholder="attr1, attr2"),
            TextDef("attrPrefix",
                    label="Custom Attributes Prefix",
                    default="",
                    placeholder="prefix1, prefix2"),
            EnumDef("jobContext",
                    label="Job Context",
                    items=self.cache["jobContextItems"],
                    tooltip=(
                        "Specifies an additional export context to handle.\n"
                        "These usually contain extra schemas, primitives,\n"
                        "and materials that are to be exported for a "
                        "specific\ntask, a target renderer for example."
                    ),
                    multiselection=True),
        ])

        return defs

View file

@ -14,6 +14,10 @@ class CreateMultiverseUsd(plugin.MayaCreator):
label = "Multiverse USD Asset"
family = "usd"
icon = "cubes"
description = "Create Multiverse USD Asset"
def get_publish_families(self):
    """Return publish families: generic `usd` plus the Multiverse-specific
    `mvUsd` family (targeted by the Multiverse USD extractor)."""
    return ["usd", "mvUsd"]
def get_instance_attr_defs(self):

View file

@ -17,6 +17,7 @@ from openpype.hosts.maya.api.lib import (
)
from openpype.hosts.maya.api.pipeline import containerise
def is_sequence(files):
sequence = False
collections, remainder = clique.assemble(files, minimum_items=1)
@ -29,11 +30,12 @@ def get_current_session_fps():
session_fps = float(legacy_io.Session.get('AVALON_FPS', 25))
return convert_to_maya_fps(session_fps)
class ArnoldStandinLoader(load.LoaderPlugin):
"""Load as Arnold standin"""
families = ["ass", "animation", "model", "proxyAbc", "pointcache"]
representations = ["ass", "abc"]
families = ["ass", "animation", "model", "proxyAbc", "pointcache", "usd"]
representations = ["ass", "abc", "usda", "usdc", "usd"]
label = "Load as Arnold standin"
order = -5

View file

@ -0,0 +1,108 @@
# -*- coding: utf-8 -*-
import maya.cmds as cmds
from openpype.pipeline import (
load,
get_representation_path,
)
from openpype.pipeline.load import get_representation_path_from_context
from openpype.hosts.maya.api.lib import (
namespaced,
unique_namespace
)
from openpype.hosts.maya.api.pipeline import containerise
class MayaUsdLoader(load.LoaderPlugin):
    """Read USD data in a Maya USD Proxy"""

    families = ["model", "usd", "pointcache", "animation"]
    representations = ["usd", "usda", "usdc", "usdz", "abc"]

    label = "Load USD to Maya Proxy"
    order = -1
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, options=None):
        """Load the representation as a `mayaUsdProxyShape` in a new
        namespace and return the containerised nodes."""
        asset = context['asset']['name']
        namespace = namespace or unique_namespace(
            asset + "_",
            prefix="_" if asset[0].isdigit() else "",
            suffix="_",
        )

        # Make sure we can load the plugin
        cmds.loadPlugin("mayaUsdPlugin", quiet=True)

        path = get_representation_path_from_context(context)

        # Create the shape
        cmds.namespace(addNamespace=namespace)
        with namespaced(namespace, new=False):
            transform = cmds.createNode("transform",
                                        name=name,
                                        skipSelect=True)
            proxy = cmds.createNode('mayaUsdProxyShape',
                                    name="{}Shape".format(name),
                                    parent=transform,
                                    skipSelect=True)

            # Drive the proxy's time from the scene time node
            cmds.connectAttr("time1.outTime", "{}.time".format(proxy))
            cmds.setAttr("{}.filePath".format(proxy), path, type="string")

            # By default, we force the proxy to not use a shared stage because
            # when doing so Maya will quite easily allow to save into the
            # loaded usd file. Since we are loading published files we want to
            # avoid altering them. Unshared stages also save their edits into
            # the workfile as an artist might expect it to do.
            cmds.setAttr("{}.shareStage".format(proxy), False)
            # cmds.setAttr("{}.shareStage".format(proxy), lock=True)

        nodes = [transform, proxy]
        self[:] = nodes

        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)

    def update(self, container, representation):
        # type: (dict, dict) -> None
        """Update container with specified representation."""
        node = container['objectName']
        assert cmds.objExists(node), "Missing container"

        members = cmds.sets(node, query=True) or []
        shapes = cmds.ls(members, type="mayaUsdProxyShape")

        # Point every proxy shape in the container at the new file
        path = get_representation_path(representation)
        for shape in shapes:
            cmds.setAttr("{}.filePath".format(shape), path, type="string")

        # Update the stored representation id on the container node
        cmds.setAttr("{}.representation".format(node),
                     str(representation["_id"]),
                     type="string")

    def switch(self, container, representation):
        """Switching a representation is the same as updating it."""
        self.update(container, representation)

    def remove(self, container):
        # type: (dict) -> None
        """Remove loaded container."""
        # Delete container and its contents
        if cmds.objExists(container['objectName']):
            members = cmds.sets(container['objectName'], query=True) or []
            cmds.delete([container['objectName']] + members)

        # Remove the namespace, if empty
        namespace = container['namespace']
        if cmds.namespace(exists=namespace):
            members = cmds.namespaceInfo(namespace, listNamespace=True)
            if not members:
                cmds.namespace(removeNamespace=namespace)
            else:
                self.log.warning("Namespace not deleted because it "
                                 "still has members: %s", namespace)

View file

@ -58,17 +58,3 @@ class CollectAnimationOutputGeometry(pyblish.api.InstancePlugin):
if instance.data.get("farm"):
instance.data["families"].append("publish.farm")
# Collect user defined attributes.
if not instance.data.get("includeUserDefinedAttributes", False):
return
user_defined_attributes = set()
for node in hierarchy:
attrs = cmds.listAttr(node, userDefined=True) or list()
shapes = cmds.listRelatives(node, shapes=True) or list()
for shape in shapes:
attrs.extend(cmds.listAttr(shape, userDefined=True) or list())
user_defined_attributes.update(attrs)
instance.data["userDefinedAttributes"] = list(user_defined_attributes)

View file

@ -45,18 +45,3 @@ class CollectPointcache(pyblish.api.InstancePlugin):
if proxy_set:
instance.remove(proxy_set)
instance.data["setMembers"].remove(proxy_set)
# Collect user defined attributes.
if not instance.data.get("includeUserDefinedAttributes", False):
return
user_defined_attributes = set()
for node in instance:
attrs = cmds.listAttr(node, userDefined=True) or list()
shapes = cmds.listRelatives(node, shapes=True) or list()
for shape in shapes:
attrs.extend(cmds.listAttr(shape, userDefined=True) or list())
user_defined_attributes.update(attrs)
instance.data["userDefinedAttributes"] = list(user_defined_attributes)

View file

@ -0,0 +1,39 @@
from maya import cmds
import pyblish.api
class CollectUserDefinedAttributes(pyblish.api.InstancePlugin):
    """Collect user defined attributes for nodes in instance."""

    order = pyblish.api.CollectorOrder + 0.45
    families = ["pointcache", "animation", "usd"]
    label = "Collect User Defined Attributes"
    hosts = ["maya"]

    def process(self, instance):
        """Store sorted user-defined attribute names of the instance's
        nodes (and their shapes) as `userDefinedAttributes` data."""

        # Opt-in behavior: only collect when the instance asked for it.
        if not instance.data.get("includeUserDefinedAttributes", False):
            return

        # The animation family stores its exported nodes in
        # `out_hierarchy`; other families use the instance members.
        if "out_hierarchy" in instance.data:
            members = instance.data["out_hierarchy"]
        else:
            members = instance[:]

        if not members:
            return

        # Also consider the shapes below the collected transforms.
        relatives = cmds.listRelatives(
            members, shapes=True, fullPath=True) or []
        node_set = set(members) | set(relatives)

        found = cmds.listAttr(list(node_set), userDefined=True) or []
        collected = sorted(set(found))
        instance.data["userDefinedAttributes"] = collected

        self.log.debug(
            "Collected user defined attributes: {}".format(
                ", ".join(collected)
            )
        )

View file

@ -0,0 +1,293 @@
import os
import six
import json
import contextlib
from maya import cmds
import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import maintained_selection
@contextlib.contextmanager
def usd_export_attributes(nodes, attrs=None, attr_prefixes=None, mapping=None):
    """Define attributes for the given nodes that should be exported.

    MayaUSDExport will export custom attributes if the Maya node has a
    string attribute `USD_UserExportedAttributesJson` that provides an
    export mapping for the maya attributes. This context manager will try
    to autogenerate such an attribute during the export to include attributes
    for the export.

    On exit the created/changed attributes are reverted via the
    `MDGModifier.undoIt` call so the scene is left untouched.

    Arguments:
        nodes (List[str]): Nodes to process.
        attrs (Optional[List[str]]): Full name of attributes to include.
        attr_prefixes (Optional[List[str]]): Prefixes of attributes to
            include.
        mapping (Optional[Dict[Dict]]): A mapping per attribute name for the
            conversion to a USD attribute, including renaming, defining type,
            converting attribute precision, etc. This match the usual
            `USD_UserExportedAttributesJson` json mapping of `mayaUSDExport`.
            When no mapping provided for an attribute it will use `{}` as
            value.

    Examples:
        >>> with usd_export_attributes(
        >>>     ["pCube1"], attrs=["myDoubleAttributeAsFloat"], mapping={
        >>>         "myDoubleAttributeAsFloat": {
        >>>             "usdAttrName": "my:namespace:attrib",
        >>>             "translateMayaDoubleToUsdSinglePrecision": True,
        >>>         }
        >>> })

    """
    # todo: this might be better done with a custom export chaser
    #   see `chaser` argument for `mayaUSDExport`
    import maya.api.OpenMaya as om

    if not attrs and not attr_prefixes:
        # context manager does nothing
        yield
        return

    if attrs is None:
        attrs = []
    if attr_prefixes is None:
        attr_prefixes = []
    if mapping is None:
        mapping = {}

    usd_json_attr = "USD_UserExportedAttributesJson"
    # `st` (string filter) patterns: exact names plus `prefix*` globs
    strings = attrs + ["{}*".format(prefix) for prefix in attr_prefixes]
    # Mapping of node -> json string to set on the node during export
    context_state = {}
    for node in set(nodes):
        node_attrs = cmds.listAttr(node, st=strings)
        if not node_attrs:
            # Nothing to do for this node
            continue

        node_attr_data = {}
        for node_attr in set(node_attrs):
            node_attr_data[node_attr] = mapping.get(node_attr, {})

        if cmds.attributeQuery(usd_json_attr, node=node, exists=True):
            existing_node_attr_value = cmds.getAttr(
                "{}.{}".format(node, usd_json_attr)
            )
            if existing_node_attr_value and existing_node_attr_value != "{}":
                # Any existing attribute mappings in an existing
                # `USD_UserExportedAttributesJson` attribute always take
                # precedence over what this function tries to imprint
                existing_node_attr_data = json.loads(existing_node_attr_value)
                node_attr_data.update(existing_node_attr_data)

        context_state[node] = json.dumps(node_attr_data)

    sel = om.MSelectionList()
    dg_mod = om.MDGModifier()
    fn_string = om.MFnStringData()
    fn_typed = om.MFnTypedAttribute()
    try:
        for node, value in context_state.items():
            data = fn_string.create(value)
            sel.clear()
            if cmds.attributeQuery(usd_json_attr, node=node, exists=True):
                # Set the attribute value
                sel.add("{}.{}".format(node, usd_json_attr))
                plug = sel.getPlug(0)
                dg_mod.newPlugValue(plug, data)
            else:
                # Create attribute with the value as default value
                sel.add(node)
                node_obj = sel.getDependNode(0)
                attr_obj = fn_typed.create(usd_json_attr,
                                           usd_json_attr,
                                           om.MFnData.kString,
                                           data)
                dg_mod.addAttribute(node_obj, attr_obj)
        # Apply all queued attribute edits at once
        dg_mod.doIt()
        yield
    finally:
        # Revert the temporary attribute edits after the export
        dg_mod.undoIt()
class ExtractMayaUsd(publish.Extractor):
    """Extractor for Maya USD Asset data.

    Upon publish a .usd (or .usdz) asset file will typically be written.
    """

    label = "Extract Maya USD Asset"
    hosts = ["maya"]
    families = ["mayaUsd"]

    @property
    def options(self):
        """Overridable options for Maya USD Export

        Given in the following format
            - {NAME: EXPECTED TYPE}

        If the overridden option's type does not match,
        the option is not included and a warning is logged.

        """
        # TODO: Support more `mayaUSDExport` parameters
        return {
            "defaultUSDFormat": str,
            "stripNamespaces": bool,
            "mergeTransformAndShape": bool,
            "exportDisplayColor": bool,
            "exportColorSets": bool,
            "exportInstances": bool,
            "exportUVs": bool,
            "exportVisibility": bool,
            "exportComponentTags": bool,
            "exportRefsAsInstanceable": bool,
            "eulerFilter": bool,
            "renderableOnly": bool,
            # Fix: `None` is not a type and `isinstance(x, (list, None))`
            # raises TypeError; use `type(None)` to accept list or None.
            "jobContext": (list, type(None))  # optional list
            # "worldspace": bool,
        }

    @property
    def default_options(self):
        """The default options for Maya USD Export."""
        # TODO: Support more `mayaUSDExport` parameters
        return {
            "defaultUSDFormat": "usdc",
            "stripNamespaces": False,
            "mergeTransformAndShape": False,
            "exportDisplayColor": False,
            "exportColorSets": True,
            "exportInstances": True,
            "exportUVs": True,
            "exportVisibility": True,
            "exportComponentTags": True,
            "exportRefsAsInstanceable": False,
            "eulerFilter": True,
            "renderableOnly": False,
            "jobContext": None
            # "worldspace": False
        }

    def parse_overrides(self, instance, options):
        """Inspect data of instance to determine overridden options

        Arguments:
            instance (pyblish.api.Instance): Instance whose data may
                override export options.
            options (dict): Options to update in-place with overrides.

        Returns:
            dict: The updated options.
        """
        for key in instance.data:
            if key not in self.options:
                continue

            # Ensure the data is of correct type
            value = instance.data[key]
            if isinstance(value, six.text_type):
                value = str(value)
            valid_types = self.options[key]
            if not isinstance(value, valid_types):
                # Fix: `valid_types` may be a tuple of types, which has
                # no `__name__` attribute - build a readable name either way
                if isinstance(valid_types, tuple):
                    valid_type_name = "/".join(
                        t.__name__ for t in valid_types)
                else:
                    valid_type_name = valid_types.__name__
                self.log.warning(
                    "Overridden attribute {key} was of "
                    "the wrong type: {invalid_type} "
                    "- should have been {valid_type}".format(
                        key=key,
                        invalid_type=type(value).__name__,
                        valid_type=valid_type_name))
                continue

            options[key] = value

        return options

    def filter_members(self, members):
        """Filter members to export. Can be overridden by inherited classes.

        Arguments:
            members (list): Set members of the instance.

        Returns:
            list: The members to export.
        """
        return members

    def process(self, instance):
        """Export the instance's set members to a .usd file and register
        the resulting representation on the instance."""

        # Load plugin first
        cmds.loadPlugin("mayaUsdPlugin", quiet=True)

        # Define output file path
        staging_dir = self.staging_dir(instance)
        file_name = "{0}.usd".format(instance.name)
        file_path = os.path.join(staging_dir, file_name)
        file_path = file_path.replace('\\', '/')

        # Parse export options
        options = self.default_options
        options = self.parse_overrides(instance, options)
        self.log.debug("Export options: {0}".format(options))

        # Perform extraction
        self.log.debug("Performing extraction ...")

        # Fix: `instance.data` is a dict - subscript it instead of calling
        # it (consistent with `instance.data["frameStartHandle"]` below)
        members = instance.data["setMembers"]
        self.log.debug('Collected objects: {}'.format(members))
        members = self.filter_members(members)

        if not members:
            self.log.error('No members!')
            return

        start = instance.data["frameStartHandle"]
        end = instance.data["frameEndHandle"]

        def parse_attr_str(attr_str):
            """Split a comma-separated attribute string into a clean list."""
            result = list()
            for attr in attr_str.split(","):
                attr = attr.strip()
                if not attr:
                    continue
                result.append(attr)
            return result

        attrs = parse_attr_str(instance.data.get("attr", ""))
        attrs += instance.data.get("userDefinedAttributes", [])
        attrs += ["cbId"]
        attr_prefixes = parse_attr_str(instance.data.get("attrPrefix", ""))

        self.log.debug('Exporting USD: {} / {}'.format(file_path, members))
        with maintained_selection():
            with usd_export_attributes(instance[:],
                                       attrs=attrs,
                                       attr_prefixes=attr_prefixes):
                cmds.mayaUSDExport(file=file_path,
                                   frameRange=(start, end),
                                   frameStride=instance.data.get("step", 1.0),
                                   exportRoots=members,
                                   **options)

        representation = {
            'name': "usd",
            'ext': "usd",
            'files': file_name,
            'stagingDir': staging_dir
        }
        instance.data.setdefault("representations", []).append(representation)

        self.log.debug(
            "Extracted instance {} to {}".format(instance.name, file_path)
        )
class ExtractMayaUsdAnim(ExtractMayaUsd):
    """Extractor for Maya USD Animation Sparse Cache data.

    This will extract the sparse cache data from the scene and generate a
    USD file with all the animation data.

    Upon publish a .usd sparse cache will be written.
    """

    label = "Extract Maya USD Animation Sparse Cache"
    families = ["animation", "mayaUsd"]
    match = pyblish.api.Subset

    def filter_members(self, members):
        """Return the long node names inside the first `out_SET` member,
        or None (with a warning) when no `out_SET` is present."""
        for member in members:
            if member.endswith("out_SET"):
                # Export only the contents of the output set
                contents = cmds.sets(member, query=True)
                return cmds.ls(contents, long=True)

        self.log.warning("Expecting out_SET")
        return None

View file

@ -28,7 +28,7 @@ class ExtractMultiverseUsd(publish.Extractor):
label = "Extract Multiverse USD Asset"
hosts = ["maya"]
families = ["usd"]
families = ["mvUsd"]
scene_type = "usd"
file_formats = ["usd", "usda", "usdz"]

View file

@ -10,6 +10,7 @@ from openpype.client import get_last_version_by_subset_name
from openpype.hosts.maya import api
from . import lib
from .alembic import get_alembic_ids_cache
from .usd import is_usd_lib_supported, get_usd_ids_cache
log = logging.getLogger(__name__)
@ -74,6 +75,13 @@ def get_nodes_by_id(standin):
# Support alembic files directly
return get_alembic_ids_cache(path)
elif (
is_usd_lib_supported and
any(path.endswith(ext) for ext in [".usd", ".usda", ".usdc"])
):
# Support usd files directly
return get_usd_ids_cache(path)
json_path = None
for f in os.listdir(os.path.dirname(path)):
if f.endswith(".json"):

View file

@ -0,0 +1,38 @@
from collections import defaultdict
try:
    from pxr import Usd
    is_usd_lib_supported = True
except ImportError:
    # `pxr` (USD) python bindings are optional; flag availability so
    # callers can check before invoking `get_usd_ids_cache`.
    is_usd_lib_supported = False


def get_usd_ids_cache(path):
    # type: (str) -> dict
    """Build a id to node mapping in a USD file.

    Nodes without IDs are ignored.

    Args:
        path (str): Path to a USD file to open and traverse.

    Returns:
        dict: Mapping of id to list of prim paths in the USD file.

    Raises:
        RuntimeError: If the `pxr.Usd` python library is unavailable.

    """
    if not is_usd_lib_supported:
        raise RuntimeError("No pxr.Usd python library available.")

    stage = Usd.Stage.Open(path)
    cache = {}
    for prim in stage.Traverse():
        attr = prim.GetAttribute("userProperties:cbId")
        if not attr.IsValid():
            # Prim has no cbId attribute; skip it
            continue
        value = attr.Get()
        if not value:
            continue
        # Group prim paths by their id value in a single pass (avoids the
        # original's intermediate dict and shadowing of the `path` argument)
        cache.setdefault(value, []).append(str(prim.GetPath()))

    return cache