mirror of
https://github.com/ynput/ayon-core.git
synced 2026-01-01 08:24:53 +01:00
323 lines
9.4 KiB
Python
# -*- coding: utf-8 -*-
|
|
"""Tools for loading looks to vray proxies."""
|
|
import os
|
|
from collections import defaultdict
|
|
import logging
|
|
import json
|
|
|
|
import six
|
|
from bson.objectid import ObjectId
|
|
|
|
import alembic.Abc
|
|
from maya import cmds
|
|
|
|
from avalon import io
|
|
|
|
from openpype.pipeline import (
|
|
load_container,
|
|
loaders_from_representation,
|
|
discover_loader_plugins,
|
|
get_representation_path,
|
|
registered_host,
|
|
)
|
|
from openpype.hosts.maya.api import lib
|
|
|
|
|
|
# Module-level logger shared by all helpers in this file.
log = logging.getLogger(__name__)
|
|
|
|
|
|
def get_alembic_paths_by_property(filename, attr, verbose=False):
    # type: (str, str, bool) -> dict
    """Return attribute value per objects in the Alembic file.

    Reads an Alembic archive hierarchy and retrieves the
    value from the `attr` properties on the objects.

    Args:
        filename (str): Full path to Alembic archive to read.
        attr (str): Id attribute.
        verbose (bool): Whether to verbosely log missing attributes.

    Returns:
        dict: Mapping of node full path with its id

    """
    # Normalize the archive path; Alembic requires a plain string with
    # forward slashes.
    filename = str(os.path.normpath(filename).replace("\\", "/"))

    try:
        archive = alembic.Abc.IArchive(filename)
    except RuntimeError:
        # invalid alembic file - probably vrmesh
        log.warning("{} is not an alembic file".format(filename))
        return {}

    obj_ids = {}

    # Breadth-first walk over the archive hierarchy, starting at the
    # children of the top object.
    queue = list(archive.getTop().children)
    while queue:
        node = queue.pop(0)
        full_name = node.getFullName()

        # Schedule children so the whole hierarchy gets visited.
        queue.extend(node.children)

        properties = node.getProperties()
        if properties.getNumProperties() == 0:
            # Skip those without properties, e.g. '/materials' in a gpuCache
            continue

        # The custom attribute lives under the first property container,
        # inside the ".arbGeomParams" compound.
        base_property = properties.getProperty(0)
        try:
            geo_params = base_property.getProperty('.arbGeomParams')
            id_property = geo_params.getProperty(attr)
        except KeyError:
            if verbose:
                log.debug("Missing attr on: {0}".format(full_name))
            continue

        if not id_property.isConstant():
            log.warning("Id not constant on: {0}".format(full_name))

        # Take the value of the first sample.
        obj_ids[full_name] = id_property.getValue()[0]

    return obj_ids
|
|
|
|
|
|
def get_alembic_ids_cache(path):
    # type: (str) -> dict
    """Build an id to node mapping in Alembic file.

    Nodes without IDs are ignored.

    Args:
        path (str): Full path to the Alembic archive to inspect.

    Returns:
        dict: Mapping of id to nodes in the Alembic.

    """
    node_ids = get_alembic_paths_by_property(path, attr="cbId")

    # Invert the {node: id} mapping into {id: [nodes, ...]}.
    # Plain dict.items() replaces the py2-era six.iteritems shim; the
    # iteration result is identical on both interpreters.
    id_nodes = defaultdict(list)
    for node, _id in node_ids.items():
        id_nodes[_id].append(node)

    return dict(id_nodes)
|
|
|
|
|
|
def assign_vrayproxy_shaders(vrayproxy, assignments):
    # type: (str, dict) -> None
    """Assign shaders to content of Vray Proxy.

    This will create shader overrides on Vray Proxy to assign shaders to its
    content.

    Todo:
        Allow to optimize and assign a single shader to multiple shapes at
        once or maybe even set it to the highest available path?

    Args:
        vrayproxy (str): Name of Vray Proxy
        assignments (dict): Mapping of shader assignments.

    Returns:
        None

    """
    # Remove any existing shader override entries, last to first so
    # indices stay valid while deleting.
    shaders_plug = "{}.shaders".format(vrayproxy)
    existing = cmds.getAttr(shaders_plug, size=True)
    for i in range(existing - 1, -1, -1):
        cmds.removeMultiInstance("{}[{}]".format(shaders_plug, i), b=True)

    # Flatten the mapping into (material, path) pairs and write one
    # override entry per pair.
    pairs = (
        (material, path)
        for material, paths in assignments.items()
        for path in paths
    )
    for index, (material, path) in enumerate(pairs):
        entry = "{}.shaders[{}]".format(vrayproxy, index)
        cmds.setAttr(entry + ".shadersNames", path, type="string")
        cmds.connectAttr(material + ".outColor",
                         entry + ".shadersConnections", force=True)
|
|
|
|
|
|
def get_look_relationships(version_id):
    # type: (str) -> dict
    """Get relations for the look.

    Args:
        version_id (str): Parent version Id.

    Returns:
        dict: Dictionary of relations.

    """
    # The look publish stores its relationships as a "json" representation
    # alongside the shader file.
    json_representation = io.find_one({
        "type": "representation",
        "parent": version_id,
        "name": "json",
    })

    # Resolve the representation to a file on disk and parse it.
    relationships_path = get_representation_path(json_representation)
    with open(relationships_path, "r") as stream:
        return json.load(stream)
|
|
|
|
|
|
def load_look(version_id):
    # type: (str) -> list
    """Load look from version.

    Get look from version and invoke Loader for it.

    Looks up the "ma" representation of the given version, reuses an
    already-loaded container for it when one exists in the host, and
    otherwise loads it with the "LookLoader" plugin.

    Args:
        version_id (str): Version ID

    Returns:
        list of shader nodes.

    """
    # Get representations of shader file and relationships
    # NOTE(review): no None-check here — if the version has no "ma"
    # representation, the ['_id'] access below raises TypeError.
    look_representation = io.find_one({"type": "representation",
                                       "parent": version_id,
                                       "name": "ma"})

    # See if representation is already loaded, if so reuse it.
    host = registered_host()
    representation_id = str(look_representation['_id'])
    for container in host.ls():
        if (container['loader'] == "LookLoader" and
                container['representation'] == representation_id):
            log.info("Reusing loaded look ...")
            container_node = container['objectName']
            break
    else:
        # for/else: only runs when no existing container matched above.
        log.info("Using look for the first time ...")

        # Load file
        all_loaders = discover_loader_plugins()
        loaders = loaders_from_representation(all_loaders, representation_id)
        # Select the LookLoader plugin by class name among compatible loaders.
        loader = next(
            (i for i in loaders if i.__name__ == "LookLoader"), None)
        if loader is None:
            raise RuntimeError("Could not find LookLoader, this is a bug")

        # Reference the look file without disturbing the user's selection.
        with lib.maintained_selection():
            container_node = load_container(loader, look_representation)

    # Get container members
    shader_nodes = lib.get_container_members(container_node)
    return shader_nodes
|
|
|
|
|
|
def get_latest_version(asset_id, subset):
    # type: (str, str) -> dict
    """Get latest version of subset.

    Args:
        asset_id (str): Asset ID
        subset (str): Subset name.

    Returns:
        Latest version

    Throws:
        RuntimeError: When subset or version doesn't exist.

    """
    # Use a distinct name for the query result; previously the result
    # shadowed the `subset` argument, so the error message below always
    # reported "None" instead of the requested subset name.
    subset_doc = io.find_one({"name": subset,
                              "parent": ObjectId(asset_id),
                              "type": "subset"})
    if not subset_doc:
        raise RuntimeError("Subset does not exist: %s" % subset)

    # Highest "name" wins: versions are sorted descending, take the first.
    version = io.find_one({"type": "version",
                           "parent": subset_doc["_id"]},
                          sort=[("name", -1)])
    if not version:
        raise RuntimeError("Version does not exist.")

    return version
|
|
|
|
|
|
def _build_shader_assignments(edits):
    # type: (list) -> dict
    """Build a material to object-paths mapping from shader edits.

    Only edits with action "assign" are handled. Face (component)
    assignments are converted to full object assignments, which can be
    lossy.

    Args:
        edits (list): Shader edits as produced by ``lib.iter_shader_edits``.

    Returns:
        dict: Mapping of material node to list of object paths.

    """
    assignments = {}
    for edit in edits:
        if edit["action"] != "assign":
            continue

        nodes = edit["nodes"]
        shader = edit["shader"]
        if not cmds.ls(shader, type="shadingEngine"):
            print("Skipping non-shader: %s" % shader)
            continue

        inputs = cmds.listConnections(
            shader + ".surfaceShader", source=True)
        if not inputs:
            print("Shading engine missing material: %s" % shader)
            # BUGFIX: previously fell through to inputs[0] and raised
            # TypeError because listConnections returns None here.
            continue

        # Strip off component assignments
        for i, node in enumerate(nodes):
            if "." in node:
                log.warning(
                    ("Converting face assignment to full object "
                     "assignment. This conversion can be lossy: "
                     "{}").format(node))
                nodes[i] = node.split(".")[0]

        material = inputs[0]
        assignments[material] = nodes

    return assignments


def vrayproxy_assign_look(vrayproxy, subset="lookDefault"):
    # type: (str, str) -> None
    """Assign look to vray proxy.

    Args:
        vrayproxy (str): Name of vrayproxy to apply look to.
        subset (str): Name of look subset.

    Returns:
        None

    """
    path = cmds.getAttr(vrayproxy + ".fileName")

    nodes_by_id = get_alembic_ids_cache(path)
    if not nodes_by_id:
        log.warning("Alembic file has no cbId attributes: %s" % path)
        return

    # Group by asset id so we run over the look per asset.
    # cbId values look like "<asset_id>:<node_id>" — take the part before
    # the first colon as the asset id.
    node_ids_by_asset_id = defaultdict(set)
    for node_id in nodes_by_id:
        asset_id = node_id.split(":", 1)[0]
        node_ids_by_asset_id[asset_id].add(node_id)

    for asset_id, node_ids in node_ids_by_asset_id.items():

        # Get latest look version; a missing look for one asset should not
        # stop the remaining assets from being processed.
        try:
            version = get_latest_version(asset_id, subset=subset)
        except RuntimeError as exc:
            print(exc)
            continue

        relationships = get_look_relationships(version["_id"])
        shadernodes = load_look(version["_id"])

        # Get only the node ids and paths related to this asset
        # And get the shader edits the look supplies
        asset_nodes_by_id = {
            node_id: nodes_by_id[node_id] for node_id in node_ids
        }
        edits = list(
            lib.iter_shader_edits(
                relationships, shadernodes, asset_nodes_by_id))

        # Create assignments and apply them as overrides on the proxy.
        assignments = _build_shader_assignments(edits)

        assign_vrayproxy_shaders(vrayproxy, assignments)
|