mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-25 21:32:15 +01:00
WIP on shader assignment
This commit is contained in:
parent
0b244d3b6d
commit
68f81b8a3d
8 changed files with 531 additions and 18 deletions
|
|
@ -1124,19 +1124,18 @@ def get_id_required_nodes(referenced_nodes=False, nodes=None):
|
|||
|
||||
|
||||
def get_id(node):
|
||||
"""
|
||||
Get the `cbId` attribute of the given node
|
||||
"""Get the `cbId` attribute of the given node.
|
||||
|
||||
Args:
|
||||
node (str): the name of the node to retrieve the attribute from
|
||||
|
||||
Returns:
|
||||
str
|
||||
|
||||
"""
|
||||
|
||||
if node is None:
|
||||
return
|
||||
|
||||
print("><?>< {}".format(node))
|
||||
sel = om.MSelectionList()
|
||||
sel.add(node)
|
||||
|
||||
|
|
@ -2688,3 +2687,69 @@ def show_message(title, msg):
|
|||
pass
|
||||
else:
|
||||
message_window.message(title=title, message=msg, parent=parent)
|
||||
|
||||
|
||||
def iter_shader_edits(relationships, shader_nodes, nodes_by_id, label=None):
    """Yield shader edits as a set of actions.

    Matches the shading engines among ``shader_nodes`` to scene nodes in
    ``nodes_by_id`` through their ``cbId`` values and yields one action
    dict per edit:

    - ``{"action": "assign", "uuid", "nodes", "shader"}``
    - ``{"action": "setattr", "uuid", "nodes", "attributes"}``

    Args:
        relationships (dict): Look relationship data; may contain
            "attributes" (list) and "relationships" (dict) keys.
        shader_nodes (list): Nodes of the loaded look to search for
            objectSets (shading engines).
        nodes_by_id (dict): Mapping of cbId to scene nodes.
        label (str): Optional prefix used in log messages.

    Yields:
        dict: A single edit action.

    """
    attributes = relationships.get("attributes", [])
    shader_data = relationships.get("relationships", {})

    shading_engines = cmds.ls(shader_nodes, type="objectSet", long=True)
    assert shading_engines, "Error in retrieving objectSets from reference"

    # region compute lookup
    shading_engines_by_id = defaultdict(list)
    for shading_engine in shading_engines:
        shading_engines_by_id[get_id(shading_engine)].append(shading_engine)
    # endregion

    # region assign shading engines and other sets
    for data in shader_data.values():
        # collect all unique IDs of the set members
        shader_uuid = data["uuid"]
        member_uuids = [
            (member["uuid"], member.get("components"))
            for member in data["members"]]

        filtered_nodes = []
        for _uuid, components in member_uuids:
            nodes = nodes_by_id.get(_uuid)
            if nodes is None:
                continue

            if components:
                # Assign to the components
                nodes = [".".join([node, components]) for node in nodes]

            filtered_nodes.extend(nodes)

        # Use .get() so looking up a missing id does not mutate the
        # defaultdict by inserting an empty entry.
        id_shading_engines = shading_engines_by_id.get(shader_uuid, [])
        if not id_shading_engines:
            log.error("{} - No shader found with cbId "
                      "'{}'".format(label, shader_uuid))
            continue
        elif len(id_shading_engines) > 1:
            log.error("{} - Skipping shader assignment. "
                      "More than one shader found with cbId "
                      "'{}'. (found: {})".format(label, shader_uuid,
                                                 id_shading_engines))
            continue

        if not filtered_nodes:
            log.warning("{} - No nodes found for shading engine "
                        "'{}'".format(label, id_shading_engines[0]))
            continue

        yield {"action": "assign",
               "uuid": data["uuid"],
               "nodes": filtered_nodes,
               "shader": id_shading_engines[0]}
    # endregion

    for data in attributes:
        nodes = nodes_by_id.get(data["uuid"], [])
        attr_value = data["attributes"]
        yield {"action": "setattr",
               "uuid": data["uuid"],
               "nodes": nodes,
               "attributes": attr_value}
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Look loader."""
|
||||
import openpype.hosts.maya.api.plugin
|
||||
from avalon import api, io
|
||||
import json
|
||||
|
|
|
|||
|
|
@ -1,12 +1,21 @@
|
|||
from avalon.maya import lib
|
||||
from avalon import api
|
||||
from openpype.api import get_project_settings
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Loader for Vray Proxy files.
|
||||
|
||||
If there are Alembics published along vray proxy (in the same version),
|
||||
loader will use them instead of native vray vrmesh format.
|
||||
|
||||
"""
|
||||
import os
|
||||
|
||||
import maya.cmds as cmds
|
||||
|
||||
from avalon.maya import lib
|
||||
from avalon import api, io
|
||||
from openpype.api import get_project_settings
|
||||
|
||||
|
||||
class VRayProxyLoader(api.Loader):
|
||||
"""Load VRayMesh proxy"""
|
||||
"""Load VRayMesh proxy."""
|
||||
|
||||
families = ["vrayproxy"]
|
||||
representations = ["vrmesh"]
|
||||
|
|
@ -16,8 +25,17 @@ class VRayProxyLoader(api.Loader):
|
|||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def load(self, context, name, namespace, data):
|
||||
def load(self, context, name=None, namespace=None, options=None):
|
||||
# type: (dict, str, str, dict) -> None
|
||||
"""Loader entry point.
|
||||
|
||||
Args:
|
||||
context (dict): Loaded representation context.
|
||||
name (str): Name of container.
|
||||
namespace (str): Optional namespace name.
|
||||
options (dict): Optional loader options.
|
||||
|
||||
"""
|
||||
from avalon.maya.pipeline import containerise
|
||||
from openpype.hosts.maya.api.lib import namespaced
|
||||
|
||||
|
|
@ -26,6 +44,9 @@ class VRayProxyLoader(api.Loader):
|
|||
except ValueError:
|
||||
family = "vrayproxy"
|
||||
|
||||
# get all representations for this version
|
||||
self.fname = self._get_abc(context["version"]["_id"]) or self.fname
|
||||
|
||||
asset_name = context['asset']["name"]
|
||||
namespace = namespace or lib.unique_namespace(
|
||||
asset_name + "_",
|
||||
|
|
@ -39,8 +60,8 @@ class VRayProxyLoader(api.Loader):
|
|||
with lib.maintained_selection():
|
||||
cmds.namespace(addNamespace=namespace)
|
||||
with namespaced(namespace, new=False):
|
||||
nodes, group_node = self.create_vray_proxy(name,
|
||||
filename=self.fname)
|
||||
nodes, group_node = self.create_vray_proxy(
|
||||
name, filename=self.fname)
|
||||
|
||||
self[:] = nodes
|
||||
if not nodes:
|
||||
|
|
@ -63,7 +84,8 @@ class VRayProxyLoader(api.Loader):
|
|||
loader=self.__class__.__name__)
|
||||
|
||||
def update(self, container, representation):
|
||||
|
||||
# type: (dict, dict) -> None
|
||||
"""Update container with specified representation."""
|
||||
node = container['objectName']
|
||||
assert cmds.objExists(node), "Missing container"
|
||||
|
||||
|
|
@ -71,7 +93,8 @@ class VRayProxyLoader(api.Loader):
|
|||
vraymeshes = cmds.ls(members, type="VRayMesh")
|
||||
assert vraymeshes, "Cannot find VRayMesh in container"
|
||||
|
||||
filename = api.get_representation_path(representation)
|
||||
# get all representations for this version
|
||||
filename = self._get_abc(representation["parent"]) or api.get_representation_path(representation) # noqa: E501
|
||||
|
||||
for vray_mesh in vraymeshes:
|
||||
cmds.setAttr("{}.fileName".format(vray_mesh),
|
||||
|
|
@ -84,7 +107,8 @@ class VRayProxyLoader(api.Loader):
|
|||
type="string")
|
||||
|
||||
def remove(self, container):
|
||||
|
||||
# type: (dict) -> None
|
||||
"""Remove loaded container."""
|
||||
# Delete container and its contents
|
||||
if cmds.objExists(container['objectName']):
|
||||
members = cmds.sets(container['objectName'], query=True) or []
|
||||
|
|
@ -101,18 +125,22 @@ class VRayProxyLoader(api.Loader):
|
|||
"still has members: %s", namespace)
|
||||
|
||||
def switch(self, container, representation):
|
||||
# type: (dict, dict) -> None
|
||||
"""Switch loaded representation."""
|
||||
self.update(container, representation)
|
||||
|
||||
def create_vray_proxy(self, name, filename):
|
||||
# type: (str, str) -> (list, str)
|
||||
"""Re-create the structure created by VRay to support vrmeshes
|
||||
|
||||
Args:
|
||||
name(str): name of the asset
|
||||
name (str): Name of the asset.
|
||||
filename (str): File name of vrmesh.
|
||||
|
||||
Returns:
|
||||
nodes(list)
|
||||
"""
|
||||
|
||||
"""
|
||||
# Create nodes
|
||||
vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name))
|
||||
mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name))
|
||||
|
|
@ -159,3 +187,35 @@ class VRayProxyLoader(api.Loader):
|
|||
cmds.setAttr("{}.geomType".format(vray_mesh), 2)
|
||||
|
||||
return nodes, group_node
|
||||
|
||||
def _get_abc(self, version_id):
|
||||
# type: (str) -> str
|
||||
"""Get abc representation file path if present.
|
||||
|
||||
If here is published Alembic (abc) representation published along
|
||||
vray proxy, get is file path.
|
||||
|
||||
Args:
|
||||
version_id (str): Version hash id.
|
||||
|
||||
Returns:
|
||||
str: Path to file.
|
||||
None: If abc not found.
|
||||
|
||||
"""
|
||||
self.log.debug(
|
||||
"Looking for abc in published representations of this version.")
|
||||
abc_rep = io.find_one(
|
||||
{
|
||||
"type": "representation",
|
||||
"parent": io.ObjectId(version_id),
|
||||
"name": "abc"
|
||||
})
|
||||
|
||||
if abc_rep:
|
||||
self.log.debug("Found, we'll link alembic to vray proxy.")
|
||||
file_name = api.get_representation_path(abc_rep)
|
||||
self.log.debug("File: {}".format(self.fname))
|
||||
return file_name
|
||||
|
||||
return None
|
||||
|
|
|
|||
21
openpype/hosts/maya/plugins/publish/collect_vrayproxy.py
Normal file
21
openpype/hosts/maya/plugins/publish/collect_vrayproxy.py
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Collect Vray Proxy."""
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectVrayProxy(pyblish.api.InstancePlugin):
    """Collect Vray Proxy instance.

    Add `pointcache` family for it.
    """

    order = pyblish.api.CollectorOrder + 0.01
    label = 'Collect Vray Proxy'
    families = ["vrayproxy"]

    def process(self, instance):
        """Collector entry point.

        Args:
            instance (pyblish.api.Instance): Instance to process.

        """
        # Replace a missing or falsy (e.g. None) families value with a
        # fresh list so the membership test below cannot fail.
        if not instance.data.get('families'):
            instance.data["families"] = []

        if "pointcache" not in instance.data["families"]:
            instance.data["families"].append("pointcache")
            self.log.debug("adding to pointcache family")
|
||||
|
|
@ -15,6 +15,8 @@ import maya.api.OpenMaya as om
|
|||
|
||||
from . import widgets
|
||||
from . import commands
|
||||
from . vray_proxies import vrayproxy_assign_look
|
||||
|
||||
|
||||
module = sys.modules[__name__]
|
||||
module.window = None
|
||||
|
|
@ -211,9 +213,21 @@ class App(QtWidgets.QWidget):
|
|||
subset_name,
|
||||
asset))
|
||||
|
||||
print(">>> get vray mesh nodes ...")
|
||||
vray_proxies = set(cmds.ls(type="VRayMesh"))
|
||||
print("-" * 40)
|
||||
print(item["nodes"])
|
||||
print(vray_proxies)
|
||||
nodes = set(item["nodes"]).difference(vray_proxies)
|
||||
print(nodes)
|
||||
|
||||
# Assign look
|
||||
assign_look_by_version(nodes=item["nodes"],
|
||||
version_id=version["_id"])
|
||||
if nodes:
|
||||
assign_look_by_version([nodes], version_id=version["_id"])
|
||||
|
||||
if vray_proxies:
|
||||
for vp in vray_proxies:
|
||||
vrayproxy_assign_look(vp, subset_name)
|
||||
|
||||
end = time.time()
|
||||
|
||||
|
|
|
|||
|
|
@ -8,6 +8,9 @@ from openpype.hosts.maya.api import lib
|
|||
|
||||
from avalon import io, api
|
||||
|
||||
|
||||
import vray_proxies
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
|
@ -132,6 +135,27 @@ def create_items_from_nodes(nodes):
|
|||
asset_view_items = []
|
||||
|
||||
id_hashes = create_asset_id_hash(nodes)
|
||||
|
||||
print("*" * 40)
|
||||
print(id_hashes)
|
||||
|
||||
# get ids from alembic
|
||||
vray_proxy_nodes = cmds.ls(nodes, type="VRayMesh")
|
||||
for vp in vray_proxy_nodes:
|
||||
path = cmds.getAttr("{}.fileName".format(vp))
|
||||
ids = vray_proxies.get_alembic_ids_cache(path)
|
||||
parent_id = {}
|
||||
for k, n in ids.items():
|
||||
pid = k.split(":")[0]
|
||||
if not parent_id.get(pid):
|
||||
parent_id.update({pid: [vp]})
|
||||
|
||||
print("adding ids from alembic {}".format(path))
|
||||
id_hashes.update(parent_id)
|
||||
|
||||
print("*" * 40)
|
||||
print(id_hashes)
|
||||
|
||||
if not id_hashes:
|
||||
return asset_view_items
|
||||
|
||||
|
|
|
|||
324
openpype/tools/mayalookassigner/vray_proxies.py
Normal file
324
openpype/tools/mayalookassigner/vray_proxies.py
Normal file
|
|
@ -0,0 +1,324 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Tools for loading looks to vray proxies."""
|
||||
import os
|
||||
from collections import defaultdict
|
||||
import logging
|
||||
import json
|
||||
|
||||
import six
|
||||
|
||||
import alembic.Abc
|
||||
from maya import cmds
|
||||
|
||||
import avalon.io as io
|
||||
import avalon.maya
|
||||
import avalon.api as api
|
||||
|
||||
import openpype.hosts.maya.api.lib as lib
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_alembic_paths_by_property(filename, attr, verbose=False):
    # type: (str, str, bool) -> dict
    """Return attribute value per object in the Alembic file.

    Walks the Alembic archive hierarchy and collects, for every object
    carrying the ``attr`` property under ``.arbGeomParams``, the first
    sample of that property's value.

    Args:
        filename (str): Full path to Alembic archive to read.
        attr (str): Id attribute.
        verbose (bool): Whether to verbosely log missing attributes.

    Returns:
        dict: Mapping of node full path with its id.

    """
    # Alembic expects a plain forward-slash path string.
    filename = str(os.path.normpath(filename).replace("\\", "/"))

    archive = alembic.Abc.IArchive(filename)
    top = archive.getTop()

    # Breadth-first walk: children are appended to the list while it is
    # being iterated, which extends the iteration to them.
    queue = list(top.children)
    obj_ids = {}

    for node in queue:
        full_name = node.getFullName()

        # include children for coming iterations
        queue.extend(node.children)

        properties = node.getProperties()
        if properties.getNumProperties() == 0:
            # Skip those without properties, e.g. '/materials' in a gpuCache
            continue

        # The custom attribute is under the properties' first container,
        # inside ".arbGeomParams".
        base = properties.getProperty(0)
        try:
            geo_params = base.getProperty('.arbGeomParams')
            id_property = geo_params.getProperty(attr)
        except KeyError:
            if verbose:
                log.debug("Missing attr on: {0}".format(full_name))
            continue

        if not id_property.isConstant():
            log.warning("Id not constant on: {0}".format(full_name))

        # Use the first value sample.
        obj_ids[full_name] = id_property.getValue()[0]

    return obj_ids
|
||||
|
||||
|
||||
def get_alembic_ids_cache(path):
    # type: (str) -> dict
    """Build an id-to-node mapping from an Alembic file.

    Nodes without IDs are ignored.

    Returns:
        dict: Mapping of id to nodes in the Alembic.

    """
    id_nodes = defaultdict(list)
    path_ids = get_alembic_paths_by_property(path, attr="cbId")
    for node_path, node_id in six.iteritems(path_ids):
        id_nodes[node_id].append(node_path)

    return dict(six.iteritems(id_nodes))
|
||||
|
||||
|
||||
def assign_vrayproxy_shaders(vrayproxy, assignments):
    # type: (str, dict) -> None
    """Assign shaders to content of Vray Proxy.

    This will create shader overrides on Vray Proxy to assign shaders to its
    content.

    Todo:
        Allow to optimize and assign a single shader to multiple shapes at
        once or maybe even set it to the highest available path?

    Args:
        vrayproxy (str): Name of Vray Proxy.
        assignments (dict): Mapping of material node to the list of paths
            inside the proxy it should be assigned to.

    Returns:
        None

    """
    # Clear all current shader assignments
    plug = vrayproxy + ".shaders"
    num = cmds.getAttr(plug, size=True)
    for i in reversed(range(num)):
        cmds.removeMultiInstance("{}[{}]".format(plug, i), b=True)

    # Create new assignment overrides
    index = 0
    for material, paths in assignments.items():
        for path in paths:
            # BUGFIX: was formatting with undefined name `proxy` instead of
            # the `vrayproxy` parameter, raising NameError at runtime.
            plug = "{}.shaders[{}]".format(vrayproxy, index)
            cmds.setAttr(plug + ".shadersNames", path, type="string")
            cmds.connectAttr(material + ".outColor",
                             plug + ".shadersConnections", force=True)
            index += 1
|
||||
|
||||
|
||||
def get_look_relationships(version_id):
    # type: (str) -> dict
    """Get relations for the look.

    Args:
        version_id (str): Parent version Id.

    Returns:
        dict: Dictionary of relations.

    """
    json_representation = io.find_one({
        "type": "representation",
        "parent": version_id,
        "name": "json",
    })

    # The relationships are published as a json file alongside the look.
    shader_relation = api.get_representation_path(json_representation)
    with open(shader_relation, "r") as stream:
        return json.load(stream)
|
||||
|
||||
|
||||
def load_look(version_id):
    # type: (str) -> list
    """Load look from version.

    Finds the ``ma`` representation of the version and either reuses an
    already loaded container or loads the look through the ``LookLoader``.

    Args:
        version_id (str): Version ID.

    Returns:
        list: Shader nodes contained in the look.

    """
    # Get representations of shader file and relationships
    look_representation = io.find_one({"type": "representation",
                                       "parent": version_id,
                                       "name": "ma"})

    # See if representation is already loaded, if so reuse it.
    host = api.registered_host()
    representation_id = str(look_representation['_id'])
    existing = next(
        (container for container in host.ls()
         if container['loader'] == "LookLoader"
         and container['representation'] == representation_id),
        None)

    if existing is not None:
        log.info("Reusing loaded look ...")
        container_node = existing['objectName']
    else:
        log.info("Using look for the first time ...")

        # Find the LookLoader among the compatible loaders.
        loaders = api.loaders_from_representation(api.discover(api.Loader),
                                                  representation_id)
        loader = next((i for i in loaders if i.__name__ == "LookLoader"), None)
        if loader is None:
            raise RuntimeError("Could not find LookLoader, this is a bug")

        # Reference the look file
        with avalon.maya.maintained_selection():
            container_node = api.load(loader, look_representation)

    # The container objectSet members are the loaded shader nodes.
    return cmds.sets(container_node, query=True)
|
||||
|
||||
|
||||
def get_latest_version(asset_id, subset):
    # type: (str, str) -> dict
    """Get latest version of subset.

    Args:
        asset_id (str): Asset ID.
        subset (str): Subset name.

    Returns:
        dict: Latest version document.

    Raises:
        RuntimeError: When subset or version doesn't exist.

    """
    # BUGFIX: keep the requested name intact for error reporting. The
    # original overwrote `subset` with the query result, so a failed lookup
    # reported "Subset does not exist: None" instead of the requested name.
    subset_doc = io.find_one({"name": subset,
                              "parent": io.ObjectId(asset_id),
                              "type": "subset"})
    if not subset_doc:
        raise RuntimeError("Subset does not exist: %s" % subset)

    version = io.find_one({"type": "version",
                           "parent": subset_doc["_id"]},
                          sort=[("name", -1)])
    if not version:
        raise RuntimeError("Version does not exist.")

    return version
|
||||
|
||||
|
||||
def vrayproxy_assign_look(vrayproxy, subset="lookDefault"):
    # type: (str, str) -> None
    """Assign look to vray proxy.

    Resolves the assets inside the proxy's Alembic file through their
    ``cbId`` attributes, loads the latest look version per asset and applies
    the resulting shader assignments as overrides on the proxy.

    Args:
        vrayproxy (str): Name of vrayproxy to apply look to.
        subset (str): Name of look subset.

    Returns:
        None

    """
    path = cmds.getAttr(vrayproxy + ".fileName")

    nodes_by_id = get_alembic_ids_cache(path)
    if not nodes_by_id:
        log.warning("Alembic file has no cbId attributes: %s" % path)
        return

    # Group by asset id so we run over the look per asset
    node_ids_by_asset_id = defaultdict(set)
    for node_id in nodes_by_id:
        asset_id = node_id.split(":", 1)[0]
        node_ids_by_asset_id[asset_id].add(node_id)

    for asset_id, node_ids in node_ids_by_asset_id.items():

        # Get latest look version; best effort, skip assets without one.
        try:
            version = get_latest_version(asset_id, subset=subset)
        except RuntimeError as exc:
            log.warning(exc)
            continue

        relationships = get_look_relationships(version["_id"])
        shadernodes = load_look(version["_id"])

        # Get only the node ids and paths related to this asset
        # And get the shader edits the look supplies
        asset_nodes_by_id = {node_id: vrayproxy for node_id in node_ids}
        edits = list(
            lib.iter_shader_edits(
                relationships, shadernodes, asset_nodes_by_id))

        # Create assignments
        assignments = {}
        for edit in edits:
            if edit["action"] != "assign":
                continue

            nodes = edit["nodes"]
            shader = edit["shader"]
            if not cmds.ls(shader, type="shadingEngine"):
                log.warning("Skipping non-shader: %s" % shader)
                continue

            inputs = cmds.listConnections(
                shader + ".surfaceShader", source=True)
            if not inputs:
                # BUGFIX: skip instead of crashing on inputs[0] below.
                log.warning("Shading engine missing material: %s" % shader)
                continue

            # Strip off component assignments
            for i, node in enumerate(nodes):
                if "." in node:
                    log.warning(
                        "Converting face assignment to full object "
                        "assignment. This conversion can be lossy: "
                        "%s" % node)
                    nodes[i] = node.split(".")[0]

            material = inputs[0]
            assignments[material] = nodes

        assign_vrayproxy_shaders(vrayproxy, assignments)
|
||||
|
||||
|
||||
# Example usage
if __name__ == "__main__":
    # V-Ray must be loaded before VRayProxy node types can resolve.
    cmds.loadPlugin("vrayformaya", quiet=True)

    # Assign lookDefault to all selected V-Ray Proxies.
    selected_proxies = cmds.ls(sl=True, dag=True, type="VRayProxy")
    for proxy in selected_proxies:
        vrayproxy_assign_look(proxy, subset="lookDefault")
|
||||
|
|
@ -93,6 +93,9 @@ class AssetOutliner(QtWidgets.QWidget):
|
|||
with lib.preserve_selection(self.view):
|
||||
self.clear()
|
||||
nodes = commands.get_all_asset_nodes()
|
||||
print("_" * 40)
|
||||
print(nodes)
|
||||
print("_" * 40)
|
||||
items = commands.create_items_from_nodes(nodes)
|
||||
self.add_items(items)
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue