Merge pull request #4460 from tokejepsen/feature/OP-4859_cant-assign-shaders-to-the-ass-file

This commit is contained in:
Ondřej Samohel 2023-04-03 16:59:47 +02:00 committed by GitHub
commit 4f0fed10a6
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
13 changed files with 594 additions and 208 deletions

View file

@ -3675,3 +3675,43 @@ def len_flattened(components):
else:
n += 1
return n
def get_all_children(nodes):
    """Return all children of `nodes` including each instanced child.

    Using maya.cmds.listRelatives(allDescendents=True) includes only the first
    instance. As such, this function acts as an optimal replacement with a
    focus on a fast query.

    Args:
        nodes (iterable): Maya node names/paths whose hierarchies to traverse.

    Returns:
        list: Full DAG paths of all descendants. Order is undefined because
            results are collected in a set.
    """
    sel = OpenMaya.MSelectionList()
    traversed = set()
    # One reusable depth-first DAG iterator for all roots.
    iterator = OpenMaya.MItDag(OpenMaya.MItDag.kDepthFirst)
    for node in nodes:
        if node in traversed:
            # Ignore if already processed as a child
            # before
            continue

        sel.clear()
        sel.add(node)
        dag = sel.getDagPath(0)

        iterator.reset(dag)
        # ignore self
        iterator.next()  # noqa: B305
        while not iterator.isDone():

            path = iterator.fullPathName()
            if path in traversed:
                # Subtree already visited via another root or instance path;
                # prune() skips its children before advancing.
                iterator.prune()
                iterator.next()  # noqa: B305
                continue

            traversed.add(path)
            iterator.next()  # noqa: B305

    return list(traversed)

View file

@ -84,7 +84,7 @@ class ArnoldStandinLoader(load.LoaderPlugin):
sequence = is_sequence(os.listdir(os.path.dirname(self.fname)))
cmds.setAttr(standin_shape + ".useFrameExtension", sequence)
nodes = [root, standin]
nodes = [root, standin, standin_shape]
if operator is not None:
nodes.append(operator)
self[:] = nodes
@ -183,7 +183,7 @@ class ArnoldStandinLoader(load.LoaderPlugin):
# If no proxy exists, the string operator won't replace anything.
cmds.setAttr(
string_replace_operator + ".match",
"resources/" + proxy_basename,
proxy_basename,
type="string"
)
cmds.setAttr(

View file

@ -1,6 +1,7 @@
from maya import cmds
import pyblish.api
from openpype.hosts.maya.api.lib import get_all_children
class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
@ -21,18 +22,21 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
self.log.warning("Skipped empty instance: \"%s\" " % objset)
continue
if objset.endswith("content_SET"):
instance.data["setMembers"] = cmds.ls(members, long=True)
self.log.debug("content members: {}".format(members))
members = cmds.ls(members, long=True)
children = get_all_children(members)
instance.data["contentMembers"] = children
self.log.debug("content members: {}".format(children))
elif objset.endswith("proxy_SET"):
instance.data["proxy"] = cmds.ls(members, long=True)
self.log.debug("proxy members: {}".format(members))
set_members = get_all_children(cmds.ls(members, long=True))
instance.data["proxy"] = set_members
self.log.debug("proxy members: {}".format(set_members))
# Use camera in object set if present else default to render globals
# camera.
cameras = cmds.ls(type="camera", long=True)
renderable = [c for c in cameras if cmds.getAttr("%s.renderable" % c)]
camera = renderable[0]
for node in instance.data["setMembers"]:
for node in instance.data["contentMembers"]:
camera_shapes = cmds.listRelatives(
node, shapes=True, type="camera"
)

View file

@ -1,48 +1,8 @@
from maya import cmds
import maya.api.OpenMaya as om
import pyblish.api
import json
def get_all_children(nodes):
    """Return all children of `nodes` including each instanced child.

    Using maya.cmds.listRelatives(allDescendents=True) includes only the first
    instance. As such, this function acts as an optimal replacement with a
    focus on a fast query.

    Args:
        nodes (iterable): Maya node names/paths whose hierarchies to traverse.

    Returns:
        list: Full DAG paths of all descendants (unordered).
    """
    sel = om.MSelectionList()
    traversed = set()
    # One reusable depth-first DAG iterator for all roots.
    iterator = om.MItDag(om.MItDag.kDepthFirst)
    for node in nodes:
        if node in traversed:
            # Ignore if already processed as a child
            # before
            continue

        sel.clear()
        sel.add(node)
        dag = sel.getDagPath(0)

        iterator.reset(dag)
        # ignore self
        iterator.next()  # noqa: B305
        while not iterator.isDone():

            path = iterator.fullPathName()
            if path in traversed:
                # Subtree already visited; prune() skips its children.
                iterator.prune()
                iterator.next()  # noqa: B305
                continue

            traversed.add(path)
            iterator.next()  # noqa: B305

    return list(traversed)
from openpype.hosts.maya.api.lib import get_all_children
class CollectInstances(pyblish.api.ContextPlugin):

View file

@ -556,7 +556,7 @@ class CollectLook(pyblish.api.InstancePlugin):
continue
if cmds.getAttr(attribute, type=True) == "message":
continue
node_attributes[attr] = cmds.getAttr(attribute)
node_attributes[attr] = cmds.getAttr(attribute, asString=True)
# Only include if there are any properties we care about
if not node_attributes:
continue

View file

@ -1,12 +1,12 @@
import os
from collections import defaultdict
import json
from maya import cmds
import arnold
from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import (
maintained_selection, attribute_values, delete_after
)
from openpype.hosts.maya.api import lib
class ExtractArnoldSceneSource(publish.Extractor):
@ -19,8 +19,7 @@ class ExtractArnoldSceneSource(publish.Extractor):
def process(self, instance):
staging_dir = self.staging_dir(instance)
filename = "{}.ass".format(instance.name)
file_path = os.path.join(staging_dir, filename)
file_path = os.path.join(staging_dir, "{}.ass".format(instance.name))
# Mask
mask = arnold.AI_NODE_ALL
@ -71,8 +70,8 @@ class ExtractArnoldSceneSource(publish.Extractor):
"mask": mask
}
filenames = self._extract(
instance.data["setMembers"], attribute_data, kwargs
filenames, nodes_by_id = self._extract(
instance.data["contentMembers"], attribute_data, kwargs
)
if "representations" not in instance.data:
@ -88,6 +87,19 @@ class ExtractArnoldSceneSource(publish.Extractor):
instance.data["representations"].append(representation)
json_path = os.path.join(staging_dir, "{}.json".format(instance.name))
with open(json_path, "w") as f:
json.dump(nodes_by_id, f)
representation = {
"name": "json",
"ext": "json",
"files": os.path.basename(json_path),
"stagingDir": staging_dir
}
instance.data["representations"].append(representation)
self.log.info(
"Extracted instance {} to: {}".format(instance.name, staging_dir)
)
@ -97,7 +109,7 @@ class ExtractArnoldSceneSource(publish.Extractor):
return
kwargs["filename"] = file_path.replace(".ass", "_proxy.ass")
filenames = self._extract(
filenames, _ = self._extract(
instance.data["proxy"], attribute_data, kwargs
)
@ -113,34 +125,60 @@ class ExtractArnoldSceneSource(publish.Extractor):
instance.data["representations"].append(representation)
def _extract(self, nodes, attribute_data, kwargs):
self.log.info("Writing: " + kwargs["filename"])
self.log.info(
"Writing {} with:\n{}".format(kwargs["filename"], kwargs)
)
filenames = []
nodes_by_id = defaultdict(list)
# Duplicating nodes so they are direct children of the world. This
# makes the hierarchy of any exported ass file the same.
with delete_after() as delete_bin:
with lib.delete_after() as delete_bin:
duplicate_nodes = []
for node in nodes:
# Only interested in transforms:
if cmds.nodeType(node) != "transform":
continue
# Only interested in transforms with shapes.
shapes = cmds.listRelatives(
node, shapes=True, noIntermediate=True
)
if not shapes:
continue
duplicate_transform = cmds.duplicate(node)[0]
# Discard the children.
shapes = cmds.listRelatives(duplicate_transform, shapes=True)
if cmds.listRelatives(duplicate_transform, parent=True):
duplicate_transform = cmds.parent(
duplicate_transform, world=True
)[0]
basename = node.rsplit("|", 1)[-1].rsplit(":", 1)[-1]
duplicate_transform = cmds.rename(
duplicate_transform, basename
)
# Discard children nodes that are not shapes
shapes = cmds.listRelatives(
duplicate_transform, shapes=True, fullPath=True
)
children = cmds.listRelatives(
duplicate_transform, children=True
duplicate_transform, children=True, fullPath=True
)
cmds.delete(set(children) - set(shapes))
duplicate_transform = cmds.parent(
duplicate_transform, world=True
)[0]
cmds.rename(duplicate_transform, node.split("|")[-1])
duplicate_transform = "|" + node.split("|")[-1]
duplicate_nodes.append(duplicate_transform)
duplicate_nodes.extend(shapes)
delete_bin.append(duplicate_transform)
with attribute_values(attribute_data):
with maintained_selection():
# Copy cbId to mtoa_constant.
for node in duplicate_nodes:
# Converting Maya hierarchy separator "|" to Arnold
# separator "/".
nodes_by_id[lib.get_id(node)].append(node.replace("|", "/"))
with lib.attribute_values(attribute_data):
with lib.maintained_selection():
self.log.info(
"Writing: {}".format(duplicate_nodes)
)
@ -157,4 +195,4 @@ class ExtractArnoldSceneSource(publish.Extractor):
self.log.info("Exported: {}".format(filenames))
return filenames
return filenames, nodes_by_id

View file

@ -1,5 +1,3 @@
import maya.cmds as cmds
import pyblish.api
from openpype.pipeline.publish import (
ValidateContentsOrder, PublishValidationError
@ -22,10 +20,11 @@ class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
families = ["ass"]
label = "Validate Arnold Scene Source"
def _get_nodes_data(self, nodes):
def _get_nodes_by_name(self, nodes):
ungrouped_nodes = []
nodes_by_name = {}
parents = []
same_named_nodes = {}
for node in nodes:
node_split = node.split("|")
if len(node_split) == 2:
@ -35,21 +34,38 @@ class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
if parent:
parents.append(parent)
nodes_by_name[node_split[-1]] = node
for shape in cmds.listRelatives(node, shapes=True):
nodes_by_name[shape.split("|")[-1]] = shape
node_name = node.rsplit("|", 1)[-1].rsplit(":", 1)[-1]
# Check for same-named nodes, which can happen in different
# hierarchies.
if node_name in nodes_by_name:
try:
same_named_nodes[node_name].append(node)
except KeyError:
same_named_nodes[node_name] = [
nodes_by_name[node_name], node
]
nodes_by_name[node_name] = node
if same_named_nodes:
message = "Found nodes with the same name:"
for name, nodes in same_named_nodes.items():
message += "\n\n\"{}\":\n{}".format(name, "\n".join(nodes))
raise PublishValidationError(message)
return ungrouped_nodes, nodes_by_name, parents
def process(self, instance):
ungrouped_nodes = []
nodes, content_nodes_by_name, content_parents = self._get_nodes_data(
instance.data["setMembers"]
nodes, content_nodes_by_name, content_parents = (
self._get_nodes_by_name(instance.data["contentMembers"])
)
ungrouped_nodes.extend(nodes)
nodes, proxy_nodes_by_name, proxy_parents = self._get_nodes_data(
nodes, proxy_nodes_by_name, proxy_parents = self._get_nodes_by_name(
instance.data.get("proxy", [])
)
ungrouped_nodes.extend(nodes)
@ -66,11 +82,11 @@ class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
return
# Validate for content and proxy nodes amount being the same.
if len(instance.data["setMembers"]) != len(instance.data["proxy"]):
if len(instance.data["contentMembers"]) != len(instance.data["proxy"]):
raise PublishValidationError(
"Amount of content nodes ({}) and proxy nodes ({}) needs to "
"be the same.".format(
len(instance.data["setMembers"]),
len(instance.data["contentMembers"]),
len(instance.data["proxy"])
)
)

View file

@ -0,0 +1,74 @@
import pyblish.api
from openpype.hosts.maya.api import lib
from openpype.pipeline.publish import (
ValidateContentsOrder, PublishValidationError, RepairAction
)
class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin):
    """Validate Arnold Scene Source cbId.

    It is required for the proxy and content nodes to share the same cbId.
    """

    order = ValidateContentsOrder
    hosts = ["maya"]
    families = ["ass"]
    label = "Validate Arnold Scene Source CBID"
    actions = [RepairAction]

    @staticmethod
    def _get_nodes_by_name(nodes):
        """Map short node name (hierarchy and namespace stripped) to node."""
        nodes_by_name = {}
        for node in nodes:
            node_name = node.rsplit("|", 1)[-1].rsplit(":", 1)[-1]
            nodes_by_name[node_name] = node
        return nodes_by_name

    @classmethod
    def get_invalid_couples(cls, instance):
        """Return (content_node, proxy_node) pairs whose cbId differ.

        Content and proxy nodes are matched by their short name; content
        nodes without a proxy counterpart are skipped with a debug log.
        """
        content_nodes_by_name = cls._get_nodes_by_name(
            instance.data["contentMembers"]
        )
        proxy_nodes_by_name = cls._get_nodes_by_name(
            instance.data.get("proxy", [])
        )

        invalid_couples = []
        for content_name, content_node in content_nodes_by_name.items():
            proxy_node = proxy_nodes_by_name.get(content_name, None)
            if not proxy_node:
                cls.log.debug(
                    "Content node '{}' has no matching proxy node.".format(
                        content_node
                    )
                )
                continue

            content_id = lib.get_id(content_node)
            proxy_id = lib.get_id(proxy_node)
            if content_id != proxy_id:
                invalid_couples.append((content_node, proxy_node))

        return invalid_couples

    def process(self, instance):
        # Proxy validation.
        if not instance.data.get("proxy", []):
            return

        # Validate for proxy nodes sharing the same cbId as content nodes.
        invalid_couples = self.get_invalid_couples(instance)
        if invalid_couples:
            raise PublishValidationError(
                "Found proxy nodes with mismatching cbid:\n{}".format(
                    invalid_couples
                )
            )

    @classmethod
    def repair(cls, instance):
        """Copy the content node's cbId onto its mismatching proxy node."""
        # BUG FIX: `get_invalid_couples` is a classmethod and is already
        # bound to `cls`; the original passed `cls` explicitly as well,
        # raising TypeError (too many arguments) whenever repair ran.
        for content_node, proxy_node in cls.get_invalid_couples(instance):
            # NOTE(review): overwrite=False will not replace an existing
            # (mismatching) cbId on the proxy node — confirm whether
            # overwrite=True is the intended repair behavior.
            lib.set_id(proxy_node, lib.get_id(content_node), overwrite=False)

View file

@ -24,6 +24,7 @@ from .commands import (
remove_unused_looks
)
from .vray_proxies import vrayproxy_assign_look
from . import arnold_standin
module = sys.modules[__name__]
module.window = None
@ -43,7 +44,7 @@ class MayaLookAssignerWindow(QtWidgets.QWidget):
filename = get_workfile()
self.setObjectName("lookManager")
self.setWindowTitle("Look Manager 1.3.0 - [{}]".format(filename))
self.setWindowTitle("Look Manager 1.4.0 - [{}]".format(filename))
self.setWindowFlags(QtCore.Qt.Window)
self.setParent(parent)
@ -240,18 +241,37 @@ class MayaLookAssignerWindow(QtWidgets.QWidget):
))
nodes = item["nodes"]
# Assign Vray Proxy look.
if cmds.pluginInfo('vrayformaya', query=True, loaded=True):
self.echo("Getting vray proxy nodes ...")
vray_proxies = set(cmds.ls(type="VRayProxy", long=True))
if vray_proxies:
for vp in vray_proxies:
if vp in nodes:
vrayproxy_assign_look(vp, subset_name)
for vp in vray_proxies:
if vp in nodes:
vrayproxy_assign_look(vp, subset_name)
nodes = list(set(item["nodes"]).difference(vray_proxies))
nodes = list(set(item["nodes"]).difference(vray_proxies))
else:
self.echo(
"Could not assign to VRayProxy because vrayformaya plugin "
"is not loaded."
)
# Assign look
# Assign Arnold Standin look.
if cmds.pluginInfo("mtoa", query=True, loaded=True):
arnold_standins = set(cmds.ls(type="aiStandIn", long=True))
for standin in arnold_standins:
if standin in nodes:
arnold_standin.assign_look(standin, subset_name)
else:
self.echo(
"Could not assign to aiStandIn because mtoa plugin is not "
"loaded."
)
nodes = list(set(item["nodes"]).difference(arnold_standins))
# Assign look
if nodes:
assign_look_by_version(nodes, version_id=version["_id"])

View file

@ -0,0 +1,247 @@
import os
import json
from collections import defaultdict
import logging
from maya import cmds
from openpype.pipeline import legacy_io
from openpype.client import get_last_version_by_subset_name
from openpype.hosts.maya import api
from . import lib
log = logging.getLogger(__name__)
ATTRIBUTE_MAPPING = {
"primaryVisibility": "visibility", # Camera
"castsShadows": "visibility", # Shadow
"receiveShadows": "receive_shadows",
"aiSelfShadows": "self_shadows",
"aiOpaque": "opaque",
"aiMatte": "matte",
"aiVisibleInDiffuseTransmission": "visibility",
"aiVisibleInSpecularTransmission": "visibility",
"aiVisibleInVolume": "visibility",
"aiVisibleInDiffuseReflection": "visibility",
"aiVisibleInSpecularReflection": "visibility",
"aiSubdivUvSmoothing": "subdiv_uv_smoothing",
"aiDispHeight": "disp_height",
"aiDispPadding": "disp_padding",
"aiDispZeroValue": "disp_zero_value",
"aiStepSize": "step_size",
"aiVolumePadding": "volume_padding",
"aiSubdivType": "subdiv_type",
"aiSubdivIterations": "subdiv_iterations"
}
def calculate_visibility_mask(attributes):
    """Build the Arnold visibility bitmask from Maya visibility attributes.

    Attributes missing from `attributes` count as enabled; every attribute
    explicitly set to a falsy value clears its bit from the full mask (255).

    https://arnoldsupport.com/2018/11/21/backdoor-setting-visibility/

    Args:
        attributes (dict): Attribute name to value mapping.

    Returns:
        int: Visibility bitmask in the range 0-255.
    """
    bit_by_attribute = {
        "primaryVisibility": 1,  # Camera
        "castsShadows": 2,  # Shadow
        "aiVisibleInDiffuseTransmission": 4,
        "aiVisibleInSpecularTransmission": 8,
        "aiVisibleInVolume": 16,
        "aiVisibleInDiffuseReflection": 32,
        "aiVisibleInSpecularReflection": 64
    }
    disabled_bits = sum(
        bit for attribute, bit in bit_by_attribute.items()
        if not attributes.get(attribute, True)
    )
    return 255 - disabled_bits
def get_nodes_by_id(standin):
    """Get node id from aiStandIn via json sidecar.

    The sidecar is the first ".json" file found next to the stand-in's
    ".dso" file.

    Args:
        standin (string): aiStandIn node.

    Returns:
        (dict): Dictionary with node full name/path and id.
    """
    dso_path = cmds.getAttr(standin + ".dso")
    directory = os.path.dirname(dso_path)
    sidecar = next(
        (
            os.path.join(directory, name)
            for name in os.listdir(directory)
            if name.endswith(".json")
        ),
        None
    )

    if not sidecar:
        log.warning("Could not find json file for {}.".format(standin))
        return {}

    with open(sidecar, "r") as handle:
        return json.load(handle)
def shading_engine_assignments(shading_engine, attribute, nodes, assignments):
    """Full assignments with shader or disp_map.

    Appends an Arnold operator assignment expression
    (``shader='...'``/``disp_map='...'``) for each node to `assignments`.

    Note:
        `nodes` is mutated in place when face assignments are stripped, so
        a caller reusing the same list (e.g. for a displacementShader pass)
        sees the stripped names.

    Args:
        shading_engine (string): Shading engine for material.
        attribute (string): "surfaceShader" or "displacementShader"
        nodes: (list): Nodes paths relative to aiStandIn.
        assignments (dict): Assignments by nodes.
    """
    shader_inputs = cmds.listConnections(
        shading_engine + "." + attribute, source=True
    )
    if not shader_inputs:
        # Nothing connected on this attribute; no assignment to create.
        log.info(
            "Shading engine \"{}\" missing input \"{}\"".format(
                shading_engine, attribute
            )
        )
        return

    # Strip off component assignments
    for i, node in enumerate(nodes):
        if "." in node:
            log.warning(
                "Converting face assignment to full object assignment. This "
                "conversion can be lossy: {}".format(node)
            )
            nodes[i] = node.split(".")[0]

    # Only the first connected input is used for the expression.
    shader_type = "shader" if attribute == "surfaceShader" else "disp_map"
    assignment = "{}='{}'".format(shader_type, shader_inputs[0])
    for node in nodes:
        assignments[node].append(assignment)
def assign_look(standin, subset):
    """Assign a published look `subset` to an aiStandIn node.

    Reads the node-id mapping from the stand-in's json sidecar, loads the
    latest look version per asset, and recreates the look's shader and
    attribute edits as `aiSetParameter` operators connected to the
    stand-in's ``.operators`` plug (existing operator connections are
    cleared first).

    Args:
        standin (str): aiStandIn node to assign the look to.
        subset (str): Look subset name to load.
    """
    log.info("Assigning {} to {}.".format(subset, standin))

    nodes_by_id = get_nodes_by_id(standin)

    # Group by asset id so we run over the look per asset
    node_ids_by_asset_id = defaultdict(set)
    for node_id in nodes_by_id:
        # assumes node ids are "<asset_id>:<suffix>" — the prefix selects
        # which asset's look applies.
        asset_id = node_id.split(":", 1)[0]
        node_ids_by_asset_id[asset_id].add(node_id)

    project_name = legacy_io.active_project()
    for asset_id, node_ids in node_ids_by_asset_id.items():

        # Get latest look version
        version = get_last_version_by_subset_name(
            project_name,
            subset_name=subset,
            asset_id=asset_id,
            fields=["_id"]
        )
        if not version:
            log.info("Didn't find last version for subset name {}".format(
                subset
            ))
            continue

        relationships = lib.get_look_relationships(version["_id"])
        shader_nodes, container_node = lib.load_look(version["_id"])
        # Namespace of the loaded look, taken from its first member node.
        namespace = shader_nodes[0].split(":")[0]

        # Get only the node ids and paths related to this asset
        # And get the shader edits the look supplies
        asset_nodes_by_id = {
            node_id: nodes_by_id[node_id] for node_id in node_ids
        }
        edits = list(
            api.lib.iter_shader_edits(
                relationships, shader_nodes, asset_nodes_by_id
            )
        )

        # Create assignments
        node_assignments = {}
        for edit in edits:
            for node in edit["nodes"]:
                if node not in node_assignments:
                    node_assignments[node] = []

            if edit["action"] == "assign":
                if not cmds.ls(edit["shader"], type="shadingEngine"):
                    log.info("Skipping non-shader: %s" % edit["shader"])
                    continue

                shading_engine_assignments(
                    shading_engine=edit["shader"],
                    attribute="surfaceShader",
                    nodes=edit["nodes"],
                    assignments=node_assignments
                )
                shading_engine_assignments(
                    shading_engine=edit["shader"],
                    attribute="displacementShader",
                    nodes=edit["nodes"],
                    assignments=node_assignments
                )

            if edit["action"] == "setattr":
                visibility = False
                for attr, value in edit["attributes"].items():
                    if attr not in ATTRIBUTE_MAPPING:
                        log.warning(
                            "Skipping setting attribute {} on {} because it is"
                            " not recognized.".format(attr, edit["nodes"])
                        )
                        continue

                    # Arnold expects string values quoted in the expression.
                    if isinstance(value, str):
                        value = "'{}'".format(value)

                    # Visibility attributes are collapsed into one bitmask
                    # assignment below instead of individual assignments.
                    if ATTRIBUTE_MAPPING[attr] == "visibility":
                        visibility = True
                        continue

                    assignment = "{}={}".format(ATTRIBUTE_MAPPING[attr], value)

                    for node in edit["nodes"]:
                        node_assignments[node].append(assignment)

                if visibility:
                    mask = calculate_visibility_mask(edit["attributes"])
                    assignment = "visibility={}".format(mask)

                    for node in edit["nodes"]:
                        node_assignments[node].append(assignment)

        # Assign shader
        # Clear all current shader assignments
        plug = standin + ".operators"
        num = cmds.getAttr(plug, size=True)
        # Remove in reverse so multi-instance indices stay valid.
        for i in reversed(range(num)):
            cmds.removeMultiInstance("{}[{}]".format(plug, i), b=True)

        # Create new assignment overrides
        index = 0
        for node, assignments in node_assignments.items():
            if not assignments:
                continue

            with api.lib.maintained_selection():
                operator = cmds.createNode("aiSetParameter")
                operator = cmds.rename(operator, namespace + ":" + operator)

            cmds.setAttr(operator + ".selection", node, type="string")
            for i, assignment in enumerate(assignments):
                cmds.setAttr(
                    "{}.assignment[{}]".format(operator, i),
                    assignment,
                    type="string"
                )

            cmds.connectAttr(
                operator + ".out", "{}[{}]".format(plug, index)
            )

            index += 1

            # Track the operator in the look's container for management.
            cmds.sets(operator, edit=True, addElement=container_node)

View file

@ -13,6 +13,7 @@ from openpype.pipeline import (
from openpype.hosts.maya.api import lib
from .vray_proxies import get_alembic_ids_cache
from . import arnold_standin
log = logging.getLogger(__name__)
@ -44,33 +45,11 @@ def get_namespace_from_node(node):
return parts[0] if len(parts) > 1 else u":"
def list_descendents(nodes):
    """Include full descendant hierarchy of given nodes.

    This is a workaround to cmds.listRelatives(allDescendents=True) because
    this way correctly keeps children instance paths (see Maya documentation).

    This fixes LKD-26: assignments not working as expected on instanced shapes.

    Return:
        list: List of children descendents of nodes
    """
    result = []
    # Walk level by level: listRelatives returns the direct children of the
    # previous level; stop when a level has no children (returns None).
    while True:
        nodes = cmds.listRelatives(nodes,
                                   fullPath=True)
        if nodes:
            result.extend(nodes)
        else:
            return result
def get_selected_nodes():
"""Get information from current selection"""
selection = cmds.ls(selection=True, long=True)
hierarchy = list_descendents(selection)
hierarchy = lib.get_all_children(selection)
return list(set(selection + hierarchy))
@ -105,10 +84,12 @@ def create_asset_id_hash(nodes):
path = cmds.getAttr("{}.fileName".format(node))
ids = get_alembic_ids_cache(path)
for k, _ in ids.items():
pid = k.split(":")[0]
if node not in node_id_hash[pid]:
node_id_hash[pid].append(node)
id = k.split(":")[0]
node_id_hash[id].append(node)
elif cmds.nodeType(node) == "aiStandIn":
for id, _ in arnold_standin.get_nodes_by_id(node).items():
id = id.split(":")[0]
node_id_hash[id].append(node)
else:
value = lib.get_id(node)
if value is None:

View file

@ -0,0 +1,87 @@
import json
import logging
from openpype.pipeline import (
legacy_io,
get_representation_path,
registered_host,
discover_loader_plugins,
loaders_from_representation,
load_container
)
from openpype.client import get_representation_by_name
from openpype.hosts.maya.api import lib
log = logging.getLogger(__name__)
def get_look_relationships(version_id):
    # type: (str) -> dict
    """Get relations for the look.

    Reads the look's "json" representation sidecar from disk.

    Args:
        version_id (str): Parent version Id.

    Returns:
        dict: Dictionary of relations.
    """
    representation = get_representation_by_name(
        legacy_io.active_project(),
        representation_name="json",
        version_id=version_id
    )

    # Load relationships
    relationships_path = get_representation_path(representation)
    with open(relationships_path, "r") as handle:
        return json.load(handle)
def load_look(version_id):
    # type: (str) -> tuple
    """Load look from version.

    Get look from version and invoke Loader for it. If the look's "ma"
    representation is already loaded in the scene it is reused instead of
    loaded again.

    Args:
        version_id (str): Version ID

    Returns:
        tuple: (list of shader nodes, container node name).
    """
    project_name = legacy_io.active_project()

    # Get representations of shader file and relationships
    look_representation = get_representation_by_name(
        project_name, representation_name="ma", version_id=version_id
    )

    # See if representation is already loaded, if so reuse it.
    host = registered_host()
    representation_id = str(look_representation['_id'])
    # for/else: the else branch runs only when no loaded container matched.
    for container in host.ls():
        if (container['loader'] == "LookLoader" and
                container['representation'] == representation_id):
            log.info("Reusing loaded look ...")
            container_node = container['objectName']
            break
    else:
        log.info("Using look for the first time ...")

        # Load file
        all_loaders = discover_loader_plugins()
        loaders = loaders_from_representation(all_loaders, representation_id)
        loader = next(
            (i for i in loaders if i.__name__ == "LookLoader"), None)
        if loader is None:
            raise RuntimeError("Could not find LookLoader, this is a bug")

        # Reference the look file
        with lib.maintained_selection():
            container_node = load_container(loader, look_representation)[0]

    return lib.get_container_members(container_node), container_node

View file

@ -3,26 +3,16 @@
import os
from collections import defaultdict
import logging
import json
import six
import alembic.Abc
from maya import cmds
from openpype.client import (
get_representation_by_name,
get_last_version_by_subset_name,
)
from openpype.pipeline import (
legacy_io,
load_container,
loaders_from_representation,
discover_loader_plugins,
get_representation_path,
registered_host,
)
from openpype.hosts.maya.api import lib
from openpype.client import get_last_version_by_subset_name
from openpype.pipeline import legacy_io
import openpype.hosts.maya.lib as maya_lib
from . import lib
log = logging.getLogger(__name__)
@ -149,79 +139,6 @@ def assign_vrayproxy_shaders(vrayproxy, assignments):
index += 1
def get_look_relationships(version_id):
    # type: (str) -> dict
    """Get relations for the look.

    Reads the look's "json" representation sidecar from disk.

    Args:
        version_id (str): Parent version Id.

    Returns:
        dict: Dictionary of relations.
    """
    project_name = legacy_io.active_project()
    json_representation = get_representation_by_name(
        project_name, representation_name="json", version_id=version_id
    )

    # Load relationships
    shader_relation = get_representation_path(json_representation)
    with open(shader_relation, "r") as f:
        relationships = json.load(f)

    return relationships
def load_look(version_id):
    # type: (str) -> list
    """Load look from version.

    Get look from version and invoke Loader for it. If the look's "ma"
    representation is already loaded in the scene it is reused instead of
    loaded again.

    Args:
        version_id (str): Version ID

    Returns:
        list of shader nodes.
    """
    project_name = legacy_io.active_project()

    # Get representations of shader file and relationships
    look_representation = get_representation_by_name(
        project_name, representation_name="ma", version_id=version_id
    )

    # See if representation is already loaded, if so reuse it.
    host = registered_host()
    representation_id = str(look_representation['_id'])
    # for/else: the else branch runs only when no loaded container matched.
    for container in host.ls():
        if (container['loader'] == "LookLoader" and
                container['representation'] == representation_id):
            log.info("Reusing loaded look ...")
            container_node = container['objectName']
            break
    else:
        log.info("Using look for the first time ...")

        # Load file
        all_loaders = discover_loader_plugins()
        loaders = loaders_from_representation(all_loaders, representation_id)
        loader = next(
            (i for i in loaders if i.__name__ == "LookLoader"), None)
        if loader is None:
            raise RuntimeError("Could not find LookLoader, this is a bug")

        # Reference the look file
        with lib.maintained_selection():
            container_node = load_container(loader, look_representation)

    # Get container members
    shader_nodes = lib.get_container_members(container_node)

    return shader_nodes
def vrayproxy_assign_look(vrayproxy, subset="lookDefault"):
# type: (str, str) -> None
"""Assign look to vray proxy.
@ -263,8 +180,8 @@ def vrayproxy_assign_look(vrayproxy, subset="lookDefault"):
))
continue
relationships = get_look_relationships(version["_id"])
shadernodes = load_look(version["_id"])
relationships = lib.get_look_relationships(version["_id"])
shadernodes, _ = lib.load_look(version["_id"])
# Get only the node ids and paths related to this asset
# And get the shader edits the look supplies
@ -272,8 +189,10 @@ def vrayproxy_assign_look(vrayproxy, subset="lookDefault"):
node_id: nodes_by_id[node_id] for node_id in node_ids
}
edits = list(
lib.iter_shader_edits(
relationships, shadernodes, asset_nodes_by_id))
maya_lib.iter_shader_edits(
relationships, shadernodes, asset_nodes_by_id
)
)
# Create assignments
assignments = {}