Merge pull request #1200 from pypeclub/feature/1180-maya-support-for-redshift-attributes-in-looks

Maya: Support for Redshift nodes in looks
This commit is contained in:
Milan Kolar 2021-04-19 12:13:59 +02:00 committed by GitHub
commit 175ab0f52b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 223 additions and 73 deletions

View file

@ -105,7 +105,23 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
# Load relationships
shader_relation = api.get_representation_path(json_representation)
with open(shader_relation, "r") as f:
relationships = json.load(f)
json_data = json.load(f)
for rel, data in json_data["relationships"].items():
# process only non-shading nodes
current_node = "{}:{}".format(container["namespace"], rel)
if current_node in shader_nodes:
continue
print("processing {}".format(rel))
current_members = set(cmds.ls(
cmds.sets(current_node, query=True) or [], long=True))
new_members = {"{}".format(
m["name"]) for m in data["members"] or []}
dif = new_members.difference(current_members)
# add to set
cmds.sets(
dif, forceElement="{}:{}".format(container["namespace"], rel))
# update of reference could result in failed edits - material is not
# present because of renaming etc.
@ -120,7 +136,7 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
cmds.file(cr=reference_node) # cleanReference
# reapply shading groups from json representation on orig nodes
openpype.hosts.maya.api.lib.apply_shaders(relationships,
openpype.hosts.maya.api.lib.apply_shaders(json_data,
shader_nodes,
orig_nodes)
@ -128,12 +144,13 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
"All successful edits were kept intact.\n",
"Failed and removed edits:"]
msg.extend(failed_edits)
msg = ScrollMessageBox(QtWidgets.QMessageBox.Warning,
"Some reference edit failed",
msg)
msg.exec_()
attributes = relationships.get("attributes", [])
attributes = json_data.get("attributes", [])
# region compute lookup
nodes_by_id = defaultdict(list)

View file

@ -1,8 +1,10 @@
# -*- coding: utf-8 -*-
"""Maya look collector."""
import re
import os
import glob
from maya import cmds
from maya import cmds # noqa
import pyblish.api
from openpype.hosts.maya.api import lib
@ -16,6 +18,11 @@ SHAPE_ATTRS = ["castsShadows",
"doubleSided",
"opposite"]
RENDERER_NODE_TYPES = [
# redshift
"RedshiftMeshParameters"
]
SHAPE_ATTRS = set(SHAPE_ATTRS)
@ -29,7 +36,6 @@ def get_look_attrs(node):
list: Attribute names to extract
"""
# When referenced get only attributes that are "changed since file open"
# which includes any reference edits, otherwise take *all* user defined
# attributes
@ -219,9 +225,13 @@ class CollectLook(pyblish.api.InstancePlugin):
with lib.renderlayer(instance.data["renderlayer"]):
self.collect(instance)
def collect(self, instance):
"""Collect looks.
Args:
instance: Instance to collect.
"""
self.log.info("Looking for look associations "
"for %s" % instance.data['name'])
@ -235,48 +245,91 @@ class CollectLook(pyblish.api.InstancePlugin):
self.log.info("Gathering set relations..")
# Ensure iteration happen in a list so we can remove keys from the
# dict within the loop
for objset in list(sets):
self.log.debug("From %s.." % objset)
# skipped types of attribute on render specific nodes
disabled_types = ["message", "TdataCompound"]
for obj_set in list(sets):
self.log.debug("From {}".format(obj_set))
# if node is specified as renderer node type, it will be
# serialized with its attributes.
if cmds.nodeType(obj_set) in RENDERER_NODE_TYPES:
self.log.info("- {} is {}".format(
obj_set, cmds.nodeType(obj_set)))
node_attrs = []
# serialize its attributes so they can be recreated on look
# load.
for attr in cmds.listAttr(obj_set):
# skip publishedNodeInfo attributes as they break
# getAttr() and we don't need them anyway
if attr.startswith("publishedNodeInfo"):
continue
# skip attributes types defined in 'disabled_type' list
if cmds.getAttr("{}.{}".format(obj_set, attr), type=True) in disabled_types: # noqa
continue
node_attrs.append((
attr,
cmds.getAttr("{}.{}".format(obj_set, attr)),
cmds.getAttr(
"{}.{}".format(obj_set, attr), type=True)
))
for member in cmds.ls(
cmds.sets(obj_set, query=True), long=True):
member_data = self.collect_member_data(member,
instance_lookup)
if not member_data:
continue
# Add information of the node to the members list
sets[obj_set]["members"].append(member_data)
# Get all nodes of the current objectSet (shadingEngine)
for member in cmds.ls(cmds.sets(objset, query=True), long=True):
for member in cmds.ls(cmds.sets(obj_set, query=True), long=True):
member_data = self.collect_member_data(member,
instance_lookup)
if not member_data:
continue
# Add information of the node to the members list
sets[objset]["members"].append(member_data)
sets[obj_set]["members"].append(member_data)
# Remove sets that didn't have any members assigned in the end
# Thus the data will be limited to only what we need.
self.log.info("objset {}".format(sets[objset]))
if not sets[objset]["members"] or (not objset.endswith("SG")):
self.log.info("Removing redundant set information: "
"%s" % objset)
sets.pop(objset, None)
self.log.info("obj_set {}".format(sets[obj_set]))
if not sets[obj_set]["members"]:
self.log.info(
"Removing redundant set information: {}".format(obj_set))
sets.pop(obj_set, None)
self.log.info("Gathering attribute changes to instance members..")
attributes = self.collect_attributes_changed(instance)
# Store data on the instance
instance.data["lookData"] = {"attributes": attributes,
"relationships": sets}
instance.data["lookData"] = {
"attributes": attributes,
"relationships": sets
}
# Collect file nodes used by shading engines (if we have any)
files = list()
looksets = sets.keys()
shaderAttrs = [
"surfaceShader",
"volumeShader",
"displacementShader",
"aiSurfaceShader",
"aiVolumeShader"]
materials = list()
files = []
look_sets = sets.keys()
shader_attrs = [
"surfaceShader",
"volumeShader",
"displacementShader",
"aiSurfaceShader",
"aiVolumeShader"]
if look_sets:
materials = []
if looksets:
for look in looksets:
for at in shaderAttrs:
for look in look_sets:
for at in shader_attrs:
try:
con = cmds.listConnections("{}.{}".format(look, at))
except ValueError:
@ -289,10 +342,10 @@ class CollectLook(pyblish.api.InstancePlugin):
self.log.info("Found materials:\n{}".format(materials))
self.log.info("Found the following sets:\n{}".format(looksets))
self.log.info("Found the following sets:\n{}".format(look_sets))
# Get the entire node chain of the look sets
# history = cmds.listHistory(looksets)
history = list()
# history = cmds.listHistory(look_sets)
history = []
for material in materials:
history.extend(cmds.listHistory(material))
files = cmds.ls(history, type="file", long=True)
@ -313,7 +366,7 @@ class CollectLook(pyblish.api.InstancePlugin):
# Ensure unique shader sets
# Add shader sets to the instance for unify ID validation
instance.extend(shader for shader in looksets if shader
instance.extend(shader for shader in look_sets if shader
not in instance_lookup)
self.log.info("Collected look for %s" % instance)
@ -331,7 +384,7 @@ class CollectLook(pyblish.api.InstancePlugin):
dict
"""
sets = dict()
sets = {}
for node in instance:
related_sets = lib.get_related_sets(node)
if not related_sets:
@ -427,6 +480,11 @@ class CollectLook(pyblish.api.InstancePlugin):
"""
self.log.debug("processing: {}".format(node))
if cmds.nodeType(node) not in ["file", "aiImage"]:
self.log.error(
"Unsupported file node: {}".format(cmds.nodeType(node)))
raise AssertionError("Unsupported file node")
if cmds.nodeType(node) == 'file':
self.log.debug(" - file node")
attribute = "{}.fileTextureName".format(node)
@ -435,6 +493,7 @@ class CollectLook(pyblish.api.InstancePlugin):
self.log.debug("aiImage node")
attribute = "{}.filename".format(node)
computed_attribute = attribute
source = cmds.getAttr(attribute)
self.log.info(" - file source: {}".format(source))
color_space_attr = "{}.colorSpace".format(node)

View file

@ -1,13 +1,14 @@
# -*- coding: utf-8 -*-
"""Maya look extractor."""
import os
import sys
import json
import copy
import tempfile
import contextlib
import subprocess
from collections import OrderedDict
from maya import cmds
from maya import cmds # noqa
import pyblish.api
import avalon.maya
@ -22,23 +23,38 @@ HARDLINK = 2
def find_paths_by_hash(texture_hash):
    """Find the texture hash key in the dictionary.

    All paths that originate from it.

    Args:
        texture_hash (str): Hash of the texture.

    Returns:
        Distinct published versions whose ``data.sourceHashes`` entry
        matches ``texture_hash`` — presumably a list of paths; mirrors
        the database ``distinct`` query. TODO confirm against ``io``.
    """
    # Query key addresses the nested hash map stored on version documents.
    key = "data.sourceHashes.{0}".format(texture_hash)
    return io.distinct(key, {"type": "version"})
def maketx(source, destination, *args):
"""Make .tx using maketx with some default settings.
"""Make `.tx` using `maketx` with some default settings.
The settings are based on default as used in Arnold's
txManager in the scene.
This function requires the `maketx` executable to be
on the `PATH`.
Args:
source (str): Path to source file.
destination (str): Writing destination path.
"""
*args: Additional arguments for `maketx`.
Returns:
str: Output of `maketx` command.
"""
cmd = [
"maketx",
"-v", # verbose
@ -56,7 +72,7 @@ def maketx(source, destination, *args):
cmd = " ".join(cmd)
CREATE_NO_WINDOW = 0x08000000
CREATE_NO_WINDOW = 0x08000000 # noqa
kwargs = dict(args=cmd, stderr=subprocess.STDOUT)
if sys.platform == "win32":
@ -118,12 +134,58 @@ class ExtractLook(openpype.api.Extractor):
hosts = ["maya"]
families = ["look"]
order = pyblish.api.ExtractorOrder + 0.2
scene_type = "ma"
@staticmethod
def get_renderer_name():
    """Query the currently active renderer from Maya.

    Returns:
        str: Lower-cased renderer name; any RenderMan variant is
            collapsed to ``"renderman"``.
    """
    current = cmds.getAttr("defaultRenderGlobals.currentRenderer")
    name = current.lower()
    # RenderMan registers under several names; normalize them all
    # to a single identifier.
    return "renderman" if name.startswith("renderman") else name
def get_maya_scene_type(self, instance):
    """Resolve the Maya scene file extension from project settings.

    Looks up the ``ext_mapping`` project setting and, for the first of
    this plugin's families that has a mapping, stores the mapped
    extension on ``self.scene_type``. When no mapping matches (or the
    setting is empty) ``self.scene_type`` keeps its current value.

    Args:
        instance (pyblish.api.Instance): Instance with collected
            project settings.
    """
    ext_mapping = (
        instance.context.data["project_settings"]["maya"]["ext_mapping"]
    )
    if ext_mapping:
        self.log.info("Looking in settings for scene type ...")
        # use extension mapping for first family found
        for family in self.families:
            # explicit membership test instead of try/except KeyError
            if family in ext_mapping:
                self.scene_type = ext_mapping[family]
                self.log.info(
                    "Using {} as scene type".format(self.scene_type))
                break
def process(self, instance):
"""Plugin entry point.
Args:
instance: Instance to process.
"""
# Define extract output file path
dir_path = self.staging_dir(instance)
maya_fname = "{0}.ma".format(instance.name)
maya_fname = "{0}.{1}".format(instance.name, self.scene_type)
json_fname = "{0}.json".format(instance.name)
# Make texture dump folder
@ -148,7 +210,7 @@ class ExtractLook(openpype.api.Extractor):
# Collect all unique files used in the resources
files = set()
files_metadata = dict()
files_metadata = {}
for resource in resources:
# Preserve color space values (force value after filepath change)
# This will also trigger in the same order at end of context to
@ -162,35 +224,33 @@ class ExtractLook(openpype.api.Extractor):
# files.update(os.path.normpath(f))
# Process the resource files
transfers = list()
hardlinks = list()
hashes = dict()
forceCopy = instance.data.get("forceCopy", False)
transfers = []
hardlinks = []
hashes = {}
force_copy = instance.data.get("forceCopy", False)
self.log.info(files)
for filepath in files_metadata:
cspace = files_metadata[filepath]["color_space"]
linearise = False
if do_maketx:
if cspace == "sRGB":
linearise = True
# set its file node to 'raw' as tx will be linearized
files_metadata[filepath]["color_space"] = "raw"
linearize = False
if do_maketx and files_metadata[filepath]["color_space"] == "sRGB": # noqa: E501
linearize = True
# set its file node to 'raw' as tx will be linearized
files_metadata[filepath]["color_space"] = "raw"
source, mode, hash = self._process_texture(
source, mode, texture_hash = self._process_texture(
filepath,
do_maketx,
staging=dir_path,
linearise=linearise,
force=forceCopy
linearize=linearize,
force=force_copy
)
destination = self.resource_destination(instance,
source,
do_maketx)
# Force copy is specified.
if forceCopy:
if force_copy:
mode = COPY
if mode == COPY:
@ -202,10 +262,10 @@ class ExtractLook(openpype.api.Extractor):
# Store the hashes from hash to destination to include in the
# database
hashes[hash] = destination
hashes[texture_hash] = destination
# Remap the resources to the destination path (change node attributes)
destinations = dict()
destinations = {}
remap = OrderedDict() # needs to be ordered, see color space values
for resource in resources:
source = os.path.normpath(resource["source"])
@ -222,7 +282,7 @@ class ExtractLook(openpype.api.Extractor):
color_space_attr = resource["node"] + ".colorSpace"
color_space = cmds.getAttr(color_space_attr)
if files_metadata[source]["color_space"] == "raw":
# set colorpsace to raw if we linearized it
# set color space to raw if we linearized it
color_space = "Raw"
# Remap file node filename to destination
attr = resource["attribute"]
@ -267,11 +327,11 @@ class ExtractLook(openpype.api.Extractor):
json.dump(data, f)
if "files" not in instance.data:
instance.data["files"] = list()
instance.data["files"] = []
if "hardlinks" not in instance.data:
instance.data["hardlinks"] = list()
instance.data["hardlinks"] = []
if "transfers" not in instance.data:
instance.data["transfers"] = list()
instance.data["transfers"] = []
instance.data["files"].append(maya_fname)
instance.data["files"].append(json_fname)
@ -311,14 +371,26 @@ class ExtractLook(openpype.api.Extractor):
maya_path))
def resource_destination(self, instance, filepath, do_maketx):
anatomy = instance.context.data["anatomy"]
"""Get resource destination path.
This is utility function to change path if resource file name is
changed by some external tool like `maketx`.
Args:
instance: Current Instance.
filepath (str): Resource path
do_maketx (bool): Flag if resource is processed by `maketx`.
Returns:
str: Path to resource file
"""
resources_dir = instance.data["resourcesDir"]
# Compute destination location
basename, ext = os.path.splitext(os.path.basename(filepath))
# If maketx then the texture will always end with .tx
# If `maketx` then the texture will always end with .tx
if do_maketx:
ext = ".tx"
@ -326,7 +398,7 @@ class ExtractLook(openpype.api.Extractor):
resources_dir, basename + ext
)
def _process_texture(self, filepath, do_maketx, staging, linearise, force):
def _process_texture(self, filepath, do_maketx, staging, linearize, force):
"""Process a single texture file on disk for publishing.
This will:
1. Check whether it's already published, if so it will do hardlink
@ -363,7 +435,7 @@ class ExtractLook(openpype.api.Extractor):
# Produce .tx file in staging if source file is not .tx
converted = os.path.join(staging, "resources", fname + ".tx")
if linearise:
if linearize:
self.log.info("tx: converting sRGB -> linear")
colorconvert = "--colorconvert sRGB linear"
else:

View file

@ -73,8 +73,10 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
# check if any objectSets are not present in the relationships
missing_sets = [s for s in sets if s not in relationships]
if missing_sets:
for set in missing_sets:
if '_SET' not in set:
for missing_set in missing_sets:
cls.log.debug(missing_set)
if '_SET' not in missing_set:
# A set of this node is not coming along, this is wrong!
cls.log.error("Missing sets '{}' for node "
"'{}'".format(missing_sets, node))
@ -82,8 +84,8 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
continue
# Ensure the node is in the sets that are collected
for shaderset, data in relationships.items():
if shaderset not in sets:
for shader_set, data in relationships.items():
if shader_set not in sets:
# no need to check for a set if the node
# isn't in it anyway
continue
@ -94,7 +96,7 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
# The node is not found in the collected set
# relationships
cls.log.error("Missing '{}' in collected set node "
"'{}'".format(node, shaderset))
"'{}'".format(node, shader_set))
invalid.append(node)
continue