Merge pull request #4046 from j-cube/feature/multiverse

Commit 12b51a60dc by Ondřej Samohel, 2023-01-12 13:47:15 +01:00, committed via GitHub
12 changed files with 400 additions and 132 deletions


@ -44,3 +44,6 @@ class CreateAnimation(plugin.Creator):
# Default to not send to farm.
self.data["farm"] = False
self.data["priority"] = 50
# Default to write normals.
self.data["writeNormals"] = True


@ -6,7 +6,7 @@ class CreateMultiverseUsd(plugin.Creator):
name = "mvUsdMain"
label = "Multiverse USD Asset"
family = "mvUsd"
family = "usd"
icon = "cubes"
def __init__(self, *args, **kwargs):


@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
import maya.cmds as cmds
from maya import mel
import os
from openpype.pipeline import (
load,
@ -11,12 +13,13 @@ from openpype.hosts.maya.api.lib import (
unique_namespace
)
from openpype.hosts.maya.api.pipeline import containerise
from openpype.client import get_representation_by_id
class MultiverseUsdLoader(load.LoaderPlugin):
"""Read USD data in a Multiverse Compound"""
families = ["model", "mvUsd", "mvUsdComposition", "mvUsdOverride",
families = ["model", "usd", "mvUsdComposition", "mvUsdOverride",
"pointcache", "animation"]
representations = ["usd", "usda", "usdc", "usdz", "abc"]
@ -26,7 +29,6 @@ class MultiverseUsdLoader(load.LoaderPlugin):
color = "orange"
def load(self, context, name=None, namespace=None, options=None):
asset = context['asset']['name']
namespace = namespace or unique_namespace(
asset + "_",
@ -34,22 +36,20 @@ class MultiverseUsdLoader(load.LoaderPlugin):
suffix="_",
)
# Create the shape
# Make sure we can load the plugin
cmds.loadPlugin("MultiverseForMaya", quiet=True)
import multiverse
# Create the shape
shape = None
transform = None
with maintained_selection():
cmds.namespace(addNamespace=namespace)
with namespaced(namespace, new=False):
import multiverse
shape = multiverse.CreateUsdCompound(self.fname)
transform = cmds.listRelatives(
shape, parent=True, fullPath=True)[0]
# Lock the shape node so the user cannot delete it.
cmds.lockNode(shape, lock=True)
nodes = [transform, shape]
self[:] = nodes
@ -70,15 +70,34 @@ class MultiverseUsdLoader(load.LoaderPlugin):
shapes = cmds.ls(members, type="mvUsdCompoundShape")
assert shapes, "Cannot find mvUsdCompoundShape in container"
path = get_representation_path(representation)
project_name = representation["context"]["project"]["name"]
prev_representation_id = cmds.getAttr("{}.representation".format(node))
prev_representation = get_representation_by_id(project_name,
prev_representation_id)
prev_path = os.path.normpath(prev_representation["data"]["path"])
# Make sure we can load the plugin
cmds.loadPlugin("MultiverseForMaya", quiet=True)
import multiverse
for shape in shapes:
multiverse.SetUsdCompoundAssetPaths(shape, [path])
asset_paths = multiverse.GetUsdCompoundAssetPaths(shape)
asset_paths = [os.path.normpath(p) for p in asset_paths]
assert asset_paths.count(prev_path) == 1, \
"Couldn't find matching path (or too many)"
prev_path_idx = asset_paths.index(prev_path)
path = get_representation_path(representation)
asset_paths[prev_path_idx] = path
multiverse.SetUsdCompoundAssetPaths(shape, asset_paths)
cmds.setAttr("{}.representation".format(node),
str(representation["_id"]),
type="string")
mel.eval('refreshEditorTemplates;')
def switch(self, container, representation):
self.update(container, representation)
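For context, the updated update() above swaps only the entry in the compound's asset path list that matches the previous representation's path, leaving any other layers on the shape untouched. A minimal sketch of that swap in plain Python (the helper name is ours, not from the PR):

import os

def replace_asset_path(asset_paths, prev_path, new_path):
    # Compare normalized paths, mirroring the os.path.normpath calls above.
    normalized = [os.path.normpath(p) for p in asset_paths]
    prev_path = os.path.normpath(prev_path)
    # Exactly one entry must match, as the loader asserts.
    assert normalized.count(prev_path) == 1, \
        "Couldn't find matching path (or too many)"
    updated = list(asset_paths)
    updated[normalized.index(prev_path)] = new_path
    return updated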


@ -0,0 +1,132 @@
# -*- coding: utf-8 -*-
import maya.cmds as cmds
from maya import mel
import os
import qargparse
from openpype.pipeline import (
load,
get_representation_path
)
from openpype.hosts.maya.api.lib import (
maintained_selection
)
from openpype.hosts.maya.api.pipeline import containerise
from openpype.client import get_representation_by_id
class MultiverseUsdOverLoader(load.LoaderPlugin):
"""Reference file"""
families = ["mvUsdOverride"]
representations = ["usda", "usd", "udsz"]
label = "Load Usd Override into Compound"
order = -10
icon = "code-fork"
color = "orange"
options = [
qargparse.String(
"Which Compound",
label="Compound",
help="Select which compound to add this as a layer to."
)
]
def load(self, context, name=None, namespace=None, options=None):
current_usd = cmds.ls(selection=True,
type="mvUsdCompoundShape",
dag=True,
long=True)
if len(current_usd) != 1:
self.log.error("Current selection invalid: '{}', "
"must contain exactly 1 mvUsdCompoundShape."
"".format(current_usd))
return
# Make sure we can load the plugin
cmds.loadPlugin("MultiverseForMaya", quiet=True)
import multiverse
nodes = current_usd
with maintained_selection():
multiverse.AddUsdCompoundAssetPath(current_usd[0], self.fname)
namespace = current_usd[0].split("|")[1].split(":")[0]
container = containerise(
name=name,
namespace=namespace,
nodes=nodes,
context=context,
loader=self.__class__.__name__)
cmds.addAttr(container, longName="mvUsdCompoundShape",
niceName="mvUsdCompoundShape", dataType="string")
cmds.setAttr(container + ".mvUsdCompoundShape",
current_usd[0], type="string")
return container
def update(self, container, representation):
# type: (dict, dict) -> None
"""Update container with specified representation."""
cmds.loadPlugin("MultiverseForMaya", quiet=True)
import multiverse
node = container['objectName']
assert cmds.objExists(node), "Missing container"
members = cmds.sets(node, query=True) or []
shapes = cmds.ls(members, type="mvUsdCompoundShape")
assert shapes, "Cannot find mvUsdCompoundShape in container"
mvShape = container['mvUsdCompoundShape']
assert mvShape, "Missing mv source"
project_name = representation["context"]["project"]["name"]
prev_representation_id = cmds.getAttr("{}.representation".format(node))
prev_representation = get_representation_by_id(project_name,
prev_representation_id)
prev_path = os.path.normpath(prev_representation["data"]["path"])
path = get_representation_path(representation)
for shape in shapes:
asset_paths = multiverse.GetUsdCompoundAssetPaths(shape)
asset_paths = [os.path.normpath(p) for p in asset_paths]
assert asset_paths.count(prev_path) == 1, \
"Couldn't find matching path (or too many)"
prev_path_idx = asset_paths.index(prev_path)
asset_paths[prev_path_idx] = path
multiverse.SetUsdCompoundAssetPaths(shape, asset_paths)
cmds.setAttr("{}.representation".format(node),
str(representation["_id"]),
type="string")
mel.eval('refreshEditorTemplates;')
def switch(self, container, representation):
self.update(container, representation)
def remove(self, container):
# type: (dict) -> None
"""Remove loaded container."""
# Delete container and its contents
if cmds.objExists(container['objectName']):
members = cmds.sets(container['objectName'], query=True) or []
cmds.delete([container['objectName']] + members)
# Remove the namespace, if empty
namespace = container['namespace']
if cmds.namespace(exists=namespace):
members = cmds.namespaceInfo(namespace, listNamespace=True)
if not members:
cmds.namespace(removeNamespace=namespace)
else:
self.log.warning("Namespace not deleted because it "
"still has members: %s", namespace)


@ -26,7 +26,8 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
"rig",
"camerarig",
"xgen",
"staticMesh"]
"staticMesh",
"mvLook"]
representations = ["ma", "abc", "fbx", "mb"]
label = "Reference"


@ -440,7 +440,8 @@ class CollectLook(pyblish.api.InstancePlugin):
for res in self.collect_resources(n):
instance.data["resources"].append(res)
self.log.info("Collected resources: {}".format(instance.data["resources"]))
self.log.info("Collected resources: {}".format(
instance.data["resources"]))
# Log warning when no relevant sets were retrieved for the look.
if (
@ -548,6 +549,11 @@ class CollectLook(pyblish.api.InstancePlugin):
if not cmds.attributeQuery(attr, node=node, exists=True):
continue
attribute = "{}.{}".format(node, attr)
# We don't support multi (array) attributes yet.
if cmds.attributeQuery(attr, node=node, multi=True):
self.log.warning("Attribute '{}' is a multi attribute and is "
"not supported yet.".format(attribute))
continue
if cmds.getAttr(attribute, type=True) == "message":
continue
node_attributes[attr] = cmds.getAttr(attribute)


@ -21,37 +21,68 @@ COLOUR_SPACES = ['sRGB', 'linear', 'auto']
MIPMAP_EXTENSIONS = ['tdl']
def get_look_attrs(node):
"""Returns attributes of a node that are important for the look.
class _NodeTypeAttrib(object):
"""docstring for _NodeType"""
These are the "changed" attributes (those that have edits applied
in the current scene).
def __init__(self, name, fname, computed_fname=None, colour_space=None):
self.name = name
self.fname = fname
self.computed_fname = computed_fname or fname
self.colour_space = colour_space or "colorSpace"
Returns:
list: Attribute names to extract
def get_fname(self, node):
return "{}.{}".format(node, self.fname)
def get_computed_fname(self, node):
return "{}.{}".format(node, self.computed_fname)
def get_colour_space(self, node):
return "{}.{}".format(node, self.colour_space)
def __str__(self):
return "_NodeTypeAttrib(name={}, fname={}, "
"computed_fname={}, colour_space={})".format(
self.name, self.fname, self.computed_fname, self.colour_space)
NODETYPES = {
"file": [_NodeTypeAttrib("file", "fileTextureName",
"computedFileTextureNamePattern")],
"aiImage": [_NodeTypeAttrib("aiImage", "filename")],
"RedshiftNormalMap": [_NodeTypeAttrib("RedshiftNormalMap", "tex0")],
"dlTexture": [_NodeTypeAttrib("dlTexture", "textureFile",
None, "textureFile_meta_colorspace")],
"dlTriplanar": [_NodeTypeAttrib("dlTriplanar", "colorTexture",
None, "colorTexture_meta_colorspace"),
_NodeTypeAttrib("dlTriplanar", "floatTexture",
None, "floatTexture_meta_colorspace"),
_NodeTypeAttrib("dlTriplanar", "heightTexture",
None, "heightTexture_meta_colorspace")]
}
def get_file_paths_for_node(node):
"""Gets all the file paths in this node.
Returns all file paths that this node references. Some node types only
reference one, but others, like dlTriplanar, can reference three.
Args:
node (str): Name of the Maya node
Returns:
list(str): The evaluated file path for each registered file attribute.
"""
# When referenced get only attributes that are "changed since file open"
# which includes any reference edits, otherwise take *all* user defined
# attributes
is_referenced = cmds.referenceQuery(node, isNodeReferenced=True)
result = cmds.listAttr(node, userDefined=True,
changedSinceFileOpen=is_referenced) or []
# `cbId` is added when a scene is saved, ignore by default
if "cbId" in result:
result.remove("cbId")
node_type = cmds.nodeType(node)
if node_type not in NODETYPES:
return []
# For shapes allow render stat changes
if cmds.objectType(node, isAType="shape"):
attrs = cmds.listAttr(node, changedSinceFileOpen=True) or []
for attr in attrs:
if attr in SHAPE_ATTRS:
result.append(attr)
elif attr.startswith('ai'):
result.append(attr)
return result
paths = []
for node_type_attr in NODETYPES[node_type]:
fname = cmds.getAttr("{}.{}".format(node, node_type_attr.fname))
paths.append(fname)
return paths
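As a companion to get_file_paths_for_node, a hedged sketch (not part of the PR) that reads the colour space for each registered file attribute via the same NODETYPES registry; it assumes maya.cmds is imported as cmds, as elsewhere in this module, and the ValueError fallback mirrors the collector's use of "Raw" when a node has no colour space attribute.

def get_colour_spaces_for_node(node):
    """Illustrative only: one colour space value per registered file attribute."""
    node_type = cmds.nodeType(node)
    spaces = []
    for node_type_attr in NODETYPES.get(node_type, []):
        try:
            spaces.append(cmds.getAttr(node_type_attr.get_colour_space(node)))
        except ValueError:
            # This slot has no colour space attribute on the node.
            spaces.append("Raw")
    return spaces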
def node_uses_image_sequence(node):
@ -69,13 +100,29 @@ def node_uses_image_sequence(node):
"""
# useFrameExtension indicates an explicit image sequence
node_path = get_file_node_path(node).lower()
paths = get_file_node_paths(node)
paths = [path.lower() for path in paths]
# The following tokens imply a sequence
patterns = ["<udim>", "<tile>", "<uvtile>", "u<u>_v<v>", "<frame0"]
return (cmds.getAttr('%s.useFrameExtension' % node) or
any(pattern in node_path for pattern in patterns))
def pattern_in_paths(patterns, paths):
"""Helper function for checking to see if a pattern is contained
in the list of paths"""
for pattern in patterns:
for path in paths:
if pattern in path:
return True
return False
node_type = cmds.nodeType(node)
if node_type == 'dlTexture':
return (cmds.getAttr('{}.useImageSequence'.format(node)) or
pattern_in_paths(patterns, paths))
elif node_type == "file":
return (cmds.getAttr('{}.useFrameExtension'.format(node)) or
pattern_in_paths(patterns, paths))
return False
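A quick worked example of the nested pattern_in_paths helper (paths are made up; shown as comments because the helper is local to node_uses_image_sequence):

# pattern_in_paths(["<udim>", "<frame0"], ["/tex/wall.<udim>.exr"])   -> True
# pattern_in_paths(["<udim>", "<frame0"], ["/tex/wall.1001.exr"])     -> False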
def seq_to_glob(path):
@ -132,7 +179,7 @@ def seq_to_glob(path):
return path
def get_file_node_path(node):
def get_file_node_paths(node):
"""Get the file path used by a Maya file node.
Args:
@ -158,15 +205,9 @@ def get_file_node_path(node):
"<uvtile>"]
lower = texture_pattern.lower()
if any(pattern in lower for pattern in patterns):
return texture_pattern
return [texture_pattern]
if cmds.nodeType(node) == 'aiImage':
return cmds.getAttr('{0}.filename'.format(node))
if cmds.nodeType(node) == 'RedshiftNormalMap':
return cmds.getAttr('{}.tex0'.format(node))
# otherwise use fileTextureName
return cmds.getAttr('{0}.fileTextureName'.format(node))
return get_file_paths_for_node(node)
def get_file_node_files(node):
@ -181,15 +222,15 @@ def get_file_node_files(node):
"""
path = get_file_node_path(node)
path = cmds.workspace(expandName=path)
paths = get_file_node_paths(node)
paths = [cmds.workspace(expandName=path) for path in paths]
if node_uses_image_sequence(node):
glob_pattern = seq_to_glob(path)
return glob.glob(glob_pattern)
elif os.path.exists(path):
return [path]
globs = []
for path in paths:
globs += glob.glob(seq_to_glob(path))
return globs
else:
return []
return list(filter(lambda x: os.path.exists(x), paths))
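A hedged example of the new multi-path behaviour of get_file_node_files (node names and paths are hypothetical):

# A "file" node driving a UDIM sequence is expanded via seq_to_glob + glob:
#   get_file_node_files("wall_file1")
#   -> ["/proj/tex/wall.1001.exr", "/proj/tex/wall.1002.exr"]
# A single still image is returned only if it exists on disk:
#   get_file_node_files("env_aiImage1")
#   -> ["/proj/tex/env.hdr"]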
def get_mipmap(fname):
@ -211,6 +252,11 @@ def is_mipmap(fname):
class CollectMultiverseLookData(pyblish.api.InstancePlugin):
"""Collect Multiverse Look
Searches through the overrides to find all material overrides. From there
it extracts the shading group and then finds all texture files in the
shading group network. It also checks for mipmap versions of texture files
and adds them to the resources to be published.
"""
order = pyblish.api.CollectorOrder + 0.2
@ -258,12 +304,20 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin):
shadingGroup), "members": list()}
# The SG may reference files, add those too!
history = cmds.listHistory(shadingGroup)
files = cmds.ls(history, type="file", long=True)
history = cmds.listHistory(
shadingGroup, allConnections=True)
# We need to iterate over node_types since `cmds.ls` may
# error out if we don't have the appropriate plugin loaded.
files = []
for node_type in NODETYPES.keys():
files += cmds.ls(history,
type=node_type,
long=True)
for f in files:
resources = self.collect_resource(f, publishMipMap)
instance.data["resources"].append(resources)
instance.data["resources"] += resources
elif isinstance(matOver, multiverse.MaterialSourceUsdPath):
# TODO: Handle this later.
@ -284,69 +338,63 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin):
dict
"""
self.log.debug("processing: {}".format(node))
if cmds.nodeType(node) not in ["file", "aiImage", "RedshiftNormalMap"]:
self.log.error(
"Unsupported file node: {}".format(cmds.nodeType(node)))
node_type = cmds.nodeType(node)
self.log.debug("processing: {}/{}".format(node, node_type))
if node_type not in NODETYPES:
self.log.error("Unsupported file node: {}".format(node_type))
raise AssertionError("Unsupported file node")
if cmds.nodeType(node) == 'file':
self.log.debug(" - file node")
attribute = "{}.fileTextureName".format(node)
computed_attribute = "{}.computedFileTextureNamePattern".format(
node)
elif cmds.nodeType(node) == 'aiImage':
self.log.debug("aiImage node")
attribute = "{}.filename".format(node)
computed_attribute = attribute
elif cmds.nodeType(node) == 'RedshiftNormalMap':
self.log.debug("RedshiftNormalMap node")
attribute = "{}.tex0".format(node)
computed_attribute = attribute
resources = []
for node_type_attr in NODETYPES[node_type]:
fname_attrib = node_type_attr.get_fname(node)
computed_fname_attrib = node_type_attr.get_computed_fname(node)
colour_space_attrib = node_type_attr.get_colour_space(node)
source = cmds.getAttr(attribute)
self.log.info(" - file source: {}".format(source))
color_space_attr = "{}.colorSpace".format(node)
try:
color_space = cmds.getAttr(color_space_attr)
except ValueError:
# node doesn't have colorspace attribute
source = cmds.getAttr(fname_attrib)
color_space = "Raw"
# Compare with the computed file path, e.g. the one with the <UDIM>
# pattern in it, to generate some logging information about this
# difference
# computed_attribute = "{}.computedFileTextureNamePattern".format(node)
computed_source = cmds.getAttr(computed_attribute)
if source != computed_source:
self.log.debug("Detected computed file pattern difference "
"from original pattern: {0} "
"({1} -> {2})".format(node,
source,
computed_source))
try:
color_space = cmds.getAttr(colour_space_attrib)
except ValueError:
# node doesn't have colorspace attribute, use "Raw" from before
pass
# Compare with the computed file path, e.g. the one with the <UDIM>
# pattern in it, to generate some logging information about this
# difference
# computed_attribute = "{}.computedFileTextureNamePattern".format(node) # noqa
computed_source = cmds.getAttr(computed_fname_attrib)
if source != computed_source:
self.log.debug("Detected computed file pattern difference "
"from original pattern: {0} "
"({1} -> {2})".format(node,
source,
computed_source))
# We replace backslashes with forward slashes because V-Ray
# can't handle the UDIM files with the backslashes in the
# paths as the computed patterns
source = source.replace("\\", "/")
# We replace backslashes with forward slashes because V-Ray
# can't handle the UDIM files with the backslashes in the
# paths as the computed patterns
source = source.replace("\\", "/")
files = get_file_node_files(node)
files = self.handle_files(files, publishMipMap)
if len(files) == 0:
self.log.error("No valid files found from node `%s`" % node)
files = get_file_node_files(node)
files = self.handle_files(files, publishMipMap)
if len(files) == 0:
self.log.error("No valid files found from node `%s`" % node)
self.log.info("collection of resource done:")
self.log.info(" - node: {}".format(node))
self.log.info(" - attribute: {}".format(attribute))
self.log.info(" - source: {}".format(source))
self.log.info(" - file: {}".format(files))
self.log.info(" - color space: {}".format(color_space))
self.log.info("collection of resource done:")
self.log.info(" - node: {}".format(node))
self.log.info(" - attribute: {}".format(fname_attrib))
self.log.info(" - source: {}".format(source))
self.log.info(" - file: {}".format(files))
self.log.info(" - color space: {}".format(color_space))
# Define the resource
return {"node": node,
"attribute": attribute,
"source": source, # required for resources
"files": files,
"color_space": color_space} # required for resources
# Define the resource
resource = {"node": node,
"attribute": fname_attrib,
"source": source, # required for resources
"files": files,
"color_space": color_space} # required for resources
resources.append(resource)
return resources
def handle_files(self, files, publishMipMap):
"""This will go through all the files and make sure that they are


@ -73,12 +73,12 @@ class ExtractMultiverseLook(publish.Extractor):
"writeAll": False,
"writeTransforms": False,
"writeVisibility": False,
"writeAttributes": False,
"writeAttributes": True,
"writeMaterials": True,
"writeVariants": False,
"writeVariantsDefinition": False,
"writeActiveState": False,
"writeNamespaces": False,
"writeNamespaces": True,
"numTimeSamples": 1,
"timeSamplesSpan": 0.0
}


@ -2,6 +2,7 @@ import os
import six
from maya import cmds
from maya import mel
from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import maintained_selection
@ -26,7 +27,7 @@ class ExtractMultiverseUsd(publish.Extractor):
label = "Extract Multiverse USD Asset"
hosts = ["maya"]
families = ["mvUsd"]
families = ["usd"]
scene_type = "usd"
file_formats = ["usd", "usda", "usdz"]
@ -87,7 +88,7 @@ class ExtractMultiverseUsd(publish.Extractor):
return {
"stripNamespaces": False,
"mergeTransformAndShape": False,
"writeAncestors": True,
"writeAncestors": False,
"flattenParentXforms": False,
"writeSparseOverrides": False,
"useMetaPrimPath": False,
@ -147,6 +148,13 @@ class ExtractMultiverseUsd(publish.Extractor):
return options
def get_default_options(self):
self.log.info("ExtractMultiverseUsd get_default_options")
return self.default_options
def filter_members(self, members):
return members
def process(self, instance):
# Load plugin first
cmds.loadPlugin("MultiverseForMaya", quiet=True)
@ -161,7 +169,7 @@ class ExtractMultiverseUsd(publish.Extractor):
file_path = file_path.replace('\\', '/')
# Parse export options
options = self.default_options
options = self.get_default_options()
options = self.parse_overrides(instance, options)
self.log.info("Export options: {0}".format(options))
@ -170,27 +178,35 @@ class ExtractMultiverseUsd(publish.Extractor):
with maintained_selection():
members = instance.data("setMembers")
self.log.info('Collected object {}'.format(members))
self.log.info('Collected objects: {}'.format(members))
members = self.filter_members(members)
if not members:
self.log.error('No members!')
return
self.log.info(' - filtered: {}'.format(members))
import multiverse
time_opts = None
frame_start = instance.data['frameStart']
frame_end = instance.data['frameEnd']
handle_start = instance.data['handleStart']
handle_end = instance.data['handleEnd']
step = instance.data['step']
fps = instance.data['fps']
if frame_end != frame_start:
time_opts = multiverse.TimeOptions()
time_opts.writeTimeRange = True
handle_start = instance.data['handleStart']
handle_end = instance.data['handleEnd']
time_opts.frameRange = (
frame_start - handle_start, frame_end + handle_end)
time_opts.frameIncrement = step
time_opts.numTimeSamples = instance.data["numTimeSamples"]
time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"]
time_opts.framePerSecond = fps
time_opts.frameIncrement = instance.data['step']
time_opts.numTimeSamples = instance.data.get(
'numTimeSamples', options['numTimeSamples'])
time_opts.timeSamplesSpan = instance.data.get(
'timeSamplesSpan', options['timeSamplesSpan'])
time_opts.framePerSecond = instance.data.get(
'fps', mel.eval('currentTimeUnitToFPS()'))
asset_write_opts = multiverse.AssetWriteOptions(time_opts)
options_discard_keys = {
@ -203,11 +219,15 @@ class ExtractMultiverseUsd(publish.Extractor):
'step',
'fps'
}
self.log.debug("Write Options:")
for key, value in options.items():
if key in options_discard_keys:
continue
self.log.debug(" - {}={}".format(key, value))
setattr(asset_write_opts, key, value)
self.log.info('WriteAsset: {} / {}'.format(file_path, members))
multiverse.WriteAsset(file_path, members, asset_write_opts)
if "representations" not in instance.data:
@ -223,3 +243,32 @@ class ExtractMultiverseUsd(publish.Extractor):
self.log.info("Extracted instance {} to {}".format(
instance.name, file_path))
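For the time options assembled in process() above, a worked example of the exported range (values are illustrative):

# frameStart=1001, frameEnd=1100, handleStart=5, handleEnd=5, step=1
# -> writeTimeRange is True and
#    frameRange == (996, 1105)   # (frameStart - handleStart, frameEnd + handleEnd)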
class ExtractMultiverseUsdAnim(ExtractMultiverseUsd):
"""Extractor for Multiverse USD Animation Sparse Cache data.
This will extract the sparse cache data from the scene and generate a
USD file with all the animation data.
Upon publish, a .usd sparse cache will be written.
"""
label = "Extract Multiverse USD Animation Sparse Cache"
families = ["animation"]
def get_default_options(self):
anim_options = self.default_options
anim_options["writeSparseOverrides"] = True
anim_options["writeUsdAttributes"] = True
anim_options["stripNamespaces"] = True
return anim_options
def filter_members(self, members):
out_set = next((i for i in members if i.endswith("out_SET")), None)
if out_set is None:
self.log.warning("Expecting out_SET")
return None
members = cmds.ls(cmds.sets(out_set, query=True), long=True)
return members


@ -80,13 +80,14 @@ class ValidateMvLookContents(pyblish.api.InstancePlugin):
def is_or_has_mipmap(self, fname, files):
ext = os.path.splitext(fname)[1][1:]
if ext in MIPMAP_EXTENSIONS:
self.log.debug("Is a mipmap '{}'".format(fname))
self.log.debug(" - Is a mipmap '{}'".format(fname))
return True
for colour_space in COLOUR_SPACES:
for mipmap_ext in MIPMAP_EXTENSIONS:
mipmap_fname = '.'.join([fname, colour_space, mipmap_ext])
if mipmap_fname in files:
self.log.debug("Has a mipmap '{}'".format(fname))
self.log.debug(
" - Has a mipmap '{}'".format(mipmap_fname))
return True
return False
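For reference, a worked example of the naming convention is_or_has_mipmap expects (file names are made up; COLOUR_SPACES and MIPMAP_EXTENSIONS are the module constants listed earlier):

# "wall_diffuse.tdl"  -> already a mipmap (extension is in MIPMAP_EXTENSIONS)
# "wall_diffuse.exr"  -> counts as having a mipmap when the collected file list
#                        also contains "wall_diffuse.exr.sRGB.tdl" (or the
#                        linear/auto variant)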


@ -21,6 +21,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
- nurbsSurface: _NRB
- locator: _LOC
- null/group: _GRP
Suffixes can also be overridden by project settings.
.. warning::
This grabs the first child shape as a reference and doesn't use the
@ -44,6 +45,13 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
ALLOW_IF_NOT_IN_SUFFIX_TABLE = True
@classmethod
def get_table_for_invalid(cls):
ss = []
for k, v in cls.SUFFIX_NAMING_TABLE.items():
ss.append(" - {}: {}".format(k, ", ".join(v)))
return "\n".join(ss)
@staticmethod
def is_valid_name(node_name, shape_type,
SUFFIX_NAMING_TABLE, ALLOW_IF_NOT_IN_SUFFIX_TABLE):
@ -106,5 +114,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
"""
invalid = self.get_invalid(instance)
if invalid:
valid = self.get_table_for_invalid()
raise ValueError("Incorrectly named geometry "
"transforms: {0}".format(invalid))
"transforms: {0}, accepted suffixes are: "
"\n{1}".format(invalid, valid))


@ -123,7 +123,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"staticMesh",
"skeletalMesh",
"mvLook",
"mvUsd",
"mvUsdComposition",
"mvUsdOverride",
"simpleUnrealTexture"