Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit f368759642: [Automated] Merged develop into main
31 changed files with 1,026 additions and 205 deletions
.github/ISSUE_TEMPLATE/bug_report.yml (vendored): 2 changes
@@ -35,6 +35,7 @@ body:
       label: Version
       description: What version are you running? Look to OpenPype Tray
       options:
+        - 3.16.7-nightly.2
         - 3.16.7-nightly.1
         - 3.16.6
         - 3.16.6-nightly.1
@@ -134,7 +135,6 @@ body:
         - 3.14.10-nightly.1
         - 3.14.9
         - 3.14.9-nightly.5
-        - 3.14.9-nightly.4
     validations:
       required: true
   - type: dropdown
@@ -109,6 +109,8 @@ RUN source $HOME/.bashrc \
 RUN cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.9/lib \
     && cp /usr/lib64/openssl11/libssl* ./build/exe.linux-x86_64-3.9/lib \
     && cp /usr/lib64/openssl11/libcrypto* ./build/exe.linux-x86_64-3.9/lib \
+    && ln -sr ./build/exe.linux-x86_64-3.9/lib/libssl.so ./build/exe.linux-x86_64-3.9/lib/libssl.1.1.so \
+    && ln -sr ./build/exe.linux-x86_64-3.9/lib/libcrypto.so ./build/exe.linux-x86_64-3.9/lib/libcrypto.1.1.so \
     && cp /root/.pyenv/versions/${OPENPYPE_PYTHON_VERSION}/lib/libpython* ./build/exe.linux-x86_64-3.9/lib \
     && cp /usr/lib64/libxcb* ./build/exe.linux-x86_64-3.9/vendor/python/PySide2/Qt/lib
@@ -36,7 +36,7 @@ WizardStyle=modern
 Name: "english"; MessagesFile: "compiler:Default.isl"
 
 [Tasks]
-Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked
+Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"
 
 [InstallDelete]
 ; clean everything in previous installation folder
@@ -53,4 +53,3 @@ Name: "{autodesktop}\{#MyAppName} {#AppVer}"; Filename: "{app}\openpype_gui.exe"
 
 [Run]
 Filename: "{app}\openpype_gui.exe"; Description: "{cm:LaunchProgram,OpenPype}"; Flags: nowait postinstall skipifsilent
-
@@ -422,7 +422,7 @@ def get_last_version_by_subset_name(
     if not subset:
         return None
     return get_last_version_by_subset_id(
-        project_name, subset["id"], fields=fields
+        project_name, subset["_id"], fields=fields
     )
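A quick note on this fix: the subset documents come from a MongoDB-backed store, whose primary key field is `_id`, not `id`. A minimal sketch (hypothetical document values) of the difference:

```python
# Hypothetical subset document, as the client query helpers return it:
subset = {"_id": "634f1a2b9c8d", "name": "modelMain", "type": "subset"}

subset["_id"]  # primary key expected by get_last_version_by_subset_id
subset["id"]   # KeyError - the lookup this change fixes
```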
@@ -18,7 +18,6 @@ from openpype.hosts.max.api import lib
 from openpype.hosts.max.api.plugin import MS_CUSTOM_ATTRIB
 from openpype.hosts.max import MAX_HOST_DIR
 
-
 from pymxs import runtime as rt  # noqa
 
 log = logging.getLogger("openpype.hosts.max")
@@ -14,7 +14,7 @@ class ModelAbcLoader(load.LoaderPlugin):
     """Loading model with the Alembic loader."""
 
     families = ["model"]
-    label = "Load Model(Alembic)"
+    label = "Load Model with Alembic"
     representations = ["abc"]
     order = -10
     icon = "code-fork"
@@ -55,7 +55,7 @@ class AbcLoader(load.LoaderPlugin):
         selections = rt.GetCurrentSelection()
         for abc in selections:
             for cam_shape in abc.Children:
-                cam_shape.playbackType = 2
+                cam_shape.playbackType = 0
 
         namespace = unique_namespace(
             name + "_",
openpype/hosts/max/plugins/load/load_pointcache_ornatrix.py (new file, 108 lines)
@@ -0,0 +1,108 @@
import os
from openpype.pipeline import load, get_representation_path
from openpype.pipeline.load import LoadError
from openpype.hosts.max.api.pipeline import (
    containerise,
    get_previous_loaded_object,
    update_custom_attribute_data
)

from openpype.hosts.max.api.lib import (
    unique_namespace,
    get_namespace,
    object_transform_set,
    get_plugins
)
from openpype.hosts.max.api import lib
from pymxs import runtime as rt


class OxAbcLoader(load.LoaderPlugin):
    """Ornatrix Alembic loader."""

    families = ["camera", "animation", "pointcache"]
    label = "Load Alembic with Ornatrix"
    representations = ["abc"]
    order = -10
    icon = "code-fork"
    color = "orange"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        plugin_list = get_plugins()
        if "ephere.plugins.autodesk.max.ornatrix.dlo" not in plugin_list:
            raise LoadError("Ornatrix plugin not "
                            "found/installed in Max yet..")

        file_path = os.path.normpath(self.filepath_from_context(context))
        rt.AlembicImport.ImportToRoot = True
        rt.AlembicImport.CustomAttributes = True
        rt.importFile(
            file_path, rt.name("noPrompt"),
            using=rt.Ornatrix_Alembic_Importer)

        scene_object = []
        for obj in rt.rootNode.Children:
            obj_type = rt.ClassOf(obj)
            if str(obj_type).startswith("Ox_"):
                scene_object.append(obj)

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        abc_container = []
        for abc in scene_object:
            abc.name = f"{namespace}:{abc.name}"
            abc_container.append(abc)

        return containerise(
            name, abc_container, context,
            namespace, loader=self.__class__.__name__
        )

    def update(self, container, representation):
        path = get_representation_path(representation)
        node_name = container["instance_node"]
        namespace, name = get_namespace(node_name)
        node = rt.getNodeByName(node_name)
        node_list = get_previous_loaded_object(node)
        rt.Select(node_list)
        selections = rt.getCurrentSelection()
        transform_data = object_transform_set(selections)
        for prev_obj in selections:
            if rt.isValidNode(prev_obj):
                rt.Delete(prev_obj)

        rt.AlembicImport.ImportToRoot = False
        rt.AlembicImport.CustomAttributes = True
        rt.importFile(
            path, rt.name("noPrompt"),
            using=rt.Ornatrix_Alembic_Importer)

        scene_object = []
        for obj in rt.rootNode.Children:
            obj_type = rt.ClassOf(obj)
            if str(obj_type).startswith("Ox_"):
                scene_object.append(obj)
        ox_abc_objects = []
        for abc in scene_object:
            abc.Parent = container
            abc.name = f"{namespace}:{abc.name}"
            ox_abc_objects.append(abc)
            ox_transform = f"{abc.name}.transform"
            if ox_transform in transform_data.keys():
                abc.pos = transform_data[ox_transform] or 0
                abc.scale = transform_data[f"{abc.name}.scale"] or 0
        update_custom_attribute_data(node, ox_abc_objects)
        lib.imprint(
            container["instance_node"],
            {"representation": str(representation["_id"])},
        )

    def switch(self, container, representation):
        self.update(container, representation)

    def remove(self, container):
        node = rt.GetNodeByName(container["instance_node"])
        rt.Delete(node)
@@ -3,6 +3,7 @@ import pyblish.api
 from openpype.pipeline import publish, OptionalPyblishPluginMixin
 from pymxs import runtime as rt
 from openpype.hosts.max.api import maintained_selection
+from openpype.pipeline.publish import KnownPublishError
 
 
 class ExtractModelObj(publish.Extractor, OptionalPyblishPluginMixin):
@@ -27,6 +28,7 @@ class ExtractModelObj(publish.Extractor, OptionalPyblishPluginMixin):
         filepath = os.path.join(stagingdir, filename)
+        self.log.info("Writing OBJ '%s' to '%s'" % (filepath, stagingdir))
 
         self.log.info("Performing Extraction ...")
         with maintained_selection():
             # select and export
             node_list = instance.data["members"]
@@ -38,7 +40,10 @@ class ExtractModelObj(publish.Extractor, OptionalPyblishPluginMixin):
             using=rt.ObjExp,
         )
 
-        self.log.info("Performing Extraction ...")
+        if not os.path.exists(filepath):
+            raise KnownPublishError(
+                "File {} wasn't produced by 3ds max, please check the logs.")
 
         if "representations" not in instance.data:
             instance.data["representations"] = []
@@ -1,9 +1,13 @@
 # -*- coding: utf-8 -*-
 """Validator for USD plugin."""
-from openpype.pipeline import PublishValidationError
 from pyblish.api import InstancePlugin, ValidatorOrder
 from pymxs import runtime as rt
 
+from openpype.pipeline import (
+    OptionalPyblishPluginMixin,
+    PublishValidationError
+)
 
 
 def get_plugins() -> list:
     """Get plugin list from 3ds max."""
@@ -17,17 +21,25 @@ def get_plugins() -> list:
     return plugin_info_list
 
 
-class ValidateUSDPlugin(InstancePlugin):
+class ValidateUSDPlugin(OptionalPyblishPluginMixin,
+                        InstancePlugin):
     """Validates if USD plugin is installed or loaded in 3ds max."""
 
     order = ValidatorOrder - 0.01
     families = ["model"]
     hosts = ["max"]
-    label = "USD Plugin"
+    label = "Validate USD Plugin loaded"
+    optional = True
 
     def process(self, instance):
         """Plugin entry point."""
 
+        for sc in ValidateUSDPlugin.__subclasses__():
+            self.log.info(sc)
+
+        if not self.is_active(instance.data):
+            return
+
         plugin_info = get_plugins()
         usd_import = "usdimport.dli"
         if usd_import not in plugin_info:
@@ -659,17 +659,6 @@ def on_task_changed():
     lib.set_context_settings()
     lib.update_content_on_context_change()
 
-    msg = " project: {}\n asset: {}\n task:{}".format(
-        get_current_project_name(),
-        get_current_asset_name(),
-        get_current_task_name()
-    )
-
-    lib.show_message(
-        "Context was changed",
-        ("Context was changed to:\n{}".format(msg)),
-    )
-
 
 def before_workfile_open():
     if handle_workfile_locks():
@@ -129,18 +129,49 @@ class MayaCreatorBase(object):
         shared_data["maya_cached_legacy_subsets"] = cache_legacy
         return shared_data
 
+    def get_publish_families(self):
+        """Return families for the instances of this creator.
+
+        Allow a Creator to define multiple families so that a creator can
+        e.g. specify `usd` and `usdMaya` and another USD creator can also
+        specify `usd` but apply different extractors like `usdMultiverse`.
+
+        There is no need to override this method if you only have the
+        primary family defined by the `family` property as that will always
+        be set.
+
+        Returns:
+            list: families for instances of this creator
+
+        """
+        return []
+
     def imprint_instance_node(self, node, data):
 
         # We never store the instance_node as value on the node since
         # it's the node name itself
         data.pop("instance_node", None)
 
+        # Don't store `families` since it's up to the creator itself
+        # to define the initial publish families - not a stored attribute of
+        # `families`
+        data.pop("families", None)
+
         # We store creator attributes at the root level and assume they
         # will not clash in names with `subset`, `task`, etc. and other
         # default names. This is just so these attributes in many cases
         # are still editable in the maya UI by artists.
-        # pop to move to end of dict to sort attributes last on the node
+        # note: pop to move to end of dict to sort attributes last on the node
        creator_attributes = data.pop("creator_attributes", {})
 
+        # We only flatten value types which `imprint` function supports
+        json_creator_attributes = {}
+        for key, value in dict(creator_attributes).items():
+            if isinstance(value, (list, tuple, dict)):
+                creator_attributes.pop(key)
+                json_creator_attributes[key] = value
+
+        # Flatten remaining creator attributes to the node itself
         data.update(creator_attributes)
 
         # We know the "publish_attributes" will be complex data of
@@ -150,6 +181,10 @@ class MayaCreatorBase(object):
             data.pop("publish_attributes", {})
         )
 
+        # Persist the non-flattened creator attributes (special value types,
+        # like multiselection EnumDef)
+        data["creator_attributes"] = json.dumps(json_creator_attributes)
+
         # Since we flattened the data structure for creator attributes we want
         # to correctly detect which flattened attributes should end back in the
         # creator attributes when reading the data from the node, so we store
|
|||
# being read as 'data'
|
||||
node_data.pop("cbId", None)
|
||||
|
||||
# Make sure we convert any creator attributes from the json string
|
||||
creator_attributes = node_data.get("creator_attributes")
|
||||
if creator_attributes:
|
||||
node_data["creator_attributes"] = json.loads(creator_attributes)
|
||||
else:
|
||||
node_data["creator_attributes"] = {}
|
||||
|
||||
# Move the relevant attributes into "creator_attributes" that
|
||||
# we flattened originally
|
||||
node_data["creator_attributes"] = {}
|
||||
creator_attribute_keys = node_data.pop("__creator_attributes_keys",
|
||||
"").split(",")
|
||||
for key in creator_attribute_keys:
|
||||
if key in node_data:
|
||||
node_data["creator_attributes"][key] = node_data.pop(key)
|
||||
|
||||
# Make sure we convert any publish attributes from the json string
|
||||
publish_attributes = node_data.get("publish_attributes")
|
||||
if publish_attributes:
|
||||
node_data["publish_attributes"] = json.loads(publish_attributes)
|
||||
|
|
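To make the flattening scheme in these two hunks concrete, here is a small round-trip illustration (attribute names and values are hypothetical, not from this diff) of how imprinting and reading back cooperate:

```python
import json

# What imprint_instance_node stores on the node: simple values flattened
# to the root, complex values kept as a JSON string, plus bookkeeping.
node_data = {
    "subset": "modelMain",
    "step": 1.0,                                              # flattened
    "creator_attributes": json.dumps({"axes": ["x", "y"]}),   # complex types
    "__creator_attributes_keys": "step",
}

# What reading the node reconstructs:
data = dict(node_data)
data["creator_attributes"] = json.loads(data["creator_attributes"])
for key in data.pop("__creator_attributes_keys").split(","):
    if key in data:
        data["creator_attributes"][key] = data.pop(key)
# data["creator_attributes"] == {"axes": ["x", "y"], "step": 1.0}
```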
@@ -186,6 +228,11 @@ class MayaCreatorBase(object):
         # Explicitly re-parse the node name
         node_data["instance_node"] = node
 
+        # If the creator plug-in specifies families, include them
+        families = self.get_publish_families()
+        if families:
+            node_data["families"] = families
+
         return node_data
 
     def _default_collect_instances(self):
@@ -230,6 +277,14 @@ class MayaCreator(NewCreator, MayaCreatorBase):
         if pre_create_data.get("use_selection"):
             members = cmds.ls(selection=True)
 
+        # Allow a Creator to define multiple families
+        publish_families = self.get_publish_families()
+        if publish_families:
+            families = instance_data.setdefault("families", [])
+            for family in self.get_publish_families():
+                if family not in families:
+                    families.append(family)
+
         with lib.undo_chunk():
             instance_node = cmds.sets(members, name=subset_name)
             instance_data["instance_node"] = instance_node
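To illustrate the new hook, a minimal sketch (class name and the second family are illustrative, not part of this diff) of a creator opting into multiple publish families:

```python
class CreateMyUsd(MayaCreator):
    """Hypothetical creator whose instances publish under two families."""
    family = "usd"

    def get_publish_families(self):
        # Instances receive both families, so extractors targeting either
        # "usd" or "myUsd" can pick them up.
        return ["usd", "myUsd"]
```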
openpype/hosts/maya/plugins/create/create_maya_usd.py (new file, 102 lines)
@@ -0,0 +1,102 @@
from openpype.hosts.maya.api import plugin, lib
from openpype.lib import (
    BoolDef,
    EnumDef,
    TextDef
)

from maya import cmds


class CreateMayaUsd(plugin.MayaCreator):
    """Create Maya USD Export"""

    identifier = "io.openpype.creators.maya.mayausd"
    label = "Maya USD"
    family = "usd"
    icon = "cubes"
    description = "Create Maya USD Export"

    cache = {}

    def get_publish_families(self):
        return ["usd", "mayaUsd"]

    def get_instance_attr_defs(self):

        if "jobContextItems" not in self.cache:
            # Query once instead of per instance
            job_context_items = {}
            try:
                cmds.loadPlugin("mayaUsdPlugin", quiet=True)
                job_context_items = {
                    cmds.mayaUSDListJobContexts(jobContext=name): name
                    for name in cmds.mayaUSDListJobContexts(export=True) or []
                }
            except RuntimeError:
                # Likely `mayaUsdPlugin` plug-in not available
                self.log.warning("Unable to retrieve available job "
                                 "contexts for `mayaUsdPlugin` exports")

            if not job_context_items:
                # enumdef multiselection may not be empty
                job_context_items = ["<placeholder; do not use>"]

            self.cache["jobContextItems"] = job_context_items

        defs = lib.collect_animation_defs()
        defs.extend([
            EnumDef("defaultUSDFormat",
                    label="File format",
                    items={
                        "usdc": "Binary",
                        "usda": "ASCII"
                    },
                    default="usdc"),
            BoolDef("stripNamespaces",
                    label="Strip Namespaces",
                    tooltip=(
                        "Remove namespaces during export. By default, "
                        "namespaces are exported to the USD file in the "
                        "following format: nameSpaceExample_pPlatonic1"
                    ),
                    default=True),
            BoolDef("mergeTransformAndShape",
                    label="Merge Transform and Shape",
                    tooltip=(
                        "Combine Maya transform and shape into a single USD"
                        "prim that has transform and geometry, for all"
                        " \"geometric primitives\" (gprims).\n"
                        "This results in smaller and faster scenes. Gprims "
                        "will be \"unpacked\" back into transform and shape "
                        "nodes when imported into Maya from USD."
                    ),
                    default=True),
            BoolDef("includeUserDefinedAttributes",
                    label="Include User Defined Attributes",
                    tooltip=(
                        "Whether to include all custom maya attributes found "
                        "on nodes as metadata (userProperties) in USD."
                    ),
                    default=False),
            TextDef("attr",
                    label="Custom Attributes",
                    default="",
                    placeholder="attr1, attr2"),
            TextDef("attrPrefix",
                    label="Custom Attributes Prefix",
                    default="",
                    placeholder="prefix1, prefix2"),
            EnumDef("jobContext",
                    label="Job Context",
                    items=self.cache["jobContextItems"],
                    tooltip=(
                        "Specifies an additional export context to handle.\n"
                        "These usually contain extra schemas, primitives,\n"
                        "and materials that are to be exported for a "
                        "specific\ntask, a target renderer for example."
                    ),
                    multiselection=True),
        ])

        return defs
@@ -14,6 +14,10 @@ class CreateMultiverseUsd(plugin.MayaCreator):
     label = "Multiverse USD Asset"
     family = "usd"
     icon = "cubes"
     description = "Create Multiverse USD Asset"
 
+    def get_publish_families(self):
+        return ["usd", "mvUsd"]
+
     def get_instance_attr_defs(self):
@@ -6,7 +6,7 @@ from openpype.lib import (
 )
 
 
-class CreateMultiverseUsdOver(plugin.Creator):
+class CreateMultiverseUsdOver(plugin.MayaCreator):
     """Create Multiverse USD Override"""
 
     identifier = "io.openpype.creators.maya.mvusdoverride"
@@ -17,6 +17,7 @@ from openpype.hosts.maya.api.lib import (
 )
 from openpype.hosts.maya.api.pipeline import containerise
 
 
 def is_sequence(files):
     sequence = False
     collections, remainder = clique.assemble(files, minimum_items=1)
@@ -29,11 +30,12 @@ def get_current_session_fps():
     session_fps = float(legacy_io.Session.get('AVALON_FPS', 25))
     return convert_to_maya_fps(session_fps)
 
 
 class ArnoldStandinLoader(load.LoaderPlugin):
     """Load as Arnold standin"""
 
-    families = ["ass", "animation", "model", "proxyAbc", "pointcache"]
-    representations = ["ass", "abc"]
+    families = ["ass", "animation", "model", "proxyAbc", "pointcache", "usd"]
+    representations = ["ass", "abc", "usda", "usdc", "usd"]
 
     label = "Load as Arnold standin"
     order = -5
openpype/hosts/maya/plugins/load/load_maya_usd.py (new file, 108 lines)
@@ -0,0 +1,108 @@
# -*- coding: utf-8 -*-
import maya.cmds as cmds

from openpype.pipeline import (
    load,
    get_representation_path,
)
from openpype.pipeline.load import get_representation_path_from_context
from openpype.hosts.maya.api.lib import (
    namespaced,
    unique_namespace
)
from openpype.hosts.maya.api.pipeline import containerise


class MayaUsdLoader(load.LoaderPlugin):
    """Read USD data in a Maya USD Proxy"""

    families = ["model", "usd", "pointcache", "animation"]
    representations = ["usd", "usda", "usdc", "usdz", "abc"]

    label = "Load USD to Maya Proxy"
    order = -1
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, options=None):
        asset = context['asset']['name']
        namespace = namespace or unique_namespace(
            asset + "_",
            prefix="_" if asset[0].isdigit() else "",
            suffix="_",
        )

        # Make sure we can load the plugin
        cmds.loadPlugin("mayaUsdPlugin", quiet=True)

        path = get_representation_path_from_context(context)

        # Create the shape
        cmds.namespace(addNamespace=namespace)
        with namespaced(namespace, new=False):
            transform = cmds.createNode("transform",
                                        name=name,
                                        skipSelect=True)
            proxy = cmds.createNode('mayaUsdProxyShape',
                                    name="{}Shape".format(name),
                                    parent=transform,
                                    skipSelect=True)

            cmds.connectAttr("time1.outTime", "{}.time".format(proxy))
            cmds.setAttr("{}.filePath".format(proxy), path, type="string")

            # By default, we force the proxy to not use a shared stage because
            # when doing so Maya will quite easily allow to save into the
            # loaded usd file. Since we are loading published files we want to
            # avoid altering them. Unshared stages also save their edits into
            # the workfile as an artist might expect it to do.
            cmds.setAttr("{}.shareStage".format(proxy), False)
            # cmds.setAttr("{}.shareStage".format(proxy), lock=True)

        nodes = [transform, proxy]
        self[:] = nodes

        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)

    def update(self, container, representation):
        # type: (dict, dict) -> None
        """Update container with specified representation."""
        node = container['objectName']
        assert cmds.objExists(node), "Missing container"

        members = cmds.sets(node, query=True) or []
        shapes = cmds.ls(members, type="mayaUsdProxyShape")

        path = get_representation_path(representation)
        for shape in shapes:
            cmds.setAttr("{}.filePath".format(shape), path, type="string")

        cmds.setAttr("{}.representation".format(node),
                     str(representation["_id"]),
                     type="string")

    def switch(self, container, representation):
        self.update(container, representation)

    def remove(self, container):
        # type: (dict) -> None
        """Remove loaded container."""
        # Delete container and its contents
        if cmds.objExists(container['objectName']):
            members = cmds.sets(container['objectName'], query=True) or []
            cmds.delete([container['objectName']] + members)

        # Remove the namespace, if empty
        namespace = container['namespace']
        if cmds.namespace(exists=namespace):
            members = cmds.namespaceInfo(namespace, listNamespace=True)
            if not members:
                cmds.namespace(removeNamespace=namespace)
            else:
                self.log.warning("Namespace not deleted because it "
                                 "still has members: %s", namespace)
@@ -58,17 +58,3 @@ class CollectAnimationOutputGeometry(pyblish.api.InstancePlugin):
         if instance.data.get("farm"):
             instance.data["families"].append("publish.farm")
-
-        # Collect user defined attributes.
-        if not instance.data.get("includeUserDefinedAttributes", False):
-            return
-
-        user_defined_attributes = set()
-        for node in hierarchy:
-            attrs = cmds.listAttr(node, userDefined=True) or list()
-            shapes = cmds.listRelatives(node, shapes=True) or list()
-            for shape in shapes:
-                attrs.extend(cmds.listAttr(shape, userDefined=True) or list())
-
-            user_defined_attributes.update(attrs)
-
-        instance.data["userDefinedAttributes"] = list(user_defined_attributes)
@@ -17,11 +17,6 @@ SHAPE_ATTRS = ["castsShadows",
                "visibleInRefractions",
                "doubleSided",
                "opposite"]
 
-RENDERER_NODE_TYPES = [
-    # redshift
-    "RedshiftMeshParameters"
-]
 SHAPE_ATTRS = set(SHAPE_ATTRS)
@@ -36,12 +31,13 @@ def get_pxr_multitexture_file_attrs(node):
 
 
 FILE_NODES = {
     # maya
     "file": "fileTextureName",
 
     # arnold (mtoa)
     "aiImage": "filename",
 
     # redshift
     "RedshiftNormalMap": "tex0",
 
     # renderman
     "PxrBump": "filename",
     "PxrNormalMap": "filename",
     "PxrMultiTexture": get_pxr_multitexture_file_attrs,
@@ -49,6 +45,22 @@ FILE_NODES = {
     "PxrTexture": "filename"
 }
 
+# Keep only node types that actually exist
+all_node_types = set(cmds.allNodeTypes())
+for node_type in list(FILE_NODES.keys()):
+    if node_type not in all_node_types:
+        FILE_NODES.pop(node_type)
+del all_node_types
+
+# Cache pixar dependency node types so we can perform a type lookup against it
+PXR_NODES = set()
+if cmds.pluginInfo("RenderMan_for_Maya", query=True, loaded=True):
+    PXR_NODES = set(
+        cmds.pluginInfo("RenderMan_for_Maya",
+                        query=True,
+                        dependNode=True)
+    )
+
 
 def get_attributes(dictionary, attr, node=None):
     # type: (dict, str, str) -> list
@@ -232,20 +244,17 @@ def get_file_node_files(node):
 
     """
     paths = get_file_node_paths(node)
-    sequences = []
-    replaces = []
 
     # For sequences get all files and filter to only existing files
+    result = []
     for index, path in enumerate(paths):
         if node_uses_image_sequence(node, path):
             glob_pattern = seq_to_glob(path)
-            sequences.extend(glob.glob(glob_pattern))
-            replaces.append(index)
+            result.extend(glob.glob(glob_pattern))
+        elif os.path.exists(path):
+            result.append(path)
 
-    for index in replaces:
-        paths.pop(index)
-
-    paths.extend(sequences)
-
-    return [p for p in paths if os.path.exists(p)]
+    return result
 
 
 class CollectLook(pyblish.api.InstancePlugin):
@@ -260,7 +269,7 @@ class CollectLook(pyblish.api.InstancePlugin):
     membership relations.
 
     Collects:
-        lookAttribtutes (list): Nodes in instance with their altered attributes
+        lookAttributes (list): Nodes in instance with their altered attributes
         lookSetRelations (list): Sets and their memberships
         lookSets (list): List of set names included in the look
@@ -286,7 +295,10 @@ class CollectLook(pyblish.api.InstancePlugin):
 
         """
         self.log.debug("Looking for look associations "
                       "for %s" % instance.data['name'])
 
         # Lookup set (optimization)
         instance_lookup = set(cmds.ls(instance, long=True))
 
         # Discover related object sets
         self.log.debug("Gathering sets ...")
@@ -296,65 +308,20 @@ class CollectLook(pyblish.api.InstancePlugin):
         instance_lookup = set(cmds.ls(instance, long=True))
 
         self.log.debug("Gathering set relations ...")
-        # Ensure iteration happen in a list so we can remove keys from the
+        # Ensure iteration happen in a list to allow removing keys from the
         # dict within the loop
-
-        # skipped types of attribute on render specific nodes
-        disabled_types = ["message", "TdataCompound"]
-
         for obj_set in list(sets):
             self.log.debug("From {}".format(obj_set))
-
-            # if node is specified as renderer node type, it will be
-            # serialized with its attributes.
-            if cmds.nodeType(obj_set) in RENDERER_NODE_TYPES:
-                self.log.debug("- {} is {}".format(
-                    obj_set, cmds.nodeType(obj_set)))
-
-                node_attrs = []
-
-                # serialize its attributes so they can be recreated on look
-                # load.
-                for attr in cmds.listAttr(obj_set):
-                    # skip publishedNodeInfo attributes as they break
-                    # getAttr() and we don't need them anyway
-                    if attr.startswith("publishedNodeInfo"):
-                        continue
-
-                    # skip attributes types defined in 'disabled_type' list
-                    if cmds.getAttr("{}.{}".format(obj_set, attr), type=True) in disabled_types:  # noqa
-                        continue
-
-                    node_attrs.append((
-                        attr,
-                        cmds.getAttr("{}.{}".format(obj_set, attr)),
-                        cmds.getAttr(
-                            "{}.{}".format(obj_set, attr), type=True)
-                    ))
-
-                for member in cmds.ls(
-                        cmds.sets(obj_set, query=True), long=True):
-                    member_data = self.collect_member_data(member,
-                                                           instance_lookup)
-                    if not member_data:
-                        continue
-
-                    # Add information of the node to the members list
-                    sets[obj_set]["members"].append(member_data)
-
             # Get all nodes of the current objectSet (shadingEngine)
             for member in cmds.ls(cmds.sets(obj_set, query=True), long=True):
                 member_data = self.collect_member_data(member,
                                                        instance_lookup)
-                if not member_data:
-                    continue
-
-                # Add information of the node to the members list
-                sets[obj_set]["members"].append(member_data)
+                if member_data:
+                    # Add information of the node to the members list
+                    sets[obj_set]["members"].append(member_data)
 
             # Remove sets that didn't have any members assigned in the end
             # Thus the data will be limited to only what we need.
-            self.log.debug("obj_set {}".format(sets[obj_set]))
             if not sets[obj_set]["members"]:
                 self.log.debug(
                     "Removing redundant set information: {}".format(obj_set)
@@ -383,35 +350,28 @@ class CollectLook(pyblish.api.InstancePlugin):
             "rman__displacement"
         ]
+        materials = []
         if look_sets:
-            materials = []
             self.log.debug("Found look sets: {}".format(look_sets))
 
             # Get all material attrs for all look sets to retrieve their inputs
+            existing_attrs = []
             for look in look_sets:
-                for at in shader_attrs:
-                    try:
-                        con = cmds.listConnections("{}.{}".format(look, at))
-                    except ValueError:
-                        # skip attributes that are invalid in current
-                        # context. For example in the case where
-                        # Arnold is not enabled.
-                        continue
-                    if con:
-                        materials.extend(con)
+                for attr in shader_attrs:
+                    if cmds.attributeQuery(attr, node=look, exists=True):
+                        existing_attrs.append("{}.{}".format(look, attr))
+            materials = cmds.listConnections(existing_attrs,
+                                             source=True,
+                                             destination=False) or []
 
             self.log.debug("Found materials:\n{}".format(materials))
 
-            self.log.debug("Found the following sets:\n{}".format(look_sets))
-            # Get the entire node chain of the look sets
-            # history = cmds.listHistory(look_sets)
-            history = []
-            for material in materials:
-                history.extend(cmds.listHistory(material, ac=True))
-
-            # handle VrayPluginNodeMtl node - see #1397
-            vray_plugin_nodes = cmds.ls(
-                history, type="VRayPluginNodeMtl", long=True)
-            for vray_node in vray_plugin_nodes:
-                history.extend(cmds.listHistory(vray_node, ac=True))
+            # history = cmds.listHistory(look_sets, allConnections=True)
+            history = cmds.listHistory(materials, allConnections=True)
 
+            # Since we retrieved history only of the connected materials
+            # connected to the look sets above we now add direct history
+            # for some of the look sets directly
+            # handling render attribute sets
             render_set_types = [
                 "VRayDisplacement",
@@ -429,20 +389,26 @@ class CollectLook(pyblish.api.InstancePlugin):
                 or []
             )
 
-        all_supported_nodes = FILE_NODES.keys()
-        files = []
-        for node_type in all_supported_nodes:
-            files.extend(cmds.ls(history, type=node_type, long=True))
+        # Ensure unique entries only
+        history = list(set(history))
+
+        files = cmds.ls(history,
+                        # It's important only node types are passed that
+                        # exist (e.g. for loaded plugins) because otherwise
+                        # the result will turn back empty
+                        type=list(FILE_NODES.keys()),
+                        long=True)
+
+        # Sort for log readability
+        files.sort()
 
         self.log.debug("Collected file nodes:\n{}".format(files))
 
         # Collect textures if any file nodes are found
-        instance.data["resources"] = []
-        for n in files:
-            for res in self.collect_resources(n):
-                instance.data["resources"].append(res)
-
-        self.log.debug("Collected resources: {}".format(
-            instance.data["resources"]))
+        resources = []
+        for node in files:
+            resources.extend(self.collect_resources(node))
+        instance.data["resources"] = resources
+        self.log.debug("Collected resources: {}".format(resources))
 
         # Log warning when no relevant sets were retrieved for the look.
         if (
@@ -537,14 +503,14 @@ class CollectLook(pyblish.api.InstancePlugin):
             # Collect changes to "custom" attributes
             node_attrs = get_look_attrs(node)
 
-            self.log.debug(
-                "Node \"{0}\" attributes: {1}".format(node, node_attrs)
-            )
-
             # Only include if there are any properties we care about
             if not node_attrs:
                 continue
 
+            self.log.debug(
+                "Node \"{0}\" attributes: {1}".format(node, node_attrs)
+            )
+
             node_attributes = {}
             for attr in node_attrs:
                 if not cmds.attributeQuery(attr, node=node, exists=True):
@@ -575,14 +541,14 @@ class CollectLook(pyblish.api.InstancePlugin):
         Returns:
             dict
         """
-        self.log.debug("processing: {}".format(node))
-        all_supported_nodes = FILE_NODES.keys()
-        if cmds.nodeType(node) not in all_supported_nodes:
+        if cmds.nodeType(node) not in FILE_NODES:
             self.log.error(
                 "Unsupported file node: {}".format(cmds.nodeType(node)))
             raise AssertionError("Unsupported file node")
 
-        self.log.debug("  - got {}".format(cmds.nodeType(node)))
+        self.log.debug(
+            "Collecting resource: {} ({})".format(node, cmds.nodeType(node))
+        )
 
         attributes = get_attributes(FILE_NODES, cmds.nodeType(node), node)
         for attribute in attributes:
@@ -590,9 +556,6 @@ class CollectLook(pyblish.api.InstancePlugin):
                 node,
                 attribute
             ))
-            computed_attribute = "{}.{}".format(node, attribute)
-            if attribute == "fileTextureName":
-                computed_attribute = node + ".computedFileTextureNamePattern"
 
             self.log.debug("  - file source: {}".format(source))
             color_space_attr = "{}.colorSpace".format(node)
@@ -601,27 +564,25 @@ class CollectLook(pyblish.api.InstancePlugin):
             except ValueError:
                 # node doesn't have colorspace attribute
                 color_space = "Raw"
 
-            # Compare with the computed file path, e.g. the one with
-            # the <UDIM> pattern in it, to generate some logging information
-            # about this difference
-            computed_source = cmds.getAttr(computed_attribute)
-            if source != computed_source:
-                self.log.debug("Detected computed file pattern difference "
-                               "from original pattern: {0} "
-                               "({1} -> {2})".format(node,
-                                                     source,
-                                                     computed_source))
+            # Only for file nodes with `fileTextureName` attribute
+            if attribute == "fileTextureName":
+                computed_source = cmds.getAttr(
+                    "{}.computedFileTextureNamePattern".format(node)
+                )
+                if source != computed_source:
+                    self.log.debug("Detected computed file pattern difference "
+                                   "from original pattern: {0} "
+                                   "({1} -> {2})".format(node,
+                                                         source,
+                                                         computed_source))
 
             # renderman allows nodes to have filename attribute empty while
             # you can have another incoming connection from different node.
-            pxr_nodes = set()
-            if cmds.pluginInfo("RenderMan_for_Maya", query=True, loaded=True):
-                pxr_nodes = set(
-                    cmds.pluginInfo("RenderMan_for_Maya",
-                                    query=True,
-                                    dependNode=True)
-                )
-            if not source and cmds.nodeType(node) in pxr_nodes:
+            if not source and cmds.nodeType(node) in PXR_NODES:
                 self.log.debug("Renderman: source is empty, skipping...")
                 continue
             # We replace backslashes with forward slashes because V-Ray
@@ -45,18 +45,3 @@ class CollectPointcache(pyblish.api.InstancePlugin):
         if proxy_set:
             instance.remove(proxy_set)
             instance.data["setMembers"].remove(proxy_set)
-
-        # Collect user defined attributes.
-        if not instance.data.get("includeUserDefinedAttributes", False):
-            return
-
-        user_defined_attributes = set()
-        for node in instance:
-            attrs = cmds.listAttr(node, userDefined=True) or list()
-            shapes = cmds.listRelatives(node, shapes=True) or list()
-            for shape in shapes:
-                attrs.extend(cmds.listAttr(shape, userDefined=True) or list())
-
-            user_defined_attributes.update(attrs)
-
-        instance.data["userDefinedAttributes"] = list(user_defined_attributes)
(new file) @@ -0,0 +1,39 @@
from maya import cmds

import pyblish.api


class CollectUserDefinedAttributes(pyblish.api.InstancePlugin):
    """Collect user defined attributes for nodes in instance."""

    order = pyblish.api.CollectorOrder + 0.45
    families = ["pointcache", "animation", "usd"]
    label = "Collect User Defined Attributes"
    hosts = ["maya"]

    def process(self, instance):

        # Collect user defined attributes.
        if not instance.data.get("includeUserDefinedAttributes", False):
            return

        if "out_hierarchy" in instance.data:
            # animation family
            nodes = instance.data["out_hierarchy"]
        else:
            nodes = instance[:]
        if not nodes:
            return

        shapes = cmds.listRelatives(nodes, shapes=True, fullPath=True) or []
        nodes = set(nodes).union(shapes)

        attrs = cmds.listAttr(list(nodes), userDefined=True) or []
        user_defined_attributes = list(sorted(set(attrs)))
        instance.data["userDefinedAttributes"] = user_defined_attributes

        self.log.debug(
            "Collected user defined attributes: {}".format(
                ", ".join(user_defined_attributes)
            )
        )
(new file) @@ -0,0 +1,60 @@
import maya.api.OpenMaya as om
import maya.api.OpenMayaUI as omui

import pyblish.api
import tempfile

from openpype.hosts.maya.api.lib import IS_HEADLESS


class ExtractActiveViewThumbnail(pyblish.api.InstancePlugin):
    """Set instance thumbnail to a screengrab of current active viewport.

    This makes it so that if an instance does not have a thumbnail set yet
    that it will get a thumbnail of the currently active view at the time of
    publishing as a fallback.

    """
    order = pyblish.api.ExtractorOrder + 0.49
    label = "Active View Thumbnail"
    families = ["workfile"]
    hosts = ["maya"]

    def process(self, instance):
        if IS_HEADLESS:
            self.log.debug(
                "Skip extraction of active view thumbnail, due to being in"
                " headless mode."
            )
            return

        thumbnail = instance.data.get("thumbnailPath")
        if not thumbnail:
            view_thumbnail = self.get_view_thumbnail(instance)
            if not view_thumbnail:
                return

            self.log.debug("Setting instance thumbnail path to: {}".format(
                view_thumbnail
            ))
            instance.data["thumbnailPath"] = view_thumbnail

    def get_view_thumbnail(self, instance):
        cache_key = "__maya_view_thumbnail"
        context = instance.context

        if cache_key not in context.data:
            # Generate only a single thumbnail, even for multiple instances
            with tempfile.NamedTemporaryFile(suffix="_thumbnail.jpg",
                                             delete=False) as f:
                path = f.name

            view = omui.M3dView.active3dView()
            image = om.MImage()
            view.readColorBuffer(image, True)
            image.writeToFile(path, "jpg")
            self.log.debug("Generated thumbnail: {}".format(path))

            context.data["cleanupFullPaths"].append(path)
            context.data[cache_key] = path
        return context.data[cache_key]
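The context-level caching used in `get_view_thumbnail` generalizes beyond thumbnails; a minimal sketch (helper name is illustrative, not from this diff) of the compute-once-per-publish pattern:

```python
def get_or_create(context, key, create):
    # Compute an expensive artifact once per publish context and share
    # it across every instance that asks for it afterwards.
    if key not in context.data:
        context.data[key] = create()
    return context.data[key]
```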
openpype/hosts/maya/plugins/publish/extract_maya_usd.py (new file, 293 lines)
@@ -0,0 +1,293 @@
import os
import six
import json
import contextlib

from maya import cmds

import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import maintained_selection


@contextlib.contextmanager
def usd_export_attributes(nodes, attrs=None, attr_prefixes=None, mapping=None):
    """Define attributes for the given nodes that should be exported.

    MayaUSDExport will export custom attributes if the Maya node has a
    string attribute `USD_UserExportedAttributesJson` that provides an
    export mapping for the maya attributes. This context manager will try
    to autogenerate such an attribute during the export to include attributes
    for the export.

    Arguments:
        nodes (List[str]): Nodes to process.
        attrs (Optional[List[str]]): Full name of attributes to include.
        attr_prefixes (Optional[List[str]]): Prefixes of attributes to include.
        mapping (Optional[Dict[Dict]]): A mapping per attribute name for the
            conversion to a USD attribute, including renaming, defining type,
            converting attribute precision, etc. This match the usual
            `USD_UserExportedAttributesJson` json mapping of `mayaUSDExport`.
            When no mapping provided for an attribute it will use `{}` as
            value.

    Examples:
        >>> with usd_export_attributes(
        >>>     ["pCube1"], attrs="myDoubleAttributeAsFloat", mapping={
        >>>         "myDoubleAttributeAsFloat": {
        >>>           "usdAttrName": "my:namespace:attrib",
        >>>           "translateMayaDoubleToUsdSinglePrecision": True,
        >>>         }
        >>> })

    """
    # todo: this might be better done with a custom export chaser
    #   see `chaser` argument for `mayaUSDExport`

    import maya.api.OpenMaya as om

    if not attrs and not attr_prefixes:
        # context manager does nothing
        yield
        return

    if attrs is None:
        attrs = []
    if attr_prefixes is None:
        attr_prefixes = []
    if mapping is None:
        mapping = {}

    usd_json_attr = "USD_UserExportedAttributesJson"
    strings = attrs + ["{}*".format(prefix) for prefix in attr_prefixes]
    context_state = {}
    for node in set(nodes):
        node_attrs = cmds.listAttr(node, st=strings)
        if not node_attrs:
            # Nothing to do for this node
            continue

        node_attr_data = {}
        for node_attr in set(node_attrs):
            node_attr_data[node_attr] = mapping.get(node_attr, {})

        if cmds.attributeQuery(usd_json_attr, node=node, exists=True):
            existing_node_attr_value = cmds.getAttr(
                "{}.{}".format(node, usd_json_attr)
            )
            if existing_node_attr_value and existing_node_attr_value != "{}":
                # Any existing attribute mappings in an existing
                # `USD_UserExportedAttributesJson` attribute always take
                # precedence over what this function tries to imprint
                existing_node_attr_data = json.loads(existing_node_attr_value)
                node_attr_data.update(existing_node_attr_data)

        context_state[node] = json.dumps(node_attr_data)

    sel = om.MSelectionList()
    dg_mod = om.MDGModifier()
    fn_string = om.MFnStringData()
    fn_typed = om.MFnTypedAttribute()
    try:
        for node, value in context_state.items():
            data = fn_string.create(value)
            sel.clear()
            if cmds.attributeQuery(usd_json_attr, node=node, exists=True):
                # Set the attribute value
                sel.add("{}.{}".format(node, usd_json_attr))
                plug = sel.getPlug(0)
                dg_mod.newPlugValue(plug, data)
            else:
                # Create attribute with the value as default value
                sel.add(node)
                node_obj = sel.getDependNode(0)
                attr_obj = fn_typed.create(usd_json_attr,
                                           usd_json_attr,
                                           om.MFnData.kString,
                                           data)
                dg_mod.addAttribute(node_obj, attr_obj)
        dg_mod.doIt()
        yield
    finally:
        dg_mod.undoIt()


class ExtractMayaUsd(publish.Extractor):
    """Extractor for Maya USD Asset data.

    Upon publish a .usd (or .usdz) asset file will typically be written.
    """

    label = "Extract Maya USD Asset"
    hosts = ["maya"]
    families = ["mayaUsd"]

    @property
    def options(self):
        """Overridable options for Maya USD Export

        Given in the following format
            - {NAME: EXPECTED TYPE}

        If the overridden option's type does not match,
        the option is not included and a warning is logged.

        """

        # TODO: Support more `mayaUSDExport` parameters
        return {
            "defaultUSDFormat": str,
            "stripNamespaces": bool,
            "mergeTransformAndShape": bool,
            "exportDisplayColor": bool,
            "exportColorSets": bool,
            "exportInstances": bool,
            "exportUVs": bool,
            "exportVisibility": bool,
            "exportComponentTags": bool,
            "exportRefsAsInstanceable": bool,
            "eulerFilter": bool,
            "renderableOnly": bool,
            "jobContext": (list, None)  # optional list
            # "worldspace": bool,
        }

    @property
    def default_options(self):
        """The default options for Maya USD Export."""

        # TODO: Support more `mayaUSDExport` parameters
        return {
            "defaultUSDFormat": "usdc",
            "stripNamespaces": False,
            "mergeTransformAndShape": False,
            "exportDisplayColor": False,
            "exportColorSets": True,
            "exportInstances": True,
            "exportUVs": True,
            "exportVisibility": True,
            "exportComponentTags": True,
            "exportRefsAsInstanceable": False,
            "eulerFilter": True,
            "renderableOnly": False,
            "jobContext": None
            # "worldspace": False
        }

    def parse_overrides(self, instance, options):
        """Inspect data of instance to determine overridden options"""

        for key in instance.data:
            if key not in self.options:
                continue

            # Ensure the data is of correct type
            value = instance.data[key]
            if isinstance(value, six.text_type):
                value = str(value)
            if not isinstance(value, self.options[key]):
                self.log.warning(
                    "Overridden attribute {key} was of "
                    "the wrong type: {invalid_type} "
                    "- should have been {valid_type}".format(
                        key=key,
                        invalid_type=type(value).__name__,
                        valid_type=self.options[key].__name__))
                continue

            options[key] = value

        return options

    def filter_members(self, members):
        # Can be overridden by inherited classes
        return members

    def process(self, instance):

        # Load plugin first
        cmds.loadPlugin("mayaUsdPlugin", quiet=True)

        # Define output file path
        staging_dir = self.staging_dir(instance)
        file_name = "{0}.usd".format(instance.name)
        file_path = os.path.join(staging_dir, file_name)
        file_path = file_path.replace('\\', '/')

        # Parse export options
        options = self.default_options
        options = self.parse_overrides(instance, options)
        self.log.debug("Export options: {0}".format(options))

        # Perform extraction
        self.log.debug("Performing extraction ...")

        members = instance.data("setMembers")
        self.log.debug('Collected objects: {}'.format(members))
        members = self.filter_members(members)
        if not members:
            self.log.error('No members!')
            return

        start = instance.data["frameStartHandle"]
        end = instance.data["frameEndHandle"]

        def parse_attr_str(attr_str):
            result = list()
            for attr in attr_str.split(","):
                attr = attr.strip()
                if not attr:
                    continue
                result.append(attr)
            return result

        attrs = parse_attr_str(instance.data.get("attr", ""))
        attrs += instance.data.get("userDefinedAttributes", [])
        attrs += ["cbId"]
        attr_prefixes = parse_attr_str(instance.data.get("attrPrefix", ""))

        self.log.debug('Exporting USD: {} / {}'.format(file_path, members))
        with maintained_selection():
            with usd_export_attributes(instance[:],
                                       attrs=attrs,
                                       attr_prefixes=attr_prefixes):
                cmds.mayaUSDExport(file=file_path,
                                   frameRange=(start, end),
                                   frameStride=instance.data.get("step", 1.0),
                                   exportRoots=members,
                                   **options)

        representation = {
            'name': "usd",
            'ext': "usd",
            'files': file_name,
            'stagingDir': staging_dir
        }
        instance.data.setdefault("representations", []).append(representation)

        self.log.debug(
            "Extracted instance {} to {}".format(instance.name, file_path)
        )


class ExtractMayaUsdAnim(ExtractMayaUsd):
    """Extractor for Maya USD Animation Sparse Cache data.

    This will extract the sparse cache data from the scene and generate a
    USD file with all the animation data.

    Upon publish a .usd sparse cache will be written.
    """
    label = "Extract Maya USD Animation Sparse Cache"
    families = ["animation", "mayaUsd"]
    match = pyblish.api.Subset

    def filter_members(self, members):
        out_set = next((i for i in members if i.endswith("out_SET")), None)

        if out_set is None:
            self.log.warning("Expecting out_SET")
            return None

        members = cmds.ls(cmds.sets(out_set, query=True), long=True)
        return members
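For reference, an illustration (node and attribute names are hypothetical, not from this diff) of the JSON payload that `usd_export_attributes` imprints into `USD_UserExportedAttributesJson`, following the mapping rules its docstring describes:

```python
import json

# For a node with user attributes "cbId" and "myAttr", given
# mapping={"myAttr": {"usdAttrName": "user:my"}}, the imprinted value is:
payload = {
    "cbId": {},  # no mapping provided -> empty dict, default conversion
    "myAttr": {"usdAttrName": "user:my"},
}
print(json.dumps(payload))  # the string mayaUSDExport reads during export
```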
@@ -28,7 +28,7 @@ class ExtractMultiverseUsd(publish.Extractor):
 
     label = "Extract Multiverse USD Asset"
     hosts = ["maya"]
-    families = ["usd"]
+    families = ["mvUsd"]
     scene_type = "usd"
     file_formats = ["usd", "usda", "usdz"]
@@ -10,6 +10,7 @@ from openpype.client import get_last_version_by_subset_name
 from openpype.hosts.maya import api
 from . import lib
 from .alembic import get_alembic_ids_cache
+from .usd import is_usd_lib_supported, get_usd_ids_cache
 
 
 log = logging.getLogger(__name__)
@@ -74,6 +75,13 @@ def get_nodes_by_id(standin):
         # Support alembic files directly
         return get_alembic_ids_cache(path)
 
+    elif (
+        is_usd_lib_supported and
+        any(path.endswith(ext) for ext in [".usd", ".usda", ".usdc"])
+    ):
+        # Support usd files directly
+        return get_usd_ids_cache(path)
+
     json_path = None
     for f in os.listdir(os.path.dirname(path)):
         if f.endswith(".json"):
openpype/hosts/maya/tools/mayalookassigner/usd.py (new file, 38 lines)
@@ -0,0 +1,38 @@
from collections import defaultdict

try:
    from pxr import Usd
    is_usd_lib_supported = True
except ImportError:
    is_usd_lib_supported = False


def get_usd_ids_cache(path):
    # type: (str) -> dict
    """Build a id to node mapping in a USD file.

    Nodes without IDs are ignored.

    Returns:
        dict: Mapping of id to nodes in the USD file.

    """
    if not is_usd_lib_supported:
        raise RuntimeError("No pxr.Usd python library available.")

    stage = Usd.Stage.Open(path)
    ids = {}
    for prim in stage.Traverse():
        attr = prim.GetAttribute("userProperties:cbId")
        if not attr.IsValid():
            continue
        value = attr.Get()
        if not value:
            continue
        path = str(prim.GetPath())
        ids[path] = value

    cache = defaultdict(list)
    for path, value in ids.items():
        cache[value].append(path)
    return dict(cache)
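A small usage sketch (the file path and id value are illustrative) of the mapping this helper returns to the look assigner:

```python
# Maps each cbId value to the prim paths carrying it, e.g.:
ids = get_usd_ids_cache("/projects/demo/publish/model/v001/model.usd")
# {"5f3e9a1b:a1b2": ["/root/geo/pCube1", "/root/geo/pCube2"]}
for cb_id, prim_paths in ids.items():
    print(cb_id, prim_paths)
```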
@@ -59,6 +59,14 @@ IGNORED_DEFAULT_FILENAMES = (
     "example_addons",
     "default_modules",
 )
+# Modules that won't be loaded in AYON mode from "./openpype/modules"
+# - the same modules are ignored in "./server_addon/create_ayon_addons.py"
+IGNORED_FILENAMES_IN_AYON = {
+    "ftrack",
+    "shotgrid",
+    "sync_server",
+    "slack",
+}
 
 
 # Inherit from `object` for Python 2 hosts
@@ -392,9 +400,9 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
         folder_name = "{}_{}".format(addon_name, addon_version)
         addon_dir = os.path.join(addons_dir, folder_name)
         if not os.path.exists(addon_dir):
-            log.warning((
-                "Directory for addon {} {} does not exists. Path \"{}\""
-            ).format(addon_name, addon_version, addon_dir))
+            log.debug((
+                "No localized client code found for addon {} {}."
+            ).format(addon_name, addon_version))
             continue
 
         sys.path.insert(0, addon_dir)
@@ -483,6 +491,10 @@ def _load_modules():
 
         is_in_current_dir = dirpath == current_dir
         is_in_host_dir = dirpath == hosts_dir
+        ignored_current_dir_filenames = set(IGNORED_DEFAULT_FILENAMES)
+        if AYON_SERVER_ENABLED:
+            ignored_current_dir_filenames |= IGNORED_FILENAMES_IN_AYON
 
         for filename in os.listdir(dirpath):
             # Ignore filenames
             if filename in IGNORED_FILENAMES:
@@ -490,7 +502,7 @@ def _load_modules():
 
             if (
                 is_in_current_dir
-                and filename in IGNORED_DEFAULT_FILENAMES
+                and filename in ignored_current_dir_filenames
             ):
                 continue
@@ -1,7 +1,11 @@
 import collections
 
 import pyblish.api
-from ayon_api import create_link, make_sure_link_type_exists
+from ayon_api import (
+    create_link,
+    make_sure_link_type_exists,
+    get_versions_links,
+)
 
 from openpype import AYON_SERVER_ENABLED
@@ -124,6 +128,33 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin):
             version_entity["_id"],
         )
 
+    def _get_existing_links(self, project_name, link_type, entity_ids):
+        """Find all existing links for given version ids.
+
+        Args:
+            project_name (str): Name of project.
+            link_type (str): Type of link.
+            entity_ids (set[str]): Set of version ids.
+
+        Returns:
+            dict[str, set[str]]: Existing links by version id.
+        """
+
+        output = collections.defaultdict(set)
+        if not entity_ids:
+            return output
+
+        existing_in_links = get_versions_links(
+            project_name, entity_ids, [link_type], "output"
+        )
+
+        for entity_id, links in existing_in_links.items():
+            if not links:
+                continue
+            for link in links:
+                output[entity_id].add(link["entityId"])
+        return output
+
     def create_links_on_server(self, context, new_links):
         """Create new links on server.
@@ -144,16 +175,32 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin):
 
         # Create link themselves
         for link_type, items in new_links.items():
+            mapping = collections.defaultdict(set)
+            # Make sure there are no duplicates of src > dst ids
             for item in items:
-                input_id, output_id = item
-                create_link(
-                    project_name,
-                    link_type,
-                    input_id,
-                    "version",
-                    output_id,
-                    "version"
-                )
+                _input_id, _output_id = item
+                mapping[_input_id].add(_output_id)
+
+            existing_links_by_in_id = self._get_existing_links(
+                project_name, link_type, set(mapping.keys())
+            )
+
+            for input_id, output_ids in mapping.items():
+                existing_links = existing_links_by_in_id[input_id]
+                for output_id in output_ids:
+                    # Skip creation of link if already exists
+                    # NOTE: AYON server does not support
+                    #     to have same links
+                    if output_id in existing_links:
+                        continue
+                    create_link(
+                        project_name,
+                        link_type,
+                        input_id,
+                        "version",
+                        output_id,
+                        "version"
+                    )
 
 
 if not AYON_SERVER_ENABLED:
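A minimal illustration (hypothetical ids) of the deduplication step this hunk introduces before hitting the server:

```python
import collections

items = [("in1", "out1"), ("in1", "out1"), ("in1", "out2")]
mapping = collections.defaultdict(set)
for input_id, output_id in items:
    mapping[input_id].add(output_id)
# mapping == {"in1": {"out1", "out2"}}: each src > dst pair is considered
# once, then filtered against links that already exist on the server.
```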
@@ -39,8 +39,10 @@ class ImageIOFileRulesModel(BaseSettingsModel):
 
 
 class ColorManagementPreferenceV2Model(BaseSettingsModel):
-    """Color Management Preference v2 (Maya 2022+)."""
-    _layout = "expanded"
+    """Color Management Preference v2 (Maya 2022+).
+
+    Please migrate all to 'imageio/workfile' and enable it.
+    """
 
     enabled: bool = Field(True, title="Use Color Management Preference v2")
@@ -51,7 +53,6 @@
 
 class ColorManagementPreferenceModel(BaseSettingsModel):
     """Color Management Preference (legacy)."""
-    _layout = "expanded"
 
     renderSpace: str = Field(title="Rendering Space")
     viewTransform: str = Field(title="Viewer Transform ")
@@ -89,11 +90,11 @@ class ImageIOSettings(BaseSettingsModel):
     # Deprecated
     colorManagementPreference_v2: ColorManagementPreferenceV2Model = Field(
         default_factory=ColorManagementPreferenceV2Model,
-        title="Color Management Preference v2 (Maya 2022+)"
+        title="DEPRECATED: Color Management Preference v2 (Maya 2022+)"
     )
     colorManagementPreference: ColorManagementPreferenceModel = Field(
         default_factory=ColorManagementPreferenceModel,
-        title="Color Management Preference (legacy)"
+        title="DEPRECATED: Color Management Preference (legacy)"
     )
@@ -16,13 +16,19 @@ OpenPype stores some of it's settings and configuration in local file system. Th
 ## Categories
 
 ### OpenPype Mongo URL
 The **Mongo URL** is the database URL given by your Studio. More details [here](artist_getting_started.md#mongodb).
 
 ### General
 **OpenPype Username** : enter your username (if not provided, it uses computer session username by default). This username is used to sign your actions on **OpenPype**, for example the "author" on a publish.
 
+**Admin permissions** : When enabled you do not need to enter a password (if defined in Studio Settings) to access the **Admin** section.
+
+### Experimental tools
+Future version of existing tools or new ones.
+
 ### Environments
 Local replacement of the environment data of each software and additional internal data necessary to be loaded correctly.
 
 ### Applications
 Local override of software executable paths for each version. More details [here](admin_settings_system.md#applications).
 
 ### Project Settings
 
 The **Project Settings** allows to determine the root folder. More details [here](module_site_sync.md#local-settings).
Binary file changed (not shown). Before: 7 KiB, After: 11 KiB.