Merge pull request #5378 from ynput/bugfix/OP-6416_3dsmax-container-tab

Max: Adding custom modifiers back to the loaded objects
Kayla Man 2023-09-04 19:22:54 +08:00 committed by GitHub
commit c8b6bcf990
11 changed files with 523 additions and 136 deletions

View file

@@ -6,7 +6,7 @@ from typing import Any, Dict, Union
import six
from openpype.pipeline.context_tools import (
get_current_project, get_current_project_asset,)
get_current_project, get_current_project_asset)
from pymxs import runtime as rt
JSON_PREFIX = "JSON::"
@@ -312,3 +312,98 @@ def set_timeline(frameStart, frameEnd):
"""
rt.animationRange = rt.interval(frameStart, frameEnd)
return rt.animationRange
def unique_namespace(namespace, format="%02d",
prefix="", suffix="", con_suffix="CON"):
"""Return unique namespace
Arguments:
namespace (str): Name of namespace to consider
format (str, optional): Formatting of the given iteration number
suffix (str, optional): Only consider namespaces with this suffix.
con_suffix: max only, for finding the name of the master container
>>> unique_namespace("bar")
# bar01
>>> unique_namespace(":hello")
# :hello01
>>> unique_namespace("bar:", suffix="_NS")
# bar01_NS:
"""
def current_namespace():
current = namespace
# When inside a namespace Max adds no trailing :
if not current.endswith(":"):
current += ":"
return current
# Always check against the absolute namespace root
# There's no clash with :x if we're defining namespace :a:x
ROOT = ":" if namespace.startswith(":") else current_namespace()
# Strip trailing `:` tokens since we might want to add a suffix
start = ":" if namespace.startswith(":") else ""
end = ":" if namespace.endswith(":") else ""
namespace = namespace.strip(":")
if ":" in namespace:
# Split off any nesting that we don't uniqify anyway.
parents, namespace = namespace.rsplit(":", 1)
start += parents + ":"
ROOT += start
iteration = 1
increment_version = True
while increment_version:
nr_namespace = namespace + format % iteration
unique = prefix + nr_namespace + suffix
container_name = f"{unique}:{namespace}{con_suffix}"
if not rt.getNodeByName(container_name):
name_space = start + unique + end
increment_version = False
return name_space
else:
increment_version = True
iteration += 1
def get_namespace(container_name):
"""Get the namespace and name of the sub-container
Args:
container_name (str): the name of master container
Raises:
RuntimeError: when there is no master container found
Returns:
namespace (str): namespace of the sub-container
name (str): name of the sub-container
"""
node = rt.getNodeByName(container_name)
if not node:
raise RuntimeError("Master Container Not Found..")
name = rt.getUserProp(node, "name")
namespace = rt.getUserProp(node, "namespace")
return namespace, name
def object_transform_set(container_children):
"""A function which allows to store the transform of
previous loaded object(s)
Args:
container_children(list): A list of nodes
Returns:
transform_set (dict): A dict with all transform data of
the previous loaded object(s)
"""
transform_set = {}
for node in container_children:
name = f"{node.name}.transform"
transform_set[name] = node.pos
name = f"{node.name}.scale"
transform_set[name] = node.scale
return transform_set
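The three helpers added above (unique_namespace, get_namespace and object_transform_set) are meant to be combined by the loader update() methods further down in this diff. A minimal sketch of that round trip, assuming a product named "model" and the "param"/"_CON" naming used by the loaders below (all node names here are illustrative):

    from pymxs import runtime as rt
    from openpype.hosts.max.api.lib import (
        unique_namespace,
        get_namespace,
        object_transform_set,
    )

    # On load: reserve a fresh namespace such as "model_01_".
    namespace = unique_namespace("model" + "_", suffix="_")

    # On update: recover namespace and name from the master container
    # written by containerise(), e.g. "model_01_:model_CON".
    namespace, name = get_namespace("model_01_:model_CON")

    # Snapshot position/scale of the previously loaded nodes so they can
    # be re-applied to the re-imported ones.
    sub_container = rt.getNodeByName(f"{namespace}:{name}_param")
    transform_data = object_transform_set(sub_container.Children)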

View file

@@ -15,8 +15,10 @@ from openpype.pipeline import (
)
from openpype.hosts.max.api.menu import OpenPypeMenu
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.plugin import MS_CUSTOM_ATTRIB
from openpype.hosts.max import MAX_HOST_DIR
from pymxs import runtime as rt # noqa
log = logging.getLogger("openpype.hosts.max")
@@ -152,17 +154,18 @@ def ls() -> list:
yield lib.read(container)
def containerise(name: str, nodes: list, context, loader=None, suffix="_CON"):
def containerise(name: str, nodes: list, context,
namespace=None, loader=None, suffix="_CON"):
data = {
"schema": "openpype:container-2.0",
"id": AVALON_CONTAINER_ID,
"name": name,
"namespace": "",
"namespace": namespace or "",
"loader": loader,
"representation": context["representation"]["_id"],
}
container_name = f"{name}{suffix}"
container_name = f"{namespace}:{name}{suffix}"
container = rt.container(name=container_name)
for node in nodes:
node.Parent = container
@@ -170,3 +173,52 @@ def containerise(name: str, nodes: list, context, loader=None, suffix="_CON"):
if not lib.imprint(container_name, data):
print(f"imprinting of {container_name} failed.")
return container
def load_custom_attribute_data():
"""Re-loading the Openpype/AYON custom parameter built by the creator
Returns:
attribute: re-loading the custom OP attributes set in Maxscript
"""
return rt.Execute(MS_CUSTOM_ATTRIB)
def import_custom_attribute_data(container: str, selections: list):
"""Importing the Openpype/AYON custom parameter built by the creator
Args:
container (str): target container which adds custom attributes
selections (list): nodes to be added into
group in custom attributes
"""
attrs = load_custom_attribute_data()
modifier = rt.EmptyModifier()
rt.addModifier(container, modifier)
container.modifiers[0].name = "OP Data"
rt.custAttributes.add(container.modifiers[0], attrs)
nodes = {}
for i in selections:
nodes = {
str(i): rt.NodeTransformMonitor(node=i),
}
# Setting the property
rt.setProperty(
container.modifiers[0].openPypeData,
"all_handles", nodes.values())
rt.setProperty(
container.modifiers[0].openPypeData,
"sel_list", nodes.keys())
def update_custom_attribute_data(container: str, selections: list):
"""Updating the Openpype/AYON custom parameter built by the creator
Args:
container (str): target container which adds custom attributes
selections (list): nodes to be added into
group in custom attributes
"""
if container.modifiers[0].name == "OP Data":
rt.deleteModifier(container, container.modifiers[0])
import_custom_attribute_data(container, selections)
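Together with the extended containerise() signature above, these two helpers define the pattern every loader in this PR follows: create a namespaced sub-container carrying the "OP Data" modifier on load, and rebuild that modifier on update. A condensed, hypothetical load step (the load_example function and "ExampleLoader" name are not part of the PR) might look like:

    from pymxs import runtime as rt
    from openpype.hosts.max.api.lib import unique_namespace
    from openpype.hosts.max.api.pipeline import (
        containerise,
        import_custom_attribute_data,
    )

    def load_example(context, name):
        namespace = unique_namespace(name + "_", suffix="_")
        # Sub-container that receives the "OP Data" modifier.
        container = rt.Container(name=f"{namespace}:{name}_param")
        selections = rt.GetCurrentSelection()
        import_custom_attribute_data(container, selections)
        for node in selections:
            node.Parent = container
            node.name = f"{namespace}:{node.name}"
        # Master container is named f"{namespace}:{name}_CON".
        return containerise(
            name, [container], context,
            namespace, loader="ExampleLoader")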

View file

@@ -1,7 +1,16 @@
import os
from openpype.hosts.max.api import lib, maintained_selection
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.lib import (
unique_namespace,
get_namespace,
object_transform_set
)
from openpype.hosts.max.api.pipeline import (
containerise,
import_custom_attribute_data,
update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -13,50 +22,76 @@ class FbxLoader(load.LoaderPlugin):
order = -9
icon = "code-fork"
color = "white"
postfix = "param"
def load(self, context, name=None, namespace=None, data=None):
from pymxs import runtime as rt
filepath = self.filepath_from_context(context)
filepath = os.path.normpath(filepath)
rt.FBXImporterSetParam("Animation", True)
rt.FBXImporterSetParam("Camera", True)
rt.FBXImporterSetParam("AxisConversionMethod", True)
rt.FBXImporterSetParam("Mode", rt.Name("create"))
rt.FBXImporterSetParam("Preserveinstances", True)
rt.ImportFile(
filepath,
rt.name("noPrompt"),
using=rt.FBXIMP)
container = rt.GetNodeByName(f"{name}")
if not container:
container = rt.Container()
container.name = f"{name}"
namespace = unique_namespace(
name + "_",
suffix="_",
)
container = rt.container(
name=f"{namespace}:{name}_{self.postfix}")
selections = rt.GetCurrentSelection()
import_custom_attribute_data(container, selections)
for selection in rt.GetCurrentSelection():
for selection in selections:
selection.Parent = container
selection.name = f"{namespace}:{selection.name}"
return containerise(
name, [container], context, loader=self.__class__.__name__)
name, [container], context,
namespace, loader=self.__class__.__name__)
def update(self, container, representation):
from pymxs import runtime as rt
path = get_representation_path(representation)
-        node = rt.GetNodeByName(container["instance_node"])
-        rt.Select(node.Children)
-        fbx_reimport_cmd = (
-            f"""
-FBXImporterSetParam "Animation" true
-FBXImporterSetParam "Cameras" true
-FBXImporterSetParam "AxisConversionMethod" true
-FbxExporterSetParam "UpAxis" "Y"
-FbxExporterSetParam "Preserveinstances" true
-importFile @"{path}" #noPrompt using:FBXIMP
-        """)
-        rt.Execute(fbx_reimport_cmd)
+        node_name = container["instance_node"]
+        node = rt.getNodeByName(node_name)
+        namespace, name = get_namespace(node_name)
+        sub_node_name = f"{namespace}:{name}_{self.postfix}"
+        inst_container = rt.getNodeByName(sub_node_name)
+        rt.Select(inst_container.Children)
+        transform_data = object_transform_set(inst_container.Children)
+        for prev_fbx_obj in rt.selection:
+            if rt.isValidNode(prev_fbx_obj):
+                rt.Delete(prev_fbx_obj)
+        rt.FBXImporterSetParam("Animation", True)
+        rt.FBXImporterSetParam("Camera", True)
+        rt.FBXImporterSetParam("Mode", rt.Name("merge"))
+        rt.FBXImporterSetParam("AxisConversionMethod", True)
+        rt.FBXImporterSetParam("Preserveinstances", True)
+        rt.ImportFile(
+            path, rt.name("noPrompt"), using=rt.FBXIMP)
+        current_fbx_objects = rt.GetCurrentSelection()
+        for fbx_object in current_fbx_objects:
+            if fbx_object.Parent != inst_container:
+                fbx_object.Parent = inst_container
+            fbx_object.name = f"{namespace}:{fbx_object.name}"
+            fbx_object.pos = transform_data[
+                f"{fbx_object.name}.transform"]
+            fbx_object.scale = transform_data[
+                f"{fbx_object.name}.scale"]
+        for children in node.Children:
+            if rt.classOf(children) == rt.Container:
+                if children.name == sub_node_name:
+                    update_custom_attribute_data(
+                        children, current_fbx_objects)
with maintained_selection():
rt.Select(node)
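The update above locates the sub-container whose first modifier is named "OP Data" and rebuilds it through update_custom_attribute_data(). A small sketch of inspecting that modifier from a script, assuming rt.getProperty mirrors the rt.setProperty calls used when the modifier was created and that NodeTransformMonitor exposes a node property (the container name is hypothetical):

    from pymxs import runtime as rt

    sub_container = rt.getNodeByName("model_01_:model_param")
    op_data = sub_container.modifiers[0]
    if op_data.name == "OP Data":
        handles = rt.getProperty(op_data.openPypeData, "all_handles")
        sel_list = rt.getProperty(op_data.openPypeData, "sel_list")
        for monitor, node_name in zip(handles, sel_list):
            # Each handle is a NodeTransformMonitor pointing at a loaded node.
            print(node_name, monitor.node)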

View file

@@ -1,7 +1,15 @@
import os
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.lib import (
unique_namespace,
get_namespace,
object_transform_set
)
from openpype.hosts.max.api.pipeline import (
containerise, import_custom_attribute_data,
update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -16,22 +24,34 @@ class MaxSceneLoader(load.LoaderPlugin):
order = -8
icon = "code-fork"
color = "green"
postfix = "param"
def load(self, context, name=None, namespace=None, data=None):
from pymxs import runtime as rt
path = self.filepath_from_context(context)
path = os.path.normpath(path)
# import the max scene by using "merge file"
path = path.replace('\\', '/')
rt.MergeMaxFile(path)
rt.MergeMaxFile(path, quiet=True, includeFullGroup=True)
max_objects = rt.getLastMergedNodes()
max_container = rt.Container(name=f"{name}")
for max_object in max_objects:
max_object.Parent = max_container
max_object_names = [obj.name for obj in max_objects]
# implement the OP/AYON custom attributes before load
max_container = []
namespace = unique_namespace(
name + "_",
suffix="_",
)
container_name = f"{namespace}:{name}_{self.postfix}"
container = rt.Container(name=container_name)
import_custom_attribute_data(container, max_objects)
max_container.append(container)
max_container.extend(max_objects)
for max_obj, obj_name in zip(max_objects, max_object_names):
max_obj.name = f"{namespace}:{obj_name}"
return containerise(
name, [max_container], context, loader=self.__class__.__name__)
name, max_container, context,
namespace, loader=self.__class__.__name__)
def update(self, container, representation):
from pymxs import runtime as rt
@@ -39,15 +59,32 @@ class MaxSceneLoader(load.LoaderPlugin):
path = get_representation_path(representation)
node_name = container["instance_node"]
-        rt.MergeMaxFile(path,
-                        rt.Name("noRedraw"),
-                        rt.Name("deleteOldDups"),
-                        rt.Name("useSceneMtlDups"))
-        max_objects = rt.getLastMergedNodes()
-        container_node = rt.GetNodeByName(node_name)
-        for max_object in max_objects:
-            max_object.Parent = container_node
+        node = rt.getNodeByName(node_name)
+        namespace, name = get_namespace(node_name)
+        sub_container_name = f"{namespace}:{name}_{self.postfix}"
+        # delete the old container with attribute
+        # delete old duplicate
+        rt.Select(node.Children)
+        transform_data = object_transform_set(node.Children)
+        for prev_max_obj in rt.GetCurrentSelection():
+            if rt.isValidNode(prev_max_obj) and prev_max_obj.name != sub_container_name:  # noqa
+                rt.Delete(prev_max_obj)
+        rt.MergeMaxFile(path, rt.Name("deleteOldDups"))
+        current_max_objects = rt.getLastMergedNodes()
+        current_max_object_names = [obj.name for obj
+                                    in current_max_objects]
+        sub_container = rt.getNodeByName(sub_container_name)
+        update_custom_attribute_data(sub_container, current_max_objects)
+        for max_object in current_max_objects:
+            max_object.Parent = node
+        for max_obj, obj_name in zip(current_max_objects,
+                                     current_max_object_names):
+            max_obj.name = f"{namespace}:{obj_name}"
+            max_obj.pos = transform_data[
+                f"{max_obj.name}.transform"]
+            max_obj.scale = transform_data[
+                f"{max_obj.name}.scale"]
lib.imprint(container["instance_node"], {
"representation": str(representation["_id"])

View file

@@ -1,8 +1,14 @@
import os
from openpype.pipeline import load, get_representation_path
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.pipeline import (
containerise,
import_custom_attribute_data,
update_custom_attribute_data
)
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import maintained_selection
from openpype.hosts.max.api.lib import (
maintained_selection, unique_namespace
)
class ModelAbcLoader(load.LoaderPlugin):
@@ -14,6 +20,7 @@ class ModelAbcLoader(load.LoaderPlugin):
order = -10
icon = "code-fork"
color = "orange"
postfix = "param"
def load(self, context, name=None, namespace=None, data=None):
from pymxs import runtime as rt
@@ -30,7 +37,7 @@ class ModelAbcLoader(load.LoaderPlugin):
rt.AlembicImport.CustomAttributes = True
rt.AlembicImport.UVs = True
rt.AlembicImport.VertexColors = True
rt.importFile(file_path, rt.name("noPrompt"))
rt.importFile(file_path, rt.name("noPrompt"), using=rt.AlembicImport)
abc_after = {
c
@@ -45,9 +52,22 @@ class ModelAbcLoader(load.LoaderPlugin):
self.log.error("Something failed when loading.")
abc_container = abc_containers.pop()
import_custom_attribute_data(
abc_container, abc_container.Children)
namespace = unique_namespace(
name + "_",
suffix="_",
)
for abc_object in abc_container.Children:
abc_object.name = f"{namespace}:{abc_object.name}"
# rename the abc container with namespace
abc_container_name = f"{namespace}:{name}_{self.postfix}"
abc_container.name = abc_container_name
return containerise(
name, [abc_container], context, loader=self.__class__.__name__
name, [abc_container], context,
namespace, loader=self.__class__.__name__
)
def update(self, container, representation):
@@ -55,21 +75,19 @@ class ModelAbcLoader(load.LoaderPlugin):
path = get_representation_path(representation)
node = rt.GetNodeByName(container["instance_node"])
-        rt.Select(node.Children)
-        for alembic in rt.Selection:
-            abc = rt.GetNodeByName(alembic.name)
-            rt.Select(abc.Children)
-            for abc_con in rt.Selection:
-                container = rt.GetNodeByName(abc_con.name)
-                container.source = path
-                rt.Select(container.Children)
-                for abc_obj in rt.Selection:
-                    alembic_obj = rt.GetNodeByName(abc_obj.name)
-                    alembic_obj.source = path
         with maintained_selection():
-            rt.Select(node)
+            rt.Select(node.Children)
+            for alembic in rt.Selection:
+                abc = rt.GetNodeByName(alembic.name)
+                update_custom_attribute_data(abc, abc.Children)
+                rt.Select(abc.Children)
+                for abc_con in abc.Children:
+                    abc_con.source = path
+                    rt.Select(abc_con.Children)
+                    for abc_obj in abc_con.Children:
+                        abc_obj.source = path
lib.imprint(
container["instance_node"],

View file

@@ -1,7 +1,15 @@
import os
from openpype.pipeline import load, get_representation_path
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.pipeline import (
containerise, import_custom_attribute_data,
update_custom_attribute_data
)
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import (
unique_namespace,
get_namespace,
object_transform_set
)
from openpype.hosts.max.api.lib import maintained_selection
@@ -13,6 +21,7 @@ class FbxModelLoader(load.LoaderPlugin):
order = -9
icon = "code-fork"
color = "white"
postfix = "param"
def load(self, context, name=None, namespace=None, data=None):
from pymxs import runtime as rt
@@ -20,39 +29,69 @@ class FbxModelLoader(load.LoaderPlugin):
filepath = os.path.normpath(self.filepath_from_context(context))
rt.FBXImporterSetParam("Animation", False)
rt.FBXImporterSetParam("Cameras", False)
rt.FBXImporterSetParam("Mode", rt.Name("create"))
rt.FBXImporterSetParam("Preserveinstances", True)
rt.importFile(filepath, rt.name("noPrompt"), using=rt.FBXIMP)
container = rt.GetNodeByName(name)
if not container:
container = rt.Container()
container.name = name
namespace = unique_namespace(
name + "_",
suffix="_",
)
container = rt.container(
name=f"{namespace}:{name}_{self.postfix}")
selections = rt.GetCurrentSelection()
import_custom_attribute_data(container, selections)
for selection in rt.GetCurrentSelection():
for selection in selections:
selection.Parent = container
selection.name = f"{namespace}:{selection.name}"
return containerise(
name, [container], context, loader=self.__class__.__name__
name, [container], context,
namespace, loader=self.__class__.__name__
)
def update(self, container, representation):
from pymxs import runtime as rt
path = get_representation_path(representation)
-        node = rt.getNodeByName(container["instance_node"])
-        rt.select(node.Children)
+        node_name = container["instance_node"]
+        node = rt.getNodeByName(node_name)
+        namespace, name = get_namespace(node_name)
+        sub_node_name = f"{namespace}:{name}_{self.postfix}"
+        inst_container = rt.getNodeByName(sub_node_name)
+        rt.Select(inst_container.Children)
+        transform_data = object_transform_set(inst_container.Children)
+        for prev_fbx_obj in rt.selection:
+            if rt.isValidNode(prev_fbx_obj):
+                rt.Delete(prev_fbx_obj)
rt.FBXImporterSetParam("Animation", False)
rt.FBXImporterSetParam("Cameras", False)
rt.FBXImporterSetParam("Mode", rt.Name("merge"))
rt.FBXImporterSetParam("AxisConversionMethod", True)
rt.FBXImporterSetParam("UpAxis", "Y")
rt.FBXImporterSetParam("Preserveinstances", True)
rt.importFile(path, rt.name("noPrompt"), using=rt.FBXIMP)
current_fbx_objects = rt.GetCurrentSelection()
for fbx_object in current_fbx_objects:
if fbx_object.Parent != inst_container:
fbx_object.Parent = inst_container
fbx_object.name = f"{namespace}:{fbx_object.name}"
fbx_object.pos = transform_data[
f"{fbx_object.name}.transform"]
fbx_object.scale = transform_data[
f"{fbx_object.name}.scale"]
for children in node.Children:
if rt.classOf(children) == rt.Container:
if children.name == sub_node_name:
update_custom_attribute_data(
children, current_fbx_objects)
with maintained_selection():
rt.Select(node)
lib.imprint(
container["instance_node"],
node_name,
{"representation": str(representation["_id"])},
)

View file

@@ -1,8 +1,18 @@
import os
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import (
unique_namespace,
get_namespace,
maintained_selection,
object_transform_set
)
from openpype.hosts.max.api.lib import maintained_selection
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.pipeline import (
containerise,
import_custom_attribute_data,
update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -14,6 +24,7 @@ class ObjLoader(load.LoaderPlugin):
order = -9
icon = "code-fork"
color = "white"
postfix = "param"
def load(self, context, name=None, namespace=None, data=None):
from pymxs import runtime as rt
@@ -22,36 +33,49 @@ class ObjLoader(load.LoaderPlugin):
self.log.debug("Executing command to import..")
rt.Execute(f'importFile @"{filepath}" #noPrompt using:ObjImp')
namespace = unique_namespace(
name + "_",
suffix="_",
)
# create "missing" container for obj import
container = rt.Container()
container.name = name
container = rt.Container(name=f"{namespace}:{name}_{self.postfix}")
selections = rt.GetCurrentSelection()
import_custom_attribute_data(container, selections)
# get current selection
for selection in rt.GetCurrentSelection():
for selection in selections:
selection.Parent = container
asset = rt.GetNodeByName(name)
selection.name = f"{namespace}:{selection.name}"
return containerise(
name, [asset], context, loader=self.__class__.__name__)
name, [container], context,
namespace, loader=self.__class__.__name__)
def update(self, container, representation):
from pymxs import runtime as rt
path = get_representation_path(representation)
node_name = container["instance_node"]
-        node = rt.GetNodeByName(node_name)
-        instance_name, _ = node_name.split("_")
-        container = rt.GetNodeByName(instance_name)
-        for child in container.Children:
-            rt.Delete(child)
+        node = rt.getNodeByName(node_name)
+        namespace, name = get_namespace(node_name)
+        sub_node_name = f"{namespace}:{name}_{self.postfix}"
+        inst_container = rt.getNodeByName(sub_node_name)
+        rt.Select(inst_container.Children)
+        transform_data = object_transform_set(inst_container.Children)
+        for prev_obj in rt.selection:
+            if rt.isValidNode(prev_obj):
+                rt.Delete(prev_obj)
         rt.Execute(f'importFile @"{path}" #noPrompt using:ObjImp')
-        # get current selection
-        for selection in rt.GetCurrentSelection():
-            selection.Parent = container
+        selections = rt.GetCurrentSelection()
+        update_custom_attribute_data(inst_container, selections)
+        for selection in selections:
+            selection.Parent = inst_container
+            selection.name = f"{namespace}:{selection.name}"
+            selection.pos = transform_data[
+                f"{selection.name}.transform"]
+            selection.scale = transform_data[
+                f"{selection.name}.scale"]
with maintained_selection():
rt.Select(node)

View file

@@ -1,8 +1,16 @@
import os
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import (
unique_namespace,
get_namespace,
object_transform_set
)
from openpype.hosts.max.api.lib import maintained_selection
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.pipeline import (
containerise,
import_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -15,6 +23,7 @@ class ModelUSDLoader(load.LoaderPlugin):
order = -10
icon = "code-fork"
color = "orange"
postfix = "param"
def load(self, context, name=None, namespace=None, data=None):
from pymxs import runtime as rt
@@ -30,11 +39,24 @@ class ModelUSDLoader(load.LoaderPlugin):
rt.LogLevel = rt.Name("info")
rt.USDImporter.importFile(filepath,
importOptions=import_options)
namespace = unique_namespace(
name + "_",
suffix="_",
)
asset = rt.GetNodeByName(name)
import_custom_attribute_data(asset, asset.Children)
for usd_asset in asset.Children:
usd_asset.name = f"{namespace}:{usd_asset.name}"
asset_name = f"{namespace}:{name}_{self.postfix}"
asset.name = asset_name
# need to get the correct container after renamed
asset = rt.GetNodeByName(asset_name)
return containerise(
name, [asset], context, loader=self.__class__.__name__)
name, [asset], context,
namespace, loader=self.__class__.__name__)
def update(self, container, representation):
from pymxs import runtime as rt
@@ -42,11 +64,16 @@ class ModelUSDLoader(load.LoaderPlugin):
path = get_representation_path(representation)
node_name = container["instance_node"]
node = rt.GetNodeByName(node_name)
+        namespace, name = get_namespace(node_name)
+        sub_node_name = f"{namespace}:{name}_{self.postfix}"
+        transform_data = None
         for n in node.Children:
-            for r in n.Children:
-                rt.Delete(r)
+            rt.Select(n.Children)
+            transform_data = object_transform_set(n.Children)
+            for prev_usd_asset in rt.selection:
+                if rt.isValidNode(prev_usd_asset):
+                    rt.Delete(prev_usd_asset)
             rt.Delete(n)
-        instance_name, _ = node_name.split("_")
import_options = rt.USDImporter.CreateOptions()
base_filename = os.path.basename(path)
@@ -55,11 +82,20 @@ class ModelUSDLoader(load.LoaderPlugin):
rt.LogPath = log_filepath
rt.LogLevel = rt.Name("info")
-        rt.USDImporter.importFile(path,
-                                  importOptions=import_options)
+        rt.USDImporter.importFile(
+            path, importOptions=import_options)
-        asset = rt.GetNodeByName(instance_name)
+        asset = rt.GetNodeByName(name)
         asset.Parent = node
+        import_custom_attribute_data(asset, asset.Children)
+        for children in asset.Children:
+            children.name = f"{namespace}:{children.name}"
+            children.pos = transform_data[
+                f"{children.name}.transform"]
+            children.scale = transform_data[
+                f"{children.name}.scale"]
+        asset.name = sub_node_name
with maintained_selection():
rt.Select(node)

View file

@@ -7,7 +7,12 @@ Because of limited api, alembics can be only loaded, but not easily updated.
import os
from openpype.pipeline import load, get_representation_path
from openpype.hosts.max.api import lib, maintained_selection
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.lib import unique_namespace
from openpype.hosts.max.api.pipeline import (
containerise,
import_custom_attribute_data,
update_custom_attribute_data
)
class AbcLoader(load.LoaderPlugin):
@@ -19,6 +24,7 @@ class AbcLoader(load.LoaderPlugin):
order = -10
icon = "code-fork"
color = "orange"
postfix = "param"
def load(self, context, name=None, namespace=None, data=None):
from pymxs import runtime as rt
@@ -33,7 +39,7 @@ class AbcLoader(load.LoaderPlugin):
}
rt.AlembicImport.ImportToRoot = False
rt.importFile(file_path, rt.name("noPrompt"))
rt.importFile(file_path, rt.name("noPrompt"), using=rt.AlembicImport)
abc_after = {
c
@@ -48,13 +54,27 @@ class AbcLoader(load.LoaderPlugin):
self.log.error("Something failed when loading.")
abc_container = abc_containers.pop()
for abc in rt.GetCurrentSelection():
selections = rt.GetCurrentSelection()
import_custom_attribute_data(
abc_container, abc_container.Children)
for abc in selections:
for cam_shape in abc.Children:
cam_shape.playbackType = 2
namespace = unique_namespace(
name + "_",
suffix="_",
)
for abc_object in abc_container.Children:
abc_object.name = f"{namespace}:{abc_object.name}"
# rename the abc container with namespace
abc_container_name = f"{namespace}:{name}_{self.postfix}"
abc_container.name = abc_container_name
return containerise(
name, [abc_container], context, loader=self.__class__.__name__
name, [abc_container], context,
namespace, loader=self.__class__.__name__
)
def update(self, container, representation):
@@ -63,28 +83,23 @@ class AbcLoader(load.LoaderPlugin):
path = get_representation_path(representation)
node = rt.GetNodeByName(container["instance_node"])
-        alembic_objects = self.get_container_children(node, "AlembicObject")
-        for alembic_object in alembic_objects:
-            alembic_object.source = path
-        lib.imprint(
-            container["instance_node"],
-            {"representation": str(representation["_id"])},
-        )
         with maintained_selection():
             rt.Select(node.Children)
             for alembic in rt.Selection:
                 abc = rt.GetNodeByName(alembic.name)
+                update_custom_attribute_data(abc, abc.Children)
                 rt.Select(abc.Children)
-                for abc_con in rt.Selection:
-                    container = rt.GetNodeByName(abc_con.name)
-                    container.source = path
-                    rt.Select(container.Children)
-                    for abc_obj in rt.Selection:
-                        alembic_obj = rt.GetNodeByName(abc_obj.name)
-                        alembic_obj.source = path
+                for abc_con in abc.Children:
+                    abc_con.source = path
+                    rt.Select(abc_con.Children)
+                    for abc_obj in abc_con.Children:
+                        abc_obj.source = path
+        lib.imprint(
+            container["instance_node"],
+            {"representation": str(representation["_id"])},
+        )
def switch(self, container, representation):
self.update(container, representation)

View file

@@ -1,7 +1,14 @@
import os
from openpype.hosts.max.api import lib, maintained_selection
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.lib import (
unique_namespace, get_namespace
)
from openpype.hosts.max.api.pipeline import (
containerise,
import_custom_attribute_data,
update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -13,6 +20,7 @@ class PointCloudLoader(load.LoaderPlugin):
order = -8
icon = "code-fork"
color = "green"
postfix = "param"
def load(self, context, name=None, namespace=None, data=None):
"""load point cloud by tyCache"""
@@ -22,10 +30,19 @@ class PointCloudLoader(load.LoaderPlugin):
obj = rt.tyCache()
obj.filename = filepath
prt_container = rt.GetNodeByName(obj.name)
namespace = unique_namespace(
name + "_",
suffix="_",
)
prt_container = rt.Container(
name=f"{namespace}:{name}_{self.postfix}")
import_custom_attribute_data(prt_container, [obj])
obj.Parent = prt_container
obj.name = f"{namespace}:{obj.name}"
return containerise(
name, [prt_container], context, loader=self.__class__.__name__)
name, [prt_container], context,
namespace, loader=self.__class__.__name__)
def update(self, container, representation):
"""update the container"""
@@ -33,15 +50,18 @@ class PointCloudLoader(load.LoaderPlugin):
path = get_representation_path(representation)
node = rt.GetNodeByName(container["instance_node"])
namespace, name = get_namespace(container["instance_node"])
sub_node_name = f"{namespace}:{name}_{self.postfix}"
inst_container = rt.getNodeByName(sub_node_name)
update_custom_attribute_data(
inst_container, inst_container.Children)
with maintained_selection():
rt.Select(node.Children)
for prt in rt.Selection:
prt_object = rt.GetNodeByName(prt.name)
prt_object.filename = path
lib.imprint(container["instance_node"], {
"representation": str(representation["_id"])
})
for prt in inst_container.Children:
prt.filename = path
lib.imprint(container["instance_node"], {
"representation": str(representation["_id"])
})
def switch(self, container, representation):
self.update(container, representation)

View file

@@ -5,8 +5,15 @@ from openpype.pipeline import (
load,
get_representation_path
)
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.pipeline import (
containerise,
import_custom_attribute_data,
update_custom_attribute_data
)
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import (
unique_namespace, get_namespace
)
class RedshiftProxyLoader(load.LoaderPlugin):
@@ -18,6 +25,7 @@ class RedshiftProxyLoader(load.LoaderPlugin):
order = -9
icon = "code-fork"
color = "white"
postfix = "param"
def load(self, context, name=None, namespace=None, data=None):
from pymxs import runtime as rt
@@ -30,24 +38,32 @@ class RedshiftProxyLoader(load.LoaderPlugin):
if collections:
rs_proxy.is_sequence = True
container = rt.container()
container.name = name
namespace = unique_namespace(
name + "_",
suffix="_",
)
container = rt.Container(
name=f"{namespace}:{name}_{self.postfix}")
rs_proxy.Parent = container
asset = rt.getNodeByName(name)
rs_proxy.name = f"{namespace}:{rs_proxy.name}"
import_custom_attribute_data(container, [rs_proxy])
return containerise(
name, [asset], context, loader=self.__class__.__name__)
name, [container], context,
namespace, loader=self.__class__.__name__)
def update(self, container, representation):
from pymxs import runtime as rt
path = get_representation_path(representation)
node = rt.getNodeByName(container["instance_node"])
for children in node.Children:
children_node = rt.getNodeByName(children.name)
for proxy in children_node.Children:
proxy.file = path
namespace, name = get_namespace(container["instance_node"])
sub_node_name = f"{namespace}:{name}_{self.postfix}"
inst_container = rt.getNodeByName(sub_node_name)
update_custom_attribute_data(
inst_container, inst_container.Children)
for proxy in inst_container.Children:
proxy.file = path
lib.imprint(container["instance_node"], {
"representation": str(representation["_id"])