Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)
Merge branch 'develop' into enhancement/ocio_configuration_max_2024
Commit c4202e7fca
304 changed files with 28051 additions and 3449 deletions
@@ -481,3 +481,19 @@ def object_transform_set(container_children):
        name = f"{node.name}.scale"
        transform_set[name] = node.scale
    return transform_set


def get_plugins() -> list:
    """Get all loaded plugins in 3dsMax

    Returns:
        plugin_info_list: a list of loaded plugins
    """
    manager = rt.PluginManager
    count = manager.pluginDllCount
    plugin_info_list = []
    for p in range(1, count + 1):
        plugin_info = manager.pluginDllName(p)
        plugin_info_list.append(plugin_info)

    return plugin_info_list
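
For reference, get_plugins() just walks 3ds Max's PluginManager and returns the loaded plugin DLL names, which the loaders below use as a pre-flight check. A minimal usage sketch (the helper name is illustrative, not part of the commit):

    from openpype.hosts.max.api.lib import get_plugins
    from openpype.pipeline.load import LoadError

    def require_plugin(dll_name):
        # Hypothetical guard: abort loading when a required plugin DLL
        # is not reported by the 3ds Max PluginManager.
        if dll_name not in get_plugins():
            raise LoadError(f"{dll_name} is not loaded/installed in 3ds Max.")

    require_plugin("usdimport.dli")
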
@@ -18,7 +18,6 @@ from openpype.hosts.max.api import lib
from openpype.hosts.max.api.plugin import MS_CUSTOM_ATTRIB
from openpype.hosts.max import MAX_HOST_DIR


from pymxs import runtime as rt # noqa

log = logging.getLogger("openpype.hosts.max")
@@ -167,12 +166,9 @@ def containerise(name: str, nodes: list, context,
        "loader": loader,
        "representation": context["representation"]["_id"],
    }

    container_name = f"{namespace}:{name}{suffix}"
    container = rt.container(name=container_name)
    for node in nodes:
        node.Parent = container

    import_custom_attribute_data(container, nodes)
    if not lib.imprint(container_name, data):
        print(f"imprinting of {container_name} failed.")
    return container
@@ -215,6 +211,7 @@ def import_custom_attribute_data(container: str, selections: list):
        container.modifiers[0].openPypeData,
        "sel_list", sel_list)


def update_custom_attribute_data(container: str, selections: list):
    """Updating the Openpype/AYON custom parameter built by the creator

@@ -226,3 +223,20 @@ def update_custom_attribute_data(container: str, selections: list):
    if container.modifiers[0].name == "OP Data":
        rt.deleteModifier(container, container.modifiers[0])
    import_custom_attribute_data(container, selections)


def get_previous_loaded_object(container: str):
    """Get previous loaded_object through the OP data

    Args:
        container (str): the container which stores the OP data

    Returns:
        node_list(list): list of nodes which are previously loaded
    """
    node_list = []
    sel_list = rt.getProperty(container.modifiers[0].openPypeData, "sel_list")
    for obj in rt.Objects:
        if str(obj) in sel_list:
            node_list.append(obj)
    return node_list
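
Taken together, get_previous_loaded_object() and update_custom_attribute_data() let a loader's update() find what it loaded last time via the "OP Data" modifier instead of relying on container parenting. A condensed sketch of the pattern the loaders below follow (the helper name and import callback are illustrative):

    from pymxs import runtime as rt
    from openpype.hosts.max.api.pipeline import (
        get_previous_loaded_object,
        update_custom_attribute_data
    )

    def replace_loaded_nodes(container_node, do_import):
        # Delete whatever the OP data recorded as previously loaded,
        # re-import, then refresh the OP data with the new nodes.
        for node in get_previous_loaded_object(container_node):
            if rt.isValidNode(node):
                rt.Delete(node)
        do_import()  # e.g. rt.importFile(...) for the new representation
        new_nodes = rt.GetCurrentSelection()
        update_custom_attribute_data(container_node, new_nodes)
        return new_nodes
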
@@ -65,12 +65,12 @@ MS_CUSTOM_ATTRIB = """attributes "openPypeData"

        on button_add pressed do
        (
            current_selection = selectByName title:"Select Objects to add to
            current_sel = selectByName title:"Select Objects to add to
            the Container" buttontext:"Add" filter:nodes_to_add
            if current_selection == undefined then return False
            if current_sel == undefined then return False
            temp_arr = #()
            i_node_arr = #()
            for c in current_selection do
            for c in current_sel do
            (
                handle_name = node_to_name c
                node_ref = NodeTransformMonitor node:c
@@ -89,15 +89,18 @@ MS_CUSTOM_ATTRIB = """attributes "openPypeData"

        on button_del pressed do
        (
            current_selection = selectByName title:"Select Objects to remove
            current_sel = selectByName title:"Select Objects to remove
            from the Container" buttontext:"Remove" filter: nodes_to_rmv
            if current_selection == undefined then return False
            if current_sel == undefined or current_sel.count == 0 then
            (
                return False
            )
            temp_arr = #()
            i_node_arr = #()
            new_i_node_arr = #()
            new_temp_arr = #()

            for c in current_selection do
            for c in current_sel do
            (
                node_ref = NodeTransformMonitor node:c as string
                handle_name = node_to_name c
@@ -8,7 +8,7 @@ from openpype.hosts.max.api.lib import (
)
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data,
    get_previous_loaded_object,
    update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -22,7 +22,6 @@ class FbxLoader(load.LoaderPlugin):
    order = -9
    icon = "code-fork"
    color = "white"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt
@@ -42,17 +41,13 @@ class FbxLoader(load.LoaderPlugin):
            name + "_",
            suffix="_",
        )
        container = rt.container(
            name=f"{namespace}:{name}_{self.postfix}")
        selections = rt.GetCurrentSelection()
        import_custom_attribute_data(container, selections)

        for selection in selections:
            selection.Parent = container
            selection.name = f"{namespace}:{selection.name}"

        return containerise(
            name, [container], context,
            name, selections, context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, representation):
@@ -61,12 +56,13 @@ class FbxLoader(load.LoaderPlugin):
        path = get_representation_path(representation)
        node_name = container["instance_node"]
        node = rt.getNodeByName(node_name)
        namespace, name = get_namespace(node_name)
        sub_node_name = f"{namespace}:{name}_{self.postfix}"
        inst_container = rt.getNodeByName(sub_node_name)
        rt.Select(inst_container.Children)
        transform_data = object_transform_set(inst_container.Children)
        for prev_fbx_obj in rt.selection:
        namespace, _ = get_namespace(node_name)

        node_list = get_previous_loaded_object(node)
        rt.Select(node_list)
        prev_fbx_objects = rt.GetCurrentSelection()
        transform_data = object_transform_set(prev_fbx_objects)
        for prev_fbx_obj in prev_fbx_objects:
            if rt.isValidNode(prev_fbx_obj):
                rt.Delete(prev_fbx_obj)

@@ -78,24 +74,17 @@ class FbxLoader(load.LoaderPlugin):
        rt.ImportFile(
            path, rt.name("noPrompt"), using=rt.FBXIMP)
        current_fbx_objects = rt.GetCurrentSelection()
        fbx_objects = []
        for fbx_object in current_fbx_objects:
            if fbx_object.Parent != inst_container:
                fbx_object.Parent = inst_container
                fbx_object.name = f"{namespace}:{fbx_object.name}"
            fbx_object.pos = transform_data[
                f"{fbx_object.name}.transform"]
            fbx_object.name = f"{namespace}:{fbx_object.name}"
            fbx_objects.append(fbx_object)
            fbx_transform = f"{fbx_object.name}.transform"
            if fbx_transform in transform_data.keys():
                fbx_object.pos = transform_data[fbx_transform] or 0
                fbx_object.scale = transform_data[
                    f"{fbx_object.name}.scale"]

        for children in node.Children:
            if rt.classOf(children) == rt.Container:
                if children.name == sub_node_name:
                    update_custom_attribute_data(
                        children, current_fbx_objects)

        with maintained_selection():
            rt.Select(node)
                    f"{fbx_object.name}.scale"] or 0

        update_custom_attribute_data(node, fbx_objects)
        lib.imprint(container["instance_node"], {
            "representation": str(representation["_id"])
        })
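
The update paths above all share one transform round-trip: capture "<name>.transform" / "<name>.scale" entries from the previously loaded nodes with object_transform_set(), delete those nodes, re-import, and only restore pos/scale when a matching key exists. A hypothetical distillation of that guard:

    from openpype.hosts.max.api.lib import object_transform_set

    def restore_transforms(old_nodes, new_nodes, namespace):
        # Illustrative only: keyed capture before deletion, guarded restore after re-import.
        transform_data = object_transform_set(old_nodes)
        for node in new_nodes:
            node.name = f"{namespace}:{node.name}"
            key = f"{node.name}.transform"
            if key in transform_data:
                node.pos = transform_data[key] or 0
                node.scale = transform_data[f"{node.name}.scale"] or 0
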
@@ -7,7 +7,7 @@ from openpype.hosts.max.api.lib import (
    object_transform_set
)
from openpype.hosts.max.api.pipeline import (
    containerise, import_custom_attribute_data,
    containerise, get_previous_loaded_object,
    update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -24,7 +24,6 @@ class MaxSceneLoader(load.LoaderPlugin):
    order = -8
    icon = "code-fork"
    color = "green"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt
@@ -37,18 +36,13 @@
        max_object_names = [obj.name for obj in max_objects]
        # implement the OP/AYON custom attributes before load
        max_container = []

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        container_name = f"{namespace}:{name}_{self.postfix}"
        container = rt.Container(name=container_name)
        import_custom_attribute_data(container, max_objects)
        max_container.append(container)
        max_container.extend(max_objects)
        for max_obj, obj_name in zip(max_objects, max_object_names):
            max_obj.name = f"{namespace}:{obj_name}"
            max_container.append(rt.getNodeByName(max_obj.name))
        return containerise(
            name, max_container, context,
            namespace, loader=self.__class__.__name__)
@@ -58,34 +52,38 @@

        path = get_representation_path(representation)
        node_name = container["instance_node"]

        node = rt.getNodeByName(node_name)
        namespace, name = get_namespace(node_name)
        sub_container_name = f"{namespace}:{name}_{self.postfix}"
        namespace, _ = get_namespace(node_name)
        # delete the old container with attribute
        # delete old duplicate
        rt.Select(node.Children)
        transform_data = object_transform_set(node.Children)
        for prev_max_obj in rt.GetCurrentSelection():
            if rt.isValidNode(prev_max_obj) and prev_max_obj.name != sub_container_name: # noqa
        # use the modifier OP data to delete the data
        node_list = get_previous_loaded_object(node)
        rt.select(node_list)
        prev_max_objects = rt.GetCurrentSelection()
        transform_data = object_transform_set(prev_max_objects)

        for prev_max_obj in prev_max_objects:
            if rt.isValidNode(prev_max_obj): # noqa
                rt.Delete(prev_max_obj)
        rt.MergeMaxFile(path, rt.Name("deleteOldDups"))
        rt.MergeMaxFile(path, quiet=True)

        current_max_objects = rt.getLastMergedNodes()

        current_max_object_names = [obj.name for obj
                                    in current_max_objects]
        sub_container = rt.getNodeByName(sub_container_name)
        update_custom_attribute_data(sub_container, current_max_objects)
        for max_object in current_max_objects:
            max_object.Parent = node

        max_objects = []
        for max_obj, obj_name in zip(current_max_objects,
                                     current_max_object_names):
            max_obj.name = f"{namespace}:{obj_name}"
            max_obj.pos = transform_data[
                f"{max_obj.name}.transform"]
            max_obj.scale = transform_data[
                f"{max_obj.name}.scale"]
            max_objects.append(max_obj)
            max_transform = f"{max_obj.name}.transform"
            if max_transform in transform_data.keys():
                max_obj.pos = transform_data[max_transform] or 0
                max_obj.scale = transform_data[
                    f"{max_obj.name}.scale"] or 0

        update_custom_attribute_data(node, max_objects)
        lib.imprint(container["instance_node"], {
            "representation": str(representation["_id"])
        })
@@ -2,8 +2,7 @@ import os
from openpype.pipeline import load, get_representation_path
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data,
    update_custom_attribute_data
    get_previous_loaded_object
)
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import (
@@ -15,12 +14,11 @@ class ModelAbcLoader(load.LoaderPlugin):
    """Loading model with the Alembic loader."""

    families = ["model"]
    label = "Load Model(Alembic)"
    label = "Load Model with Alembic"
    representations = ["abc"]
    order = -10
    icon = "code-fork"
    color = "orange"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt
@@ -52,21 +50,22 @@ class ModelAbcLoader(load.LoaderPlugin):
            self.log.error("Something failed when loading.")

        abc_container = abc_containers.pop()
        import_custom_attribute_data(
            abc_container, abc_container.Children)

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        abc_objects = []
        for abc_object in abc_container.Children:
            abc_object.name = f"{namespace}:{abc_object.name}"
            abc_objects.append(abc_object)
        # rename the abc container with namespace
        abc_container_name = f"{namespace}:{name}_{self.postfix}"
        abc_container_name = f"{namespace}:{name}"
        abc_container.name = abc_container_name
        abc_objects.append(abc_container)

        return containerise(
            name, [abc_container], context,
            name, abc_objects, context,
            namespace, loader=self.__class__.__name__
        )

@@ -75,20 +74,19 @@ class ModelAbcLoader(load.LoaderPlugin):

        path = get_representation_path(representation)
        node = rt.GetNodeByName(container["instance_node"])

        node_list = [n for n in get_previous_loaded_object(node)
                     if rt.ClassOf(n) == rt.AlembicContainer]
        with maintained_selection():
            rt.Select(node.Children)
            rt.Select(node_list)

            for alembic in rt.Selection:
                abc = rt.GetNodeByName(alembic.name)
                update_custom_attribute_data(abc, abc.Children)
                rt.Select(abc.Children)
                for abc_con in abc.Children:
                    abc_con.source = path
                    rt.Select(abc_con.Children)
                    for abc_obj in abc_con.Children:
                        abc_obj.source = path

        lib.imprint(
            container["instance_node"],
            {"representation": str(representation["_id"])},
@@ -1,7 +1,7 @@
import os
from openpype.pipeline import load, get_representation_path
from openpype.hosts.max.api.pipeline import (
    containerise, import_custom_attribute_data,
    containerise, get_previous_loaded_object,
    update_custom_attribute_data
)
from openpype.hosts.max.api import lib
@@ -21,79 +21,71 @@ class FbxModelLoader(load.LoaderPlugin):
    order = -9
    icon = "code-fork"
    color = "white"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt

        filepath = os.path.normpath(self.filepath_from_context(context))
        filepath = self.filepath_from_context(context)
        filepath = os.path.normpath(filepath)
        rt.FBXImporterSetParam("Animation", False)
        rt.FBXImporterSetParam("Cameras", False)
        rt.FBXImporterSetParam("Mode", rt.Name("create"))
        rt.FBXImporterSetParam("Preserveinstances", True)
        rt.importFile(filepath, rt.name("noPrompt"), using=rt.FBXIMP)
        rt.importFile(
            filepath, rt.name("noPrompt"), using=rt.FBXIMP)

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        container = rt.container(
            name=f"{namespace}:{name}_{self.postfix}")
        selections = rt.GetCurrentSelection()
        import_custom_attribute_data(container, selections)

        for selection in selections:
            selection.Parent = container
            selection.name = f"{namespace}:{selection.name}"

        return containerise(
            name, [container], context,
            namespace, loader=self.__class__.__name__
        )
            name, selections, context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, representation):
        from pymxs import runtime as rt

        path = get_representation_path(representation)
        node_name = container["instance_node"]
        node = rt.getNodeByName(node_name)
        namespace, name = get_namespace(node_name)
        sub_node_name = f"{namespace}:{name}_{self.postfix}"
        inst_container = rt.getNodeByName(sub_node_name)
        rt.Select(inst_container.Children)
        transform_data = object_transform_set(inst_container.Children)
        for prev_fbx_obj in rt.selection:
        if not node:
            rt.Container(name=node_name)
        namespace, _ = get_namespace(node_name)

        node_list = get_previous_loaded_object(node)
        rt.Select(node_list)
        prev_fbx_objects = rt.GetCurrentSelection()
        transform_data = object_transform_set(prev_fbx_objects)
        for prev_fbx_obj in prev_fbx_objects:
            if rt.isValidNode(prev_fbx_obj):
                rt.Delete(prev_fbx_obj)

        rt.FBXImporterSetParam("Animation", False)
        rt.FBXImporterSetParam("Cameras", False)
        rt.FBXImporterSetParam("Mode", rt.Name("merge"))
        rt.FBXImporterSetParam("AxisConversionMethod", True)
        rt.FBXImporterSetParam("Mode", rt.Name("create"))
        rt.FBXImporterSetParam("Preserveinstances", True)
        rt.importFile(path, rt.name("noPrompt"), using=rt.FBXIMP)
        current_fbx_objects = rt.GetCurrentSelection()
        fbx_objects = []
        for fbx_object in current_fbx_objects:
            if fbx_object.Parent != inst_container:
                fbx_object.Parent = inst_container
                fbx_object.name = f"{namespace}:{fbx_object.name}"
            fbx_object.pos = transform_data[
                f"{fbx_object.name}.transform"]
            fbx_object.name = f"{namespace}:{fbx_object.name}"
            fbx_objects.append(fbx_object)
            fbx_transform = f"{fbx_object.name}.transform"
            if fbx_transform in transform_data.keys():
                fbx_object.pos = transform_data[fbx_transform] or 0
                fbx_object.scale = transform_data[
                    f"{fbx_object.name}.scale"]

        for children in node.Children:
            if rt.classOf(children) == rt.Container:
                if children.name == sub_node_name:
                    update_custom_attribute_data(
                        children, current_fbx_objects)
                    f"{fbx_object.name}.scale"] or 0

        with maintained_selection():
            rt.Select(node)

        lib.imprint(
            node_name,
            {"representation": str(representation["_id"])},
        )
        update_custom_attribute_data(node, fbx_objects)
        lib.imprint(container["instance_node"], {
            "representation": str(representation["_id"])
        })

    def switch(self, container, representation):
        self.update(container, representation)
@@ -10,7 +10,7 @@ from openpype.hosts.max.api.lib import (
from openpype.hosts.max.api.lib import maintained_selection
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data,
    get_previous_loaded_object,
    update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -24,7 +24,6 @@ class ObjLoader(load.LoaderPlugin):
    order = -9
    icon = "code-fork"
    color = "white"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt
@@ -39,15 +38,12 @@
            suffix="_",
        )
        # create "missing" container for obj import
        container = rt.Container(name=f"{namespace}:{name}_{self.postfix}")
        selections = rt.GetCurrentSelection()
        import_custom_attribute_data(container, selections)
        # get current selection
        for selection in selections:
            selection.Parent = container
            selection.name = f"{namespace}:{selection.name}"
        return containerise(
            name, [container], context,
            name, selections, context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, representation):
@@ -56,26 +52,26 @@
        path = get_representation_path(representation)
        node_name = container["instance_node"]
        node = rt.getNodeByName(node_name)
        namespace, name = get_namespace(node_name)
        sub_node_name = f"{namespace}:{name}_{self.postfix}"
        inst_container = rt.getNodeByName(sub_node_name)
        rt.Select(inst_container.Children)
        transform_data = object_transform_set(inst_container.Children)
        for prev_obj in rt.selection:
        namespace, _ = get_namespace(node_name)
        node_list = get_previous_loaded_object(node)
        rt.Select(node_list)
        previous_objects = rt.GetCurrentSelection()
        transform_data = object_transform_set(previous_objects)
        for prev_obj in previous_objects:
            if rt.isValidNode(prev_obj):
                rt.Delete(prev_obj)

        rt.Execute(f'importFile @"{path}" #noPrompt using:ObjImp')
        # get current selection
        selections = rt.GetCurrentSelection()
        update_custom_attribute_data(inst_container, selections)
        for selection in selections:
            selection.Parent = inst_container
            selection.name = f"{namespace}:{selection.name}"
            selection.pos = transform_data[
                f"{selection.name}.transform"]
            selection.scale = transform_data[
                f"{selection.name}.scale"]
            selection_transform = f"{selection.name}.transform"
            if selection_transform in transform_data.keys():
                selection.pos = transform_data[selection_transform] or 0
                selection.scale = transform_data[
                    f"{selection.name}.scale"] or 0
        update_custom_attribute_data(node, selections)
        with maintained_selection():
            rt.Select(node)

@@ -1,15 +1,19 @@
import os

from pymxs import runtime as rt
from openpype.pipeline.load import LoadError
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import (
    unique_namespace,
    get_namespace,
    object_transform_set
    object_transform_set,
    get_plugins
)
from openpype.hosts.max.api.lib import maintained_selection
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data
    get_previous_loaded_object,
    update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load

@@ -23,16 +27,16 @@ class ModelUSDLoader(load.LoaderPlugin):
    order = -10
    icon = "code-fork"
    color = "orange"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt

        # asset_filepath
        plugin_info = get_plugins()
        if "usdimport.dli" not in plugin_info:
            raise LoadError("No USDImporter loaded/installed in Max..")
        filepath = os.path.normpath(self.filepath_from_context(context))
        import_options = rt.USDImporter.CreateOptions()
        base_filename = os.path.basename(filepath)
        filename, ext = os.path.splitext(base_filename)
        _, ext = os.path.splitext(base_filename)
        log_filepath = filepath.replace(ext, "txt")

        rt.LogPath = log_filepath
@@ -44,35 +48,34 @@ class ModelUSDLoader(load.LoaderPlugin):
            suffix="_",
        )
        asset = rt.GetNodeByName(name)
        import_custom_attribute_data(asset, asset.Children)
        usd_objects = []

        for usd_asset in asset.Children:
            usd_asset.name = f"{namespace}:{usd_asset.name}"
            usd_objects.append(usd_asset)

        asset_name = f"{namespace}:{name}_{self.postfix}"
        asset_name = f"{namespace}:{name}"
        asset.name = asset_name
        # need to get the correct container after renamed
        asset = rt.GetNodeByName(asset_name)

        usd_objects.append(asset)

        return containerise(
            name, [asset], context,
            name, usd_objects, context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, representation):
        from pymxs import runtime as rt

        path = get_representation_path(representation)
        node_name = container["instance_node"]
        node = rt.GetNodeByName(node_name)
        namespace, name = get_namespace(node_name)
        sub_node_name = f"{namespace}:{name}_{self.postfix}"
        transform_data = None
        for n in node.Children:
            rt.Select(n.Children)
            transform_data = object_transform_set(n.Children)
            for prev_usd_asset in rt.selection:
                if rt.isValidNode(prev_usd_asset):
                    rt.Delete(prev_usd_asset)
        node_list = get_previous_loaded_object(node)
        rt.Select(node_list)
        prev_objects = [sel for sel in rt.GetCurrentSelection()
                        if sel != rt.Container
                        and sel.name != node_name]
        transform_data = object_transform_set(prev_objects)
        for n in prev_objects:
            rt.Delete(n)

        import_options = rt.USDImporter.CreateOptions()
@@ -86,17 +89,19 @@ class ModelUSDLoader(load.LoaderPlugin):
            path, importOptions=import_options)

        asset = rt.GetNodeByName(name)
        asset.Parent = node
        import_custom_attribute_data(asset, asset.Children)
        usd_objects = []
        for children in asset.Children:
            children.name = f"{namespace}:{children.name}"
            children.pos = transform_data[
                f"{children.name}.transform"]
            children.scale = transform_data[
                f"{children.name}.scale"]

        asset.name = sub_node_name
            usd_objects.append(children)
            children_transform = f"{children.name}.transform"
            if children_transform in transform_data.keys():
                children.pos = transform_data[children_transform] or 0
                children.scale = transform_data[
                    f"{children.name}.scale"] or 0

        asset.name = f"{namespace}:{asset.name}"
        usd_objects.append(asset)
        update_custom_attribute_data(node, usd_objects)
        with maintained_selection():
            rt.Select(node)

@@ -108,7 +113,5 @@ class ModelUSDLoader(load.LoaderPlugin):
        self.update(container, representation)

    def remove(self, container):
        from pymxs import runtime as rt

        node = rt.GetNodeByName(container["instance_node"])
        rt.Delete(node)
@@ -10,8 +10,7 @@ from openpype.hosts.max.api import lib, maintained_selection
from openpype.hosts.max.api.lib import unique_namespace
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data,
    update_custom_attribute_data
    get_previous_loaded_object
)


@@ -24,7 +23,6 @@ class AbcLoader(load.LoaderPlugin):
    order = -10
    icon = "code-fork"
    color = "orange"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt
@@ -55,25 +53,25 @@

        abc_container = abc_containers.pop()
        selections = rt.GetCurrentSelection()
        import_custom_attribute_data(
            abc_container, abc_container.Children)
        for abc in selections:
            for cam_shape in abc.Children:
                cam_shape.playbackType = 2
                cam_shape.playbackType = 0

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )

        abc_objects = []
        for abc_object in abc_container.Children:
            abc_object.name = f"{namespace}:{abc_object.name}"
            abc_objects.append(abc_object)
        # rename the abc container with namespace
        abc_container_name = f"{namespace}:{name}_{self.postfix}"
        abc_container_name = f"{namespace}:{name}"
        abc_container.name = abc_container_name
        abc_objects.append(abc_container)

        return containerise(
            name, [abc_container], context,
            name, abc_objects, context,
            namespace, loader=self.__class__.__name__
        )

@@ -82,20 +80,19 @@

        path = get_representation_path(representation)
        node = rt.GetNodeByName(container["instance_node"])

        abc_container = [n for n in get_previous_loaded_object(node)
                         if rt.ClassOf(n) == rt.AlembicContainer]
        with maintained_selection():
            rt.Select(node.Children)
            rt.Select(abc_container)

            for alembic in rt.Selection:
                abc = rt.GetNodeByName(alembic.name)
                update_custom_attribute_data(abc, abc.Children)
                rt.Select(abc.Children)
                for abc_con in abc.Children:
                    abc_con.source = path
                    rt.Select(abc_con.Children)
                    for abc_obj in abc_con.Children:
                        abc_obj.source = path

        lib.imprint(
            container["instance_node"],
            {"representation": str(representation["_id"])},
openpype/hosts/max/plugins/load/load_pointcache_ornatrix.py (new file, 108 lines)
@@ -0,0 +1,108 @@
import os
from openpype.pipeline import load, get_representation_path
from openpype.pipeline.load import LoadError
from openpype.hosts.max.api.pipeline import (
    containerise,
    get_previous_loaded_object,
    update_custom_attribute_data
)

from openpype.hosts.max.api.lib import (
    unique_namespace,
    get_namespace,
    object_transform_set,
    get_plugins
)
from openpype.hosts.max.api import lib
from pymxs import runtime as rt


class OxAbcLoader(load.LoaderPlugin):
    """Ornatrix Alembic loader."""

    families = ["camera", "animation", "pointcache"]
    label = "Load Alembic with Ornatrix"
    representations = ["abc"]
    order = -10
    icon = "code-fork"
    color = "orange"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        plugin_list = get_plugins()
        if "ephere.plugins.autodesk.max.ornatrix.dlo" not in plugin_list:
            raise LoadError("Ornatrix plugin not "
                            "found/installed in Max yet..")

        file_path = os.path.normpath(self.filepath_from_context(context))
        rt.AlembicImport.ImportToRoot = True
        rt.AlembicImport.CustomAttributes = True
        rt.importFile(
            file_path, rt.name("noPrompt"),
            using=rt.Ornatrix_Alembic_Importer)

        scene_object = []
        for obj in rt.rootNode.Children:
            obj_type = rt.ClassOf(obj)
            if str(obj_type).startswith("Ox_"):
                scene_object.append(obj)

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        abc_container = []
        for abc in scene_object:
            abc.name = f"{namespace}:{abc.name}"
            abc_container.append(abc)

        return containerise(
            name, abc_container, context,
            namespace, loader=self.__class__.__name__
        )

    def update(self, container, representation):
        path = get_representation_path(representation)
        node_name = container["instance_node"]
        namespace, name = get_namespace(node_name)
        node = rt.getNodeByName(node_name)
        node_list = get_previous_loaded_object(node)
        rt.Select(node_list)
        selections = rt.getCurrentSelection()
        transform_data = object_transform_set(selections)
        for prev_obj in selections:
            if rt.isValidNode(prev_obj):
                rt.Delete(prev_obj)

        rt.AlembicImport.ImportToRoot = False
        rt.AlembicImport.CustomAttributes = True
        rt.importFile(
            path, rt.name("noPrompt"),
            using=rt.Ornatrix_Alembic_Importer)

        scene_object = []
        for obj in rt.rootNode.Children:
            obj_type = rt.ClassOf(obj)
            if str(obj_type).startswith("Ox_"):
                scene_object.append(obj)
        ox_abc_objects = []
        for abc in scene_object:
            abc.Parent = container
            abc.name = f"{namespace}:{abc.name}"
            ox_abc_objects.append(abc)
            ox_transform = f"{abc.name}.transform"
            if ox_transform in transform_data.keys():
                abc.pos = transform_data[ox_transform] or 0
                abc.scale = transform_data[f"{abc.name}.scale"] or 0
        update_custom_attribute_data(node, ox_abc_objects)
        lib.imprint(
            container["instance_node"],
            {"representation": str(representation["_id"])},
        )

    def switch(self, container, representation):
        self.update(container, representation)

    def remove(self, container):
        node = rt.GetNodeByName(container["instance_node"])
        rt.Delete(node)
@@ -2,11 +2,12 @@ import os

from openpype.hosts.max.api import lib, maintained_selection
from openpype.hosts.max.api.lib import (
    unique_namespace, get_namespace
    unique_namespace,

)
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data,
    get_previous_loaded_object,
    update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -25,7 +26,6 @@ class PointCloudLoader(load.LoaderPlugin):
    def load(self, context, name=None, namespace=None, data=None):
        """load point cloud by tyCache"""
        from pymxs import runtime as rt

        filepath = os.path.normpath(self.filepath_from_context(context))
        obj = rt.tyCache()
        obj.filename = filepath
@@ -34,14 +34,10 @@
            name + "_",
            suffix="_",
        )
        prt_container = rt.Container(
            name=f"{namespace}:{name}_{self.postfix}")
        import_custom_attribute_data(prt_container, [obj])
        obj.Parent = prt_container
        obj.name = f"{namespace}:{obj.name}"

        return containerise(
            name, [prt_container], context,
            name, [obj], context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, representation):
@@ -50,14 +46,12 @@

        path = get_representation_path(representation)
        node = rt.GetNodeByName(container["instance_node"])
        namespace, name = get_namespace(container["instance_node"])
        sub_node_name = f"{namespace}:{name}_{self.postfix}"
        inst_container = rt.getNodeByName(sub_node_name)
        node_list = get_previous_loaded_object(node)
        update_custom_attribute_data(
            inst_container, inst_container.Children)
            node, node_list)
        with maintained_selection():
            rt.Select(node.Children)
            for prt in inst_container.Children:
            rt.Select(node_list)
            for prt in rt.Selection:
                prt.filename = path
        lib.imprint(container["instance_node"], {
            "representation": str(representation["_id"])
@@ -5,14 +5,16 @@ from openpype.pipeline import (
    load,
    get_representation_path
)
from openpype.pipeline.load import LoadError
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data,
    update_custom_attribute_data
    update_custom_attribute_data,
    get_previous_loaded_object
)
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import (
    unique_namespace, get_namespace
    unique_namespace,
    get_plugins
)


@@ -25,11 +27,12 @@ class RedshiftProxyLoader(load.LoaderPlugin):
    order = -9
    icon = "code-fork"
    color = "white"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt

        plugin_info = get_plugins()
        if "redshift4max.dlr" not in plugin_info:
            raise LoadError("Redshift not loaded/installed in Max..")
        filepath = self.filepath_from_context(context)
        rs_proxy = rt.RedshiftProxy()
        rs_proxy.file = filepath
@@ -42,27 +45,22 @@
            name + "_",
            suffix="_",
        )
        container = rt.Container(
            name=f"{namespace}:{name}_{self.postfix}")
        rs_proxy.Parent = container
        rs_proxy.name = f"{namespace}:{rs_proxy.name}"
        import_custom_attribute_data(container, [rs_proxy])

        return containerise(
            name, [container], context,
            name, [rs_proxy], context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, representation):
        from pymxs import runtime as rt

        path = get_representation_path(representation)
        namespace, name = get_namespace(container["instance_node"])
        sub_node_name = f"{namespace}:{name}_{self.postfix}"
        inst_container = rt.getNodeByName(sub_node_name)

        node = rt.getNodeByName(container["instance_node"])
        node_list = get_previous_loaded_object(node)
        rt.Select(node_list)
        update_custom_attribute_data(
            inst_container, inst_container.Children)
        for proxy in inst_container.Children:
            node, rt.Selection)
        for proxy in rt.Selection:
            proxy.file = path

        lib.imprint(container["instance_node"], {
@@ -3,6 +3,7 @@ import pyblish.api
from openpype.pipeline import publish, OptionalPyblishPluginMixin
from pymxs import runtime as rt
from openpype.hosts.max.api import maintained_selection
from openpype.pipeline.publish import KnownPublishError


class ExtractModelObj(publish.Extractor, OptionalPyblishPluginMixin):
@@ -27,6 +28,7 @@ class ExtractModelObj(publish.Extractor, OptionalPyblishPluginMixin):
        filepath = os.path.join(stagingdir, filename)
        self.log.info("Writing OBJ '%s' to '%s'" % (filepath, stagingdir))

        self.log.info("Performing Extraction ...")
        with maintained_selection():
            # select and export
            node_list = instance.data["members"]
@@ -38,7 +40,10 @@ class ExtractModelObj(publish.Extractor, OptionalPyblishPluginMixin):
                using=rt.ObjExp,
            )

        self.log.info("Performing Extraction ...")
        if not os.path.exists(filepath):
            raise KnownPublishError(
                "File {} wasn't produced by 3ds max, please check the logs.")

        if "representations" not in instance.data:
            instance.data["representations"] = []

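
One note on the new guard: the message passed to KnownPublishError contains a "{}" placeholder but no format argument, so the file path never appears in the error. A filled-in variant would look roughly like this (illustrative, not the committed code):

    import os
    from openpype.pipeline.publish import KnownPublishError

    def check_obj_export(filepath):
        # Fail the publish early if 3ds Max did not write the expected OBJ file.
        if not os.path.exists(filepath):
            raise KnownPublishError(
                "File {} wasn't produced by 3ds max, "
                "please check the logs.".format(filepath))
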
@@ -1,9 +1,13 @@
# -*- coding: utf-8 -*-
"""Validator for USD plugin."""
from openpype.pipeline import PublishValidationError
from pyblish.api import InstancePlugin, ValidatorOrder
from pymxs import runtime as rt

from openpype.pipeline import (
    OptionalPyblishPluginMixin,
    PublishValidationError
)


def get_plugins() -> list:
    """Get plugin list from 3ds max."""
@@ -17,17 +21,25 @@ def get_plugins() -> list:
    return plugin_info_list


class ValidateUSDPlugin(InstancePlugin):
class ValidateUSDPlugin(OptionalPyblishPluginMixin,
                        InstancePlugin):
    """Validates if USD plugin is installed or loaded in 3ds max."""

    order = ValidatorOrder - 0.01
    families = ["model"]
    hosts = ["max"]
    label = "USD Plugin"
    label = "Validate USD Plugin loaded"
    optional = True

    def process(self, instance):
        """Plugin entry point."""

        for sc in ValidateUSDPlugin.__subclasses__():
            self.log.info(sc)

        if not self.is_active(instance.data):
            return

        plugin_info = get_plugins()
        usd_import = "usdimport.dli"
        if usd_import not in plugin_info:
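
The hunk ends at the membership test; the validator presumably raises PublishValidationError when the USD importer DLL is missing. An illustrative completion of that check (not the verbatim file contents):

    from openpype.pipeline import PublishValidationError

    def validate_usd_plugin_loaded(plugin_info):
        # Hypothetical helper mirroring ValidateUSDPlugin.process().
        usd_import = "usdimport.dli"
        if usd_import not in plugin_info:
            raise PublishValidationError(
                f"USD importer plugin ({usd_import}) not loaded in 3ds Max.")
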