Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)
Merge pull request #770 from ynput/enhancement/remove-max-addon-again
Chore: Remove 3dsMax addon (again)
This commit is contained in:
commit cb9f6a79ce
12 changed files with 0 additions and 1888 deletions
@@ -1,589 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
"""Library of functions useful for 3dsmax pipeline."""
|
||||
import contextlib
|
||||
import logging
|
||||
import json
|
||||
from typing import Any, Dict, Union
|
||||
|
||||
import six
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
get_current_project_name,
|
||||
colorspace
|
||||
)
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.pipeline.context_tools import (
|
||||
get_current_task_entity
|
||||
)
|
||||
from ayon_core.style import load_stylesheet
|
||||
from pymxs import runtime as rt
|
||||
|
||||
|
||||
JSON_PREFIX = "JSON::"
|
||||
log = logging.getLogger("ayon_max")
|
||||
|
||||
|
||||
def get_main_window():
|
||||
"""Acquire Max's main window"""
|
||||
from qtpy import QtWidgets
|
||||
top_widgets = QtWidgets.QApplication.topLevelWidgets()
|
||||
name = "QmaxApplicationWindow"
|
||||
for widget in top_widgets:
|
||||
if (
|
||||
widget.inherits("QMainWindow")
|
||||
and widget.metaObject().className() == name
|
||||
):
|
||||
return widget
|
||||
raise RuntimeError('Could not find 3dsMax main window.')
|
||||
|
||||
|
||||
def imprint(node_name: str, data: dict) -> bool:
|
||||
node = rt.GetNodeByName(node_name)
|
||||
if not node:
|
||||
return False
|
||||
|
||||
for k, v in data.items():
|
||||
if isinstance(v, (dict, list)):
|
||||
rt.SetUserProp(node, k, f"{JSON_PREFIX}{json.dumps(v)}")
|
||||
else:
|
||||
rt.SetUserProp(node, k, v)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def lsattr(
|
||||
attr: str,
|
||||
value: Union[str, None] = None,
|
||||
root: Union[str, None] = None) -> list:
|
||||
"""List nodes having attribute with specified value.
|
||||
|
||||
Args:
|
||||
attr (str): Attribute name to match.
|
||||
value (str, Optional): Value to match. If omitted, all nodes
with the specified attribute are returned regardless of value.
|
||||
root (str, Optional): Root node name. If omitted, scene root is used.
|
||||
|
||||
Returns:
|
||||
list of nodes.
|
||||
"""
|
||||
root = rt.RootNode if root is None else rt.GetNodeByName(root)
|
||||
|
||||
def output_node(node, nodes):
|
||||
nodes.append(node)
|
||||
for child in node.Children:
|
||||
output_node(child, nodes)
|
||||
|
||||
nodes = []
|
||||
output_node(root, nodes)
|
||||
return [
|
||||
n for n in nodes
|
||||
if rt.GetUserProp(n, attr) == value
|
||||
] if value else [
|
||||
n for n in nodes
|
||||
if rt.GetUserProp(n, attr)
|
||||
]
|
||||
|
||||
|
||||
def read(container) -> dict:
|
||||
data = {}
|
||||
props = rt.GetUserPropBuffer(container)
|
||||
# this shouldn't happen but let's guard against it anyway
|
||||
if not props:
|
||||
return data
|
||||
|
||||
for line in props.split("\r\n"):
|
||||
try:
|
||||
key, value = line.split("=")
|
||||
except ValueError:
|
||||
# if the line cannot be split we can't really parse it
|
||||
continue
|
||||
|
||||
value = value.strip()
|
||||
if isinstance(value.strip(), six.string_types) and \
|
||||
value.startswith(JSON_PREFIX):
|
||||
with contextlib.suppress(json.JSONDecodeError):
|
||||
value = json.loads(value[len(JSON_PREFIX):])
|
||||
|
||||
# default value behavior
|
||||
# convert maxscript boolean values
|
||||
if value == "true":
|
||||
value = True
|
||||
elif value == "false":
|
||||
value = False
|
||||
|
||||
data[key.strip()] = value
|
||||
|
||||
data["instance_node"] = container.Name
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def maintained_selection():
|
||||
previous_selection = rt.GetCurrentSelection()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if previous_selection:
|
||||
rt.Select(previous_selection)
|
||||
else:
|
||||
rt.Select()
|
||||
|
||||
|
||||
def get_all_children(parent, node_type=None):
|
||||
"""Handy function to get all the children of a given node
|
||||
|
||||
Args:
|
||||
parent (3dsmax Node1): Node to get all children of.
|
||||
node_type (None, runtime.class): give class to check for
|
||||
e.g. rt.FFDBox/rt.GeometryClass etc.
|
||||
|
||||
Returns:
|
||||
list: list of all children of the parent node
|
||||
"""
|
||||
def list_children(node):
|
||||
children = []
|
||||
for c in node.Children:
|
||||
children.append(c)
|
||||
children = children + list_children(c)
|
||||
return children
|
||||
child_list = list_children(parent)
|
||||
|
||||
return ([x for x in child_list if rt.SuperClassOf(x) == node_type]
|
||||
if node_type else child_list)
|
||||
|
||||
|
||||
def get_current_renderer():
|
||||
"""
|
||||
Notes:
|
||||
Get current renderer for Max
|
||||
|
||||
Returns:
|
||||
"{Current Renderer}:{Current Renderer}"
|
||||
e.g. "Redshift_Renderer:Redshift_Renderer"
|
||||
"""
|
||||
return rt.renderers.production
|
||||
|
||||
|
||||
def get_default_render_folder(project_setting=None):
|
||||
return (project_setting["max"]
|
||||
["RenderSettings"]
|
||||
["default_render_image_folder"])
|
||||
|
||||
|
||||
def set_render_frame_range(start_frame, end_frame):
|
||||
"""
|
||||
Note:
|
||||
Frame range can be specified in different types. Possible values are:
|
||||
* `1` - Single frame.
|
||||
* `2` - Active time segment ( animationRange ).
|
||||
* `3` - User specified Range.
|
||||
* `4` - User specified Frame pickup string (for example `1,3,5-12`).
|
||||
|
||||
Todo:
|
||||
Current type is hard-coded, there should be a custom setting for this.
|
||||
"""
|
||||
rt.rendTimeType = 3
|
||||
if start_frame is not None and end_frame is not None:
|
||||
rt.rendStart = int(start_frame)
|
||||
rt.rendEnd = int(end_frame)
|
||||
|
||||
|
||||
def get_multipass_setting(project_setting=None):
|
||||
return (project_setting["max"]
|
||||
["RenderSettings"]
|
||||
["multipass"])
|
||||
|
||||
|
||||
def set_scene_resolution(width: int, height: int):
|
||||
"""Set the render resolution
|
||||
|
||||
Args:
|
||||
width(int): value of the width
|
||||
height(int): value of the height
|
||||
|
||||
Returns:
|
||||
None
|
||||
|
||||
"""
|
||||
# make sure the render dialog is closed
|
||||
# for the update of resolution
|
||||
# Changing the Render Setup dialog settings should be done
|
||||
# with the actual Render Setup dialog in a closed state.
|
||||
if rt.renderSceneDialog.isOpen():
|
||||
rt.renderSceneDialog.close()
|
||||
|
||||
rt.renderWidth = width
|
||||
rt.renderHeight = height
|
||||
|
||||
|
||||
def reset_scene_resolution():
|
||||
"""Apply the scene resolution from the project definition
|
||||
|
||||
scene resolution can be overwritten by a folder if the folder.attrib
|
||||
contains any information regarding scene resolution.
|
||||
"""
|
||||
task_attributes = get_current_task_entity(fields={"attrib"})["attrib"]
|
||||
width = int(task_attributes["resolutionWidth"])
|
||||
height = int(task_attributes["resolutionHeight"])
|
||||
|
||||
set_scene_resolution(width, height)
|
||||
|
||||
|
||||
def get_frame_range(task_entity=None) -> Union[Dict[str, Any], None]:
|
||||
"""Get the current task frame range and handles
|
||||
|
||||
Args:
|
||||
task_entity (dict): Task Entity.
|
||||
|
||||
Returns:
|
||||
dict: with frame start, frame end, handle start, handle end.
|
||||
"""
|
||||
# Set frame start/end
|
||||
if task_entity is None:
|
||||
task_entity = get_current_task_entity(fields={"attrib"})
|
||||
task_attributes = task_entity["attrib"]
|
||||
frame_start = int(task_attributes["frameStart"])
|
||||
frame_end = int(task_attributes["frameEnd"])
|
||||
handle_start = int(task_attributes["handleStart"])
|
||||
handle_end = int(task_attributes["handleEnd"])
|
||||
frame_start_handle = frame_start - handle_start
|
||||
frame_end_handle = frame_end + handle_end
|
||||
|
||||
return {
|
||||
"frameStart": frame_start,
|
||||
"frameEnd": frame_end,
|
||||
"handleStart": handle_start,
|
||||
"handleEnd": handle_end,
|
||||
"frameStartHandle": frame_start_handle,
|
||||
"frameEndHandle": frame_end_handle,
|
||||
}
|
||||
|
||||
|
||||
def reset_frame_range(fps: bool = True):
|
||||
"""Set frame range to current folder.
|
||||
This is part of 3dsmax documentation:
|
||||
|
||||
animationRange: A System Global variable which lets you get and
|
||||
set an Interval value that defines the start and end frames
|
||||
of the Active Time Segment.
|
||||
frameRate: A System Global variable which lets you get
|
||||
and set an Integer value that defines the current
|
||||
scene frame rate in frames-per-second.
|
||||
"""
|
||||
if fps:
|
||||
rt.frameRate = float(get_fps_for_current_context())
|
||||
|
||||
frame_range = get_frame_range()
|
||||
|
||||
set_timeline(
|
||||
frame_range["frameStartHandle"], frame_range["frameEndHandle"])
|
||||
set_render_frame_range(
|
||||
frame_range["frameStartHandle"], frame_range["frameEndHandle"])
|
||||
|
||||
|
||||
def get_fps_for_current_context():
|
||||
"""Get fps that should be set for current context.
|
||||
|
||||
Todos:
|
||||
- Skip project value.
|
||||
- Merge logic with 'get_frame_range' and 'reset_scene_resolution' ->
|
||||
all the values in the functions can be collected at one place as
|
||||
they have same requirements.
|
||||
|
||||
Returns:
|
||||
Union[int, float]: FPS value.
|
||||
"""
|
||||
task_entity = get_current_task_entity(fields={"attrib"})
|
||||
return task_entity["attrib"]["fps"]
|
||||
|
||||
|
||||
def reset_unit_scale():
|
||||
"""Apply the unit scale setting to 3dsMax
|
||||
"""
|
||||
project_name = get_current_project_name()
|
||||
settings = get_project_settings(project_name).get("max")
|
||||
scene_scale = settings.get("unit_scale_settings",
|
||||
{}).get("scene_unit_scale")
|
||||
if scene_scale:
|
||||
rt.units.DisplayType = rt.Name("Metric")
|
||||
rt.units.MetricType = rt.Name(scene_scale)
|
||||
else:
|
||||
rt.units.DisplayType = rt.Name("Generic")
|
||||
|
||||
|
||||
def convert_unit_scale():
|
||||
"""Convert system unit scale in 3dsMax
|
||||
for fbx export
|
||||
|
||||
Returns:
|
||||
str: unit scale
|
||||
"""
|
||||
unit_scale_dict = {
|
||||
"millimeters": "mm",
|
||||
"centimeters": "cm",
|
||||
"meters": "m",
|
||||
"kilometers": "km"
|
||||
}
|
||||
current_unit_scale = rt.Execute("units.MetricType as string")
|
||||
return unit_scale_dict[current_unit_scale]
|
||||
|
||||
|
||||
def set_context_setting():
|
||||
"""Apply the project settings from the project definition
|
||||
|
||||
Settings can be overwritten by a folder if the folder.attrib contains
|
||||
any information regarding those settings.
|
||||
|
||||
Examples of settings:
|
||||
frame range
|
||||
resolution
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
reset_scene_resolution()
|
||||
reset_frame_range()
|
||||
reset_colorspace()
|
||||
reset_unit_scale()
|
||||
|
||||
|
||||
def get_max_version():
|
||||
"""
|
||||
Get the Max version date for Deadline.

Returns:
max_info[7], the Max version date,
e.g. 2023 in #(25000, 62, 0, 25, 0, 0, 997, 2023, "")
|
||||
"""
|
||||
max_info = rt.MaxVersion()
|
||||
return max_info[7]
|
||||
|
||||
|
||||
def is_headless():
|
||||
"""Check if 3dsMax runs in batch mode.
|
||||
If it returns True, it runs in 3dsbatch.exe
|
||||
If it returns False, it runs in 3dsmax.exe
|
||||
"""
|
||||
return rt.maxops.isInNonInteractiveMode()
|
||||
|
||||
|
||||
def set_timeline(frameStart, frameEnd):
|
||||
"""Set frame range for timeline editor in Max
|
||||
"""
|
||||
rt.animationRange = rt.interval(int(frameStart), int(frameEnd))
|
||||
return rt.animationRange
|
||||
|
||||
|
||||
def reset_colorspace():
|
||||
"""OCIO Configuration
|
||||
Supported in 3dsMax 2024+.
|
||||
|
||||
"""
|
||||
if int(get_max_version()) < 2024:
|
||||
return
|
||||
|
||||
max_config_data = colorspace.get_current_context_imageio_config_preset()
|
||||
if max_config_data:
|
||||
ocio_config_path = max_config_data["path"]
|
||||
colorspace_mgr = rt.ColorPipelineMgr
|
||||
colorspace_mgr.Mode = rt.Name("OCIO_Custom")
|
||||
colorspace_mgr.OCIOConfigPath = ocio_config_path
|
||||
|
||||
|
||||
def check_colorspace():
|
||||
parent = get_main_window()
|
||||
if parent is None:
|
||||
log.info("Skipping outdated pop-up "
|
||||
"because Max main window can't be found.")
|
||||
if int(get_max_version()) >= 2024:
|
||||
color_mgr = rt.ColorPipelineMgr
|
||||
max_config_data = colorspace.get_current_context_imageio_config_preset()
|
||||
if max_config_data and color_mgr.Mode != rt.Name("OCIO_Custom"):
|
||||
if not is_headless():
|
||||
from ayon_core.tools.utils import SimplePopup
|
||||
dialog = SimplePopup(parent=parent)
|
||||
dialog.setWindowTitle("Warning: Wrong OCIO Mode")
|
||||
dialog.set_message("This scene has wrong OCIO "
|
||||
"Mode setting.")
|
||||
dialog.set_button_text("Fix")
|
||||
dialog.setStyleSheet(load_stylesheet())
|
||||
dialog.on_clicked.connect(reset_colorspace)
|
||||
dialog.show()
|
||||
|
||||
def unique_namespace(namespace, format="%02d",
|
||||
prefix="", suffix="", con_suffix="CON"):
|
||||
"""Return unique namespace
|
||||
|
||||
Arguments:
|
||||
namespace (str): Name of namespace to consider
|
||||
format (str, optional): Formatting of the given iteration number
|
||||
suffix (str, optional): Only consider namespaces with this suffix.
|
||||
con_suffix: max only, for finding the name of the master container
|
||||
|
||||
>>> unique_namespace("bar")
|
||||
# bar01
|
||||
>>> unique_namespace(":hello")
|
||||
# :hello01
|
||||
>>> unique_namespace("bar:", suffix="_NS")
|
||||
# bar01_NS:
|
||||
|
||||
"""
|
||||
|
||||
def current_namespace():
|
||||
current = namespace
|
||||
# When inside a namespace Max adds no trailing :
|
||||
if not current.endswith(":"):
|
||||
current += ":"
|
||||
return current
|
||||
|
||||
# Always check against the absolute namespace root
|
||||
# There's no clash with :x if we're defining namespace :a:x
|
||||
ROOT = ":" if namespace.startswith(":") else current_namespace()
|
||||
|
||||
# Strip trailing `:` tokens since we might want to add a suffix
|
||||
start = ":" if namespace.startswith(":") else ""
|
||||
end = ":" if namespace.endswith(":") else ""
|
||||
namespace = namespace.strip(":")
|
||||
if ":" in namespace:
|
||||
# Split off any nesting that we don't uniqify anyway.
|
||||
parents, namespace = namespace.rsplit(":", 1)
|
||||
start += parents + ":"
|
||||
ROOT += start
|
||||
|
||||
iteration = 1
|
||||
increment_version = True
|
||||
while increment_version:
|
||||
nr_namespace = namespace + format % iteration
|
||||
unique = prefix + nr_namespace + suffix
|
||||
container_name = f"{unique}:{namespace}{con_suffix}"
|
||||
if not rt.getNodeByName(container_name):
|
||||
name_space = start + unique + end
|
||||
increment_version = False
|
||||
return name_space
|
||||
else:
|
||||
increment_version = True
|
||||
iteration += 1
|
||||
|
||||
|
||||
def get_namespace(container_name):
|
||||
"""Get the namespace and name of the sub-container
|
||||
|
||||
Args:
|
||||
container_name (str): the name of master container
|
||||
|
||||
Raises:
|
||||
RuntimeError: when there is no master container found
|
||||
|
||||
Returns:
|
||||
namespace (str): namespace of the sub-container
|
||||
name (str): name of the sub-container
|
||||
"""
|
||||
node = rt.getNodeByName(container_name)
|
||||
if not node:
|
||||
raise RuntimeError("Master Container Not Found..")
|
||||
name = rt.getUserProp(node, "name")
|
||||
namespace = rt.getUserProp(node, "namespace")
|
||||
return namespace, name
|
||||
|
||||
|
||||
def object_transform_set(container_children):
|
||||
"""A function which allows to store the transform of
|
||||
previous loaded object(s)
|
||||
Args:
|
||||
container_children(list): A list of nodes
|
||||
|
||||
Returns:
|
||||
transform_set (dict): A dict with all transform data of
|
||||
the previously loaded object(s)
|
||||
"""
|
||||
transform_set = {}
|
||||
for node in container_children:
|
||||
name = f"{node}.transform"
|
||||
transform_set[name] = node.pos
|
||||
name = f"{node}.scale"
|
||||
transform_set[name] = node.scale
|
||||
return transform_set
|
||||
|
||||
|
||||
def get_plugins() -> list:
|
||||
"""Get all loaded plugins in 3dsMax
|
||||
|
||||
Returns:
|
||||
plugin_info_list: a list of loaded plugins
|
||||
"""
|
||||
manager = rt.PluginManager
|
||||
count = manager.pluginDllCount
|
||||
plugin_info_list = []
|
||||
for p in range(1, count + 1):
|
||||
plugin_info = manager.pluginDllName(p)
|
||||
plugin_info_list.append(plugin_info)
|
||||
|
||||
return plugin_info_list
|
||||
|
||||
|
||||
def update_modifier_node_names(event, node):
|
||||
"""Update the name of the nodes after renaming
|
||||
|
||||
Args:
|
||||
event (pymxs.MXSWrapperBase): Event Name (
|
||||
Mandatory argument for rt.NodeEventCallback)
|
||||
node (list): Event Number (
|
||||
Mandatory argument for rt.NodeEventCallback)
|
||||
|
||||
"""
|
||||
containers = [
|
||||
obj
|
||||
for obj in rt.Objects
|
||||
if (
|
||||
rt.ClassOf(obj) == rt.Container
|
||||
and rt.getUserProp(obj, "id") == "pyblish.avalon.instance"
|
||||
and rt.getUserProp(obj, "productType") not in {
|
||||
"workfile", "tyflow"
|
||||
}
|
||||
)
|
||||
]
|
||||
if not containers:
|
||||
return
|
||||
for container in containers:
|
||||
ayon_data = container.modifiers[0].openPypeData
|
||||
updated_node_names = [str(node.node) for node
|
||||
in ayon_data.all_handles]
|
||||
rt.setProperty(ayon_data, "sel_list", updated_node_names)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def render_resolution(width, height):
|
||||
"""Set render resolution option during context
|
||||
|
||||
Args:
|
||||
width (int): render width
|
||||
height (int): render height
|
||||
"""
|
||||
current_renderWidth = rt.renderWidth
|
||||
current_renderHeight = rt.renderHeight
|
||||
try:
|
||||
rt.renderWidth = width
|
||||
rt.renderHeight = height
|
||||
yield
|
||||
finally:
|
||||
rt.renderWidth = current_renderWidth
|
||||
rt.renderHeight = current_renderHeight
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def suspended_refresh():
|
||||
"""Suspended refresh for scene and modify panel redraw.
|
||||
"""
|
||||
if is_headless():
|
||||
yield
|
||||
return
|
||||
rt.disableSceneRedraw()
|
||||
rt.suspendEditing()
|
||||
try:
|
||||
yield
|
||||
|
||||
finally:
|
||||
rt.enableSceneRedraw()
|
||||
rt.resumeEditing()
|
||||
|
|
@@ -1,298 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
"""3dsmax specific AYON/Pyblish plugin definitions."""
|
||||
from abc import ABCMeta
|
||||
|
||||
import six
|
||||
from pymxs import runtime as rt
|
||||
|
||||
from ayon_core.lib import BoolDef
|
||||
from ayon_core.pipeline import (
|
||||
CreatedInstance,
|
||||
Creator,
|
||||
CreatorError,
|
||||
AYON_INSTANCE_ID,
|
||||
AVALON_INSTANCE_ID,
|
||||
)
|
||||
|
||||
from .lib import imprint, lsattr, read
|
||||
|
||||
MS_CUSTOM_ATTRIB = """attributes "openPypeData"
|
||||
(
|
||||
parameters main rollout:OPparams
|
||||
(
|
||||
all_handles type:#maxObjectTab tabSize:0 tabSizeVariable:on
|
||||
sel_list type:#stringTab tabSize:0 tabSizeVariable:on
|
||||
)
|
||||
|
||||
rollout OPparams "OP Parameters"
|
||||
(
|
||||
listbox list_node "Node References" items:#()
|
||||
button button_add "Add to Container"
|
||||
button button_del "Delete from Container"
|
||||
|
||||
fn node_to_name the_node =
|
||||
(
|
||||
handle = the_node.handle
|
||||
obj_name = the_node.name
|
||||
handle_name = obj_name + "<" + handle as string + ">"
|
||||
return handle_name
|
||||
)
|
||||
fn nodes_to_add node =
|
||||
(
|
||||
sceneObjs = #()
|
||||
if classOf node == Container do return false
|
||||
n = node as string
|
||||
for obj in Objects do
|
||||
(
|
||||
tmp_obj = obj as string
|
||||
append sceneObjs tmp_obj
|
||||
)
|
||||
if sel_list != undefined do
|
||||
(
|
||||
for obj in sel_list do
|
||||
(
|
||||
idx = findItem sceneObjs obj
|
||||
if idx do
|
||||
(
|
||||
deleteItem sceneObjs idx
|
||||
)
|
||||
)
|
||||
)
|
||||
idx = findItem sceneObjs n
|
||||
if idx then return true else false
|
||||
)
|
||||
|
||||
fn nodes_to_rmv node =
|
||||
(
|
||||
n = node as string
|
||||
idx = findItem sel_list n
|
||||
if idx then return true else false
|
||||
)
|
||||
|
||||
on button_add pressed do
|
||||
(
|
||||
current_sel = selectByName title:"Select Objects to add to
|
||||
the Container" buttontext:"Add" filter:nodes_to_add
|
||||
if current_sel == undefined then return False
|
||||
temp_arr = #()
|
||||
i_node_arr = #()
|
||||
for c in current_sel do
|
||||
(
|
||||
handle_name = node_to_name c
|
||||
node_ref = NodeTransformMonitor node:c
|
||||
idx = finditem list_node.items handle_name
|
||||
if idx do (
|
||||
continue
|
||||
)
|
||||
name = c as string
|
||||
append temp_arr handle_name
|
||||
append i_node_arr node_ref
|
||||
append sel_list name
|
||||
)
|
||||
all_handles = join i_node_arr all_handles
|
||||
list_node.items = join temp_arr list_node.items
|
||||
)
|
||||
|
||||
on button_del pressed do
|
||||
(
|
||||
current_sel = selectByName title:"Select Objects to remove
|
||||
from the Container" buttontext:"Remove" filter: nodes_to_rmv
|
||||
if current_sel == undefined or current_sel.count == 0 then
|
||||
(
|
||||
return False
|
||||
)
|
||||
temp_arr = #()
|
||||
i_node_arr = #()
|
||||
new_i_node_arr = #()
|
||||
new_temp_arr = #()
|
||||
|
||||
for c in current_sel do
|
||||
(
|
||||
node_ref = NodeTransformMonitor node:c as string
|
||||
handle_name = node_to_name c
|
||||
n = c as string
|
||||
tmp_all_handles = #()
|
||||
for i in all_handles do
|
||||
(
|
||||
tmp = i as string
|
||||
append tmp_all_handles tmp
|
||||
)
|
||||
idx = finditem tmp_all_handles node_ref
|
||||
if idx do
|
||||
(
|
||||
new_i_node_arr = DeleteItem all_handles idx
|
||||
|
||||
)
|
||||
idx = finditem list_node.items handle_name
|
||||
if idx do
|
||||
(
|
||||
new_temp_arr = DeleteItem list_node.items idx
|
||||
)
|
||||
idx = finditem sel_list n
|
||||
if idx do
|
||||
(
|
||||
sel_list = DeleteItem sel_list idx
|
||||
)
|
||||
)
|
||||
all_handles = join i_node_arr new_i_node_arr
|
||||
list_node.items = join temp_arr new_temp_arr
|
||||
)
|
||||
|
||||
on OPparams open do
|
||||
(
|
||||
if all_handles.count != 0 then
|
||||
(
|
||||
temp_arr = #()
|
||||
for x in all_handles do
|
||||
(
|
||||
if x.node == undefined do continue
|
||||
handle_name = node_to_name x.node
|
||||
append temp_arr handle_name
|
||||
)
|
||||
list_node.items = temp_arr
|
||||
)
|
||||
)
|
||||
)
|
||||
)"""
|
||||
|
||||
|
||||
class MaxCreatorBase(object):
|
||||
|
||||
@staticmethod
|
||||
def cache_instance_data(shared_data):
|
||||
if shared_data.get("max_cached_instances") is not None:
|
||||
return shared_data
|
||||
|
||||
shared_data["max_cached_instances"] = {}
|
||||
|
||||
cached_instances = []
|
||||
for id_type in [AYON_INSTANCE_ID, AVALON_INSTANCE_ID]:
|
||||
cached_instances.extend(lsattr("id", id_type))
|
||||
|
||||
for i in cached_instances:
|
||||
creator_id = rt.GetUserProp(i, "creator_identifier")
|
||||
if creator_id not in shared_data["max_cached_instances"]:
|
||||
shared_data["max_cached_instances"][creator_id] = [i.name]
|
||||
else:
|
||||
shared_data[
|
||||
"max_cached_instances"][creator_id].append(i.name)
|
||||
return shared_data
|
||||
|
||||
@staticmethod
|
||||
def create_instance_node(node):
|
||||
"""Create instance node.
|
||||
|
||||
If the supplied node is an existing node, it will be used to hold the
instance, otherwise a new Container node will be created.
|
||||
|
||||
Args:
|
||||
node (rt.MXSWrapperBase, str): Node or node name to use.
|
||||
|
||||
Returns:
|
||||
instance
|
||||
"""
|
||||
if isinstance(node, str):
|
||||
node = rt.Container(name=node)
|
||||
|
||||
attrs = rt.Execute(MS_CUSTOM_ATTRIB)
|
||||
modifier = rt.EmptyModifier()
|
||||
rt.addModifier(node, modifier)
|
||||
node.modifiers[0].name = "OP Data"
|
||||
rt.custAttributes.add(node.modifiers[0], attrs)
|
||||
|
||||
return node
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class MaxCreator(Creator, MaxCreatorBase):
|
||||
selected_nodes = []
|
||||
|
||||
def create(self, product_name, instance_data, pre_create_data):
|
||||
if pre_create_data.get("use_selection"):
|
||||
self.selected_nodes = rt.GetCurrentSelection()
|
||||
if rt.getNodeByName(product_name):
|
||||
raise CreatorError(f"'{product_name}' is already created..")
|
||||
|
||||
instance_node = self.create_instance_node(product_name)
|
||||
instance_data["instance_node"] = instance_node.name
|
||||
instance = CreatedInstance(
|
||||
self.product_type,
|
||||
product_name,
|
||||
instance_data,
|
||||
self
|
||||
)
|
||||
if pre_create_data.get("use_selection"):
|
||||
|
||||
node_list = []
|
||||
sel_list = []
|
||||
for i in self.selected_nodes:
|
||||
node_ref = rt.NodeTransformMonitor(node=i)
|
||||
node_list.append(node_ref)
|
||||
sel_list.append(str(i))
|
||||
|
||||
# Setting the property
|
||||
rt.setProperty(
|
||||
instance_node.modifiers[0].openPypeData,
|
||||
"all_handles", node_list)
|
||||
rt.setProperty(
|
||||
instance_node.modifiers[0].openPypeData,
|
||||
"sel_list", sel_list)
|
||||
|
||||
self._add_instance_to_context(instance)
|
||||
imprint(instance_node.name, instance.data_to_store())
|
||||
|
||||
return instance
|
||||
|
||||
def collect_instances(self):
|
||||
self.cache_instance_data(self.collection_shared_data)
|
||||
for instance in self.collection_shared_data["max_cached_instances"].get(self.identifier, []): # noqa
|
||||
created_instance = CreatedInstance.from_existing(
|
||||
read(rt.GetNodeByName(instance)), self
|
||||
)
|
||||
self._add_instance_to_context(created_instance)
|
||||
|
||||
def update_instances(self, update_list):
|
||||
for created_inst, changes in update_list:
|
||||
instance_node = created_inst.get("instance_node")
|
||||
new_values = {
|
||||
key: changes[key].new_value
|
||||
for key in changes.changed_keys
|
||||
}
|
||||
product_name = new_values.get("productName", "")
|
||||
if product_name and instance_node != product_name:
|
||||
node = rt.getNodeByName(instance_node)
|
||||
new_product_name = new_values["productName"]
|
||||
if rt.getNodeByName(new_product_name):
|
||||
raise CreatorError(
|
||||
"The product '{}' already exists.".format(
|
||||
new_product_name))
|
||||
instance_node = new_product_name
|
||||
created_inst["instance_node"] = instance_node
|
||||
node.name = instance_node
|
||||
|
||||
imprint(
|
||||
instance_node,
|
||||
created_inst.data_to_store(),
|
||||
)
|
||||
|
||||
def remove_instances(self, instances):
|
||||
"""Remove specified instance from the scene.
|
||||
|
||||
This is only removing `id` parameter so instance is no longer
|
||||
instance, because it might contain valuable data for artist.
|
||||
|
||||
"""
|
||||
for instance in instances:
|
||||
instance_node = rt.GetNodeByName(
|
||||
instance.data.get("instance_node"))
|
||||
if instance_node:
|
||||
count = rt.custAttributes.count(instance_node.modifiers[0])
|
||||
rt.custAttributes.delete(instance_node.modifiers[0], count)
|
||||
rt.Delete(instance_node)
|
||||
|
||||
self._remove_instance_from_context(instance)
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
return [
|
||||
BoolDef("use_selection", label="Use selection")
|
||||
]
|
||||
|
|
@@ -1,13 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating TyCache."""
from ayon_max.api import plugin


class CreateTyCache(plugin.MaxCreator):
    """Creator plugin for TyCache."""
    identifier = "io.openpype.creators.max.tycache"
    label = "TyCache"
    product_type = "tycache"
    icon = "gear"

    settings_category = "max"
@@ -1,65 +0,0 @@
import os
|
||||
from ayon_max.api import lib, maintained_selection
|
||||
from ayon_max.api.lib import (
|
||||
unique_namespace,
|
||||
|
||||
)
|
||||
from ayon_max.api.pipeline import (
|
||||
containerise,
|
||||
get_previous_loaded_object,
|
||||
update_custom_attribute_data,
|
||||
remove_container_data
|
||||
)
|
||||
from ayon_core.pipeline import get_representation_path, load
|
||||
|
||||
|
||||
class TyCacheLoader(load.LoaderPlugin):
|
||||
"""TyCache Loader."""
|
||||
|
||||
product_types = {"tycache"}
|
||||
representations = {"tyc"}
|
||||
order = -8
|
||||
icon = "code-fork"
|
||||
color = "green"
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
"""Load tyCache"""
|
||||
from pymxs import runtime as rt
|
||||
filepath = os.path.normpath(self.filepath_from_context(context))
|
||||
obj = rt.tyCache()
|
||||
obj.filename = filepath
|
||||
|
||||
namespace = unique_namespace(
|
||||
name + "_",
|
||||
suffix="_",
|
||||
)
|
||||
obj.name = f"{namespace}:{obj.name}"
|
||||
|
||||
return containerise(
|
||||
name, [obj], context,
|
||||
namespace, loader=self.__class__.__name__)
|
||||
|
||||
def update(self, container, context):
|
||||
"""update the container"""
|
||||
from pymxs import runtime as rt
|
||||
|
||||
repre_entity = context["representation"]
|
||||
path = get_representation_path(repre_entity)
|
||||
node = rt.GetNodeByName(container["instance_node"])
|
||||
node_list = get_previous_loaded_object(node)
|
||||
update_custom_attribute_data(node, node_list)
|
||||
with maintained_selection():
|
||||
for tyc in node_list:
|
||||
tyc.filename = path
|
||||
lib.imprint(container["instance_node"], {
|
||||
"representation": repre_entity["id"]
|
||||
})
|
||||
|
||||
def switch(self, container, context):
|
||||
self.update(container, context)
|
||||
|
||||
def remove(self, container):
|
||||
"""remove the container"""
|
||||
from pymxs import runtime as rt
|
||||
node = rt.GetNodeByName(container["instance_node"])
|
||||
remove_container_data(node)
|
||||
|
|
@@ -1,22 +0,0 @@
# -*- coding: utf-8 -*-
import pyblish.api
from pymxs import runtime as rt


class CollectFrameRange(pyblish.api.InstancePlugin):
    """Collect Frame Range."""

    order = pyblish.api.CollectorOrder + 0.01
    label = "Collect Frame Range"
    hosts = ['max']
    families = ["camera", "maxrender",
                "pointcache", "pointcloud",
                "review", "redshiftproxy"]

    def process(self, instance):
        if instance.data["productType"] == "maxrender":
            instance.data["frameStartHandle"] = int(rt.rendStart)
            instance.data["frameEndHandle"] = int(rt.rendEnd)
        else:
            instance.data["frameStartHandle"] = int(rt.animationRange.start)
            instance.data["frameEndHandle"] = int(rt.animationRange.end)
@@ -1,26 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect instance members."""
import pyblish.api
from pymxs import runtime as rt


class CollectMembers(pyblish.api.InstancePlugin):
    """Collect Set Members."""

    order = pyblish.api.CollectorOrder + 0.01
    label = "Collect Instance Members"
    hosts = ['max']

    def process(self, instance):
        if instance.data["productType"] == "workfile":
            self.log.debug(
                "Skipping Collecting Members for workfile product type."
            )
            return
        if instance.data.get("instance_node"):
            container = rt.GetNodeByName(instance.data["instance_node"])
            instance.data["members"] = [
                member.node for member
                in container.modifiers[0].openPypeData.all_handles
            ]
            self.log.debug("{}".format(instance.data["members"]))
@@ -1,76 +0,0 @@
import pyblish.api
|
||||
|
||||
from ayon_core.lib import EnumDef, TextDef
|
||||
from ayon_core.pipeline.publish import AYONPyblishPluginMixin
|
||||
|
||||
|
||||
class CollectTyCacheData(pyblish.api.InstancePlugin,
|
||||
AYONPyblishPluginMixin):
|
||||
"""Collect Channel Attributes for TyCache Export"""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.02
|
||||
label = "Collect tyCache attribute Data"
|
||||
hosts = ['max']
|
||||
families = ["tycache"]
|
||||
|
||||
def process(self, instance):
|
||||
attr_values = self.get_attr_values_from_data(instance.data)
|
||||
attributes = {}
|
||||
for attr_key in attr_values.get("tycacheAttributes", []):
|
||||
attributes[attr_key] = True
|
||||
|
||||
for key in ["tycacheLayer", "tycacheObjectName"]:
|
||||
attributes[key] = attr_values.get(key, "")
|
||||
|
||||
# Collect the selected channel data before exporting
|
||||
instance.data["tyc_attrs"] = attributes
|
||||
self.log.debug(
|
||||
f"Found tycache attributes: {attributes}"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_attribute_defs(cls):
|
||||
# TODO: Support the attributes with maxObject array
|
||||
tyc_attr_enum = ["tycacheChanAge", "tycacheChanGroups",
|
||||
"tycacheChanPos", "tycacheChanRot",
|
||||
"tycacheChanScale", "tycacheChanVel",
|
||||
"tycacheChanSpin", "tycacheChanShape",
|
||||
"tycacheChanMatID", "tycacheChanMapping",
|
||||
"tycacheChanMaterials", "tycacheChanCustomFloat"
|
||||
"tycacheChanCustomVector", "tycacheChanCustomTM",
|
||||
"tycacheChanPhysX", "tycacheMeshBackup",
|
||||
"tycacheCreateObject",
|
||||
"tycacheCreateObjectIfNotCreated",
|
||||
"tycacheAdditionalCloth",
|
||||
"tycacheAdditionalSkin",
|
||||
"tycacheAdditionalSkinID",
|
||||
"tycacheAdditionalSkinIDValue",
|
||||
"tycacheAdditionalTerrain",
|
||||
"tycacheAdditionalVDB",
|
||||
"tycacheAdditionalSplinePaths",
|
||||
"tycacheAdditionalGeo",
|
||||
"tycacheAdditionalGeoActivateModifiers",
|
||||
"tycacheSplines",
|
||||
"tycacheSplinesAdditionalSplines"
|
||||
]
|
||||
tyc_default_attrs = ["tycacheChanGroups", "tycacheChanPos",
|
||||
"tycacheChanRot", "tycacheChanScale",
|
||||
"tycacheChanVel", "tycacheChanShape",
|
||||
"tycacheChanMatID", "tycacheChanMapping",
|
||||
"tycacheChanMaterials",
|
||||
"tycacheCreateObjectIfNotCreated"]
|
||||
return [
|
||||
EnumDef("tycacheAttributes",
|
||||
tyc_attr_enum,
|
||||
default=tyc_default_attrs,
|
||||
multiselection=True,
|
||||
label="TyCache Attributes"),
|
||||
TextDef("tycacheLayer",
|
||||
label="TyCache Layer",
|
||||
tooltip="Name of tycache layer",
|
||||
default="$(tyFlowLayer)"),
|
||||
TextDef("tycacheObjectName",
|
||||
label="TyCache Object Name",
|
||||
tooltip="TyCache Object Name",
|
||||
default="$(tyFlowName)_tyCache")
|
||||
]
|
||||
|
|
@@ -1,242 +0,0 @@
import os
|
||||
|
||||
import pyblish.api
|
||||
from pymxs import runtime as rt
|
||||
|
||||
from ayon_max.api import maintained_selection
|
||||
from ayon_core.pipeline import publish
|
||||
|
||||
|
||||
class ExtractPointCloud(publish.Extractor):
|
||||
"""
|
||||
Extract PRT format with tyFlow operators.
|
||||
|
||||
Notes:
|
||||
Currently only works for the default partition setting
|
||||
|
||||
Methods:
self.export_particle(): sets up all job arguments for attributes
to be exported in MAXScript

self.get_operators(): get the export_particle operator

self.get_custom_attr(): get all custom channel attributes from the
OpenPype settings and set them as job arguments before exporting
|
||||
|
||||
self.get_files(): get the files with tyFlow naming convention
|
||||
before publishing
|
||||
|
||||
self.partition_output_name(): get the naming with partition settings.
|
||||
|
||||
self.get_partition(): get partition value
|
||||
|
||||
"""
|
||||
|
||||
order = pyblish.api.ExtractorOrder - 0.2
|
||||
label = "Extract Point Cloud"
|
||||
hosts = ["max"]
|
||||
families = ["pointcloud"]
|
||||
settings = []
|
||||
|
||||
def process(self, instance):
|
||||
self.settings = self.get_setting(instance)
|
||||
start = instance.data["frameStartHandle"]
|
||||
end = instance.data["frameEndHandle"]
|
||||
self.log.info("Extracting PRT...")
|
||||
|
||||
stagingdir = self.staging_dir(instance)
|
||||
filename = "{name}.prt".format(**instance.data)
|
||||
path = os.path.join(stagingdir, filename)
|
||||
|
||||
with maintained_selection():
|
||||
job_args = self.export_particle(instance.data["members"],
|
||||
start,
|
||||
end,
|
||||
path)
|
||||
|
||||
for job in job_args:
|
||||
rt.Execute(job)
|
||||
|
||||
self.log.info("Performing Extraction ...")
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
self.log.info("Writing PRT with TyFlow Plugin...")
|
||||
filenames = self.get_files(
|
||||
instance.data["members"], path, start, end)
|
||||
self.log.debug(f"filenames: {filenames}")
|
||||
|
||||
partition = self.partition_output_name(
|
||||
instance.data["members"])
|
||||
|
||||
representation = {
|
||||
'name': 'prt',
|
||||
'ext': 'prt',
|
||||
'files': filenames if len(filenames) > 1 else filenames[0],
|
||||
"stagingDir": stagingdir,
|
||||
"outputName": partition # partition value
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
self.log.info(f"Extracted instance '{instance.name}' to: {path}")
|
||||
|
||||
def export_particle(self,
|
||||
members,
|
||||
start,
|
||||
end,
|
||||
filepath):
|
||||
"""Sets up all job arguments for attributes.
|
||||
|
||||
Those attributes are to be exported in MAX Script.
|
||||
|
||||
Args:
|
||||
members (list): Member nodes of the instance.
|
||||
start (int): Start frame.
|
||||
end (int): End frame.
|
||||
filepath (str): Path to PRT file.
|
||||
|
||||
Returns:
|
||||
list of arguments for MAX Script.
|
||||
|
||||
"""
|
||||
job_args = []
|
||||
opt_list = self.get_operators(members)
|
||||
for operator in opt_list:
|
||||
start_frame = f"{operator}.frameStart={start}"
|
||||
job_args.append(start_frame)
|
||||
end_frame = f"{operator}.frameEnd={end}"
|
||||
job_args.append(end_frame)
|
||||
filepath = filepath.replace("\\", "/")
|
||||
prt_filename = f'{operator}.PRTFilename="{filepath}"'
|
||||
job_args.append(prt_filename)
|
||||
# Partition
|
||||
mode = f"{operator}.PRTPartitionsMode=2"
|
||||
job_args.append(mode)
|
||||
|
||||
additional_args = self.get_custom_attr(operator)
|
||||
job_args.extend(iter(additional_args))
|
||||
prt_export = f"{operator}.exportPRT()"
|
||||
job_args.append(prt_export)
|
||||
|
||||
return job_args
|
||||
|
||||
@staticmethod
|
||||
def get_operators(members):
|
||||
"""Get Export Particles Operator.
|
||||
|
||||
Args:
|
||||
members (list): Instance members.
|
||||
|
||||
Returns:
|
||||
list of particle operators
|
||||
|
||||
"""
|
||||
opt_list = []
|
||||
for member in members:
|
||||
obj = member.baseobject
|
||||
# TODO: to see if it can be used maxscript instead
|
||||
anim_names = rt.GetSubAnimNames(obj)
|
||||
for anim_name in anim_names:
|
||||
sub_anim = rt.GetSubAnim(obj, anim_name)
|
||||
boolean = rt.IsProperty(sub_anim, "Export_Particles")
|
||||
if boolean:
|
||||
event_name = sub_anim.Name
|
||||
opt = f"${member.Name}.{event_name}.export_particles"
|
||||
opt_list.append(opt)
|
||||
|
||||
return opt_list
|
||||
|
||||
@staticmethod
|
||||
def get_setting(instance):
|
||||
project_setting = instance.context.data["project_settings"]
|
||||
return project_setting["max"]["PointCloud"]
|
||||
|
||||
def get_custom_attr(self, operator):
|
||||
"""Get Custom Attributes"""
|
||||
|
||||
custom_attr_list = []
|
||||
attr_settings = self.settings["attribute"]
|
||||
for attr in attr_settings:
|
||||
key = attr["name"]
|
||||
value = attr["value"]
|
||||
custom_attr = "{0}.PRTChannels_{1}=True".format(operator,
|
||||
value)
|
||||
self.log.debug(
|
||||
"{0} will be added as custom attribute".format(key)
|
||||
)
|
||||
custom_attr_list.append(custom_attr)
|
||||
|
||||
return custom_attr_list
|
||||
|
||||
def get_files(self,
|
||||
container,
|
||||
path,
|
||||
start_frame,
|
||||
end_frame):
|
||||
"""Get file names for tyFlow.
|
||||
|
||||
Set the filenames accordingly to the tyFlow file
|
||||
naming extension for the publishing purpose
|
||||
|
||||
Actual File Output from tyFlow::
|
||||
<SceneFile>__part<PartitionStart>of<PartitionCount>.<frame>.prt
|
||||
|
||||
e.g. tyFlow_cloth_CCCS_blobbyFill_001__part1of1_00004.prt
|
||||
|
||||
Args:
|
||||
container: Instance node.
|
||||
path (str): Output directory.
|
||||
start_frame (int): Start frame.
|
||||
end_frame (int): End frame.
|
||||
|
||||
Returns:
|
||||
list of filenames
|
||||
|
||||
"""
|
||||
filenames = []
|
||||
filename = os.path.basename(path)
|
||||
orig_name, ext = os.path.splitext(filename)
|
||||
partition_count, partition_start = self.get_partition(container)
|
||||
for frame in range(int(start_frame), int(end_frame) + 1):
|
||||
actual_name = "{}__part{:03}of{}_{:05}".format(orig_name,
|
||||
partition_start,
|
||||
partition_count,
|
||||
frame)
|
||||
actual_filename = path.replace(orig_name, actual_name)
|
||||
filenames.append(os.path.basename(actual_filename))
|
||||
|
||||
return filenames
|
||||
|
||||
def partition_output_name(self, container):
|
||||
"""Get partition output name.
|
||||
|
||||
Partition output name set for mapping
|
||||
the published file output.
|
||||
|
||||
Todo:
|
||||
Customizes the setting for the output.
|
||||
|
||||
Args:
|
||||
container: Instance node.
|
||||
|
||||
Returns:
|
||||
str: Partition name.
|
||||
|
||||
"""
|
||||
partition_count, partition_start = self.get_partition(container)
|
||||
return f"_part{partition_start:03}of{partition_count}"
|
||||
|
||||
def get_partition(self, container):
|
||||
"""Get Partition value.
|
||||
|
||||
Args:
|
||||
container: Instance node.
|
||||
|
||||
"""
|
||||
opt_list = self.get_operators(container)
|
||||
# TODO: This looks strange? Iterating over
|
||||
# the opt_list but returning from inside?
|
||||
for operator in opt_list:
|
||||
count = rt.Execute(f'{operator}.PRTPartitionsCount')
|
||||
start = rt.Execute(f'{operator}.PRTPartitionsFrom')
|
||||
|
||||
return count, start
|
||||
|
|
@@ -1,157 +0,0 @@
import os
|
||||
|
||||
import pyblish.api
|
||||
from pymxs import runtime as rt
|
||||
|
||||
from ayon_max.api import maintained_selection
|
||||
from ayon_core.pipeline import publish
|
||||
|
||||
|
||||
class ExtractTyCache(publish.Extractor):
|
||||
"""Extract tycache format with tyFlow operators.
|
||||
Notes:
|
||||
- TyCache only works for TyFlow Pro Plugin.
|
||||
|
||||
Methods:
|
||||
self.get_export_particles_job_args(): sets up all job arguments
|
||||
for attributes to be exported in MAXscript
|
||||
|
||||
self.get_operators(): get the export_particle operator
|
||||
|
||||
self.get_files(): get the files with tyFlow naming convention
|
||||
before publishing
|
||||
"""
|
||||
|
||||
order = pyblish.api.ExtractorOrder - 0.2
|
||||
label = "Extract TyCache"
|
||||
hosts = ["max"]
|
||||
families = ["tycache"]
|
||||
|
||||
def process(self, instance):
|
||||
# TODO: let user decide the param
|
||||
start = int(instance.context.data["frameStart"])
|
||||
end = int(instance.context.data.get("frameEnd"))
|
||||
self.log.debug("Extracting Tycache...")
|
||||
|
||||
stagingdir = self.staging_dir(instance)
|
||||
filename = "{name}.tyc".format(**instance.data)
|
||||
path = os.path.join(stagingdir, filename)
|
||||
filenames = self.get_files(instance, start, end)
|
||||
additional_attributes = instance.data.get("tyc_attrs", {})
|
||||
|
||||
with maintained_selection():
|
||||
job_args = self.get_export_particles_job_args(
|
||||
instance.data["members"],
|
||||
start, end, path,
|
||||
additional_attributes)
|
||||
for job in job_args:
|
||||
rt.Execute(job)
|
||||
representations = instance.data.setdefault("representations", [])
|
||||
representation = {
|
||||
'name': 'tyc',
|
||||
'ext': 'tyc',
|
||||
'files': filenames if len(filenames) > 1 else filenames[0],
|
||||
"stagingDir": stagingdir,
|
||||
}
|
||||
representations.append(representation)
|
||||
|
||||
# Get the tyMesh filename for extraction
|
||||
mesh_filename = f"{instance.name}__tyMesh.tyc"
|
||||
mesh_repres = {
|
||||
'name': 'tyMesh',
|
||||
'ext': 'tyc',
|
||||
'files': mesh_filename,
|
||||
"stagingDir": stagingdir,
|
||||
"outputName": '__tyMesh'
|
||||
}
|
||||
representations.append(mesh_repres)
|
||||
self.log.debug(f"Extracted instance '{instance.name}' to: {filenames}")
|
||||
|
||||
def get_files(self, instance, start_frame, end_frame):
|
||||
"""Get file names for tyFlow in tyCache format.
|
||||
|
||||
Set the filenames accordingly to the tyCache file
|
||||
naming extension(.tyc) for the publishing purpose
|
||||
|
||||
Actual File Output from tyFlow in tyCache format:
|
||||
<InstanceName>__tyPart_<frame>.tyc
|
||||
|
||||
e.g. tycacheMain__tyPart_00000.tyc
|
||||
|
||||
Args:
|
||||
instance (pyblish.api.Instance): instance.
|
||||
start_frame (int): Start frame.
|
||||
end_frame (int): End frame.
|
||||
|
||||
Returns:
|
||||
filenames(list): list of filenames
|
||||
|
||||
"""
|
||||
filenames = []
|
||||
for frame in range(int(start_frame), int(end_frame) + 1):
|
||||
filename = f"{instance.name}__tyPart_{frame:05}.tyc"
|
||||
filenames.append(filename)
|
||||
return filenames
|
||||
|
||||
def get_export_particles_job_args(self, members, start, end,
|
||||
filepath, additional_attributes):
|
||||
"""Sets up all job arguments for attributes.
|
||||
|
||||
Those attributes are to be exported in MAX Script.
|
||||
|
||||
Args:
|
||||
members (list): Member nodes of the instance.
|
||||
start (int): Start frame.
|
||||
end (int): End frame.
|
||||
filepath (str): Output path of the TyCache file.
|
||||
additional_attributes (dict): channel attributes data
|
||||
which needed to be exported
|
||||
|
||||
Returns:
|
||||
list of arguments for MAX Script.
|
||||
|
||||
"""
|
||||
settings = {
|
||||
"exportMode": 2,
|
||||
"frameStart": start,
|
||||
"frameEnd": end,
|
||||
"tyCacheFilename": filepath.replace("\\", "/")
|
||||
}
|
||||
settings.update(additional_attributes)
|
||||
|
||||
job_args = []
|
||||
for operator in self.get_operators(members):
|
||||
for key, value in settings.items():
|
||||
if isinstance(value, str):
|
||||
# embed in quotes
|
||||
value = f'"{value}"'
|
||||
|
||||
job_args.append(f"{operator}.{key}={value}")
|
||||
job_args.append(f"{operator}.exportTyCache()")
|
||||
return job_args
|
||||
|
||||
@staticmethod
|
||||
def get_operators(members):
|
||||
"""Get Export Particles Operator.
|
||||
|
||||
Args:
|
||||
members (list): Instance members.
|
||||
|
||||
Returns:
|
||||
list of particle operators
|
||||
|
||||
"""
|
||||
opt_list = []
|
||||
for member in members:
|
||||
obj = member.baseobject
|
||||
# TODO: see if it can use maxscript instead
|
||||
anim_names = rt.GetSubAnimNames(obj)
|
||||
for anim_name in anim_names:
|
||||
sub_anim = rt.GetSubAnim(obj, anim_name)
|
||||
boolean = rt.IsProperty(sub_anim, "Export_Particles")
|
||||
if boolean:
|
||||
event_name = sub_anim.Name
|
||||
opt = f"${member.Name}.{event_name}.export_particles"
|
||||
opt_list.append(opt)
|
||||
|
||||
return opt_list
|
||||
|
|
@@ -1,90 +0,0 @@
import pyblish.api
|
||||
|
||||
from pymxs import runtime as rt
|
||||
from ayon_core.pipeline import (
|
||||
OptionalPyblishPluginMixin
|
||||
)
|
||||
from ayon_core.pipeline.publish import (
|
||||
RepairAction,
|
||||
ValidateContentsOrder,
|
||||
PublishValidationError,
|
||||
KnownPublishError
|
||||
)
|
||||
from ayon_max.api.lib import get_frame_range, set_timeline
|
||||
|
||||
|
||||
class ValidateFrameRange(pyblish.api.InstancePlugin,
|
||||
OptionalPyblishPluginMixin):
|
||||
"""Validates the frame ranges.
|
||||
|
||||
This is an optional validator checking if the frame range on instance
|
||||
matches the frame range specified for the folder.
|
||||
|
||||
It also validates render frame ranges of render layers.
|
||||
|
||||
Repair action will change everything to match the folder frame range.
|
||||
|
||||
This can be turned off by the artist to allow custom ranges.
|
||||
"""
|
||||
|
||||
label = "Validate Frame Range"
|
||||
order = ValidateContentsOrder
|
||||
families = ["camera", "maxrender",
|
||||
"pointcache", "pointcloud",
|
||||
"review", "redshiftproxy"]
|
||||
hosts = ["max"]
|
||||
optional = True
|
||||
actions = [RepairAction]
|
||||
|
||||
settings_category = "max"
|
||||
|
||||
def process(self, instance):
|
||||
if not self.is_active(instance.data):
|
||||
self.log.debug("Skipping Validate Frame Range...")
|
||||
return
|
||||
|
||||
frame_range = get_frame_range(
|
||||
instance.data["taskEntity"])
|
||||
|
||||
inst_frame_start = instance.data.get("frameStartHandle")
|
||||
inst_frame_end = instance.data.get("frameEndHandle")
|
||||
if inst_frame_start is None or inst_frame_end is None:
|
||||
raise KnownPublishError(
|
||||
"Missing frame start and frame end on "
|
||||
"instance to to validate."
|
||||
)
|
||||
frame_start_handle = frame_range["frameStartHandle"]
|
||||
frame_end_handle = frame_range["frameEndHandle"]
|
||||
errors = []
|
||||
if frame_start_handle != inst_frame_start:
|
||||
errors.append(
|
||||
f"Start frame ({inst_frame_start}) on instance does not match " # noqa
|
||||
f"with the start frame ({frame_start_handle}) set on the folder attributes. ") # noqa
|
||||
if frame_end_handle != inst_frame_end:
|
||||
errors.append(
|
||||
f"End frame ({inst_frame_end}) on instance does not match "
|
||||
f"with the end frame ({frame_end_handle}) "
|
||||
"from the folder attributes. ")
|
||||
|
||||
if errors:
|
||||
bullet_point_errors = "\n".join(
|
||||
"- {}".format(error) for error in errors
|
||||
)
|
||||
report = (
|
||||
"Frame range settings are incorrect.\n\n"
|
||||
f"{bullet_point_errors}\n\n"
|
||||
"You can use repair action to fix it."
|
||||
)
|
||||
raise PublishValidationError(report, title="Frame Range incorrect")
|
||||
|
||||
@classmethod
|
||||
def repair(cls, instance):
|
||||
frame_range = get_frame_range()
|
||||
frame_start_handle = frame_range["frameStartHandle"]
|
||||
frame_end_handle = frame_range["frameEndHandle"]
|
||||
|
||||
if instance.data["productType"] == "maxrender":
|
||||
rt.rendStart = frame_start_handle
|
||||
rt.rendEnd = frame_end_handle
|
||||
else:
|
||||
set_timeline(frame_start_handle, frame_end_handle)
|
||||
|
|
@@ -1,88 +0,0 @@
import pyblish.api
|
||||
from ayon_core.pipeline import PublishValidationError
|
||||
from pymxs import runtime as rt
|
||||
|
||||
|
||||
class ValidateTyFlowData(pyblish.api.InstancePlugin):
|
||||
"""Validate TyFlow plugins or relevant operators are set correctly."""
|
||||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
families = ["pointcloud", "tycache"]
|
||||
hosts = ["max"]
|
||||
label = "TyFlow Data"
|
||||
|
||||
def process(self, instance):
|
||||
"""
|
||||
Notes:
|
||||
1. Validate the container only include tyFlow objects
|
||||
2. Validate if tyFlow operator Export Particle exists
|
||||
|
||||
"""
|
||||
|
||||
invalid_object = self.get_tyflow_object(instance)
|
||||
if invalid_object:
|
||||
self.log.error(f"Non tyFlow object found: {invalid_object}")
|
||||
|
||||
invalid_operator = self.get_tyflow_operator(instance)
|
||||
if invalid_operator:
|
||||
self.log.error(
|
||||
"Operator 'Export Particles' not found in tyFlow editor.")
|
||||
if invalid_object or invalid_operator:
|
||||
raise PublishValidationError(
|
||||
"issues occurred",
|
||||
description="Container should only include tyFlow object "
|
||||
"and tyflow operator 'Export Particle' should be in "
|
||||
"the tyFlow editor.")
|
||||
|
||||
def get_tyflow_object(self, instance):
|
||||
"""Get the nodes which are not tyFlow object(s)
|
||||
and editable mesh(es)
|
||||
|
||||
Args:
|
||||
instance (pyblish.api.Instance): instance
|
||||
|
||||
Returns:
|
||||
list: invalid nodes which are not tyFlow
|
||||
object(s) and editable mesh(es).
|
||||
"""
|
||||
container = instance.data["instance_node"]
|
||||
self.log.debug(f"Validating tyFlow container for {container}")
|
||||
|
||||
allowed_classes = [rt.tyFlow, rt.Editable_Mesh]
|
||||
return [
|
||||
member for member in instance.data["members"]
|
||||
if rt.ClassOf(member) not in allowed_classes
|
||||
]
|
||||
|
||||
def get_tyflow_operator(self, instance):
|
||||
"""Check if the Export Particle Operators in the node
|
||||
connections.
|
||||
|
||||
Args:
|
||||
instance (str): instance node
|
||||
|
||||
Returns:
|
||||
invalid(list): list of invalid nodes which do
|
||||
not consist of Export Particle Operators as parts
|
||||
of the node connections
|
||||
"""
|
||||
invalid = []
|
||||
members = instance.data["members"]
|
||||
for member in members:
|
||||
obj = member.baseobject
|
||||
|
||||
# There must be at least one animation with export
|
||||
# particles enabled
|
||||
has_export_particles = False
|
||||
anim_names = rt.GetSubAnimNames(obj)
|
||||
for anim_name in anim_names:
|
||||
# get name of the related tyFlow node
|
||||
sub_anim = rt.GetSubAnim(obj, anim_name)
|
||||
# check if there is export particle operator
|
||||
if rt.IsProperty(sub_anim, "Export_Particles"):
|
||||
has_export_particles = True
|
||||
break
|
||||
|
||||
if not has_export_particles:
|
||||
invalid.append(member)
|
||||
return invalid
|
||||
|
|
@@ -1,222 +0,0 @@
import json
|
||||
from pydantic import validator
|
||||
|
||||
from ayon_server.settings import BaseSettingsModel, SettingsField
|
||||
from ayon_server.exceptions import BadRequestException
|
||||
|
||||
|
||||
class ValidateAttributesModel(BaseSettingsModel):
|
||||
enabled: bool = SettingsField(title="ValidateAttributes")
|
||||
attributes: str = SettingsField(
|
||||
"{}", title="Attributes", widget="textarea")
|
||||
|
||||
@validator("attributes")
|
||||
def validate_json(cls, value):
|
||||
if not value.strip():
|
||||
return "{}"
|
||||
try:
|
||||
converted_value = json.loads(value)
|
||||
success = isinstance(converted_value, dict)
|
||||
except json.JSONDecodeError:
|
||||
success = False
|
||||
|
||||
if not success:
|
||||
raise BadRequestException(
|
||||
"The attibutes can't be parsed as json object"
|
||||
)
|
||||
return value
|
||||
|
||||
|
||||
class ValidateCameraAttributesModel(BaseSettingsModel):
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
optional: bool = SettingsField(title="Optional")
|
||||
active: bool = SettingsField(title="Active")
|
||||
fov: float = SettingsField(0.0, title="Focal Length")
|
||||
nearrange: float = SettingsField(0.0, title="Near Range")
|
||||
farrange: float = SettingsField(0.0, title="Far Range")
|
||||
nearclip: float = SettingsField(0.0, title="Near Clip")
|
||||
farclip: float = SettingsField(0.0, title="Far Clip")
|
||||
|
||||
|
||||
class FamilyMappingItemModel(BaseSettingsModel):
|
||||
families: list[str] = SettingsField(
|
||||
default_factory=list,
|
||||
title="Families"
|
||||
)
|
||||
plugins: list[str] = SettingsField(
|
||||
default_factory=list,
|
||||
title="Plugins"
|
||||
)
|
||||
|
||||
|
||||
class ValidateModelNameModel(BaseSettingsModel):
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
optional: bool = SettingsField(title="Optional")
|
||||
active: bool = SettingsField(title="Active")
|
||||
regex: str = SettingsField(
|
||||
"(.*)_(?P<subset>.*)_(GEO)",
|
||||
title="Validation regex",
|
||||
description=(
|
||||
"Regex for validating model name. You can use named "
|
||||
" capturing groups:(?P<asset>.*) for Asset name"
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class ValidateLoadedPluginModel(BaseSettingsModel):
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
optional: bool = SettingsField(title="Optional")
|
||||
family_plugins_mapping: list[FamilyMappingItemModel] = SettingsField(
|
||||
default_factory=list,
|
||||
title="Family Plugins Mapping"
|
||||
)
|
||||
|
||||
|
||||
class BasicValidateModel(BaseSettingsModel):
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
optional: bool = SettingsField(title="Optional")
|
||||
active: bool = SettingsField(title="Active")
|
||||
|
||||
|
||||
class PublishersModel(BaseSettingsModel):
|
||||
ValidateInstanceInContext: BasicValidateModel = SettingsField(
|
||||
default_factory=BasicValidateModel,
|
||||
title="Validate Instance In Context",
|
||||
section="Validators"
|
||||
)
|
||||
ValidateFrameRange: BasicValidateModel = SettingsField(
|
||||
default_factory=BasicValidateModel,
|
||||
title="Validate Frame Range"
|
||||
)
|
||||
ValidateAttributes: ValidateAttributesModel = SettingsField(
|
||||
default_factory=ValidateAttributesModel,
|
||||
title="Validate Attributes"
|
||||
)
|
||||
ValidateCameraAttributes: ValidateCameraAttributesModel = SettingsField(
|
||||
default_factory=ValidateCameraAttributesModel,
|
||||
title="Validate Camera Attributes",
|
||||
description=(
|
||||
"If the value of the camera attributes set to 0, "
|
||||
"the system automatically skips checking it"
|
||||
)
|
||||
)
|
||||
ValidateNoAnimation: BasicValidateModel = SettingsField(
|
||||
default_factory=BasicValidateModel,
|
||||
title="Validate No Animation"
|
||||
)
|
||||
ValidateLoadedPlugin: ValidateLoadedPluginModel = SettingsField(
|
||||
default_factory=ValidateLoadedPluginModel,
|
||||
title="Validate Loaded Plugin"
|
||||
)
|
||||
ValidateMeshHasUVs: BasicValidateModel = SettingsField(
|
||||
default_factory=BasicValidateModel,
|
||||
title="Validate Mesh Has UVs"
|
||||
)
|
||||
ValidateModelName: ValidateModelNameModel = SettingsField(
|
||||
default_factory=ValidateModelNameModel,
|
||||
title="Validate Model Name"
|
||||
)
|
||||
ValidateRenderPasses: BasicValidateModel = SettingsField(
|
||||
default_factory=BasicValidateModel,
|
||||
title="Validate Render Passes"
|
||||
)
|
||||
ExtractModelObj: BasicValidateModel = SettingsField(
|
||||
default_factory=BasicValidateModel,
|
||||
title="Extract OBJ",
|
||||
section="Extractors"
|
||||
)
|
||||
ExtractModelFbx: BasicValidateModel = SettingsField(
|
||||
default_factory=BasicValidateModel,
|
||||
title="Extract FBX"
|
||||
)
|
||||
ExtractModelUSD: BasicValidateModel = SettingsField(
|
||||
default_factory=BasicValidateModel,
|
||||
title="Extract Geometry (USD)"
|
||||
)
|
||||
ExtractModel: BasicValidateModel = SettingsField(
|
||||
default_factory=BasicValidateModel,
|
||||
title="Extract Geometry (Alembic)"
|
||||
)
|
||||
ExtractMaxSceneRaw: BasicValidateModel = SettingsField(
|
||||
default_factory=BasicValidateModel,
|
||||
title="Extract Max Scene (Raw)"
|
||||
)
|
||||
|
||||
|
||||
DEFAULT_PUBLISH_SETTINGS = {
|
||||
"ValidateInstanceInContext": {
|
||||
"enabled": True,
|
||||
"optional": True,
|
||||
"active": True
|
||||
},
|
||||
"ValidateFrameRange": {
|
||||
"enabled": True,
|
||||
"optional": True,
|
||||
"active": True
|
||||
},
|
||||
"ValidateAttributes": {
|
||||
"enabled": False,
|
||||
"attributes": "{}"
|
||||
},
|
||||
"ValidateCameraAttributes": {
|
||||
"enabled": True,
|
||||
"optional": True,
|
||||
"active": False,
|
||||
"fov": 45.0,
|
||||
"nearrange": 0.0,
|
||||
"farrange": 1000.0,
|
||||
"nearclip": 1.0,
|
||||
"farclip": 1000.0
|
||||
},
|
||||
"ValidateModelName": {
|
||||
"enabled": True,
|
||||
"optional": True,
|
||||
"active": False,
|
||||
"regex": "(.*)_(?P<subset>.*)_(GEO)"
|
||||
},
|
||||
"ValidateLoadedPlugin": {
|
||||
"enabled": False,
|
||||
"optional": True,
|
||||
"family_plugins_mapping": []
|
||||
},
|
||||
"ValidateMeshHasUVs": {
|
||||
"enabled": True,
|
||||
"optional": True,
|
||||
"active": False
|
||||
},
|
||||
"ValidateNoAnimation": {
|
||||
"enabled": True,
|
||||
"optional": True,
|
||||
"active": False,
|
||||
},
|
||||
"ValidateRenderPasses": {
|
||||
"enabled": True,
|
||||
"optional": False,
|
||||
"active": True
|
||||
},
|
||||
"ExtractModelObj": {
|
||||
"enabled": True,
|
||||
"optional": True,
|
||||
"active": False
|
||||
},
|
||||
"ExtractModelFbx": {
|
||||
"enabled": True,
|
||||
"optional": True,
|
||||
"active": False
|
||||
},
|
||||
"ExtractModelUSD": {
|
||||
"enabled": True,
|
||||
"optional": True,
|
||||
"active": False
|
||||
},
|
||||
"ExtractModel": {
|
||||
"enabled": True,
|
||||
"optional": True,
|
||||
"active": True
|
||||
},
|
||||
"ExtractMaxSceneRaw": {
|
||||
"enabled": True,
|
||||
"optional": True,
|
||||
"active": True
|
||||
}
|
||||
}