🐛 fix publishing of alembics

Repository: mirror of https://github.com/ynput/ayon-core.git
Parent: 8b71066d9c
Commit: 1c985ca001

8 changed files with 272 additions and 25 deletions
@@ -7,4 +7,4 @@ from .addon import (
 __all__ = (
     "MaxAddon",
     "MAX_HOST_DIR",
 )
@@ -2,10 +2,19 @@
 """Public API for 3dsmax"""
 
 from .pipeline import (
-    MaxHost
+    MaxHost,
 )
 
+from .lib import (
+    maintained_selection,
+    lsattr,
+    get_all_children
+)
+
 __all__ = [
     "MaxHost",
+    "maintained_selection",
+    "lsattr",
+    "get_all_children"
 ]
@@ -1,7 +1,13 @@
 # -*- coding: utf-8 -*-
 """Library of functions useful for 3dsmax pipeline."""
+import json
+import six
 from pymxs import runtime as rt
 from typing import Union
+import contextlib
 
 
+JSON_PREFIX = "JSON::"
+
+
 def imprint(node_name: str, data: dict) -> bool:
@@ -10,7 +16,10 @@ def imprint(node_name: str, data: dict) -> bool:
         return False
 
     for k, v in data.items():
-        rt.setUserProp(node, k, v)
+        if isinstance(v, (dict, list)):
+            rt.setUserProp(node, k, f'{JSON_PREFIX}{json.dumps(v)}')
+        else:
+            rt.setUserProp(node, k, v)
 
     return True
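
For illustration, a minimal sketch of what the new serialization branch in imprint() stores. The node name, keys, and values here are hypothetical, and the imports only resolve inside 3ds Max with OpenPype loaded:

    from pymxs import runtime as rt
    from openpype.hosts.max.api.lib import imprint

    # assuming a scene node named "pointcacheMain" exists
    imprint("pointcacheMain", {
        "id": "pyblish.avalon.instance",  # plain value, stored as-is
        "families": ["pointcache"],       # dict/list values are JSON-encoded behind the prefix
    })
    node = rt.getNodeByName("pointcacheMain")
    print(rt.getUserProp(node, "families"))  # -> JSON::["pointcache"]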
@@ -39,10 +48,13 @@ def lsattr(
 
     nodes = []
     output_node(root, nodes)
-    if not value:
-        return [n for n in nodes if rt.getUserProp(n, attr)]
-    return [n for n in nodes if rt.getUserProp(n, attr) == value]
+    return [
+        n for n in nodes
+        if rt.getUserProp(n, attr) == value
+    ] if value else [
+        n for n in nodes
+        if rt.getUserProp(n, attr)
+    ]
 
 
 def read(container) -> dict:
@@ -53,12 +65,58 @@ def read(container) -> dict:
         return data
 
     for line in props.split("\r\n"):
-        key, value = line.split("=")
-        if not key:
+        try:
+            key, value = line.split("=")
+        except ValueError:
+            # if the line cannot be split we can't really parse it
             continue
-        data[key.strip()] = value.strip()
 
-    data["instance_node"] = container
+        value = value.strip()
+        if isinstance(value.strip(), six.string_types) and \
+                value.startswith(JSON_PREFIX):
+            try:
+                value = json.loads(value[len(JSON_PREFIX):])
+            except json.JSONDecodeError:
+                # not a json
+                pass
+
+        data[key.strip()] = value
+
+    data["instance_node"] = container.name
 
     return data
+
+
+@contextlib.contextmanager
+def maintained_selection():
+    previous_selection = rt.getCurrentSelection()
+    try:
+        yield
+    finally:
+        if previous_selection:
+            rt.select(previous_selection)
+        else:
+            rt.select()
+
+
+def get_all_children(parent, node_type=None):
+    """Handy function to get all the children of a given node
+
+    Args:
+        parent (3dsmax Node1): Node to get all children of.
+        node_type (None, runtime.class): give class to check for
+            e.g. rt.FFDBox/rt.GeometryClass etc.
+
+    Returns:
+        list: list of all children of the parent node
+    """
+    def list_children(node):
+        children = []
+        for c in node.Children:
+            children.append(c)
+            children = children + list_children(c)
+        return children
+    child_list = list_children(parent)
+
+    return ([x for x in child_list if rt.superClassOf(x) == node_type]
+            if node_type else child_list)
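
A short usage sketch of the helpers added above. The node name "pointcacheMain" is an assumption for illustration; the code only runs inside 3ds Max with OpenPype loaded:

    from pymxs import runtime as rt
    from openpype.hosts.max.api.lib import (
        maintained_selection,
        get_all_children,
        read,
    )

    container = rt.getNodeByName("pointcacheMain")  # hypothetical instance node

    # the previous selection is restored when the block exits, even on error
    with maintained_selection():
        rt.select(get_all_children(container))
        # ... operate on the selection here ...

    data = read(container)
    print(data["instance_node"])  # -> "pointcacheMain" (the node's name, after this fix)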
@@ -1,9 +1,7 @@
 # -*- coding: utf-8 -*-
 """Pipeline tools for OpenPype Houdini integration."""
 import os
-import sys
 import logging
-import contextlib
 
 import json
@@ -101,12 +99,12 @@ attributes "OpenPypeContext"
 (
     context type: #string
 )
 
 rollout params "OpenPype Parameters"
 (
     editText editTextContext "Context" type: #string
 )
 )
 """)
 
 attr = rt.execute(create_attr_script)
@@ -149,6 +147,3 @@ def ls() -> list:
 
     for container in sorted(containers, key=lambda name: container.name):
         yield lib.read(container)
-
-
-
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """3dsmax specific Avalon/Pyblish plugin definitions."""
-import sys
 from pymxs import runtime as rt
 import six
 from abc import ABCMeta
@@ -25,12 +24,12 @@ class MaxCreatorBase(object):
         shared_data["max_cached_subsets"] = {}
         cached_instances = lsattr("id", "pyblish.avalon.instance")
         for i in cached_instances:
-            creator_id = i.get("creator_identifier")
+            creator_id = rt.getUserProp(i, "creator_identifier")
             if creator_id not in shared_data["max_cached_subsets"]:
-                shared_data["houdini_cached_subsets"][creator_id] = [i]
+                shared_data["max_cached_subsets"][creator_id] = [i.name]
             else:
                 shared_data[
-                    "houdini_cached_subsets"][creator_id].append(i)  # noqa
+                    "max_cached_subsets"][creator_id].append(i.name)  # noqa
         return shared_data
 
     @staticmethod
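
After this rename the cache key is consistently "max_cached_subsets" (the old "houdini_cached_subsets" key was a copy-paste leftover) and the cached values are node names rather than node objects. A hypothetical resulting structure, with an invented creator identifier and node names:

    # sketch of shared_data after cache_subsets() has run
    shared_data = {
        "max_cached_subsets": {
            "io.openpype.creators.max.pointcache": ["pointcacheMain", "pointcacheSim"],
        }
    }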
@@ -61,8 +60,12 @@ class MaxCreator(Creator, MaxCreatorBase):
             instance_data,
             self
         )
+        for node in self.selected_nodes:
+            node.Parent = instance_node
 
         self._add_instance_to_context(instance)
         imprint(instance_node.name, instance.data_to_store())
 
         return instance
 
     def collect_instances(self):
@@ -70,7 +73,7 @@ class MaxCreator(Creator, MaxCreatorBase):
         for instance in self.collection_shared_data[
                 "max_cached_subsets"].get(self.identifier, []):
             created_instance = CreatedInstance.from_existing(
-                read(instance), self
+                read(rt.getNodeByName(instance)), self
             )
             self._add_instance_to_context(created_instance)
@@ -98,7 +101,7 @@ class MaxCreator(Creator, MaxCreatorBase):
         instance_node = rt.getNodeByName(
             instance.data.get("instance_node"))
         if instance_node:
-            rt.delete(instance_node)
+            rt.delete(rt.getNodeByName(instance_node))
 
         self._remove_instance_from_context(instance)
openpype/hosts/max/plugins/publish/collect_workfile.py (new file, 63 lines)
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+"""Collect current work file."""
+import os
+import pyblish.api
+
+from pymxs import runtime as rt
+from openpype.pipeline import legacy_io, KnownPublishError
+
+
+class CollectWorkfile(pyblish.api.ContextPlugin):
+    """Inject the current working file into context"""
+
+    order = pyblish.api.CollectorOrder - 0.01
+    label = "Collect 3dsmax Workfile"
+    hosts = ['max']
+
+    def process(self, context):
+        """Inject the current working file."""
+        folder = rt.maxFilePath
+        file = rt.maxFileName
+        if not folder or not file:
+            self.log.error("Scene is not saved.")
+        current_file = os.path.join(folder, file)
+
+        context.data['currentFile'] = current_file
+
+        filename, ext = os.path.splitext(file)
+
+        task = legacy_io.Session["AVALON_TASK"]
+
+        data = {}
+
+        # create instance
+        instance = context.create_instance(name=filename)
+        subset = 'workfile' + task.capitalize()
+
+        data.update({
+            "subset": subset,
+            "asset": os.getenv("AVALON_ASSET", None),
+            "label": subset,
+            "publish": True,
+            "family": 'workfile',
+            "families": ['workfile'],
+            "setMembers": [current_file],
+            "frameStart": context.data['frameStart'],
+            "frameEnd": context.data['frameEnd'],
+            "handleStart": context.data['handleStart'],
+            "handleEnd": context.data['handleEnd']
+        })
+
+        data['representations'] = [{
+            'name': ext.lstrip("."),
+            'ext': ext.lstrip("."),
+            'files': file,
+            "stagingDir": folder,
+        }]
+
+        instance.data.update(data)
+
+        self.log.info('Collected instance: {}'.format(file))
+        self.log.info('Scene path: {}'.format(current_file))
+        self.log.info('staging Dir: {}'.format(folder))
+        self.log.info('subset: {}'.format(subset))
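
To illustrate the naming logic above with hypothetical values (scene file, task, and folder invented for the example), this self-contained sketch mirrors the collector's computations:

    import os

    # hypothetical inputs
    file = "shot010_v001.max"
    task = "animation"
    folder = "C:/projects/shot010/work"

    filename, ext = os.path.splitext(file)   # -> ("shot010_v001", ".max")
    subset = 'workfile' + task.capitalize()  # -> "workfileAnimation"
    representation = {
        'name': ext.lstrip("."),             # -> "max"
        'ext': ext.lstrip("."),              # -> "max"
        'files': file,
        'stagingDir': folder,
    }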
openpype/hosts/max/plugins/publish/extract_pointcache.py (new file, 100 lines)
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+"""
+Export alembic file.
+
+Note:
+    Parameters on AlembicExport (AlembicExport.Parameter):
+
+    ParticleAsMesh (bool): Sets whether particle shapes are exported
+        as meshes.
+    AnimTimeRange (enum): How animation is saved:
+        #CurrentFrame: saves current frame
+        #TimeSlider: saves the active time segments on time slider (default)
+        #StartEnd: saves a range specified by StartFrame and EndFrame
+    StartFrame (int)
+    EndFrame (int)
+    ShapeSuffix (bool): When set to true, appends the string "Shape" to the
+        name of each exported mesh. This property is set to false by default.
+    SamplesPerFrame (int): Sets the number of animation samples per frame.
+    Hidden (bool): When true, export hidden geometry.
+    UVs (bool): When true, export the mesh UV map channel.
+    Normals (bool): When true, export the mesh normals.
+    VertexColors (bool): When true, export the mesh vertex color map 0 and the
+        current vertex color display data when it differs.
+    ExtraChannels (bool): When true, export the mesh extra map channels
+        (map channels greater than channel 1).
+    Velocity (bool): When true, export the mesh vertex and particle velocity
+        data.
+    MaterialIDs (bool): When true, export the mesh material ID as
+        Alembic face sets.
+    Visibility (bool): When true, export the node visibility data.
+    LayerName (bool): When true, export the node layer name as an Alembic
+        object property.
+    MaterialName (bool): When true, export the geometry node material name as
+        an Alembic object property.
+    ObjectID (bool): When true, export the geometry node g-buffer object ID as
+        an Alembic object property.
+    CustomAttributes (bool): When true, export the node and its modifiers
+        custom attributes into an Alembic object compound property.
+"""
+import os
+import pyblish.api
+from openpype.pipeline import publish
+from pymxs import runtime as rt
+from openpype.hosts.max.api import (
+    maintained_selection,
+    get_all_children
+)
+
+
+class ExtractAlembic(publish.Extractor):
+    order = pyblish.api.ExtractorOrder
+    label = "Extract Pointcache"
+    hosts = ["max"]
+    families = ["pointcache", "camera"]
+
+    def process(self, instance):
+        start = float(instance.data.get("frameStartHandle", 1))
+        end = float(instance.data.get("frameEndHandle", 1))
+
+        container = instance.data["instance_node"]
+
+        self.log.info("Extracting pointcache ...")
+
+        parent_dir = self.staging_dir(instance)
+        file_name = "{name}.abc".format(**instance.data)
+        path = os.path.join(parent_dir, file_name)
+
+        # We run the export
+        self.log.info("Writing alembic '%s' to '%s'" % (file_name,
+                                                        parent_dir))
+
+        abc_export_cmd = (
+            f"""
+AlembicExport.ArchiveType = #ogawa
+AlembicExport.CoordinateSystem = #maya
+AlembicExport.StartFrame = {start}
+AlembicExport.EndFrame = {end}
+
+exportFile @"{path}" #noPrompt selectedOnly:on using:AlembicExport
+
+""")
+
+        self.log.debug(f"Executing command: {abc_export_cmd}")
+
+        with maintained_selection():
+            # select and export
+            rt.select(get_all_children(rt.getNodeByName(container)))
+            rt.execute(abc_export_cmd)
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': 'abc',
+            'ext': 'abc',
+            'files': file_name,
+            "stagingDir": parent_dir,
+        }
+        instance.data["representations"].append(representation)
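
The docstring above lists more AlembicExport properties than the extractor sets; the same pattern can toggle them before the export runs. A minimal sketch (property names taken from the docstring above; runs only inside 3ds Max, and whether each toggle is wanted depends on the pipeline):

    from pymxs import runtime as rt

    # optional toggles, per the AlembicExport property list documented above
    rt.AlembicExport.ParticleAsMesh = True    # export particle shapes as meshes
    rt.AlembicExport.UVs = True               # include the mesh UV map channel
    rt.AlembicExport.Normals = True           # include mesh normals
    rt.AlembicExport.CustomAttributes = True  # include node/modifier custom attributes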
openpype/hosts/max/plugins/publish/validate_scene_saved.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+from openpype.pipeline import PublishValidationError
+from openpype.pipeline.publish import RepairAction
+from pymxs import runtime as rt
+
+
+class ValidateSceneSaved(pyblish.api.InstancePlugin):
+    """Validate that workfile was saved."""
+
+    order = pyblish.api.ValidatorOrder
+    families = ["workfile"]
+    hosts = ["max"]
+    label = "Validate Workfile is saved"
+
+    def process(self, instance):
+        if not rt.maxFilePath or not rt.maxFileName:
+            raise PublishValidationError(
+                "Workfile is not saved", title=self.label)
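
RepairAction is imported above but not attached to the plugin. For reference, a hedged sketch of how a repair action is commonly wired to an OpenPype validator; the actions attribute and repair classmethod follow the usual OpenPype pattern and are not part of this commit, and the save call is an assumption:

    class ValidateSceneSavedRepairable(pyblish.api.InstancePlugin):
        """Hypothetical variant that offers a repair step."""

        order = pyblish.api.ValidatorOrder
        families = ["workfile"]
        hosts = ["max"]
        label = "Validate Workfile is saved"
        actions = [RepairAction]  # assumed wiring; the committed file leaves this unused

        def process(self, instance):
            if not rt.maxFilePath or not rt.maxFileName:
                raise PublishValidationError(
                    "Workfile is not saved", title=self.label)

        @classmethod
        def repair(cls, instance):
            # assumption: prompt the artist with 3ds Max's save-as dialog
            rt.execute('max file saveas')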