diff --git a/openpype/hosts/max/__init__.py b/openpype/hosts/max/__init__.py
index 8da0e0ee42..9a5af8258c 100644
--- a/openpype/hosts/max/__init__.py
+++ b/openpype/hosts/max/__init__.py
@@ -7,4 +7,4 @@ from .addon import (
 __all__ = (
     "MaxAddon",
     "MAX_HOST_DIR",
-)
\ No newline at end of file
+)
diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py
index 503afade73..26190dcfb8 100644
--- a/openpype/hosts/max/api/__init__.py
+++ b/openpype/hosts/max/api/__init__.py
@@ -2,10 +2,19 @@
 """Public API for 3dsmax"""
 
 from .pipeline import (
-    MaxHost
+    MaxHost,
 )
 
+from .lib import(
+    maintained_selection,
+    lsattr,
+    get_all_children
+)
+
 __all__ = [
     "MaxHost",
+    "maintained_selection",
+    "lsattr",
+    "get_all_children"
 ]
 
diff --git a/openpype/hosts/max/api/lib.py b/openpype/hosts/max/api/lib.py
index 8a57bb1bf6..9256ca9ac1 100644
--- a/openpype/hosts/max/api/lib.py
+++ b/openpype/hosts/max/api/lib.py
@@ -1,7 +1,13 @@
 # -*- coding: utf-8 -*-
 """Library of functions useful for 3dsmax pipeline."""
+import json
+import six
 from pymxs import runtime as rt
 from typing import Union
+import contextlib
+
+
+JSON_PREFIX = "JSON::"
 
 
 def imprint(node_name: str, data: dict) -> bool:
@@ -10,7 +16,10 @@
         return False
 
     for k, v in data.items():
-        rt.setUserProp(node, k, v)
+        if isinstance(v, (dict, list)):
+            rt.setUserProp(node, k, f'{JSON_PREFIX}{json.dumps(v)}')
+        else:
+            rt.setUserProp(node, k, v)
 
     return True
 
@@ -39,10 +48,13 @@ def lsattr(
 
     nodes = []
     output_node(root, nodes)
-    if not value:
-        return [n for n in nodes if rt.getUserProp(n, attr)]
-
-    return [n for n in nodes if rt.getUserProp(n, attr) == value]
+    return [
+        n for n in nodes
+        if rt.getUserProp(n, attr) == value
+    ] if value else [
+        n for n in nodes
+        if rt.getUserProp(n, attr)
+    ]
 
 
 def read(container) -> dict:
@@ -53,12 +65,58 @@
         return data
 
     for line in props.split("\r\n"):
-        key, value = line.split("=")
-        # if the line cannot be split we can't really parse it
-        if not key:
+        try:
+            key, value = line.split("=")
+        except ValueError:
+            # if the line cannot be split we can't really parse it
             continue
-        data[key.strip()] = value.strip()
-    data["instance_node"] = container
+        value = value.strip()
+        if isinstance(value.strip(), six.string_types) and \
+                value.startswith(JSON_PREFIX):
+            try:
+                value = json.loads(value[len(JSON_PREFIX):])
+            except json.JSONDecodeError:
+                # not a json
+                pass
+
+        data[key.strip()] = value
+
+    data["instance_node"] = container.name
 
     return data
 
+
+@contextlib.contextmanager
+def maintained_selection():
+    previous_selection = rt.getCurrentSelection()
+    try:
+        yield
+    finally:
+        if previous_selection:
+            rt.select(previous_selection)
+        else:
+            rt.select()
+
+
+def get_all_children(parent, node_type=None):
+    """Handy function to get all the children of a given node
+
+    Args:
+        parent (3dsmax Node1): Node to get all children of.
+        node_type (None, runtime.class): give class to check for
+            e.g. rt.FFDBox/rt.GeometryClass etc.
+
+    Returns:
+        list: list of all children of the parent node
+    """
+    def list_children(node):
+        children = []
+        for c in node.Children:
+            children.append(c)
+            children = children + list_children(c)
+        return children
+    child_list = list_children(parent)
+
+    return ([x for x in child_list if rt.superClassOf(x) == node_type]
+            if node_type else child_list)
diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py
index cef45193c4..4f8271fb7e 100644
--- a/openpype/hosts/max/api/pipeline.py
+++ b/openpype/hosts/max/api/pipeline.py
@@ -1,9 +1,7 @@
 # -*- coding: utf-8 -*-
 """Pipeline tools for OpenPype Houdini integration."""
 import os
-import sys
 import logging
-import contextlib
 import json
 
 
@@ -101,12 +99,12 @@ attributes "OpenPypeContext"
     (
         context type: #string
     )
-    
+
     rollout params "OpenPype Parameters"
     (
         editText editTextContext "Context" type: #string
     )
-)
+)
 """)
     attr = rt.execute(create_attr_script)
 
@@ -149,6 +147,3 @@ def ls() -> list:
 
     for container in sorted(containers, key=lambda name: container.name):
         yield lib.read(container)
-
-
-
diff --git a/openpype/hosts/max/api/plugin.py b/openpype/hosts/max/api/plugin.py
index 0f01c94ce1..4788bfd383 100644
--- a/openpype/hosts/max/api/plugin.py
+++ b/openpype/hosts/max/api/plugin.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """3dsmax specific Avalon/Pyblish plugin definitions."""
-import sys
 from pymxs import runtime as rt
 import six
 from abc import ABCMeta
@@ -25,12 +24,12 @@ class MaxCreatorBase(object):
             shared_data["max_cached_subsets"] = {}
             cached_instances = lsattr("id", "pyblish.avalon.instance")
             for i in cached_instances:
-                creator_id = i.get("creator_identifier")
+                creator_id = rt.getUserProp(i, "creator_identifier")
                 if creator_id not in shared_data["max_cached_subsets"]:
-                    shared_data["houdini_cached_subsets"][creator_id] = [i]
+                    shared_data["max_cached_subsets"][creator_id] = [i.name]
                 else:
                     shared_data[
-                        "houdini_cached_subsets"][creator_id].append(i)  # noqa
+                        "max_cached_subsets"][creator_id].append(i.name)  # noqa
         return shared_data
 
     @staticmethod
@@ -61,8 +60,12 @@
             instance_data,
             self
         )
+        for node in self.selected_nodes:
+            node.Parent = instance_node
+
         self._add_instance_to_context(instance)
         imprint(instance_node.name, instance.data_to_store())
+
+        return instance
 
     def collect_instances(self):
@@ -70,7 +73,7 @@
         for instance in self.collection_shared_data[
                 "max_cached_subsets"].get(self.identifier, []):
             created_instance = CreatedInstance.from_existing(
-                read(instance), self
+                read(rt.getNodeByName(instance)), self
             )
             self._add_instance_to_context(created_instance)
 
@@ -98,7 +101,7 @@
             instance_node = rt.getNodeByName(
                 instance.data.get("instance_node"))
             if instance_node:
-                rt.delete(instance_node)
+                rt.delete(rt.getNodeByName(instance_node))
 
             self._remove_instance_from_context(instance)
diff --git a/openpype/hosts/max/plugins/publish/collect_workfile.py b/openpype/hosts/max/plugins/publish/collect_workfile.py
new file mode 100644
index 0000000000..7112337575
--- /dev/null
+++ b/openpype/hosts/max/plugins/publish/collect_workfile.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+"""Collect current work file."""
+import os
+import pyblish.api
+
+from pymxs import runtime as rt
+from openpype.pipeline import legacy_io, KnownPublishError
+
+
+class CollectWorkfile(pyblish.api.ContextPlugin):
+    """Inject the current working file into context"""
+
+    order = pyblish.api.CollectorOrder - 0.01
+    label = "Collect 3dsmax Workfile"
+    hosts = ['max']
+
+    def process(self, context):
+        """Inject the current working file."""
+        folder = rt.maxFilePath
+        file = rt.maxFileName
+        if not folder or not file:
+            self.log.error("Scene is not saved.")
+        current_file = os.path.join(folder, file)
+
+        context.data['currentFile'] = current_file
+
+        filename, ext = os.path.splitext(file)
+
+        task = legacy_io.Session["AVALON_TASK"]
+
+        data = {}
+
+        # create instance
+        instance = context.create_instance(name=filename)
+        subset = 'workfile' + task.capitalize()
+
+        data.update({
+            "subset": subset,
+            "asset": os.getenv("AVALON_ASSET", None),
+            "label": subset,
+            "publish": True,
+            "family": 'workfile',
+            "families": ['workfile'],
+            "setMembers": [current_file],
+            "frameStart": context.data['frameStart'],
+            "frameEnd": context.data['frameEnd'],
+            "handleStart": context.data['handleStart'],
+            "handleEnd": context.data['handleEnd']
+        })
+
+        data['representations'] = [{
+            'name': ext.lstrip("."),
+            'ext': ext.lstrip("."),
+            'files': file,
+            "stagingDir": folder,
+        }]
+
+        instance.data.update(data)
+
+        self.log.info('Collected instance: {}'.format(file))
+        self.log.info('Scene path: {}'.format(current_file))
+        self.log.info('staging Dir: {}'.format(folder))
+        self.log.info('subset: {}'.format(subset))
diff --git a/openpype/hosts/max/plugins/publish/extract_pointcache.py b/openpype/hosts/max/plugins/publish/extract_pointcache.py
new file mode 100644
index 0000000000..904c1656da
--- /dev/null
+++ b/openpype/hosts/max/plugins/publish/extract_pointcache.py
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+"""
+Export alembic file.
+
+Note:
+    Parameters on AlembicExport (AlembicExport.Parameter):
+
+    ParticleAsMesh (bool): Sets whether particle shapes are exported
+        as meshes.
+    AnimTimeRange (enum): How animation is saved:
+        #CurrentFrame: saves current frame
+        #TimeSlider: saves the active time segments on time slider (default)
+        #StartEnd: saves a range specified by the Step
+    StartFrame (int)
+    EndFrame (int)
+    ShapeSuffix (bool): When set to true, appends the string "Shape" to the
+        name of each exported mesh. This property is set to false by default.
+    SamplesPerFrame (int): Sets the number of animation samples per frame.
+    Hidden (bool): When true, export hidden geometry.
+    UVs (bool): When true, export the mesh UV map channel.
+    Normals (bool): When true, export the mesh normals.
+    VertexColors (bool): When true, export the mesh vertex color map 0 and the
+        current vertex color display data when it differs
+    ExtraChannels (bool): When true, export the mesh extra map channels
+        (map channels greater than channel 1)
+    Velocity (bool): When true, export the mesh vertex and particle velocity
+        data.
+    MaterialIDs (bool): When true, export the mesh material ID as
+        Alembic face sets.
+    Visibility (bool): When true, export the node visibility data.
+    LayerName (bool): When true, export the node layer name as an Alembic
+        object property.
+    MaterialName (bool): When true, export the geometry node material name as
+        an Alembic object property
+    ObjectID (bool): When true, export the geometry node g-buffer object ID as
+        an Alembic object property.
+    CustomAttributes (bool): When true, export the node and its modifiers
+        custom attributes into an Alembic object compound property.
+"""
+import os
+import pyblish.api
+from openpype.pipeline import publish
+from pymxs import runtime as rt
+from openpype.hosts.max.api import (
+    maintained_selection,
+    get_all_children
+)
+
+
+class ExtractAlembic(publish.Extractor):
+    order = pyblish.api.ExtractorOrder
+    label = "Extract Pointcache"
+    hosts = ["max"]
+    families = ["pointcache", "camera"]
+
+    def process(self, instance):
+        start = float(instance.data.get("frameStartHandle", 1))
+        end = float(instance.data.get("frameEndHandle", 1))
+
+        container = instance.data["instance_node"]
+
+        self.log.info("Extracting pointcache ...")
+
+        parent_dir = self.staging_dir(instance)
+        file_name = "{name}.abc".format(**instance.data)
+        path = os.path.join(parent_dir, file_name)
+
+        # We run the render
+        self.log.info("Writing alembic '%s' to '%s'" % (file_name,
+                                                        parent_dir))
+
+        abc_export_cmd = (
+            f"""
+AlembicExport.ArchiveType = #ogawa
+AlembicExport.CoordinateSystem = #maya
+AlembicExport.StartFrame = {start}
+AlembicExport.EndFrame = {end}
+
+exportFile @"{path}" #noPrompt selectedOnly:on using:AlembicExport
+
+            """)
+
+        self.log.debug(f"Executing command: {abc_export_cmd}")
+
+        with maintained_selection():
+            # select and export
+
+            rt.select(get_all_children(rt.getNodeByName(container)))
+            rt.execute(abc_export_cmd)
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': 'abc',
+            'ext': 'abc',
+            'files': file_name,
+            "stagingDir": parent_dir,
+        }
+        instance.data["representations"].append(representation)
diff --git a/openpype/hosts/max/plugins/publish/validate_scene_saved.py b/openpype/hosts/max/plugins/publish/validate_scene_saved.py
new file mode 100644
index 0000000000..6392b12d11
--- /dev/null
+++ b/openpype/hosts/max/plugins/publish/validate_scene_saved.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+from openpype.pipeline import PublishValidationError
+from openpype.pipeline.publish import RepairAction
+from pymxs import runtime as rt
+
+
+class ValidateSceneSaved(pyblish.api.InstancePlugin):
+    """Validate that workfile was saved."""
+
+    order = pyblish.api.ValidatorOrder
+    families = ["workfile"]
+    hosts = ["max"]
+    label = "Validate Workfile is saved"
+
+    def process(self, instance):
+        if not rt.maxFilePath or not rt.maxFileName:
+            raise PublishValidationError(
+                "Workfile is not saved", title=self.label)