Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)

Commit 663538ceff — Merge develop
67 changed files with 2192 additions and 223 deletions

openpype/hosts/max/__init__.py (new file)
@@ -0,0 +1,10 @@
from .addon import (
    MaxAddon,
    MAX_HOST_DIR,
)


__all__ = (
    "MaxAddon",
    "MAX_HOST_DIR",
)

openpype/hosts/max/addon.py (new file)
@@ -0,0 +1,16 @@
# -*- coding: utf-8 -*-
import os
from openpype.modules import OpenPypeModule, IHostAddon

MAX_HOST_DIR = os.path.dirname(os.path.abspath(__file__))


class MaxAddon(OpenPypeModule, IHostAddon):
    name = "max"
    host_name = "max"

    def initialize(self, module_settings):
        self.enabled = True

    def get_workfile_extensions(self):
        return [".max"]

openpype/hosts/max/api/__init__.py (new file)
@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
"""Public API for 3dsmax"""

from .pipeline import (
    MaxHost,
)


from .lib import (
    maintained_selection,
    lsattr,
    get_all_children
)

__all__ = [
    "MaxHost",
    "maintained_selection",
    "lsattr",
    "get_all_children"
]

openpype/hosts/max/api/lib.py (new file)
@@ -0,0 +1,122 @@
# -*- coding: utf-8 -*-
"""Library of functions useful for 3dsmax pipeline."""
import json
import six
from pymxs import runtime as rt
from typing import Union
import contextlib


JSON_PREFIX = "JSON::"


def imprint(node_name: str, data: dict) -> bool:
    node = rt.getNodeByName(node_name)
    if not node:
        return False

    for k, v in data.items():
        if isinstance(v, (dict, list)):
            rt.setUserProp(node, k, f'{JSON_PREFIX}{json.dumps(v)}')
        else:
            rt.setUserProp(node, k, v)

    return True


def lsattr(
        attr: str,
        value: Union[str, None] = None,
        root: Union[str, None] = None) -> list:
    """List nodes having attribute with specified value.

    Args:
        attr (str): Attribute name to match.
        value (str, Optional): Value to match. If omitted, all nodes
            with the specified attribute are returned regardless of value.
        root (str, Optional): Root node name. If omitted, scene root is used.

    Returns:
        list of nodes.
    """
    root = rt.rootnode if root is None else rt.getNodeByName(root)

    def output_node(node, nodes):
        nodes.append(node)
        for child in node.Children:
            output_node(child, nodes)

    nodes = []
    output_node(root, nodes)
    return [
        n for n in nodes
        if rt.getUserProp(n, attr) == value
    ] if value else [
        n for n in nodes
        if rt.getUserProp(n, attr)
    ]


def read(container) -> dict:
    data = {}
    props = rt.getUserPropBuffer(container)
    # this shouldn't happen but let's guard against it anyway
    if not props:
        return data

    for line in props.split("\r\n"):
        try:
            key, value = line.split("=")
        except ValueError:
            # if the line cannot be split we can't really parse it
            continue

        value = value.strip()
        if isinstance(value, six.string_types) and \
                value.startswith(JSON_PREFIX):
            try:
                value = json.loads(value[len(JSON_PREFIX):])
            except json.JSONDecodeError:
                # not a json
                pass

        data[key.strip()] = value

    data["instance_node"] = container.name

    return data


@contextlib.contextmanager
def maintained_selection():
    previous_selection = rt.getCurrentSelection()
    try:
        yield
    finally:
        if previous_selection:
            rt.select(previous_selection)
        else:
            rt.select()


def get_all_children(parent, node_type=None):
    """Handy function to get all the children of a given node

    Args:
        parent (3dsmax Node1): Node to get all children of.
        node_type (None, runtime.class): give class to check for
            e.g. rt.FFDBox/rt.GeometryClass etc.

    Returns:
        list: list of all children of the parent node
    """
    def list_children(node):
        children = []
        for c in node.Children:
            children.append(c)
            children = children + list_children(c)
        return children
    child_list = list_children(parent)

    return ([x for x in child_list if rt.superClassOf(x) == node_type]
            if node_type else child_list)

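Taken together, imprint() and read() round-trip metadata through 3dsmax user properties, with dicts and lists serialized behind the JSON:: prefix, and lsattr() finds nodes by those same properties. A usage sketch, not part of this commit, assuming a live 3dsmax session (the node name "demo_CON" is made up for illustration):

from pymxs import runtime as rt
from openpype.hosts.max.api.lib import imprint, lsattr, read

# hypothetical container node to tag
rt.container(name="demo_CON")

# plain values are stored as-is; the list goes through JSON_PREFIX
imprint("demo_CON", {
    "id": "pyblish.avalon.instance",
    "families": ["pointcache"],  # stored as 'JSON::["pointcache"]'
})

# read() parses the user property buffer back into a dict
data = read(rt.getNodeByName("demo_CON"))
assert data["families"] == ["pointcache"]

# lsattr() walks the scene and matches on the same user property
tagged = lsattr("id", "pyblish.avalon.instance")
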
openpype/hosts/max/api/menu.py (new file)
@@ -0,0 +1,130 @@
# -*- coding: utf-8 -*-
"""3dsmax menu definition of OpenPype."""
from Qt import QtWidgets, QtCore
from pymxs import runtime as rt

from openpype.tools.utils import host_tools


class OpenPypeMenu(object):
    """Object representing OpenPype menu.

    This is using a "hack" to inject itself before the "Help" menu of 3dsmax.
    For some reason the `postLoadingMenus` event doesn't fire, and the main
    menu is probably re-initialized by menu templates, so we wait for at
    least 1 Qt event loop before trying to insert.

    """

    def __init__(self):
        super().__init__()
        self.main_widget = self.get_main_widget()
        self.menu = None

        timer = QtCore.QTimer()
        # set number of event loops to wait.
        timer.setInterval(1)
        timer.timeout.connect(self._on_timer)
        timer.start()

        self._timer = timer
        self._counter = 0

    def _on_timer(self):
        if self._counter < 1:
            self._counter += 1
            return

        self._counter = 0
        self._timer.stop()
        self.build_openpype_menu()

    @staticmethod
    def get_main_widget():
        """Get 3dsmax main window."""
        return QtWidgets.QWidget.find(rt.windows.getMAXHWND())

    def get_main_menubar(self) -> QtWidgets.QMenuBar:
        """Get main Menubar by 3dsmax main window."""
        return list(self.main_widget.findChildren(QtWidgets.QMenuBar))[0]

    def get_or_create_openpype_menu(
            self, name: str = "&OpenPype",
            before: str = "&Help") -> QtWidgets.QAction:
        """Create OpenPype menu.

        Args:
            name (str, Optional): OpenPype menu name.
            before (str, Optional): Name of the 3dsmax main menu item to
                add the OpenPype menu before.

        Returns:
            QtWidgets.QAction: OpenPype menu action.

        """
        if self.menu is not None:
            return self.menu

        menu_bar = self.get_main_menubar()
        menu_items = menu_bar.findChildren(
            QtWidgets.QMenu, options=QtCore.Qt.FindDirectChildrenOnly)
        help_action = None
        for item in menu_items:
            if name in item.title():
                # we already have OpenPype menu
                return item

            if before in item.title():
                help_action = item.menuAction()

        op_menu = QtWidgets.QMenu("&OpenPype")
        menu_bar.insertMenu(help_action, op_menu)

        self.menu = op_menu
        return op_menu

    def build_openpype_menu(self) -> QtWidgets.QAction:
        """Build items in OpenPype menu."""
        openpype_menu = self.get_or_create_openpype_menu()
        load_action = QtWidgets.QAction("Load...", openpype_menu)
        load_action.triggered.connect(self.load_callback)
        openpype_menu.addAction(load_action)

        publish_action = QtWidgets.QAction("Publish...", openpype_menu)
        publish_action.triggered.connect(self.publish_callback)
        openpype_menu.addAction(publish_action)

        manage_action = QtWidgets.QAction("Manage...", openpype_menu)
        manage_action.triggered.connect(self.manage_callback)
        openpype_menu.addAction(manage_action)

        library_action = QtWidgets.QAction("Library...", openpype_menu)
        library_action.triggered.connect(self.library_callback)
        openpype_menu.addAction(library_action)

        openpype_menu.addSeparator()

        workfiles_action = QtWidgets.QAction("Work Files...", openpype_menu)
        workfiles_action.triggered.connect(self.workfiles_callback)
        openpype_menu.addAction(workfiles_action)
        return openpype_menu

    def load_callback(self):
        """Callback to show Loader tool."""
        host_tools.show_loader(parent=self.main_widget)

    def publish_callback(self):
        """Callback to show Publisher tool."""
        host_tools.show_publisher(parent=self.main_widget)

    def manage_callback(self):
        """Callback to show Scene Manager/Inventory tool."""
        host_tools.show_subset_manager(parent=self.main_widget)

    def library_callback(self):
        """Callback to show Library Loader tool."""
        host_tools.show_library_loader(parent=self.main_widget)

    def workfiles_callback(self):
        """Callback to show Workfiles tool."""
        host_tools.show_workfiles(parent=self.main_widget)

openpype/hosts/max/api/pipeline.py (new file)
@@ -0,0 +1,145 @@
# -*- coding: utf-8 -*-
"""Pipeline tools for OpenPype 3dsmax integration."""
import os
import logging

import json

from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher
import pyblish.api
from openpype.pipeline import (
    register_creator_plugin_path,
    register_loader_plugin_path,
    AVALON_CONTAINER_ID,
)
from openpype.hosts.max.api.menu import OpenPypeMenu
from openpype.hosts.max.api import lib
from openpype.hosts.max import MAX_HOST_DIR

from pymxs import runtime as rt  # noqa

log = logging.getLogger("openpype.hosts.max")

PLUGINS_DIR = os.path.join(MAX_HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")


class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher):

    name = "max"
    menu = None

    def __init__(self):
        super(MaxHost, self).__init__()
        self._op_events = {}
        self._has_been_setup = False

    def install(self):
        pyblish.api.register_host("max")

        pyblish.api.register_plugin_path(PUBLISH_PATH)
        register_loader_plugin_path(LOAD_PATH)
        register_creator_plugin_path(CREATE_PATH)

        # self._register_callbacks()
        self.menu = OpenPypeMenu()

        self._has_been_setup = True

    def has_unsaved_changes(self):
        # TODO: how to get it from 3dsmax?
        return True

    def get_workfile_extensions(self):
        return [".max"]

    def save_workfile(self, dst_path=None):
        rt.saveMaxFile(dst_path)
        return dst_path

    def open_workfile(self, filepath):
        rt.checkForSave()
        rt.loadMaxFile(filepath)
        return filepath

    def get_current_workfile(self):
        return os.path.join(rt.maxFilePath, rt.maxFileName)

    def get_containers(self):
        return ls()

    def _register_callbacks(self):
        rt.callbacks.removeScripts(id=rt.name("OpenPypeCallbacks"))

        rt.callbacks.addScript(
            rt.Name("postLoadingMenus"),
            self._deferred_menu_creation, id=rt.Name('OpenPypeCallbacks'))

    def _deferred_menu_creation(self):
        self.log.info("Building menu ...")
        self.menu = OpenPypeMenu()

    @staticmethod
    def create_context_node():
        """Helper for creating context holding node."""

        root_scene = rt.rootScene

        create_attr_script = ("""
attributes "OpenPypeContext"
(
    parameters main rollout:params
    (
        context type: #string
    )

    rollout params "OpenPype Parameters"
    (
        editText editTextContext "Context" type: #string
    )
)
        """)

        attr = rt.execute(create_attr_script)
        rt.custAttributes.add(root_scene, attr)

        return root_scene.OpenPypeContext.context

    def update_context_data(self, data, changes):
        try:
            _ = rt.rootScene.OpenPypeContext.context
        except AttributeError:
            # context node doesn't exist
            self.create_context_node()

        rt.rootScene.OpenPypeContext.context = json.dumps(data)

    def get_context_data(self):
        try:
            context = rt.rootScene.OpenPypeContext.context
        except AttributeError:
            # context node doesn't exist
            context = self.create_context_node()
        if not context:
            context = "{}"
        return json.loads(context)

    def save_file(self, dst_path=None):
        # Force forward slashes to avoid segfault
        dst_path = dst_path.replace("\\", "/")
        rt.saveMaxFile(dst_path)


def ls() -> list:
    """Get all OpenPype instances."""
    objs = rt.objects
    containers = [
        obj for obj in objs
        if rt.getUserProp(obj, "id") == AVALON_CONTAINER_ID
    ]

    for container in sorted(containers, key=lambda c: c.name):
        yield lib.read(container)

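The scene-level "OpenPypeContext" custom attribute acts as persistent storage for publisher context, and ls() yields one read() dict per container tagged with AVALON_CONTAINER_ID. A minimal sketch of the round trip, not part of this commit, assuming it runs inside 3dsmax (the "project" key is illustrative):

from openpype.hosts.max.api.pipeline import MaxHost, ls

host = MaxHost()
host.install()  # registers plugin paths and builds the OpenPype menu

# context data is serialized to JSON on the scene root attribute;
# the attribute is created lazily on first write
host.update_context_data({"project": "demo"}, changes={})
assert host.get_context_data() == {"project": "demo"}

# one dict per tagged container in the scene
for container_data in ls():
    print(container_data["instance_node"])
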
openpype/hosts/max/api/plugin.py (new file)
@@ -0,0 +1,111 @@
# -*- coding: utf-8 -*-
"""3dsmax specific Avalon/Pyblish plugin definitions."""
from pymxs import runtime as rt
import six
from abc import ABCMeta
from openpype.pipeline import (
    CreatorError,
    Creator,
    CreatedInstance
)
from openpype.lib import BoolDef
from .lib import imprint, read, lsattr


class OpenPypeCreatorError(CreatorError):
    pass


class MaxCreatorBase(object):

    @staticmethod
    def cache_subsets(shared_data):
        if shared_data.get("max_cached_subsets") is None:
            shared_data["max_cached_subsets"] = {}
            cached_instances = lsattr("id", "pyblish.avalon.instance")
            for i in cached_instances:
                creator_id = rt.getUserProp(i, "creator_identifier")
                if creator_id not in shared_data["max_cached_subsets"]:
                    shared_data["max_cached_subsets"][creator_id] = [i.name]
                else:
                    shared_data[
                        "max_cached_subsets"][creator_id].append(i.name)  # noqa
        return shared_data

    @staticmethod
    def create_instance_node(node_name: str, parent: str = ""):
        parent_node = rt.getNodeByName(parent) if parent else rt.rootScene
        if not parent_node:
            raise OpenPypeCreatorError(f"Specified parent {parent} not found")

        container = rt.container(name=node_name)
        container.Parent = parent_node

        return container


@six.add_metaclass(ABCMeta)
class MaxCreator(Creator, MaxCreatorBase):
    selected_nodes = []

    def create(self, subset_name, instance_data, pre_create_data):
        if pre_create_data.get("use_selection"):
            self.selected_nodes = rt.getCurrentSelection()

        instance_node = self.create_instance_node(subset_name)
        instance_data["instance_node"] = instance_node.name
        instance = CreatedInstance(
            self.family,
            subset_name,
            instance_data,
            self
        )
        for node in self.selected_nodes:
            node.Parent = instance_node

        self._add_instance_to_context(instance)
        imprint(instance_node.name, instance.data_to_store())

        return instance

    def collect_instances(self):
        self.cache_subsets(self.collection_shared_data)
        for instance in self.collection_shared_data[
                "max_cached_subsets"].get(self.identifier, []):
            created_instance = CreatedInstance.from_existing(
                read(rt.getNodeByName(instance)), self
            )
            self._add_instance_to_context(created_instance)

    def update_instances(self, update_list):
        for created_inst, _changes in update_list:
            instance_node = created_inst.get("instance_node")

            new_values = {
                key: new_value
                for key, (_old_value, new_value) in _changes.items()
            }
            imprint(
                instance_node,
                new_values,
            )

    def remove_instances(self, instances):
        """Remove specified instances from the scene.

        """
        for instance in instances:
            instance_node = rt.getNodeByName(
                instance.data.get("instance_node"))
            if instance_node:
                rt.delete(instance_node)

            self._remove_instance_from_context(instance)

    def get_pre_create_attr_defs(self):
        return [
            BoolDef("use_selection", label="Use selection")
        ]

openpype/hosts/max/hooks/set_paths.py (new file)
@@ -0,0 +1,17 @@
from openpype.lib import PreLaunchHook


class SetPath(PreLaunchHook):
    """Set current dir to workdir.

    Hook `GlobalHostDataHook` must be executed before this hook.
    """
    app_groups = ["max"]

    def execute(self):
        workdir = self.launch_context.env.get("AVALON_WORKDIR", "")
        if not workdir:
            self.log.warning("BUG: Workdir is not filled.")
            return

        self.launch_context.kwargs["cwd"] = workdir

openpype/hosts/max/plugins/__init__.py (new, empty file)

openpype/hosts/max/plugins/create/create_pointcache.py (new file)
@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating pointcache alembics."""
from openpype.hosts.max.api import plugin
from openpype.pipeline import CreatedInstance


class CreatePointCache(plugin.MaxCreator):
    identifier = "io.openpype.creators.max.pointcache"
    label = "Point Cache"
    family = "pointcache"
    icon = "gear"

    def create(self, subset_name, instance_data, pre_create_data):
        # from pymxs import runtime as rt

        _ = super(CreatePointCache, self).create(
            subset_name,
            instance_data,
            pre_create_data)  # type: CreatedInstance

        # for additional work on the node:
        # instance_node = rt.getNodeByName(instance.get("instance_node"))

openpype/hosts/max/plugins/load/load_pointcache.py (new file)
@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
"""Simple alembic loader for 3dsmax.

Because of the limited API, alembics can only be loaded, but not easily
updated.

"""
import os
from openpype.pipeline import (
    load
)


class AbcLoader(load.LoaderPlugin):
    """Alembic loader."""

    families = ["model", "animation", "pointcache"]
    label = "Load Alembic"
    representations = ["abc"]
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt

        file_path = os.path.normpath(self.fname)

        abc_before = {
            c for c in rt.rootNode.Children
            if rt.classOf(c) == rt.AlembicContainer
        }

        abc_export_cmd = (f"""
AlembicImport.ImportToRoot = false

importFile @"{file_path}" #noPrompt
""")

        self.log.debug(f"Executing command: {abc_export_cmd}")
        rt.execute(abc_export_cmd)

        abc_after = {
            c for c in rt.rootNode.Children
            if rt.classOf(c) == rt.AlembicContainer
        }

        # This should yield new AlembicContainer node
        abc_containers = abc_after.difference(abc_before)

        if len(abc_containers) != 1:
            self.log.error("Something failed when loading.")

        abc_container = abc_containers.pop()

        container_name = f"{name}_CON"
        container = rt.container(name=container_name)
        abc_container.Parent = container

        return container

    def remove(self, container):
        from pymxs import runtime as rt

        node = container["node"]
        rt.delete(node)

openpype/hosts/max/plugins/publish/collect_workfile.py (new file)
@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*-
"""Collect current work file."""
import os
import pyblish.api

from pymxs import runtime as rt
from openpype.pipeline import legacy_io


class CollectWorkfile(pyblish.api.ContextPlugin):
    """Inject the current working file into context"""

    order = pyblish.api.CollectorOrder - 0.01
    label = "Collect 3dsmax Workfile"
    hosts = ['max']

    def process(self, context):
        """Inject the current working file."""
        folder = rt.maxFilePath
        file = rt.maxFileName
        if not folder or not file:
            self.log.error("Scene is not saved.")
        current_file = os.path.join(folder, file)

        context.data['currentFile'] = current_file

        filename, ext = os.path.splitext(file)

        task = legacy_io.Session["AVALON_TASK"]

        data = {}

        # create instance
        instance = context.create_instance(name=filename)
        subset = 'workfile' + task.capitalize()

        data.update({
            "subset": subset,
            "asset": os.getenv("AVALON_ASSET", None),
            "label": subset,
            "publish": True,
            "family": 'workfile',
            "families": ['workfile'],
            "setMembers": [current_file],
            "frameStart": context.data['frameStart'],
            "frameEnd": context.data['frameEnd'],
            "handleStart": context.data['handleStart'],
            "handleEnd": context.data['handleEnd']
        })

        data['representations'] = [{
            'name': ext.lstrip("."),
            'ext': ext.lstrip("."),
            'files': file,
            "stagingDir": folder,
        }]

        instance.data.update(data)

        self.log.info('Collected instance: {}'.format(file))
        self.log.info('Scene path: {}'.format(current_file))
        self.log.info('staging Dir: {}'.format(folder))
        self.log.info('subset: {}'.format(subset))

openpype/hosts/max/plugins/publish/extract_pointcache.py (new file)
@@ -0,0 +1,100 @@
# -*- coding: utf-8 -*-
"""
Export alembic file.

Note:
    Parameters on AlembicExport (AlembicExport.Parameter):

    ParticleAsMesh (bool): Sets whether particle shapes are exported
        as meshes.
    AnimTimeRange (enum): How animation is saved:
        #CurrentFrame: saves current frame
        #TimeSlider: saves the active time segments on time slider (default)
        #StartEnd: saves the range specified by StartFrame and EndFrame
    StartFrame (int)
    EndFrame (int)
    ShapeSuffix (bool): When set to true, appends the string "Shape" to the
        name of each exported mesh. This property is set to false by default.
    SamplesPerFrame (int): Sets the number of animation samples per frame.
    Hidden (bool): When true, export hidden geometry.
    UVs (bool): When true, export the mesh UV map channel.
    Normals (bool): When true, export the mesh normals.
    VertexColors (bool): When true, export the mesh vertex color map 0 and the
        current vertex color display data when it differs
    ExtraChannels (bool): When true, export the mesh extra map channels
        (map channels greater than channel 1)
    Velocity (bool): When true, export the mesh vertex and particle velocity
        data.
    MaterialIDs (bool): When true, export the mesh material ID as
        Alembic face sets.
    Visibility (bool): When true, export the node visibility data.
    LayerName (bool): When true, export the node layer name as an Alembic
        object property.
    MaterialName (bool): When true, export the geometry node material name as
        an Alembic object property
    ObjectID (bool): When true, export the geometry node g-buffer object ID as
        an Alembic object property.
    CustomAttributes (bool): When true, export the node and its modifiers
        custom attributes into an Alembic object compound property.
"""
import os
import pyblish.api
from openpype.pipeline import publish
from pymxs import runtime as rt
from openpype.hosts.max.api import (
    maintained_selection,
    get_all_children
)


class ExtractAlembic(publish.Extractor):
    order = pyblish.api.ExtractorOrder
    label = "Extract Pointcache"
    hosts = ["max"]
    families = ["pointcache", "camera"]

    def process(self, instance):
        start = float(instance.data.get("frameStartHandle", 1))
        end = float(instance.data.get("frameEndHandle", 1))

        container = instance.data["instance_node"]

        self.log.info("Extracting pointcache ...")

        parent_dir = self.staging_dir(instance)
        file_name = "{name}.abc".format(**instance.data)
        path = os.path.join(parent_dir, file_name)

        # We run the export
        self.log.info("Writing alembic '%s' to '%s'" % (file_name,
                                                        parent_dir))

        abc_export_cmd = (
            f"""
AlembicExport.ArchiveType = #ogawa
AlembicExport.CoordinateSystem = #maya
AlembicExport.StartFrame = {start}
AlembicExport.EndFrame = {end}

exportFile @"{path}" #noPrompt selectedOnly:on using:AlembicExport

""")

        self.log.debug(f"Executing command: {abc_export_cmd}")

        with maintained_selection():
            # select and export
            rt.select(get_all_children(rt.getNodeByName(container)))
            rt.execute(abc_export_cmd)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'abc',
            'ext': 'abc',
            'files': file_name,
            "stagingDir": parent_dir,
        }
        instance.data["representations"].append(representation)

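The Note above maps one-to-one onto assignments on the AlembicExport interface; process() only sets four of them before running exportFile. A sketch of toggling a few more, not part of this commit, with illustrative values and assuming pymxs exposes the interface properties as usual:

import os
from pymxs import runtime as rt

path = os.path.join("C:/temp", "demo.abc")  # illustrative output path

# properties from the Note above, assigned before the export call
rt.AlembicExport.ArchiveType = rt.Name("ogawa")
rt.AlembicExport.AnimTimeRange = rt.Name("StartEnd")
rt.AlembicExport.StartFrame = 1001
rt.AlembicExport.EndFrame = 1100
rt.AlembicExport.SamplesPerFrame = 2  # extra samples for motion blur
rt.AlembicExport.UVs = True
rt.AlembicExport.Normals = True

# same MAXScript export call the extractor builds as a string
rt.execute(f'exportFile @"{path}" #noPrompt selectedOnly:on using:AlembicExport')
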
openpype/hosts/max/plugins/publish/validate_scene_saved.py (new file)
@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline import PublishValidationError
from pymxs import runtime as rt


class ValidateSceneSaved(pyblish.api.InstancePlugin):
    """Validate that workfile was saved."""

    order = pyblish.api.ValidatorOrder
    families = ["workfile"]
    hosts = ["max"]
    label = "Validate Workfile is saved"

    def process(self, instance):
        if not rt.maxFilePath or not rt.maxFileName:
            raise PublishValidationError(
                "Workfile is not saved", title=self.label)

openpype/hosts/max/startup/startup.ms (new file)
@@ -0,0 +1,9 @@
-- OpenPype Init Script
(
    local sysPath = dotNetClass "System.IO.Path"
    local sysDir = dotNetClass "System.IO.Directory"
    local localScript = getThisScriptFilename()
    local startup = sysPath.Combine (sysPath.GetDirectoryName localScript) "startup.py"

    python.ExecuteFile startup
)

openpype/hosts/max/startup/startup.py (new file)
@@ -0,0 +1,6 @@
# -*- coding: utf-8 -*-
from openpype.hosts.max.api import MaxHost
from openpype.pipeline import install_host

host = MaxHost()
install_host(host)

openpype/hosts/maya/api/gltf.py (new file)
@@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-
"""Tools to work with GLTF."""
import logging

from maya import cmds, mel  # noqa

log = logging.getLogger(__name__)

_gltf_options = {
    "of": str,  # outputFolder
    "cpr": str,  # copyright
    "sno": bool,  # selectedNodeOnly
    "sn": str,  # sceneName
    "glb": bool,  # binary
    "nbu": bool,  # niceBufferURIs
    "hbu": bool,  # hashBufferURI
    "ext": bool,  # externalTextures
    "ivt": int,  # initialValuesTime
    "acn": str,  # animationClipName
    "ast": int,  # animationClipStartTime
    "aet": int,  # animationClipEndTime
    "afr": float,  # animationClipFrameRate
    "dsa": int,  # detectStepAnimations
    "mpa": str,  # meshPrimitiveAttributes
    "bpa": str,  # blendPrimitiveAttributes
    "i32": bool,  # force32bitIndices
    "ssm": bool,  # skipStandardMaterials
    "eut": bool,  # excludeUnusedTexcoord
    "dm": bool,  # defaultMaterial
    "cm": bool,  # colorizeMaterials
    "dmy": str,  # dumpMaya
    "dgl": str,  # dumpGLTF
    "imd": str,  # ignoreMeshDeformers
    "ssc": bool,  # skipSkinClusters
    "sbs": bool,  # skipBlendShapes
    "rvp": bool,  # redrawViewport
    "vno": bool  # visibleNodesOnly
}


def extract_gltf(parent_dir,
                 filename,
                 **kwargs):

    """Sets GLTF export options from data in the instance.

    """

    cmds.loadPlugin('maya2glTF', quiet=True)
    # load the UI to run mel command
    mel.eval("maya2glTF_UI()")

    parent_dir = parent_dir.replace('\\', '/')
    options = {
        "dsa": 1,
        "glb": True
    }
    options.update(kwargs)

    for key, value in options.copy().items():
        if key not in _gltf_options:
            log.warning("extract_gltf() does not support option '%s'. "
                        "Flag will be ignored...", key)
            options.pop(key)
            continue

    job_args = list()
    default_opt = "maya2glTF -of \"{0}\" -sn \"{1}\"".format(parent_dir, filename)  # noqa
    job_args.append(default_opt)

    for key, value in options.items():
        if isinstance(value, str):
            job_args.append("-{0} \"{1}\"".format(key, value))
        elif isinstance(value, bool):
            if value:
                job_args.append("-{0}".format(key))
        else:
            job_args.append("-{0} {1}".format(key, value))

    job_str = " ".join(job_args)
    log.info("{}".format(job_str))
    mel.eval(job_str)

    # close the gltf exporter window after the export finishes
    gltf_UI = "maya2glTF_exporter_window"
    if cmds.window(gltf_UI, q=True, exists=True):
        cmds.deleteUI(gltf_UI)

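The flag composition is mechanical: strings are quoted, True booleans become bare flags, False booleans are dropped, and everything else is rendered as "-key value". A worked example, not part of this commit, assuming Maya with the maya2glTF plugin installed (folder and names are made up):

from openpype.hosts.maya.api.gltf import extract_gltf

extract_gltf("C:/exports", "hero", acn="idle", vno=True)

# composes and mel.eval()s roughly:
#   maya2glTF -of "C:/exports" -sn "hero" -dsa 1 -glb -acn "idle" -vno
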
openpype/hosts/maya/api/lib.py (modified)
@@ -128,13 +128,18 @@ def get_main_window():

 @contextlib.contextmanager
 def suspended_refresh(suspend=True):
-    """Suspend viewport refreshes"""
-    original_state = cmds.refresh(query=True, suspend=True)
+    """Suspend viewport refreshes
+
+    cmds.ogs(pause=True) is a toggle so we can't pass False.
+    """
+    original_state = cmds.ogs(query=True, pause=True)
     try:
-        cmds.refresh(suspend=suspend)
+        if suspend and not original_state:
+            cmds.ogs(pause=True)
         yield
     finally:
-        cmds.refresh(suspend=original_state)
+        if suspend and not original_state:
+            cmds.ogs(pause=True)


 @contextlib.contextmanager

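The guard is needed because cmds.ogs(pause=True) is a toggle: calling it twice unconditionally could un-pause a viewport the user had paused themselves. A usage sketch, not part of this commit, assuming a live Maya session:

from maya import cmds
from openpype.hosts.maya.api.lib import suspended_refresh

# the viewport is paused once on entry and restored once on exit;
# if it was already paused, the context manager leaves it alone
with suspended_refresh():
    for i in range(100):
        cmds.polyCube()  # heavy scene edits without viewport redraws
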
(modified file)
@@ -1,5 +1,3 @@
-from collections import OrderedDict
-
 from openpype.hosts.maya.api import (
     lib,
     plugin

@@ -9,12 +7,26 @@ from maya import cmds

 class CreateAss(plugin.Creator):
-    """Arnold Archive"""
+    """Arnold Scene Source"""

     name = "ass"
-    label = "Ass StandIn"
+    label = "Arnold Scene Source"
     family = "ass"
     icon = "cube"
+    expandProcedurals = False
+    motionBlur = True
+    motionBlurKeys = 2
+    motionBlurLength = 0.5
+    maskOptions = False
+    maskCamera = False
+    maskLight = False
+    maskShape = False
+    maskShader = False
+    maskOverride = False
+    maskDriver = False
+    maskFilter = False
+    maskColor_manager = False
+    maskOperator = False

     def __init__(self, *args, **kwargs):
         super(CreateAss, self).__init__(*args, **kwargs)

@@ -22,17 +34,27 @@ class CreateAss(plugin.Creator):
         # Add animation data
         self.data.update(lib.collect_animation_data())

-        # Vertex colors with the geometry
-        self.data["exportSequence"] = False
+        self.data["expandProcedurals"] = self.expandProcedurals
+        self.data["motionBlur"] = self.motionBlur
+        self.data["motionBlurKeys"] = self.motionBlurKeys
+        self.data["motionBlurLength"] = self.motionBlurLength
+
+        # Masks
+        self.data["maskOptions"] = self.maskOptions
+        self.data["maskCamera"] = self.maskCamera
+        self.data["maskLight"] = self.maskLight
+        self.data["maskShape"] = self.maskShape
+        self.data["maskShader"] = self.maskShader
+        self.data["maskOverride"] = self.maskOverride
+        self.data["maskDriver"] = self.maskDriver
+        self.data["maskFilter"] = self.maskFilter
+        self.data["maskColor_manager"] = self.maskColor_manager
+        self.data["maskOperator"] = self.maskOperator

     def process(self):
         instance = super(CreateAss, self).process()

-        # data = OrderedDict(**self.data)
-
-        nodes = list()
+        nodes = []

         if (self.options or {}).get("useSelection"):
             nodes = cmds.ls(selection=True)

@@ -42,7 +64,3 @@ class CreateAss(plugin.Creator):
         assContent = cmds.sets(name="content_SET")
         assProxy = cmds.sets(name="proxy_SET", empty=True)
         cmds.sets([assContent, assProxy], forceElement=instance)
-
-        # self.log.info(data)
-        #
-        # self.data = data

openpype/hosts/maya/plugins/create/create_proxy_abc.py (new file)
@@ -0,0 +1,35 @@
from openpype.hosts.maya.api import (
    lib,
    plugin
)


class CreateProxyAlembic(plugin.Creator):
    """Proxy Alembic for animated data"""

    name = "proxyAbcMain"
    label = "Proxy Alembic"
    family = "proxyAbc"
    icon = "gears"
    write_color_sets = False
    write_face_sets = False

    def __init__(self, *args, **kwargs):
        super(CreateProxyAlembic, self).__init__(*args, **kwargs)

        # Add animation data
        self.data.update(lib.collect_animation_data())

        # Vertex colors with the geometry.
        self.data["writeColorSets"] = self.write_color_sets
        # Face sets with the geometry.
        self.data["writeFaceSets"] = self.write_face_sets
        # Default to exporting world-space
        self.data["worldSpace"] = True

        # name suffix for the bounding box
        self.data["nameSuffix"] = "_BBox"

        # Add options for custom attributes
        self.data["attr"] = ""
        self.data["attrPrefix"] = ""

(modified file)
@@ -48,3 +48,21 @@ class CreateUnrealSkeletalMesh(plugin.Creator):
                 cmds.sets(node, forceElement=joints_set)
             else:
                 cmds.sets(node, forceElement=geometry_set)
+
+        # Add animation data
+        self.data.update(lib.collect_animation_data())
+
+        # Only renderable visible shapes
+        self.data["renderableOnly"] = False
+        # only nodes that are visible
+        self.data["visibleOnly"] = False
+        # Include parent groups
+        self.data["includeParentHierarchy"] = False
+        # Default to exporting world-space
+        self.data["worldSpace"] = True
+        # Default to suspend refresh.
+        self.data["refresh"] = False
+
+        # Add options for custom attributes
+        self.data["attr"] = ""
+        self.data["attrPrefix"] = ""

(modified file)
@@ -14,6 +14,7 @@ class SetFrameRangeLoader(load.LoaderPlugin):

     families = ["animation",
                 "camera",
+                "proxyAbc",
                 "pointcache"]
     representations = ["abc"]

@@ -48,6 +49,7 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin):

     families = ["animation",
                 "camera",
+                "proxyAbc",
                 "pointcache"]
     representations = ["abc"]

(modified file)
@@ -11,7 +11,7 @@ from openpype.settings import get_project_settings
 class AlembicStandinLoader(load.LoaderPlugin):
     """Load Alembic as Arnold Standin"""

-    families = ["animation", "model", "pointcache"]
+    families = ["animation", "model", "proxyAbc", "pointcache"]
     representations = ["abc"]

     label = "Import Alembic as Arnold Standin"

(modified file)
@@ -10,7 +10,7 @@ from openpype.settings import get_project_settings
 class GpuCacheLoader(load.LoaderPlugin):
     """Load Alembic as gpuCache"""

-    families = ["model", "animation", "pointcache"]
+    families = ["model", "animation", "proxyAbc", "pointcache"]
     representations = ["abc"]

     label = "Import Gpu Cache"

(modified file)
@@ -16,6 +16,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):

     families = ["model",
                 "pointcache",
+                "proxyAbc",
                 "animation",
                 "mayaAscii",
                 "mayaScene",

(modified file)
@@ -1,4 +1,5 @@
 from maya import cmds
+from openpype.pipeline.publish import KnownPublishError

 import pyblish.api

@@ -6,6 +7,7 @@ import pyblish.api
 class CollectAssData(pyblish.api.InstancePlugin):
     """Collect Ass data."""

+    # Offset to be after renderable camera collection.
     order = pyblish.api.CollectorOrder + 0.2
     label = 'Collect Ass'
     families = ["ass"]

@@ -23,8 +25,23 @@ class CollectAssData(pyblish.api.InstancePlugin):
                 instance.data['setMembers'] = members
                 self.log.debug('content members: {}'.format(members))
             elif objset.startswith("proxy_SET"):
-                assert len(members) == 1, "You have multiple proxy meshes, please only use one"
+                if len(members) != 1:
+                    msg = "You have multiple proxy meshes, please only use one"
+                    raise KnownPublishError(msg)
                 instance.data['proxy'] = members
                 self.log.debug('proxy members: {}'.format(members))

+        # Use camera in object set if present else default to render globals
+        # camera.
+        cameras = cmds.ls(type="camera", long=True)
+        renderable = [c for c in cameras if cmds.getAttr("%s.renderable" % c)]
+        camera = renderable[0]
+        for node in instance.data["setMembers"]:
+            camera_shapes = cmds.listRelatives(
+                node, shapes=True, type="camera"
+            )
+            if camera_shapes:
+                camera = node
+        instance.data["camera"] = camera
+
         self.log.debug("data: {}".format(instance.data))

openpype/hosts/maya/plugins/publish/collect_gltf.py (new file)
@@ -0,0 +1,17 @@
# -*- coding: utf-8 -*-
import pyblish.api


class CollectGLTF(pyblish.api.InstancePlugin):
    """Collect Assets for GLTF/GLB export."""

    order = pyblish.api.CollectorOrder + 0.2
    label = "Collect Asset for GLTF/GLB export"
    families = ["model", "animation", "pointcache"]

    def process(self, instance):
        if not instance.data.get("families"):
            instance.data["families"] = []

        if "gltf" not in instance.data["families"]:
            instance.data["families"].append("gltf")

(modified file)
@@ -1,77 +1,93 @@
 import os

 from maya import cmds
+import arnold

 from openpype.pipeline import publish
-from openpype.hosts.maya.api.lib import maintained_selection
+from openpype.hosts.maya.api.lib import maintained_selection, attribute_values


 class ExtractAssStandin(publish.Extractor):
-    """Extract the content of the instance to a ass file
+    """Extract the content of the instance to a ass file"""

-    Things to pay attention to:
-        - If animation is toggled, are the frames correct
-        -
-    """
-
-    label = "Ass Standin (.ass)"
+    label = "Arnold Scene Source (.ass)"
     hosts = ["maya"]
     families = ["ass"]
     asciiAss = False

     def process(self, instance):
-        sequence = instance.data.get("exportSequence", False)
-
         staging_dir = self.staging_dir(instance)
         filename = "{}.ass".format(instance.name)
-        filenames = list()
+        filenames = []
         file_path = os.path.join(staging_dir, filename)

+        # Mask
+        mask = arnold.AI_NODE_ALL
+
+        node_types = {
+            "options": arnold.AI_NODE_OPTIONS,
+            "camera": arnold.AI_NODE_CAMERA,
+            "light": arnold.AI_NODE_LIGHT,
+            "shape": arnold.AI_NODE_SHAPE,
+            "shader": arnold.AI_NODE_SHADER,
+            "override": arnold.AI_NODE_OVERRIDE,
+            "driver": arnold.AI_NODE_DRIVER,
+            "filter": arnold.AI_NODE_FILTER,
+            "color_manager": arnold.AI_NODE_COLOR_MANAGER,
+            "operator": arnold.AI_NODE_OPERATOR
+        }
+
+        for key in node_types.keys():
+            if instance.data.get("mask" + key.title()):
+                mask = mask ^ node_types[key]
+
+        # Motion blur
+        values = {
+            "defaultArnoldRenderOptions.motion_blur_enable": instance.data.get(
+                "motionBlur", True
+            ),
+            "defaultArnoldRenderOptions.motion_steps": instance.data.get(
+                "motionBlurKeys", 2
+            ),
+            "defaultArnoldRenderOptions.motion_frames": instance.data.get(
+                "motionBlurLength", 0.5
+            )
+        }
+
         # Write out .ass file
+        kwargs = {
+            "filename": file_path,
+            "startFrame": instance.data.get("frameStartHandle", 1),
+            "endFrame": instance.data.get("frameEndHandle", 1),
+            "frameStep": instance.data.get("step", 1),
+            "selected": True,
+            "asciiAss": self.asciiAss,
+            "shadowLinks": True,
+            "lightLinks": True,
+            "boundingBox": True,
+            "expandProcedurals": instance.data.get("expandProcedurals", False),
+            "camera": instance.data["camera"],
+            "mask": mask
+        }
+
         self.log.info("Writing: '%s'" % file_path)
-        with maintained_selection():
-            self.log.info("Writing: {}".format(instance.data["setMembers"]))
-            cmds.select(instance.data["setMembers"], noExpand=True)
-
-            if sequence:
-                self.log.info("Extracting ass sequence")
-
-                # Collect the start and end including handles
-                start = instance.data.get("frameStartHandle", 1)
-                end = instance.data.get("frameEndHandle", 1)
-                step = instance.data.get("step", 0)
-
-                exported_files = cmds.arnoldExportAss(filename=file_path,
-                                                      selected=True,
-                                                      asciiAss=self.asciiAss,
-                                                      shadowLinks=True,
-                                                      lightLinks=True,
-                                                      boundingBox=True,
-                                                      startFrame=start,
-                                                      endFrame=end,
-                                                      frameStep=step
-                                                      )
+        with attribute_values(values):
+            with maintained_selection():
+                self.log.info(
+                    "Writing: {}".format(instance.data["setMembers"])
+                )
+                cmds.select(instance.data["setMembers"], noExpand=True)
+
+                self.log.info(
+                    "Extracting ass sequence with: {}".format(kwargs)
+                )
+
+                exported_files = cmds.arnoldExportAss(**kwargs)
+
                 for file in exported_files:
                     filenames.append(os.path.split(file)[1])
+
                 self.log.info("Exported: {}".format(filenames))
-            else:
-                self.log.info("Extracting ass")
-                cmds.arnoldExportAss(filename=file_path,
-                                     selected=True,
-                                     asciiAss=False,
-                                     shadowLinks=True,
-                                     lightLinks=True,
-                                     boundingBox=True
-                                     )
-                self.log.info("Extracted {}".format(filename))
-                filenames = filename
-                optionals = [
-                    "frameStart", "frameEnd", "step", "handles",
-                    "handleEnd", "handleStart"
-                ]
-                for key in optionals:
-                    instance.data.pop(key, None)

         if "representations" not in instance.data:
             instance.data["representations"] = []

@@ -79,13 +95,11 @@ class ExtractAssStandin(publish.Extractor):
         representation = {
             'name': 'ass',
             'ext': 'ass',
-            'files': filenames,
-            "stagingDir": staging_dir
+            'files': filenames if len(filenames) > 1 else filenames[0],
+            "stagingDir": staging_dir,
+            'frameStart': kwargs["startFrame"]
         }

-        if sequence:
-            representation['frameStart'] = start
-
         instance.data["representations"].append(representation)

         self.log.info("Extracted instance '%s' to: %s"

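The export mask starts with every bit set, and each enabled mask* value XORs its node type's bit off, so a checked option means "exclude this node type from the .ass". A worked example, not part of this commit, assuming the same Arnold Python bindings imported above:

import arnold

mask = arnold.AI_NODE_ALL              # all bits set
mask = mask ^ arnold.AI_NODE_SHADER    # e.g. instance.data["maskShader"]
mask = mask ^ arnold.AI_NODE_OPERATOR  # e.g. instance.data["maskOperator"]

# shader and operator nodes are now excluded from the written file
assert not mask & arnold.AI_NODE_SHADER
assert mask & arnold.AI_NODE_CAMERA
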
openpype/hosts/maya/plugins/publish/extract_gltf.py (new file)
@@ -0,0 +1,65 @@
import os

from maya import cmds, mel
import pyblish.api

from openpype.pipeline import publish
from openpype.hosts.maya.api import lib
from openpype.hosts.maya.api.gltf import extract_gltf


class ExtractGLB(publish.Extractor):

    order = pyblish.api.ExtractorOrder
    hosts = ["maya"]
    label = "Extract GLB"
    families = ["gltf"]

    def process(self, instance):
        staging_dir = self.staging_dir(instance)
        filename = "{0}.glb".format(instance.name)
        path = os.path.join(staging_dir, filename)

        self.log.info("Extracting GLB to: {}".format(path))

        nodes = instance[:]

        self.log.info("Instance: {0}".format(nodes))

        start_frame = instance.data.get('frameStart') or \
            int(cmds.playbackOptions(query=True,
                                     animationStartTime=True))  # noqa
        end_frame = instance.data.get('frameEnd') or \
            int(cmds.playbackOptions(query=True,
                                     animationEndTime=True))  # noqa
        fps = mel.eval('currentTimeUnitToFPS()')

        options = {
            "sno": True,  # selectedNodeOnly
            "nbu": True,  # .bin instead of .bin0
            "ast": start_frame,
            "aet": end_frame,
            "afr": fps,
            "dsa": 1,
            "acn": instance.name,
            "glb": True,
            "vno": True  # visibleNodeOnly
        }
        with lib.maintained_selection():
            cmds.select(nodes, hi=True, noExpand=True)
            extract_gltf(staging_dir,
                         instance.name,
                         **options)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'glb',
            'ext': 'glb',
            'files': filename,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)

        self.log.info("Extract GLB successful to: {0}".format(path))

(modified file)
@@ -86,7 +86,8 @@ class ExtractAlembic(publish.Extractor):
                                                  start=start,
                                                  end=end))

-        with suspended_refresh(suspend=instance.data.get("refresh", False)):
+        suspend = not instance.data.get("refresh", False)
+        with suspended_refresh(suspend=suspend):
             with maintained_selection():
                 cmds.select(nodes, noExpand=True)
                 extract_alembic(

109
openpype/hosts/maya/plugins/publish/extract_proxy_abc.py
Normal file

@@ -0,0 +1,109 @@
import os

from maya import cmds

from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import (
    extract_alembic,
    suspended_refresh,
    maintained_selection,
    iter_visible_nodes_in_range
)


class ExtractProxyAlembic(publish.Extractor):
    """Produce an alembic for bounding box geometry
    """

    label = "Extract Proxy (Alembic)"
    hosts = ["maya"]
    families = ["proxyAbc"]

    def process(self, instance):
        name_suffix = instance.data.get("nameSuffix")
        # Collect the start and end including handles
        start = float(instance.data.get("frameStartHandle", 1))
        end = float(instance.data.get("frameEndHandle", 1))

        attrs = instance.data.get("attr", "").split(";")
        attrs = [value for value in attrs if value.strip()]
        attrs += ["cbId"]

        attr_prefixes = instance.data.get("attrPrefix", "").split(";")
        attr_prefixes = [value for value in attr_prefixes if value.strip()]

        self.log.info("Extracting Proxy Alembic..")
        dirname = self.staging_dir(instance)

        filename = "{name}.abc".format(**instance.data)
        path = os.path.join(dirname, filename)

        proxy_root = self.create_proxy_geometry(instance,
                                                name_suffix,
                                                start,
                                                end)

        options = {
            "step": instance.data.get("step", 1.0),
            "attr": attrs,
            "attrPrefix": attr_prefixes,
            "writeVisibility": True,
            "writeCreases": True,
            "writeColorSets": instance.data.get("writeColorSets", False),
            "writeFaceSets": instance.data.get("writeFaceSets", False),
            "uvWrite": True,
            "selection": True,
            "worldSpace": instance.data.get("worldSpace", True),
            "root": proxy_root
        }

        if int(cmds.about(version=True)) >= 2017:
            # Since Maya 2017 alembic supports multiple uv sets - write them.
            options["writeUVSets"] = True

        with suspended_refresh():
            with maintained_selection():
                cmds.select(proxy_root, hi=True, noExpand=True)
                extract_alembic(file=path,
                                startFrame=start,
                                endFrame=end,
                                **options)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'abc',
            'ext': 'abc',
            'files': filename,
            "stagingDir": dirname
        }
        instance.data["representations"].append(representation)

        instance.context.data["cleanupFullPaths"].append(path)

        self.log.info("Extracted {} to {}".format(instance, dirname))
        # remove the bounding box
        bbox_master = cmds.ls("bbox_grp")
        cmds.delete(bbox_master)

    def create_proxy_geometry(self, instance, name_suffix, start, end):
        nodes = instance[:]
        nodes = list(iter_visible_nodes_in_range(nodes,
                                                 start=start,
                                                 end=end))

        inst_selection = cmds.ls(nodes, long=True)
        cmds.geomToBBox(inst_selection,
                        nameSuffix=name_suffix,
                        keepOriginal=True,
                        single=False,
                        bakeAnimation=True,
                        startTime=start,
                        endTime=end)
        # create master group for bounding
        # boxes as the main root
        master_group = cmds.group(name="bbox_grp")
        bbox_sel = cmds.ls(master_group, long=True)
        self.log.debug("proxy_root: {}".format(bbox_sel))
        return bbox_sel
@@ -0,0 +1,108 @@
# -*- coding: utf-8 -*-
"""Create Unreal Skeletal Mesh data to be extracted as Alembic."""
import os
from contextlib import contextmanager

from maya import cmds  # noqa

from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import (
    extract_alembic,
    suspended_refresh,
    maintained_selection
)


@contextmanager
def renamed(original_name, renamed_name):
    # type: (str, str) -> None
    try:
        cmds.rename(original_name, renamed_name)
        yield
    finally:
        cmds.rename(renamed_name, original_name)


class ExtractUnrealSkeletalMeshAbc(publish.Extractor):
    """Extract Unreal Skeletal Mesh as Alembic from Maya. """

    label = "Extract Unreal Skeletal Mesh - Alembic"
    hosts = ["maya"]
    families = ["skeletalMesh"]
    optional = True

    def process(self, instance):
        self.log.info("Extracting pointcache..")

        geo = cmds.listRelatives(
            instance.data.get("geometry"), allDescendents=True, fullPath=True)
        joints = cmds.listRelatives(
            instance.data.get("joints"), allDescendents=True, fullPath=True)

        nodes = geo + joints

        attrs = instance.data.get("attr", "").split(";")
        attrs = [value for value in attrs if value.strip()]
        attrs += ["cbId"]

        attr_prefixes = instance.data.get("attrPrefix", "").split(";")
        attr_prefixes = [value for value in attr_prefixes if value.strip()]

        # Define output path
        staging_dir = self.staging_dir(instance)
        filename = "{0}.abc".format(instance.name)
        path = os.path.join(staging_dir, filename)

        # The export requires forward slashes because we need
        # to format it into a string in a mel expression
        path = path.replace('\\', '/')

        self.log.info("Extracting ABC to: {0}".format(path))
        self.log.info("Members: {0}".format(nodes))
        self.log.info("Instance: {0}".format(instance[:]))

        options = {
            "step": instance.data.get("step", 1.0),
            "attr": attrs,
            "attrPrefix": attr_prefixes,
            "writeVisibility": True,
            "writeCreases": True,
            "writeColorSets": instance.data.get("writeColorSets", False),
            "writeFaceSets": instance.data.get("writeFaceSets", False),
            "uvWrite": True,
            "selection": True,
            "worldSpace": instance.data.get("worldSpace", True)
        }

        self.log.info("Options: {}".format(options))

        if int(cmds.about(version=True)) >= 2017:
            # Since Maya 2017 alembic supports multiple uv sets - write them.
            options["writeUVSets"] = True

        if not instance.data.get("includeParentHierarchy", True):
            # Set the root nodes if we don't want to include parents
            # The roots are to be considered the ones that are the actual
            # direct members of the set
            options["root"] = instance.data.get("setMembers")

        with suspended_refresh(suspend=instance.data.get("refresh", False)):
            with maintained_selection():
                cmds.select(nodes, noExpand=True)
                extract_alembic(file=path,
                                # startFrame=start,
                                # endFrame=end,
                                **options)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'abc',
            'ext': 'abc',
            'files': filename,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)

        self.log.info("Extract ABC successful to: {0}".format(path))
@@ -21,12 +21,13 @@ def renamed(original_name, renamed_name):
         cmds.rename(renamed_name, original_name)


-class ExtractUnrealSkeletalMesh(publish.Extractor):
+class ExtractUnrealSkeletalMeshFbx(publish.Extractor):
     """Extract Unreal Skeletal Mesh as FBX from Maya. """

     order = pyblish.api.ExtractorOrder - 0.1
-    label = "Extract Unreal Skeletal Mesh"
+    label = "Extract Unreal Skeletal Mesh - FBX"
     families = ["skeletalMesh"]
+    optional = True

     def process(self, instance):
         fbx_exporter = fbx.FBXExtractor(log=self.log)
@@ -20,7 +20,7 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin):
     """

     order = ValidateContentsOrder
-    families = ['animation', "pointcache"]
+    families = ['animation', "pointcache", "proxyAbc"]
     hosts = ['maya']
     label = 'Animation Out Set Related Node Ids'
     actions = [
@@ -25,6 +25,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin):
     families = ["animation",
                 "pointcache",
                 "camera",
+                "proxyAbc",
                 "renderlayer",
                 "review",
                 "yeticache"]
@@ -28,7 +28,9 @@ class ValidateSkeletalMeshHierarchy(pyblish.api.InstancePlugin):
             parent.split("|")[1] for parent in (joints_parents + geo_parents)
         }

-        if len(set(parents_set)) != 1:
+        self.log.info(parents_set)
+
+        if len(set(parents_set)) > 2:
             raise PublishXmlValidationError(
                 self,
                 "Multiple roots on geometry or joints."
@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*-
import pyblish.api

from openpype.hosts.maya.api.action import (
    SelectInvalidAction,
)
from openpype.pipeline.publish import (
    RepairAction,
    ValidateContentsOrder,
)

from maya import cmds


class ValidateSkeletalMeshTriangulated(pyblish.api.InstancePlugin):
    """Validates that the geometry has been triangulated."""

    order = ValidateContentsOrder
    hosts = ["maya"]
    families = ["skeletalMesh"]
    label = "Skeletal Mesh Triangulated"
    optional = True
    actions = [
        SelectInvalidAction,
        RepairAction
    ]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "The following objects need to be triangulated: "
                "{}".format(invalid))

    @classmethod
    def get_invalid(cls, instance):
        geo = instance.data.get("geometry")

        invalid = []

        for obj in cmds.listRelatives(
                cmds.ls(geo), allDescendents=True, fullPath=True):
            n_triangles = cmds.polyEvaluate(obj, triangle=True)
            n_faces = cmds.polyEvaluate(obj, face=True)

            if not (isinstance(n_triangles, int) and isinstance(n_faces, int)):
                continue

            # We check if the number of triangles is equal to the number of
            # faces for each transform node.
            # If it is, the object is triangulated.
            if cmds.objectType(obj, i="transform") and n_triangles != n_faces:
                invalid.append(obj)

        return invalid

    @classmethod
    def repair(cls, instance):
        for node in cls.get_invalid(instance):
            cmds.polyTriangulate(node)
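A quick worked example of the triangle/face comparison the validator uses: a default cube has 6 quad faces that evaluate to 12 triangles, so `n_triangles != n_faces` and it is flagged; after `polyTriangulate` it has 12 triangular faces and 12 triangles, so the counts match. The sketch below runs the same check outside the validator (requires a Maya session; the node name is hypothetical):

    # Standalone sketch of the triangulation check above.
    from maya import cmds

    cube = cmds.polyCube(name="pCube1")[0]
    print(cmds.polyEvaluate(cube, face=True))      # 6 quad faces
    print(cmds.polyEvaluate(cube, triangle=True))  # 12 triangles -> flagged

    cmds.polyTriangulate(cube)
    print(cmds.polyEvaluate(cube, face=True))      # 12 faces == 12 triangles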
@@ -2961,7 +2961,7 @@ def get_viewer_config_from_string(input_string):
         viewer = split[1]
         display = split[0]
     elif "(" in viewer:
-        pattern = r"([\w\d\s]+).*[(](.*)[)]"
+        pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]"
         result = re.findall(pattern, viewer)
         try:
             result = result.pop()
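The widened character class above lets viewer/display names that contain "." or "-" match in full instead of being truncated at the first such character. A runnable illustration (the viewer string is hypothetical):

    import re

    old_pattern = r"([\w\d\s]+).*[(](.*)[)]"
    new_pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]"

    viewer = "Rec.709 (default)"
    print(re.findall(old_pattern, viewer))  # [('Rec', 'default')] - truncated
    print(re.findall(new_pattern, viewer))  # [('Rec.709 ', 'default')]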
162
openpype/hosts/unreal/plugins/load/load_alembic_animation.py
Normal file

@@ -0,0 +1,162 @@
# -*- coding: utf-8 -*-
"""Load Alembic Animation."""
import os

from openpype.pipeline import (
    get_representation_path,
    AVALON_CONTAINER_ID
)
from openpype.hosts.unreal.api import plugin
from openpype.hosts.unreal.api import pipeline as unreal_pipeline
import unreal  # noqa


class AnimationAlembicLoader(plugin.Loader):
    """Load Unreal SkeletalMesh from Alembic"""

    families = ["animation"]
    label = "Import Alembic Animation"
    representations = ["abc"]
    icon = "cube"
    color = "orange"

    def get_task(self, filename, asset_dir, asset_name, replace):
        task = unreal.AssetImportTask()
        options = unreal.AbcImportSettings()
        sm_settings = unreal.AbcStaticMeshSettings()
        conversion_settings = unreal.AbcConversionSettings(
            preset=unreal.AbcConversionPreset.CUSTOM,
            flip_u=False, flip_v=False,
            rotation=[0.0, 0.0, 0.0],
            scale=[1.0, 1.0, -1.0])

        task.set_editor_property('filename', filename)
        task.set_editor_property('destination_path', asset_dir)
        task.set_editor_property('destination_name', asset_name)
        task.set_editor_property('replace_existing', replace)
        task.set_editor_property('automated', True)
        task.set_editor_property('save', True)

        options.set_editor_property(
            'import_type', unreal.AlembicImportType.SKELETAL)

        options.static_mesh_settings = sm_settings
        options.conversion_settings = conversion_settings
        task.options = options

        return task

    def load(self, context, name, namespace, data):
        """Load and containerise representation into Content Browser.

        This is a two-step process. First, import the Alembic to a temporary
        path and then call `containerise()` on it - this moves all content to
        a new directory and then creates an AssetContainer there and imprints
        it with metadata. This marks the path as a container.

        Args:
            context (dict): application context
            name (str): subset name
            namespace (str): in Unreal this is basically path to container.
                             This is not passed here, so namespace is set
                             by `containerise()` because only then we know
                             the real path.
            data (dict): Those would be data to be imprinted. This is not
                         used now; data is imprinted by `containerise()`.

        Returns:
            list(str): list of container content
        """

        # Create directory for asset and openpype container
        root = "/Game/OpenPype/Assets"
        asset = context.get('asset').get('name')
        suffix = "_CON"
        if asset:
            asset_name = "{}_{}".format(asset, name)
        else:
            asset_name = "{}".format(name)
        version = context.get('version').get('name')

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/{asset}/{name}_v{version:03d}", suffix="")

        container_name += suffix

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            unreal.EditorAssetLibrary.make_directory(asset_dir)

            task = self.get_task(self.fname, asset_dir, asset_name, False)

            asset_tools = unreal.AssetToolsHelpers.get_asset_tools()
            asset_tools.import_asset_tasks([task])

            # Create Asset Container
            unreal_pipeline.create_container(
                container=container_name, path=asset_dir)

        data = {
            "schema": "openpype:container-2.0",
            "id": AVALON_CONTAINER_ID,
            "asset": asset,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["_id"],
            "parent": context["representation"]["parent"],
            "family": context["representation"]["context"]["family"]
        }
        unreal_pipeline.imprint(
            "{}/{}".format(asset_dir, container_name), data)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, representation):
        name = container["asset_name"]
        source_path = get_representation_path(representation)
        destination_path = container["namespace"]

        task = self.get_task(source_path, destination_path, name, True)

        # do the import and replace existing data
        asset_tools = unreal.AssetToolsHelpers.get_asset_tools()
        asset_tools.import_asset_tasks([task])

        container_path = f"{container['namespace']}/{container['objectName']}"

        # update metadata
        unreal_pipeline.imprint(
            container_path,
            {
                "representation": str(representation["_id"]),
                "parent": str(representation["parent"])
            })

        asset_content = unreal.EditorAssetLibrary.list_assets(
            destination_path, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = os.path.dirname(path)

        unreal.EditorAssetLibrary.delete_directory(path)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            parent_path, recursive=False
        )

        if len(asset_content) == 0:
            unreal.EditorAssetLibrary.delete_directory(parent_path)
@@ -14,7 +14,7 @@ import unreal  # noqa
 class SkeletalMeshAlembicLoader(plugin.Loader):
     """Load Unreal SkeletalMesh from Alembic"""

-    families = ["pointcache"]
+    families = ["pointcache", "skeletalMesh"]
     label = "Import Alembic Skeletal Mesh"
     representations = ["abc"]
     icon = "cube"
@@ -14,7 +14,7 @@ import unreal  # noqa
 class StaticMeshAlembicLoader(plugin.Loader):
     """Load Unreal StaticMesh from Alembic"""

-    families = ["model"]
+    families = ["model", "staticMesh"]
     label = "Import Alembic Static Mesh"
     representations = ["abc"]
     icon = "cube"
@@ -14,9 +14,9 @@ else:


 class FileTransaction(object):
-    """
+    """File transaction with rollback options.

-    The file transaction is a three step process.
+    The file transaction is a three-step process.

     1) Rename any existing files to a "temporary backup" during `process()`
     2) Copy the files to final destination during `process()`
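A minimal usage sketch of the rollback-safe transfer pattern the docstring describes, using only the API visible in this diff (`add`, `process`, `rollback`); the file paths are hypothetical:

    import logging

    transaction = FileTransaction(log=logging.getLogger("publish"))
    transaction.add("/tmp/render.0001.exr", "/publish/render.0001.exr")
    transaction.add("/tmp/render.0002.exr", "/publish/render.0002.exr",
                    mode=FileTransaction.MODE_HARDLINK)
    try:
        transaction.process()   # steps 1 + 2: back up existing, then transfer
    except Exception:
        transaction.rollback()  # failure path: restore backups, drop partials
        raise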
@@ -39,14 +39,12 @@ class FileTransaction(object):

     Warning:
         Any folders created during the transfer will not be removed.
     """
-
     MODE_COPY = 0
     MODE_HARDLINK = 1

     def __init__(self, log=None):
-
         if log is None:
             log = logging.getLogger("FileTransaction")
@@ -63,49 +61,64 @@ class FileTransaction(object):
         self._backup_to_original = {}

     def add(self, src, dst, mode=MODE_COPY):
-        """Add a new file to transfer queue"""
+        """Add a new file to transfer queue.
+
+        Args:
+            src (str): Source path.
+            dst (str): Destination path.
+            mode (MODE_COPY, MODE_HARDLINK): Transfer mode.
+        """
+
         opts = {"mode": mode}

-        src = os.path.abspath(src)
-        dst = os.path.abspath(dst)
+        src = os.path.normpath(os.path.abspath(src))
+        dst = os.path.normpath(os.path.abspath(dst))

         if dst in self._transfers:
             queued_src = self._transfers[dst][0]
             if src == queued_src:
-                self.log.debug("File transfer was already "
-                               "in queue: {} -> {}".format(src, dst))
+                self.log.debug(
+                    "File transfer was already in queue: {} -> {}".format(
+                        src, dst))
                 return
             else:
                 self.log.warning("File transfer in queue replaced..")
-                self.log.debug("Removed from queue: "
-                               "{} -> {}".format(queued_src, dst))
-        self.log.debug("Added to queue: {} -> {}".format(src, dst))
+                self.log.debug(
+                    "Removed from queue: {} -> {} replaced by {} -> {}".format(
+                        queued_src, dst, src, dst))

         self._transfers[dst] = (src, opts)

     def process(self):
         # Backup any existing files
-        for dst in self._transfers.keys():
-            if os.path.exists(dst):
+        for dst, (src, _) in self._transfers.items():
+            if dst == src or not os.path.exists(dst):
+                continue
+
             # Backup original file
             # todo: add timestamp or uuid to ensure unique
             backup = dst + ".bak"
             self._backup_to_original[backup] = dst
-            self.log.debug("Backup existing file: "
-                           "{} -> {}".format(dst, backup))
+            self.log.debug(
+                "Backup existing file: {} -> {}".format(dst, backup))
             os.rename(dst, backup)

         # Copy the files to transfer
         for dst, (src, opts) in self._transfers.items():
+            if dst == src:
+                self.log.debug(
+                    "Source and destination are same files {} -> {}".format(
+                        src, dst))
+                continue
+
             self._create_folder_for_file(dst)

             if opts["mode"] == self.MODE_COPY:
                 self.log.debug("Copying file ... {} -> {}".format(src, dst))
                 copyfile(src, dst)
             elif opts["mode"] == self.MODE_HARDLINK:
-                self.log.debug("Hardlinking file ... {} -> {}".format(src,
-                                                                      dst))
+                self.log.debug("Hardlinking file ... {} -> {}".format(
+                    src, dst))
                 create_hard_link(src, dst)

             self._transferred.append(dst)
@@ -116,22 +129,20 @@ class FileTransaction(object):
             try:
                 os.remove(backup)
             except OSError:
-                self.log.error("Failed to remove backup file: "
-                               "{}".format(backup),
-                               exc_info=True)
+                self.log.error(
+                    "Failed to remove backup file: {}".format(backup),
+                    exc_info=True)

     def rollback(self):
-
         errors = 0

         # Rollback any transferred files
         for path in self._transferred:
             try:
                 os.remove(path)
             except OSError:
                 errors += 1
-                self.log.error("Failed to rollback created file: "
-                               "{}".format(path),
-                               exc_info=True)
+                self.log.error(
+                    "Failed to rollback created file: {}".format(path),
+                    exc_info=True)

         # Rollback the backups
@@ -140,13 +151,15 @@ class FileTransaction(object):
                 os.rename(backup, original)
             except OSError:
                 errors += 1
-                self.log.error("Failed to restore original file: "
-                               "{} -> {}".format(backup, original),
-                               exc_info=True)
+                self.log.error(
+                    "Failed to restore original file: {} -> {}".format(
+                        backup, original),
+                    exc_info=True)

         if errors:
-            self.log.error("{} errors occurred during "
-                           "rollback.".format(errors), exc_info=True)
+            self.log.error(
+                "{} errors occurred during rollback.".format(errors),
+                exc_info=True)
             six.reraise(*sys.exc_info())

     @property
@@ -422,7 +422,7 @@ class TemplateResult(str):

         cls = self.__class__
         return cls(
-            os.path.normpath(self),
+            os.path.normpath(self.replace("\\", "/")),
             self.template,
             self.solved,
             self.used_values,
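The one-line change above matters for Windows-style paths: on POSIX systems `os.path.normpath` does not touch backslashes, so mixed separators would survive normalization. A tiny runnable illustration (the path is hypothetical):

    import os

    mixed = "publish\\shot01/render.exr"
    # On Linux, normpath alone leaves the backslash in place:
    print(os.path.normpath(mixed))                     # publish\shot01/render.exr
    # Replacing backslashes first yields a consistent result:
    print(os.path.normpath(mixed.replace("\\", "/")))  # publish/shot01/render.exr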
@@ -77,26 +77,38 @@ def get_transcode_temp_directory():
     )


-def get_oiio_info_for_input(filepath, logger=None):
+def get_oiio_info_for_input(filepath, logger=None, subimages=False):
     """Call oiiotool to get information about input and return stdout.

     Stdout should contain xml format string.
     """
     args = [
-        get_oiio_tools_path(), "--info", "-v", "-i:infoformat=xml", filepath
+        get_oiio_tools_path(),
+        "--info",
+        "-v"
     ]
+    if subimages:
+        args.append("-a")
+
+    args.extend(["-i:infoformat=xml", filepath])
+
     output = run_subprocess(args, logger=logger)
     output = output.replace("\r\n", "\n")

     xml_started = False
+    subimages_lines = []
     lines = []
     for line in output.split("\n"):
         if not xml_started:
             if not line.startswith("<"):
                 continue
             xml_started = True

         if xml_started:
             lines.append(line)
+            if line == "</ImageSpec>":
+                subimages_lines.append(lines)
+                lines = []

     if not xml_started:
         raise ValueError(
@@ -105,12 +117,19 @@ def get_oiio_info_for_input(filepath, logger=None):
         )
     )

-    xml_text = "\n".join(lines)
-    return parse_oiio_xml_output(xml_text, logger=logger)
+    output = []
+    for subimage_lines in subimages_lines:
+        xml_text = "\n".join(subimage_lines)
+        output.append(parse_oiio_xml_output(xml_text, logger=logger))
+
+    if subimages:
+        return output
+    return output[0]


 class RationalToInt:
     """Rational value stored as division of 2 integers using string."""

     def __init__(self, string_value):
         parts = string_value.split("/")
         top = float(parts[0])
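The changed return contract is worth spelling out: with `subimages=True` the function now returns a list of parsed ImageSpec dicts, one per subimage; otherwise it still returns just the first one. A hedged caller-side sketch, assuming the function is imported from the module patched here and the EXR path is hypothetical:

    info = get_oiio_info_for_input("/renders/beauty.exr")
    print(info["attribs"].get("compression"))   # single ImageSpec dict

    parts = get_oiio_info_for_input("/renders/beauty.exr", subimages=True)
    print(len(parts))                           # one entry per subimage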
@@ -157,16 +176,16 @@ def convert_value_by_type_name(value_type, value, logger=None):
     if value_type == "int":
         return int(value)

-    if value_type == "float":
+    if value_type in ("float", "double"):
         return float(value)

     # Vectors will probably have more types
-    if value_type in ("vec2f", "float2"):
+    if value_type in ("vec2f", "float2", "float2d"):
         return [float(item) for item in value.split(",")]

     # Matrix should always have square size of element 3x3, 4x4
     # - are returned as list of lists
-    if value_type == "matrix":
+    if value_type in ("matrix", "matrixd"):
         output = []
         current_index = -1
         parts = value.split(",")
@@ -198,7 +217,7 @@ def convert_value_by_type_name(value_type, value, logger=None):
     if value_type == "rational2i":
         return RationalToInt(value)

-    if value_type == "vector":
+    if value_type in ("vector", "vectord"):
         parts = [part.strip() for part in value.split(",")]
         output = []
         for part in parts:
@@ -380,6 +399,10 @@ def should_convert_for_ffmpeg(src_filepath):
     if not input_info:
         return None

+    subimages = input_info.get("subimages")
+    if subimages is not None and subimages > 1:
+        return True
+
     # Check compression
     compression = input_info["attribs"].get("compression")
     if compression in ("dwaa", "dwab"):
@@ -453,7 +476,7 @@ def convert_for_ffmpeg(
     if input_frame_start is not None and input_frame_end is not None:
         is_sequence = int(input_frame_end) != int(input_frame_start)

-    input_info = get_oiio_info_for_input(first_input_path)
+    input_info = get_oiio_info_for_input(first_input_path, logger=logger)

     # Change compression only if source compression is "dwaa" or "dwab"
     # - they're not supported in ffmpeg
@@ -488,13 +511,21 @@ def convert_for_ffmpeg(
         input_channels.append(alpha)
     input_channels_str = ",".join(input_channels)

-    oiio_cmd.extend([
-        # Tell oiiotool which channels should be loaded
-        # - other channels are not loaded to memory so helps to avoid memory
-        # leak issues
-        "-i:ch={}".format(input_channels_str), first_input_path,
+    subimages = input_info.get("subimages")
+    input_arg = "-i"
+    if subimages is None or subimages == 1:
+        # Tell oiiotool which channels should be loaded
+        # - other channels are not loaded to memory so helps to avoid memory
+        # leak issues
+        # - this option is crashing if used on multipart/subimages exrs
+        input_arg += ":ch={}".format(input_channels_str)
+
+    oiio_cmd.extend([
+        input_arg, first_input_path,
         # Tell oiiotool which channels should be put to top stack (and output)
-        "--ch", channels_arg
+        "--ch", channels_arg,
+        # Use first subimage
+        "--subimage", "0"
     ])

     # Add frame definitions to arguments
@@ -588,7 +619,7 @@ def convert_input_paths_for_ffmpeg(
             " \".exr\" extension. Got \"{}\"."
         ).format(ext))

-    input_info = get_oiio_info_for_input(first_input_path)
+    input_info = get_oiio_info_for_input(first_input_path, logger=logger)

     # Change compression only if source compression is "dwaa" or "dwab"
     # - they're not supported in ffmpeg
@@ -606,12 +637,22 @@ def convert_input_paths_for_ffmpeg(

     red, green, blue, alpha = review_channels
     input_channels = [red, green, blue]
+    # TODO find subimage index where rgba is available for multipart exrs
     channels_arg = "R={},G={},B={}".format(red, green, blue)
     if alpha is not None:
         channels_arg += ",A={}".format(alpha)
         input_channels.append(alpha)
     input_channels_str = ",".join(input_channels)

+    subimages = input_info.get("subimages")
+    input_arg = "-i"
+    if subimages is None or subimages == 1:
+        # Tell oiiotool which channels should be loaded
+        # - other channels are not loaded to memory so helps to avoid memory
+        # leak issues
+        # - this option is crashing if used on multipart exrs
+        input_arg += ":ch={}".format(input_channels_str)
+
     for input_path in input_paths:
         # Prepare subprocess arguments
         oiio_cmd = [
@@ -625,13 +666,12 @@ def convert_input_paths_for_ffmpeg(
             oiio_cmd.extend(["--compression", compression])

         oiio_cmd.extend([
-            # Tell oiiotool which channels should be loaded
-            # - other channels are not loaded to memory so helps to
-            # avoid memory leak issues
-            "-i:ch={}".format(input_channels_str), input_path,
+            input_arg, input_path,
             # Tell oiiotool which channels should be put to top stack
             # (and output)
-            "--ch", channels_arg
+            "--ch", channels_arg,
+            # Use first subimage
+            "--subimage", "0"
         ])

         for attr_name, attr_value in input_info["attribs"].items():
@@ -135,9 +135,9 @@ class FirstVersionStatus(BaseEvent):

         new_status = asset_version_statuses.get(found_item["status"])
         if not new_status:
-            self.log.warning(
+            self.log.warning((
                 "AssetVersion doesn't have status `{}`."
-            ).format(found_item["status"])
+            ).format(found_item["status"]))
             continue

         try:
@@ -1,12 +1,9 @@
 import os
+import threading

 import gazu

-from openpype.client import (
-    get_project,
-    get_assets,
-    get_asset_by_name
-)
+from openpype.client import get_project, get_assets, get_asset_by_name
 from openpype.pipeline import AvalonMongoDB
 from .credentials import validate_credentials
 from .update_op_with_zou import (
@@ -397,6 +394,13 @@ def start_listeners(login: str, password: str):
         login (str): Kitsu user login
         password (str): Kitsu user password
     """
+    # Refresh token every week
+    def refresh_token_every_week():
+        print("Refreshing token...")
+        gazu.refresh_token()
+        threading.Timer(7 * 3600 * 24, refresh_token_every_week).start()
+
+    refresh_token_every_week()

     # Connect to server
     listener = Listener(login, password)
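The re-arming timer added above is a small but easy-to-miss pattern: each run schedules the next one, so the refresh keeps firing for the lifetime of the process (7 * 3600 * 24 = 604800 seconds, i.e. one week). A standalone sketch of the same pattern with a short interval for illustration:

    # Self-rescheduling timer sketch (same shape as
    # refresh_token_every_week above); interval shortened to 5 seconds.
    import threading

    def tick():
        print("tick")
        # Re-arm: each run schedules the next one.
        threading.Timer(5, tick).start()

    tick()

One caveat worth noting: `threading.Timer` threads are non-daemon by default, so they keep the interpreter alive unless cancelled.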
@@ -1,21 +1,27 @@
+import collections
 import pyblish.api

 from openpype.client import (
-    get_last_version_by_subset_name,
+    get_assets,
+    get_subsets,
+    get_last_versions,
     get_representations,
 )
-from openpype.pipeline import (
-    legacy_io,
-    get_representation_path,
-)
+from openpype.pipeline.load import get_representation_path_with_anatomy


-class CollectAudio(pyblish.api.InstancePlugin):
+class CollectAudio(pyblish.api.ContextPlugin):
     """Collect asset's last published audio.

     The audio subset name searched for is defined in:
         project settings > Collect Audio
+
+    Note:
+        The plugin was an instance plugin, but its many queries slowed the
+        whole collection phase down considerably, so it was converted to a
+        context plugin which needs only 4 queries at most.
     """

     label = "Collect Asset Audio"
     order = pyblish.api.CollectorOrder + 0.1
     families = ["review"]
@@ -39,67 +45,134 @@ class CollectAudio(pyblish.api.InstancePlugin):

     audio_subset_name = "audioMain"

-    def process(self, instance):
-        if instance.data.get("audio"):
-            self.log.info(
-                "Skipping Audio collection. It is already collected"
-            )
-            return
+    def process(self, context):
+        # Fake filtering by family inside context plugin
+        filtered_instances = []
+        for instance in pyblish.api.instances_by_plugin(
+            context, self.__class__
+        ):
+            # Skip instances that already have audio filled
+            if instance.data.get("audio"):
+                self.log.info(
+                    "Skipping Audio collection. It is already collected"
+                )
+                continue
+            filtered_instances.append(instance)
+
+        # Skip if none of instances remained
+        if not filtered_instances:
+            return

         # Add audio to instance if exists.
+        instances_by_asset_name = collections.defaultdict(list)
+        for instance in filtered_instances:
+            asset_name = instance.data["asset"]
+            instances_by_asset_name[asset_name].append(instance)
+
+        asset_names = set(instances_by_asset_name.keys())
         self.log.info((
-            "Searching for audio subset '{subset}'"
-            " in asset '{asset}'"
+            "Searching for audio subset '{subset}' in assets {assets}"
         ).format(
             subset=self.audio_subset_name,
-            asset=instance.data["asset"]
+            assets=", ".join([
+                '"{}"'.format(asset_name)
+                for asset_name in asset_names
+            ])
         ))

-        repre_doc = self._get_repre_doc(instance)
+        # Query all required documents
+        project_name = context.data["projectName"]
+        anatomy = context.data["anatomy"]
+        repre_docs_by_asset_names = self.query_representations(
+            project_name, asset_names)

-        # Add audio to instance if representation was found
-        if repre_doc:
-            instance.data["audio"] = [{
-                "offset": 0,
-                "filename": get_representation_path(repre_doc)
-            }]
-            self.log.info("Audio Data added to instance ...")
+        for asset_name, instances in instances_by_asset_name.items():
+            repre_docs = repre_docs_by_asset_names[asset_name]
+            if not repre_docs:
+                continue
+
+            repre_doc = repre_docs[0]
+            repre_path = get_representation_path_with_anatomy(
+                repre_doc, anatomy
+            )
+            for instance in instances:
+                instance.data["audio"] = [{
+                    "offset": 0,
+                    "filename": repre_path
+                }]
+                self.log.info("Audio Data added to instance ...")

-    def _get_repre_doc(self, instance):
-        cache = instance.context.data.get("__cache_asset_audio")
-        if cache is None:
-            cache = {}
-            instance.context.data["__cache_asset_audio"] = cache
-        asset_name = instance.data["asset"]
-
-        # first try to get it from cache
-        if asset_name in cache:
-            return cache[asset_name]
-
-        project_name = legacy_io.active_project()
-
-        # Find latest versions document
-        last_version_doc = get_last_version_by_subset_name(
-            project_name,
-            self.audio_subset_name,
-            asset_name=asset_name,
-            fields=["_id"]
-        )
-
-        repre_doc = None
-        if last_version_doc:
-            # Try to find its representation (Expected there is only one)
-            repre_docs = list(get_representations(
-                project_name, version_ids=[last_version_doc["_id"]]
-            ))
-            if not repre_docs:
-                self.log.warning(
-                    "Version document does not contain any representations"
-                )
-            else:
-                repre_doc = repre_docs[0]
-
-        # update cache
-        cache[asset_name] = repre_doc
-
-        return repre_doc
+    def query_representations(self, project_name, asset_names):
+        """Query representations related to audio subsets for passed assets.
+
+        Args:
+            project_name (str): Project in which we're looking for all
+                entities.
+            asset_names (Iterable[str]): Asset names where to look for audio
+                subsets and their representations.
+
+        Returns:
+            collections.defaultdict[str, List[Dict[str, Any]]]:
+                Representations related to audio subsets by asset name.
+        """
+        output = collections.defaultdict(list)
+        # Query asset documents
+        asset_docs = get_assets(
+            project_name,
+            asset_names=asset_names,
+            fields=["_id", "name"]
+        )
+
+        asset_id_by_name = {}
+        for asset_doc in asset_docs:
+            asset_id_by_name[asset_doc["name"]] = asset_doc["_id"]
+        asset_ids = set(asset_id_by_name.values())
+
+        # Query subsets with name defined by 'audio_subset_name' attr
+        # - one or none subsets with the name should be available on an asset
+        subset_docs = get_subsets(
+            project_name,
+            subset_names=[self.audio_subset_name],
+            asset_ids=asset_ids,
+            fields=["_id", "parent"]
+        )
+        subset_id_by_asset_id = {}
+        for subset_doc in subset_docs:
+            asset_id = subset_doc["parent"]
+            subset_id_by_asset_id[asset_id] = subset_doc["_id"]
+
+        subset_ids = set(subset_id_by_asset_id.values())
+        if not subset_ids:
+            return output
+
+        # Find all latest versions for the subsets
+        version_docs_by_subset_id = get_last_versions(
+            project_name, subset_ids=subset_ids, fields=["_id", "parent"]
+        )
+        version_id_by_subset_id = {
+            subset_id: version_doc["_id"]
+            for subset_id, version_doc in version_docs_by_subset_id.items()
+        }
+        version_ids = set(version_id_by_subset_id.values())
+        if not version_ids:
+            return output
+
+        # Find representations under latest versions of audio subsets
+        repre_docs = get_representations(
+            project_name, version_ids=version_ids
+        )
+        repre_docs_by_version_id = collections.defaultdict(list)
+        for repre_doc in repre_docs:
+            version_id = repre_doc["parent"]
+            repre_docs_by_version_id[version_id].append(repre_doc)
+
+        if not repre_docs_by_version_id:
+            return output
+
+        for asset_name in asset_names:
+            asset_id = asset_id_by_name.get(asset_name)
+            subset_id = subset_id_by_asset_id.get(asset_id)
+            version_id = version_id_by_subset_id.get(subset_id)
+            output[asset_name] = repre_docs_by_version_id[version_id]
+        return output
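The rewrite above follows a common collector optimization: group instances by asset first, then resolve assets, subsets, last versions and representations with one query per entity type instead of one query chain per instance. A minimal, runnable sketch of the grouping step (the instance dicts are hypothetical stand-ins for pyblish instance data):

    import collections

    instances = [
        {"asset": "sh010", "name": "reviewMain"},
        {"asset": "sh010", "name": "reviewWide"},
        {"asset": "sh020", "name": "reviewMain"},
    ]

    instances_by_asset = collections.defaultdict(list)
    for instance in instances:
        instances_by_asset[instance["asset"]].append(instance)

    # One batched lookup can now serve every instance of the same asset.
    print(sorted(instances_by_asset))  # ['sh010', 'sh020']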
@@ -25,6 +25,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
     order = pyblish.api.CollectorOrder + 0.495
     families = ["workfile",
                 "pointcache",
+                "proxyAbc",
                 "camera",
                 "animation",
                 "model",
@@ -54,6 +55,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
                 "source",
                 "assembly",
                 "fbx",
+                "gltf",
                 "textures",
                 "action",
                 "background",
@@ -81,6 +81,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
     order = pyblish.api.IntegratorOrder
     families = ["workfile",
                 "pointcache",
+                "proxyAbc",
                 "camera",
                 "animation",
                 "model",
@@ -111,6 +112,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                 "image",
                 "assembly",
                 "fbx",
+                "gltf",
                 "textures",
                 "action",
                 "harmony.template",
@@ -76,6 +76,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
     order = pyblish.api.IntegratorOrder + 0.00001
     families = ["workfile",
                 "pointcache",
+                "proxyAbc",
                 "camera",
                 "animation",
                 "model",
@@ -106,6 +107,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 "image",
                 "assembly",
                 "fbx",
+                "gltf",
                 "textures",
                 "action",
                 "harmony.template",
BIN
openpype/resources/app_icons/3dsmax.png
Normal file (binary, 12 KiB; not shown)
@@ -149,6 +149,14 @@
                 "Main"
             ]
         },
+        "CreateProxyAlembic": {
+            "enabled": true,
+            "write_color_sets": false,
+            "write_face_sets": false,
+            "defaults": [
+                "Main"
+            ]
+        },
         "CreateMultiverseUsd": {
             "enabled": true,
             "defaults": [
@@ -171,7 +179,21 @@
             "enabled": true,
             "defaults": [
                 "Main"
-            ]
+            ],
+            "expandProcedurals": false,
+            "motionBlur": true,
+            "motionBlurKeys": 2,
+            "motionBlurLength": 0.5,
+            "maskOptions": false,
+            "maskCamera": false,
+            "maskLight": false,
+            "maskShape": false,
+            "maskShader": false,
+            "maskOverride": false,
+            "maskDriver": false,
+            "maskFilter": false,
+            "maskColor_manager": false,
+            "maskOperator": false
         },
         "CreateAssembly": {
             "enabled": true,
@@ -250,6 +272,9 @@
         "CollectFbxCamera": {
             "enabled": false
         },
+        "CollectGLTF": {
+            "enabled": false
+        },
         "ValidateInstanceInContext": {
             "enabled": true,
             "optional": true,
@@ -569,6 +594,12 @@
             "optional": false,
             "active": true
         },
+        "ExtractProxyAlembic": {
+            "enabled": true,
+            "families": [
+                "proxyAbc"
+            ]
+        },
         "ExtractAlembic": {
             "enabled": true,
             "families": [
@@ -915,7 +946,7 @@
         "current_context": [
             {
                 "subset_name_filters": [
-                    "\".+[Mm]ain\""
+                    ".+[Mm]ain"
                 ],
                 "families": [
                     "model"
@@ -932,7 +963,8 @@
                 "subset_name_filters": [],
                 "families": [
                     "animation",
-                    "pointcache"
+                    "pointcache",
+                    "proxyAbc"
                 ],
                 "repre_names": [
                     "abc"
@@ -114,6 +114,35 @@
             }
         }
     },
+    "3dsmax": {
+        "enabled": true,
+        "label": "3ds max",
+        "icon": "{}/app_icons/3dsmax.png",
+        "host_name": "max",
+        "environment": {
+            "ADSK_3DSMAX_STARTUPSCRIPTS_ADDON_DIR": "{OPENPYPE_ROOT}\\openpype\\hosts\\max\\startup"
+        },
+        "variants": {
+            "2023": {
+                "use_python_2": false,
+                "executables": {
+                    "windows": [
+                        "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe"
+                    ],
+                    "darwin": [],
+                    "linux": []
+                },
+                "arguments": {
+                    "windows": [],
+                    "darwin": [],
+                    "linux": []
+                },
+                "environment": {
+                    "3DSMAX_VERSION": "2023"
+                }
+            }
+        }
+    },
     "flame": {
         "enabled": true,
         "label": "Flame",
@@ -152,6 +152,7 @@ class HostsEnumEntity(BaseEnumEntity):

     schema_types = ["hosts-enum"]
     all_host_names = [
+        "max",
         "aftereffects",
         "blender",
         "celaction",
@@ -200,7 +200,128 @@
             }
         ]
     },
+    {
+        "type": "dict",
+        "collapsible": true,
+        "key": "CreateProxyAlembic",
+        "label": "Create Proxy Alembic",
+        "checkbox_key": "enabled",
+        "children": [
+            {
+                "type": "boolean",
+                "key": "enabled",
+                "label": "Enabled"
+            },
+            {
+                "type": "boolean",
+                "key": "write_color_sets",
+                "label": "Write Color Sets"
+            },
+            {
+                "type": "boolean",
+                "key": "write_face_sets",
+                "label": "Write Face Sets"
+            },
+            {
+                "type": "list",
+                "key": "defaults",
+                "label": "Default Subsets",
+                "object_type": "text"
+            }
+        ]
+    },
+    {
+        "type": "dict",
+        "collapsible": true,
+        "key": "CreateAss",
+        "label": "Create Ass",
+        "checkbox_key": "enabled",
+        "children": [
+            {
+                "type": "boolean",
+                "key": "enabled",
+                "label": "Enabled"
+            },
+            {
+                "type": "list",
+                "key": "defaults",
+                "label": "Default Subsets",
+                "object_type": "text"
+            },
+            {
+                "type": "boolean",
+                "key": "expandProcedurals",
+                "label": "Expand Procedurals"
+            },
+            {
+                "type": "boolean",
+                "key": "motionBlur",
+                "label": "Motion Blur"
+            },
+            {
+                "type": "number",
+                "key": "motionBlurKeys",
+                "label": "Motion Blur Keys",
+                "minimum": 0
+            },
+            {
+                "type": "number",
+                "key": "motionBlurLength",
+                "label": "Motion Blur Length",
+                "decimal": 3
+            },
+            {
+                "type": "boolean",
+                "key": "maskOptions",
+                "label": "Mask Options"
+            },
+            {
+                "type": "boolean",
+                "key": "maskCamera",
+                "label": "Mask Camera"
+            },
+            {
+                "type": "boolean",
+                "key": "maskLight",
+                "label": "Mask Light"
+            },
+            {
+                "type": "boolean",
+                "key": "maskShape",
+                "label": "Mask Shape"
+            },
+            {
+                "type": "boolean",
+                "key": "maskShader",
+                "label": "Mask Shader"
+            },
+            {
+                "type": "boolean",
+                "key": "maskOverride",
+                "label": "Mask Override"
+            },
+            {
+                "type": "boolean",
+                "key": "maskDriver",
+                "label": "Mask Driver"
+            },
+            {
+                "type": "boolean",
+                "key": "maskFilter",
+                "label": "Mask Filter"
+            },
+            {
+                "type": "boolean",
+                "key": "maskColor_manager",
+                "label": "Mask Color Manager"
+            },
+            {
+                "type": "boolean",
+                "key": "maskOperator",
+                "label": "Mask Operator"
+            }
+        ]
+    },
     {
         "type": "schema_template",
         "name": "template_create_plugin",
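To make the new settings concrete, here is a hypothetical snapshot of the values the `CreateAss` dict above would hold once saved; every value is invented for illustration. Note that the next hunk removes the old one-line `CreateAss` entry from the generic plugin template, which this expanded dict supersedes.

```python
# Invented example values matching the "CreateAss" schema defined above.
create_ass_settings = {
    "enabled": True,
    "defaults": ["Main"],       # "Main" is a placeholder subset name
    "expandProcedurals": False,
    "motionBlur": True,
    "motionBlurKeys": 2,        # schema enforces "minimum": 0
    "motionBlurLength": 0.5,    # schema allows 3 decimal places
    "maskOptions": False,
    "maskCamera": False,
    # ...the remaining mask* booleans follow the same pattern
}
```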
@@ -217,10 +338,6 @@
             "key": "CreateMultiverseUsdOver",
             "label": "Create Multiverse USD Override"
         },
-        {
-            "key": "CreateAss",
-            "label": "Create Ass"
-        },
         {
             "key": "CreateAssembly",
             "label": "Create Assembly"
@@ -35,6 +35,20 @@
             }
         ]
     },
+    {
+        "type": "dict",
+        "collapsible": true,
+        "key": "CollectGLTF",
+        "label": "Collect Assets for GLTF/GLB export",
+        "checkbox_key": "enabled",
+        "children": [
+            {
+                "type": "boolean",
+                "key": "enabled",
+                "label": "Enabled"
+            }
+        ]
+    },
     {
         "type": "splitter"
     },
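A rough sketch of the kind of pyblish collector the `CollectGLTF` setting above would toggle; the class body and the "gltf" family name are assumptions, not code from this commit.

```python
import pyblish.api

class CollectGLTF(pyblish.api.InstancePlugin):
    """Tag instances for GLTF/GLB export when enabled (illustrative sketch)."""
    order = pyblish.api.CollectorOrder
    label = "Collect Assets for GLTF/GLB export"
    enabled = True  # in practice this mirrors the "enabled" setting above

    def process(self, instance):
        # Mark the instance so a later extractor can produce .gltf/.glb output.
        instance.data.setdefault("families", []).append("gltf")
```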
@@ -638,6 +652,26 @@
         "type": "label",
         "label": "Extractors"
     },
+    {
+        "type": "dict",
+        "collapsible": true,
+        "key": "ExtractProxyAlembic",
+        "label": "Extract Proxy Alembic",
+        "checkbox_key": "enabled",
+        "children": [
+            {
+                "type": "boolean",
+                "key": "enabled",
+                "label": "Enabled"
+            },
+            {
+                "key": "families",
+                "label": "Families",
+                "type": "list",
+                "object_type": "text"
+            }
+        ]
+    },
     {
         "type": "dict",
         "collapsible": true,
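Similarly, a hedged sketch of how the `families` list in `ExtractProxyAlembic` typically gates a pyblish extractor; the body is illustrative, and `proxyAbc` matches the family registered a few hunks below.

```python
import pyblish.api

class ExtractProxyAlembic(pyblish.api.InstancePlugin):
    """Run only for instances whose family matches the settings list (sketch)."""
    order = pyblish.api.ExtractorOrder
    label = "Extract Proxy Alembic"
    families = ["proxyAbc"]  # would be driven by the "families" setting above

    def process(self, instance):
        self.log.info("Extracting proxy Alembic for %s" % instance.data["name"])
```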
@@ -28,6 +28,7 @@
     {"nukenodes": "nukenodes"},
     {"plate": "plate"},
     {"pointcache": "pointcache"},
+    {"proxyAbc": "proxyAbc"},
     {"prerender": "prerender"},
     {"redshiftproxy": "redshiftproxy"},
     {"reference": "reference"},
@@ -0,0 +1,39 @@
+{
+    "type": "dict",
+    "key": "3dsmax",
+    "label": "Autodesk 3ds Max",
+    "collapsible": true,
+    "checkbox_key": "enabled",
+    "children": [
+        {
+            "type": "boolean",
+            "key": "enabled",
+            "label": "Enabled"
+        },
+        {
+            "type": "schema_template",
+            "name": "template_host_unchangables"
+        },
+        {
+            "key": "environment",
+            "label": "Environment",
+            "type": "raw-json"
+        },
+        {
+            "type": "dict-modifiable",
+            "key": "variants",
+            "collapsible_key": true,
+            "use_label_wrap": false,
+            "object_type": {
+                "type": "dict",
+                "collapsible": true,
+                "children": [
+                    {
+                        "type": "schema_template",
+                        "name": "template_host_variant_items"
+                    }
+                ]
+            }
+        }
+    ]
+}
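Since `variants` is `dict-modifiable`, its keys are user-defined. A hypothetical resolved value for this schema (all values invented) might look like:

```python
# Invented example: one user-added variant per installed Max release.
settings_3dsmax = {
    "enabled": True,
    "environment": {},            # the raw-json field above
    "variants": {                 # dict-modifiable: keys created by the user
        "2023": {
            "environment": {"3DSMAX_VERSION": "2023"},
        },
    },
}
```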
@@ -9,6 +9,10 @@
         "type": "schema",
         "name": "schema_maya"
     },
+    {
+        "type": "schema",
+        "name": "schema_3dsmax"
+    },
     {
         "type": "schema",
         "name": "schema_flame"
@@ -24,6 +24,8 @@ def main(user_role=None):
             user_role, ", ".join(allowed_roles)
         ))

-    app = QtWidgets.QApplication(sys.argv)
+    app = QtWidgets.QApplication.instance()
+    if not app:
+        app = QtWidgets.QApplication(sys.argv)
     app.setWindowIcon(QtGui.QIcon(style.app_icon_path()))
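The hunk above adds the standard Qt single-instance guard: only one QApplication may exist per process, so an existing instance is reused. A minimal standalone version (assuming PyQt5; any Qt binding with the same API behaves alike):

```python
import sys
from PyQt5 import QtWidgets  # assumption: the diff does not show which Qt binding is used

# Reuse the process-wide QApplication if one already exists.
app = QtWidgets.QApplication.instance()
if not app:
    app = QtWidgets.QApplication(sys.argv)
```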
@@ -186,19 +186,11 @@ class FamilyWidget(QtWidgets.QWidget):
         if item is None:
             return

-        asset_doc = None
-        if asset_name != self.NOT_SELECTED:
-            # Get the assets from the database which match with the name
-            project_name = self.dbcon.active_project()
-            asset_doc = get_asset_by_name(
-                project_name, asset_name, fields=["_id"]
-            )
-
-        # Get plugin and family
-        plugin = item.data(PluginRole)
-
         # Early exit if no asset name
-        if not asset_name.strip():
+        if (
+            asset_name == self.NOT_SELECTED
+            or not asset_name.strip()
+        ):
             self._build_menu([])
             item.setData(ExistsRole, False)
             print("Asset name is required ..")
@@ -210,8 +202,10 @@ class FamilyWidget(QtWidgets.QWidget):
         asset_doc = get_asset_by_name(
             project_name, asset_name, fields=["_id"]
         )

         # Get plugin
         plugin = item.data(PluginRole)

         if asset_doc and plugin:
             asset_id = asset_doc["_id"]
             task_name = self.dbcon.Session["AVALON_TASK"]
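A condensed, illustrative view of the refactored control flow in the two hunks above: validate the asset name first, then perform a single database lookup. `NOT_SELECTED`'s actual value is not shown in the diff, so a stand-in string is used.

```python
from openpype.client import get_asset_by_name  # import assumed from context

NOT_SELECTED = "< not selected >"  # stand-in; real value not shown in the diff

def resolve_asset(dbcon, item, asset_name):
    if item is None:
        return None
    if asset_name == NOT_SELECTED or not asset_name.strip():
        return None  # early exit replaces the removed up-front lookup
    project_name = dbcon.active_project()
    return get_asset_by_name(project_name, asset_name, fields=["_id"])
```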
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring Pype version."""
-__version__ = "3.14.9-nightly.1"
+__version__ = "3.14.9-nightly.3"
@@ -41,7 +41,7 @@ Click = "^7"
 dnspython = "^2.1.0"
 ftrack-python-api = "^2.3.3"
 shotgun_api3 = {git = "https://github.com/shotgunsoftware/python-api.git", rev = "v3.3.3"}
-gazu = "^0.8.28"
+gazu = "^0.8.32"
 google-api-python-client = "^1.12.8" # sync server google support (should be separate?)
 jsonschema = "^2.6.0"
 keyring = "^22.0.1"
@@ -26,6 +26,8 @@ openpype_console module kitsu sync-service -l me@domain.ext -p my_password
 ### Events listening
 Listening to Kitsu events is the key to automating many tasks like _project/episode/sequence/shot/asset/task create/update/delete_ and more. Event listening should run at all times, because some events cannot reliably be caught retrospectively. If a timeout is encountered, relaunch the `sync-service` command to run the synchronization step again.
+
+The connection token is refreshed every week.

 ### Push to Kitsu
 A utility function is provided to update Kitsu data (a.k.a. the Zou database) with OpenPype data when publishing to the production tracker hasn't been possible for some time. Running `push-to-zou` will create the data on behalf of the user.
 :::caution