Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)
[Automated] Merged develop into main
Commit: 2469edc200
54 changed files with 788 additions and 1101 deletions
.github/ISSUE_TEMPLATE/bug_report.yml (vendored): 2 changes
@@ -35,6 +35,7 @@ body:
      label: Version
      description: What version are you running? Look to OpenPype Tray
      options:
        - 3.16.5-nightly.4
        - 3.16.5-nightly.3
        - 3.16.5-nightly.2
        - 3.16.5-nightly.1
@@ -134,7 +135,6 @@ body:
        - 3.14.9-nightly.1
        - 3.14.8
        - 3.14.8-nightly.4
        - 3.14.8-nightly.3
    validations:
      required: true
  - type: dropdown
@@ -164,7 +164,7 @@ class RenderCreator(Creator):
        api.get_stub().rename_item(comp_id,
                                   new_comp_name)

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        plugin_settings = (
            project_settings["aftereffects"]["create"]["RenderCreator"]
        )
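The signature change above repeats across many creator plugins in this commit: `apply_settings` now receives only `project_settings`. A minimal sketch of the updated pattern, with a hypothetical host name and settings path used purely for illustration (the base-class import path is assumed):

```python
from openpype.pipeline.create import Creator  # import path assumed for this sketch


class ExampleCreator(Creator):
    """Hypothetical creator showing the new single-argument signature."""

    identifier = "io.example.create.example"
    family = "example"

    def apply_settings(self, project_settings):
        # system_settings is no longer passed in; everything the plugin
        # needs must now come from project_settings.
        plugin_settings = project_settings["example_host"]["create"]["ExampleCreator"]
        self.enabled = plugin_settings.get("enabled", True)
        self.default_variants = plugin_settings.get("default_variants", ["Main"])
```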
@@ -1,16 +0,0 @@
from openpype.hosts.fusion.api import (
    comp_lock_and_undo_chunk,
    get_current_comp
)


def main():
    comp = get_current_comp()
    """Set all selected backgrounds to 32 bit"""
    with comp_lock_and_undo_chunk(comp, 'Selected Backgrounds to 32bit'):
        tools = comp.GetToolList(True, "Background").values()
        for tool in tools:
            tool.Depth = 5


main()
@@ -1,16 +0,0 @@
from openpype.hosts.fusion.api import (
    comp_lock_and_undo_chunk,
    get_current_comp
)


def main():
    comp = get_current_comp()
    """Set all backgrounds to 32 bit"""
    with comp_lock_and_undo_chunk(comp, 'Backgrounds to 32bit'):
        tools = comp.GetToolList(False, "Background").values()
        for tool in tools:
            tool.Depth = 5


main()
@@ -1,16 +0,0 @@
from openpype.hosts.fusion.api import (
    comp_lock_and_undo_chunk,
    get_current_comp
)


def main():
    comp = get_current_comp()
    """Set all selected loaders to 32 bit"""
    with comp_lock_and_undo_chunk(comp, 'Selected Loaders to 32bit'):
        tools = comp.GetToolList(True, "Loader").values()
        for tool in tools:
            tool.Depth = 5


main()
@@ -1,16 +0,0 @@
from openpype.hosts.fusion.api import (
    comp_lock_and_undo_chunk,
    get_current_comp
)


def main():
    comp = get_current_comp()
    """Set all loaders to 32 bit"""
    with comp_lock_and_undo_chunk(comp, 'Loaders to 32bit'):
        tools = comp.GetToolList(False, "Loader").values()
        for tool in tools:
            tool.Depth = 5


main()
@@ -1,200 +0,0 @@
import os
import sys
import glob
import logging

from qtpy import QtWidgets, QtCore

import qtawesome as qta

from openpype.client import get_assets
from openpype import style
from openpype.pipeline import (
    install_host,
    get_current_project_name,
)
from openpype.hosts.fusion import api
from openpype.pipeline.context_tools import get_workdir_from_session

log = logging.getLogger("Fusion Switch Shot")


class App(QtWidgets.QWidget):

    def __init__(self, parent=None):

        ################################################
        # |---------------------| |------------------| #
        # |Comp | |Asset | #
        # |[..][ v]| |[ v]| #
        # |---------------------| |------------------| #
        # | Update existing comp [ ] | #
        # |------------------------------------------| #
        # | Switch | #
        # |------------------------------------------| #
        ################################################

        QtWidgets.QWidget.__init__(self, parent)

        layout = QtWidgets.QVBoxLayout()

        # Comp related input
        comp_hlayout = QtWidgets.QHBoxLayout()
        comp_label = QtWidgets.QLabel("Comp file")
        comp_label.setFixedWidth(50)
        comp_box = QtWidgets.QComboBox()

        button_icon = qta.icon("fa.folder", color="white")
        open_from_dir = QtWidgets.QPushButton()
        open_from_dir.setIcon(button_icon)

        comp_box.setFixedHeight(25)
        open_from_dir.setFixedWidth(25)
        open_from_dir.setFixedHeight(25)

        comp_hlayout.addWidget(comp_label)
        comp_hlayout.addWidget(comp_box)
        comp_hlayout.addWidget(open_from_dir)

        # Asset related input
        asset_hlayout = QtWidgets.QHBoxLayout()
        asset_label = QtWidgets.QLabel("Shot")
        asset_label.setFixedWidth(50)

        asset_box = QtWidgets.QComboBox()
        asset_box.setLineEdit(QtWidgets.QLineEdit())
        asset_box.setFixedHeight(25)

        refresh_icon = qta.icon("fa.refresh", color="white")
        refresh_btn = QtWidgets.QPushButton()
        refresh_btn.setIcon(refresh_icon)

        asset_box.setFixedHeight(25)
        refresh_btn.setFixedWidth(25)
        refresh_btn.setFixedHeight(25)

        asset_hlayout.addWidget(asset_label)
        asset_hlayout.addWidget(asset_box)
        asset_hlayout.addWidget(refresh_btn)

        # Options
        options = QtWidgets.QHBoxLayout()
        options.setAlignment(QtCore.Qt.AlignLeft)

        current_comp_check = QtWidgets.QCheckBox()
        current_comp_check.setChecked(True)
        current_comp_label = QtWidgets.QLabel("Use current comp")

        options.addWidget(current_comp_label)
        options.addWidget(current_comp_check)

        accept_btn = QtWidgets.QPushButton("Switch")

        layout.addLayout(options)
        layout.addLayout(comp_hlayout)
        layout.addLayout(asset_hlayout)
        layout.addWidget(accept_btn)

        self._open_from_dir = open_from_dir
        self._comps = comp_box
        self._assets = asset_box
        self._use_current = current_comp_check
        self._accept_btn = accept_btn
        self._refresh_btn = refresh_btn

        self.setWindowTitle("Fusion Switch Shot")
        self.setLayout(layout)

        self.resize(260, 140)
        self.setMinimumWidth(260)
        self.setFixedHeight(140)

        self.connections()

        # Update ui to correct state
        self._on_use_current_comp()
        self._refresh()

    def connections(self):
        self._use_current.clicked.connect(self._on_use_current_comp)
        self._open_from_dir.clicked.connect(self._on_open_from_dir)
        self._refresh_btn.clicked.connect(self._refresh)
        self._accept_btn.clicked.connect(self._on_switch)

    def _on_use_current_comp(self):
        state = self._use_current.isChecked()
        self._open_from_dir.setEnabled(not state)
        self._comps.setEnabled(not state)

    def _on_open_from_dir(self):

        start_dir = get_workdir_from_session()
        comp_file, _ = QtWidgets.QFileDialog.getOpenFileName(
            self, "Choose comp", start_dir)

        if not comp_file:
            return

        # Create completer
        self.populate_comp_box([comp_file])
        self._refresh()

    def _refresh(self):
        # Clear any existing items
        self._assets.clear()

        asset_names = self.collect_asset_names()
        completer = QtWidgets.QCompleter(asset_names)

        self._assets.setCompleter(completer)
        self._assets.addItems(asset_names)

    def _on_switch(self):

        if not self._use_current.isChecked():
            file_name = self._comps.itemData(self._comps.currentIndex())
        else:
            comp = api.get_current_comp()
            file_name = comp.GetAttrs("COMPS_FileName")

        asset = self._assets.currentText()

        import colorbleed.scripts.fusion_switch_shot as switch_shot
        switch_shot.switch(asset_name=asset, filepath=file_name, new=True)

    def collect_slap_comps(self, directory):
        items = glob.glob("{}/*.comp".format(directory))
        return items

    def collect_asset_names(self):
        project_name = get_current_project_name()
        asset_docs = get_assets(project_name, fields=["name"])
        asset_names = {
            asset_doc["name"]
            for asset_doc in asset_docs
        }
        return list(asset_names)

    def populate_comp_box(self, files):
        """Ensure we display the filename only but the path is stored as well

        Args:
            files (list): list of full file path [path/to/item/item.ext,]

        Returns:
            None
        """

        for f in files:
            filename = os.path.basename(f)
            self._comps.addItem(filename, userData=f)


if __name__ == '__main__':
    install_host(api)

    app = QtWidgets.QApplication(sys.argv)
    window = App()
    window.setStyleSheet(style.load_stylesheet())
    window.show()
    sys.exit(app.exec_())
@@ -1,40 +0,0 @@
"""Forces Fusion to 'retrigger' the Loader to update.

Warning:
    This might change settings like 'Reverse', 'Loop', trims and other
    settings of the Loader. So use this at your own risk.

"""
from openpype.hosts.fusion.api.pipeline import (
    get_current_comp,
    comp_lock_and_undo_chunk
)


def update_loader_ranges():
    comp = get_current_comp()
    with comp_lock_and_undo_chunk(comp, "Reload clip time ranges"):
        tools = comp.GetToolList(True, "Loader").values()
        for tool in tools:

            # Get tool attributes
            tool_a = tool.GetAttrs()
            clipTable = tool_a['TOOLST_Clip_Name']
            altclipTable = tool_a['TOOLST_AltClip_Name']
            startTime = tool_a['TOOLNT_Clip_Start']
            old_global_in = tool.GlobalIn[comp.CurrentTime]

            # Reapply
            for index, _ in clipTable.items():
                time = startTime[index]
                tool.Clip[time] = tool.Clip[time]

            for index, _ in altclipTable.items():
                time = startTime[index]
                tool.ProxyFilename[time] = tool.ProxyFilename[time]

            tool.GlobalIn[comp.CurrentTime] = old_global_in


if __name__ == '__main__':
    update_loader_ranges()
@@ -5,7 +5,7 @@ Global = {
        Map = {
            ["OpenPype:"] = "$(OPENPYPE_FUSION)/deploy",
            ["Config:"] = "UserPaths:Config;OpenPype:Config",
            ["Scripts:"] = "UserPaths:Scripts;Reactor:System/Scripts;OpenPype:Scripts",
            ["Scripts:"] = "UserPaths:Scripts;Reactor:System/Scripts",
        },
    },
    Script = {
@@ -30,10 +30,6 @@ class CreateSaver(NewCreator):
    instance_attributes = [
        "reviewable"
    ]
    default_variants = [
        "Main",
        "Mask"
    ]

    # TODO: This should be renamed together with Nuke so it is aligned
    temp_rendering_path_template = (
@@ -250,11 +246,7 @@ class CreateSaver(NewCreator):
            label="Review",
        )

    def apply_settings(
        self,
        project_settings,
        system_settings
    ):
    def apply_settings(self, project_settings):
        """Method called on initialization of plugin to apply settings."""

        # plugin settings
@@ -57,28 +57,31 @@ def create_interactive(creator_identifier, **kwargs):
        list: The created instances.

    """

    # TODO Use Qt instead
    result, variant = hou.ui.readInput('Define variant name',
                                       buttons=("Ok", "Cancel"),
                                       initial_contents='Main',
                                       title="Define variant",
                                       help="Set the variant for the "
                                            "publish instance",
                                       close_choice=1)
    if result == 1:
        # User interrupted
        return
    variant = variant.strip()
    if not variant:
        raise RuntimeError("Empty variant value entered.")

    host = registered_host()
    context = CreateContext(host)
    creator = context.manual_creators.get(creator_identifier)
    if not creator:
        raise RuntimeError("Invalid creator identifier: "
                           "{}".format(creator_identifier))
        raise RuntimeError("Invalid creator identifier: {}".format(
            creator_identifier)
        )

    # TODO Use Qt instead
    result, variant = hou.ui.readInput(
        "Define variant name",
        buttons=("Ok", "Cancel"),
        initial_contents=creator.get_default_variant(),
        title="Define variant",
        help="Set the variant for the publish instance",
        close_choice=1
    )

    if result == 1:
        # User interrupted
        return

    variant = variant.strip()
    if not variant:
        raise RuntimeError("Empty variant value entered.")

    # TODO: Once more elaborate unique create behavior should exist per Creator
    # instead of per network editor area then we should move this from here
@@ -296,7 +296,7 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
        """
        return [hou.ropNodeTypeCategory()]

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        """Method called on initialization of plugin to apply settings."""

        settings_name = self.settings_name
@@ -6,7 +6,7 @@ from typing import Any, Dict, Union

import six
from openpype.pipeline.context_tools import (
    get_current_project, get_current_project_asset,)
    get_current_project, get_current_project_asset)
from pymxs import runtime as rt

JSON_PREFIX = "JSON::"
@@ -312,3 +312,98 @@ def set_timeline(frameStart, frameEnd):
    """
    rt.animationRange = rt.interval(frameStart, frameEnd)
    return rt.animationRange


def unique_namespace(namespace, format="%02d",
                     prefix="", suffix="", con_suffix="CON"):
    """Return unique namespace

    Arguments:
        namespace (str): Name of namespace to consider
        format (str, optional): Formatting of the given iteration number
        suffix (str, optional): Only consider namespaces with this suffix.
        con_suffix: max only, for finding the name of the master container

    >>> unique_namespace("bar")
    # bar01
    >>> unique_namespace(":hello")
    # :hello01
    >>> unique_namespace("bar:", suffix="_NS")
    # bar01_NS:

    """

    def current_namespace():
        current = namespace
        # When inside a namespace Max adds no trailing :
        if not current.endswith(":"):
            current += ":"
        return current

    # Always check against the absolute namespace root
    # There's no clash with :x if we're defining namespace :a:x
    ROOT = ":" if namespace.startswith(":") else current_namespace()

    # Strip trailing `:` tokens since we might want to add a suffix
    start = ":" if namespace.startswith(":") else ""
    end = ":" if namespace.endswith(":") else ""
    namespace = namespace.strip(":")
    if ":" in namespace:
        # Split off any nesting that we don't uniqify anyway.
        parents, namespace = namespace.rsplit(":", 1)
        start += parents + ":"
        ROOT += start

    iteration = 1
    increment_version = True
    while increment_version:
        nr_namespace = namespace + format % iteration
        unique = prefix + nr_namespace + suffix
        container_name = f"{unique}:{namespace}{con_suffix}"
        if not rt.getNodeByName(container_name):
            name_space = start + unique + end
            increment_version = False
            return name_space
        else:
            increment_version = True
            iteration += 1


def get_namespace(container_name):
    """Get the namespace and name of the sub-container

    Args:
        container_name (str): the name of master container

    Raises:
        RuntimeError: when there is no master container found

    Returns:
        namespace (str): namespace of the sub-container
        name (str): name of the sub-container
    """
    node = rt.getNodeByName(container_name)
    if not node:
        raise RuntimeError("Master Container Not Found..")
    name = rt.getUserProp(node, "name")
    namespace = rt.getUserProp(node, "namespace")
    return namespace, name


def object_transform_set(container_children):
    """A function which allows to store the transform of
    previous loaded object(s)
    Args:
        container_children(list): A list of nodes

    Returns:
        transform_set (dict): A dict with all transform data of
            the previous loaded object(s)
    """
    transform_set = {}
    for node in container_children:
        name = f"{node.name}.transform"
        transform_set[name] = node.pos
        name = f"{node.name}.scale"
        transform_set[name] = node.scale
    return transform_set
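A short sketch of how the helpers added above are combined by the 3ds Max loaders later in this diff. The asset name `hero` is illustrative only, and `rt` is only available inside a running 3ds Max session:

```python
from pymxs import runtime as rt  # pymxs exists only inside 3ds Max

from openpype.hosts.max.api.lib import unique_namespace, object_transform_set

# Reserve a unique, numbered namespace for the incoming asset, e.g. "hero_01_".
namespace = unique_namespace("hero" + "_", suffix="_")

# Loaders in this commit name their parameter container "<namespace>:<name>_param".
container = rt.Container(name=f"{namespace}:hero_param")

# On update, snapshot child transforms before deleting and re-importing,
# so the freshly imported nodes can be moved back where the old ones were.
transform_data = object_transform_set(container.Children)
```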
@@ -15,8 +15,10 @@ from openpype.pipeline import (
)
from openpype.hosts.max.api.menu import OpenPypeMenu
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.plugin import MS_CUSTOM_ATTRIB
from openpype.hosts.max import MAX_HOST_DIR


from pymxs import runtime as rt  # noqa

log = logging.getLogger("openpype.hosts.max")
@@ -152,17 +154,18 @@ def ls() -> list:
        yield lib.read(container)


def containerise(name: str, nodes: list, context, loader=None, suffix="_CON"):
def containerise(name: str, nodes: list, context,
                 namespace=None, loader=None, suffix="_CON"):
    data = {
        "schema": "openpype:container-2.0",
        "id": AVALON_CONTAINER_ID,
        "name": name,
        "namespace": "",
        "namespace": namespace or "",
        "loader": loader,
        "representation": context["representation"]["_id"],
    }

    container_name = f"{name}{suffix}"
    container_name = f"{namespace}:{name}{suffix}"
    container = rt.container(name=container_name)
    for node in nodes:
        node.Parent = container
@@ -170,3 +173,53 @@ def containerise(name: str, nodes: list, context, loader=None, suffix="_CON"):
    if not lib.imprint(container_name, data):
        print(f"imprinting of {container_name} failed.")
    return container


def load_custom_attribute_data():
    """Re-loading the Openpype/AYON custom parameter built by the creator

    Returns:
        attribute: re-loading the custom OP attributes set in Maxscript
    """
    return rt.Execute(MS_CUSTOM_ATTRIB)


def import_custom_attribute_data(container: str, selections: list):
    """Importing the Openpype/AYON custom parameter built by the creator

    Args:
        container (str): target container which adds custom attributes
        selections (list): nodes to be added into
            group in custom attributes
    """
    attrs = load_custom_attribute_data()
    modifier = rt.EmptyModifier()
    rt.addModifier(container, modifier)
    container.modifiers[0].name = "OP Data"
    rt.custAttributes.add(container.modifiers[0], attrs)
    node_list = []
    sel_list = []
    for i in selections:
        node_ref = rt.NodeTransformMonitor(node=i)
        node_list.append(node_ref)
        sel_list.append(str(i))

    # Setting the property
    rt.setProperty(
        container.modifiers[0].openPypeData,
        "all_handles", node_list)
    rt.setProperty(
        container.modifiers[0].openPypeData,
        "sel_list", sel_list)


def update_custom_attribute_data(container: str, selections: list):
    """Updating the Openpype/AYON custom parameter built by the creator

    Args:
        container (str): target container which adds custom attributes
        selections (list): nodes to be added into
            group in custom attributes
    """
    if container.modifiers[0].name == "OP Data":
        rt.deleteModifier(container, container.modifiers[0])
    import_custom_attribute_data(container, selections)
@@ -1,7 +1,16 @@
import os

from openpype.hosts.max.api import lib, maintained_selection
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.lib import (
    unique_namespace,
    get_namespace,
    object_transform_set
)
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data,
    update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -13,50 +22,76 @@ class FbxLoader(load.LoaderPlugin):
    order = -9
    icon = "code-fork"
    color = "white"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt

        filepath = self.filepath_from_context(context)
        filepath = os.path.normpath(filepath)
        rt.FBXImporterSetParam("Animation", True)
        rt.FBXImporterSetParam("Camera", True)
        rt.FBXImporterSetParam("AxisConversionMethod", True)
        rt.FBXImporterSetParam("Mode", rt.Name("create"))
        rt.FBXImporterSetParam("Preserveinstances", True)
        rt.ImportFile(
            filepath,
            rt.name("noPrompt"),
            using=rt.FBXIMP)

        container = rt.GetNodeByName(f"{name}")
        if not container:
            container = rt.Container()
            container.name = f"{name}"
        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        container = rt.container(
            name=f"{namespace}:{name}_{self.postfix}")
        selections = rt.GetCurrentSelection()
        import_custom_attribute_data(container, selections)

        for selection in rt.GetCurrentSelection():
        for selection in selections:
            selection.Parent = container
            selection.name = f"{namespace}:{selection.name}"

        return containerise(
            name, [container], context, loader=self.__class__.__name__)
            name, [container], context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, representation):
        from pymxs import runtime as rt

        path = get_representation_path(representation)
        node = rt.GetNodeByName(container["instance_node"])
        rt.Select(node.Children)
        fbx_reimport_cmd = (
            f"""
        node_name = container["instance_node"]
        node = rt.getNodeByName(node_name)
        namespace, name = get_namespace(node_name)
        sub_node_name = f"{namespace}:{name}_{self.postfix}"
        inst_container = rt.getNodeByName(sub_node_name)
        rt.Select(inst_container.Children)
        transform_data = object_transform_set(inst_container.Children)
        for prev_fbx_obj in rt.selection:
            if rt.isValidNode(prev_fbx_obj):
                rt.Delete(prev_fbx_obj)

            FBXImporterSetParam "Animation" true
            FBXImporterSetParam "Cameras" true
            FBXImporterSetParam "AxisConversionMethod" true
            FbxExporterSetParam "UpAxis" "Y"
            FbxExporterSetParam "Preserveinstances" true
        rt.FBXImporterSetParam("Animation", True)
        rt.FBXImporterSetParam("Camera", True)
        rt.FBXImporterSetParam("Mode", rt.Name("merge"))
        rt.FBXImporterSetParam("AxisConversionMethod", True)
        rt.FBXImporterSetParam("Preserveinstances", True)
        rt.ImportFile(
            path, rt.name("noPrompt"), using=rt.FBXIMP)
        current_fbx_objects = rt.GetCurrentSelection()
        for fbx_object in current_fbx_objects:
            if fbx_object.Parent != inst_container:
                fbx_object.Parent = inst_container
            fbx_object.name = f"{namespace}:{fbx_object.name}"
            fbx_object.pos = transform_data[
                f"{fbx_object.name}.transform"]
            fbx_object.scale = transform_data[
                f"{fbx_object.name}.scale"]

            importFile @"{path}" #noPrompt using:FBXIMP
            """)
        rt.Execute(fbx_reimport_cmd)
        for children in node.Children:
            if rt.classOf(children) == rt.Container:
                if children.name == sub_node_name:
                    update_custom_attribute_data(
                        children, current_fbx_objects)

        with maintained_selection():
            rt.Select(node)
@@ -1,7 +1,15 @@
import os

from openpype.hosts.max.api import lib
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.lib import (
    unique_namespace,
    get_namespace,
    object_transform_set
)
from openpype.hosts.max.api.pipeline import (
    containerise, import_custom_attribute_data,
    update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -16,22 +24,34 @@ class MaxSceneLoader(load.LoaderPlugin):
    order = -8
    icon = "code-fork"
    color = "green"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt

        path = self.filepath_from_context(context)
        path = os.path.normpath(path)
        # import the max scene by using "merge file"
        path = path.replace('\\', '/')
        rt.MergeMaxFile(path)
        rt.MergeMaxFile(path, quiet=True, includeFullGroup=True)
        max_objects = rt.getLastMergedNodes()
        max_container = rt.Container(name=f"{name}")
        for max_object in max_objects:
            max_object.Parent = max_container
        max_object_names = [obj.name for obj in max_objects]
        # implement the OP/AYON custom attributes before load
        max_container = []

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        container_name = f"{namespace}:{name}_{self.postfix}"
        container = rt.Container(name=container_name)
        import_custom_attribute_data(container, max_objects)
        max_container.append(container)
        max_container.extend(max_objects)
        for max_obj, obj_name in zip(max_objects, max_object_names):
            max_obj.name = f"{namespace}:{obj_name}"
        return containerise(
            name, [max_container], context, loader=self.__class__.__name__)
            name, max_container, context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, representation):
        from pymxs import runtime as rt
@@ -39,15 +59,32 @@ class MaxSceneLoader(load.LoaderPlugin):
        path = get_representation_path(representation)
        node_name = container["instance_node"]

        rt.MergeMaxFile(path,
                        rt.Name("noRedraw"),
                        rt.Name("deleteOldDups"),
                        rt.Name("useSceneMtlDups"))
        node = rt.getNodeByName(node_name)
        namespace, name = get_namespace(node_name)
        sub_container_name = f"{namespace}:{name}_{self.postfix}"
        # delete the old container with attribute
        # delete old duplicate
        rt.Select(node.Children)
        transform_data = object_transform_set(node.Children)
        for prev_max_obj in rt.GetCurrentSelection():
            if rt.isValidNode(prev_max_obj) and prev_max_obj.name != sub_container_name:  # noqa
                rt.Delete(prev_max_obj)
        rt.MergeMaxFile(path, rt.Name("deleteOldDups"))

        max_objects = rt.getLastMergedNodes()
        container_node = rt.GetNodeByName(node_name)
        for max_object in max_objects:
            max_object.Parent = container_node
        current_max_objects = rt.getLastMergedNodes()
        current_max_object_names = [obj.name for obj
                                    in current_max_objects]
        sub_container = rt.getNodeByName(sub_container_name)
        update_custom_attribute_data(sub_container, current_max_objects)
        for max_object in current_max_objects:
            max_object.Parent = node
        for max_obj, obj_name in zip(current_max_objects,
                                     current_max_object_names):
            max_obj.name = f"{namespace}:{obj_name}"
            max_obj.pos = transform_data[
                f"{max_obj.name}.transform"]
            max_obj.scale = transform_data[
                f"{max_obj.name}.scale"]

        lib.imprint(container["instance_node"], {
            "representation": str(representation["_id"])
@@ -1,8 +1,14 @@
import os
from openpype.pipeline import load, get_representation_path
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data,
    update_custom_attribute_data
)
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import maintained_selection
from openpype.hosts.max.api.lib import (
    maintained_selection, unique_namespace
)


class ModelAbcLoader(load.LoaderPlugin):
@@ -14,6 +20,7 @@ class ModelAbcLoader(load.LoaderPlugin):
    order = -10
    icon = "code-fork"
    color = "orange"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt
@@ -30,7 +37,7 @@ class ModelAbcLoader(load.LoaderPlugin):
        rt.AlembicImport.CustomAttributes = True
        rt.AlembicImport.UVs = True
        rt.AlembicImport.VertexColors = True
        rt.importFile(file_path, rt.name("noPrompt"))
        rt.importFile(file_path, rt.name("noPrompt"), using=rt.AlembicImport)

        abc_after = {
            c
@@ -45,9 +52,22 @@ class ModelAbcLoader(load.LoaderPlugin):
            self.log.error("Something failed when loading.")

        abc_container = abc_containers.pop()
        import_custom_attribute_data(
            abc_container, abc_container.Children)

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        for abc_object in abc_container.Children:
            abc_object.name = f"{namespace}:{abc_object.name}"
        # rename the abc container with namespace
        abc_container_name = f"{namespace}:{name}_{self.postfix}"
        abc_container.name = abc_container_name

        return containerise(
            name, [abc_container], context, loader=self.__class__.__name__
            name, [abc_container], context,
            namespace, loader=self.__class__.__name__
        )

    def update(self, container, representation):
@@ -55,21 +75,19 @@ class ModelAbcLoader(load.LoaderPlugin):

        path = get_representation_path(representation)
        node = rt.GetNodeByName(container["instance_node"])
        rt.Select(node.Children)

        for alembic in rt.Selection:
            abc = rt.GetNodeByName(alembic.name)
            rt.Select(abc.Children)
            for abc_con in rt.Selection:
                container = rt.GetNodeByName(abc_con.name)
                container.source = path
                rt.Select(container.Children)
                for abc_obj in rt.Selection:
                    alembic_obj = rt.GetNodeByName(abc_obj.name)
                    alembic_obj.source = path

        with maintained_selection():
            rt.Select(node)
            rt.Select(node.Children)

            for alembic in rt.Selection:
                abc = rt.GetNodeByName(alembic.name)
                update_custom_attribute_data(abc, abc.Children)
                rt.Select(abc.Children)
                for abc_con in abc.Children:
                    abc_con.source = path
                    rt.Select(abc_con.Children)
                    for abc_obj in abc_con.Children:
                        abc_obj.source = path

        lib.imprint(
            container["instance_node"],
@@ -1,7 +1,15 @@
import os
from openpype.pipeline import load, get_representation_path
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.pipeline import (
    containerise, import_custom_attribute_data,
    update_custom_attribute_data
)
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import (
    unique_namespace,
    get_namespace,
    object_transform_set
)
from openpype.hosts.max.api.lib import maintained_selection
@@ -13,6 +21,7 @@ class FbxModelLoader(load.LoaderPlugin):
    order = -9
    icon = "code-fork"
    color = "white"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt
@@ -20,39 +29,69 @@ class FbxModelLoader(load.LoaderPlugin):
        filepath = os.path.normpath(self.filepath_from_context(context))
        rt.FBXImporterSetParam("Animation", False)
        rt.FBXImporterSetParam("Cameras", False)
        rt.FBXImporterSetParam("Mode", rt.Name("create"))
        rt.FBXImporterSetParam("Preserveinstances", True)
        rt.importFile(filepath, rt.name("noPrompt"), using=rt.FBXIMP)

        container = rt.GetNodeByName(name)
        if not container:
            container = rt.Container()
            container.name = name
        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        container = rt.container(
            name=f"{namespace}:{name}_{self.postfix}")
        selections = rt.GetCurrentSelection()
        import_custom_attribute_data(container, selections)

        for selection in rt.GetCurrentSelection():
        for selection in selections:
            selection.Parent = container
            selection.name = f"{namespace}:{selection.name}"

        return containerise(
            name, [container], context, loader=self.__class__.__name__
            name, [container], context,
            namespace, loader=self.__class__.__name__
        )

    def update(self, container, representation):
        from pymxs import runtime as rt
        path = get_representation_path(representation)
        node = rt.getNodeByName(container["instance_node"])
        rt.select(node.Children)
        node_name = container["instance_node"]
        node = rt.getNodeByName(node_name)
        namespace, name = get_namespace(node_name)
        sub_node_name = f"{namespace}:{name}_{self.postfix}"
        inst_container = rt.getNodeByName(sub_node_name)
        rt.Select(inst_container.Children)
        transform_data = object_transform_set(inst_container.Children)
        for prev_fbx_obj in rt.selection:
            if rt.isValidNode(prev_fbx_obj):
                rt.Delete(prev_fbx_obj)

        rt.FBXImporterSetParam("Animation", False)
        rt.FBXImporterSetParam("Cameras", False)
        rt.FBXImporterSetParam("Mode", rt.Name("merge"))
        rt.FBXImporterSetParam("AxisConversionMethod", True)
        rt.FBXImporterSetParam("UpAxis", "Y")
        rt.FBXImporterSetParam("Preserveinstances", True)
        rt.importFile(path, rt.name("noPrompt"), using=rt.FBXIMP)
        current_fbx_objects = rt.GetCurrentSelection()
        for fbx_object in current_fbx_objects:
            if fbx_object.Parent != inst_container:
                fbx_object.Parent = inst_container
            fbx_object.name = f"{namespace}:{fbx_object.name}"
            fbx_object.pos = transform_data[
                f"{fbx_object.name}.transform"]
            fbx_object.scale = transform_data[
                f"{fbx_object.name}.scale"]

        for children in node.Children:
            if rt.classOf(children) == rt.Container:
                if children.name == sub_node_name:
                    update_custom_attribute_data(
                        children, current_fbx_objects)

        with maintained_selection():
            rt.Select(node)

        lib.imprint(
            container["instance_node"],
            node_name,
            {"representation": str(representation["_id"])},
        )
@@ -1,8 +1,18 @@
import os

from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import (
    unique_namespace,
    get_namespace,
    maintained_selection,
    object_transform_set
)
from openpype.hosts.max.api.lib import maintained_selection
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data,
    update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -14,6 +24,7 @@ class ObjLoader(load.LoaderPlugin):
    order = -9
    icon = "code-fork"
    color = "white"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt
@@ -22,36 +33,49 @@ class ObjLoader(load.LoaderPlugin):
        self.log.debug("Executing command to import..")

        rt.Execute(f'importFile @"{filepath}" #noPrompt using:ObjImp')

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        # create "missing" container for obj import
        container = rt.Container()
        container.name = name

        container = rt.Container(name=f"{namespace}:{name}_{self.postfix}")
        selections = rt.GetCurrentSelection()
        import_custom_attribute_data(container, selections)
        # get current selection
        for selection in rt.GetCurrentSelection():
        for selection in selections:
            selection.Parent = container

        asset = rt.GetNodeByName(name)

            selection.name = f"{namespace}:{selection.name}"
        return containerise(
            name, [asset], context, loader=self.__class__.__name__)
            name, [container], context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, representation):
        from pymxs import runtime as rt

        path = get_representation_path(representation)
        node_name = container["instance_node"]
        node = rt.GetNodeByName(node_name)

        instance_name, _ = node_name.split("_")
        container = rt.GetNodeByName(instance_name)
        for child in container.Children:
            rt.Delete(child)
        node = rt.getNodeByName(node_name)
        namespace, name = get_namespace(node_name)
        sub_node_name = f"{namespace}:{name}_{self.postfix}"
        inst_container = rt.getNodeByName(sub_node_name)
        rt.Select(inst_container.Children)
        transform_data = object_transform_set(inst_container.Children)
        for prev_obj in rt.selection:
            if rt.isValidNode(prev_obj):
                rt.Delete(prev_obj)

        rt.Execute(f'importFile @"{path}" #noPrompt using:ObjImp')
        # get current selection
        for selection in rt.GetCurrentSelection():
            selection.Parent = container

        selections = rt.GetCurrentSelection()
        update_custom_attribute_data(inst_container, selections)
        for selection in selections:
            selection.Parent = inst_container
            selection.name = f"{namespace}:{selection.name}"
            selection.pos = transform_data[
                f"{selection.name}.transform"]
            selection.scale = transform_data[
                f"{selection.name}.scale"]
        with maintained_selection():
            rt.Select(node)
@@ -1,8 +1,16 @@
import os

from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import (
    unique_namespace,
    get_namespace,
    object_transform_set
)
from openpype.hosts.max.api.lib import maintained_selection
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -15,6 +23,7 @@ class ModelUSDLoader(load.LoaderPlugin):
    order = -10
    icon = "code-fork"
    color = "orange"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt
@@ -30,11 +39,24 @@ class ModelUSDLoader(load.LoaderPlugin):
        rt.LogLevel = rt.Name("info")
        rt.USDImporter.importFile(filepath,
                                  importOptions=import_options)

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        asset = rt.GetNodeByName(name)
        import_custom_attribute_data(asset, asset.Children)
        for usd_asset in asset.Children:
            usd_asset.name = f"{namespace}:{usd_asset.name}"

        asset_name = f"{namespace}:{name}_{self.postfix}"
        asset.name = asset_name
        # need to get the correct container after renamed
        asset = rt.GetNodeByName(asset_name)

        return containerise(
            name, [asset], context, loader=self.__class__.__name__)
            name, [asset], context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, representation):
        from pymxs import runtime as rt
@@ -42,11 +64,16 @@ class ModelUSDLoader(load.LoaderPlugin):
        path = get_representation_path(representation)
        node_name = container["instance_node"]
        node = rt.GetNodeByName(node_name)
        namespace, name = get_namespace(node_name)
        sub_node_name = f"{namespace}:{name}_{self.postfix}"
        transform_data = None
        for n in node.Children:
            for r in n.Children:
                rt.Delete(r)
            rt.Select(n.Children)
            transform_data = object_transform_set(n.Children)
            for prev_usd_asset in rt.selection:
                if rt.isValidNode(prev_usd_asset):
                    rt.Delete(prev_usd_asset)
            rt.Delete(n)
        instance_name, _ = node_name.split("_")

        import_options = rt.USDImporter.CreateOptions()
        base_filename = os.path.basename(path)
@@ -55,11 +82,20 @@ class ModelUSDLoader(load.LoaderPlugin):

        rt.LogPath = log_filepath
        rt.LogLevel = rt.Name("info")
        rt.USDImporter.importFile(path,
                                  importOptions=import_options)
        rt.USDImporter.importFile(
            path, importOptions=import_options)

        asset = rt.GetNodeByName(instance_name)
        asset = rt.GetNodeByName(name)
        asset.Parent = node
        import_custom_attribute_data(asset, asset.Children)
        for children in asset.Children:
            children.name = f"{namespace}:{children.name}"
            children.pos = transform_data[
                f"{children.name}.transform"]
            children.scale = transform_data[
                f"{children.name}.scale"]

        asset.name = sub_node_name

        with maintained_selection():
            rt.Select(node)
@@ -7,7 +7,12 @@ Because of limited api, alembics can be only loaded, but not easily updated.
import os
from openpype.pipeline import load, get_representation_path
from openpype.hosts.max.api import lib, maintained_selection
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.lib import unique_namespace
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data,
    update_custom_attribute_data
)


class AbcLoader(load.LoaderPlugin):
@@ -19,6 +24,7 @@ class AbcLoader(load.LoaderPlugin):
    order = -10
    icon = "code-fork"
    color = "orange"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt
@@ -33,7 +39,7 @@ class AbcLoader(load.LoaderPlugin):
        }

        rt.AlembicImport.ImportToRoot = False
        rt.importFile(file_path, rt.name("noPrompt"))
        rt.importFile(file_path, rt.name("noPrompt"), using=rt.AlembicImport)

        abc_after = {
            c
@@ -48,13 +54,27 @@ class AbcLoader(load.LoaderPlugin):
            self.log.error("Something failed when loading.")

        abc_container = abc_containers.pop()

        for abc in rt.GetCurrentSelection():
        selections = rt.GetCurrentSelection()
        import_custom_attribute_data(
            abc_container, abc_container.Children)
        for abc in selections:
            for cam_shape in abc.Children:
                cam_shape.playbackType = 2

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )

        for abc_object in abc_container.Children:
            abc_object.name = f"{namespace}:{abc_object.name}"
        # rename the abc container with namespace
        abc_container_name = f"{namespace}:{name}_{self.postfix}"
        abc_container.name = abc_container_name

        return containerise(
            name, [abc_container], context, loader=self.__class__.__name__
            name, [abc_container], context,
            namespace, loader=self.__class__.__name__
        )

    def update(self, container, representation):
@@ -63,28 +83,23 @@ class AbcLoader(load.LoaderPlugin):
        path = get_representation_path(representation)
        node = rt.GetNodeByName(container["instance_node"])

        alembic_objects = self.get_container_children(node, "AlembicObject")
        for alembic_object in alembic_objects:
            alembic_object.source = path

        lib.imprint(
            container["instance_node"],
            {"representation": str(representation["_id"])},
        )

        with maintained_selection():
            rt.Select(node.Children)

            for alembic in rt.Selection:
                abc = rt.GetNodeByName(alembic.name)
                update_custom_attribute_data(abc, abc.Children)
                rt.Select(abc.Children)
                for abc_con in rt.Selection:
                    container = rt.GetNodeByName(abc_con.name)
                    container.source = path
                    rt.Select(container.Children)
                    for abc_obj in rt.Selection:
                        alembic_obj = rt.GetNodeByName(abc_obj.name)
                        alembic_obj.source = path
                for abc_con in abc.Children:
                    abc_con.source = path
                    rt.Select(abc_con.Children)
                    for abc_obj in abc_con.Children:
                        abc_obj.source = path

        lib.imprint(
            container["instance_node"],
            {"representation": str(representation["_id"])},
        )

    def switch(self, container, representation):
        self.update(container, representation)
@@ -1,7 +1,14 @@
import os

from openpype.hosts.max.api import lib, maintained_selection
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.lib import (
    unique_namespace, get_namespace
)
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data,
    update_custom_attribute_data
)
from openpype.pipeline import get_representation_path, load
@@ -13,6 +20,7 @@ class PointCloudLoader(load.LoaderPlugin):
    order = -8
    icon = "code-fork"
    color = "green"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        """load point cloud by tyCache"""
@@ -22,10 +30,19 @@ class PointCloudLoader(load.LoaderPlugin):
        obj = rt.tyCache()
        obj.filename = filepath

        prt_container = rt.GetNodeByName(obj.name)
        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        prt_container = rt.Container(
            name=f"{namespace}:{name}_{self.postfix}")
        import_custom_attribute_data(prt_container, [obj])
        obj.Parent = prt_container
        obj.name = f"{namespace}:{obj.name}"

        return containerise(
            name, [prt_container], context, loader=self.__class__.__name__)
            name, [prt_container], context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, representation):
        """update the container"""
@@ -33,15 +50,18 @@ class PointCloudLoader(load.LoaderPlugin):

        path = get_representation_path(representation)
        node = rt.GetNodeByName(container["instance_node"])
        namespace, name = get_namespace(container["instance_node"])
        sub_node_name = f"{namespace}:{name}_{self.postfix}"
        inst_container = rt.getNodeByName(sub_node_name)
        update_custom_attribute_data(
            inst_container, inst_container.Children)
        with maintained_selection():
            rt.Select(node.Children)
            for prt in rt.Selection:
                prt_object = rt.GetNodeByName(prt.name)
                prt_object.filename = path

        lib.imprint(container["instance_node"], {
            "representation": str(representation["_id"])
        })
            for prt in inst_container.Children:
                prt.filename = path
            lib.imprint(container["instance_node"], {
                "representation": str(representation["_id"])
            })

    def switch(self, container, representation):
        self.update(container, representation)
@@ -5,8 +5,15 @@ from openpype.pipeline import (
    load,
    get_representation_path
)
from openpype.hosts.max.api.pipeline import containerise
from openpype.hosts.max.api.pipeline import (
    containerise,
    import_custom_attribute_data,
    update_custom_attribute_data
)
from openpype.hosts.max.api import lib
from openpype.hosts.max.api.lib import (
    unique_namespace, get_namespace
)


class RedshiftProxyLoader(load.LoaderPlugin):
@@ -18,6 +25,7 @@ class RedshiftProxyLoader(load.LoaderPlugin):
    order = -9
    icon = "code-fork"
    color = "white"
    postfix = "param"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt
@@ -30,24 +38,32 @@ class RedshiftProxyLoader(load.LoaderPlugin):
        if collections:
            rs_proxy.is_sequence = True

        container = rt.container()
        container.name = name
        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        container = rt.Container(
            name=f"{namespace}:{name}_{self.postfix}")
        rs_proxy.Parent = container

        asset = rt.getNodeByName(name)
        rs_proxy.name = f"{namespace}:{rs_proxy.name}"
        import_custom_attribute_data(container, [rs_proxy])

        return containerise(
            name, [asset], context, loader=self.__class__.__name__)
            name, [container], context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, representation):
        from pymxs import runtime as rt

        path = get_representation_path(representation)
        node = rt.getNodeByName(container["instance_node"])
        for children in node.Children:
            children_node = rt.getNodeByName(children.name)
            for proxy in children_node.Children:
                proxy.file = path
        namespace, name = get_namespace(container["instance_node"])
        sub_node_name = f"{namespace}:{name}_{self.postfix}"
        inst_container = rt.getNodeByName(sub_node_name)

        update_custom_attribute_data(
            inst_container, inst_container.Children)
        for proxy in inst_container.Children:
            proxy.file = path

        lib.imprint(container["instance_node"], {
            "representation": str(representation["_id"])
@@ -260,7 +260,7 @@ class MayaCreator(NewCreator, MayaCreatorBase):
                        default=True)
        ]

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        """Method called on initialization of plugin to apply settings."""

        settings_name = self.settings_name
@@ -81,10 +81,8 @@ class CreateAnimation(plugin.MayaHiddenCreator):

        return defs

    def apply_settings(self, project_settings, system_settings):
        super(CreateAnimation, self).apply_settings(
            project_settings, system_settings
        )
    def apply_settings(self, project_settings):
        super(CreateAnimation, self).apply_settings(project_settings)
        # Hardcoding creator to be enabled due to existing settings would
        # disable the creator causing the creator plugin to not be
        # discoverable.
@@ -34,7 +34,7 @@ class CreateRenderlayer(plugin.RenderlayerCreator):
    render_settings = {}

    @classmethod
    def apply_settings(cls, project_settings, system_settings):
    def apply_settings(cls, project_settings):
        cls.render_settings = project_settings["maya"]["RenderSettings"]

    def create(self, subset_name, instance_data, pre_create_data):
@@ -21,7 +21,7 @@ class CreateUnrealSkeletalMesh(plugin.MayaCreator):
    # Defined in settings
    joint_hints = set()

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        """Apply project settings to creator"""
        settings = (
            project_settings["maya"]["create"]["CreateUnrealSkeletalMesh"]
@@ -16,7 +16,7 @@ class CreateUnrealStaticMesh(plugin.MayaCreator):
    # Defined in settings
    collision_prefixes = []

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        """Apply project settings to creator"""
        settings = project_settings["maya"]["create"]["CreateUnrealStaticMesh"]
        self.collision_prefixes = settings["collision_prefixes"]
@@ -22,7 +22,7 @@ class CreateVRayScene(plugin.RenderlayerCreator):
    singleton_node_name = "vraysceneMain"

    @classmethod
    def apply_settings(cls, project_settings, system_settings):
    def apply_settings(cls, project_settings):
        cls.render_settings = project_settings["maya"]["RenderSettings"]

    def create(self, subset_name, instance_data, pre_create_data):
@@ -1,60 +0,0 @@
from maya import cmds

import pyblish.api
from openpype.pipeline.publish import (
    ValidateContentsOrder, PublishValidationError, RepairAction
)
from openpype.pipeline import discover_legacy_creator_plugins
from openpype.hosts.maya.api.lib import imprint


class ValidateInstanceAttributes(pyblish.api.InstancePlugin):
    """Validate Instance Attributes.

    New attributes can be introduced as new features come in. Old instances
    will need to be updated with these attributes for the documentation to make
    sense, and users do not have to recreate the instances.
    """

    order = ValidateContentsOrder
    hosts = ["maya"]
    families = ["*"]
    label = "Instance Attributes"
    plugins_by_family = {
        p.family: p for p in discover_legacy_creator_plugins()
    }
    actions = [RepairAction]

    @classmethod
    def get_missing_attributes(self, instance):
        plugin = self.plugins_by_family[instance.data["family"]]
        subset = instance.data["subset"]
        asset = instance.data["asset"]
        objset = instance.data["objset"]

        missing_attributes = {}
        for key, value in plugin(subset, asset).data.items():
            if not cmds.objExists("{}.{}".format(objset, key)):
                missing_attributes[key] = value

        return missing_attributes

    def process(self, instance):
        objset = instance.data.get("objset")
        if objset is None:
            self.log.debug(
                "Skipping {} because no objectset found.".format(instance)
            )
            return

        missing_attributes = self.get_missing_attributes(instance)
        if missing_attributes:
            raise PublishValidationError(
                "Missing attributes on {}:\n{}".format(
                    objset, missing_attributes
                )
            )

    @classmethod
    def repair(cls, instance):
        imprint(instance.data["objset"], cls.get_missing_attributes(instance))
@@ -379,11 +379,7 @@ class NukeWriteCreator(NukeCreator):
                sys.exc_info()[2]
            )

    def apply_settings(
        self,
        project_settings,
        system_settings
    ):
    def apply_settings(self, project_settings):
        """Method called on initialization of plugin to apply settings."""

        # plugin settings
@@ -98,7 +98,7 @@ class AutoImageCreator(PSAutoCreator):
            )
        ]

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        plugin_settings = (
            project_settings["photoshop"]["create"]["AutoImageCreator"]
        )
@@ -171,7 +171,7 @@ class ImageCreator(Creator):
            )
        ]

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        plugin_settings = (
            project_settings["photoshop"]["create"]["ImageCreator"]
        )

@@ -18,7 +18,7 @@ class ReviewCreator(PSAutoCreator):
    it will get recreated in next publish either way).
    """

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        plugin_settings = (
            project_settings["photoshop"]["create"]["ReviewCreator"]
        )

@@ -19,7 +19,7 @@ class WorkfileCreator(PSAutoCreator):
    in next publish automatically).
    """

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        plugin_settings = (
            project_settings["photoshop"]["create"]["WorkfileCreator"]
        )

@@ -36,7 +36,7 @@ class BatchMovieCreator(TrayPublishCreator):
    # Position batch creator after simple creators
    order = 110

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        creator_settings = (
            project_settings["traypublisher"]["create"]["BatchMovieCreator"]
        )

@@ -139,7 +139,7 @@ class CreateRenderlayer(TVPaintCreator):
    # - Mark by default instance for review
    mark_for_review = True

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        plugin_settings = (
            project_settings["tvpaint"]["create"]["create_render_layer"]
        )

@@ -387,7 +387,7 @@ class CreateRenderPass(TVPaintCreator):
    # Settings
    mark_for_review = True

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        plugin_settings = (
            project_settings["tvpaint"]["create"]["create_render_pass"]
        )

@@ -690,7 +690,7 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator):
    group_idx_offset = 10
    group_idx_padding = 3

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        plugin_settings = (
            project_settings
            ["tvpaint"]

@@ -1029,7 +1029,7 @@ class TVPaintSceneRenderCreator(TVPaintAutoCreator):
    mark_for_review = True
    active_on_create = False

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        plugin_settings = (
            project_settings["tvpaint"]["create"]["create_render_scene"]
        )

@@ -12,7 +12,7 @@ class TVPaintReviewCreator(TVPaintAutoCreator):
    # Settings
    active_on_create = True

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        plugin_settings = (
            project_settings["tvpaint"]["create"]["create_review"]
        )

@@ -9,7 +9,7 @@ class TVPaintWorkfileCreator(TVPaintAutoCreator):
    label = "Workfile"
    icon = "fa.file-o"

    def apply_settings(self, project_settings, system_settings):
    def apply_settings(self, project_settings):
        plugin_settings = (
            project_settings["tvpaint"]["create"]["create_workfile"]
        )

@@ -2,6 +2,8 @@
"""Hook to launch Unreal and prepare projects."""
import os
import copy
import shutil
import tempfile
from pathlib import Path

from qtpy import QtCore

@@ -224,10 +226,24 @@ class UnrealPrelaunchHook(PreLaunchHook):
        project_file = project_path / unreal_project_filename

        if not project_file.is_file():
            self.exec_ue_project_gen(engine_version,
                                     unreal_project_name,
                                     engine_path,
                                     project_path)
            with tempfile.TemporaryDirectory() as temp_dir:
                self.exec_ue_project_gen(engine_version,
                                         unreal_project_name,
                                         engine_path,
                                         Path(temp_dir))
                try:
                    self.log.info((
                        f"Moving from {temp_dir} to "
                        f"{project_path.as_posix()}"
                    ))
                    shutil.copytree(
                        temp_dir, project_path, dirs_exist_ok=True)

                except shutil.Error as e:
                    raise ApplicationLaunchFailed((
                        f"{self.signature} Cannot copy directory {temp_dir} "
                        f"to {project_path.as_posix()} - {e}"
                    )) from e

        self.launch_context.env["AYON_UNREAL_VERSION"] = engine_version
        # Append project file to launch arguments

@@ -424,17 +424,25 @@ class TextDef(AbstractAttrDef):


class EnumDef(AbstractAttrDef):
    """Enumeration of single item from items.
    """Enumeration of items.

    Enumeration of single item from items. Or list of items if multiselection
    is enabled.

    Args:
        items: Items definition that can be converted using
            'prepare_enum_items'.
        default: Default value. Must be one key(value) from passed items.
        items (Union[list[str], list[dict[str, Any]]): Items definition that
            can be converted using 'prepare_enum_items'.
        default (Optional[Any]): Default value. Must be one key(value) from
            passed items or list of values for multiselection.
        multiselection (Optional[bool]): If True, multiselection is allowed.
            Output is list of selected items.
    """

    type = "enum"

    def __init__(self, key, items, default=None, **kwargs):
    def __init__(
        self, key, items, default=None, multiselection=False, **kwargs
    ):
        if not items:
            raise ValueError((
                "Empty 'items' value. {} must have"

@@ -443,30 +451,44 @@ class EnumDef(AbstractAttrDef):

        items = self.prepare_enum_items(items)
        item_values = [item["value"] for item in items]
        if default not in item_values:
            for value in item_values:
                default = value
                break
        item_values_set = set(item_values)
        if multiselection:
            if default is None:
                default = []
            default = list(item_values_set.intersection(default))

        elif default not in item_values:
            default = next(iter(item_values), None)

        super(EnumDef, self).__init__(key, default=default, **kwargs)

        self.items = items
        self._item_values = set(item_values)
        self._item_values = item_values_set
        self.multiselection = multiselection

    def __eq__(self, other):
        if not super(EnumDef, self).__eq__(other):
            return False

        return self.items == other.items
        return (
            self.items == other.items
            and self.multiselection == other.multiselection
        )

    def convert_value(self, value):
        if value in self._item_values:
            return value
        return self.default
        if not self.multiselection:
            if value in self._item_values:
                return value
            return self.default

        if value is None:
            return copy.deepcopy(self.default)
        return list(self._item_values.intersection(value))

    def serialize(self):
        data = super(EnumDef, self).serialize()
        data["items"] = copy.deepcopy(self.items)
        data["multiselection"] = self.multiselection
        return data

    @staticmethod

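Worth restating in usage terms: the reworked EnumDef above keeps single selection as the default and returns a list of values when multiselection is enabled. A minimal sketch under those assumptions (the attribute keys, item values and the openpype.lib import path are illustrative, not taken from this diff):

from openpype.lib import EnumDef

# Single selection: the converted value is one of the item values.
quality = EnumDef(
    "quality",
    items=["low", "medium", "high"],
    default="medium",
    label="Quality",
)
print(quality.convert_value("ultra"))  # unknown value falls back to "medium"

# Multiselection: default and converted values are lists of item values.
formats = EnumDef(
    "formats",
    items=[
        {"value": "exr", "label": "EXR"},
        {"value": "png", "label": "PNG"},
    ],
    default=["exr"],
    multiselection=True,
    label="Formats",
)
print(formats.convert_value(["png", "tif"]))  # unknown values are dropped: ["png"]
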
@@ -270,8 +270,8 @@ def is_func_signature_supported(func, *args, **kwargs):

    Args:
        func (function): A function where the signature should be tested.
        *args (tuple[Any]): Positional arguments for function signature.
        **kwargs (dict[str, Any]): Keyword arguments for function signature.
        *args (Any): Positional arguments for function signature.
        **kwargs (Any): Keyword arguments for function signature.

    Returns:
        bool: Function can pass in arguments.

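Since the creator base class later in this changeset relies on this helper to detect which 'apply_settings' signature a plugin implements, a small sketch of the check may help. The example functions are invented; only the helper and its boolean result come from this document:

from openpype.lib import is_func_signature_supported


def new_style(project_settings):
    ...


def old_style(project_settings, system_settings):
    ...


# 'new_style' can be called with a single positional argument -> True
print(is_func_signature_supported(new_style, {}))
# 'old_style' needs a second positional argument -> False
print(is_func_signature_supported(old_style, {}))
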
@@ -334,12 +334,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,

            payload = self._get_vray_render_payload(payload_data)

        elif "assscene" in instance.data["families"]:
            self.log.debug("Submitting Arnold .ass standalone render..")
            ass_export_payload = self._get_arnold_export_payload(payload_data)
            export_job = self.submit(ass_export_payload)

            payload = self._get_arnold_render_payload(payload_data)
        else:
            self.log.debug("Submitting MayaBatch render..")
            payload = self._get_maya_payload(payload_data)

@ -635,53 +629,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
|
||||
return job_info, attr.asdict(plugin_info)
|
||||
|
||||
def _get_arnold_export_payload(self, data):
|
||||
|
||||
try:
|
||||
from openpype.scripts import export_maya_ass_job
|
||||
except Exception:
|
||||
raise AssertionError(
|
||||
"Expected module 'export_maya_ass_job' to be available")
|
||||
|
||||
module_path = export_maya_ass_job.__file__
|
||||
if module_path.endswith(".pyc"):
|
||||
module_path = module_path[: -len(".pyc")] + ".py"
|
||||
|
||||
script = os.path.normpath(module_path)
|
||||
|
||||
job_info = copy.deepcopy(self.job_info)
|
||||
job_info.Name = self._job_info_label("Export")
|
||||
|
||||
# Force a single frame Python job
|
||||
job_info.Plugin = "Python"
|
||||
job_info.Frames = 1
|
||||
|
||||
renderlayer = self._instance.data["setMembers"]
|
||||
|
||||
# add required env vars for the export script
|
||||
envs = {
|
||||
"AVALON_APP_NAME": os.environ.get("AVALON_APP_NAME"),
|
||||
"OPENPYPE_ASS_EXPORT_RENDER_LAYER": renderlayer,
|
||||
"OPENPYPE_ASS_EXPORT_SCENE_FILE": self.scene_path,
|
||||
"OPENPYPE_ASS_EXPORT_OUTPUT": job_info.OutputFilename[0],
|
||||
"OPENPYPE_ASS_EXPORT_START": int(self._instance.data["frameStartHandle"]), # noqa
|
||||
"OPENPYPE_ASS_EXPORT_END": int(self._instance.data["frameEndHandle"]), # noqa
|
||||
"OPENPYPE_ASS_EXPORT_STEP": 1
|
||||
}
|
||||
for key, value in envs.items():
|
||||
if not value:
|
||||
continue
|
||||
job_info.EnvironmentKeyValue[key] = value
|
||||
|
||||
plugin_info = PythonPluginInfo(
|
||||
ScriptFile=script,
|
||||
Version="3.6",
|
||||
Arguments="",
|
||||
SingleFrameOnly="True"
|
||||
)
|
||||
|
||||
return job_info, attr.asdict(plugin_info)
|
||||
|
||||
def _get_vray_render_payload(self, data):
|
||||
|
||||
# Job Info
|
||||
|
|
|
|||
|
|
@@ -70,7 +70,10 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
            # Update the representation expected files
            self.log.info("Update range from actual job range "
                          "to frame list: {}".format(frame_list))
            repre["files"] = sorted(job_expected_files)
            # single item files must be string not list
            repre["files"] = (sorted(job_expected_files)
                              if len(job_expected_files) > 1 else
                              list(job_expected_files)[0])

        # Update the expected files
        expected_files = job_expected_files

@@ -1774,7 +1774,7 @@ class CreateContext:
        self.creator_discover_result = report
        for creator_class in report.plugins:
            if inspect.isabstract(creator_class):
                self.log.info(
                self.log.debug(
                    "Skipping abstract Creator {}".format(str(creator_class))
                )
                continue

@@ -1804,6 +1804,7 @@ class CreateContext:
                self,
                self.headless
            )

            if not creator.enabled:
                disabled_creators[creator_identifier] = creator
                continue

@@ -1,16 +1,12 @@
import copy
import collections

from abc import (
    ABCMeta,
    abstractmethod,
    abstractproperty
)
from abc import ABCMeta, abstractmethod

import six

from openpype.settings import get_system_settings, get_project_settings
from openpype.lib import Logger
from openpype.lib import Logger, is_func_signature_supported
from openpype.pipeline.plugin_discover import (
    discover,
    register_plugin,

@@ -84,7 +80,8 @@ class SubsetConvertorPlugin(object):
    def host(self):
        return self._create_context.host

    @abstractproperty
    @property
    @abstractmethod
    def identifier(self):
        """Converted identifier.

@@ -161,7 +158,6 @@ class BaseCreator:

    Args:
        project_settings (Dict[str, Any]): Project settings.
        system_settings (Dict[str, Any]): System settings.
        create_context (CreateContext): Context which initialized creator.
        headless (bool): Running in headless mode.
    """

@@ -208,10 +204,41 @@ class BaseCreator:
        # - we may use UI inside processing this attribute should be checked
        self.headless = headless

        self.apply_settings(project_settings, system_settings)
        expect_system_settings = False
        if is_func_signature_supported(
            self.apply_settings, project_settings
        ):
            self.apply_settings(project_settings)
        else:
            expect_system_settings = True
            # Backwards compatibility for system settings
            self.apply_settings(project_settings, system_settings)

    def apply_settings(self, project_settings, system_settings):
        """Method called on initialization of plugin to apply settings."""
        init_use_base = any(
            self.__class__.__init__ is cls.__init__
            for cls in {
                BaseCreator,
                Creator,
                HiddenCreator,
                AutoCreator,
            }
        )
        if not init_use_base or expect_system_settings:
            self.log.warning((
                "WARNING: Source - Create plugin {}."
                " System settings argument will not be passed to"
                " '__init__' and 'apply_settings' methods in future versions"
                " of OpenPype. Planned version to drop the support"
                " is 3.16.6 or 3.17.0. Please contact Ynput core team if you"
                " need to keep system settings."
            ).format(self.__class__.__name__))

    def apply_settings(self, project_settings):
        """Method called on initialization of plugin to apply settings.

        Args:
            project_settings (dict[str, Any]): Project settings.
        """

        pass

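From a plugin author's point of view, the compatibility shim above means existing two-argument overrides keep working but trigger the deprecation warning, while new plugins should override the single-argument form. A hedged sketch of the new style (class name, family and settings keys are invented, and the other abstract methods a real creator must implement are omitted):

from openpype.pipeline.create import Creator


class ExampleCreator(Creator):
    """Hypothetical creator using the single-argument 'apply_settings'."""

    identifier = "example"
    family = "example"
    label = "Example"

    def apply_settings(self, project_settings):
        # Only project settings are passed; system settings are gone from
        # the signature, as the warning above announces.
        plugin_settings = (
            project_settings
            .get("example_host", {})
            .get("create", {})
            .get("ExampleCreator", {})
        )
        self.enabled = plugin_settings.get("enabled", True)

    def create(self, subset_name, instance_data, pre_create_data):
        # Host specific creation logic; out of scope for this sketch.
        pass
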
@@ -224,7 +251,8 @@ class BaseCreator:

        return self.family

    @abstractproperty
    @property
    @abstractmethod
    def family(self):
        """Family that plugin represents."""

@ -1,105 +0,0 @@
|
|||
"""This module is used for command line exporting of ASS files.
|
||||
|
||||
WARNING:
|
||||
This need to be rewriten to be able use it in Pype 3!
|
||||
"""
|
||||
|
||||
import os
|
||||
import argparse
|
||||
import logging
|
||||
import subprocess
|
||||
import platform
|
||||
|
||||
try:
|
||||
from shutil import which
|
||||
except ImportError:
|
||||
# we are in python < 3.3
|
||||
def which(command):
|
||||
path = os.getenv('PATH')
|
||||
for p in path.split(os.path.pathsep):
|
||||
p = os.path.join(p, command)
|
||||
if os.path.exists(p) and os.access(p, os.X_OK):
|
||||
return p
|
||||
|
||||
handler = logging.basicConfig()
|
||||
log = logging.getLogger("Publish Image Sequences")
|
||||
log.setLevel(logging.DEBUG)
|
||||
|
||||
error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
|
||||
|
||||
|
||||
def __main__():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--paths",
|
||||
nargs="*",
|
||||
default=[],
|
||||
help="The filepaths to publish. This can be a "
|
||||
"directory or a path to a .json publish "
|
||||
"configuration.")
|
||||
parser.add_argument("--gui",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Whether to run Pyblish in GUI mode.")
|
||||
|
||||
parser.add_argument("--pype", help="Pype root")
|
||||
|
||||
kwargs, args = parser.parse_known_args()
|
||||
|
||||
print("Running pype ...")
|
||||
auto_pype_root = os.path.dirname(os.path.abspath(__file__))
|
||||
auto_pype_root = os.path.abspath(auto_pype_root + "../../../../..")
|
||||
|
||||
auto_pype_root = os.environ.get('OPENPYPE_SETUP_PATH') or auto_pype_root
|
||||
if os.environ.get('OPENPYPE_SETUP_PATH'):
|
||||
print("Got Pype location from environment: {}".format(
|
||||
os.environ.get('OPENPYPE_SETUP_PATH')))
|
||||
|
||||
pype_command = "openpype.ps1"
|
||||
if platform.system().lower() == "linux":
|
||||
pype_command = "pype"
|
||||
elif platform.system().lower() == "windows":
|
||||
pype_command = "openpype.bat"
|
||||
|
||||
if kwargs.pype:
|
||||
pype_root = kwargs.pype
|
||||
else:
|
||||
# test if pype.bat / pype is in the PATH
|
||||
# if it is, which() will return its path and we use that.
|
||||
# if not, we use auto_pype_root path. Caveat of that one is
|
||||
# that it can be UNC path and that will not work on windows.
|
||||
|
||||
pype_path = which(pype_command)
|
||||
|
||||
if pype_path:
|
||||
pype_root = os.path.dirname(pype_path)
|
||||
else:
|
||||
pype_root = auto_pype_root
|
||||
|
||||
print("Set pype root to: {}".format(pype_root))
|
||||
print("Paths: {}".format(kwargs.paths or [os.getcwd()]))
|
||||
|
||||
# paths = kwargs.paths or [os.environ.get("OPENPYPE_METADATA_FILE")] or [os.getcwd()] # noqa
|
||||
|
||||
mayabatch = os.environ.get("AVALON_APP_NAME").replace("maya", "mayabatch")
|
||||
args = [
|
||||
os.path.join(pype_root, pype_command),
|
||||
"launch",
|
||||
"--app",
|
||||
mayabatch,
|
||||
"-script",
|
||||
os.path.join(pype_root, "repos", "pype",
|
||||
"pype", "scripts", "export_maya_ass_sequence.mel")
|
||||
]
|
||||
|
||||
print("Pype command: {}".format(" ".join(args)))
|
||||
# Forcing forwaring the environment because environment inheritance does
|
||||
# not always work.
|
||||
# Cast all values in environment to str to be safe
|
||||
env = {k: str(v) for k, v in os.environ.items()}
|
||||
exit_code = subprocess.call(args, env=env)
|
||||
if exit_code != 0:
|
||||
raise RuntimeError("Publishing failed.")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
__main__()
|
||||
|
|
@ -1,67 +0,0 @@
|
|||
/*
|
||||
Script to export specified layer as ass files.
|
||||
|
||||
Attributes:
|
||||
|
||||
scene_file (str): Name of the scene to load.
|
||||
start (int): Start frame.
|
||||
end (int): End frame.
|
||||
step (int): Step size.
|
||||
output_path (str): File output path.
|
||||
render_layer (str): Name of render layer.
|
||||
|
||||
*/
|
||||
|
||||
$scene_file=`getenv "OPENPYPE_ASS_EXPORT_SCENE_FILE"`;
|
||||
$step=`getenv "OPENPYPE_ASS_EXPORT_STEP"`;
|
||||
$start=`getenv "OPENPYPE_ASS_EXPORT_START"`;
|
||||
$end=`getenv "OPENPYPE_ASS_EXPORT_END"`;
|
||||
$file_path=`getenv "OPENPYPE_ASS_EXPORT_OUTPUT"`;
|
||||
$render_layer = `getenv "OPENPYPE_ASS_EXPORT_RENDER_LAYER"`;
|
||||
|
||||
print("*** ASS Export Plugin\n");
|
||||
|
||||
if ($scene_file == "") {
|
||||
print("!!! cannot determine scene file\n");
|
||||
quit -a -ex -1;
|
||||
}
|
||||
|
||||
if ($step == "") {
|
||||
print("!!! cannot determine step size\n");
|
||||
quit -a -ex -1;
|
||||
}
|
||||
|
||||
if ($start == "") {
|
||||
print("!!! cannot determine start frame\n");
|
||||
quit -a -ex -1;
|
||||
}
|
||||
|
||||
if ($end == "") {
|
||||
print("!!! cannot determine end frame\n");
|
||||
quit -a -ex -1;
|
||||
}
|
||||
|
||||
if ($file_path == "") {
|
||||
print("!!! cannot determine output file\n");
|
||||
quit -a -ex -1;
|
||||
}
|
||||
|
||||
if ($render_layer == "") {
|
||||
print("!!! cannot determine render layer\n");
|
||||
quit -a -ex -1;
|
||||
}
|
||||
|
||||
|
||||
print(">>> Opening Scene [ " + $scene_file + " ]\n");
|
||||
|
||||
// open scene
|
||||
file -o -f $scene_file;
|
||||
|
||||
// switch to render layer
|
||||
print(">>> Switching layer [ "+ $render_layer + " ]\n");
|
||||
editRenderLayerGlobals -currentRenderLayer $render_layer;
|
||||
|
||||
// export
|
||||
print(">>> Exporting to [ " + $file_path + " ]\n");
|
||||
arnoldExportAss -mask 255 -sl 1 -ll 1 -bb 1 -sf $start -se $end -b -fs $step;
|
||||
print("--- Done\n");
|
||||
|
|
@ -1,241 +0,0 @@
|
|||
import os
|
||||
import re
|
||||
import sys
|
||||
import logging
|
||||
|
||||
from openpype.client import get_asset_by_name, get_versions
|
||||
|
||||
# Pipeline imports
|
||||
from openpype.hosts.fusion import api
|
||||
import openpype.hosts.fusion.api.lib as fusion_lib
|
||||
|
||||
# Config imports
|
||||
from openpype.lib import version_up
|
||||
from openpype.pipeline import (
|
||||
install_host,
|
||||
registered_host,
|
||||
legacy_io,
|
||||
get_current_project_name,
|
||||
)
|
||||
|
||||
from openpype.pipeline.context_tools import get_workdir_from_session
|
||||
from openpype.pipeline.version_start import get_versioning_start
|
||||
|
||||
log = logging.getLogger("Update Slap Comp")
|
||||
|
||||
|
||||
def _format_version_folder(folder):
|
||||
"""Format a version folder based on the filepath
|
||||
|
||||
Args:
|
||||
folder: file path to a folder
|
||||
|
||||
Returns:
|
||||
str: new version folder name
|
||||
"""
|
||||
|
||||
new_version = get_versioning_start(
|
||||
get_current_project_name(),
|
||||
"fusion",
|
||||
family="workfile"
|
||||
)
|
||||
if os.path.isdir(folder):
|
||||
re_version = re.compile(r"v\d+$")
|
||||
versions = [i for i in os.listdir(folder) if os.path.isdir(i)
|
||||
and re_version.match(i)]
|
||||
if versions:
|
||||
# ensure the "v" is not included
|
||||
new_version = int(max(versions)[1:]) + 1
|
||||
|
||||
version_folder = "v{:03d}".format(new_version)
|
||||
|
||||
return version_folder
|
||||
|
||||
|
||||
def _get_fusion_instance():
|
||||
fusion = getattr(sys.modules["__main__"], "fusion", None)
|
||||
if fusion is None:
|
||||
try:
|
||||
# Support for FuScript.exe, BlackmagicFusion module for py2 only
|
||||
import BlackmagicFusion as bmf
|
||||
fusion = bmf.scriptapp("Fusion")
|
||||
except ImportError:
|
||||
raise RuntimeError("Could not find a Fusion instance")
|
||||
return fusion
|
||||
|
||||
|
||||
def _format_filepath(session):
|
||||
|
||||
project = session["AVALON_PROJECT"]
|
||||
asset = session["AVALON_ASSET"]
|
||||
|
||||
# Save updated slap comp
|
||||
work_path = get_workdir_from_session(session)
|
||||
walk_to_dir = os.path.join(work_path, "scenes", "slapcomp")
|
||||
slapcomp_dir = os.path.abspath(walk_to_dir)
|
||||
|
||||
# Ensure destination exists
|
||||
if not os.path.isdir(slapcomp_dir):
|
||||
log.warning("Folder did not exist, creating folder structure")
|
||||
os.makedirs(slapcomp_dir)
|
||||
|
||||
# Compute output path
|
||||
new_filename = "{}_{}_slapcomp_v001.comp".format(project, asset)
|
||||
new_filepath = os.path.join(slapcomp_dir, new_filename)
|
||||
|
||||
# Create new unqiue filepath
|
||||
if os.path.exists(new_filepath):
|
||||
new_filepath = version_up(new_filepath)
|
||||
|
||||
return new_filepath
|
||||
|
||||
|
||||
def _update_savers(comp, session):
|
||||
"""Update all savers of the current comp to ensure the output is correct
|
||||
|
||||
Args:
|
||||
comp (object): current comp instance
|
||||
session (dict): the current Avalon session
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
|
||||
new_work = get_workdir_from_session(session)
|
||||
renders = os.path.join(new_work, "renders")
|
||||
version_folder = _format_version_folder(renders)
|
||||
renders_version = os.path.join(renders, version_folder)
|
||||
|
||||
comp.Print("New renders to: %s\n" % renders)
|
||||
|
||||
with api.comp_lock_and_undo_chunk(comp):
|
||||
savers = comp.GetToolList(False, "Saver").values()
|
||||
for saver in savers:
|
||||
filepath = saver.GetAttrs("TOOLST_Clip_Name")[1.0]
|
||||
filename = os.path.basename(filepath)
|
||||
new_path = os.path.join(renders_version, filename)
|
||||
saver["Clip"] = new_path
|
||||
|
||||
|
||||
def update_frame_range(comp, representations):
|
||||
"""Update the frame range of the comp and render length
|
||||
|
||||
The start and end frame are based on the lowest start frame and the highest
|
||||
end frame
|
||||
|
||||
Args:
|
||||
comp (object): current focused comp
|
||||
representations (list) collection of dicts
|
||||
|
||||
Returns:
|
||||
None
|
||||
|
||||
"""
|
||||
|
||||
version_ids = [r["parent"] for r in representations]
|
||||
project_name = get_current_project_name()
|
||||
versions = list(get_versions(project_name, version_ids=version_ids))
|
||||
|
||||
start = min(v["data"]["frameStart"] for v in versions)
|
||||
end = max(v["data"]["frameEnd"] for v in versions)
|
||||
|
||||
fusion_lib.update_frame_range(start, end, comp=comp)
|
||||
|
||||
|
||||
def switch(asset_name, filepath=None, new=True):
|
||||
"""Switch the current containers of the file to the other asset (shot)
|
||||
|
||||
Args:
|
||||
filepath (str): file path of the comp file
|
||||
asset_name (str): name of the asset (shot)
|
||||
new (bool): Save updated comp under a different name
|
||||
|
||||
Returns:
|
||||
comp path (str): new filepath of the updated comp
|
||||
|
||||
"""
|
||||
|
||||
# If filepath provided, ensure it is valid absolute path
|
||||
if filepath is not None:
|
||||
if not os.path.isabs(filepath):
|
||||
filepath = os.path.abspath(filepath)
|
||||
|
||||
assert os.path.exists(filepath), "%s must exist " % filepath
|
||||
|
||||
# Assert asset name exists
|
||||
# It is better to do this here then to wait till switch_shot does it
|
||||
project_name = get_current_project_name()
|
||||
asset = get_asset_by_name(project_name, asset_name)
|
||||
assert asset, "Could not find '%s' in the database" % asset_name
|
||||
|
||||
# Go to comp
|
||||
if not filepath:
|
||||
current_comp = api.get_current_comp()
|
||||
assert current_comp is not None, "Could not find current comp"
|
||||
else:
|
||||
fusion = _get_fusion_instance()
|
||||
current_comp = fusion.LoadComp(filepath, quiet=True)
|
||||
assert current_comp is not None, "Fusion could not load '%s'" % filepath
|
||||
|
||||
host = registered_host()
|
||||
containers = list(host.ls())
|
||||
assert containers, "Nothing to update"
|
||||
|
||||
representations = []
|
||||
for container in containers:
|
||||
try:
|
||||
representation = fusion_lib.switch_item(container,
|
||||
asset_name=asset_name)
|
||||
representations.append(representation)
|
||||
except Exception as e:
|
||||
current_comp.Print("Error in switching! %s\n" % e.message)
|
||||
|
||||
message = "Switched %i Loaders of the %i\n" % (len(representations),
|
||||
len(containers))
|
||||
current_comp.Print(message)
|
||||
|
||||
# Build the session to switch to
|
||||
switch_to_session = legacy_io.Session.copy()
|
||||
switch_to_session["AVALON_ASSET"] = asset['name']
|
||||
|
||||
if new:
|
||||
comp_path = _format_filepath(switch_to_session)
|
||||
|
||||
# Update savers output based on new session
|
||||
_update_savers(current_comp, switch_to_session)
|
||||
else:
|
||||
comp_path = version_up(filepath)
|
||||
|
||||
current_comp.Print(comp_path)
|
||||
|
||||
current_comp.Print("\nUpdating frame range")
|
||||
update_frame_range(current_comp, representations)
|
||||
|
||||
current_comp.Save(comp_path)
|
||||
|
||||
return comp_path
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser(description="Switch to a shot within an"
|
||||
"existing comp file")
|
||||
|
||||
parser.add_argument("--file_path",
|
||||
type=str,
|
||||
default=True,
|
||||
help="File path of the comp to use")
|
||||
|
||||
parser.add_argument("--asset_name",
|
||||
type=str,
|
||||
default=True,
|
||||
help="Name of the asset (shot) to switch")
|
||||
|
||||
args, unknown = parser.parse_args()
|
||||
|
||||
install_host(api)
|
||||
switch(args.asset_name, args.file_path)
|
||||
|
||||
sys.exit(0)
|
||||
|
|
@@ -19,6 +19,7 @@ from openpype.tools.utils import (
    CustomTextComboBox,
    FocusSpinBox,
    FocusDoubleSpinBox,
    MultiSelectionComboBox,
)
from openpype.widgets.nice_checkbox import NiceCheckbox

@@ -412,10 +413,19 @@ class EnumAttrWidget(_BaseAttrDefWidget):
        self._multivalue = False
        super(EnumAttrWidget, self).__init__(*args, **kwargs)

    @property
    def multiselection(self):
        return self.attr_def.multiselection

    def _ui_init(self):
        input_widget = CustomTextComboBox(self)
        combo_delegate = QtWidgets.QStyledItemDelegate(input_widget)
        input_widget.setItemDelegate(combo_delegate)
        if self.multiselection:
            input_widget = MultiSelectionComboBox(self)

        else:
            input_widget = CustomTextComboBox(self)
            combo_delegate = QtWidgets.QStyledItemDelegate(input_widget)
            input_widget.setItemDelegate(combo_delegate)
            self._combo_delegate = combo_delegate

        if self.attr_def.tooltip:
            input_widget.setToolTip(self.attr_def.tooltip)

@@ -427,9 +437,11 @@ class EnumAttrWidget(_BaseAttrDefWidget):
        if idx >= 0:
            input_widget.setCurrentIndex(idx)

        input_widget.currentIndexChanged.connect(self._on_value_change)
        if self.multiselection:
            input_widget.value_changed.connect(self._on_value_change)
        else:
            input_widget.currentIndexChanged.connect(self._on_value_change)

        self._combo_delegate = combo_delegate
        self._input_widget = input_widget

        self.main_layout.addWidget(input_widget, 0)

@@ -442,17 +454,40 @@ class EnumAttrWidget(_BaseAttrDefWidget):
        self.value_changed.emit(new_value, self.attr_def.id)

    def current_value(self):
        if self.multiselection:
            return self._input_widget.value()
        idx = self._input_widget.currentIndex()
        return self._input_widget.itemData(idx)

    def _multiselection_multivalue_prep(self, values):
        final = None
        multivalue = False
        for value in values:
            value = set(value)
            if final is None:
                final = value
            elif multivalue or final != value:
                final |= value
                multivalue = True
        return list(final), multivalue

    def set_value(self, value, multivalue=False):
        if multivalue:
            set_value = set(value)
            if len(set_value) == 1:
                multivalue = False
                value = tuple(set_value)[0]
            if self.multiselection:
                value, multivalue = self._multiselection_multivalue_prep(
                    value)
            else:
                set_value = set(value)
                if len(set_value) == 1:
                    multivalue = False
                    value = tuple(set_value)[0]

        if not multivalue:
        if self.multiselection:
            self._input_widget.blockSignals(True)
            self._input_widget.set_value(value)
            self._input_widget.blockSignals(False)

        elif not multivalue:
            idx = self._input_widget.findData(value)
            cur_idx = self._input_widget.currentIndex()
            if idx != cur_idx and idx >= 0:

@@ -4,6 +4,7 @@ from qtpy import QtWidgets, QtCore, QtGui

from openpype.widgets.sliders import NiceSlider
from openpype.tools.settings import CHILD_OFFSET
from openpype.tools.utils import MultiSelectionComboBox
from openpype.settings.entities.exceptions import BaseInvalidValue

from .widgets import (

@@ -15,7 +16,6 @@ from .widgets import (
    SettingsNiceCheckbox,
    SettingsLineEdit
)
from .multiselection_combobox import MultiSelectionComboBox
from .wrapper_widgets import (
    WrapperWidget,
    CollapsibleWrapper,

@@ -38,6 +38,7 @@ from .models import (
from .overlay_messages import (
    MessageOverlayObject,
)
from .multiselection_combobox import MultiSelectionComboBox


__all__ = (

@@ -78,4 +79,6 @@ __all__ = (
    "RecursiveSortFilterProxyModel",

    "MessageOverlayObject",

    "MultiSelectionComboBox",
)

@@ -170,8 +170,12 @@ def get_openpype_qt_app():
        if attr is not None:
            QtWidgets.QApplication.setAttribute(attr)

    if hasattr(
        QtWidgets.QApplication, "setHighDpiScaleFactorRoundingPolicy"
    policy = os.getenv("QT_SCALE_FACTOR_ROUNDING_POLICY")
    if (
        hasattr(
            QtWidgets.QApplication, "setHighDpiScaleFactorRoundingPolicy"
        )
        and not policy
    ):
        QtWidgets.QApplication.setHighDpiScaleFactorRoundingPolicy(
            QtCore.Qt.HighDpiScaleFactorRoundingPolicy.PassThrough

@@ -1,9 +1,10 @@
from qtpy import QtCore, QtGui, QtWidgets
from openpype.tools.utils.lib import (

from .lib import (
    checkstate_int_to_enum,
    checkstate_enum_to_int,
)
from openpype.tools.utils.constants import (
from .constants import (
    CHECKED_INT,
    UNCHECKED_INT,
    ITEM_IS_USER_TRISTATE,

@@ -60,12 +61,25 @@ class MultiSelectionComboBox(QtWidgets.QComboBox):
        self._block_mouse_release_timer = QtCore.QTimer(self, singleShot=True)
        self._initial_mouse_pos = None
        self._separator = separator
        self.placeholder_text = placeholder
        self.delegate = ComboItemDelegate(self)
        self.setItemDelegate(self.delegate)
        self._placeholder_text = placeholder
        delegate = ComboItemDelegate(self)
        self.setItemDelegate(delegate)

        self.lines = {}
        self.item_height = None
        self._lines = {}
        self._item_height = None
        self._custom_text = None
        self._delegate = delegate

    def get_placeholder_text(self):
        return self._placeholder_text

    def set_placeholder_text(self, text):
        self._placeholder_text = text
        self._update_size_hint()

    def set_custom_text(self, text):
        self._custom_text = text
        self._update_size_hint()

    def focusInEvent(self, event):
        self.focused_in.emit()

@@ -158,7 +172,7 @@ class MultiSelectionComboBox(QtWidgets.QComboBox):
        if new_state is not None:
            model.setData(current_index, new_state, QtCore.Qt.CheckStateRole)
            self.view().update(current_index)
            self.update_size_hint()
            self._update_size_hint()
            self.value_changed.emit()
            return True

@@ -182,25 +196,33 @@ class MultiSelectionComboBox(QtWidgets.QComboBox):
        self.initStyleOption(option)
        painter.drawComplexControl(QtWidgets.QStyle.CC_ComboBox, option)

        # draw the icon and text
        items = self.checked_items_text()
        if not items:
            option.currentText = self.placeholder_text
        # draw the icon and text
        draw_text = True
        combotext = None
        if self._custom_text is not None:
            combotext = self._custom_text
        elif not items:
            combotext = self._placeholder_text
        else:
            draw_text = False
        if draw_text:
            option.currentText = combotext
            option.palette.setCurrentColorGroup(QtGui.QPalette.Disabled)
            painter.drawControl(QtWidgets.QStyle.CE_ComboBoxLabel, option)
            return

        font_metricts = self.fontMetrics()

        if self.item_height is None:
        if self._item_height is None:
            self.updateGeometry()
            self.update()
            return

        for line, items in self.lines.items():
        for line, items in self._lines.items():
            top_y = (
                option.rect.top()
                + (line * self.item_height)
                + (line * self._item_height)
                + self.top_bottom_margins
            )
            left_x = option.rect.left() + self.left_offset

@@ -210,7 +232,7 @@ class MultiSelectionComboBox(QtWidgets.QComboBox):

            label_rect.moveTop(top_y)
            label_rect.moveLeft(left_x)
            label_rect.setHeight(self.item_height)
            label_rect.setHeight(self._item_height)
            label_rect.setWidth(
                label_rect.width() + self.left_right_padding
            )

@@ -239,14 +261,18 @@ class MultiSelectionComboBox(QtWidgets.QComboBox):

    def resizeEvent(self, *args, **kwargs):
        super(MultiSelectionComboBox, self).resizeEvent(*args, **kwargs)
        self.update_size_hint()
        self._update_size_hint()

    def update_size_hint(self):
        self.lines = {}
    def _update_size_hint(self):
        if self._custom_text is not None:
            self.update()
            return
        self._lines = {}

        items = self.checked_items_text()
        if not items:
            self.update()
            self.repaint()
            return

        option = QtWidgets.QStyleOptionComboBox()

@@ -259,7 +285,7 @@ class MultiSelectionComboBox(QtWidgets.QComboBox):
        total_width = option.rect.width() - btn_rect.width()

        line = 0
        self.lines = {line: []}
        self._lines = {line: []}

        font_metricts = self.fontMetrics()
        default_left_x = 0 + self.left_offset

@@ -270,18 +296,18 @@ class MultiSelectionComboBox(QtWidgets.QComboBox):
            right_x = left_x + width
            if right_x > total_width:
                left_x = int(default_left_x)
                if self.lines.get(line):
                if self._lines.get(line):
                    line += 1
                    self.lines[line] = [item]
                    self._lines[line] = [item]
                    left_x += width
                else:
                    self.lines[line] = [item]
                    self._lines[line] = [item]
                    line += 1
            else:
                if line in self.lines:
                    self.lines[line].append(item)
                if line in self._lines:
                    self._lines[line].append(item)
                else:
                    self.lines[line] = [item]
                    self._lines[line] = [item]
                left_x = left_x + width + self.item_spacing

        self.update()

@@ -289,18 +315,20 @@ class MultiSelectionComboBox(QtWidgets.QComboBox):

    def sizeHint(self):
        value = super(MultiSelectionComboBox, self).sizeHint()
        lines = len(self.lines)
        if lines == 0:
            lines = 1
        lines = 1
        if self._custom_text is None:
            lines = len(self._lines)
            if lines == 0:
                lines = 1

        if self.item_height is None:
            self.item_height = (
        if self._item_height is None:
            self._item_height = (
                self.fontMetrics().height()
                + (2 * self.top_bottom_padding)
                + (2 * self.top_bottom_margins)
            )
        value.setHeight(
            (lines * self.item_height)
            (lines * self._item_height)
            + (2 * self.top_bottom_margins)
        )
        return value

@@ -316,7 +344,7 @@ class MultiSelectionComboBox(QtWidgets.QComboBox):
            else:
                check_state = UNCHECKED_INT
            self.setItemData(idx, check_state, QtCore.Qt.CheckStateRole)
        self.update_size_hint()
        self._update_size_hint()

    def value(self):
        items = list()

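The combobox changes above add a small public surface beside the renamed private attributes: set_placeholder_text, set_custom_text, set_value, value and the value_changed signal. A rough usage sketch (item setup and constructor defaults are assumptions, not shown in this diff):

from qtpy import QtWidgets
from openpype.tools.utils import MultiSelectionComboBox

app = QtWidgets.QApplication([])

combo = MultiSelectionComboBox()
# Items behave like regular QComboBox items; checked items form 'value()'.
for fmt in ("exr", "png", "mov"):
    combo.addItem(fmt, fmt)

combo.set_placeholder_text("< Select formats >")
combo.set_value(["exr", "png"])
combo.value_changed.connect(lambda: print(combo.value()))

combo.show()
app.exec_()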