Merge branch 'develop' into enhancement/tycache-enhancement-cached-material

Kayla Man 2024-05-03 21:02:40 +08:00 committed by GitHub
commit 6d542a148f
71 changed files with 3380 additions and 2155 deletions

@@ -8,14 +8,11 @@ from ayon_core.lib import Logger, register_event_callback
from ayon_core.pipeline import (
register_loader_plugin_path,
register_creator_plugin_path,
register_workfile_build_plugin_path,
AVALON_CONTAINER_ID,
AVALON_INSTANCE_ID,
AYON_INSTANCE_ID,
)
from ayon_core.hosts.aftereffects.api.workfile_template_builder import (
AEPlaceholderLoadPlugin,
AEPlaceholderCreatePlugin
)
from ayon_core.pipeline.load import any_outdated_containers
import ayon_core.hosts.aftereffects
@@ -40,6 +37,7 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build")
class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
@@ -76,6 +74,7 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
register_workfile_build_plugin_path(WORKFILE_BUILD_PATH)
register_event_callback("application.launched", application_launch)
@@ -118,12 +117,6 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
item["id"] = "publish_context"
self.stub.imprint(item["id"], item)
def get_workfile_build_placeholder_plugins(self):
return [
AEPlaceholderLoadPlugin,
AEPlaceholderCreatePlugin
]
# created instances section
def list_instances(self):
"""List all created instances from current workfile which

@@ -1,6 +1,7 @@
import os.path
import uuid
import shutil
from abc import abstractmethod
from ayon_core.pipeline import registered_host
from ayon_core.tools.workfile_template_build import (
@@ -9,13 +10,9 @@ from ayon_core.tools.workfile_template_build import (
from ayon_core.pipeline.workfile.workfile_template_builder import (
AbstractTemplateBuilder,
PlaceholderPlugin,
LoadPlaceholderItem,
CreatePlaceholderItem,
PlaceholderLoadMixin,
PlaceholderCreateMixin
PlaceholderItem
)
from ayon_core.hosts.aftereffects.api import get_stub
from ayon_core.hosts.aftereffects.api.lib import set_settings
PLACEHOLDER_SET = "PLACEHOLDERS_SET"
PLACEHOLDER_ID = "openpype.placeholder"
@@ -51,6 +48,10 @@ class AETemplateBuilder(AbstractTemplateBuilder):
class AEPlaceholderPlugin(PlaceholderPlugin):
"""Contains generic methods for all PlaceholderPlugins."""
@abstractmethod
def _create_placeholder_item(self, item_data: dict) -> PlaceholderItem:
pass
def collect_placeholders(self):
"""Collect info from file metadata about created placeholders.
@@ -63,17 +64,7 @@ class AEPlaceholderPlugin(PlaceholderPlugin):
if item.get("plugin_identifier") != self.identifier:
continue
if isinstance(self, AEPlaceholderLoadPlugin):
item = LoadPlaceholderItem(item["uuid"],
item["data"],
self)
elif isinstance(self, AEPlaceholderCreatePlugin):
item = CreatePlaceholderItem(item["uuid"],
item["data"],
self)
else:
raise NotImplementedError(f"Not implemented for {type(self)}")
item = self._create_placeholder_item(item)
output.append(item)
return output
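The refactor above replaces per-subclass isinstance branching with an abstract factory hook. A minimal standalone sketch of the pattern (hypothetical names, not the actual AYON classes):

from abc import ABC, abstractmethod

class BasePlaceholderPlugin(ABC):
    @abstractmethod
    def _create_placeholder_item(self, item_data):
        """Each subclass decides which concrete item type to build."""

    def collect_placeholders(self, raw_items):
        # No isinstance dispatch; the subclass hook picks the item type.
        return [self._create_placeholder_item(data) for data in raw_items]

class LoadVariant(BasePlaceholderPlugin):
    def _create_placeholder_item(self, item_data):
        return ("load", item_data["uuid"])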
@@ -135,87 +126,6 @@ class AEPlaceholderPlugin(PlaceholderPlugin):
stub.imprint(item_id, container_data)
class AEPlaceholderCreatePlugin(AEPlaceholderPlugin, PlaceholderCreateMixin):
"""Adds Create placeholder.
This adds a composition and runs Create
"""
identifier = "aftereffects.create"
label = "AfterEffects create"
def create_placeholder(self, placeholder_data):
stub = get_stub()
name = "CREATEPLACEHOLDER"
item_id = stub.add_item(name, "COMP")
self._imprint_item(item_id, name, placeholder_data, stub)
def populate_placeholder(self, placeholder):
"""Replace 'placeholder' with publishable instance.
Renames prepared composition name, creates publishable instance, sets
frame/duration settings according to DB.
"""
pre_create_data = {"use_selection": True}
item_id, item = self._get_item(placeholder)
get_stub().select_items([item_id])
self.populate_create_placeholder(placeholder, pre_create_data)
# apply settings for populated composition
item_id, metadata_item = self._get_item(placeholder)
set_settings(True, True, [item_id])
def get_placeholder_options(self, options=None):
return self.get_create_plugin_options(options)
class AEPlaceholderLoadPlugin(AEPlaceholderPlugin, PlaceholderLoadMixin):
identifier = "aftereffects.load"
label = "AfterEffects load"
def create_placeholder(self, placeholder_data):
"""Creates AE's Placeholder item in Project items list.
Sets dummy resolution/duration/fps settings, will be replaced when
populated.
"""
stub = get_stub()
name = "LOADERPLACEHOLDER"
item_id = stub.add_placeholder(name, 1920, 1060, 25, 10)
self._imprint_item(item_id, name, placeholder_data, stub)
def populate_placeholder(self, placeholder):
"""Use Openpype Loader from `placeholder` to create new FootageItems
New FootageItems are created, files are imported.
"""
self.populate_load_placeholder(placeholder)
errors = placeholder.get_errors()
stub = get_stub()
if errors:
stub.print_msg("\n".join(errors))
else:
if not placeholder.data["keep_placeholder"]:
metadata = stub.get_metadata()
for item in metadata:
if not item.get("is_placeholder"):
continue
scene_identifier = item.get("uuid")
if (scene_identifier and
scene_identifier == placeholder.scene_identifier):
stub.delete_item(item["members"][0])
stub.remove_instance(placeholder.scene_identifier, metadata)
def get_placeholder_options(self, options=None):
return self.get_load_plugin_options(options)
def load_succeed(self, placeholder, container):
placeholder_item_id, _ = self._get_item(placeholder)
item_id = container.id
get_stub().add_item_instead_placeholder(placeholder_item_id, item_id)
def build_workfile_template(*args, **kwargs):
builder = AETemplateBuilder(registered_host())
builder.build_template(*args, **kwargs)
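A hedged usage sketch of the builder entry point above, assuming an active AfterEffects session and that build_template accepts the keep_placeholders keyword (as the Maya LoadAsTemplate loader later in this diff suggests):

from ayon_core.pipeline import registered_host

host = registered_host()
builder = AETemplateBuilder(host)
builder.build_template(keep_placeholders=True)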

@@ -0,0 +1,49 @@
from ayon_core.pipeline.workfile.workfile_template_builder import (
CreatePlaceholderItem,
PlaceholderCreateMixin
)
from ayon_core.hosts.aftereffects.api import get_stub
from ayon_core.hosts.aftereffects.api.lib import set_settings
import ayon_core.hosts.aftereffects.api.workfile_template_builder as wtb
class AEPlaceholderCreatePlugin(wtb.AEPlaceholderPlugin,
PlaceholderCreateMixin):
"""Adds Create placeholder.
This adds a composition and runs Create
"""
identifier = "aftereffects.create"
label = "AfterEffects create"
def _create_placeholder_item(self, item_data) -> CreatePlaceholderItem:
return CreatePlaceholderItem(
scene_identifier=item_data["uuid"],
data=item_data["data"],
plugin=self
)
def create_placeholder(self, placeholder_data):
stub = get_stub()
name = "CREATEPLACEHOLDER"
item_id = stub.add_item(name, "COMP")
self._imprint_item(item_id, name, placeholder_data, stub)
def populate_placeholder(self, placeholder):
"""Replace 'placeholder' with publishable instance.
Renames prepared composition name, creates publishable instance, sets
frame/duration settings according to DB.
"""
pre_create_data = {"use_selection": True}
item_id, item = self._get_item(placeholder)
get_stub().select_items([item_id])
self.populate_create_placeholder(placeholder, pre_create_data)
# apply settings for populated composition
item_id, metadata_item = self._get_item(placeholder)
set_settings(True, True, [item_id])
def get_placeholder_options(self, options=None):
return self.get_create_plugin_options(options)

@@ -0,0 +1,60 @@
from ayon_core.pipeline.workfile.workfile_template_builder import (
LoadPlaceholderItem,
PlaceholderLoadMixin
)
from ayon_core.hosts.aftereffects.api import get_stub
import ayon_core.hosts.aftereffects.api.workfile_template_builder as wtb
class AEPlaceholderLoadPlugin(wtb.AEPlaceholderPlugin, PlaceholderLoadMixin):
identifier = "aftereffects.load"
label = "AfterEffects load"
def _create_placeholder_item(self, item_data) -> LoadPlaceholderItem:
return LoadPlaceholderItem(
scene_identifier=item_data["uuid"],
data=item_data["data"],
plugin=self
)
def create_placeholder(self, placeholder_data):
"""Creates AE's Placeholder item in Project items list.
Sets dummy resolution/duration/fps settings, will be replaced when
populated.
"""
stub = get_stub()
name = "LOADERPLACEHOLDER"
item_id = stub.add_placeholder(name, 1920, 1060, 25, 10)
self._imprint_item(item_id, name, placeholder_data, stub)
def populate_placeholder(self, placeholder):
"""Use Openpype Loader from `placeholder` to create new FootageItems
New FootageItems are created, files are imported.
"""
self.populate_load_placeholder(placeholder)
errors = placeholder.get_errors()
stub = get_stub()
if errors:
stub.print_msg("\n".join(errors))
else:
if not placeholder.data["keep_placeholder"]:
metadata = stub.get_metadata()
for item in metadata:
if not item.get("is_placeholder"):
continue
scene_identifier = item.get("uuid")
if (scene_identifier and
scene_identifier == placeholder.scene_identifier):
stub.delete_item(item["members"][0])
stub.remove_instance(placeholder.scene_identifier, metadata)
def get_placeholder_options(self, options=None):
return self.get_load_plugin_options(options)
def load_succeed(self, placeholder, container):
placeholder_item_id, _ = self._get_item(placeholder)
item_id = container.id
get_stub().add_item_instead_placeholder(placeholder_item_id, item_id)

@@ -8,6 +8,7 @@ from .lib import (
sync_avalon_data_to_workfile,
launch_workfiles_app,
before_project_save,
apply_colorspace_project
)
from .tags import add_tags_to_workfile
from .menu import update_menu_task_label
@@ -44,6 +45,8 @@ def afterNewProjectCreated(event):
# reset workfiles startup not to open any more in session
os.environ["WORKFILES_STARTUP"] = "0"
apply_colorspace_project()
def beforeProjectLoad(event):
log.info("before project load event...")
@@ -122,6 +125,7 @@ def register_hiero_events():
except RuntimeError:
pass
def register_events():
"""
Adding all callbacks.

@@ -11,7 +11,6 @@ import warnings
import json
import ast
import secrets
import shutil
import hiero
from qtpy import QtWidgets, QtCore
@@ -36,9 +35,6 @@ from .constants import (
DEFAULT_SEQUENCE_NAME,
DEFAULT_BIN_NAME
)
from ayon_core.pipeline.colorspace import (
get_imageio_config
)
class _CTX:
@@ -105,9 +101,9 @@ def flatten(list_):
def get_current_project(remove_untitled=False):
projects = flatten(hiero.core.projects())
projects = hiero.core.projects()
if not remove_untitled:
return next(iter(projects))
return projects[0]
# if remove_untitled
for proj in projects:
@@ -1050,18 +1046,68 @@ def _set_hrox_project_knobs(doc, **knobs):
def apply_colorspace_project():
project_name = get_current_project_name()
# get path to the active project
project = get_current_project(remove_untitled=True)
current_file = project.path()
# close the active project
project.close()
"""Apply colorspaces from settings.
Due to not being able to set the project settings through the Python API,
we need to use some dubious code to find the widgets and set them. It is
possible to set the project settings without traversing through the widgets
but it involves reading the hrox files from disk with XML, so no in-memory
support. See https://community.foundry.com/discuss/topic/137771/change-a-project-s-default-color-transform-with-python # noqa
for more details.
"""
# get presets for hiero
project_name = get_current_project_name()
imageio = get_project_settings(project_name)["hiero"]["imageio"]
presets = imageio.get("workfile")
# Open Project Settings UI.
for act in hiero.ui.registeredActions():
if act.objectName() == "foundry.project.settings":
act.trigger()
# Find widgets from their sibling label.
labels = {
"Working Space:": "workingSpace",
"Viewer:": "viewerLut",
"Thumbnails:": "thumbnailLut",
"Monitor Out:": "monitorOutLut",
"8 Bit Files:": "eightBitLut",
"16 Bit Files:": "sixteenBitLut",
"Log Files:": "logLut",
"Floating Point Files:": "floatLut"
}
widgets = {x: None for x in labels.values()}
def _recursive_children(widget, labels, widgets):
children = widget.children()
for count, child in enumerate(children):
if isinstance(child, QtWidgets.QLabel):
if child.text() in labels.keys():
widgets[labels[child.text()]] = children[count + 1]
_recursive_children(child, labels, widgets)
app = QtWidgets.QApplication.instance()
title = "Project Settings"
for widget in app.topLevelWidgets():
if isinstance(widget, QtWidgets.QMainWindow):
if widget.windowTitle() != title:
continue
_recursive_children(widget, labels, widgets)
widget.close()
msg = "Setting value \"{}\" is not a valid option for \"{}\""
for key, widget in widgets.items():
options = [widget.itemText(i) for i in range(widget.count())]
setting_value = presets[key]
assert setting_value in options, msg.format(setting_value, key)
widget.setCurrentText(presets[key])
# This code block is for setting up project colorspaces for files on disk.
# Due to not having Python API access to set the project settings, the
# Foundry recommended way is to modify the hrox files on disk with XML. See
# this forum thread for more details;
# https://community.foundry.com/discuss/topic/137771/change-a-project-s-default-color-transform-with-python # noqa
'''
# backward compatibility layer
# TODO: remove this after some time
config_data = get_imageio_config(
@@ -1074,6 +1120,13 @@ def apply_colorspace_project():
"ocioConfigName": "custom"
})
# get path to the active project
project = get_current_project()
current_file = project.path()
msg = "The project needs to be saved to disk to apply colorspace settings."
assert current_file, msg
# save the workfile as subversion "comment:_colorspaceChange"
split_current_file = os.path.splitext(current_file)
copy_current_file = current_file
@@ -1116,6 +1169,7 @@ def apply_colorspace_project():
# open the file as current project
hiero.core.openProject(copy_current_file)
'''
def apply_colorspace_clips():
@@ -1125,10 +1179,8 @@ def apply_colorspace_clips():
# get presets for hiero
imageio = get_project_settings(project_name)["hiero"]["imageio"]
from pprint import pprint
presets = imageio.get("regexInputs", {}).get("inputs", {})
pprint(presets)
for clip in clips:
clip_media_source_path = clip.mediaSource().firstpath()
clip_name = clip.name()

@@ -144,7 +144,7 @@ def add_tags_to_workfile():
# Get project task types.
project_name = get_current_project_name()
project_entity = ayon_api.get_project(project_name)
task_types = project_entity["taskType"]
task_types = project_entity["taskTypes"]
nks_pres_tags["[Tasks]"] = {}
log.debug("__ tasks: {}".format(task_types))
for task_type in task_types:
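The fix above corrects the entity key from "taskType" to "taskTypes". A hedged sketch of the corrected access, assuming the AYON project entity shape where each task type entry is a dict carrying a "name" field:

import ayon_api

project_entity = ayon_api.get_project("my_project")  # hypothetical project
for task_type in project_entity["taskTypes"]:
    print(task_type["name"])  # e.g. "Animation", "Compositing"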

@@ -51,13 +51,12 @@ def open_file(filepath):
project = hiero.core.projects()[-1]
# open project file
hiero.core.openProject(filepath.replace(os.path.sep, "/"))
# close previous project
project.close()
# Close previous project if it's different from the current project.
filepath = filepath.replace(os.path.sep, "/")
if project.path().replace(os.path.sep, "/") != filepath:
# open project file
hiero.core.openProject(filepath)
project.close()
return True
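A hypothetical helper distilling the comparison introduced above; it is not part of the patch, just a sketch of the path normalization logic:

import os

def _same_project(project_path, filepath):
    # Normalize to forward slashes so Windows and POSIX paths compare equal.
    return (project_path.replace(os.path.sep, "/")
            == filepath.replace(os.path.sep, "/"))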

@@ -24,8 +24,14 @@ class SetDefaultDisplayView(PreLaunchHook):
if not OCIO:
return
# workfile settings added in '0.2.13'
houdini_color_settings = \
self.data["project_settings"]["houdini"]["imageio"]["workfile"]
self.data["project_settings"]["houdini"]["imageio"].get("workfile")
if not houdini_color_settings:
self.log.info("Hook 'SetDefaultDisplayView' requires Houdini "
"addon version >= '0.2.13'")
return
if not houdini_color_settings["enabled"]:
self.log.info(

@@ -18,8 +18,11 @@ class CreateReview(plugin.HoudiniCreator):
def apply_settings(self, project_settings):
super(CreateReview, self).apply_settings(project_settings)
color_settings = project_settings["houdini"]["imageio"]["workfile"]
if color_settings["enabled"]:
# workfile settings added in '0.2.13'
color_settings = project_settings["houdini"]["imageio"].get(
"workfile", {}
)
if color_settings.get("enabled"):
self.review_color_space = color_settings.get("review_color_space")
def create(self, product_name, instance_data, pre_create_data):

@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
import sys
import hou
import pyblish.api
import six
from ayon_core.pipeline import PublishValidationError
@@ -26,28 +25,21 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
raise PublishValidationError(
("Output node(s) `{}` are incorrect. "
"See plug-in log for details.").format(invalid),
title=self.label
"Output node '{}' is incorrect. "
"See plug-in log for details.".format(invalid),
title=self.label,
description=(
"### Invalid COP output node\n\n"
"The output node path for the instance must be set to a "
"valid COP node path.\n\nSee the log for more details."
)
)
@classmethod
def get_invalid(cls, instance):
output_node = instance.data.get("output_node")
import hou
try:
output_node = instance.data["output_node"]
except KeyError:
six.reraise(
PublishValidationError,
PublishValidationError(
"Can't determine COP output node.",
title=cls.__name__),
sys.exc_info()[2]
)
if output_node is None:
if not output_node:
node = hou.node(instance.data.get("instance_node"))
cls.log.error(
"COP Output node in '%s' does not exist. "
@@ -61,8 +53,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
cls.log.error(
"Output node %s is not a COP node. "
"COP Path must point to a COP node, "
"instead found category type: %s"
% (output_node.path(), output_node.type().category().name())
"instead found category type: %s",
output_node.path(), output_node.type().category().name()
)
return [output_node.path()]
@@ -70,9 +62,7 @@
# is Cop2 to avoid potential edge case scenarios even though
# the isinstance check above should be stricter than this category
if output_node.type().category().name() != "Cop2":
raise PublishValidationError(
(
"Output node {} is not of category Cop2."
" This is a bug..."
).format(output_node.path()),
title=cls.label)
cls.log.error(
"Output node %s is not of category Cop2.", output_node.path()
)
return [output_node.path()]

@@ -45,9 +45,12 @@ class ValidateReviewColorspace(pyblish.api.InstancePlugin,
category="houdini")
apply_plugin_settings_automatically(cls, settings, logger=cls.log)
# workfile settings added in '0.2.13'
color_settings = project_settings["houdini"]["imageio"].get(
"workfile", {}
)
# Add review color settings
color_settings = project_settings["houdini"]["imageio"]["workfile"]
if color_settings["enabled"]:
if color_settings.get("enabled"):
cls.review_color_space = color_settings.get("review_color_space")

@@ -496,9 +496,9 @@ def object_transform_set(container_children):
"""
transform_set = {}
for node in container_children:
name = f"{node.name}.transform"
name = f"{node}.transform"
transform_set[name] = node.pos
name = f"{node.name}.scale"
name = f"{node}.scale"
transform_set[name] = node.scale
return transform_set
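After this change the mapping is keyed by str(node) rather than node.name. A hedged usage sketch mirroring how the Max loaders later in this diff consume the result:

transform_data = object_transform_set(container_children)
for node in container_children:
    key = f"{node}.transform"
    if key in transform_data:
        node.pos = transform_data[key] or 0
        node.scale = transform_data[f"{node}.scale"] or 0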
@@ -519,6 +519,36 @@ def get_plugins() -> list:
return plugin_info_list
def update_modifier_node_names(event, node):
"""Update the name of the nodes after renaming
Args:
event (pymxs.MXSWrapperBase): event object (mandatory
argument for rt.NodeEventCallback)
node (list): list of node handles affected by the rename
(mandatory argument for rt.NodeEventCallback)
"""
containers = [
obj
for obj in rt.Objects
if (
rt.ClassOf(obj) == rt.Container
and rt.getUserProp(obj, "id") == "pyblish.avalon.instance"
and rt.getUserProp(obj, "productType") not in {
"workfile", "tyflow"
}
)
]
if not containers:
return
for container in containers:
ayon_data = container.modifiers[0].openPypeData
updated_node_names = [str(node.node) for node
in ayon_data.all_handles]
rt.setProperty(ayon_data, "sel_list", updated_node_names)
@contextlib.contextmanager
def render_resolution(width, height):
"""Set render resolution option during context

@@ -63,6 +63,8 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
rt.callbacks.addScript(rt.Name('postWorkspaceChange'),
self._deferred_menu_creation)
rt.NodeEventCallback(
nameChanged=lib.update_modifier_node_names)
def workfile_has_unsaved_changes(self):
return rt.getSaveRequired()

@@ -117,7 +117,7 @@ class MaxSceneLoader(load.LoaderPlugin):
)
for max_obj, obj_name in zip(max_objects, max_object_names):
max_obj.name = f"{namespace}:{obj_name}"
max_container.append(rt.getNodeByName(max_obj.name))
max_container.append(max_obj)
return containerise(
name, max_container, context,
namespace, loader=self.__class__.__name__)
@@ -158,11 +158,11 @@
current_max_object_names):
max_obj.name = f"{namespace}:{obj_name}"
max_objects.append(max_obj)
max_transform = f"{max_obj.name}.transform"
max_transform = f"{max_obj}.transform"
if max_transform in transform_data.keys():
max_obj.pos = transform_data[max_transform] or 0
max_obj.scale = transform_data[
f"{max_obj.name}.scale"] or 0
f"{max_obj}.scale"] or 0
update_custom_attribute_data(node, max_objects)
lib.imprint(container["instance_node"], {

@@ -76,11 +76,11 @@ class FbxModelLoader(load.LoaderPlugin):
for fbx_object in current_fbx_objects:
fbx_object.name = f"{namespace}:{fbx_object.name}"
fbx_objects.append(fbx_object)
fbx_transform = f"{fbx_object.name}.transform"
fbx_transform = f"{fbx_object}.transform"
if fbx_transform in transform_data.keys():
fbx_object.pos = transform_data[fbx_transform] or 0
fbx_object.scale = transform_data[
f"{fbx_object.name}.scale"] or 0
f"{fbx_object}.scale"] or 0
with maintained_selection():
rt.Select(node)

@@ -67,11 +67,11 @@ class ObjLoader(load.LoaderPlugin):
selections = rt.GetCurrentSelection()
for selection in selections:
selection.name = f"{namespace}:{selection.name}"
selection_transform = f"{selection.name}.transform"
selection_transform = f"{selection}.transform"
if selection_transform in transform_data.keys():
selection.pos = transform_data[selection_transform] or 0
selection.scale = transform_data[
f"{selection.name}.scale"] or 0
f"{selection}.scale"] or 0
update_custom_attribute_data(node, selections)
with maintained_selection():
rt.Select(node)

@@ -95,11 +95,11 @@ class ModelUSDLoader(load.LoaderPlugin):
for children in asset.Children:
children.name = f"{namespace}:{children.name}"
usd_objects.append(children)
children_transform = f"{children.name}.transform"
children_transform = f"{children}.transform"
if children_transform in transform_data.keys():
children.pos = transform_data[children_transform] or 0
children.scale = transform_data[
f"{children.name}.scale"] or 0
f"{children}.scale"] or 0
asset.name = f"{namespace}:{asset.name}"
usd_objects.append(asset)

@@ -92,10 +92,10 @@ class OxAbcLoader(load.LoaderPlugin):
abc.Parent = container
abc.name = f"{namespace}:{abc.name}"
ox_abc_objects.append(abc)
ox_transform = f"{abc.name}.transform"
ox_transform = f"{abc}.transform"
if ox_transform in transform_data.keys():
abc.pos = transform_data[ox_transform] or 0
abc.scale = transform_data[f"{abc.name}.scale"] or 0
abc.scale = transform_data[f"{abc}.scale"] or 0
update_custom_attribute_data(node, ox_abc_objects)
lib.imprint(
container["instance_node"],

@@ -12,4 +12,4 @@
max create mode
python.ExecuteFile startup
)
)

@@ -0,0 +1,305 @@
import json
import logging
import os
from maya import cmds # noqa
from ayon_core.hosts.maya.api.lib import evaluation
log = logging.getLogger(__name__)
# The maya alembic export types
ALEMBIC_ARGS = {
"attr": (list, tuple),
"attrPrefix": (list, tuple),
"autoSubd": bool,
"dataFormat": str,
"endFrame": float,
"eulerFilter": bool,
"frameRange": str, # "start end"; overrides startFrame & endFrame
"frameRelativeSample": float,
"melPerFrameCallback": str,
"melPostJobCallback": str,
"noNormals": bool,
"preRoll": bool,
"preRollStartFrame": int,
"pythonPerFrameCallback": str,
"pythonPostJobCallback": str,
"renderableOnly": bool,
"root": (list, tuple),
"selection": bool,
"startFrame": float,
"step": float,
"stripNamespaces": bool,
"userAttr": (list, tuple),
"userAttrPrefix": (list, tuple),
"uvWrite": bool,
"uvsOnly": bool,
"verbose": bool,
"wholeFrameGeo": bool,
"worldSpace": bool,
"writeColorSets": bool,
"writeCreases": bool, # Maya 2015 Ext1+
"writeFaceSets": bool,
"writeUVSets": bool, # Maya 2017+
"writeVisibility": bool,
}
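ALEMBIC_ARGS doubles as a type schema: extract_alembic() below validates each candidate option against it. A minimal sketch of that check:

value = ["ABC_"]
valid_types = ALEMBIC_ARGS["attrPrefix"]  # -> (list, tuple)
if not isinstance(value, valid_types):
    raise TypeError(
        "Alembic option unsupported type: "
        "{0} (expected {1})".format(value, valid_types))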
def extract_alembic(
file,
attr=None,
attrPrefix=None,
dataFormat="ogawa",
endFrame=None,
eulerFilter=True,
frameRange="",
noNormals=False,
preRoll=False,
preRollStartFrame=0,
renderableOnly=False,
root=None,
selection=True,
startFrame=None,
step=1.0,
stripNamespaces=True,
uvWrite=True,
verbose=False,
wholeFrameGeo=False,
worldSpace=False,
writeColorSets=False,
writeCreases=False,
writeFaceSets=False,
writeUVSets=False,
writeVisibility=False
):
"""Extract a single Alembic Cache.
This extracts an Alembic cache using the `-selection` flag to minimize
the extracted content to solely what was Collected into the instance.
Arguments:
file (str): The filepath to write the alembic file to.
attr (list of str, optional): A specific geometric attribute to write
out. Defaults to [].
attrPrefix (list of str, optional): Prefix filter for determining which
geometric attributes to write out. Defaults to ["ABC_"].
dataFormat (str): The data format to use for the cache,
defaults to "ogawa"
endFrame (float): End frame of output. Ignored if `frameRange`
provided.
eulerFilter (bool): When on, X, Y, and Z rotation data is filtered with
an Euler filter. Euler filtering helps resolve irregularities in
rotations especially if X, Y, and Z rotations exceed 360 degrees.
Defaults to True.
frameRange (tuple or str): Two-tuple with start and end frame or a
string formatted as: "startFrame endFrame". This argument
overrides `startFrame` and `endFrame` arguments.
noNormals (bool): When on, normal data from the original polygon
objects is not included in the exported Alembic cache file.
preRoll (bool): This frame range will not be sampled.
Defaults to False.
preRollStartFrame (float): The frame to start scene
evaluation at. This is used to set the starting frame for time
dependent translations and can be used to evaluate run-up that
isn't actually translated. Defaults to 0.
renderableOnly (bool): When on, any non-renderable nodes or hierarchy,
such as hidden objects, are not included in the Alembic file.
Defaults to False.
root (list of str): Maya dag path which will be parented to
the root of the Alembic file. Defaults to [], which means the
entire scene will be written out.
selection (bool): Write out all selected nodes from the
active selection list that are descendants of the roots specified
with -root. Defaults to False.
startFrame (float): Start frame of output. Ignored if `frameRange`
provided.
step (float): The time interval (expressed in frames) at
which the frame range is sampled. Additional samples around each
frame can be specified with -frs. Defaults to 1.0.
stripNamespaces (bool): When on, any namespaces associated with the
exported objects are removed from the Alembic file. For example, an
object with the namespace taco:foo:bar appears as bar in the
Alembic file.
uvWrite (bool): When on, UV data from polygon meshes and subdivision
objects are written to the Alembic file. Only the current UV map is
included.
verbose (bool): When on, outputs frame number information to the
Script Editor or output window during extraction.
wholeFrameGeo (bool): Data for geometry will only be written
out on whole frames. Defaults to False.
worldSpace (bool): When on, the top node in the node hierarchy is
stored as world space. By default, these nodes are stored as local
space. Defaults to False.
writeColorSets (bool): Write all color sets on MFnMeshes as
color 3 or color 4 indexed geometry parameters with face varying
scope. Defaults to False.
writeCreases (bool): If the mesh has crease edges or crease
vertices, the mesh (OPolyMesh) would now be written out as an OSubD
and crease info will be stored in the Alembic file. Otherwise,
crease info won't be preserved in the Alembic file unless a custom
Boolean attribute SubDivisionMesh has been added to mesh node and
its value is true. Defaults to False.
writeFaceSets (bool): Write all Face sets on MFnMeshes.
Defaults to False.
writeUVSets (bool): Write all uv sets on MFnMeshes as vector
2 indexed geometry parameters with face varying scope. Defaults to
False.
writeVisibility (bool): Visibility state will be stored in
the Alembic file. Otherwise everything written out is treated as
visible. Defaults to False.
"""
# Ensure alembic exporter is loaded
cmds.loadPlugin('AbcExport', quiet=True)
# Alembic Exporter requires forward slashes
file = file.replace('\\', '/')
# Ensure list arguments are valid.
attr = attr or []
attrPrefix = attrPrefix or []
root = root or []
# Pass the start and end frame on as `frameRange` so that it
# never conflicts with that argument
if not frameRange:
# Fallback to maya timeline if no start or end frame provided.
if startFrame is None:
startFrame = cmds.playbackOptions(query=True,
animationStartTime=True)
if endFrame is None:
endFrame = cmds.playbackOptions(query=True,
animationEndTime=True)
# Ensure valid types are converted to frame range
assert isinstance(startFrame, ALEMBIC_ARGS["startFrame"])
assert isinstance(endFrame, ALEMBIC_ARGS["endFrame"])
frameRange = "{0} {1}".format(startFrame, endFrame)
else:
# Allow conversion from tuple for `frameRange`
if isinstance(frameRange, (list, tuple)):
assert len(frameRange) == 2
frameRange = "{0} {1}".format(frameRange[0], frameRange[1])
# Assemble options
options = {
"selection": selection,
"frameRange": frameRange,
"eulerFilter": eulerFilter,
"noNormals": noNormals,
"preRoll": preRoll,
"renderableOnly": renderableOnly,
"uvWrite": uvWrite,
"writeColorSets": writeColorSets,
"writeFaceSets": writeFaceSets,
"wholeFrameGeo": wholeFrameGeo,
"worldSpace": worldSpace,
"writeVisibility": writeVisibility,
"writeUVSets": writeUVSets,
"writeCreases": writeCreases,
"dataFormat": dataFormat,
"step": step,
"attr": attr,
"attrPrefix": attrPrefix,
"stripNamespaces": stripNamespaces,
"verbose": verbose,
"preRollStartFrame": preRollStartFrame
}
# Validate options
for key, value in options.copy().items():
# Discard unknown options
if key not in ALEMBIC_ARGS:
log.warning("extract_alembic() does not support option '%s'. "
"Flag will be ignored..", key)
options.pop(key)
continue
# Validate value type
valid_types = ALEMBIC_ARGS[key]
if not isinstance(value, valid_types):
raise TypeError("Alembic option unsupported type: "
"{0} (expected {1})".format(value, valid_types))
# Ignore empty values, like an empty string, since they mess up how
# job arguments are built
if isinstance(value, (list, tuple)):
value = [x for x in value if x.strip()]
# Ignore option completely if no values remaining
if not value:
options.pop(key)
continue
options[key] = value
# The `writeCreases` argument was changed to `autoSubd` in Maya 2018+
maya_version = int(cmds.about(version=True))
if maya_version >= 2018:
options['autoSubd'] = options.pop('writeCreases', False)
# Format the job string from options
job_args = list()
for key, value in options.items():
if isinstance(value, (list, tuple)):
for entry in value:
job_args.append("-{} {}".format(key, entry))
elif isinstance(value, bool):
# Add only when state is set to True
if value:
job_args.append("-{0}".format(key))
else:
job_args.append("-{0} {1}".format(key, value))
job_str = " ".join(job_args)
job_str += ' -file "%s"' % file
# Ensure output directory exists
parent_dir = os.path.dirname(file)
if not os.path.exists(parent_dir):
os.makedirs(parent_dir)
if verbose:
log.debug("Preparing Alembic export with options: %s",
json.dumps(options, indent=4))
log.debug("Extracting Alembic with job arguments: %s", job_str)
# Perform extraction
print("Alembic Job Arguments : {}".format(job_str))
# Disable the parallel evaluation temporarily to ensure no buggy
# exports are made. (PLN-31)
# TODO: Make sure this actually fixes the issues
with evaluation("off"):
cmds.AbcExport(j=job_str, verbose=verbose)
if verbose:
log.debug("Extracted Alembic to: %s", file)
return file
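A hedged usage sketch of the relocated helper, with hypothetical paths and node names; frameRange overrides startFrame/endFrame as documented above:

from ayon_core.hosts.maya.api.alembic import extract_alembic

extract_alembic(
    file="/tmp/cache/hero.abc",   # output path, forward slashes
    root=["|hero_GRP"],           # hypothetical dag path
    frameRange="1001 1100",
    uvWrite=True,
    worldSpace=True,
)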

@@ -70,37 +70,6 @@ DEFAULT_MATRIX = [1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0]
# The maya alembic export types
_alembic_options = {
"startFrame": float,
"endFrame": float,
"frameRange": str, # "start end"; overrides startFrame & endFrame
"eulerFilter": bool,
"frameRelativeSample": float,
"noNormals": bool,
"renderableOnly": bool,
"step": float,
"stripNamespaces": bool,
"uvWrite": bool,
"wholeFrameGeo": bool,
"worldSpace": bool,
"writeVisibility": bool,
"writeColorSets": bool,
"writeFaceSets": bool,
"writeCreases": bool, # Maya 2015 Ext1+
"writeUVSets": bool, # Maya 2017+
"dataFormat": str,
"root": (list, tuple),
"attr": (list, tuple),
"attrPrefix": (list, tuple),
"userAttr": (list, tuple),
"melPerFrameCallback": str,
"melPostJobCallback": str,
"pythonPerFrameCallback": str,
"pythonPostJobCallback": str,
"selection": bool
}
INT_FPS = {15, 24, 25, 30, 48, 50, 60, 44100, 48000}
FLOAT_FPS = {23.98, 23.976, 29.97, 47.952, 59.94}
@@ -1346,178 +1315,6 @@ def is_visible(node,
return True
def extract_alembic(file,
startFrame=None,
endFrame=None,
selection=True,
uvWrite=True,
eulerFilter=True,
dataFormat="ogawa",
verbose=False,
**kwargs):
"""Extract a single Alembic Cache.
This extracts an Alembic cache using the `-selection` flag to minimize
the extracted content to solely what was Collected into the instance.
Arguments:
startFrame (float): Start frame of output. Ignored if `frameRange`
provided.
endFrame (float): End frame of output. Ignored if `frameRange`
provided.
frameRange (tuple or str): Two-tuple with start and end frame or a
string formatted as: "startFrame endFrame". This argument
overrides `startFrame` and `endFrame` arguments.
dataFormat (str): The data format to use for the cache,
defaults to "ogawa"
verbose (bool): When on, outputs frame number information to the
Script Editor or output window during extraction.
noNormals (bool): When on, normal data from the original polygon
objects is not included in the exported Alembic cache file.
renderableOnly (bool): When on, any non-renderable nodes or hierarchy,
such as hidden objects, are not included in the Alembic file.
Defaults to False.
stripNamespaces (bool): When on, any namespaces associated with the
exported objects are removed from the Alembic file. For example, an
object with the namespace taco:foo:bar appears as bar in the
Alembic file.
uvWrite (bool): When on, UV data from polygon meshes and subdivision
objects are written to the Alembic file. Only the current UV map is
included.
worldSpace (bool): When on, the top node in the node hierarchy is
stored as world space. By default, these nodes are stored as local
space. Defaults to False.
eulerFilter (bool): When on, X, Y, and Z rotation data is filtered with
an Euler filter. Euler filtering helps resolve irregularities in
rotations especially if X, Y, and Z rotations exceed 360 degrees.
Defaults to True.
"""
# Ensure alembic exporter is loaded
cmds.loadPlugin('AbcExport', quiet=True)
# Alembic Exporter requires forward slashes
file = file.replace('\\', '/')
# Pass the start and end frame on as `frameRange` so that it
# never conflicts with that argument
if "frameRange" not in kwargs:
# Fallback to maya timeline if no start or end frame provided.
if startFrame is None:
startFrame = cmds.playbackOptions(query=True,
animationStartTime=True)
if endFrame is None:
endFrame = cmds.playbackOptions(query=True,
animationEndTime=True)
# Ensure valid types are converted to frame range
assert isinstance(startFrame, _alembic_options["startFrame"])
assert isinstance(endFrame, _alembic_options["endFrame"])
kwargs["frameRange"] = "{0} {1}".format(startFrame, endFrame)
else:
# Allow conversion from tuple for `frameRange`
frame_range = kwargs["frameRange"]
if isinstance(frame_range, (list, tuple)):
assert len(frame_range) == 2
kwargs["frameRange"] = "{0} {1}".format(frame_range[0],
frame_range[1])
# Assemble options
options = {
"selection": selection,
"uvWrite": uvWrite,
"eulerFilter": eulerFilter,
"dataFormat": dataFormat
}
options.update(kwargs)
# Validate options
for key, value in options.copy().items():
# Discard unknown options
if key not in _alembic_options:
log.warning("extract_alembic() does not support option '%s'. "
"Flag will be ignored..", key)
options.pop(key)
continue
# Validate value type
valid_types = _alembic_options[key]
if not isinstance(value, valid_types):
raise TypeError("Alembic option unsupported type: "
"{0} (expected {1})".format(value, valid_types))
# Ignore empty values, like an empty string, since they mess up how
# job arguments are built
if isinstance(value, (list, tuple)):
value = [x for x in value if x.strip()]
# Ignore option completely if no values remaining
if not value:
options.pop(key)
continue
options[key] = value
# The `writeCreases` argument was changed to `autoSubd` in Maya 2018+
maya_version = int(cmds.about(version=True))
if maya_version >= 2018:
options['autoSubd'] = options.pop('writeCreases', False)
# Format the job string from options
job_args = list()
for key, value in options.items():
if isinstance(value, (list, tuple)):
for entry in value:
job_args.append("-{} {}".format(key, entry))
elif isinstance(value, bool):
# Add only when state is set to True
if value:
job_args.append("-{0}".format(key))
else:
job_args.append("-{0} {1}".format(key, value))
job_str = " ".join(job_args)
job_str += ' -file "%s"' % file
# Ensure output directory exists
parent_dir = os.path.dirname(file)
if not os.path.exists(parent_dir):
os.makedirs(parent_dir)
if verbose:
log.debug("Preparing Alembic export with options: %s",
json.dumps(options, indent=4))
log.debug("Extracting Alembic with job arguments: %s", job_str)
# Perform extraction
print("Alembic Job Arguments : {}".format(job_str))
# Disable the parallel evaluation temporarily to ensure no buggy
# exports are made. (PLN-31)
# TODO: Make sure this actually fixes the issues
with evaluation("off"):
cmds.AbcExport(j=job_str, verbose=verbose)
if verbose:
log.debug("Extracted Alembic to: %s", file)
return file
# region ID
def get_id_required_nodes(referenced_nodes=False,
nodes=None,
@@ -2520,7 +2317,16 @@ def set_scene_fps(fps, update=True):
"""
fps_mapping = {
'2': '2fps',
'3': '3fps',
'4': '4fps',
'5': '5fps',
'6': '6fps',
'8': '8fps',
'10': '10fps',
'12': '12fps',
'15': 'game',
'16': '16fps',
'24': 'film',
'25': 'pal',
'30': 'ntsc',
@@ -2612,21 +2418,24 @@ def get_fps_for_current_context():
Returns:
Union[int, float]: FPS value.
"""
project_name = get_current_project_name()
folder_path = get_current_folder_path()
folder_entity = ayon_api.get_folder_by_path(
project_name, folder_path, fields={"attrib.fps"}
) or {}
fps = folder_entity.get("attrib", {}).get("fps")
task_entity = get_current_task_entity(fields={"attrib"})
fps = task_entity.get("attrib", {}).get("fps")
if not fps:
project_entity = ayon_api.get_project(
project_name, fields=["attrib.fps"]
project_name = get_current_project_name()
folder_path = get_current_folder_path()
folder_entity = ayon_api.get_folder_by_path(
project_name, folder_path, fields={"attrib.fps"}
) or {}
fps = project_entity.get("attrib", {}).get("fps")
fps = folder_entity.get("attrib", {}).get("fps")
if not fps:
fps = 25
project_entity = ayon_api.get_project(
project_name, fields=["attrib.fps"]
) or {}
fps = project_entity.get("attrib", {}).get("fps")
if not fps:
fps = 25
return convert_to_maya_fps(fps)
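A hedged sketch combining the two helpers touched by this diff; the FPS value now resolves the task attrib first, then folder, then project, then falls back to 25:

fps = get_fps_for_current_context()
set_scene_fps(fps)  # maps e.g. 24 -> "film", 25 -> "pal" via fps_mapping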

@@ -30,9 +30,11 @@ from ayon_core.pipeline import (
register_loader_plugin_path,
register_inventory_action_path,
register_creator_plugin_path,
register_workfile_build_plugin_path,
deregister_loader_plugin_path,
deregister_inventory_action_path,
deregister_creator_plugin_path,
deregister_workfile_build_plugin_path,
AYON_CONTAINER_ID,
AVALON_CONTAINER_ID,
)
@@ -47,7 +49,6 @@ from ayon_core.hosts.maya import MAYA_ROOT_DIR
from ayon_core.hosts.maya.lib import create_workspace_mel
from . import menu, lib
from .workfile_template_builder import MayaPlaceholderLoadPlugin
from .workio import (
open_file,
save_file,
@@ -64,6 +65,7 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build")
AVALON_CONTAINERS = ":AVALON_CONTAINERS"
@@ -93,7 +95,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
register_inventory_action_path(INVENTORY_PATH)
self.log.info(PUBLISH_PATH)
register_workfile_build_plugin_path(WORKFILE_BUILD_PATH)
self.log.info("Installing callbacks ... ")
register_event_callback("init", on_init)
@@ -148,11 +150,6 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
def get_containers(self):
return ls()
def get_workfile_build_placeholder_plugins(self):
return [
MayaPlaceholderLoadPlugin
]
@contextlib.contextmanager
def maintained_selection(self):
with lib.maintained_selection():
@@ -338,6 +335,7 @@ def uninstall():
deregister_loader_plugin_path(LOAD_PATH)
deregister_creator_plugin_path(CREATE_PATH)
deregister_inventory_action_path(INVENTORY_PATH)
deregister_workfile_build_plugin_path(WORKFILE_BUILD_PATH)
menu.uninstall()

@@ -1,5 +1,3 @@
import json
from maya import cmds
from ayon_core.pipeline import (
@@ -10,16 +8,13 @@ )
)
from ayon_core.pipeline.workfile.workfile_template_builder import (
TemplateAlreadyImported,
AbstractTemplateBuilder,
PlaceholderPlugin,
LoadPlaceholderItem,
PlaceholderLoadMixin,
AbstractTemplateBuilder
)
from ayon_core.tools.workfile_template_build import (
WorkfileBuildPlaceholderDialog,
)
from .lib import read, imprint, get_reference_node, get_main_window
from .lib import get_main_window
PLACEHOLDER_SET = "PLACEHOLDERS_SET"
@@ -91,255 +86,6 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):
return True
class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
identifier = "maya.load"
label = "Maya load"
def _collect_scene_placeholders(self):
# Cache placeholder data to shared data
placeholder_nodes = self.builder.get_shared_populate_data(
"placeholder_nodes"
)
if placeholder_nodes is None:
attributes = cmds.ls("*.plugin_identifier", long=True)
placeholder_nodes = {}
for attribute in attributes:
node_name = attribute.rpartition(".")[0]
placeholder_nodes[node_name] = (
self._parse_placeholder_node_data(node_name)
)
self.builder.set_shared_populate_data(
"placeholder_nodes", placeholder_nodes
)
return placeholder_nodes
def _parse_placeholder_node_data(self, node_name):
placeholder_data = read(node_name)
parent_name = (
cmds.getAttr(node_name + ".parent", asString=True)
or node_name.rpartition("|")[0]
or ""
)
if parent_name:
siblings = cmds.listRelatives(parent_name, children=True)
else:
siblings = cmds.ls(assemblies=True)
node_shortname = node_name.rpartition("|")[2]
current_index = cmds.getAttr(node_name + ".index", asString=True)
if current_index < 0:
current_index = siblings.index(node_shortname)
placeholder_data.update({
"parent": parent_name,
"index": current_index
})
return placeholder_data
def _create_placeholder_name(self, placeholder_data):
placeholder_name_parts = placeholder_data["builder_type"].split("_")
pos = 1
placeholder_product_type = placeholder_data.get("product_type")
if placeholder_product_type is None:
placeholder_product_type = placeholder_data.get("family")
if placeholder_product_type:
placeholder_name_parts.insert(pos, placeholder_product_type)
pos += 1
# add loader arguments if any
loader_args = placeholder_data["loader_args"]
if loader_args:
loader_args = json.loads(loader_args.replace('\'', '\"'))
values = [v for v in loader_args.values()]
for value in values:
placeholder_name_parts.insert(pos, value)
pos += 1
placeholder_name = "_".join(placeholder_name_parts)
return placeholder_name.capitalize()
def _get_loaded_repre_ids(self):
loaded_representation_ids = self.builder.get_shared_populate_data(
"loaded_representation_ids"
)
if loaded_representation_ids is None:
try:
containers = cmds.sets("AVALON_CONTAINERS", q=True)
except ValueError:
containers = []
loaded_representation_ids = {
cmds.getAttr(container + ".representation")
for container in containers
}
self.builder.set_shared_populate_data(
"loaded_representation_ids", loaded_representation_ids
)
return loaded_representation_ids
def create_placeholder(self, placeholder_data):
selection = cmds.ls(selection=True)
if len(selection) > 1:
raise ValueError("More than one item is selected")
parent = selection[0] if selection else None
placeholder_data["plugin_identifier"] = self.identifier
placeholder_name = self._create_placeholder_name(placeholder_data)
placeholder = cmds.spaceLocator(name=placeholder_name)[0]
if parent:
placeholder = cmds.parent(placeholder, selection[0])[0]
imprint(placeholder, placeholder_data)
# Add helper attributes to keep placeholder info
cmds.addAttr(
placeholder,
longName="parent",
hidden=True,
dataType="string"
)
cmds.addAttr(
placeholder,
longName="index",
hidden=True,
attributeType="short",
defaultValue=-1
)
cmds.setAttr(placeholder + ".parent", "", type="string")
def update_placeholder(self, placeholder_item, placeholder_data):
node_name = placeholder_item.scene_identifier
new_values = {}
for key, value in placeholder_data.items():
placeholder_value = placeholder_item.data.get(key)
if value != placeholder_value:
new_values[key] = value
placeholder_item.data[key] = value
for key in new_values.keys():
cmds.deleteAttr(node_name + "." + key)
imprint(node_name, new_values)
def collect_placeholders(self):
output = []
scene_placeholders = self._collect_scene_placeholders()
for node_name, placeholder_data in scene_placeholders.items():
if placeholder_data.get("plugin_identifier") != self.identifier:
continue
# TODO do data validations and maybe upgrades if they are invalid
output.append(
LoadPlaceholderItem(node_name, placeholder_data, self)
)
return output
def populate_placeholder(self, placeholder):
self.populate_load_placeholder(placeholder)
def repopulate_placeholder(self, placeholder):
repre_ids = self._get_loaded_repre_ids()
self.populate_load_placeholder(placeholder, repre_ids)
def get_placeholder_options(self, options=None):
return self.get_load_plugin_options(options)
def post_placeholder_process(self, placeholder, failed):
"""Cleanup placeholder after load of its corresponding representations.
Args:
placeholder (PlaceholderItem): Item which was just used to load
representation.
failed (bool): Loading of representation failed.
"""
# Hide placeholder and add them to placeholder set
node = placeholder.scene_identifier
cmds.sets(node, addElement=PLACEHOLDER_SET)
cmds.hide(node)
cmds.setAttr(node + ".hiddenInOutliner", True)
def delete_placeholder(self, placeholder):
"""Remove placeholder if building was successful"""
cmds.delete(placeholder.scene_identifier)
def load_succeed(self, placeholder, container):
self._parent_in_hierarchy(placeholder, container)
def _parent_in_hierarchy(self, placeholder, container):
"""Parent loaded container to placeholder's parent.
ie : Set loaded content as placeholder's sibling
Args:
container (str): Placeholder loaded containers
"""
if not container:
return
roots = cmds.sets(container, q=True) or []
ref_node = None
try:
ref_node = get_reference_node(roots)
except AssertionError as e:
self.log.info(e.args[0])
nodes_to_parent = []
for root in roots:
if ref_node:
ref_root = cmds.referenceQuery(root, nodes=True)[0]
ref_root = (
cmds.listRelatives(ref_root, parent=True, path=True) or
[ref_root]
)
nodes_to_parent.extend(ref_root)
continue
if root.endswith("_RN"):
# Backwards compatibility for hardcoded reference names.
refRoot = cmds.referenceQuery(root, n=True)[0]
refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot]
nodes_to_parent.extend(refRoot)
elif root not in cmds.listSets(allSets=True):
nodes_to_parent.append(root)
elif not cmds.sets(root, q=True):
return
# Move loaded nodes to correct index in outliner hierarchy
placeholder_form = cmds.xform(
placeholder.scene_identifier,
q=True,
matrix=True,
worldSpace=True
)
scene_parent = cmds.listRelatives(
placeholder.scene_identifier, parent=True, fullPath=True
)
for node in set(nodes_to_parent):
cmds.reorder(node, front=True)
cmds.reorder(node, relative=placeholder.data["index"])
cmds.xform(node, matrix=placeholder_form, ws=True)
if scene_parent:
cmds.parent(node, scene_parent)
else:
cmds.parent(node, world=True)
holding_sets = cmds.listSets(object=placeholder.scene_identifier)
if not holding_sets:
return
for holding_set in holding_sets:
cmds.sets(roots, forceElement=holding_set)
def build_workfile_template(*args):
builder = MayaTemplateBuilder(registered_host())
builder.build_template()

@@ -1,89 +0,0 @@
from ayon_core.hosts.maya.api import (
lib,
plugin
)
from ayon_core.lib import (
BoolDef,
TextDef
)
class CreateAnimation(plugin.MayaHiddenCreator):
"""Animation output for character rigs
We hide the animation creator from the UI since the creation of it is
automated upon loading a rig. There's an inventory action to recreate it
for loaded rigs if by chance someone deleted the animation instance.
"""
identifier = "io.openpype.creators.maya.animation"
name = "animationDefault"
label = "Animation"
product_type = "animation"
icon = "male"
write_color_sets = False
write_face_sets = False
include_parent_hierarchy = False
include_user_defined_attributes = False
def get_instance_attr_defs(self):
defs = lib.collect_animation_defs()
defs.extend([
BoolDef("writeColorSets",
label="Write vertex colors",
tooltip="Write vertex colors with the geometry",
default=self.write_color_sets),
BoolDef("writeFaceSets",
label="Write face sets",
tooltip="Write face sets with the geometry",
default=self.write_face_sets),
BoolDef("writeNormals",
label="Write normals",
tooltip="Write normals with the deforming geometry",
default=True),
BoolDef("renderableOnly",
label="Renderable Only",
tooltip="Only export renderable visible shapes",
default=False),
BoolDef("visibleOnly",
label="Visible Only",
tooltip="Only export dag objects visible during "
"frame range",
default=False),
BoolDef("includeParentHierarchy",
label="Include Parent Hierarchy",
tooltip="Whether to include parent hierarchy of nodes in "
"the publish instance",
default=self.include_parent_hierarchy),
BoolDef("worldSpace",
label="World-Space Export",
default=True),
BoolDef("includeUserDefinedAttributes",
label="Include User Defined Attributes",
default=self.include_user_defined_attributes),
TextDef("attr",
label="Custom Attributes",
default="",
placeholder="attr1, attr2"),
TextDef("attrPrefix",
label="Custom Attributes Prefix",
placeholder="prefix1, prefix2")
])
# TODO: Implement these on a Deadline plug-in instead?
"""
# Default to not send to farm.
self.data["farm"] = False
self.data["priority"] = 50
"""
return defs
def apply_settings(self, project_settings):
super(CreateAnimation, self).apply_settings(project_settings)
# Hardcoding creator to be enabled due to existing settings would
# disable the creator causing the creator plugin to not be
# discoverable.
self.enabled = True

@@ -0,0 +1,139 @@
from maya import cmds
from ayon_core.hosts.maya.api import lib, plugin
from ayon_core.lib import (
BoolDef,
NumberDef,
)
from ayon_core.pipeline import CreatedInstance
def _get_animation_attr_defs(cls):
"""Get Animation generic definitions."""
defs = lib.collect_animation_defs()
defs.extend(
[
BoolDef("farm", label="Submit to Farm"),
NumberDef("priority", label="Farm job Priority", default=50),
BoolDef("refresh", label="Refresh viewport during export"),
BoolDef(
"includeParentHierarchy",
label="Include Parent Hierarchy",
tooltip=(
"Whether to include parent hierarchy of nodes in the "
"publish instance."
)
),
BoolDef(
"includeUserDefinedAttributes",
label="Include User Defined Attributes",
tooltip=(
"Whether to include all custom maya attributes found "
"on nodes as attributes in the Alembic data."
)
),
]
)
return defs
def convert_legacy_alembic_creator_attributes(node_data, class_name):
"""This is a legacy transfer of creator attributes to publish attributes
for ExtractAlembic/ExtractAnimation plugin.
"""
publish_attributes = node_data["publish_attributes"]
if class_name in publish_attributes:
return node_data
attributes = [
"attr",
"attrPrefix",
"visibleOnly",
"writeColorSets",
"writeFaceSets",
"writeNormals",
"renderableOnly",
"worldSpace"
]
plugin_attributes = {}
for attr in attributes:
if attr not in node_data["creator_attributes"]:
continue
value = node_data["creator_attributes"].pop(attr)
plugin_attributes[attr] = value
publish_attributes[class_name] = plugin_attributes
return node_data
class CreateAnimation(plugin.MayaHiddenCreator):
"""Animation output for character rigs
We hide the animation creator from the UI since the creation of it is
automated upon loading a rig. There's an inventory action to recreate it
for loaded rigs if by chance someone deleted the animation instance.
"""
identifier = "io.openpype.creators.maya.animation"
name = "animationDefault"
label = "Animation"
product_type = "animation"
icon = "male"
write_color_sets = False
write_face_sets = False
include_parent_hierarchy = False
include_user_defined_attributes = False
def read_instance_node(self, node):
node_data = super(CreateAnimation, self).read_instance_node(node)
node_data = convert_legacy_alembic_creator_attributes(
node_data, "ExtractAnimation"
)
return node_data
def get_instance_attr_defs(self):
defs = super(CreateAnimation, self).get_instance_attr_defs()
defs += _get_animation_attr_defs(self)
return defs
class CreatePointCache(plugin.MayaCreator):
"""Alembic pointcache for animated data"""
identifier = "io.openpype.creators.maya.pointcache"
label = "Pointcache"
product_type = "pointcache"
icon = "gears"
write_color_sets = False
write_face_sets = False
include_user_defined_attributes = False
def read_instance_node(self, node):
node_data = super(CreatePointCache, self).read_instance_node(node)
node_data = convert_legacy_alembic_creator_attributes(
node_data, "ExtractAlembic"
)
return node_data
def get_instance_attr_defs(self):
defs = super(CreatePointCache, self).get_instance_attr_defs()
defs += _get_animation_attr_defs(self)
return defs
def create(self, product_name, instance_data, pre_create_data):
instance = super(CreatePointCache, self).create(
product_name, instance_data, pre_create_data
)
instance_node = instance.get("instance_node")
# For Arnold standin proxy
proxy_set = cmds.sets(name=instance_node + "_proxy_SET", empty=True)
cmds.sets(proxy_set, forceElement=instance_node)
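A hedged example of convert_legacy_alembic_creator_attributes defined above, with an assumed node_data shape:

node_data = {
    "creator_attributes": {"worldSpace": True, "visibleOnly": False},
    "publish_attributes": {},
}
node_data = convert_legacy_alembic_creator_attributes(
    node_data, "ExtractAnimation"
)
# The migrated values now live under
# node_data["publish_attributes"]["ExtractAnimation"].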

@@ -1,88 +0,0 @@
from maya import cmds
from ayon_core.hosts.maya.api import (
lib,
plugin
)
from ayon_core.lib import (
BoolDef,
TextDef
)
class CreatePointCache(plugin.MayaCreator):
"""Alembic pointcache for animated data"""
identifier = "io.openpype.creators.maya.pointcache"
label = "Pointcache"
product_type = "pointcache"
icon = "gears"
write_color_sets = False
write_face_sets = False
include_user_defined_attributes = False
def get_instance_attr_defs(self):
defs = lib.collect_animation_defs()
defs.extend([
BoolDef("writeColorSets",
label="Write vertex colors",
tooltip="Write vertex colors with the geometry",
default=False),
BoolDef("writeFaceSets",
label="Write face sets",
tooltip="Write face sets with the geometry",
default=False),
BoolDef("renderableOnly",
label="Renderable Only",
tooltip="Only export renderable visible shapes",
default=False),
BoolDef("visibleOnly",
label="Visible Only",
tooltip="Only export dag objects visible during "
"frame range",
default=False),
BoolDef("includeParentHierarchy",
label="Include Parent Hierarchy",
tooltip="Whether to include parent hierarchy of nodes in "
"the publish instance",
default=False),
BoolDef("worldSpace",
label="World-Space Export",
default=True),
BoolDef("refresh",
label="Refresh viewport during export",
default=False),
BoolDef("includeUserDefinedAttributes",
label="Include User Defined Attributes",
default=self.include_user_defined_attributes),
TextDef("attr",
label="Custom Attributes",
default="",
placeholder="attr1, attr2"),
TextDef("attrPrefix",
label="Custom Attributes Prefix",
default="",
placeholder="prefix1, prefix2")
])
# TODO: Implement these on a Deadline plug-in instead?
"""
# Default to not send to farm.
self.data["farm"] = False
self.data["priority"] = 50
"""
return defs
def create(self, product_name, instance_data, pre_create_data):
instance = super(CreatePointCache, self).create(
product_name, instance_data, pre_create_data
)
instance_node = instance.get("instance_node")
# For Arnold standin proxy
proxy_set = cmds.sets(name=instance_node + "_proxy_SET", empty=True)
cmds.sets(proxy_set, forceElement=instance_node)

View file

@ -0,0 +1,39 @@
from ayon_core.lib import (
BoolDef
)
from ayon_core.pipeline import (
load,
registered_host
)
from ayon_core.hosts.maya.api.workfile_template_builder import (
MayaTemplateBuilder
)
class LoadAsTemplate(load.LoaderPlugin):
"""Load workfile as a template """
product_types = {"workfile", "mayaScene"}
label = "Load as template"
representations = ["ma", "mb"]
icon = "wrench"
color = "#775555"
order = 10
options = [
BoolDef("keep_placeholders",
label="Keep Placeholders",
default=False),
BoolDef("create_first_version",
label="Create First Version",
default=False),
]
def load(self, context, name, namespace, data):
keep_placeholders = data.get("keep_placeholders", False)
create_first_version = data.get("create_first_version", False)
path = self.filepath_from_context(context)
builder = MayaTemplateBuilder(registered_host())
builder.build_template(template_path=path,
keep_placeholders=keep_placeholders,
create_first_version=create_first_version)
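# The loader is a thin wrapper; the same build can be triggered directly
# (a sketch, assuming a registered Maya host and an existing template file):
#     builder = MayaTemplateBuilder(registered_host())
#     builder.build_template(
#         template_path="/path/to/template.ma",  # hypothetical path
#         keep_placeholders=True,
#         create_first_version=False,
#     )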

View file

@ -58,4 +58,3 @@ class CollectAnimationOutputGeometry(pyblish.api.InstancePlugin):
if instance.data.get("farm"):
instance.data["families"].append("publish.farm")

View file

@ -14,7 +14,9 @@ class CollectUserDefinedAttributes(pyblish.api.InstancePlugin):
def process(self, instance):
# Collect user defined attributes.
if not instance.data.get("includeUserDefinedAttributes", False):
if not instance.data["creator_attributes"].get(
"includeUserDefinedAttributes"
):
return
if "out_hierarchy" in instance.data:

View file

@ -2,7 +2,7 @@ import os
import json
from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.lib import extract_alembic
from ayon_core.hosts.maya.api.alembic import extract_alembic
from maya import cmds

View file

@ -1,17 +1,28 @@
import os
from collections import OrderedDict
from maya import cmds
from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.alembic import extract_alembic
from ayon_core.hosts.maya.api.lib import (
extract_alembic,
suspended_refresh,
maintained_selection,
iter_visible_nodes_in_range
)
from ayon_core.lib import (
BoolDef,
TextDef,
NumberDef,
EnumDef,
UISeparatorDef,
UILabelDef,
)
from ayon_core.pipeline.publish import AYONPyblishPluginMixin
from ayon_core.pipeline import KnownPublishError
class ExtractAlembic(publish.Extractor):
class ExtractAlembic(publish.Extractor, AYONPyblishPluginMixin):
"""Produce an alembic of just point positions and normals.
Positions and normals, uvs, creases are preserved, but nothing more,
@ -27,8 +38,35 @@ class ExtractAlembic(publish.Extractor):
targets = ["local", "remote"]
# From settings
attr = []
attrPrefix = []
autoSubd = False
bake_attributes = []
bake_attribute_prefixes = []
dataFormat = "ogawa"
eulerFilter = False
melPerFrameCallback = ""
melPostJobCallback = ""
overrides = []
preRoll = False
preRollStartFrame = 0
pythonPerFrameCallback = ""
pythonPostJobCallback = ""
renderableOnly = False
stripNamespaces = True
uvsOnly = False
uvWrite = False
userAttr = ""
userAttrPrefix = ""
verbose = False
visibleOnly = False
wholeFrameGeo = False
worldSpace = True
writeColorSets = False
writeCreases = True
writeFaceSets = False
writeNormals = True
writeUVSets = False
writeVisibility = False
def process(self, instance):
if instance.data.get("farm"):
@ -41,16 +79,38 @@ class ExtractAlembic(publish.Extractor):
start = float(instance.data.get("frameStartHandle", 1))
end = float(instance.data.get("frameEndHandle", 1))
attrs = instance.data.get("attr", "").split(";")
attrs = [value for value in attrs if value.strip()]
attribute_values = self.get_attr_values_from_data(
instance.data
)
attrs = [
attr.strip()
for attr in attribute_values.get("attr", "").split(";")
if attr.strip()
]
attrs += instance.data.get("userDefinedAttributes", [])
attrs += self.bake_attributes
attrs += ["cbId"]
attr_prefixes = instance.data.get("attrPrefix", "").split(";")
attr_prefixes = [value for value in attr_prefixes if value.strip()]
attr_prefixes = [
attr.strip()
for attr in attribute_values.get("attrPrefix", "").split(";")
if attr.strip()
]
attr_prefixes += self.bake_attribute_prefixes
user_attrs = [
attr.strip()
for attr in attribute_values.get("userAttr", "").split(";")
if attr.strip()
]
user_attr_prefixes = [
attr.strip()
for attr in attribute_values.get("userAttrPrefix", "").split(";")
if attr.strip()
]
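# Worked example (hypothetical values): with attribute_values["attr"] set to
# "motionBlur; doubleSided" and no user defined or bake attributes, the
# parsing above yields attrs == ["motionBlur", "doubleSided", "cbId"] once
# the hard-coded "cbId" is appended.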
self.log.debug("Extracting pointcache..")
dirname = self.staging_dir(instance)
@ -58,28 +118,83 @@ class ExtractAlembic(publish.Extractor):
filename = "{name}.abc".format(**instance.data)
path = os.path.join(parent_dir, filename)
options = {
"step": instance.data.get("step", 1.0),
"attr": attrs,
"attrPrefix": attr_prefixes,
"writeVisibility": True,
"writeCreases": True,
"writeColorSets": instance.data.get("writeColorSets", False),
"writeFaceSets": instance.data.get("writeFaceSets", False),
"uvWrite": True,
"selection": True,
"worldSpace": instance.data.get("worldSpace", True)
}
root = None
if not instance.data.get("includeParentHierarchy", True):
# Set the root nodes if we don't want to include parents
# The roots are to be considered the ones that are the actual
# direct members of the set
options["root"] = roots
root = roots
if int(cmds.about(version=True)) >= 2017:
# Since Maya 2017 alembic supports multiple uv sets - write them.
options["writeUVSets"] = True
kwargs = {
"file": path,
"attr": attrs,
"attrPrefix": attr_prefixes,
"userAttr": user_attrs,
"userAttrPrefix": user_attr_prefixes,
"dataFormat": attribute_values.get("dataFormat", self.dataFormat),
"endFrame": end,
"eulerFilter": attribute_values.get(
"eulerFilter", self.eulerFilter
),
"preRoll": attribute_values.get("preRoll", self.preRoll),
"preRollStartFrame": attribute_values.get(
"preRollStartFrame", self.preRollStartFrame
),
"renderableOnly": attribute_values.get(
"renderableOnly", self.renderableOnly
),
"root": root,
"selection": True,
"startFrame": start,
"step": instance.data.get(
"creator_attributes", {}
).get("step", 1.0),
"stripNamespaces": attribute_values.get(
"stripNamespaces", self.stripNamespaces
),
"uvWrite": attribute_values.get("uvWrite", self.uvWrite),
"verbose": attribute_values.get("verbose", self.verbose),
"wholeFrameGeo": attribute_values.get(
"wholeFrameGeo", self.wholeFrameGeo
),
"worldSpace": attribute_values.get("worldSpace", self.worldSpace),
"writeColorSets": attribute_values.get(
"writeColorSets", self.writeColorSets
),
"writeCreases": attribute_values.get(
"writeCreases", self.writeCreases
),
"writeFaceSets": attribute_values.get(
"writeFaceSets", self.writeFaceSets
),
"writeUVSets": attribute_values.get(
"writeUVSets", self.writeUVSets
),
"writeVisibility": attribute_values.get(
"writeVisibility", self.writeVisibility
),
"autoSubd": attribute_values.get(
"autoSubd", self.autoSubd
),
"uvsOnly": attribute_values.get(
"uvsOnly", self.uvsOnly
),
"writeNormals": attribute_values.get(
"writeNormals", self.writeNormals
),
"melPerFrameCallback": attribute_values.get(
"melPerFrameCallback", self.melPerFrameCallback
),
"melPostJobCallback": attribute_values.get(
"melPostJobCallback", self.melPostJobCallback
),
"pythonPerFrameCallback": attribute_values.get(
"pythonPerFrameCallback", self.pythonPostJobCallback
),
"pythonPostJobCallback": attribute_values.get(
"pythonPostJobCallback", self.pythonPostJobCallback
)
}
if instance.data.get("visibleOnly", False):
# If we only want to include nodes that are visible in the frame
@ -87,20 +202,19 @@ class ExtractAlembic(publish.Extractor):
# flag does not filter out those that are only hidden on some
# frames as it counts "animated" or "connected" visibilities as
# if it's always visible.
nodes = list(iter_visible_nodes_in_range(nodes,
start=start,
end=end))
nodes = list(
iter_visible_nodes_in_range(nodes, start=start, end=end)
)
suspend = not instance.data.get("refresh", False)
with suspended_refresh(suspend=suspend):
with maintained_selection():
cmds.select(nodes, noExpand=True)
extract_alembic(
file=path,
startFrame=start,
endFrame=end,
**options
self.log.debug(
"Running `extract_alembic` with the keyword arguments: "
"{}".format(kwargs)
)
extract_alembic(**kwargs)
if "representations" not in instance.data:
instance.data["representations"] = []
@ -124,21 +238,17 @@ class ExtractAlembic(publish.Extractor):
return
path = path.replace(".abc", "_proxy.abc")
kwargs["file"] = path
if not instance.data.get("includeParentHierarchy", True):
# Set the root nodes if we don't want to include parents
# The roots are to be considered the ones that are the actual
# direct members of the set
options["root"] = instance.data["proxyRoots"]
kwargs["root"] = instance.data["proxyRoots"]
with suspended_refresh(suspend=suspend):
with maintained_selection():
cmds.select(instance.data["proxy"])
extract_alembic(
file=path,
startFrame=start,
endFrame=end,
**options
)
extract_alembic(**kwargs)
representation = {
"name": "proxy",
@ -152,24 +262,274 @@ class ExtractAlembic(publish.Extractor):
def get_members_and_roots(self, instance):
return instance[:], instance.data.get("setMembers")
@classmethod
def get_attribute_defs(cls):
if not cls.overrides:
return []
override_defs = OrderedDict({
"autoSubd": BoolDef(
"autoSubd",
label="Auto Subd",
default=cls.autoSubd,
tooltip=(
"If this flag is present and the mesh has crease edges, "
"crease vertices or holes, the mesh (OPolyMesh) would now "
"be written out as an OSubD and crease info will be stored"
" in the Alembic file. Otherwise, creases info won't be "
"preserved in Alembic file unless a custom Boolean "
"attribute SubDivisionMesh has been added to mesh node and"
" its value is true."
)
),
"eulerFilter": BoolDef(
"eulerFilter",
label="Euler Filter",
default=cls.eulerFilter,
tooltip="Apply Euler filter while sampling rotations."
),
"renderableOnly": BoolDef(
"renderableOnly",
label="Renderable Only",
default=cls.renderableOnly,
tooltip="Only export renderable visible shapes."
),
"stripNamespaces": BoolDef(
"stripNamespaces",
label="Strip Namespaces",
default=cls.stripNamespaces,
tooltip=(
"Namespaces will be stripped off of the node before being "
"written to Alembic."
)
),
"uvsOnly": BoolDef(
"uvsOnly",
label="UVs Only",
default=cls.uvsOnly,
tooltip=(
"If this flag is present, only uv data for PolyMesh and "
"SubD shapes will be written to the Alembic file."
)
),
"uvWrite": BoolDef(
"uvWrite",
label="UV Write",
default=cls.uvWrite,
tooltip=(
"Uv data for PolyMesh and SubD shapes will be written to "
"the Alembic file."
)
),
"verbose": BoolDef(
"verbose",
label="Verbose",
default=cls.verbose,
tooltip="Prints the current frame that is being evaluated."
),
"visibleOnly": BoolDef(
"visibleOnly",
label="Visible Only",
default=cls.visibleOnly,
tooltip="Only export dag objects visible during frame range."
),
"wholeFrameGeo": BoolDef(
"wholeFrameGeo",
label="Whole Frame Geo",
default=cls.wholeFrameGeo,
tooltip=(
"Data for geometry will only be written out on whole "
"frames."
)
),
"worldSpace": BoolDef(
"worldSpace",
label="World Space",
default=cls.worldSpace,
tooltip="Any root nodes will be stored in world space."
),
"writeColorSets": BoolDef(
"writeColorSets",
label="Write Color Sets",
default=cls.writeColorSets,
tooltip="Write vertex colors with the geometry."
),
"writeFaceSets": BoolDef(
"writeFaceSets",
label="Write Face Sets",
default=cls.writeFaceSets,
tooltip="Write face sets with the geometry."
),
"writeNormals": BoolDef(
"writeNormals",
label="Write Normals",
default=cls.writeNormals,
tooltip="Write normals with the deforming geometry."
),
"writeUVSets": BoolDef(
"writeUVSets",
label="Write UV Sets",
default=cls.writeUVSets,
tooltip=(
"Write all uv sets on MFnMeshes as vector 2 indexed "
"geometry parameters with face varying scope."
)
),
"writeVisibility": BoolDef(
"writeVisibility",
label="Write Visibility",
default=cls.writeVisibility,
tooltip=(
"Visibility state will be stored in the Alembic file. "
"Otherwise everything written out is treated as visible."
)
),
"preRoll": BoolDef(
"preRoll",
label="Pre Roll",
default=cls.preRoll,
tooltip="This frame range will not be sampled."
),
"preRollStartFrame": NumberDef(
"preRollStartFrame",
label="Pre Roll Start Frame",
tooltip=(
"The frame to start scene evaluation at. This is used"
" to set the starting frame for time dependent "
"translations and can be used to evaluate run-up that"
" isn't actually translated."
),
default=cls.preRollStartFrame
),
"dataFormat": EnumDef(
"dataFormat",
label="Data Format",
items=["ogawa", "HDF"],
default=cls.dataFormat,
tooltip="The data format to use to write the file."
),
"attr": TextDef(
"attr",
label="Custom Attributes",
placeholder="attr1; attr2; ...",
default=cls.attr,
tooltip=(
"Attributes matching by name will be included in the "
"Alembic export. Attributes should be separated by "
"semi-colon `;`"
)
),
"attrPrefix": TextDef(
"attrPrefix",
label="Custom Attributes Prefix",
placeholder="prefix1; prefix2; ...",
default=cls.attrPrefix,
tooltip=(
"Attributes starting with these prefixes will be included "
"in the Alembic export. Attributes should be separated by "
"semi-colon `;`"
)
),
"userAttr": TextDef(
"userAttr",
label="User Attr",
placeholder="attr1; attr2; ...",
default=cls.userAttr,
tooltip=(
"Attributes matching by name will be included in the "
"Alembic export. Attributes should be separated by "
"semi-colon `;`"
)
),
"userAttrPrefix": TextDef(
"userAttrPrefix",
label="User Attr Prefix",
placeholder="prefix1; prefix2; ...",
default=cls.userAttrPrefix,
tooltip=(
"Attributes starting with these prefixes will be included "
"in the Alembic export. Attributes should be separated by "
"semi-colon `;`"
)
),
"melPerFrameCallback": TextDef(
"melPerFrameCallback",
label="Mel Per Frame Callback",
default=cls.melPerFrameCallback,
tooltip=(
"When each frame (and the static frame) is evaluated the "
"string specified is evaluated as a Mel command."
)
),
"melPostJobCallback": TextDef(
"melPostJobCallback",
label="Mel Post Job Callback",
default=cls.melPostJobCallback,
tooltip=(
"When the translation has finished the string specified "
"is evaluated as a Mel command."
)
),
"pythonPerFrameCallback": TextDef(
"pythonPerFrameCallback",
label="Python Per Frame Callback",
default=cls.pythonPerFrameCallback,
tooltip=(
"When each frame (and the static frame) is evaluated the "
"string specified is evaluated as a python command."
)
),
"pythonPostJobCallback": TextDef(
"pythonPostJobCallback",
label="Python Post Frame Callback",
default=cls.pythonPostJobCallback,
tooltip=(
"When the translation has finished the string specified "
"is evaluated as a python command."
)
)
})
defs = super(ExtractAlembic, cls).get_attribute_defs()
defs.extend([
UISeparatorDef("sep_alembic_options"),
UILabelDef("Alembic Options"),
])
# The Arguments that can be modified by the Publisher
overrides = set(cls.overrides)
for key, value in override_defs.items():
if key not in overrides:
continue
defs.append(value)
defs.append(
UISeparatorDef("sep_alembic_options_end")
)
return defs
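# Example of the filtering above (hypothetical settings): with
# overrides = ["worldSpace", "eulerFilter"], only those two definitions are
# added after the "Alembic Options" label and before the closing separator;
# every other argument falls back to its class-level default in process().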
class ExtractAnimation(ExtractAlembic):
label = "Extract Animation"
label = "Extract Animation (Alembic)"
families = ["animation"]
def get_members_and_roots(self, instance):
# Collect the out set nodes
out_sets = [node for node in instance if node.endswith("out_SET")]
if len(out_sets) != 1:
raise RuntimeError("Couldn't find exactly one out_SET: "
"{0}".format(out_sets))
raise KnownPublishError(
"Couldn't find exactly one out_SET: {0}".format(out_sets)
)
out_set = out_sets[0]
roots = cmds.sets(out_set, query=True)
roots = cmds.sets(out_set, query=True) or []
# Include all descendants
nodes = roots + cmds.listRelatives(roots,
allDescendents=True,
fullPath=True) or []
nodes = roots
nodes += cmds.listRelatives(
roots, allDescendents=True, fullPath=True
) or []
return nodes, roots
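# Worked example (hypothetical scene): an animation instance containing
# "rig:out_SET" with set members ["|char_GRP"] returns all descendants of
# |char_GRP (plus the root itself) as nodes and ["|char_GRP"] as roots;
# zero or multiple out_SETs now raise KnownPublishError instead of a bare
# RuntimeError.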

View file

@ -3,8 +3,8 @@ import os
from maya import cmds
from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.alembic import extract_alembic
from ayon_core.hosts.maya.api.lib import (
extract_alembic,
suspended_refresh,
maintained_selection,
iter_visible_nodes_in_range

View file

@ -5,8 +5,8 @@ import os
from maya import cmds # noqa
from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.alembic import extract_alembic
from ayon_core.hosts.maya.api.lib import (
extract_alembic,
suspended_refresh,
maintained_selection
)

View file

@ -5,7 +5,7 @@ import copy
from maya import cmds
import pyblish.api
from ayon_core.hosts.maya.api.lib import extract_alembic
from ayon_core.hosts.maya.api.alembic import extract_alembic
from ayon_core.pipeline import publish

View file

@ -0,0 +1,98 @@
import pyblish.api
from ayon_core.pipeline import OptionalPyblishPluginMixin
from ayon_core.pipeline.publish import RepairAction, PublishValidationError
class ValidateAlembicDefaultsPointcache(
pyblish.api.InstancePlugin, OptionalPyblishPluginMixin
):
"""Validate the attributes on the instance are defaults.
The defaults are defined in the project settings.
"""
order = pyblish.api.ValidatorOrder
families = ["pointcache"]
hosts = ["maya"]
label = "Validate Alembic Options Defaults"
actions = [RepairAction]
optional = True
plugin_name = "ExtractAlembic"
@classmethod
def _get_settings(cls, context):
maya_settings = context.data["project_settings"]["maya"]
settings = maya_settings["publish"]["ExtractAlembic"]
return settings
@classmethod
def _get_publish_attributes(cls, instance):
attributes = instance.data["publish_attributes"][cls.plugin_name]
return attributes
def process(self, instance):
if not self.is_active(instance.data):
return
settings = self._get_settings(instance.context)
attributes = self._get_publish_attributes(instance)
msg = (
"Alembic Extract setting \"{}\" is not the default value:"
"\nCurrent: {}"
"\nDefault Value: {}\n"
)
errors = []
for key, value in attributes.items():
default_value = settings[key]
# Lists are best compared sorted since we can't rely on the order
# of the items.
if isinstance(value, list):
value = sorted(value)
default_value = sorted(default_value)
if value != default_value:
errors.append(msg.format(key, value, default_value))
if errors:
raise PublishValidationError("\n".join(errors))
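# A hypothetical failure report produced by the loop above:
#   Alembic Extract setting "worldSpace" is not the default value:
#   Current: False
#   Default Value: True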
@classmethod
def repair(cls, instance):
# Find create instance twin.
create_context = instance.context.data["create_context"]
create_instance = create_context.get_instance_by_id(
instance.data["instance_id"]
)
# Set the settings values on the create context then save to workfile.
publish_attributes = instance.data["publish_attributes"]
plugin_name = cls.plugin_name
attributes = cls._get_publish_attributes(instance)
settings = cls._get_settings(instance.context)
create_publish_attributes = create_instance.data["publish_attributes"]
for key in attributes:
create_publish_attributes[plugin_name][key] = settings[key]
create_context.save_changes()
class ValidateAlembicDefaultsAnimation(
ValidateAlembicDefaultsPointcache
):
"""Validate the attributes on the instance are defaults.
The defaults are defined in the project settings.
"""
label = "Validate Alembic Options Defaults"
families = ["animation"]
plugin_name = "ExtractAnimation"

View file

@ -10,6 +10,7 @@ from ayon_core.pipeline.publish import (
RepairAction,
ValidateContentsOrder,
PublishValidationError,
OptionalPyblishPluginMixin
)
from ayon_core.hosts.maya.api import lib
from ayon_core.hosts.maya.api.lib_rendersettings import RenderSettings
@ -37,7 +38,8 @@ def get_redshift_image_format_labels():
return mel.eval("{0}={0}".format(var))
class ValidateRenderSettings(pyblish.api.InstancePlugin):
class ValidateRenderSettings(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Validates the global render settings
* File Name Prefix must start with: `<Scene>`
@ -55,7 +57,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
* Frame Padding must be:
* default: 4
* Animation must be toggle on, in Render Settings - Common tab:
* Animation must be toggled on, in Render Settings - Common tab:
* vray: Animation on standard of specific
* arnold: Frame / Animation ext: Any choice without "(Single Frame)"
* redshift: Animation toggled on
@ -67,10 +69,11 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
"""
order = ValidateContentsOrder
label = "Render Settings"
label = "Validate Render Settings"
hosts = ["maya"]
families = ["renderlayer"]
actions = [RepairAction]
optional = True
ImagePrefixes = {
'mentalray': 'defaultRenderGlobals.imageFilePrefix',
@ -112,6 +115,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
DEFAULT_PREFIX = "<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>"
def process(self, instance):
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:

View file

@ -0,0 +1,270 @@
import json
from maya import cmds
from ayon_core.pipeline.workfile.workfile_template_builder import (
PlaceholderPlugin,
LoadPlaceholderItem,
PlaceholderLoadMixin,
)
from ayon_core.hosts.maya.api.lib import (
read,
imprint,
get_reference_node
)
from ayon_core.hosts.maya.api.workfile_template_builder import PLACEHOLDER_SET
class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
identifier = "maya.load"
label = "Maya load"
def _collect_scene_placeholders(self):
# Cache placeholder data to shared data
placeholder_nodes = self.builder.get_shared_populate_data(
"placeholder_nodes"
)
if placeholder_nodes is None:
attributes = cmds.ls("*.plugin_identifier", long=True)
placeholder_nodes = {}
for attribute in attributes:
node_name = attribute.rpartition(".")[0]
placeholder_nodes[node_name] = (
self._parse_placeholder_node_data(node_name)
)
self.builder.set_shared_populate_data(
"placeholder_nodes", placeholder_nodes
)
return placeholder_nodes
def _parse_placeholder_node_data(self, node_name):
placeholder_data = read(node_name)
parent_name = (
cmds.getAttr(node_name + ".parent", asString=True)
or node_name.rpartition("|")[0]
or ""
)
if parent_name:
siblings = cmds.listRelatives(parent_name, children=True)
else:
siblings = cmds.ls(assemblies=True)
node_shortname = node_name.rpartition("|")[2]
current_index = cmds.getAttr(node_name + ".index", asString=True)
if current_index < 0:
current_index = siblings.index(node_shortname)
placeholder_data.update({
"parent": parent_name,
"index": current_index
})
return placeholder_data
def _create_placeholder_name(self, placeholder_data):
placeholder_name_parts = placeholder_data["builder_type"].split("_")
pos = 1
placeholder_product_type = placeholder_data.get("product_type")
if placeholder_product_type is None:
placeholder_product_type = placeholder_data.get("family")
if placeholder_product_type:
placeholder_name_parts.insert(pos, placeholder_product_type)
pos += 1
# add loader arguments if any
loader_args = placeholder_data["loader_args"]
if loader_args:
loader_args = json.loads(loader_args.replace('\'', '\"'))
values = [v for v in loader_args.values()]
for value in values:
placeholder_name_parts.insert(pos, value)
pos += 1
placeholder_name = "_".join(placeholder_name_parts)
return placeholder_name.capitalize()
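# Worked example (hypothetical data): builder_type "context_asset",
# product_type "model" and loader_args "{'namespace': 'chr'}" build the
# parts ["context", "model", "chr", "asset"], joined and capitalized to
# "Context_model_chr_asset".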
def _get_loaded_repre_ids(self):
loaded_representation_ids = self.builder.get_shared_populate_data(
"loaded_representation_ids"
)
if loaded_representation_ids is None:
try:
containers = cmds.sets("AVALON_CONTAINERS", q=True)
except ValueError:
containers = []
loaded_representation_ids = {
cmds.getAttr(container + ".representation")
for container in containers
}
self.builder.set_shared_populate_data(
"loaded_representation_ids", loaded_representation_ids
)
return loaded_representation_ids
def create_placeholder(self, placeholder_data):
selection = cmds.ls(selection=True)
if len(selection) > 1:
raise ValueError("More then one item are selected")
parent = selection[0] if selection else None
placeholder_data["plugin_identifier"] = self.identifier
placeholder_name = self._create_placeholder_name(placeholder_data)
placeholder = cmds.spaceLocator(name=placeholder_name)[0]
if parent:
placeholder = cmds.parent(placeholder, selection[0])[0]
imprint(placeholder, placeholder_data)
# Add helper attributes to keep placeholder info
cmds.addAttr(
placeholder,
longName="parent",
hidden=True,
dataType="string"
)
cmds.addAttr(
placeholder,
longName="index",
hidden=True,
attributeType="short",
defaultValue=-1
)
cmds.setAttr(placeholder + ".parent", "", type="string")
def update_placeholder(self, placeholder_item, placeholder_data):
node_name = placeholder_item.scene_identifier
new_values = {}
for key, value in placeholder_data.items():
placeholder_value = placeholder_item.data.get(key)
if value != placeholder_value:
new_values[key] = value
placeholder_item.data[key] = value
for key in new_values.keys():
cmds.deleteAttr(node_name + "." + key)
imprint(node_name, new_values)
def collect_placeholders(self):
output = []
scene_placeholders = self._collect_scene_placeholders()
for node_name, placeholder_data in scene_placeholders.items():
if placeholder_data.get("plugin_identifier") != self.identifier:
continue
# TODO do data validations and maybe upgrades if they are invalid
output.append(
LoadPlaceholderItem(node_name, placeholder_data, self)
)
return output
def populate_placeholder(self, placeholder):
self.populate_load_placeholder(placeholder)
def repopulate_placeholder(self, placeholder):
repre_ids = self._get_loaded_repre_ids()
self.populate_load_placeholder(placeholder, repre_ids)
def get_placeholder_options(self, options=None):
return self.get_load_plugin_options(options)
def post_placeholder_process(self, placeholder, failed):
"""Cleanup placeholder after load of its corresponding representations.
Args:
placeholder (PlaceholderItem): Item which was just used to load
representation.
failed (bool): Loading of representation failed.
"""
# Hide placeholder and add them to placeholder set
node = placeholder.scene_identifier
# If we just populate the placeholders from current scene, the
# placeholder set will not be created so account for that.
if not cmds.objExists(PLACEHOLDER_SET):
cmds.sets(name=PLACEHOLDER_SET, empty=True)
cmds.sets(node, addElement=PLACEHOLDER_SET)
cmds.hide(node)
cmds.setAttr(node + ".hiddenInOutliner", True)
def delete_placeholder(self, placeholder):
"""Remove placeholder if building was successful"""
cmds.delete(placeholder.scene_identifier)
def load_succeed(self, placeholder, container):
self._parent_in_hierarchy(placeholder, container)
def _parent_in_hierarchy(self, placeholder, container):
"""Parent loaded container to placeholder's parent.
i.e. set the loaded content as the placeholder's sibling.
Args:
container (str): Placeholder loaded containers
"""
if not container:
return
roots = cmds.sets(container, q=True) or []
ref_node = None
try:
ref_node = get_reference_node(roots)
except AssertionError as e:
self.log.info(e.args[0])
nodes_to_parent = []
for root in roots:
if ref_node:
ref_root = cmds.referenceQuery(root, nodes=True)[0]
ref_root = (
cmds.listRelatives(ref_root, parent=True, path=True) or
[ref_root]
)
nodes_to_parent.extend(ref_root)
continue
if root.endswith("_RN"):
# Backwards compatibility for hardcoded reference names.
refRoot = cmds.referenceQuery(root, n=True)[0]
refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot]
nodes_to_parent.extend(refRoot)
elif root not in cmds.listSets(allSets=True):
nodes_to_parent.append(root)
elif not cmds.sets(root, q=True):
return
# Move loaded nodes to correct index in outliner hierarchy
placeholder_form = cmds.xform(
placeholder.scene_identifier,
q=True,
matrix=True,
worldSpace=True
)
scene_parent = cmds.listRelatives(
placeholder.scene_identifier, parent=True, fullPath=True
)
for node in set(nodes_to_parent):
cmds.reorder(node, front=True)
cmds.reorder(node, relative=placeholder.data["index"])
cmds.xform(node, matrix=placeholder_form, ws=True)
if scene_parent:
cmds.parent(node, scene_parent)
else:
if cmds.listRelatives(node, parent=True):
cmds.parent(node, world=True)
holding_sets = cmds.listSets(object=placeholder.scene_identifier)
if not holding_sets:
return
for holding_set in holding_sets:
cmds.sets(roots, forceElement=holding_set)

View file

@ -7,7 +7,7 @@ from maya import cmds
import ayon_api
from ayon_core.pipeline import get_current_project_name
import ayon_core.hosts.maya.lib as maya_lib
import ayon_core.hosts.maya.api.lib as maya_lib
from . import lib
from .alembic import get_alembic_ids_cache

View file

@ -1495,18 +1495,28 @@ class WorkfileSettings(object):
filter_knobs = [
"viewerProcess",
"wipe_position"
"wipe_position",
"monitorOutOutputTransform"
]
display, viewer = get_viewer_config_from_string(
viewer_dict["viewerProcess"]
)
viewer_process = create_viewer_profile_string(
viewer, display, path_like=False
)
display, viewer = get_viewer_config_from_string(
viewer_dict["output_transform"]
)
output_transform = create_viewer_profile_string(
viewer, display, path_like=False
)
erased_viewers = []
for v in nuke.allNodes(filter="Viewer"):
# set viewProcess to preset from settings
v["viewerProcess"].setValue(
str(viewer_dict["viewerProcess"])
)
v["viewerProcess"].setValue(viewer_process)
if str(viewer_dict["viewerProcess"]) \
not in v["viewerProcess"].value():
if viewer_process not in v["viewerProcess"].value():
copy_inputs = v.dependencies()
copy_knobs = {k: v[k].value() for k in v.knobs()
if k not in filter_knobs}
@ -1524,11 +1534,11 @@ class WorkfileSettings(object):
# set copied knobs
for k, v in copy_knobs.items():
print(k, v)
nv[k].setValue(v)
# set viewerProcess
nv["viewerProcess"].setValue(str(viewer_dict["viewerProcess"]))
nv["viewerProcess"].setValue(viewer_process)
nv["monitorOutOutputTransform"].setValue(output_transform)
if erased_viewers:
log.warning(
@ -1547,7 +1557,6 @@ class WorkfileSettings(object):
host_name="nuke"
)
viewer_process_settings = imageio_host["viewer"]["viewerProcess"]
workfile_settings = imageio_host["workfile"]
color_management = workfile_settings["color_management"]
native_ocio_config = workfile_settings["native_ocio_config"]
@ -1574,29 +1583,6 @@ class WorkfileSettings(object):
residual_path
))
# get monitor lut from settings respecting Nuke version differences
monitor_lut = workfile_settings["thumbnail_space"]
monitor_lut_data = self._get_monitor_settings(
viewer_process_settings, monitor_lut
)
monitor_lut_data["workingSpaceLUT"] = (
workfile_settings["working_space"]
)
# then set the rest
for knob, value_ in monitor_lut_data.items():
# skip unfilled ocio config path
# it will be dict in value
if isinstance(value_, dict):
continue
# skip empty values
if not value_:
continue
if self._root_node[knob].value() not in value_:
self._root_node[knob].setValue(str(value_))
log.debug("nuke.root()['{}'] changed to: {}".format(
knob, value_))
# set ocio config path
if config_data:
config_path = config_data["path"].replace("\\", "/")
@ -1611,6 +1597,31 @@ class WorkfileSettings(object):
if correct_settings:
self._set_ocio_config_path_to_workfile(config_data)
# get monitor lut from settings respecting Nuke version differences
monitor_lut_data = self._get_monitor_settings(
workfile_settings["monitor_out_lut"],
workfile_settings["monitor_lut"]
)
monitor_lut_data.update({
"workingSpaceLUT": workfile_settings["working_space"],
"int8Lut": workfile_settings["int_8_lut"],
"int16Lut": workfile_settings["int_16_lut"],
"logLut": workfile_settings["log_lut"],
"floatLut": workfile_settings["float_lut"]
})
# then set the rest
for knob, value_ in monitor_lut_data.items():
# skip unfilled ocio config path
# it will be dict in value
if isinstance(value_, dict):
continue
# skip empty values
if not value_:
continue
self._root_node[knob].setValue(str(value_))
log.debug("nuke.root()['{}'] changed to: {}".format(knob, value_))
def _get_monitor_settings(self, viewer_lut, monitor_lut):
""" Get monitor settings from viewer and monitor lut

View file

@ -18,6 +18,7 @@ from ayon_core.pipeline import (
register_loader_plugin_path,
register_creator_plugin_path,
register_inventory_action_path,
register_workfile_build_plugin_path,
AYON_INSTANCE_ID,
AVALON_INSTANCE_ID,
AVALON_CONTAINER_ID,
@ -52,8 +53,6 @@ from .lib import (
MENU_LABEL,
)
from .workfile_template_builder import (
NukePlaceholderLoadPlugin,
NukePlaceholderCreatePlugin,
build_workfile_template,
create_placeholder,
update_placeholder,
@ -76,6 +75,7 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build")
# registering pyblish gui regarding settings in presets
if os.getenv("PYBLISH_GUI", None):
@ -105,18 +105,11 @@ class NukeHost(
def get_workfile_extensions(self):
return file_extensions()
def get_workfile_build_placeholder_plugins(self):
return [
NukePlaceholderLoadPlugin,
NukePlaceholderCreatePlugin
]
def get_containers(self):
return ls()
def install(self):
''' Installing all requarements for Nuke host
'''
"""Installing all requirements for Nuke host"""
pyblish.api.register_host("nuke")
@ -125,6 +118,7 @@ class NukeHost(
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
register_inventory_action_path(INVENTORY_PATH)
register_workfile_build_plugin_path(WORKFILE_BUILD_PATH)
# Register AYON event for workfiles loading.
register_event_callback("workio.open_file", check_inventory_versions)
@ -178,7 +172,6 @@ def add_nuke_callbacks():
# set apply all workfile settings on script load and save
nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)
if nuke_settings["dirmap"]["enabled"]:
log.info("Added Nuke's dir-mapping callback ...")
# Add dirmap for file paths.

View file

@ -1151,7 +1151,6 @@ def _remove_old_knobs(node):
"OpenpypeDataGroup", "OpenpypeDataGroup_End", "deadlinePriority",
"deadlineChunkSize", "deadlineConcurrentTasks", "Deadline"
]
print(node.name())
# remove all old knobs
for knob in node.allKnobs():

View file

@ -1,30 +1,17 @@
import collections
import nuke
from ayon_core.pipeline import registered_host
from ayon_core.pipeline.workfile.workfile_template_builder import (
AbstractTemplateBuilder,
PlaceholderPlugin,
LoadPlaceholderItem,
CreatePlaceholderItem,
PlaceholderLoadMixin,
PlaceholderCreateMixin,
)
from ayon_core.tools.workfile_template_build import (
WorkfileBuildPlaceholderDialog,
)
from .lib import (
find_free_space_to_paste_nodes,
get_extreme_positions,
get_group_io_nodes,
imprint,
refresh_node,
refresh_nodes,
reset_selection,
get_names_from_nodes,
get_nodes_by_names,
select_nodes,
duplicate_node,
node_tempfile,
get_main_window,
WorkfileSettings,
)
@ -54,6 +41,7 @@ class NukeTemplateBuilder(AbstractTemplateBuilder):
return True
class NukePlaceholderPlugin(PlaceholderPlugin):
node_color = 4278190335
@ -120,843 +108,6 @@ class NukePlaceholderPlugin(PlaceholderPlugin):
nuke.delete(placeholder_node)
class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
identifier = "nuke.load"
label = "Nuke load"
def _parse_placeholder_node_data(self, node):
placeholder_data = super(
NukePlaceholderLoadPlugin, self
)._parse_placeholder_node_data(node)
node_knobs = node.knobs()
nb_children = 0
if "nb_children" in node_knobs:
nb_children = int(node_knobs["nb_children"].getValue())
placeholder_data["nb_children"] = nb_children
siblings = []
if "siblings" in node_knobs:
siblings = node_knobs["siblings"].values()
placeholder_data["siblings"] = siblings
node_full_name = node.fullName()
placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
placeholder_data["last_loaded"] = []
placeholder_data["delete"] = False
return placeholder_data
def _get_loaded_repre_ids(self):
loaded_representation_ids = self.builder.get_shared_populate_data(
"loaded_representation_ids"
)
if loaded_representation_ids is None:
loaded_representation_ids = set()
for node in nuke.allNodes():
if "repre_id" in node.knobs():
loaded_representation_ids.add(
node.knob("repre_id").getValue()
)
self.builder.set_shared_populate_data(
"loaded_representation_ids", loaded_representation_ids
)
return loaded_representation_ids
def _before_placeholder_load(self, placeholder):
placeholder.data["nodes_init"] = nuke.allNodes()
def _before_repre_load(self, placeholder, representation):
placeholder.data["last_repre_id"] = representation["id"]
def collect_placeholders(self):
output = []
scene_placeholders = self._collect_scene_placeholders()
for node_name, node in scene_placeholders.items():
plugin_identifier_knob = node.knob("plugin_identifier")
if (
plugin_identifier_knob is None
or plugin_identifier_knob.getValue() != self.identifier
):
continue
placeholder_data = self._parse_placeholder_node_data(node)
# TODO do data validations and maybe upgrades if they are invalid
output.append(
LoadPlaceholderItem(node_name, placeholder_data, self)
)
return output
def populate_placeholder(self, placeholder):
self.populate_load_placeholder(placeholder)
def repopulate_placeholder(self, placeholder):
repre_ids = self._get_loaded_repre_ids()
self.populate_load_placeholder(placeholder, repre_ids)
def get_placeholder_options(self, options=None):
return self.get_load_plugin_options(options)
def post_placeholder_process(self, placeholder, failed):
"""Cleanup placeholder after load of its corresponding representations.
Args:
placeholder (PlaceholderItem): Item which was just used to load
representation.
failed (bool): Loading of representation failed.
"""
# deselect all selected nodes
placeholder_node = nuke.toNode(placeholder.scene_identifier)
# getting the latest nodes added
# TODO get from shared populate data!
nodes_init = placeholder.data["nodes_init"]
nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init))
self.log.debug("Loaded nodes: {}".format(nodes_loaded))
if not nodes_loaded:
return
placeholder.data["delete"] = True
nodes_loaded = self._move_to_placeholder_group(
placeholder, nodes_loaded
)
placeholder.data["last_loaded"] = nodes_loaded
refresh_nodes(nodes_loaded)
# positioning of the loaded nodes
min_x, min_y, _, _ = get_extreme_positions(nodes_loaded)
for node in nodes_loaded:
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
node.setXYpos(xpos, ypos)
refresh_nodes(nodes_loaded)
# fix the problem of z_order for backdrops
self._fix_z_order(placeholder)
if placeholder.data.get("keep_placeholder"):
self._imprint_siblings(placeholder)
if placeholder.data["nb_children"] == 0:
# save initial nodes positions and dimensions, update them
# and set inputs and outputs of loaded nodes
if placeholder.data.get("keep_placeholder"):
self._imprint_inits()
self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded)
self._set_loaded_connections(placeholder)
elif placeholder.data["siblings"]:
# create copies of placeholder siblings for the new loaded nodes,
# set their inputs and outputs and update all nodes positions and
# dimensions and siblings names
siblings = get_nodes_by_names(placeholder.data["siblings"])
refresh_nodes(siblings)
copies = self._create_sib_copies(placeholder)
new_nodes = list(copies.values()) # copies nodes
self._update_nodes(placeholder, new_nodes, nodes_loaded)
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
new_nodes_name = get_names_from_nodes(new_nodes)
imprint(placeholder_node, {"siblings": new_nodes_name})
self._set_copies_connections(placeholder, copies)
self._update_nodes(
placeholder,
nuke.allNodes(),
new_nodes + nodes_loaded,
20
)
new_siblings = get_names_from_nodes(new_nodes)
placeholder.data["siblings"] = new_siblings
else:
# if the placeholder doesn't have siblings, the loaded
# nodes will be placed in a free space
xpointer, ypointer = find_free_space_to_paste_nodes(
nodes_loaded, direction="bottom", offset=200
)
node = nuke.createNode("NoOp")
reset_selection()
nuke.delete(node)
for node in nodes_loaded:
xpos = (node.xpos() - min_x) + xpointer
ypos = (node.ypos() - min_y) + ypointer
node.setXYpos(xpos, ypos)
placeholder.data["nb_children"] += 1
reset_selection()
# go back to root group
nuke.root().begin()
def _move_to_placeholder_group(self, placeholder, nodes_loaded):
"""
opening the placeholder's group and copying loaded nodes in it.
Returns :
nodes_loaded (list): the new list of pasted nodes
"""
groups_name = placeholder.data["group_name"]
reset_selection()
select_nodes(nodes_loaded)
if groups_name:
with node_tempfile() as filepath:
nuke.nodeCopy(filepath)
for node in nuke.selectedNodes():
nuke.delete(node)
group = nuke.toNode(groups_name)
group.begin()
nuke.nodePaste(filepath)
nodes_loaded = nuke.selectedNodes()
return nodes_loaded
def _fix_z_order(self, placeholder):
"""Fix the problem of z_order when a backdrop is loaded."""
nodes_loaded = placeholder.data["last_loaded"]
loaded_backdrops = []
bd_orders = set()
for node in nodes_loaded:
if isinstance(node, nuke.BackdropNode):
loaded_backdrops.append(node)
bd_orders.add(node.knob("z_order").getValue())
if not bd_orders:
return
sib_orders = set()
for node_name in placeholder.data["siblings"]:
node = nuke.toNode(node_name)
if isinstance(node, nuke.BackdropNode):
sib_orders.add(node.knob("z_order").getValue())
if not sib_orders:
return
min_order = min(bd_orders)
max_order = max(sib_orders)
for backdrop_node in loaded_backdrops:
z_order = backdrop_node.knob("z_order").getValue()
backdrop_node.knob("z_order").setValue(
z_order + max_order - min_order + 1)
def _imprint_siblings(self, placeholder):
"""
- add siblings names to placeholder attributes (nodes loaded with it)
- add Id to the attributes of all the other nodes
"""
loaded_nodes = placeholder.data["last_loaded"]
loaded_nodes_set = set(loaded_nodes)
data = {"repre_id": str(placeholder.data["last_repre_id"])}
for node in loaded_nodes:
node_knobs = node.knobs()
if "builder_type" not in node_knobs:
# save the id of representation for all imported nodes
imprint(node, data)
node.knob("repre_id").setVisible(False)
refresh_node(node)
continue
if (
"is_placeholder" not in node_knobs
or (
"is_placeholder" in node_knobs
and node.knob("is_placeholder").value()
)
):
siblings = list(loaded_nodes_set - {node})
siblings_name = get_names_from_nodes(siblings)
siblings = {"siblings": siblings_name}
imprint(node, siblings)
def _imprint_inits(self):
"""Add initial positions and dimensions to the attributes"""
for node in nuke.allNodes():
refresh_node(node)
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
node.knob("x_init").setVisible(False)
node.knob("y_init").setVisible(False)
width = node.screenWidth()
height = node.screenHeight()
if "bdwidth" in node.knobs():
imprint(node, {"w_init": width, "h_init": height})
node.knob("w_init").setVisible(False)
node.knob("h_init").setVisible(False)
refresh_node(node)
def _update_nodes(
self, placeholder, nodes, considered_nodes, offset_y=None
):
"""Adjust backdrop nodes dimensions and positions.
Considering some nodes sizes.
Args:
nodes (list): list of nodes to update
considered_nodes (list): list of nodes to consider while updating
positions and dimensions
offset (int): distance between copies
"""
placeholder_node = nuke.toNode(placeholder.scene_identifier)
min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)
diff_x = diff_y = 0
contained_nodes = [] # for backdrops
if offset_y is None:
width_ph = placeholder_node.screenWidth()
height_ph = placeholder_node.screenHeight()
diff_y = max_y - min_y - height_ph
diff_x = max_x - min_x - width_ph
contained_nodes = [placeholder_node]
min_x = placeholder_node.xpos()
min_y = placeholder_node.ypos()
else:
siblings = get_nodes_by_names(placeholder.data["siblings"])
minX, _, maxX, _ = get_extreme_positions(siblings)
diff_y = max_y - min_y + 20
diff_x = abs(max_x - min_x - maxX + minX)
contained_nodes = considered_nodes
if diff_y <= 0 and diff_x <= 0:
return
for node in nodes:
refresh_node(node)
if (
node == placeholder_node
or node in considered_nodes
):
continue
if (
not isinstance(node, nuke.BackdropNode)
or (
isinstance(node, nuke.BackdropNode)
and not set(contained_nodes) <= set(node.getNodes())
)
):
if offset_y is None and node.xpos() >= min_x:
node.setXpos(node.xpos() + diff_x)
if node.ypos() >= min_y:
node.setYpos(node.ypos() + diff_y)
else:
width = node.screenWidth()
height = node.screenHeight()
node.knob("bdwidth").setValue(width + diff_x)
node.knob("bdheight").setValue(height + diff_y)
refresh_node(node)
def _set_loaded_connections(self, placeholder):
"""
set inputs and outputs of loaded nodes"""
placeholder_node = nuke.toNode(placeholder.scene_identifier)
input_node, output_node = get_group_io_nodes(
placeholder.data["last_loaded"]
)
for node in placeholder_node.dependent():
for idx in range(node.inputs()):
if node.input(idx) == placeholder_node and output_node:
node.setInput(idx, output_node)
for node in placeholder_node.dependencies():
for idx in range(placeholder_node.inputs()):
if placeholder_node.input(idx) == node and input_node:
input_node.setInput(0, node)
def _create_sib_copies(self, placeholder):
""" creating copies of the palce_holder siblings (the ones who were
loaded with it) for the new nodes added
Returns :
copies (dict) : with copied nodes names and their copies
"""
copies = {}
siblings = get_nodes_by_names(placeholder.data["siblings"])
for node in siblings:
new_node = duplicate_node(node)
x_init = int(new_node.knob("x_init").getValue())
y_init = int(new_node.knob("y_init").getValue())
new_node.setXYpos(x_init, y_init)
if isinstance(new_node, nuke.BackdropNode):
w_init = new_node.knob("w_init").getValue()
h_init = new_node.knob("h_init").getValue()
new_node.knob("bdwidth").setValue(w_init)
new_node.knob("bdheight").setValue(h_init)
refresh_node(node)
if "repre_id" in node.knobs().keys():
node.removeKnob(node.knob("repre_id"))
copies[node.name()] = new_node
return copies
def _set_copies_connections(self, placeholder, copies):
"""Set inputs and outputs of the copies.
Args:
copies (dict): Copied nodes by their names.
"""
last_input, last_output = get_group_io_nodes(
placeholder.data["last_loaded"]
)
siblings = get_nodes_by_names(placeholder.data["siblings"])
siblings_input, siblings_output = get_group_io_nodes(siblings)
copy_input = copies[siblings_input.name()]
copy_output = copies[siblings_output.name()]
for node_init in siblings:
if node_init == siblings_output:
continue
node_copy = copies[node_init.name()]
for node in node_init.dependent():
for idx in range(node.inputs()):
if node.input(idx) != node_init:
continue
if node in siblings:
copies[node.name()].setInput(idx, node_copy)
else:
last_input.setInput(0, node_copy)
for node in node_init.dependencies():
for idx in range(node_init.inputs()):
if node_init.input(idx) != node:
continue
if node_init == siblings_input:
copy_input.setInput(idx, node)
elif node in siblings:
node_copy.setInput(idx, copies[node.name()])
else:
node_copy.setInput(idx, last_output)
siblings_input.setInput(0, copy_output)
class NukePlaceholderCreatePlugin(
NukePlaceholderPlugin, PlaceholderCreateMixin
):
identifier = "nuke.create"
label = "Nuke create"
def _parse_placeholder_node_data(self, node):
placeholder_data = super(
NukePlaceholderCreatePlugin, self
)._parse_placeholder_node_data(node)
node_knobs = node.knobs()
nb_children = 0
if "nb_children" in node_knobs:
nb_children = int(node_knobs["nb_children"].getValue())
placeholder_data["nb_children"] = nb_children
siblings = []
if "siblings" in node_knobs:
siblings = node_knobs["siblings"].values()
placeholder_data["siblings"] = siblings
node_full_name = node.fullName()
placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
placeholder_data["last_loaded"] = []
placeholder_data["delete"] = False
return placeholder_data
def _before_instance_create(self, placeholder):
placeholder.data["nodes_init"] = nuke.allNodes()
def collect_placeholders(self):
output = []
scene_placeholders = self._collect_scene_placeholders()
for node_name, node in scene_placeholders.items():
plugin_identifier_knob = node.knob("plugin_identifier")
if (
plugin_identifier_knob is None
or plugin_identifier_knob.getValue() != self.identifier
):
continue
placeholder_data = self._parse_placeholder_node_data(node)
output.append(
CreatePlaceholderItem(node_name, placeholder_data, self)
)
return output
def populate_placeholder(self, placeholder):
self.populate_create_placeholder(placeholder)
def repopulate_placeholder(self, placeholder):
self.populate_create_placeholder(placeholder)
def get_placeholder_options(self, options=None):
return self.get_create_plugin_options(options)
def post_placeholder_process(self, placeholder, failed):
"""Cleanup placeholder after load of its corresponding representations.
Args:
placeholder (PlaceholderItem): Item which was just used to load
representation.
failed (bool): Loading of representation failed.
"""
# deselect all selected nodes
placeholder_node = nuke.toNode(placeholder.scene_identifier)
# getting the latest nodes added
nodes_init = placeholder.data["nodes_init"]
nodes_created = list(set(nuke.allNodes()) - set(nodes_init))
self.log.debug("Created nodes: {}".format(nodes_created))
if not nodes_created:
return
placeholder.data["delete"] = True
nodes_created = self._move_to_placeholder_group(
placeholder, nodes_created
)
placeholder.data["last_created"] = nodes_created
refresh_nodes(nodes_created)
# positioning of the created nodes
min_x, min_y, _, _ = get_extreme_positions(nodes_created)
for node in nodes_created:
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
node.setXYpos(xpos, ypos)
refresh_nodes(nodes_created)
# fix the problem of z_order for backdrops
self._fix_z_order(placeholder)
if placeholder.data.get("keep_placeholder"):
self._imprint_siblings(placeholder)
if placeholder.data["nb_children"] == 0:
# save initial nodes positions and dimensions, update them
# and set inputs and outputs of created nodes
if placeholder.data.get("keep_placeholder"):
self._imprint_inits()
self._update_nodes(placeholder, nuke.allNodes(), nodes_created)
self._set_created_connections(placeholder)
elif placeholder.data["siblings"]:
# create copies of placeholder siblings for the new created nodes,
# set their inputs and outputs and update all nodes positions and
# dimensions and siblings names
siblings = get_nodes_by_names(placeholder.data["siblings"])
refresh_nodes(siblings)
copies = self._create_sib_copies(placeholder)
new_nodes = list(copies.values()) # copies nodes
self._update_nodes(placeholder, new_nodes, nodes_created)
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
new_nodes_name = get_names_from_nodes(new_nodes)
imprint(placeholder_node, {"siblings": new_nodes_name})
self._set_copies_connections(placeholder, copies)
self._update_nodes(
placeholder,
nuke.allNodes(),
new_nodes + nodes_created,
20
)
new_siblings = get_names_from_nodes(new_nodes)
placeholder.data["siblings"] = new_siblings
else:
# if the placeholder doesn't have siblings, the created
# nodes will be placed in a free space
xpointer, ypointer = find_free_space_to_paste_nodes(
nodes_created, direction="bottom", offset=200
)
node = nuke.createNode("NoOp")
reset_selection()
nuke.delete(node)
for node in nodes_created:
xpos = (node.xpos() - min_x) + xpointer
ypos = (node.ypos() - min_y) + ypointer
node.setXYpos(xpos, ypos)
placeholder.data["nb_children"] += 1
reset_selection()
# go back to root group
nuke.root().begin()
def _move_to_placeholder_group(self, placeholder, nodes_created):
"""
opening the placeholder's group and copying created nodes in it.
Returns :
nodes_created (list): the new list of pasted nodes
"""
groups_name = placeholder.data["group_name"]
reset_selection()
select_nodes(nodes_created)
if groups_name:
with node_tempfile() as filepath:
nuke.nodeCopy(filepath)
for node in nuke.selectedNodes():
nuke.delete(node)
group = nuke.toNode(groups_name)
group.begin()
nuke.nodePaste(filepath)
nodes_created = nuke.selectedNodes()
return nodes_created
def _fix_z_order(self, placeholder):
"""Fix the problem of z_order when a backdrop is create."""
nodes_created = placeholder.data["last_created"]
created_backdrops = []
bd_orders = set()
for node in nodes_created:
if isinstance(node, nuke.BackdropNode):
created_backdrops.append(node)
bd_orders.add(node.knob("z_order").getValue())
if not bd_orders:
return
sib_orders = set()
for node_name in placeholder.data["siblings"]:
node = nuke.toNode(node_name)
if isinstance(node, nuke.BackdropNode):
sib_orders.add(node.knob("z_order").getValue())
if not sib_orders:
return
min_order = min(bd_orders)
max_order = max(sib_orders)
for backdrop_node in created_backdrops:
z_order = backdrop_node.knob("z_order").getValue()
backdrop_node.knob("z_order").setValue(
z_order + max_order - min_order + 1)
def _imprint_siblings(self, placeholder):
"""
- add siblings names to placeholder attributes (nodes created with it)
- add Id to the attributes of all the other nodes
"""
created_nodes = placeholder.data["last_created"]
created_nodes_set = set(created_nodes)
for node in created_nodes:
node_knobs = node.knobs()
if (
"is_placeholder" not in node_knobs
or (
"is_placeholder" in node_knobs
and node.knob("is_placeholder").value()
)
):
siblings = list(created_nodes_set - {node})
siblings_name = get_names_from_nodes(siblings)
siblings = {"siblings": siblings_name}
imprint(node, siblings)
def _imprint_inits(self):
"""Add initial positions and dimensions to the attributes"""
for node in nuke.allNodes():
refresh_node(node)
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
node.knob("x_init").setVisible(False)
node.knob("y_init").setVisible(False)
width = node.screenWidth()
height = node.screenHeight()
if "bdwidth" in node.knobs():
imprint(node, {"w_init": width, "h_init": height})
node.knob("w_init").setVisible(False)
node.knob("h_init").setVisible(False)
refresh_node(node)
def _update_nodes(
self, placeholder, nodes, considered_nodes, offset_y=None
):
"""Adjust backdrop nodes dimensions and positions.
Considering some nodes sizes.
Args:
nodes (list): list of nodes to update
considered_nodes (list): list of nodes to consider while updating
positions and dimensions
offset (int): distance between copies
"""
placeholder_node = nuke.toNode(placeholder.scene_identifier)
min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)
diff_x = diff_y = 0
contained_nodes = [] # for backdrops
if offset_y is None:
width_ph = placeholder_node.screenWidth()
height_ph = placeholder_node.screenHeight()
diff_y = max_y - min_y - height_ph
diff_x = max_x - min_x - width_ph
contained_nodes = [placeholder_node]
min_x = placeholder_node.xpos()
min_y = placeholder_node.ypos()
else:
siblings = get_nodes_by_names(placeholder.data["siblings"])
minX, _, maxX, _ = get_extreme_positions(siblings)
diff_y = max_y - min_y + 20
diff_x = abs(max_x - min_x - maxX + minX)
contained_nodes = considered_nodes
if diff_y <= 0 and diff_x <= 0:
return
for node in nodes:
refresh_node(node)
if (
node == placeholder_node
or node in considered_nodes
):
continue
if (
not isinstance(node, nuke.BackdropNode)
or (
isinstance(node, nuke.BackdropNode)
and not set(contained_nodes) <= set(node.getNodes())
)
):
if offset_y is None and node.xpos() >= min_x:
node.setXpos(node.xpos() + diff_x)
if node.ypos() >= min_y:
node.setYpos(node.ypos() + diff_y)
else:
width = node.screenWidth()
height = node.screenHeight()
node.knob("bdwidth").setValue(width + diff_x)
node.knob("bdheight").setValue(height + diff_y)
refresh_node(node)
def _set_created_connections(self, placeholder):
"""
set inputs and outputs of created nodes"""
placeholder_node = nuke.toNode(placeholder.scene_identifier)
input_node, output_node = get_group_io_nodes(
placeholder.data["last_created"]
)
for node in placeholder_node.dependent():
for idx in range(node.inputs()):
if node.input(idx) == placeholder_node and output_node:
node.setInput(idx, output_node)
for node in placeholder_node.dependencies():
for idx in range(placeholder_node.inputs()):
if placeholder_node.input(idx) == node and input_node:
input_node.setInput(0, node)
def _create_sib_copies(self, placeholder):
""" creating copies of the palce_holder siblings (the ones who were
created with it) for the new nodes added
Returns :
copies (dict) : with copied nodes names and their copies
"""
copies = {}
siblings = get_nodes_by_names(placeholder.data["siblings"])
for node in siblings:
new_node = duplicate_node(node)
x_init = int(new_node.knob("x_init").getValue())
y_init = int(new_node.knob("y_init").getValue())
new_node.setXYpos(x_init, y_init)
if isinstance(new_node, nuke.BackdropNode):
w_init = new_node.knob("w_init").getValue()
h_init = new_node.knob("h_init").getValue()
new_node.knob("bdwidth").setValue(w_init)
new_node.knob("bdheight").setValue(h_init)
refresh_node(node)
if "repre_id" in node.knobs().keys():
node.removeKnob(node.knob("repre_id"))
copies[node.name()] = new_node
return copies
def _set_copies_connections(self, placeholder, copies):
"""Set inputs and outputs of the copies.
Args:
copies (dict): Copied nodes by their names.
"""
last_input, last_output = get_group_io_nodes(
placeholder.data["last_created"]
)
siblings = get_nodes_by_names(placeholder.data["siblings"])
siblings_input, siblings_output = get_group_io_nodes(siblings)
copy_input = copies[siblings_input.name()]
copy_output = copies[siblings_output.name()]
for node_init in siblings:
if node_init == siblings_output:
continue
node_copy = copies[node_init.name()]
for node in node_init.dependent():
for idx in range(node.inputs()):
if node.input(idx) != node_init:
continue
if node in siblings:
copies[node.name()].setInput(idx, node_copy)
else:
last_input.setInput(0, node_copy)
for node in node_init.dependencies():
for idx in range(node_init.inputs()):
if node_init.input(idx) != node:
continue
if node_init == siblings_input:
copy_input.setInput(idx, node)
elif node in siblings:
node_copy.setInput(idx, copies[node.name()])
else:
node_copy.setInput(idx, last_output)
siblings_input.setInput(0, copy_output)
def build_workfile_template(*args, **kwargs):
builder = NukeTemplateBuilder(registered_host())
builder.build_template(*args, **kwargs)
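For orientation, a minimal usage sketch: 'build_workfile_template' simply forwards its arguments to 'AbstractTemplateBuilder.build_template', so preset values can be overridden explicitly (the keyword values below are illustrative, not a prescribed setup).
# Minimal usage sketch; with no arguments the template preset of the
# current context is used.
build_workfile_template()
# Or override preset values explicitly:
build_workfile_template(
    template_path="/templates/compositing.nk",  # hypothetical path
    keep_placeholders=True,
    create_first_version=False,
)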

View file

@ -0,0 +1,428 @@
import nuke
from ayon_core.pipeline.workfile.workfile_template_builder import (
CreatePlaceholderItem,
PlaceholderCreateMixin,
)
from ayon_core.hosts.nuke.api.lib import (
find_free_space_to_paste_nodes,
get_extreme_positions,
get_group_io_nodes,
imprint,
refresh_node,
refresh_nodes,
reset_selection,
get_names_from_nodes,
get_nodes_by_names,
select_nodes,
duplicate_node,
node_tempfile,
)
from ayon_core.hosts.nuke.api.workfile_template_builder import (
NukePlaceholderPlugin
)
class NukePlaceholderCreatePlugin(
NukePlaceholderPlugin, PlaceholderCreateMixin
):
identifier = "nuke.create"
label = "Nuke create"
def _parse_placeholder_node_data(self, node):
placeholder_data = super(
NukePlaceholderCreatePlugin, self
)._parse_placeholder_node_data(node)
node_knobs = node.knobs()
nb_children = 0
if "nb_children" in node_knobs:
nb_children = int(node_knobs["nb_children"].getValue())
placeholder_data["nb_children"] = nb_children
siblings = []
if "siblings" in node_knobs:
siblings = node_knobs["siblings"].values()
placeholder_data["siblings"] = siblings
node_full_name = node.fullName()
placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
placeholder_data["last_loaded"] = []
placeholder_data["delete"] = False
return placeholder_data
def _before_instance_create(self, placeholder):
placeholder.data["nodes_init"] = nuke.allNodes()
def collect_placeholders(self):
output = []
scene_placeholders = self._collect_scene_placeholders()
for node_name, node in scene_placeholders.items():
plugin_identifier_knob = node.knob("plugin_identifier")
if (
plugin_identifier_knob is None
or plugin_identifier_knob.getValue() != self.identifier
):
continue
placeholder_data = self._parse_placeholder_node_data(node)
output.append(
CreatePlaceholderItem(node_name, placeholder_data, self)
)
return output
def populate_placeholder(self, placeholder):
self.populate_create_placeholder(placeholder)
def repopulate_placeholder(self, placeholder):
self.populate_create_placeholder(placeholder)
def get_placeholder_options(self, options=None):
return self.get_create_plugin_options(options)
def post_placeholder_process(self, placeholder, failed):
"""Cleanup placeholder after load of its corresponding representations.
Args:
placeholder (PlaceholderItem): Item which was just used to load
representation.
failed (bool): Loading of representation failed.
"""
# deselect all selected nodes
placeholder_node = nuke.toNode(placeholder.scene_identifier)
# getting the latest nodes added
nodes_init = placeholder.data["nodes_init"]
nodes_created = list(set(nuke.allNodes()) - set(nodes_init))
self.log.debug("Created nodes: {}".format(nodes_created))
if not nodes_created:
return
placeholder.data["delete"] = True
nodes_created = self._move_to_placeholder_group(
placeholder, nodes_created
)
placeholder.data["last_created"] = nodes_created
refresh_nodes(nodes_created)
# positioning of the created nodes
min_x, min_y, _, _ = get_extreme_positions(nodes_created)
for node in nodes_created:
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
node.setXYpos(xpos, ypos)
refresh_nodes(nodes_created)
# fix the problem of z_order for backdrops
self._fix_z_order(placeholder)
if placeholder.data.get("keep_placeholder"):
self._imprint_siblings(placeholder)
if placeholder.data["nb_children"] == 0:
# save initial nodes positions and dimensions, update them
# and set inputs and outputs of created nodes
if placeholder.data.get("keep_placeholder"):
self._imprint_inits()
self._update_nodes(placeholder, nuke.allNodes(), nodes_created)
self._set_created_connections(placeholder)
elif placeholder.data["siblings"]:
# create copies of placeholder siblings for the new created nodes,
# set their inputs and outputs and update all nodes positions and
# dimensions and siblings names
siblings = get_nodes_by_names(placeholder.data["siblings"])
refresh_nodes(siblings)
copies = self._create_sib_copies(placeholder)
new_nodes = list(copies.values())  # copied nodes
self._update_nodes(placeholder, new_nodes, nodes_created)
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
new_nodes_name = get_names_from_nodes(new_nodes)
imprint(placeholder_node, {"siblings": new_nodes_name})
self._set_copies_connections(placeholder, copies)
self._update_nodes(
placeholder,
nuke.allNodes(),
new_nodes + nodes_created,
offset_y=20
)
new_siblings = get_names_from_nodes(new_nodes)
placeholder.data["siblings"] = new_siblings
else:
# if the placeholder doesn't have siblings, the created
# nodes will be placed in a free space
xpointer, ypointer = find_free_space_to_paste_nodes(
nodes_created, direction="bottom", offset=200
)
node = nuke.createNode("NoOp")
reset_selection()
nuke.delete(node)
for node in nodes_created:
xpos = (node.xpos() - min_x) + xpointer
ypos = (node.ypos() - min_y) + ypointer
node.setXYpos(xpos, ypos)
placeholder.data["nb_children"] += 1
reset_selection()
# go back to root group
nuke.root().begin()
def _move_to_placeholder_group(self, placeholder, nodes_created):
"""
opening the placeholder's group and copying created nodes in it.
Returns :
nodes_created (list): the new list of pasted nodes
"""
groups_name = placeholder.data["group_name"]
reset_selection()
select_nodes(nodes_created)
if groups_name:
with node_tempfile() as filepath:
nuke.nodeCopy(filepath)
for node in nuke.selectedNodes():
nuke.delete(node)
group = nuke.toNode(groups_name)
group.begin()
nuke.nodePaste(filepath)
nodes_created = nuke.selectedNodes()
return nodes_created
def _fix_z_order(self, placeholder):
"""Fix the problem of z_order when a backdrop is create."""
nodes_created = placeholder.data["last_created"]
created_backdrops = []
bd_orders = set()
for node in nodes_created:
if isinstance(node, nuke.BackdropNode):
created_backdrops.append(node)
bd_orders.add(node.knob("z_order").getValue())
if not bd_orders:
return
sib_orders = set()
for node_name in placeholder.data["siblings"]:
node = nuke.toNode(node_name)
if isinstance(node, nuke.BackdropNode):
sib_orders.add(node.knob("z_order").getValue())
if not sib_orders:
return
min_order = min(bd_orders)
max_order = max(sib_orders)
for backdrop_node in created_backdrops:
z_order = backdrop_node.knob("z_order").getValue()
backdrop_node.knob("z_order").setValue(
z_order + max_order - min_order + 1)
def _imprint_siblings(self, placeholder):
"""
- add siblings names to placeholder attributes (nodes created with it)
- add Id to the attributes of all the other nodes
"""
created_nodes = placeholder.data["last_created"]
created_nodes_set = set(created_nodes)
for node in created_nodes:
node_knobs = node.knobs()
if (
"is_placeholder" not in node_knobs
or (
"is_placeholder" in node_knobs
and node.knob("is_placeholder").value()
)
):
siblings = list(created_nodes_set - {node})
siblings_name = get_names_from_nodes(siblings)
siblings = {"siblings": siblings_name}
imprint(node, siblings)
def _imprint_inits(self):
"""Add initial positions and dimensions to the attributes"""
for node in nuke.allNodes():
refresh_node(node)
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
node.knob("x_init").setVisible(False)
node.knob("y_init").setVisible(False)
width = node.screenWidth()
height = node.screenHeight()
if "bdwidth" in node.knobs():
imprint(node, {"w_init": width, "h_init": height})
node.knob("w_init").setVisible(False)
node.knob("h_init").setVisible(False)
refresh_node(node)
def _update_nodes(
self, placeholder, nodes, considered_nodes, offset_y=None
):
"""Adjust backdrop nodes dimensions and positions.
Considering some nodes sizes.
Args:
nodes (list): list of nodes to update
considered_nodes (list): list of nodes to consider while updating
positions and dimensions
offset (int): distance between copies
"""
placeholder_node = nuke.toNode(placeholder.scene_identifier)
min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)
diff_x = diff_y = 0
contained_nodes = [] # for backdrops
if offset_y is None:
width_ph = placeholder_node.screenWidth()
height_ph = placeholder_node.screenHeight()
diff_y = max_y - min_y - height_ph
diff_x = max_x - min_x - width_ph
contained_nodes = [placeholder_node]
min_x = placeholder_node.xpos()
min_y = placeholder_node.ypos()
else:
siblings = get_nodes_by_names(placeholder.data["siblings"])
minX, _, maxX, _ = get_extreme_positions(siblings)
diff_y = max_y - min_y + 20
diff_x = abs(max_x - min_x - maxX + minX)
contained_nodes = considered_nodes
if diff_y <= 0 and diff_x <= 0:
return
for node in nodes:
refresh_node(node)
if (
node == placeholder_node
or node in considered_nodes
):
continue
if (
not isinstance(node, nuke.BackdropNode)
or (
isinstance(node, nuke.BackdropNode)
and not set(contained_nodes) <= set(node.getNodes())
)
):
if offset_y is None and node.xpos() >= min_x:
node.setXpos(node.xpos() + diff_x)
if node.ypos() >= min_y:
node.setYpos(node.ypos() + diff_y)
else:
width = node.screenWidth()
height = node.screenHeight()
node.knob("bdwidth").setValue(width + diff_x)
node.knob("bdheight").setValue(height + diff_y)
refresh_node(node)
def _set_created_connections(self, placeholder):
"""
set inputs and outputs of created nodes"""
placeholder_node = nuke.toNode(placeholder.scene_identifier)
input_node, output_node = get_group_io_nodes(
placeholder.data["last_created"]
)
for node in placeholder_node.dependent():
for idx in range(node.inputs()):
if node.input(idx) == placeholder_node and output_node:
node.setInput(idx, output_node)
for node in placeholder_node.dependencies():
for idx in range(placeholder_node.inputs()):
if placeholder_node.input(idx) == node and input_node:
input_node.setInput(0, node)
def _create_sib_copies(self, placeholder):
""" creating copies of the palce_holder siblings (the ones who were
created with it) for the new nodes added
Returns :
copies (dict) : with copied nodes names and their copies
"""
copies = {}
siblings = get_nodes_by_names(placeholder.data["siblings"])
for node in siblings:
new_node = duplicate_node(node)
x_init = int(new_node.knob("x_init").getValue())
y_init = int(new_node.knob("y_init").getValue())
new_node.setXYpos(x_init, y_init)
if isinstance(new_node, nuke.BackdropNode):
w_init = new_node.knob("w_init").getValue()
h_init = new_node.knob("h_init").getValue()
new_node.knob("bdwidth").setValue(w_init)
new_node.knob("bdheight").setValue(h_init)
refresh_node(node)
if "repre_id" in node.knobs().keys():
node.removeKnob(node.knob("repre_id"))
copies[node.name()] = new_node
return copies
def _set_copies_connections(self, placeholder, copies):
"""Set inputs and outputs of the copies.
Args:
copies (dict): Copied nodes by their names.
"""
last_input, last_output = get_group_io_nodes(
placeholder.data["last_created"]
)
siblings = get_nodes_by_names(placeholder.data["siblings"])
siblings_input, siblings_output = get_group_io_nodes(siblings)
copy_input = copies[siblings_input.name()]
copy_output = copies[siblings_output.name()]
for node_init in siblings:
if node_init == siblings_output:
continue
node_copy = copies[node_init.name()]
for node in node_init.dependent():
for idx in range(node.inputs()):
if node.input(idx) != node_init:
continue
if node in siblings:
copies[node.name()].setInput(idx, node_copy)
else:
last_input.setInput(0, node_copy)
for node in node_init.dependencies():
for idx in range(node_init.inputs()):
if node_init.input(idx) != node:
continue
if node_init == siblings_input:
copy_input.setInput(idx, node)
elif node in siblings:
node_copy.setInput(idx, copies[node.name()])
else:
node_copy.setInput(idx, last_output)
siblings_input.setInput(0, copy_output)

View file

@ -0,0 +1,455 @@
import nuke
from ayon_core.pipeline.workfile.workfile_template_builder import (
LoadPlaceholderItem,
PlaceholderLoadMixin,
)
from ayon_core.hosts.nuke.api.lib import (
find_free_space_to_paste_nodes,
get_extreme_positions,
get_group_io_nodes,
imprint,
refresh_node,
refresh_nodes,
reset_selection,
get_names_from_nodes,
get_nodes_by_names,
select_nodes,
duplicate_node,
node_tempfile,
)
from ayon_core.hosts.nuke.api.workfile_template_builder import (
NukePlaceholderPlugin
)
class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
identifier = "nuke.load"
label = "Nuke load"
def _parse_placeholder_node_data(self, node):
placeholder_data = super(
NukePlaceholderLoadPlugin, self
)._parse_placeholder_node_data(node)
node_knobs = node.knobs()
nb_children = 0
if "nb_children" in node_knobs:
nb_children = int(node_knobs["nb_children"].getValue())
placeholder_data["nb_children"] = nb_children
siblings = []
if "siblings" in node_knobs:
siblings = node_knobs["siblings"].values()
placeholder_data["siblings"] = siblings
node_full_name = node.fullName()
placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
placeholder_data["last_loaded"] = []
placeholder_data["delete"] = False
return placeholder_data
def _get_loaded_repre_ids(self):
loaded_representation_ids = self.builder.get_shared_populate_data(
"loaded_representation_ids"
)
if loaded_representation_ids is None:
loaded_representation_ids = set()
for node in nuke.allNodes():
if "repre_id" in node.knobs():
loaded_representation_ids.add(
node.knob("repre_id").getValue()
)
self.builder.set_shared_populate_data(
"loaded_representation_ids", loaded_representation_ids
)
return loaded_representation_ids
def _before_placeholder_load(self, placeholder):
placeholder.data["nodes_init"] = nuke.allNodes()
def _before_repre_load(self, placeholder, representation):
placeholder.data["last_repre_id"] = representation["id"]
def collect_placeholders(self):
output = []
scene_placeholders = self._collect_scene_placeholders()
for node_name, node in scene_placeholders.items():
plugin_identifier_knob = node.knob("plugin_identifier")
if (
plugin_identifier_knob is None
or plugin_identifier_knob.getValue() != self.identifier
):
continue
placeholder_data = self._parse_placeholder_node_data(node)
# TODO do data validations and maybe upgrades if they are invalid
output.append(
LoadPlaceholderItem(node_name, placeholder_data, self)
)
return output
def populate_placeholder(self, placeholder):
self.populate_load_placeholder(placeholder)
def repopulate_placeholder(self, placeholder):
repre_ids = self._get_loaded_repre_ids()
self.populate_load_placeholder(placeholder, repre_ids)
def get_placeholder_options(self, options=None):
return self.get_load_plugin_options(options)
def post_placeholder_process(self, placeholder, failed):
"""Cleanup placeholder after load of its corresponding representations.
Args:
placeholder (PlaceholderItem): Item which was just used to load
representation.
failed (bool): Loading of representation failed.
"""
# deselect all selected nodes
placeholder_node = nuke.toNode(placeholder.scene_identifier)
# getting the latest nodes added
# TODO get from shared populate data!
nodes_init = placeholder.data["nodes_init"]
nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init))
self.log.debug("Loaded nodes: {}".format(nodes_loaded))
if not nodes_loaded:
return
placeholder.data["delete"] = True
nodes_loaded = self._move_to_placeholder_group(
placeholder, nodes_loaded
)
placeholder.data["last_loaded"] = nodes_loaded
refresh_nodes(nodes_loaded)
# positioning of the loaded nodes
min_x, min_y, _, _ = get_extreme_positions(nodes_loaded)
for node in nodes_loaded:
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
node.setXYpos(xpos, ypos)
refresh_nodes(nodes_loaded)
# fix the problem of z_order for backdrops
self._fix_z_order(placeholder)
if placeholder.data.get("keep_placeholder"):
self._imprint_siblings(placeholder)
if placeholder.data["nb_children"] == 0:
# save initial nodes positions and dimensions, update them
# and set inputs and outputs of loaded nodes
if placeholder.data.get("keep_placeholder"):
self._imprint_inits()
self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded)
self._set_loaded_connections(placeholder)
elif placeholder.data["siblings"]:
# create copies of placeholder siblings for the new loaded nodes,
# set their inputs and outputs and update all nodes positions and
# dimensions and siblings names
siblings = get_nodes_by_names(placeholder.data["siblings"])
refresh_nodes(siblings)
copies = self._create_sib_copies(placeholder)
new_nodes = list(copies.values())  # copied nodes
self._update_nodes(placeholder, new_nodes, nodes_loaded)
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
new_nodes_name = get_names_from_nodes(new_nodes)
imprint(placeholder_node, {"siblings": new_nodes_name})
self._set_copies_connections(placeholder, copies)
self._update_nodes(
placeholder,
nuke.allNodes(),
new_nodes + nodes_loaded,
offset_y=20
)
new_siblings = get_names_from_nodes(new_nodes)
placeholder.data["siblings"] = new_siblings
else:
# if the placeholder doesn't have siblings, the loaded
# nodes will be placed in a free space
xpointer, ypointer = find_free_space_to_paste_nodes(
nodes_loaded, direction="bottom", offset=200
)
node = nuke.createNode("NoOp")
reset_selection()
nuke.delete(node)
for node in nodes_loaded:
xpos = (node.xpos() - min_x) + xpointer
ypos = (node.ypos() - min_y) + ypointer
node.setXYpos(xpos, ypos)
placeholder.data["nb_children"] += 1
reset_selection()
# go back to root group
nuke.root().begin()
def _move_to_placeholder_group(self, placeholder, nodes_loaded):
"""
opening the placeholder's group and copying loaded nodes in it.
Returns :
nodes_loaded (list): the new list of pasted nodes
"""
groups_name = placeholder.data["group_name"]
reset_selection()
select_nodes(nodes_loaded)
if groups_name:
with node_tempfile() as filepath:
nuke.nodeCopy(filepath)
for node in nuke.selectedNodes():
nuke.delete(node)
group = nuke.toNode(groups_name)
group.begin()
nuke.nodePaste(filepath)
nodes_loaded = nuke.selectedNodes()
return nodes_loaded
def _fix_z_order(self, placeholder):
"""Fix the problem of z_order when a backdrop is loaded."""
nodes_loaded = placeholder.data["last_loaded"]
loaded_backdrops = []
bd_orders = set()
for node in nodes_loaded:
if isinstance(node, nuke.BackdropNode):
loaded_backdrops.append(node)
bd_orders.add(node.knob("z_order").getValue())
if not bd_orders:
return
sib_orders = set()
for node_name in placeholder.data["siblings"]:
node = nuke.toNode(node_name)
if isinstance(node, nuke.BackdropNode):
sib_orders.add(node.knob("z_order").getValue())
if not sib_orders:
return
min_order = min(bd_orders)
max_order = max(sib_orders)
for backdrop_node in loaded_backdrops:
z_order = backdrop_node.knob("z_order").getValue()
backdrop_node.knob("z_order").setValue(
z_order + max_order - min_order + 1)
def _imprint_siblings(self, placeholder):
"""
- add siblings names to placeholder attributes (nodes loaded with it)
- add Id to the attributes of all the other nodes
"""
loaded_nodes = placeholder.data["last_loaded"]
loaded_nodes_set = set(loaded_nodes)
data = {"repre_id": str(placeholder.data["last_repre_id"])}
for node in loaded_nodes:
node_knobs = node.knobs()
if "builder_type" not in node_knobs:
# save the id of representation for all imported nodes
imprint(node, data)
node.knob("repre_id").setVisible(False)
refresh_node(node)
continue
if (
"is_placeholder" not in node_knobs
or (
"is_placeholder" in node_knobs
and node.knob("is_placeholder").value()
)
):
siblings = list(loaded_nodes_set - {node})
siblings_name = get_names_from_nodes(siblings)
siblings = {"siblings": siblings_name}
imprint(node, siblings)
def _imprint_inits(self):
"""Add initial positions and dimensions to the attributes"""
for node in nuke.allNodes():
refresh_node(node)
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
node.knob("x_init").setVisible(False)
node.knob("y_init").setVisible(False)
width = node.screenWidth()
height = node.screenHeight()
if "bdwidth" in node.knobs():
imprint(node, {"w_init": width, "h_init": height})
node.knob("w_init").setVisible(False)
node.knob("h_init").setVisible(False)
refresh_node(node)
def _update_nodes(
self, placeholder, nodes, considered_nodes, offset_y=None
):
"""Adjust backdrop nodes dimensions and positions.
Considering some nodes sizes.
Args:
nodes (list): list of nodes to update
considered_nodes (list): list of nodes to consider while updating
positions and dimensions
offset (int): distance between copies
"""
placeholder_node = nuke.toNode(placeholder.scene_identifier)
min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)
diff_x = diff_y = 0
contained_nodes = [] # for backdrops
if offset_y is None:
width_ph = placeholder_node.screenWidth()
height_ph = placeholder_node.screenHeight()
diff_y = max_y - min_y - height_ph
diff_x = max_x - min_x - width_ph
contained_nodes = [placeholder_node]
min_x = placeholder_node.xpos()
min_y = placeholder_node.ypos()
else:
siblings = get_nodes_by_names(placeholder.data["siblings"])
minX, _, maxX, _ = get_extreme_positions(siblings)
diff_y = max_y - min_y + 20
diff_x = abs(max_x - min_x - maxX + minX)
contained_nodes = considered_nodes
if diff_y <= 0 and diff_x <= 0:
return
for node in nodes:
refresh_node(node)
if (
node == placeholder_node
or node in considered_nodes
):
continue
if (
not isinstance(node, nuke.BackdropNode)
or (
isinstance(node, nuke.BackdropNode)
and not set(contained_nodes) <= set(node.getNodes())
)
):
if offset_y is None and node.xpos() >= min_x:
node.setXpos(node.xpos() + diff_x)
if node.ypos() >= min_y:
node.setYpos(node.ypos() + diff_y)
else:
width = node.screenWidth()
height = node.screenHeight()
node.knob("bdwidth").setValue(width + diff_x)
node.knob("bdheight").setValue(height + diff_y)
refresh_node(node)
def _set_loaded_connections(self, placeholder):
"""
set inputs and outputs of loaded nodes"""
placeholder_node = nuke.toNode(placeholder.scene_identifier)
input_node, output_node = get_group_io_nodes(
placeholder.data["last_loaded"]
)
for node in placeholder_node.dependent():
for idx in range(node.inputs()):
if node.input(idx) == placeholder_node and output_node:
node.setInput(idx, output_node)
for node in placeholder_node.dependencies():
for idx in range(placeholder_node.inputs()):
if placeholder_node.input(idx) == node and input_node:
input_node.setInput(0, node)
def _create_sib_copies(self, placeholder):
""" creating copies of the palce_holder siblings (the ones who were
loaded with it) for the new nodes added
Returns :
copies (dict) : with copied nodes names and their copies
"""
copies = {}
siblings = get_nodes_by_names(placeholder.data["siblings"])
for node in siblings:
new_node = duplicate_node(node)
x_init = int(new_node.knob("x_init").getValue())
y_init = int(new_node.knob("y_init").getValue())
new_node.setXYpos(x_init, y_init)
if isinstance(new_node, nuke.BackdropNode):
w_init = new_node.knob("w_init").getValue()
h_init = new_node.knob("h_init").getValue()
new_node.knob("bdwidth").setValue(w_init)
new_node.knob("bdheight").setValue(h_init)
refresh_node(node)
if "repre_id" in node.knobs().keys():
node.removeKnob(node.knob("repre_id"))
copies[node.name()] = new_node
return copies
def _set_copies_connections(self, placeholder, copies):
"""Set inputs and outputs of the copies.
Args:
copies (dict): Copied nodes by their names.
"""
last_input, last_output = get_group_io_nodes(
placeholder.data["last_loaded"]
)
siblings = get_nodes_by_names(placeholder.data["siblings"])
siblings_input, siblings_output = get_group_io_nodes(siblings)
copy_input = copies[siblings_input.name()]
copy_output = copies[siblings_output.name()]
for node_init in siblings:
if node_init == siblings_output:
continue
node_copy = copies[node_init.name()]
for node in node_init.dependent():
for idx in range(node.inputs()):
if node.input(idx) != node_init:
continue
if node in siblings:
copies[node.name()].setInput(idx, node_copy)
else:
last_input.setInput(0, node_copy)
for node in node_init.dependencies():
for idx in range(node_init.inputs()):
if node_init.input(idx) != node:
continue
if node_init == siblings_input:
copy_input.setInput(idx, node)
elif node in siblings:
node_copy.setInput(idx, copies[node.name()])
else:
node_copy.setInput(idx, last_output)
siblings_input.setInput(0, copy_output)

View file

@ -260,11 +260,11 @@ class UEProjectGenerationWorker(UEWorker):
self.failed.emit(msg, return_code)
raise RuntimeError(msg)
# ensure we have PySide2 installed in engine
# ensure we have PySide2/6 installed in engine
self.progress.emit(0)
self.stage_begin.emit(
(f"Checking PySide2 installation... {stage_count} "
(f"Checking Qt bindings installation... {stage_count} "
f" out of {stage_count}"))
python_path = None
if platform.system().lower() == "windows":
@ -287,11 +287,30 @@ class UEProjectGenerationWorker(UEWorker):
msg = f"Unreal Python not found at {python_path}"
self.failed.emit(msg, 1)
raise RuntimeError(msg)
pyside_cmd = [python_path.as_posix(),
"-m",
"pip",
"install",
"pyside2"]
pyside_version = "PySide2"
ue_version = self.ue_version.split(".")
if int(ue_version[0]) == 5 and int(ue_version[1]) >= 4:
# Use PySide6 6.6.3 because 6.7.0 had a bug
# - 'QPushButton' can't be added to 'QBoxLayout'
pyside_version = "PySide6==6.6.3"
site_packages_prefix = python_path.parent.as_posix()
pyside_cmd = [
python_path.as_posix(),
"-m", "pip",
"install",
"--ignore-installed",
pyside_version,
]
if platform.system().lower() == "windows":
pyside_cmd += ["--target", site_packages_prefix]
print(f"--- Installing {pyside_version} ...")
print(" ".join(pyside_cmd))
pyside_install = subprocess.Popen(pyside_cmd,
stdout=subprocess.PIPE,
@ -306,8 +325,8 @@ class UEProjectGenerationWorker(UEWorker):
return_code = pyside_install.wait()
if return_code and return_code != 0:
msg = ("Failed to create the project! "
"The installation of PySide2 has failed!")
msg = (f"Failed to create the project! {return_code} "
f"The installation of {pyside_version} has failed!: {pyside_install}")
self.failed.emit(msg, return_code)
raise RuntimeError(msg)

View file

@ -27,6 +27,10 @@ from .local_settings import (
get_openpype_username,
)
from .ayon_connection import initialize_ayon_connection
from .cache import (
CacheItem,
NestedCacheItem,
)
from .events import (
emit_event,
register_event_callback
@ -157,6 +161,9 @@ __all__ = [
"initialize_ayon_connection",
"CacheItem",
"NestedCacheItem",
"emit_event",
"register_event_callback",

View file

@ -0,0 +1,250 @@
import time
import collections
InitInfo = collections.namedtuple(
"InitInfo",
["default_factory", "lifetime"]
)
def _default_factory_func():
return None
class CacheItem:
"""Simple cache item with lifetime and default factory for default value.
Default factory should return default value that is used on init
and on reset.
Args:
default_factory (Optional[callable]): Function that returns default
value used on init and on reset.
lifetime (Optional[int]): Lifetime of the cache data in seconds.
Default lifetime is 120 seconds.
"""
def __init__(self, default_factory=None, lifetime=None):
if lifetime is None:
lifetime = 120
self._lifetime = lifetime
self._last_update = None
if default_factory is None:
default_factory = _default_factory_func
self._default_factory = default_factory
self._data = default_factory()
@property
def is_valid(self):
"""Is cache valid to use.
Returns:
bool: True if cache is valid, False otherwise.
"""
if self._last_update is None:
return False
return (time.time() - self._last_update) < self._lifetime
def set_lifetime(self, lifetime):
"""Change lifetime of cache item.
Args:
lifetime (int): Lifetime of the cache data in seconds.
"""
self._lifetime = lifetime
def set_invalid(self):
"""Set cache as invalid."""
self._last_update = None
def reset(self):
"""Set cache as invalid and reset data."""
self._last_update = None
self._data = self._default_factory()
def get_data(self):
"""Receive cached data.
Returns:
Any: Any data that are cached.
"""
return self._data
def update_data(self, data):
"""Update cache data.
Args:
data (Any): Any data that are cached.
"""
self._data = data
self._last_update = time.time()
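A short usage sketch of the 'is_valid' / 'update_data' / 'get_data' cycle described above; 'fetch_projects' is a hypothetical function used only for illustration.
# Usage sketch: cache an expensive lookup for 30 seconds.
_projects_cache = CacheItem(default_factory=list, lifetime=30)

def get_projects():
    if not _projects_cache.is_valid:
        # Refresh only when the cached value expired or was reset.
        _projects_cache.update_data(fetch_projects())  # hypothetical fetch
    return _projects_cache.get_data()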
class NestedCacheItem:
"""Helper for cached items stored in nested structure.
Example:
>>> cache = NestedCacheItem(levels=2, default_factory=lambda: 0)
>>> cache["a"]["b"].is_valid
False
>>> cache["a"]["b"].get_data()
0
>>> cache["a"]["b"] = 1
>>> cache["a"]["b"].is_valid
True
>>> cache["a"]["b"].get_data()
1
>>> cache.reset()
>>> cache["a"]["b"].is_valid
False
Args:
levels (int): Number of nested levels where read cache is stored.
default_factory (Optional[callable]): Function that returns default
value used on init and on reset.
lifetime (Optional[int]): Lifetime of the cache data in seconds.
Default value is based on default value of 'CacheItem'.
_init_info (Optional[InitInfo]): Private argument. Init info for
nested cache where created from parent item.
"""
def __init__(
self, levels=1, default_factory=None, lifetime=None, _init_info=None
):
if levels < 1:
raise ValueError("Nested levels must be greater than 0")
self._data_by_key = {}
if _init_info is None:
_init_info = InitInfo(default_factory, lifetime)
self._init_info = _init_info
self._levels = levels
def __getitem__(self, key):
"""Get cached data.
Args:
key (str): Key of the cache item.
Returns:
Union[NestedCacheItem, CacheItem]: Cache item.
"""
cache = self._data_by_key.get(key)
if cache is None:
if self._levels > 1:
cache = NestedCacheItem(
levels=self._levels - 1,
_init_info=self._init_info
)
else:
cache = CacheItem(
self._init_info.default_factory,
self._init_info.lifetime
)
self._data_by_key[key] = cache
return cache
def __setitem__(self, key, value):
"""Update cached data.
Args:
key (str): Key of the cache item.
value (Any): Any data that are cached.
"""
if self._levels > 1:
raise AttributeError((
"{} does not support '__setitem__'. Lower nested level by {}"
).format(self.__class__.__name__, self._levels - 1))
cache = self[key]
cache.update_data(value)
def get(self, key):
"""Get cached data.
Args:
key (str): Key of the cache item.
Returns:
Union[NestedCacheItem, CacheItem]: Cache item.
"""
return self[key]
def cached_count(self):
"""Amount of cached items.
Returns:
int: Amount of cached items.
"""
return len(self._data_by_key)
def clear_key(self, key):
"""Clear cached item by key.
Args:
key (str): Key of the cache item.
"""
self._data_by_key.pop(key, None)
def clear_invalid(self):
"""Clear all invalid cache items.
Note:
To clear all cache items use 'reset'.
"""
changed = {}
children_are_nested = self._levels > 1
for key, cache in tuple(self._data_by_key.items()):
if children_are_nested:
output = cache.clear_invalid()
if output:
changed[key] = output
if not cache.cached_count():
self._data_by_key.pop(key)
elif not cache.is_valid:
changed[key] = cache.get_data()
self._data_by_key.pop(key)
return changed
def reset(self):
"""Reset cache.
Note:
To clear only invalid cache items use 'clear_invalid'.
"""
self._data_by_key = {}
def set_lifetime(self, lifetime):
"""Change lifetime of all children cache items.
Args:
lifetime (int): Lifetime of the cache data in seconds.
"""
self._init_info.lifetime = lifetime
for cache in self._data_by_key.values():
cache.set_lifetime(lifetime)
@property
def is_valid(self):
"""Raise reasonable error when called on wrong level.
Raises:
AttributeError: If called on nested cache item.
"""
raise AttributeError((
"{} does not support 'is_valid'. Lower nested level by '{}'"
).format(self.__class__.__name__, self._levels))
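To make the 'clear_invalid' contract concrete, a small sketch assuming this module is imported ('time' is already imported at the top of the module):
# Sketch: 'clear_invalid' drops expired entries, returns their last data
# keyed by the nested keys, and prunes now-empty nested levels.
cache = NestedCacheItem(levels=2, default_factory=dict, lifetime=1)
cache["project"]["folder"] = {"id": 1}
time.sleep(1.1)  # let the entry expire
expired = cache.clear_invalid()
# expired == {"project": {"folder": {"id": 1}}} and the keys are removed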

View file

@ -97,6 +97,15 @@ from .context_tools import (
get_current_folder_path,
get_current_task_name
)
from .workfile import (
discover_workfile_build_plugins,
register_workfile_build_plugin,
deregister_workfile_build_plugin,
register_workfile_build_plugin_path,
deregister_workfile_build_plugin_path,
)
install = install_host
uninstall = uninstall_host
@ -198,6 +207,13 @@ __all__ = (
"get_current_folder_path",
"get_current_task_name",
# Workfile templates
"discover_workfile_build_plugins",
"register_workfile_build_plugin",
"deregister_workfile_build_plugin",
"register_workfile_build_plugin_path",
"deregister_workfile_build_plugin_path",
# Backwards compatible function names
"install",
"uninstall",

View file

@ -3,11 +3,16 @@ import re
import copy
import platform
import collections
import time
import ayon_api
from ayon_core.lib import Logger, get_local_site_id, StringTemplate
from ayon_core.lib import (
Logger,
get_local_site_id,
StringTemplate,
CacheItem,
NestedCacheItem,
)
from ayon_core.addon import AddonsManager
from .exceptions import RootCombinationError, ProjectNotSet
@ -397,62 +402,11 @@ class BaseAnatomy(object):
)
class CacheItem:
"""Helper to cache data.
Helper does not handle refresh of data and does not mark data as outdated.
Who uses the object should check of outdated state on his own will.
"""
default_lifetime = 10
def __init__(self, lifetime=None):
self._data = None
self._cached = None
self._lifetime = lifetime or self.default_lifetime
@property
def data(self):
"""Cached data/object.
Returns:
Any: Whatever was cached.
"""
return self._data
@property
def is_outdated(self):
"""Item has outdated cache.
Lifetime of cache item expired or was not yet set.
Returns:
bool: Item is outdated.
"""
if self._cached is None:
return True
return (time.time() - self._cached) > self._lifetime
def update_data(self, data):
"""Update cache of data.
Args:
data (Any): Data to cache.
"""
self._data = data
self._cached = time.time()
class Anatomy(BaseAnatomy):
_sitesync_addon_cache = CacheItem()
_project_cache = collections.defaultdict(CacheItem)
_default_site_id_cache = collections.defaultdict(CacheItem)
_root_overrides_cache = collections.defaultdict(
lambda: collections.defaultdict(CacheItem)
)
_project_cache = NestedCacheItem(lifetime=10)
_sitesync_addon_cache = CacheItem(lifetime=60)
_default_site_id_cache = NestedCacheItem(lifetime=60)
_root_overrides_cache = NestedCacheItem(2, lifetime=60)
def __init__(
self, project_name=None, site_name=None, project_entity=None
@ -477,18 +431,18 @@ class Anatomy(BaseAnatomy):
@classmethod
def get_project_entity_from_cache(cls, project_name):
project_cache = cls._project_cache[project_name]
if project_cache.is_outdated:
if not project_cache.is_valid:
project_cache.update_data(ayon_api.get_project(project_name))
return copy.deepcopy(project_cache.data)
return copy.deepcopy(project_cache.get_data())
@classmethod
def get_sitesync_addon(cls):
if cls._sitesync_addon_cache.is_outdated:
if not cls._sitesync_addon_cache.is_valid:
manager = AddonsManager()
cls._sitesync_addon_cache.update_data(
manager.get_enabled_addon("sitesync")
)
return cls._sitesync_addon_cache.data
return cls._sitesync_addon_cache.get_data()
@classmethod
def _get_studio_roots_overrides(cls, project_name):
@ -533,14 +487,14 @@ class Anatomy(BaseAnatomy):
elif not site_name:
# Use sync server to receive active site name
project_cache = cls._default_site_id_cache[project_name]
if project_cache.is_outdated:
if not project_cache.is_valid:
project_cache.update_data(
sitesync_addon.get_active_site_type(project_name)
)
site_name = project_cache.data
site_name = project_cache.get_data()
site_cache = cls._root_overrides_cache[project_name][site_name]
if site_cache.is_outdated:
if not site_cache.is_valid:
if site_name == "studio":
# Handle studio root overrides without sync server
# - studio root overrides can be done even without sync server
@ -553,4 +507,4 @@ class Anatomy(BaseAnatomy):
project_name, site_name
)
site_cache.update_data(roots_overrides)
return site_cache.data
return site_cache.get_data()
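The migration above is mechanical: 'is_outdated'/'data' becomes 'is_valid'/'get_data'. A condensed sketch of the resulting idiom, mirroring 'get_project_entity_from_cache' (the function name here is illustrative):
# Sketch of the cache idiom used in this diff: a NestedCacheItem keyed
# by project name, refreshed only when the entry expired.
import copy
import ayon_api
from ayon_core.lib import NestedCacheItem

_project_cache = NestedCacheItem(lifetime=10)

def get_project_cached(project_name):
    cache = _project_cache[project_name]
    if not cache.is_valid:
        cache.update_data(ayon_api.get_project(project_name))
    return copy.deepcopy(cache.get_data())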

View file

@ -225,6 +225,7 @@ def create_skeleton_instance(
instance_skeleton_data = {
"productType": product_type,
"productName": data["productName"],
"task": data["task"],
"families": families,
"folderPath": data["folderPath"],
"frameStart": time_data.start,

View file

@ -21,6 +21,15 @@ from .utils import (
from .build_workfile import BuildWorkfile
from .workfile_template_builder import (
discover_workfile_build_plugins,
register_workfile_build_plugin,
deregister_workfile_build_plugin,
register_workfile_build_plugin_path,
deregister_workfile_build_plugin_path,
)
__all__ = (
"get_workfile_template_key_from_context",
"get_workfile_template_key",
@ -39,4 +48,10 @@ __all__ = (
"should_open_workfiles_tool_on_launch",
"BuildWorkfile",
"discover_workfile_build_plugins",
"register_workfile_build_plugin",
"deregister_workfile_build_plugin",
"register_workfile_build_plugin_path",
"deregister_workfile_build_plugin_path",
)

View file

@ -36,6 +36,7 @@ from ayon_core.lib import (
filter_profiles,
attribute_definitions,
)
from ayon_core.lib.events import EventSystem, EventCallback, Event
from ayon_core.lib.attribute_definitions import get_attributes_keys
from ayon_core.pipeline import Anatomy
from ayon_core.pipeline.load import (
@ -43,6 +44,13 @@ from ayon_core.pipeline.load import (
get_representation_contexts,
load_with_repre_context,
)
from ayon_core.pipeline.plugin_discover import (
discover,
register_plugin,
register_plugin_path,
deregister_plugin,
deregister_plugin_path
)
from ayon_core.pipeline.create import (
discover_legacy_creator_plugins,
@ -124,6 +132,8 @@ class AbstractTemplateBuilder(object):
self._current_task_entity = _NOT_SET
self._linked_folder_entities = _NOT_SET
self._event_system = EventSystem()
@property
def project_name(self):
if isinstance(self._host, HostBase):
@ -211,10 +221,14 @@ class AbstractTemplateBuilder(object):
Returns:
List[PlaceholderPlugin]: Plugin classes available for host.
"""
plugins = []
# Backwards compatibility
if hasattr(self._host, "get_workfile_build_placeholder_plugins"):
return self._host.get_workfile_build_placeholder_plugins()
return []
plugins.extend(discover(PlaceholderPlugin))
return plugins
@property
def host(self):
@ -257,6 +271,8 @@ class AbstractTemplateBuilder(object):
self._project_settings = None
self._event_system = EventSystem()
self.clear_shared_data()
self.clear_shared_populate_data()
@ -498,15 +514,21 @@ class AbstractTemplateBuilder(object):
process if version is created
"""
template_preset = self.get_template_preset()
if template_path is None:
template_path = template_preset["path"]
if keep_placeholders is None:
keep_placeholders = template_preset["keep_placeholder"]
if create_first_version is None:
create_first_version = template_preset["create_first_version"]
if any(
value is None
for value in [
template_path,
keep_placeholders,
create_first_version,
]
):
template_preset = self.get_template_preset()
if template_path is None:
template_path = template_preset["path"]
if keep_placeholders is None:
keep_placeholders = template_preset["keep_placeholder"]
if create_first_version is None:
create_first_version = template_preset["create_first_version"]
# check if first version is created
created_version_workfile = False
@ -729,6 +751,16 @@ class AbstractTemplateBuilder(object):
placeholder.set_finished()
# Trigger on_depth_processed event
self.emit_event(
topic="template.depth_processed",
data={
"depth": iter_counter,
"placeholders_by_scene_id": placeholder_by_scene_id
},
source="builder"
)
# Clear shared data before getting new placeholders
self.clear_shared_populate_data()
@ -747,6 +779,16 @@ class AbstractTemplateBuilder(object):
placeholder_by_scene_id[identifier] = placeholder
placeholders.append(placeholder)
# Trigger on_finished event
self.emit_event(
topic="template.finished",
data={
"depth": iter_counter,
"placeholders_by_scene_id": placeholder_by_scene_id,
},
source="builder"
)
self.refresh()
def _get_build_profiles(self):
@ -772,12 +814,14 @@ class AbstractTemplateBuilder(object):
- 'project_settings/{host name}/templated_workfile_build/profiles'
Returns:
str: Path to a template file with placeholders.
dict: Dictionary with `path`, `keep_placeholder` and
`create_first_version` settings from the template preset
for current context.
Raises:
TemplateProfileNotFound: When profiles are not filled.
TemplateLoadFailed: Profile was found but path is not set.
TemplateNotFound: Path was set but file does not exists.
TemplateNotFound: Path was set but file does not exist.
"""
host_name = self.host_name
@ -872,6 +916,30 @@ class AbstractTemplateBuilder(object):
"create_first_version": create_first_version
}
def emit_event(self, topic, data=None, source=None) -> Event:
return self._event_system.emit(topic, data, source)
def add_event_callback(self, topic, callback, order=None):
return self._event_system.add_callback(topic, callback, order=order)
def add_on_finished_callback(
self, callback, order=None
) -> EventCallback:
return self.add_event_callback(
topic="template.finished",
callback=callback,
order=order
)
def add_on_depth_processed_callback(
self, callback, order=None
) -> EventCallback:
return self.add_event_callback(
topic="template.depth_processed",
callback=callback,
order=order
)
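A sketch of how a tool could hook into the new builder events; the print statements are illustrative, while the class and method names come from this changeset:
# Sketch: subscribing to the new template build events.
def _on_depth_processed(event):
    # 'event.data' carries the payload emitted above.
    print("Processed template depth {}".format(event.data["depth"]))

builder = NukeTemplateBuilder(registered_host())
builder.add_on_depth_processed_callback(_on_depth_processed)
builder.add_on_finished_callback(lambda event: print("Build finished"))
builder.build_template()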
@six.add_metaclass(ABCMeta)
class PlaceholderPlugin(object):
@ -1904,3 +1972,23 @@ class CreatePlaceholderItem(PlaceholderItem):
def create_failed(self, creator_data):
self._failed_created_publish_instances.append(creator_data)
def discover_workfile_build_plugins(*args, **kwargs):
return discover(PlaceholderPlugin, *args, **kwargs)
def register_workfile_build_plugin(plugin: PlaceholderPlugin):
register_plugin(PlaceholderPlugin, plugin)
def deregister_workfile_build_plugin(plugin: PlaceholderPlugin):
deregister_plugin(PlaceholderPlugin, plugin)
def register_workfile_build_plugin_path(path: str):
register_plugin_path(PlaceholderPlugin, path)
def deregister_workfile_build_plugin_path(path: str):
deregister_plugin_path(PlaceholderPlugin, path)
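Combined with the discovery fallback above, hosts can now register their placeholder plugins through the generic plugin system instead of implementing 'get_workfile_build_placeholder_plugins'; a sketch with an illustrative path:
# Sketch: registering host placeholder plugins via the new functions.
# The directory path below is a hypothetical example.
import os
from ayon_core.pipeline import register_workfile_build_plugin_path

HOST_PLUGINS_DIR = "/path/to/host/plugins"  # hypothetical
register_workfile_build_plugin_path(
    os.path.join(HOST_PLUGINS_DIR, "workfile_build")
)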

View file

@ -1,239 +1,31 @@
import time
import collections
import warnings
InitInfo = collections.namedtuple(
"InitInfo",
["default_factory", "lifetime"]
from ayon_core.lib import CacheItem as _CacheItem
from ayon_core.lib import NestedCacheItem as _NestedCacheItem
# Cache classes were moved to `ayon_core.lib.cache`
class CacheItem(_CacheItem):
def __init__(self, *args, **kwargs):
warnings.warn(
"Used 'CacheItem' from deprecated location "
"'ayon_core.tools.common_models', use 'ayon_core.lib' instead.",
DeprecationWarning,
)
super().__init__(*args, **kwargs)
class NestedCacheItem(_NestedCacheItem):
def __init__(self, *args, **kwargs):
warnings.warn(
"Used 'NestedCacheItem' from deprecated location "
"'ayon_core.tools.common_models', use 'ayon_core.lib' instead.",
DeprecationWarning,
)
super().__init__(*args, **kwargs)
__all__ = (
"CacheItem",
"NestedCacheItem",
)
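A sketch of the observable behavior of the shim above; the import location follows the warning message text, and capturing warnings is illustrative:
# Sketch: instantiating from the deprecated location still works,
# but emits a DeprecationWarning pointing at 'ayon_core.lib'.
import warnings
from ayon_core.tools.common_models import CacheItem

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    CacheItem()
assert any(w.category is DeprecationWarning for w in caught)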
def _default_factory_func():
return None
class CacheItem:
"""Simple cache item with lifetime and default value.
Args:
default_factory (Optional[callable]): Function that returns default
value used on init and on reset.
lifetime (Optional[int]): Lifetime of the cache data in seconds.
"""
def __init__(self, default_factory=None, lifetime=None):
if lifetime is None:
lifetime = 120
self._lifetime = lifetime
self._last_update = None
if default_factory is None:
default_factory = _default_factory_func
self._default_factory = default_factory
self._data = default_factory()
@property
def is_valid(self):
"""Is cache valid to use.
Return:
bool: True if cache is valid, False otherwise.
"""
if self._last_update is None:
return False
return (time.time() - self._last_update) < self._lifetime
def set_lifetime(self, lifetime):
"""Change lifetime of cache item.
Args:
lifetime (int): Lifetime of the cache data in seconds.
"""
self._lifetime = lifetime
def set_invalid(self):
"""Set cache as invalid."""
self._last_update = None
def reset(self):
"""Set cache as invalid and reset data."""
self._last_update = None
self._data = self._default_factory()
def get_data(self):
"""Receive cached data.
Returns:
Any: Any data that are cached.
"""
return self._data
def update_data(self, data):
self._data = data
self._last_update = time.time()
class NestedCacheItem:
"""Helper for cached items stored in nested structure.
Example:
>>> cache = NestedCacheItem(levels=2, default_factory=lambda: 0)
>>> cache["a"]["b"].is_valid
False
>>> cache["a"]["b"].get_data()
0
>>> cache["a"]["b"] = 1
>>> cache["a"]["b"].is_valid
True
>>> cache["a"]["b"].get_data()
1
>>> cache.reset()
>>> cache["a"]["b"].is_valid
False
Args:
levels (int): Number of nested levels where read cache is stored.
default_factory (Optional[callable]): Function that returns default
value used on init and on reset.
lifetime (Optional[int]): Lifetime of the cache data in seconds.
_init_info (Optional[InitInfo]): Private argument. Init info for
nested cache where created from parent item.
"""
def __init__(
self, levels=1, default_factory=None, lifetime=None, _init_info=None
):
if levels < 1:
raise ValueError("Nested levels must be greater than 0")
self._data_by_key = {}
if _init_info is None:
_init_info = InitInfo(default_factory, lifetime)
self._init_info = _init_info
self._levels = levels
def __getitem__(self, key):
"""Get cached data.
Args:
key (str): Key of the cache item.
Returns:
Union[NestedCacheItem, CacheItem]: Cache item.
"""
cache = self._data_by_key.get(key)
if cache is None:
if self._levels > 1:
cache = NestedCacheItem(
levels=self._levels - 1,
_init_info=self._init_info
)
else:
cache = CacheItem(
self._init_info.default_factory,
self._init_info.lifetime
)
self._data_by_key[key] = cache
return cache
def __setitem__(self, key, value):
"""Update cached data.
Args:
key (str): Key of the cache item.
value (Any): Any data that are cached.
"""
if self._levels > 1:
raise AttributeError((
"{} does not support '__setitem__'. Lower nested level by {}"
).format(self.__class__.__name__, self._levels - 1))
cache = self[key]
cache.update_data(value)
def get(self, key):
"""Get cached data.
Args:
key (str): Key of the cache item.
Returns:
Union[NestedCacheItem, CacheItem]: Cache item.
"""
return self[key]
def cached_count(self):
"""Amount of cached items.
Returns:
int: Amount of cached items.
"""
return len(self._data_by_key)
def clear_key(self, key):
"""Clear cached item by key.
Args:
key (str): Key of the cache item.
"""
self._data_by_key.pop(key, None)
def clear_invalid(self):
"""Clear all invalid cache items.
Note:
To clear all cache items use 'reset'.
"""
changed = {}
children_are_nested = self._levels > 1
for key, cache in tuple(self._data_by_key.items()):
if children_are_nested:
output = cache.clear_invalid()
if output:
changed[key] = output
if not cache.cached_count():
self._data_by_key.pop(key)
elif not cache.is_valid:
changed[key] = cache.get_data()
self._data_by_key.pop(key)
return changed
def reset(self):
"""Reset cache.
Note:
To clear only invalid cache items use 'clear_invalid'.
"""
self._data_by_key = {}
def set_lifetime(self, lifetime):
"""Change lifetime of all children cache items.
Args:
lifetime (int): Lifetime of the cache data in seconds.
"""
self._init_info.lifetime = lifetime
for cache in self._data_by_key.values():
cache.set_lifetime(lifetime)
@property
def is_valid(self):
"""Raise reasonable error when called on wront level.
Raises:
AttributeError: If called on nested cache item.
"""
raise AttributeError((
"{} does not support 'is_valid'. Lower nested level by '{}'"
).format(self.__class__.__name__, self._levels))

View file

@ -6,8 +6,7 @@ import ayon_api
import six
from ayon_core.style import get_default_entity_icon_color
from .cache import NestedCacheItem
from ayon_core.lib import NestedCacheItem
HIERARCHY_MODEL_SENDER = "hierarchy.model"

View file

@ -5,8 +5,7 @@ import ayon_api
import six
from ayon_core.style import get_default_entity_icon_color
from .cache import CacheItem
from ayon_core.lib import CacheItem
PROJECTS_MODEL_SENDER = "projects.model"

View file

@ -5,7 +5,7 @@ import collections
import ayon_api
import appdirs
from .cache import NestedCacheItem
from ayon_core.lib import NestedCacheItem
FileInfo = collections.namedtuple(
"FileInfo",

View file

@ -6,6 +6,7 @@ import uuid
import ayon_api
from ayon_core.lib import NestedCacheItem
from ayon_core.pipeline.load import (
discover_loader_plugins,
ProductLoaderPlugin,
@ -17,7 +18,6 @@ from ayon_core.pipeline.load import (
LoadError,
IncompatibleLoaderError,
)
from ayon_core.tools.common_models import NestedCacheItem
from ayon_core.tools.loader.abstract import ActionItem
ACTIONS_MODEL_SENDER = "actions.model"

View file

@ -5,8 +5,8 @@ import arrow
import ayon_api
from ayon_api.operations import OperationsSession
from ayon_core.lib import NestedCacheItem
from ayon_core.style import get_default_entity_icon_color
from ayon_core.tools.common_models import NestedCacheItem
from ayon_core.tools.loader.abstract import (
ProductTypeItem,
ProductItem,

View file

@ -2,9 +2,8 @@ import collections
from ayon_api import get_representations, get_versions_links
from ayon_core.lib import Logger
from ayon_core.lib import Logger, NestedCacheItem
from ayon_core.addon import AddonsManager
from ayon_core.tools.common_models import NestedCacheItem
from ayon_core.tools.loader.abstract import ActionItem
DOWNLOAD_IDENTIFIER = "sitesync.download"

View file

@ -106,7 +106,7 @@ line-ending = "auto"
[tool.codespell]
# Ignore words that are not in the dictionary.
ignore-words-list = "ayon,ynput,parms,parm,hda,developpement"
ignore-words-list = "ayon,ynput,parms,parm,hda,developpement,ue"
skip = "./.*,./package/*,*/vendor/*,*/unreal/integration/*,*/aftereffects/api/extension/js/libs/*"
count = true

View file

@ -110,6 +110,26 @@ class ApplicationsAddon(AYONAddon, IPluginPaths):
]
}
def launch_application(
self, app_name, project_name, folder_path, task_name
):
"""Launch application.
Args:
app_name (str): Full application name e.g. 'maya/2024'.
project_name (str): Project name.
folder_path (str): Folder path.
task_name (str): Task name.
"""
app_manager = self.get_applications_manager()
return app_manager.launch(
app_name,
project_name=project_name,
folder_path=folder_path,
task_name=task_name,
)
# --- CLI ---
def cli(self, addon_click_group):
main_group = click_wrap.group(
@ -134,6 +154,17 @@ class ApplicationsAddon(AYONAddon, IPluginPaths):
default=None
)
)
(
main_group.command(
self._cli_launch_applications,
name="launch",
help="Launch application"
)
.option("--app", required=True, help="Application name")
.option("--project", required=True, help="Project name")
.option("--folder", required=True, help="Folder path")
.option("--task", required=True, help="Task name")
)
# Convert main command to click object and add it to parent group
addon_click_group.add_command(
main_group.to_click_obj()
@ -171,3 +202,15 @@ class ApplicationsAddon(AYONAddon, IPluginPaths):
with open(output_json_path, "w") as file_stream:
json.dump(env, file_stream, indent=4)
def _cli_launch_applications(self, project, folder, task, app):
"""Launch application.
Args:
project (str): Project name.
folder (str): Folder path.
task (str): Task name.
app (str): Full application name e.g. 'maya/2024'.
"""
self.launch_application(app, project, folder, task)
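A programmatic sketch equivalent to the CLI command wired above; the 'get_enabled_addon' usage mirrors the sitesync lookup earlier in this changeset, and the argument values are illustrative:
# Sketch: launching an application through the addon method added above.
from ayon_core.addon import AddonsManager

manager = AddonsManager()
apps_addon = manager.get_enabled_addon("applications")
apps_addon.launch_application(
    "maya/2024",                # full application name
    "MyProject",                # project name (illustrative)
    "/assets/characters/hero",  # folder path (illustrative)
    "modeling",                 # task name (illustrative)
)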

View file

@ -1,3 +1,10 @@
name = "applications"
title = "Applications"
version = "0.2.0"
version = "0.2.1"
ayon_server_version = ">=1.0.7"
ayon_launcher_version = ">=1.0.2"
ayon_required_addons = {
"core": ">0.3.0",
}
ayon_compatible_addons = {}

View file

@ -1,3 +1,3 @@
name = "deadline"
title = "Deadline"
version = "0.1.10"
version = "0.1.11"

View file

@ -191,7 +191,6 @@ class NukeSubmitDeadlineModel(BaseSettingsModel):
@validator(
"limit_groups",
"env_allowed_keys",
"env_search_replace_values")
def validate_unique_names(cls, value):
ensure_unique_names(value)

View file

@ -1,3 +1,3 @@
name = "hiero"
title = "Hiero"
version = "0.1.2"
version = "0.1.3"

View file

@ -149,15 +149,15 @@ class ImageIOSettings(BaseSettingsModel):
DEFAULT_IMAGEIO_SETTINGS = {
"workfile": {
"ocioConfigName": "nuke-default",
"workingSpace": "linear",
"viewerLut": "sRGB",
"eightBitLut": "sRGB",
"sixteenBitLut": "sRGB",
"logLut": "Cineon",
"floatLut": "linear",
"thumbnailLut": "sRGB",
"monitorOutLut": "sRGB"
"ocioConfigName": "aces_1.2",
"workingSpace": "role_scene_linear",
"viewerLut": "ACES/sRGB",
"eightBitLut": "role_matte_paint",
"sixteenBitLut": "role_texture_paint",
"logLut": "role_compositing_log",
"floatLut": "role_scene_linear",
"thumbnailLut": "ACES/sRGB",
"monitorOutLut": "ACES/sRGB"
},
"regexInputs": {
"inputs": [

View file

@ -1,3 +1,3 @@
name = "maya"
title = "Maya"
version = "0.1.16"
version = "0.1.17"

View file

@ -35,6 +35,51 @@ def angular_unit_enum():
]
def extract_alembic_data_format_enum():
return [
{"label": "ogawa", "value": "ogawa"},
{"label": "HDF", "value": "HDF"}
]
def extract_alembic_overrides_enum():
return [
{"label": "Custom Attributes", "value": "attr"},
{"label": "Custom Attributes Prefix", "value": "attrPrefix"},
{"label": "Auto Subd", "value": "autoSubd"},
{"label": "Data Format", "value": "dataFormat"},
{"label": "Euler Filter", "value": "eulerFilter"},
{"label": "Mel Per Frame Callback", "value": "melPerFrameCallback"},
{"label": "Mel Post Job Callback", "value": "melPostJobCallback"},
{"label": "Pre Roll", "value": "preRoll"},
{"label": "Pre Roll Start Frame", "value": "preRollStartFrame"},
{
"label": "Python Per Frame Callback",
"value": "pythonPerFrameCallback"
},
{
"label": "Python Post Job Callback",
"value": "pythonPostJobCallback"
},
{"label": "Renderable Only", "value": "renderableOnly"},
{"label": "Strip Namespaces", "value": "stripNamespaces"},
{"label": "User Attr", "value": "userAttr"},
{"label": "User Attr Prefix", "value": "userAttrPrefix"},
{"label": "UV Write", "value": "uvWrite"},
{"label": "UVs Only", "value": "uvsOnly"},
{"label": "Verbose", "value": "verbose"},
{"label": "Visible Only", "value": "visibleOnly"},
{"label": "Whole Frame Geo", "value": "wholeFrameGeo"},
{"label": "World Space", "value": "worldSpace"},
{"label": "Write Color Sets", "value": "writeColorSets"},
{"label": "Write Creases", "value": "writeCreases"},
{"label": "Write Face Sets", "value": "writeFaceSets"},
{"label": "Write Normals", "value": "writeNormals"},
{"label": "Write UV Sets", "value": "writeUVSets"},
{"label": "Write Visibility", "value": "writeVisibility"}
]
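The enum resolver above is presumably consumed by a multi-select settings field, following the same pattern as the 'dataFormat' field later in this model; the field name 'overrides' here is an assumption:
# Sketch of how the resolver would back a settings field; the exact
# field name and placement are assumptions.
overrides: list[str] = SettingsField(
    default_factory=list,
    enum_resolver=extract_alembic_overrides_enum,
    title="Exposed Overrides",
)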
class BasicValidateModel(BaseSettingsModel):
enabled: bool = SettingsField(title="Enabled")
optional: bool = SettingsField(title="Optional")
@ -184,7 +229,7 @@ class ValidateAttributesModel(BaseSettingsModel):
if not success:
raise BadRequestException(
"The attibutes can't be parsed as json object"
"The attributes can't be parsed as json object"
)
return value
@ -220,7 +265,7 @@ class ValidateUnrealStaticMeshNameModel(BaseSettingsModel):
enabled: bool = SettingsField(title="ValidateUnrealStaticMeshName")
optional: bool = SettingsField(title="Optional")
validate_mesh: bool = SettingsField(title="Validate mesh names")
validate_collision: bool = SettingsField(title="Validate collison names")
validate_collision: bool = SettingsField(title="Validate collision names")
class ValidateCycleErrorModel(BaseSettingsModel):
@ -243,7 +288,7 @@ class ValidatePluginPathAttributesModel(BaseSettingsModel):
and the node attribute is <b>abc_file</b>
"""
enabled: bool = True
enabled: bool = SettingsField(title="Enabled")
optional: bool = SettingsField(title="Optional")
active: bool = SettingsField(title="Active")
attribute: list[ValidatePluginPathAttributesAttrModel] = SettingsField(
@ -265,6 +310,9 @@ class RendererAttributesModel(BaseSettingsModel):
class ValidateRenderSettingsModel(BaseSettingsModel):
enabled: bool = SettingsField(title="Enabled")
optional: bool = SettingsField(title="Optional")
active: bool = SettingsField(title="Active")
arnold_render_attributes: list[RendererAttributesModel] = SettingsField(
default_factory=list, title="Arnold Render Attributes")
vray_render_attributes: list[RendererAttributesModel] = SettingsField(
@ -299,6 +347,115 @@ class ExtractAlembicModel(BaseSettingsModel):
families: list[str] = SettingsField(
default_factory=list,
title="Families")
autoSubd: bool = SettingsField(
title="Auto Subd",
description=(
"If this flag is present and the mesh has crease edges, crease "
"vertices or holes, the mesh (OPolyMesh) would now be written out "
"as an OSubD and crease info will be stored in the Alembic file. "
"Otherwise, creases info won't be preserved in Alembic file unless"
" a custom Boolean attribute SubDivisionMesh has been added to "
"mesh node and its value is true."
)
)
eulerFilter: bool = SettingsField(
title="Euler Filter",
description="Apply Euler filter while sampling rotations."
)
renderableOnly: bool = SettingsField(
title="Renderable Only",
description="Only export renderable visible shapes."
)
stripNamespaces: bool = SettingsField(
title="Strip Namespaces",
description=(
"Namespaces will be stripped off of the node before being written "
"to Alembic."
)
)
uvsOnly: bool = SettingsField(
title="UVs Only",
description=(
"If this flag is present, only uv data for PolyMesh and SubD "
"shapes will be written to the Alembic file."
)
)
uvWrite: bool = SettingsField(
title="UV Write",
description=(
"Uv data for PolyMesh and SubD shapes will be written to the "
"Alembic file."
)
)
verbose: bool = SettingsField(
title="Verbose",
description="Prints the current frame that is being evaluated."
)
visibleOnly: bool = SettingsField(
title="Visible Only",
description="Only export dag objects visible during frame range."
)
wholeFrameGeo: bool = SettingsField(
title="Whole Frame Geo",
description=(
"Data for geometry will only be written out on whole frames."
)
)
worldSpace: bool = SettingsField(
title="World Space",
description="Any root nodes will be stored in world space."
)
writeColorSets: bool = SettingsField(
title="Write Color Sets",
description="Write vertex colors with the geometry."
)
writeFaceSets: bool = SettingsField(
title="Write Face Sets",
description="Write face sets with the geometry."
)
writeNormals: bool = SettingsField(
title="Write Normals",
description="Write normals with the deforming geometry."
)
writeUVSets: bool = SettingsField(
title="Write UV Sets",
description=(
"Write all uv sets on MFnMeshes as vector 2 indexed geometry "
"parameters with face varying scope."
)
)
writeVisibility: bool = SettingsField(
title="Write Visibility",
description=(
"Visibility state will be stored in the Alembic file. Otherwise "
"everything written out is treated as visible."
)
)
preRoll: bool = SettingsField(
title="Pre Roll",
description=(
"When enabled, the pre roll start frame is used to pre roll the "
"When enabled, the pre roll start frame is used to being the "
"evaluation of the mesh. From the pre roll start frame to the "
"alembic start frame, will not be written to disk. This can be "
"used for simulation run up."
)
)
preRollStartFrame: int = SettingsField(
title="Pre Roll Start Frame",
description=(
"The frame to start scene evaluation at. This is used to set the "
"starting frame for time dependent translations and can be used to"
" evaluate run-up that isn't actually translated.\n"
"NOTE: Pre Roll needs to be enabled for this start frame "
"to be considered."
)
)
dataFormat: str = SettingsField(
enum_resolver=extract_alembic_data_format_enum,
title="Data Format",
description="The data format to use to write the file."
)
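
Taken together, the boolean toggles, the pre roll pair and dataFormat mirror arguments of Maya's AbcExport command. The sketch below shows how an extractor might assemble them; the build_abc_job_args helper and the exact flag spellings are illustrative assumptions (AbcExport spells a few differently, e.g. writeNormals is exposed as -noNormals), not the extractor's real code:

def build_abc_job_args(settings, start, end, out_path):
    # Sketch only: enabled boolean settings become bare AbcExport-style flags.
    toggles = ("eulerFilter", "renderableOnly", "stripNamespaces", "uvWrite",
               "verbose", "wholeFrameGeo", "worldSpace", "writeColorSets",
               "writeFaceSets", "writeUVSets", "writeVisibility")
    args = ["-frameRange", str(start), str(end)]
    args += ["-" + flag for flag in toggles if settings.get(flag)]
    if settings.get("preRoll"):
        # preRollStartFrame is only honored when preRoll itself is enabled.
        args += ["-preRollStartFrame", str(settings["preRollStartFrame"])]
    args += ["-dataFormat", settings["dataFormat"], "-file", out_path]
    return args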
bake_attributes: list[str] = SettingsField(
default_factory=list, title="Bake Attributes",
description="List of attributes that will be included in the alembic "
@ -309,6 +466,73 @@ class ExtractAlembicModel(BaseSettingsModel):
description="List of attribute prefixes for attributes that will be "
"included in the alembic export.",
)
attr: str = SettingsField(
title="Custom Attributes",
placeholder="attr1;attr2",
description=(
"Attributes matching by name will be included in the Alembic "
"export. Attributes should be separated by semi-colon `;`"
)
)
attrPrefix: str = SettingsField(
title="Custom Attributes Prefix",
placeholder="prefix1;prefix2",
description=(
"Attributes starting with these prefixes will be included in the "
"Alembic export. Attributes should be separated by semi-colon `;`"
)
)
userAttr: str = SettingsField(
title="User Attr",
placeholder="attr1;attr2",
description=(
"Attributes matching by name will be included in the Alembic "
"export. Attributes should be separated by semi-colon `;`"
)
)
userAttrPrefix: str = SettingsField(
title="User Attr Prefix",
placeholder="prefix1;prefix2",
description=(
"Attributes starting with these prefixes will be included in the "
"Alembic export. Attributes should be separated by semi-colon `;`"
)
)
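
The four attribute fields above share the semicolon convention, which implies a plain split when the values are turned into per-attribute flags. A hedged sketch (the helper name and flag mapping are assumptions for illustration):

def split_attr_setting(value, flag):
    # Turn "attr1;attr2" into one flag per non-empty entry,
    # tolerating stray separators and surrounding whitespace.
    return [arg for name in value.split(";") if name.strip()
            for arg in ("-" + flag, name.strip())]

# split_attr_setting("pivot;fps", "attr") -> ["-attr", "pivot", "-attr", "fps"]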
melPerFrameCallback: str = SettingsField(
title="Mel Per Frame Callback",
description=(
"When each frame (and the static frame) is evaluated the string "
"specified is evaluated as a Mel command."
)
)
melPostJobCallback: str = SettingsField(
title="Mel Post Job Callback",
description=(
"When the translation has finished the string specified is "
"evaluated as a Mel command."
)
)
pythonPerFrameCallback: str = SettingsField(
title="Python Per Frame Callback",
description=(
"When each frame (and the static frame) is evaluated the string "
"specified is evaluated as a python command."
)
)
pythonPostJobCallback: str = SettingsField(
title="Python Post Job Callback",
description=(
"When the translation has finished the string specified is "
"evaluated as a python command."
)
)
overrides: list[str] = SettingsField(
enum_resolver=extract_alembic_overrides_enum,
title="Exposed Overrides",
description=(
"Expose the attribute in this list to the user when publishing."
)
)
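
The "Exposed Overrides" list controls which of the extractor's attributes the artist may change at publish time; anything not listed stays locked to the studio default. A sketch of the filtering this implies (hypothetical helper, not the publisher's actual code):

def exposed_overrides(settings):
    # Only keys named in "overrides" are surfaced in the publisher UI;
    # the remaining settings keep their studio-default values.
    return {key: settings[key]
            for key in settings.get("overrides", [])
            if key in settings}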
class ExtractObjModel(BaseSettingsModel):
@ -392,7 +616,7 @@ class ExtractGPUCacheModel(BaseSettingsModel):
title="Optimize Animations For Motion Blur"
)
writeMaterials: bool = SettingsField(title="Write Materials")
useBaseTessellation: bool = SettingsField(title="User Base Tesselation")
useBaseTessellation: bool = SettingsField(title="User Based Tessellation")
class PublishersModel(BaseSettingsModel):
@ -668,15 +892,19 @@ class PublishersModel(BaseSettingsModel):
default_factory=BasicValidateModel,
title="Validate Alembic Visible Node",
)
ValidateAlembicDefaultsPointcache: BasicValidateModel = SettingsField(
default_factory=BasicValidateModel,
title="Validate Alembic Defaults Pointcache"
)
ValidateAlembicDefaultsAnimation: BasicValidateModel = SettingsField(
default_factory=BasicValidateModel,
title="Validate Alembic Defaults Animation"
)
ExtractProxyAlembic: ExtractProxyAlembicModel = SettingsField(
default_factory=ExtractProxyAlembicModel,
title="Extract Proxy Alembic",
section="Model Extractors",
)
ExtractAlembic: ExtractAlembicModel = SettingsField(
default_factory=ExtractAlembicModel,
title="Extract Alembic",
)
ExtractObj: ExtractObjModel = SettingsField(
default_factory=ExtractObjModel,
title="Extract OBJ"
@ -811,6 +1039,10 @@ class PublishersModel(BaseSettingsModel):
default_factory=ExtractModelModel,
title="Extract Model (Maya Scene)"
)
ExtractAlembic: ExtractAlembicModel = SettingsField(
default_factory=ExtractAlembicModel,
title="Extract Alembic"
)
DEFAULT_SUFFIX_NAMING = {
@ -942,6 +1174,9 @@ DEFAULT_PUBLISH_SETTINGS = {
]
},
"ValidateRenderSettings": {
"enabled": True,
"active": True,
"optional": False,
"arnold_render_attributes": [],
"vray_render_attributes": [],
"redshift_render_attributes": [],
@ -1200,16 +1435,6 @@ DEFAULT_PUBLISH_SETTINGS = {
"proxyAbc"
]
},
"ExtractAlembic": {
"enabled": True,
"families": [
"pointcache",
"model",
"vrayproxy.alembic"
],
"bake_attributes": [],
"bake_attribute_prefixes": []
},
"ExtractObj": {
"enabled": False,
"optional": True,
@ -1330,6 +1555,16 @@ DEFAULT_PUBLISH_SETTINGS = {
"optional": False,
"validate_shapes": True
},
"ValidateAlembicDefaultsPointcache": {
"enabled": True,
"optional": True,
"active": True
},
"ValidateAlembicDefaultsAnimation": {
"enabled": True,
"optional": True,
"active": True
},
"ExtractPlayblast": DEFAULT_PLAYBLAST_SETTING,
"ExtractMayaSceneRaw": {
"enabled": True,
@ -1371,6 +1606,52 @@ DEFAULT_PUBLISH_SETTINGS = {
"ExtractModel": {
"enabled": True,
"optional": True,
"active": True,
"active": True
},
"ExtractAlembic": {
"enabled": True,
"families": [
"pointcache",
"model",
"vrayproxy.alembic"
],
"attr": "",
"attrPrefix": "",
"autoSubd": False,
"bake_attributes": [],
"bake_attribute_prefixes": [],
"dataFormat": "ogawa",
"eulerFilter": False,
"melPerFrameCallback": "",
"melPostJobCallback": "",
"overrides": [
"attr",
"attrPrefix",
"renderableOnly",
"visibleOnly",
"worldSpace",
"writeColorSets",
"writeFaceSets",
"writeNormals"
],
"preRoll": False,
"preRollStartFrame": 0,
"pythonPerFrameCallback": "",
"pythonPostJobCallback": "",
"renderableOnly": False,
"stripNamespaces": True,
"uvsOnly": False,
"uvWrite": False,
"userAttr": "",
"userAttrPrefix": "",
"verbose": False,
"visibleOnly": False,
"wholeFrameGeo": False,
"worldSpace": True,
"writeColorSets": False,
"writeFaceSets": False,
"writeNormals": True,
"writeUVSets": False,
"writeVisibility": False
}
}
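
With ExtractAlembic now carrying a complete default block, a quick round-trip through the model is a cheap sanity check. A hedged snippet (it assumes the elided enabled/optional fields exist on ExtractAlembicModel as they do on the other extractors):

# Hypothetical check that the shipped defaults satisfy the model schema.
model = ExtractAlembicModel(**DEFAULT_PUBLISH_SETTINGS["ExtractAlembic"])
assert model.dataFormat == "ogawa"
assert "worldSpace" in model.overrides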

View file

@ -1,3 +1,3 @@
name = "nuke"
title = "Nuke"
version = "0.1.10"
version = "0.1.11"

View file

@ -97,8 +97,23 @@ class WorkfileColorspaceSettings(BaseSettingsModel):
working_space: str = SettingsField(
title="Working Space"
)
thumbnail_space: str = SettingsField(
title="Thumbnail Space"
monitor_lut: str = SettingsField(
title="Thumbnails"
)
monitor_out_lut: str = SettingsField(
title="Monitor Out"
)
int_8_lut: str = SettingsField(
title="8-bit Files"
)
int_16_lut: str = SettingsField(
title="16-bit Files"
)
log_lut: str = SettingsField(
title="Log Files"
)
float_lut: str = SettingsField(
title="Float Files"
)
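
The six LUT roles replace the single thumbnail_space knob and line up one-to-one with the LUT controls in Nuke's Project Settings. A hedged sketch of pushing them onto the root node; the knob names are the classic Nuke root LUT knobs and should be verified against your Nuke version (monitor_out_lut is omitted because its knob differs across versions):

import nuke

KNOB_MAP = {
    # AYON setting -> Nuke root knob (assumed classic names)
    "working_space": "workingSpaceLUT",
    "monitor_lut": "monitorLut",
    "int_8_lut": "int8Lut",
    "int_16_lut": "int16Lut",
    "log_lut": "logLut",
    "float_lut": "floatLut",
}

def apply_workfile_colorspace(settings):
    root = nuke.root()
    for key, knob in KNOB_MAP.items():
        root[knob].setValue(settings[key])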
@ -120,6 +135,9 @@ class ViewProcessModel(BaseSettingsModel):
viewerProcess: str = SettingsField(
title="Viewer Process Name"
)
output_transform: str = SettingsField(
title="Output Transform"
)
class ImageIOConfigModel(BaseSettingsModel):
@ -214,16 +232,23 @@ class ImageIOSettings(BaseSettingsModel):
DEFAULT_IMAGEIO_SETTINGS = {
"viewer": {
"viewerProcess": "sRGB (default)"
"viewerProcess": "ACES/sRGB",
"output_transform": "ACES/sRGB"
},
"baking": {
"viewerProcess": "rec709 (default)"
"viewerProcess": "ACES/Rec.709",
"output_transform": "ACES/Rec.709"
},
"workfile": {
"color_management": "OCIO",
"native_ocio_config": "nuke-default",
"working_space": "scene_linear",
"thumbnail_space": "sRGB (default)",
"native_ocio_config": "aces_1.2",
"working_space": "role_scene_linear",
"monitor_lut": "ACES/sRGB",
"monitor_out_lut": "ACES/sRGB",
"int_8_lut": "role_matte_paint",
"int_16_lut": "role_texture_paint",
"log_lut": "role_compositing_log",
"float_lut": "role_scene_linear"
},
"nodes": {
"required_nodes": [