Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Merge branch 'develop' into bugfix/task_deadline
commit 98a27e02fc
53 changed files with 3162 additions and 1376 deletions
@@ -8,14 +8,11 @@ from ayon_core.lib import Logger, register_event_callback
 from ayon_core.pipeline import (
     register_loader_plugin_path,
     register_creator_plugin_path,
+    register_workfile_build_plugin_path,
     AVALON_CONTAINER_ID,
     AVALON_INSTANCE_ID,
     AYON_INSTANCE_ID,
 )
-from ayon_core.hosts.aftereffects.api.workfile_template_builder import (
-    AEPlaceholderLoadPlugin,
-    AEPlaceholderCreatePlugin
-)
 from ayon_core.pipeline.load import any_outdated_containers
 import ayon_core.hosts.aftereffects
@@ -40,6 +37,7 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
 PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
 LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
 CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
+WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build")


 class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
@@ -76,6 +74,7 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):

         register_loader_plugin_path(LOAD_PATH)
         register_creator_plugin_path(CREATE_PATH)
+        register_workfile_build_plugin_path(WORKFILE_BUILD_PATH)

         register_event_callback("application.launched", application_launch)
@@ -118,12 +117,6 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
         item["id"] = "publish_context"
         self.stub.imprint(item["id"], item)

-    def get_workfile_build_placeholder_plugins(self):
-        return [
-            AEPlaceholderLoadPlugin,
-            AEPlaceholderCreatePlugin
-        ]
-
     # created instances section
     def list_instances(self):
         """List all created instances from current workfile which
@@ -1,6 +1,7 @@
 import os.path
 import uuid
 import shutil
+from abc import abstractmethod

 from ayon_core.pipeline import registered_host
 from ayon_core.tools.workfile_template_build import (
@@ -9,13 +10,9 @@ from ayon_core.tools.workfile_template_build import (
 from ayon_core.pipeline.workfile.workfile_template_builder import (
     AbstractTemplateBuilder,
     PlaceholderPlugin,
-    LoadPlaceholderItem,
-    CreatePlaceholderItem,
-    PlaceholderLoadMixin,
-    PlaceholderCreateMixin
+    PlaceholderItem
 )
 from ayon_core.hosts.aftereffects.api import get_stub
 from ayon_core.hosts.aftereffects.api.lib import set_settings

 PLACEHOLDER_SET = "PLACEHOLDERS_SET"
 PLACEHOLDER_ID = "openpype.placeholder"
@@ -51,6 +48,10 @@ class AETemplateBuilder(AbstractTemplateBuilder):
 class AEPlaceholderPlugin(PlaceholderPlugin):
     """Contains generic methods for all PlaceholderPlugins."""

+    @abstractmethod
+    def _create_placeholder_item(self, item_data: dict) -> PlaceholderItem:
+        pass
+
     def collect_placeholders(self):
         """Collect info from file metadata about created placeholders.
@@ -63,17 +64,7 @@ class AEPlaceholderPlugin(PlaceholderPlugin):
             if item.get("plugin_identifier") != self.identifier:
                 continue

-            if isinstance(self, AEPlaceholderLoadPlugin):
-                item = LoadPlaceholderItem(item["uuid"],
-                                           item["data"],
-                                           self)
-            elif isinstance(self, AEPlaceholderCreatePlugin):
-                item = CreatePlaceholderItem(item["uuid"],
-                                             item["data"],
-                                             self)
-            else:
-                raise NotImplementedError(f"Not implemented for {type(self)}")
-
+            item = self._create_placeholder_item(item)
             output.append(item)

         return output
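The isinstance dispatch above is replaced by a template-method hook: the shared
collect_placeholders() now delegates item construction to an abstract
_create_placeholder_item() that each subclass overrides. A generic sketch of the
pattern (simplified names, not the actual ayon-core classes):

    from abc import ABC, abstractmethod


    class BasePlaceholderPlugin(ABC):
        identifier = None

        @abstractmethod
        def _create_placeholder_item(self, item_data: dict):
            """Subclasses decide which concrete item type to build."""

        def collect_placeholders(self, metadata):
            # Shared filtering stays in the base class; only the item
            # construction differs per subclass.
            return [
                self._create_placeholder_item(item)
                for item in metadata
                if item.get("plugin_identifier") == self.identifier
            ]


    class LoadPlaceholderPlugin(BasePlaceholderPlugin):
        identifier = "example.load"

        def _create_placeholder_item(self, item_data: dict):
            return ("load", item_data["uuid"], item_data["data"])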
@@ -135,87 +126,6 @@ class AEPlaceholderPlugin(PlaceholderPlugin):
         stub.imprint(item_id, container_data)


-class AEPlaceholderCreatePlugin(AEPlaceholderPlugin, PlaceholderCreateMixin):
-    """Adds Create placeholder.
-
-    This adds composition and runs Create
-    """
-    identifier = "aftereffects.create"
-    label = "AfterEffects create"
-
-    def create_placeholder(self, placeholder_data):
-        stub = get_stub()
-        name = "CREATEPLACEHOLDER"
-        item_id = stub.add_item(name, "COMP")
-
-        self._imprint_item(item_id, name, placeholder_data, stub)
-
-    def populate_placeholder(self, placeholder):
-        """Replace 'placeholder' with publishable instance.
-
-        Renames prepared composition name, creates publishable instance, sets
-        frame/duration settings according to DB.
-        """
-        pre_create_data = {"use_selection": True}
-        item_id, item = self._get_item(placeholder)
-        get_stub().select_items([item_id])
-        self.populate_create_placeholder(placeholder, pre_create_data)
-
-        # apply settings for populated composition
-        item_id, metadata_item = self._get_item(placeholder)
-        set_settings(True, True, [item_id])
-
-    def get_placeholder_options(self, options=None):
-        return self.get_create_plugin_options(options)
-
-
-class AEPlaceholderLoadPlugin(AEPlaceholderPlugin, PlaceholderLoadMixin):
-    identifier = "aftereffects.load"
-    label = "AfterEffects load"
-
-    def create_placeholder(self, placeholder_data):
-        """Creates AE's Placeholder item in Project items list.
-
-        Sets dummy resolution/duration/fps settings, will be replaced when
-        populated.
-        """
-        stub = get_stub()
-        name = "LOADERPLACEHOLDER"
-        item_id = stub.add_placeholder(name, 1920, 1060, 25, 10)
-
-        self._imprint_item(item_id, name, placeholder_data, stub)
-
-    def populate_placeholder(self, placeholder):
-        """Use Openpype Loader from `placeholder` to create new FootageItems
-
-        New FootageItems are created, files are imported.
-        """
-        self.populate_load_placeholder(placeholder)
-        errors = placeholder.get_errors()
-        stub = get_stub()
-        if errors:
-            stub.print_msg("\n".join(errors))
-        else:
-            if not placeholder.data["keep_placeholder"]:
-                metadata = stub.get_metadata()
-                for item in metadata:
-                    if not item.get("is_placeholder"):
-                        continue
-                    scene_identifier = item.get("uuid")
-                    if (scene_identifier and
-                            scene_identifier == placeholder.scene_identifier):
-                        stub.delete_item(item["members"][0])
-                stub.remove_instance(placeholder.scene_identifier, metadata)
-
-    def get_placeholder_options(self, options=None):
-        return self.get_load_plugin_options(options)
-
-    def load_succeed(self, placeholder, container):
-        placeholder_item_id, _ = self._get_item(placeholder)
-        item_id = container.id
-        get_stub().add_item_instead_placeholder(placeholder_item_id, item_id)
-
-
 def build_workfile_template(*args, **kwargs):
     builder = AETemplateBuilder(registered_host())
     builder.build_template(*args, **kwargs)
@@ -0,0 +1,49 @@
+from ayon_core.pipeline.workfile.workfile_template_builder import (
+    CreatePlaceholderItem,
+    PlaceholderCreateMixin
+)
+from ayon_core.hosts.aftereffects.api import get_stub
+from ayon_core.hosts.aftereffects.api.lib import set_settings
+import ayon_core.hosts.aftereffects.api.workfile_template_builder as wtb
+
+
+class AEPlaceholderCreatePlugin(wtb.AEPlaceholderPlugin,
+                                PlaceholderCreateMixin):
+    """Adds Create placeholder.
+
+    This adds composition and runs Create
+    """
+    identifier = "aftereffects.create"
+    label = "AfterEffects create"
+
+    def _create_placeholder_item(self, item_data) -> CreatePlaceholderItem:
+        return CreatePlaceholderItem(
+            scene_identifier=item_data["uuid"],
+            data=item_data["data"],
+            plugin=self
+        )
+
+    def create_placeholder(self, placeholder_data):
+        stub = get_stub()
+        name = "CREATEPLACEHOLDER"
+        item_id = stub.add_item(name, "COMP")
+
+        self._imprint_item(item_id, name, placeholder_data, stub)
+
+    def populate_placeholder(self, placeholder):
+        """Replace 'placeholder' with publishable instance.
+
+        Renames prepared composition name, creates publishable instance, sets
+        frame/duration settings according to DB.
+        """
+        pre_create_data = {"use_selection": True}
+        item_id, item = self._get_item(placeholder)
+        get_stub().select_items([item_id])
+        self.populate_create_placeholder(placeholder, pre_create_data)
+
+        # apply settings for populated composition
+        item_id, metadata_item = self._get_item(placeholder)
+        set_settings(True, True, [item_id])
+
+    def get_placeholder_options(self, options=None):
+        return self.get_create_plugin_options(options)
@@ -0,0 +1,60 @@
+from ayon_core.pipeline.workfile.workfile_template_builder import (
+    LoadPlaceholderItem,
+    PlaceholderLoadMixin
+)
+from ayon_core.hosts.aftereffects.api import get_stub
+import ayon_core.hosts.aftereffects.api.workfile_template_builder as wtb
+
+
+class AEPlaceholderLoadPlugin(wtb.AEPlaceholderPlugin, PlaceholderLoadMixin):
+    identifier = "aftereffects.load"
+    label = "AfterEffects load"
+
+    def _create_placeholder_item(self, item_data) -> LoadPlaceholderItem:
+        return LoadPlaceholderItem(
+            scene_identifier=item_data["uuid"],
+            data=item_data["data"],
+            plugin=self
+        )
+
+    def create_placeholder(self, placeholder_data):
+        """Creates AE's Placeholder item in Project items list.
+
+        Sets dummy resolution/duration/fps settings, will be replaced when
+        populated.
+        """
+        stub = get_stub()
+        name = "LOADERPLACEHOLDER"
+        item_id = stub.add_placeholder(name, 1920, 1060, 25, 10)
+
+        self._imprint_item(item_id, name, placeholder_data, stub)
+
+    def populate_placeholder(self, placeholder):
+        """Use Openpype Loader from `placeholder` to create new FootageItems
+
+        New FootageItems are created, files are imported.
+        """
+        self.populate_load_placeholder(placeholder)
+        errors = placeholder.get_errors()
+        stub = get_stub()
+        if errors:
+            stub.print_msg("\n".join(errors))
+        else:
+            if not placeholder.data["keep_placeholder"]:
+                metadata = stub.get_metadata()
+                for item in metadata:
+                    if not item.get("is_placeholder"):
+                        continue
+                    scene_identifier = item.get("uuid")
+                    if (scene_identifier and
+                            scene_identifier == placeholder.scene_identifier):
+                        stub.delete_item(item["members"][0])
+                stub.remove_instance(placeholder.scene_identifier, metadata)
+
+    def get_placeholder_options(self, options=None):
+        return self.get_load_plugin_options(options)
+
+    def load_succeed(self, placeholder, container):
+        placeholder_item_id, _ = self._get_item(placeholder)
+        item_id = container.id
+        get_stub().add_item_instead_placeholder(placeholder_item_id, item_id)
@@ -8,6 +8,7 @@ from .lib import (
     sync_avalon_data_to_workfile,
     launch_workfiles_app,
     before_project_save,
+    apply_colorspace_project
 )
 from .tags import add_tags_to_workfile
 from .menu import update_menu_task_label
@@ -44,6 +45,8 @@ def afterNewProjectCreated(event):
     # reset workfiles startup not to open any more in session
     os.environ["WORKFILES_STARTUP"] = "0"

+    apply_colorspace_project()
+

 def beforeProjectLoad(event):
     log.info("before project load event...")
@@ -122,6 +125,7 @@ def register_hiero_events():
     except RuntimeError:
         pass

+
 def register_events():
     """
     Adding all callbacks.
@@ -11,7 +11,6 @@ import warnings
 import json
 import ast
 import secrets
-import shutil
 import hiero

 from qtpy import QtWidgets, QtCore
@@ -36,9 +35,6 @@ from .constants import (
     DEFAULT_SEQUENCE_NAME,
     DEFAULT_BIN_NAME
 )
-from ayon_core.pipeline.colorspace import (
-    get_imageio_config
-)


 class _CTX:
@@ -105,9 +101,9 @@ def flatten(list_):


 def get_current_project(remove_untitled=False):
-    projects = flatten(hiero.core.projects())
+    projects = hiero.core.projects()
     if not remove_untitled:
-        return next(iter(projects))
+        return projects[0]

     # if remove_untitled
     for proj in projects:
@@ -1050,18 +1046,68 @@ def _set_hrox_project_knobs(doc, **knobs):


 def apply_colorspace_project():
-    project_name = get_current_project_name()
-    # get path to the active projects
-    project = get_current_project(remove_untitled=True)
-    current_file = project.path()
-
-    # close the active project
-    project.close()
+    """Apply colorspaces from settings.
+
+    Due to not being able to set the project settings through the Python API,
+    we need to use some dubious code to find the widgets and set them. It is
+    possible to set the project settings without traversing the widgets, but
+    it involves reading the hrox files from disk with XML, so no in-memory
+    support. See https://community.foundry.com/discuss/topic/137771/change-a-project-s-default-color-transform-with-python  # noqa
+    for more details.
+    """
+    # get presets for hiero
+    project_name = get_current_project_name()
+    imageio = get_project_settings(project_name)["hiero"]["imageio"]
+    presets = imageio.get("workfile")
+
+    # Open Project Settings UI.
+    for act in hiero.ui.registeredActions():
+        if act.objectName() == "foundry.project.settings":
+            act.trigger()
+
+    # Find widgets from their sibling label.
+    labels = {
+        "Working Space:": "workingSpace",
+        "Viewer:": "viewerLut",
+        "Thumbnails:": "thumbnailLut",
+        "Monitor Out:": "monitorOutLut",
+        "8 Bit Files:": "eightBitLut",
+        "16 Bit Files:": "sixteenBitLut",
+        "Log Files:": "logLut",
+        "Floating Point Files:": "floatLut"
+    }
+    widgets = {x: None for x in labels.values()}
+
+    def _recursive_children(widget, labels, widgets):
+        children = widget.children()
+        for count, child in enumerate(children):
+            if isinstance(child, QtWidgets.QLabel):
+                if child.text() in labels.keys():
+                    widgets[labels[child.text()]] = children[count + 1]
+            _recursive_children(child, labels, widgets)
+
+    app = QtWidgets.QApplication.instance()
+    title = "Project Settings"
+    for widget in app.topLevelWidgets():
+        if isinstance(widget, QtWidgets.QMainWindow):
+            if widget.windowTitle() != title:
+                continue
+            _recursive_children(widget, labels, widgets)
+            widget.close()
+
+    msg = "Setting value \"{}\" is not a valid option for \"{}\""
+    for key, widget in widgets.items():
+        options = [widget.itemText(i) for i in range(widget.count())]
+        setting_value = presets[key]
+        assert setting_value in options, msg.format(setting_value, key)
+        widget.setCurrentText(presets[key])
+
+    # This code block is for setting up project colorspaces for files on disk.
+    # Due to not having Python API access to set the project settings, the
+    # Foundry recommended way is to modify the hrox files on disk with XML. See
+    # this forum thread for more details;
+    # https://community.foundry.com/discuss/topic/137771/change-a-project-s-default-color-transform-with-python  # noqa
+    '''
     # backward compatibility layer
     # TODO: remove this after some time
     config_data = get_imageio_config(
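Because Hiero exposes no Python API for these project settings, the new code
triggers the Project Settings action and pairs each QLabel with the widget that
immediately follows it among its siblings. The lookup in isolation looks
roughly like this (a pure Qt sketch; it assumes the label and its editor are
consecutive children, which is exactly the fragility the docstring admits):

    from qtpy import QtWidgets


    def find_widget_after_label(root, label_text):
        """Return the widget that directly follows a matching QLabel."""
        children = root.children()
        for index, child in enumerate(children):
            if (isinstance(child, QtWidgets.QLabel)
                    and child.text() == label_text
                    and index + 1 < len(children)):
                return children[index + 1]
            found = find_widget_after_label(child, label_text)
            if found is not None:
                return found
        return None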
@@ -1074,6 +1120,13 @@ def apply_colorspace_project():
         "ocioConfigName": "custom"
     })

+    # get path to the active projects
+    project = get_current_project()
+    current_file = project.path()
+
+    msg = "The project needs to be saved to disk to apply colorspace settings."
+    assert current_file, msg
+
     # save the workfile as subversion "comment:_colorspaceChange"
     split_current_file = os.path.splitext(current_file)
     copy_current_file = current_file
@@ -1116,6 +1169,7 @@ def apply_colorspace_project():

     # open the file as current project
     hiero.core.openProject(copy_current_file)
+    '''


 def apply_colorspace_clips():
@@ -1125,10 +1179,8 @@ def apply_colorspace_clips():

     # get presets for hiero
     imageio = get_project_settings(project_name)["hiero"]["imageio"]
-    from pprint import pprint

     presets = imageio.get("regexInputs", {}).get("inputs", {})
-    pprint(presets)
     for clip in clips:
         clip_media_source_path = clip.mediaSource().firstpath()
         clip_name = clip.name()
@@ -144,7 +144,7 @@ def add_tags_to_workfile():
     # Get project task types.
     project_name = get_current_project_name()
     project_entity = ayon_api.get_project(project_name)
-    task_types = project_entity["taskType"]
+    task_types = project_entity["taskTypes"]
     nks_pres_tags["[Tasks]"] = {}
     log.debug("__ tasks: {}".format(task_types))
     for task_type in task_types:
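The fix here is the entity key: an AYON project entity carries its task-type
definitions under "taskTypes" (plural), a list of dicts. A hedged sketch of
consuming it (the project name is illustrative; fields beyond "name" are not
shown):

    import ayon_api

    project_entity = ayon_api.get_project("my_project")
    for task_type in project_entity["taskTypes"]:
        # Each entry describes one task type, e.g. {"name": "Compositing", ...}
        print(task_type["name"])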
@@ -24,8 +24,14 @@ class SetDefaultDisplayView(PreLaunchHook):
         if not OCIO:
             return

+        # workfile settings added in '0.2.13'
         houdini_color_settings = \
-            self.data["project_settings"]["houdini"]["imageio"]["workfile"]
+            self.data["project_settings"]["houdini"]["imageio"].get("workfile")
+
+        if not houdini_color_settings:
+            self.log.info("Hook 'SetDefaultDisplayView' requires Houdini "
+                          "addon version >= '0.2.13'")
+            return

         if not houdini_color_settings["enabled"]:
             self.log.info(
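This hook, CreateReview below, and ValidateReviewColorspace further down all
make the same defensive switch: indexing into the "workfile" settings is
replaced by .get(), so projects still running a Houdini addon older than
'0.2.13' (which lacks that settings category) get a log message instead of a
KeyError. The guard reduced to its core (a sketch, not the exact plugin code):

    def get_review_color_space(project_settings):
        # Settings layout is versioned; older addons may lack the key.
        color_settings = project_settings["houdini"]["imageio"].get(
            "workfile", {}
        )
        if color_settings.get("enabled"):
            return color_settings.get("review_color_space")
        return None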
@@ -18,8 +18,11 @@ class CreateReview(plugin.HoudiniCreator):

     def apply_settings(self, project_settings):
         super(CreateReview, self).apply_settings(project_settings)
-        color_settings = project_settings["houdini"]["imageio"]["workfile"]
-        if color_settings["enabled"]:
+        # workfile settings added in '0.2.13'
+        color_settings = project_settings["houdini"]["imageio"].get(
+            "workfile", {}
+        )
+        if color_settings.get("enabled"):
             self.review_color_space = color_settings.get("review_color_space")

     def create(self, product_name, instance_data, pre_create_data):
@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-
-import sys
+import hou
 import pyblish.api
-import six

 from ayon_core.pipeline import PublishValidationError
@@ -26,28 +25,21 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
         invalid = self.get_invalid(instance)
         if invalid:
             raise PublishValidationError(
-                ("Output node(s) `{}` are incorrect. "
-                 "See plug-in log for details.").format(invalid),
-                title=self.label
+                "Output node '{}' is incorrect. "
+                "See plug-in log for details.".format(invalid),
+                title=self.label,
+                description=(
+                    "### Invalid COP output node\n\n"
+                    "The output node path for the instance must be set to a "
+                    "valid COP node path.\n\nSee the log for more details."
+                )
             )

     @classmethod
     def get_invalid(cls, instance):
-
-        import hou
-
-        try:
-            output_node = instance.data["output_node"]
-        except KeyError:
-            six.reraise(
-                PublishValidationError,
-                PublishValidationError(
-                    "Can't determine COP output node.",
-                    title=cls.__name__),
-                sys.exc_info()[2]
-            )
-
-        if output_node is None:
+        output_node = instance.data.get("output_node")
+
+        if not output_node:
             node = hou.node(instance.data.get("instance_node"))
             cls.log.error(
                 "COP Output node in '%s' does not exist. "
@@ -61,8 +53,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
             cls.log.error(
                 "Output node %s is not a COP node. "
                 "COP Path must point to a COP node, "
-                "instead found category type: %s"
-                % (output_node.path(), output_node.type().category().name())
+                "instead found category type: %s",
+                output_node.path(), output_node.type().category().name()
             )
             return [output_node.path()]
@@ -70,9 +62,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
         # is Cop2 to avoid potential edge case scenarios even though
         # the isinstance check above should be stricter than this category
         if output_node.type().category().name() != "Cop2":
-            raise PublishValidationError(
-                (
-                    "Output node {} is not of category Cop2."
-                    " This is a bug..."
-                ).format(output_node.path()),
-                title=cls.label)
+            cls.log.error(
+                "Output node %s is not of category Cop2.", output_node.path()
+            )
+            return [output_node.path()]
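The validator drops six.reraise along with the sys and six imports: ayon-core
runs on Python 3 only, where exception chaining is built in. If the old
"re-raise with the original traceback" behaviour were ever wanted again, plain
`raise ... from ...` covers it; a minimal sketch (not part of the diff):

    def get_output_node(data):
        try:
            return data["output_node"]
        except KeyError as exc:
            # Python 3 equivalent of six.reraise: the original traceback is
            # kept as the __cause__ of the new exception.
            raise RuntimeError("Can't determine COP output node.") from exc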
@@ -45,9 +45,12 @@ class ValidateReviewColorspace(pyblish.api.InstancePlugin,
             category="houdini")
         apply_plugin_settings_automatically(cls, settings, logger=cls.log)

-        # Add review color settings
-        color_settings = project_settings["houdini"]["imageio"]["workfile"]
-        if color_settings["enabled"]:
+        # workfile settings added in '0.2.13'
+        color_settings = project_settings["houdini"]["imageio"].get(
+            "workfile", {}
+        )
+        if color_settings.get("enabled"):
             cls.review_color_space = color_settings.get("review_color_space")
@@ -496,9 +496,9 @@ def object_transform_set(container_children):
     """
     transform_set = {}
     for node in container_children:
-        name = f"{node.name}.transform"
+        name = f"{node}.transform"
         transform_set[name] = node.pos
-        name = f"{node.name}.scale"
+        name = f"{node}.scale"
        transform_set[name] = node.scale
     return transform_set
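This change and the matching loader edits below keep the dictionary keys
symmetric: under pymxs, str(node) and node.name render differently (str() gives
the MAXScript form, e.g. "$Box001", while .name is the bare "Box001"; the exact
repr is an assumption here), so a writer keying on one and a reader keying on
the other never find each other. A tiny sketch of the failure mode:

    def demo_key_mismatch(node):
        stored = {f"{node}.transform": node.pos}  # writer side, str(node) key
        # The old readers built keys from node.name and therefore missed:
        assert stored.get(f"{node.name}.transform") is None
        # A reader using the same str(node) convention succeeds:
        return stored[f"{node}.transform"]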
@@ -519,6 +519,36 @@ def get_plugins() -> list:
     return plugin_info_list


+def update_modifier_node_names(event, node):
+    """Update the name of the nodes after renaming
+
+    Args:
+        event (pymxs.MXSWrapperBase): Event Name (
+            Mandatory argument for rt.NodeEventCallback)
+        node (list): Event Number (
+            Mandatory argument for rt.NodeEventCallback)
+
+    """
+    containers = [
+        obj
+        for obj in rt.Objects
+        if (
+            rt.ClassOf(obj) == rt.Container
+            and rt.getUserProp(obj, "id") == "pyblish.avalon.instance"
+            and rt.getUserProp(obj, "productType") not in {
+                "workfile", "tyflow"
+            }
+        )
+    ]
+    if not containers:
+        return
+    for container in containers:
+        ayon_data = container.modifiers[0].openPypeData
+        updated_node_names = [str(node.node) for node
+                              in ayon_data.all_handles]
+        rt.setProperty(ayon_data, "sel_list", updated_node_names)
+
+
 @contextlib.contextmanager
 def render_resolution(width, height):
     """Set render resolution option during context
@@ -63,6 +63,8 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):

         rt.callbacks.addScript(rt.Name('postWorkspaceChange'),
                                self._deferred_menu_creation)
+        rt.NodeEventCallback(
+            nameChanged=lib.update_modifier_node_names)

     def workfile_has_unsaved_changes(self):
         return rt.getSaveRequired()
@@ -117,7 +117,7 @@ class MaxSceneLoader(load.LoaderPlugin):
         )
         for max_obj, obj_name in zip(max_objects, max_object_names):
             max_obj.name = f"{namespace}:{obj_name}"
-            max_container.append(rt.getNodeByName(max_obj.name))
+            max_container.append(max_obj)
         return containerise(
             name, max_container, context,
             namespace, loader=self.__class__.__name__)
@@ -158,11 +158,11 @@ class MaxSceneLoader(load.LoaderPlugin):
                                       current_max_object_names):
             max_obj.name = f"{namespace}:{obj_name}"
             max_objects.append(max_obj)
-            max_transform = f"{max_obj.name}.transform"
+            max_transform = f"{max_obj}.transform"
             if max_transform in transform_data.keys():
                 max_obj.pos = transform_data[max_transform] or 0
                 max_obj.scale = transform_data[
-                    f"{max_obj.name}.scale"] or 0
+                    f"{max_obj}.scale"] or 0

         update_custom_attribute_data(node, max_objects)
         lib.imprint(container["instance_node"], {
@@ -76,11 +76,11 @@ class FbxModelLoader(load.LoaderPlugin):
         for fbx_object in current_fbx_objects:
             fbx_object.name = f"{namespace}:{fbx_object.name}"
             fbx_objects.append(fbx_object)
-            fbx_transform = f"{fbx_object.name}.transform"
+            fbx_transform = f"{fbx_object}.transform"
             if fbx_transform in transform_data.keys():
                 fbx_object.pos = transform_data[fbx_transform] or 0
                 fbx_object.scale = transform_data[
-                    f"{fbx_object.name}.scale"] or 0
+                    f"{fbx_object}.scale"] or 0

         with maintained_selection():
             rt.Select(node)
@@ -67,11 +67,11 @@ class ObjLoader(load.LoaderPlugin):
         selections = rt.GetCurrentSelection()
         for selection in selections:
             selection.name = f"{namespace}:{selection.name}"
-            selection_transform = f"{selection.name}.transform"
+            selection_transform = f"{selection}.transform"
             if selection_transform in transform_data.keys():
                 selection.pos = transform_data[selection_transform] or 0
                 selection.scale = transform_data[
-                    f"{selection.name}.scale"] or 0
+                    f"{selection}.scale"] or 0
         update_custom_attribute_data(node, selections)
         with maintained_selection():
             rt.Select(node)
@@ -95,11 +95,11 @@ class ModelUSDLoader(load.LoaderPlugin):
         for children in asset.Children:
             children.name = f"{namespace}:{children.name}"
             usd_objects.append(children)
-            children_transform = f"{children.name}.transform"
+            children_transform = f"{children}.transform"
             if children_transform in transform_data.keys():
                 children.pos = transform_data[children_transform] or 0
                 children.scale = transform_data[
-                    f"{children.name}.scale"] or 0
+                    f"{children}.scale"] or 0

         asset.name = f"{namespace}:{asset.name}"
         usd_objects.append(asset)
@@ -92,10 +92,10 @@ class OxAbcLoader(load.LoaderPlugin):
         abc.Parent = container
         abc.name = f"{namespace}:{abc.name}"
         ox_abc_objects.append(abc)
-        ox_transform = f"{abc.name}.transform"
+        ox_transform = f"{abc}.transform"
         if ox_transform in transform_data.keys():
             abc.pos = transform_data[ox_transform] or 0
-            abc.scale = transform_data[f"{abc.name}.scale"] or 0
+            abc.scale = transform_data[f"{abc}.scale"] or 0
         update_custom_attribute_data(node, ox_abc_objects)
         lib.imprint(
             container["instance_node"],
@@ -53,6 +53,7 @@ class ExtractAlembic(publish.Extractor,
     hosts = ["max"]
     families = ["pointcache"]
     optional = True
+    active = True

     def process(self, instance):
         if not self.is_active(instance.data):
@@ -102,24 +103,27 @@ class ExtractAlembic(publish.Extractor,

     @classmethod
     def get_attribute_defs(cls):
-        return [
+        defs = super(ExtractAlembic, cls).get_attribute_defs()
+        defs.extend([
             BoolDef("custom_attrs",
                     label="Custom Attributes",
                     default=False),
-        ]
+        ])
+        return defs


 class ExtractCameraAlembic(ExtractAlembic):
     """Extract Camera with AlembicExport."""

     label = "Extract Alembic Camera"
     families = ["camera"]
+    optional = True


-class ExtractModel(ExtractAlembic):
+class ExtractModelAlembic(ExtractAlembic):
     """Extract Geometry in Alembic Format"""
     label = "Extract Geometry (Alembic)"
     families = ["model"]
+    optional = True

     def _set_abc_attributes(self, instance):
         attr_values = self.get_attr_values_from_data(instance.data)
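get_attribute_defs() now extends the parent's definitions instead of replacing
them, so options defined higher in the hierarchy stay visible in the publisher
UI. The pattern in a self-contained sketch (the classes are illustrative;
BoolDef is the same helper the plugin imports from ayon_core.lib):

    from ayon_core.lib import BoolDef


    class Parent:
        @classmethod
        def get_attribute_defs(cls):
            return [BoolDef("base_option", label="Base Option", default=True)]


    class Child(Parent):
        @classmethod
        def get_attribute_defs(cls):
            # Extend rather than return a fresh list, so the parent's
            # attribute definitions are not silently dropped.
            defs = super(Child, cls).get_attribute_defs()
            defs.append(
                BoolDef("custom_attrs",
                        label="Custom Attributes",
                        default=False)
            )
            return defs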
@@ -2520,7 +2520,16 @@ def set_scene_fps(fps, update=True):
     """

     fps_mapping = {
+        '2': '2fps',
+        '3': '3fps',
+        '4': '4fps',
+        '5': '5fps',
+        '6': '6fps',
+        '8': '8fps',
+        '10': '10fps',
+        '12': '12fps',
         '15': 'game',
+        '16': '16fps',
         '24': 'film',
         '25': 'pal',
         '30': 'ntsc',
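Maya does not take an arbitrary numeric frame rate; it maps fps to named time
units, which is what this table encodes (15 is "game", 24 is "film", 25 is
"pal", and so on). Presumably set_scene_fps feeds the mapped name to
cmds.currentUnit; a hedged sketch of that use (the helper below is
illustrative, not the actual function body):

    from maya import cmds


    def set_time_unit(fps):
        fps_mapping = {
            "2": "2fps", "15": "game", "24": "film",
            "25": "pal", "30": "ntsc",
        }
        unit = fps_mapping.get(str(int(fps)))
        if unit is None:
            raise ValueError("Unsupported FPS value: {}".format(fps))
        # Maya expresses the scene frame rate as a named time unit.
        cmds.currentUnit(time=unit)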
@@ -2612,21 +2621,24 @@ def get_fps_for_current_context():
     Returns:
         Union[int, float]: FPS value.
     """
-    project_name = get_current_project_name()
-    folder_path = get_current_folder_path()
-    folder_entity = ayon_api.get_folder_by_path(
-        project_name, folder_path, fields={"attrib.fps"}
-    ) or {}
-    fps = folder_entity.get("attrib", {}).get("fps")
+    task_entity = get_current_task_entity(fields={"attrib"})
+    fps = task_entity.get("attrib", {}).get("fps")
     if not fps:
-        project_entity = ayon_api.get_project(
-            project_name, fields=["attrib.fps"]
-        ) or {}
-        fps = project_entity.get("attrib", {}).get("fps")
-
-        if not fps:
-            fps = 25
+        project_name = get_current_project_name()
+        folder_path = get_current_folder_path()
+        folder_entity = ayon_api.get_folder_by_path(
+            project_name, folder_path, fields={"attrib.fps"}
+        ) or {}
+        fps = folder_entity.get("attrib", {}).get("fps")
+        if not fps:
+            project_entity = ayon_api.get_project(
+                project_name, fields=["attrib.fps"]
+            ) or {}
+            fps = project_entity.get("attrib", {}).get("fps")
+
+            if not fps:
+                fps = 25

     return convert_to_maya_fps(fps)
@@ -720,7 +720,8 @@ class RenderProductsArnold(ARenderProducts):

         # AOVs > Legacy > Maya Render View > Mode
         aovs_enabled = bool(
-            self._get_attr("defaultArnoldRenderOptions.aovMode")
+            self._get_attr(
+                "defaultArnoldRenderOptions.aovMode", as_string=False)
         )
         if not aovs_enabled:
             return beauty_products
@@ -30,9 +30,11 @@ from ayon_core.pipeline import (
     register_loader_plugin_path,
     register_inventory_action_path,
     register_creator_plugin_path,
+    register_workfile_build_plugin_path,
     deregister_loader_plugin_path,
     deregister_inventory_action_path,
     deregister_creator_plugin_path,
+    deregister_workfile_build_plugin_path,
     AYON_CONTAINER_ID,
     AVALON_CONTAINER_ID,
 )
@@ -47,7 +49,6 @@ from ayon_core.hosts.maya import MAYA_ROOT_DIR
 from ayon_core.hosts.maya.lib import create_workspace_mel

 from . import menu, lib
-from .workfile_template_builder import MayaPlaceholderLoadPlugin
 from .workio import (
     open_file,
     save_file,
@@ -64,6 +65,7 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
 LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
 CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
 INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
+WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build")

 AVALON_CONTAINERS = ":AVALON_CONTAINERS"
@@ -93,7 +95,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
         register_loader_plugin_path(LOAD_PATH)
         register_creator_plugin_path(CREATE_PATH)
         register_inventory_action_path(INVENTORY_PATH)
-        self.log.info(PUBLISH_PATH)
+        register_workfile_build_plugin_path(WORKFILE_BUILD_PATH)

         self.log.info("Installing callbacks ... ")
         register_event_callback("init", on_init)
@@ -148,11 +150,6 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
     def get_containers(self):
         return ls()

-    def get_workfile_build_placeholder_plugins(self):
-        return [
-            MayaPlaceholderLoadPlugin
-        ]
-
     @contextlib.contextmanager
     def maintained_selection(self):
         with lib.maintained_selection():
@@ -338,6 +335,7 @@ def uninstall():
     deregister_loader_plugin_path(LOAD_PATH)
     deregister_creator_plugin_path(CREATE_PATH)
     deregister_inventory_action_path(INVENTORY_PATH)
+    deregister_workfile_build_plugin_path(WORKFILE_BUILD_PATH)

     menu.uninstall()
@@ -1,5 +1,3 @@
-import json
-
 from maya import cmds

 from ayon_core.pipeline import (
@@ -10,16 +8,13 @@ from ayon_core.pipeline import (
 )
 from ayon_core.pipeline.workfile.workfile_template_builder import (
     TemplateAlreadyImported,
-    AbstractTemplateBuilder,
-    PlaceholderPlugin,
-    LoadPlaceholderItem,
-    PlaceholderLoadMixin,
+    AbstractTemplateBuilder
 )
 from ayon_core.tools.workfile_template_build import (
     WorkfileBuildPlaceholderDialog,
 )

-from .lib import read, imprint, get_reference_node, get_main_window
+from .lib import get_main_window

 PLACEHOLDER_SET = "PLACEHOLDERS_SET"
@@ -91,255 +86,6 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):
         return True


-class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
-    identifier = "maya.load"
-    label = "Maya load"
-
-    def _collect_scene_placeholders(self):
-        # Cache placeholder data to shared data
-        placeholder_nodes = self.builder.get_shared_populate_data(
-            "placeholder_nodes"
-        )
-        if placeholder_nodes is None:
-            attributes = cmds.ls("*.plugin_identifier", long=True)
-            placeholder_nodes = {}
-            for attribute in attributes:
-                node_name = attribute.rpartition(".")[0]
-                placeholder_nodes[node_name] = (
-                    self._parse_placeholder_node_data(node_name)
-                )
-
-            self.builder.set_shared_populate_data(
-                "placeholder_nodes", placeholder_nodes
-            )
-        return placeholder_nodes
-
-    def _parse_placeholder_node_data(self, node_name):
-        placeholder_data = read(node_name)
-        parent_name = (
-            cmds.getAttr(node_name + ".parent", asString=True)
-            or node_name.rpartition("|")[0]
-            or ""
-        )
-        if parent_name:
-            siblings = cmds.listRelatives(parent_name, children=True)
-        else:
-            siblings = cmds.ls(assemblies=True)
-        node_shortname = node_name.rpartition("|")[2]
-        current_index = cmds.getAttr(node_name + ".index", asString=True)
-        if current_index < 0:
-            current_index = siblings.index(node_shortname)
-
-        placeholder_data.update({
-            "parent": parent_name,
-            "index": current_index
-        })
-        return placeholder_data
-
-    def _create_placeholder_name(self, placeholder_data):
-        placeholder_name_parts = placeholder_data["builder_type"].split("_")
-
-        pos = 1
-        placeholder_product_type = placeholder_data.get("product_type")
-        if placeholder_product_type is None:
-            placeholder_product_type = placeholder_data.get("family")
-
-        if placeholder_product_type:
-            placeholder_name_parts.insert(pos, placeholder_product_type)
-            pos += 1
-
-        # add loader arguments if any
-        loader_args = placeholder_data["loader_args"]
-        if loader_args:
-            loader_args = json.loads(loader_args.replace('\'', '\"'))
-            values = [v for v in loader_args.values()]
-            for value in values:
-                placeholder_name_parts.insert(pos, value)
-                pos += 1
-
-        placeholder_name = "_".join(placeholder_name_parts)
-
-        return placeholder_name.capitalize()
-
-    def _get_loaded_repre_ids(self):
-        loaded_representation_ids = self.builder.get_shared_populate_data(
-            "loaded_representation_ids"
-        )
-        if loaded_representation_ids is None:
-            try:
-                containers = cmds.sets("AVALON_CONTAINERS", q=True)
-            except ValueError:
-                containers = []
-
-            loaded_representation_ids = {
-                cmds.getAttr(container + ".representation")
-                for container in containers
-            }
-            self.builder.set_shared_populate_data(
-                "loaded_representation_ids", loaded_representation_ids
-            )
-        return loaded_representation_ids
-
-    def create_placeholder(self, placeholder_data):
-        selection = cmds.ls(selection=True)
-        if len(selection) > 1:
-            raise ValueError("More then one item are selected")
-
-        parent = selection[0] if selection else None
-
-        placeholder_data["plugin_identifier"] = self.identifier
-
-        placeholder_name = self._create_placeholder_name(placeholder_data)
-
-        placeholder = cmds.spaceLocator(name=placeholder_name)[0]
-        if parent:
-            placeholder = cmds.parent(placeholder, selection[0])[0]
-
-        imprint(placeholder, placeholder_data)
-
-        # Add helper attributes to keep placeholder info
-        cmds.addAttr(
-            placeholder,
-            longName="parent",
-            hidden=True,
-            dataType="string"
-        )
-        cmds.addAttr(
-            placeholder,
-            longName="index",
-            hidden=True,
-            attributeType="short",
-            defaultValue=-1
-        )
-
-        cmds.setAttr(placeholder + ".parent", "", type="string")
-
-    def update_placeholder(self, placeholder_item, placeholder_data):
-        node_name = placeholder_item.scene_identifier
-        new_values = {}
-        for key, value in placeholder_data.items():
-            placeholder_value = placeholder_item.data.get(key)
-            if value != placeholder_value:
-                new_values[key] = value
-                placeholder_item.data[key] = value
-
-        for key in new_values.keys():
-            cmds.deleteAttr(node_name + "." + key)
-
-        imprint(node_name, new_values)
-
-    def collect_placeholders(self):
-        output = []
-        scene_placeholders = self._collect_scene_placeholders()
-        for node_name, placeholder_data in scene_placeholders.items():
-            if placeholder_data.get("plugin_identifier") != self.identifier:
-                continue
-
-            # TODO do data validations and maybe upgrades if they are invalid
-            output.append(
-                LoadPlaceholderItem(node_name, placeholder_data, self)
-            )
-
-        return output
-
-    def populate_placeholder(self, placeholder):
-        self.populate_load_placeholder(placeholder)
-
-    def repopulate_placeholder(self, placeholder):
-        repre_ids = self._get_loaded_repre_ids()
-        self.populate_load_placeholder(placeholder, repre_ids)
-
-    def get_placeholder_options(self, options=None):
-        return self.get_load_plugin_options(options)
-
-    def post_placeholder_process(self, placeholder, failed):
-        """Cleanup placeholder after load of its corresponding representations.
-
-        Args:
-            placeholder (PlaceholderItem): Item which was just used to load
-                representation.
-            failed (bool): Loading of representation failed.
-        """
-        # Hide placeholder and add them to placeholder set
-        node = placeholder.scene_identifier
-
-        cmds.sets(node, addElement=PLACEHOLDER_SET)
-        cmds.hide(node)
-        cmds.setAttr(node + ".hiddenInOutliner", True)
-
-    def delete_placeholder(self, placeholder):
-        """Remove placeholder if building was successful"""
-        cmds.delete(placeholder.scene_identifier)
-
-    def load_succeed(self, placeholder, container):
-        self._parent_in_hierarchy(placeholder, container)
-
-    def _parent_in_hierarchy(self, placeholder, container):
-        """Parent loaded container to placeholder's parent.
-
-        ie : Set loaded content as placeholder's sibling
-
-        Args:
-            container (str): Placeholder loaded containers
-        """
-
-        if not container:
-            return
-
-        roots = cmds.sets(container, q=True) or []
-        ref_node = None
-        try:
-            ref_node = get_reference_node(roots)
-        except AssertionError as e:
-            self.log.info(e.args[0])
-
-        nodes_to_parent = []
-        for root in roots:
-            if ref_node:
-                ref_root = cmds.referenceQuery(root, nodes=True)[0]
-                ref_root = (
-                    cmds.listRelatives(ref_root, parent=True, path=True) or
-                    [ref_root]
-                )
-                nodes_to_parent.extend(ref_root)
-                continue
-            if root.endswith("_RN"):
-                # Backwards compatibility for hardcoded reference names.
-                refRoot = cmds.referenceQuery(root, n=True)[0]
-                refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot]
-                nodes_to_parent.extend(refRoot)
-            elif root not in cmds.listSets(allSets=True):
-                nodes_to_parent.append(root)
-
-            elif not cmds.sets(root, q=True):
-                return
-
-        # Move loaded nodes to correct index in outliner hierarchy
-        placeholder_form = cmds.xform(
-            placeholder.scene_identifier,
-            q=True,
-            matrix=True,
-            worldSpace=True
-        )
-        scene_parent = cmds.listRelatives(
-            placeholder.scene_identifier, parent=True, fullPath=True
-        )
-        for node in set(nodes_to_parent):
-            cmds.reorder(node, front=True)
-            cmds.reorder(node, relative=placeholder.data["index"])
-            cmds.xform(node, matrix=placeholder_form, ws=True)
-            if scene_parent:
-                cmds.parent(node, scene_parent)
-            else:
-                cmds.parent(node, world=True)
-
-        holding_sets = cmds.listSets(object=placeholder.scene_identifier)
-        if not holding_sets:
-            return
-        for holding_set in holding_sets:
-            cmds.sets(roots, forceElement=holding_set)


 def build_workfile_template(*args):
     builder = MayaTemplateBuilder(registered_host())
     builder.build_template()
client/ayon_core/hosts/maya/api/yeti.py (new file, 101 lines)
@@ -0,0 +1,101 @@
+from typing import List
+
+from maya import cmds
+
+
+def get_yeti_user_variables(yeti_shape_node: str) -> List[str]:
+    """Get user defined yeti user variables for a `pgYetiMaya` shape node.
+
+    Arguments:
+        yeti_shape_node (str): The `pgYetiMaya` shape node.
+
+    Returns:
+        list: Attribute names (for a vector attribute it only lists the top
+            parent attribute, not the attribute per axis)
+    """
+
+    attrs = cmds.listAttr(yeti_shape_node,
+                          userDefined=True,
+                          string=("yetiVariableV_*",
+                                  "yetiVariableF_*")) or []
+    valid_attrs = []
+    for attr in attrs:
+        attr_type = cmds.attributeQuery(attr, node=yeti_shape_node,
+                                        attributeType=True)
+        if attr.startswith("yetiVariableV_") and attr_type == "double3":
+            # vector
+            valid_attrs.append(attr)
+        elif attr.startswith("yetiVariableF_") and attr_type == "double":
+            valid_attrs.append(attr)
+
+    return valid_attrs
+
+
+def create_yeti_variable(yeti_shape_node: str,
+                         attr_name: str,
+                         value=None,
+                         force_value: bool = False) -> bool:
+    """Create a user defined yeti variable on a `pgYetiMaya` shape node.
+
+    Arguments:
+        yeti_shape_node (str): The `pgYetiMaya` shape node.
+        attr_name (str): The fully qualified yeti variable name, e.g.
+            "yetiVariableF_myfloat" or "yetiVariableV_myvector"
+        value (object): The value to set (must match the type of the attribute)
+            When value is None it will be ignored and not be set.
+        force_value (bool): Whether to set the value if the attribute already
+            exists or not.
+
+    Returns:
+        bool: Whether the attribute value was set or not.
+
+    """
+    exists = cmds.attributeQuery(attr_name, node=yeti_shape_node, exists=True)
+    if not exists:
+        if attr_name.startswith("yetiVariableV_"):
+            _create_vector_yeti_user_variable(yeti_shape_node, attr_name)
+        if attr_name.startswith("yetiVariableF_"):
+            _create_float_yeti_user_variable(yeti_shape_node, attr_name)
+
+    if value is not None and (not exists or force_value):
+        plug = "{}.{}".format(yeti_shape_node, attr_name)
+        if (
+            isinstance(value, (list, tuple))
+            and attr_name.startswith("yetiVariableV_")
+        ):
+            cmds.setAttr(plug, *value, type="double3")
+        else:
+            cmds.setAttr(plug, value)
+
+        return True
+    return False
+
+
+def _create_vector_yeti_user_variable(yeti_shape_node: str, attr_name: str):
+    if not attr_name.startswith("yetiVariableV_"):
+        raise ValueError("Must start with yetiVariableV_")
+    cmds.addAttr(yeti_shape_node,
+                 longName=attr_name,
+                 attributeType="double3",
+                 cachedInternally=True,
+                 keyable=True)
+    for axis in "XYZ":
+        cmds.addAttr(yeti_shape_node,
+                     longName="{}{}".format(attr_name, axis),
+                     attributeType="double",
+                     parent=attr_name,
+                     cachedInternally=True,
+                     keyable=True)
+
+
+def _create_float_yeti_user_variable(yeti_node: str, attr_name: str):
+    if not attr_name.startswith("yetiVariableF_"):
+        raise ValueError("Must start with yetiVariableF_")
+
+    cmds.addAttr(yeti_node,
+                 longName=attr_name,
+                 attributeType="double",
+                 cachedInternally=True,
+                 softMinValue=0,
+                 softMaxValue=100,
+                 keyable=True)
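A usage sketch for the two helpers above, assuming a scene containing a
pgYetiMaya shape named "yetiShape" (the node name and variable names are
illustrative):

    from ayon_core.hosts.maya.api.yeti import (
        create_yeti_variable,
        get_yeti_user_variables,
    )

    shape = "yetiShape"  # hypothetical pgYetiMaya shape node

    # Create a float variable with an initial value; returns True when set.
    create_yeti_variable(shape, "yetiVariableF_density", value=0.5)

    # Re-running with force_value=False leaves an existing value untouched,
    # which is how the Yeti cache loader preserves local overrides.
    create_yeti_variable(shape, "yetiVariableF_density", value=1.0,
                         force_value=False)

    # Vector variables take a 3-tuple and are created as double3.
    create_yeti_variable(shape, "yetiVariableV_tint", value=(1.0, 0.8, 0.6))

    print(get_yeti_user_variables(shape))
    # e.g. ['yetiVariableF_density', 'yetiVariableV_tint']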
client/ayon_core/hosts/maya/plugins/load/load_as_template.py (new file, 39 lines)
@@ -0,0 +1,39 @@
+from ayon_core.lib import (
+    BoolDef
+)
+from ayon_core.pipeline import (
+    load,
+    registered_host
+)
+from ayon_core.hosts.maya.api.workfile_template_builder import (
+    MayaTemplateBuilder
+)
+
+
+class LoadAsTemplate(load.LoaderPlugin):
+    """Load workfile as a template """
+
+    product_types = {"workfile", "mayaScene"}
+    label = "Load as template"
+    representations = ["ma", "mb"]
+    icon = "wrench"
+    color = "#775555"
+    order = 10
+
+    options = [
+        BoolDef("keep_placeholders",
+                label="Keep Placeholders",
+                default=False),
+        BoolDef("create_first_version",
+                label="Create First Version",
+                default=False),
+    ]
+
+    def load(self, context, name, namespace, data):
+        keep_placeholders = data.get("keep_placeholders", False)
+        create_first_version = data.get("create_first_version", False)
+        path = self.filepath_from_context(context)
+        builder = MayaTemplateBuilder(registered_host())
+        builder.build_template(template_path=path,
+                               keep_placeholders=keep_placeholders,
+                               create_first_version=create_first_version)
@@ -12,6 +12,7 @@ from ayon_core.pipeline import (
     get_representation_path
 )
 from ayon_core.hosts.maya.api import lib
+from ayon_core.hosts.maya.api.yeti import create_yeti_variable
 from ayon_core.hosts.maya.api.pipeline import containerise
 from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type
@@ -23,8 +24,19 @@ SKIP_UPDATE_ATTRS = {
     "viewportDensity",
     "viewportWidth",
     "viewportLength",
+    "renderDensity",
+    "renderWidth",
+    "renderLength",
+    "increaseRenderBounds"
 }

+SKIP_ATTR_MESSAGE = (
+    "Skipping updating %s.%s to %s because it "
+    "is considered a local overridable attribute. "
+    "Either set manually or load the cache anew."
+)
+

 def set_attribute(node, attr, value):
     """Wrapper of set attribute which ignores None values"""
@@ -209,9 +221,31 @@ class YetiCacheLoader(load.LoaderPlugin):

         for attr, value in node_settings["attrs"].items():
+            if attr in SKIP_UPDATE_ATTRS:
+                self.log.info(
+                    SKIP_ATTR_MESSAGE, yeti_node, attr, value
+                )
+                continue
             set_attribute(attr, value, yeti_node)

+        # Set up user defined attributes
+        user_variables = node_settings.get("user_variables", {})
+        for attr, value in user_variables.items():
+            was_value_set = create_yeti_variable(
+                yeti_shape_node=yeti_node,
+                attr_name=attr,
+                value=value,
+                # We do not want to update the value if it already exists,
+                # so that any local overrides that may have been applied
+                # still persist
+                force_value=False
+            )
+            if not was_value_set:
+                self.log.info(
+                    SKIP_ATTR_MESSAGE, yeti_node, attr, value
+                )

         cmds.setAttr("{}.representation".format(container_node),
                      repre_entity["id"],
                      typ="string")
@@ -332,6 +366,13 @@ class YetiCacheLoader(load.LoaderPlugin):
         for attr, value in attributes.items():
             set_attribute(attr, value, yeti_node)

+        # Set up user defined attributes
+        user_variables = node_settings.get("user_variables", {})
+        for attr, value in user_variables.items():
+            create_yeti_variable(yeti_shape_node=yeti_node,
+                                 attr_name=attr,
+                                 value=value)
+
         # Connect to the time node
         cmds.connectAttr("time1.outTime", "%s.currentTime" % yeti_node)
@@ -3,6 +3,7 @@ from maya import cmds
 import pyblish.api

 from ayon_core.hosts.maya.api import lib
+from ayon_core.hosts.maya.api.yeti import get_yeti_user_variables


 SETTINGS = {
@@ -34,7 +35,7 @@ class CollectYetiCache(pyblish.api.InstancePlugin):
     - "increaseRenderBounds"
     - "imageSearchPath"

-    Other information is the name of the transform and it's Colorbleed ID
+    Other information is the name of the transform and its `cbId`
     """

     order = pyblish.api.CollectorOrder + 0.45
@@ -54,6 +55,16 @@ class CollectYetiCache(pyblish.api.InstancePlugin):
         # Get specific node attributes
         attr_data = {}
         for attr in SETTINGS:
+            # Ignore non-existing attributes with a warning, e.g. cbId
+            # if they have not been generated yet
+            if not cmds.attributeQuery(attr, node=shape, exists=True):
+                self.log.warning(
+                    "Attribute '{}' not found on Yeti node: {}".format(
+                        attr, shape
+                    )
+                )
+                continue
+
             current = cmds.getAttr("%s.%s" % (shape, attr))
             # change None to empty string as Maya doesn't support
             # NoneType in attributes
@@ -61,6 +72,12 @@ class CollectYetiCache(pyblish.api.InstancePlugin):
                 current = ""
             attr_data[attr] = current

+        # Get user variable attributes
+        user_variable_attrs = {
+            attr: lib.get_attribute("{}.{}".format(shape, attr))
+            for attr in get_yeti_user_variables(shape)
+        }
+
         # Get transform data
         parent = cmds.listRelatives(shape, parent=True)[0]
         transform_data = {"name": parent, "cbId": lib.get_id(parent)}
@@ -70,6 +87,7 @@ class CollectYetiCache(pyblish.api.InstancePlugin):
             "name": shape,
             "cbId": lib.get_id(shape),
             "attrs": attr_data,
+            "user_variables": user_variable_attrs
         }

         settings["nodes"].append(shape_data)
@@ -0,0 +1,270 @@
+import json
+
+from maya import cmds
+
+from ayon_core.pipeline.workfile.workfile_template_builder import (
+    PlaceholderPlugin,
+    LoadPlaceholderItem,
+    PlaceholderLoadMixin,
+)
+from ayon_core.hosts.maya.api.lib import (
+    read,
+    imprint,
+    get_reference_node
+)
+from ayon_core.hosts.maya.api.workfile_template_builder import PLACEHOLDER_SET
+
+
+class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
+    identifier = "maya.load"
+    label = "Maya load"
+
+    def _collect_scene_placeholders(self):
+        # Cache placeholder data to shared data
+        placeholder_nodes = self.builder.get_shared_populate_data(
+            "placeholder_nodes"
+        )
+        if placeholder_nodes is None:
+            attributes = cmds.ls("*.plugin_identifier", long=True)
+            placeholder_nodes = {}
+            for attribute in attributes:
+                node_name = attribute.rpartition(".")[0]
+                placeholder_nodes[node_name] = (
+                    self._parse_placeholder_node_data(node_name)
+                )
+
+            self.builder.set_shared_populate_data(
+                "placeholder_nodes", placeholder_nodes
+            )
+        return placeholder_nodes
+
+    def _parse_placeholder_node_data(self, node_name):
+        placeholder_data = read(node_name)
+        parent_name = (
+            cmds.getAttr(node_name + ".parent", asString=True)
+            or node_name.rpartition("|")[0]
+            or ""
+        )
+        if parent_name:
+            siblings = cmds.listRelatives(parent_name, children=True)
+        else:
+            siblings = cmds.ls(assemblies=True)
+        node_shortname = node_name.rpartition("|")[2]
+        current_index = cmds.getAttr(node_name + ".index", asString=True)
+        if current_index < 0:
+            current_index = siblings.index(node_shortname)
+
+        placeholder_data.update({
+            "parent": parent_name,
+            "index": current_index
+        })
+        return placeholder_data
+
+    def _create_placeholder_name(self, placeholder_data):
+        placeholder_name_parts = placeholder_data["builder_type"].split("_")
+
+        pos = 1
+        placeholder_product_type = placeholder_data.get("product_type")
+        if placeholder_product_type is None:
+            placeholder_product_type = placeholder_data.get("family")
+
+        if placeholder_product_type:
+            placeholder_name_parts.insert(pos, placeholder_product_type)
+            pos += 1
+
+        # add loader arguments if any
+        loader_args = placeholder_data["loader_args"]
+        if loader_args:
+            loader_args = json.loads(loader_args.replace('\'', '\"'))
+            values = [v for v in loader_args.values()]
+            for value in values:
+                placeholder_name_parts.insert(pos, value)
+                pos += 1
+
+        placeholder_name = "_".join(placeholder_name_parts)
+
+        return placeholder_name.capitalize()
+
+    def _get_loaded_repre_ids(self):
+        loaded_representation_ids = self.builder.get_shared_populate_data(
+            "loaded_representation_ids"
+        )
+        if loaded_representation_ids is None:
+            try:
+                containers = cmds.sets("AVALON_CONTAINERS", q=True)
+            except ValueError:
+                containers = []
+
+            loaded_representation_ids = {
+                cmds.getAttr(container + ".representation")
+                for container in containers
+            }
+            self.builder.set_shared_populate_data(
+                "loaded_representation_ids", loaded_representation_ids
+            )
+        return loaded_representation_ids
+
+    def create_placeholder(self, placeholder_data):
+        selection = cmds.ls(selection=True)
+        if len(selection) > 1:
+            raise ValueError("More then one item are selected")
+
+        parent = selection[0] if selection else None
+
+        placeholder_data["plugin_identifier"] = self.identifier
+
+        placeholder_name = self._create_placeholder_name(placeholder_data)
+
+        placeholder = cmds.spaceLocator(name=placeholder_name)[0]
+        if parent:
+            placeholder = cmds.parent(placeholder, selection[0])[0]
+
+        imprint(placeholder, placeholder_data)
+
+        # Add helper attributes to keep placeholder info
+        cmds.addAttr(
+            placeholder,
+            longName="parent",
+            hidden=True,
+            dataType="string"
+        )
+        cmds.addAttr(
+            placeholder,
+            longName="index",
+            hidden=True,
+            attributeType="short",
+            defaultValue=-1
+        )
+
+        cmds.setAttr(placeholder + ".parent", "", type="string")
+
+    def update_placeholder(self, placeholder_item, placeholder_data):
+        node_name = placeholder_item.scene_identifier
+        new_values = {}
+        for key, value in placeholder_data.items():
+            placeholder_value = placeholder_item.data.get(key)
+            if value != placeholder_value:
+                new_values[key] = value
+                placeholder_item.data[key] = value
+
+        for key in new_values.keys():
+            cmds.deleteAttr(node_name + "." + key)
+
+        imprint(node_name, new_values)
+
+    def collect_placeholders(self):
+        output = []
+        scene_placeholders = self._collect_scene_placeholders()
+        for node_name, placeholder_data in scene_placeholders.items():
+            if placeholder_data.get("plugin_identifier") != self.identifier:
+                continue
+
+            # TODO do data validations and maybe upgrades if they are invalid
+            output.append(
+                LoadPlaceholderItem(node_name, placeholder_data, self)
+            )
+
+        return output
+
+    def populate_placeholder(self, placeholder):
+        self.populate_load_placeholder(placeholder)
+
+    def repopulate_placeholder(self, placeholder):
+        repre_ids = self._get_loaded_repre_ids()
+        self.populate_load_placeholder(placeholder, repre_ids)
+
+    def get_placeholder_options(self, options=None):
+        return self.get_load_plugin_options(options)
+
+    def post_placeholder_process(self, placeholder, failed):
+        """Cleanup placeholder after load of its corresponding representations.
+
+        Args:
+            placeholder (PlaceholderItem): Item which was just used to load
+                representation.
+            failed (bool): Loading of representation failed.
+        """
+        # Hide placeholder and add them to placeholder set
+        node = placeholder.scene_identifier
+
+        # If we just populate the placeholders from current scene, the
+        # placeholder set will not be created so account for that.
+        if not cmds.objExists(PLACEHOLDER_SET):
+            cmds.sets(name=PLACEHOLDER_SET, empty=True)
+
+        cmds.sets(node, addElement=PLACEHOLDER_SET)
+        cmds.hide(node)
+        cmds.setAttr(node + ".hiddenInOutliner", True)
+
+    def delete_placeholder(self, placeholder):
+        """Remove placeholder if building was successful"""
+        cmds.delete(placeholder.scene_identifier)
+
+    def load_succeed(self, placeholder, container):
+        self._parent_in_hierarchy(placeholder, container)
+
+    def _parent_in_hierarchy(self, placeholder, container):
+        """Parent loaded container to placeholder's parent.
+
+        ie : Set loaded content as placeholder's sibling
+
+        Args:
+            container (str): Placeholder loaded containers
+        """
+
+        if not container:
+            return
+
+        roots = cmds.sets(container, q=True) or []
+        ref_node = None
+        try:
+            ref_node = get_reference_node(roots)
+        except AssertionError as e:
+            self.log.info(e.args[0])
+
+        nodes_to_parent = []
+        for root in roots:
+            if ref_node:
+                ref_root = cmds.referenceQuery(root, nodes=True)[0]
+                ref_root = (
+                    cmds.listRelatives(ref_root, parent=True, path=True) or
+                    [ref_root]
+                )
+                nodes_to_parent.extend(ref_root)
+                continue
+            if root.endswith("_RN"):
+                # Backwards compatibility for hardcoded reference names.
+                refRoot = cmds.referenceQuery(root, n=True)[0]
|
||||
refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot]
|
||||
nodes_to_parent.extend(refRoot)
|
||||
elif root not in cmds.listSets(allSets=True):
|
||||
nodes_to_parent.append(root)
|
||||
|
||||
elif not cmds.sets(root, q=True):
|
||||
return
|
||||
|
||||
# Move loaded nodes to correct index in outliner hierarchy
|
||||
placeholder_form = cmds.xform(
|
||||
placeholder.scene_identifier,
|
||||
q=True,
|
||||
matrix=True,
|
||||
worldSpace=True
|
||||
)
|
||||
scene_parent = cmds.listRelatives(
|
||||
placeholder.scene_identifier, parent=True, fullPath=True
|
||||
)
|
||||
for node in set(nodes_to_parent):
|
||||
cmds.reorder(node, front=True)
|
||||
cmds.reorder(node, relative=placeholder.data["index"])
|
||||
cmds.xform(node, matrix=placeholder_form, ws=True)
|
||||
if scene_parent:
|
||||
cmds.parent(node, scene_parent)
|
||||
else:
|
||||
if cmds.listRelatives(node, parent=True):
|
||||
cmds.parent(node, world=True)
|
||||
|
||||
holding_sets = cmds.listSets(object=placeholder.scene_identifier)
|
||||
if not holding_sets:
|
||||
return
|
||||
for holding_set in holding_sets:
|
||||
cmds.sets(roots, forceElement=holding_set)
|
||||
|
|
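The `parent` and `index` attributes imprinted in `create_placeholder` are read back by `_parse_placeholder_node_data` and drive `_parent_in_hierarchy` when loaded content replaces the locator. A minimal sketch of the data such a placeholder carries, inferred from `_create_placeholder_name` above; the identifier and loader-argument values here are made up for illustration:

    # Hypothetical values only; real data comes from the placeholder dialog.
    placeholder_data = {
        "plugin_identifier": "maya.load",           # assumed identifier value
        "builder_type": "context_folder",
        "product_type": "model",
        "loader_args": "{'namespace': 'char01'}",   # parsed via json.loads above
    }
    # _create_placeholder_name() would then yield "Context_model_char01_folder"
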
@@ -7,7 +7,7 @@ from maya import cmds

import ayon_api

from ayon_core.pipeline import get_current_project_name
import ayon_core.hosts.maya.lib as maya_lib
import ayon_core.hosts.maya.api.lib as maya_lib
from . import lib
from .alembic import get_alembic_ids_cache

@@ -1495,18 +1495,28 @@ class WorkfileSettings(object):

        filter_knobs = [
            "viewerProcess",
            "wipe_position"
            "wipe_position",
            "monitorOutOutputTransform"
        ]

        display, viewer = get_viewer_config_from_string(
            viewer_dict["viewerProcess"]
        )
        viewer_process = create_viewer_profile_string(
            viewer, display, path_like=False
        )
        display, viewer = get_viewer_config_from_string(
            viewer_dict["output_transform"]
        )
        output_transform = create_viewer_profile_string(
            viewer, display, path_like=False
        )
        erased_viewers = []
        for v in nuke.allNodes(filter="Viewer"):
            # set viewerProcess to preset from settings
            v["viewerProcess"].setValue(
                str(viewer_dict["viewerProcess"])
            )
            v["viewerProcess"].setValue(viewer_process)

            if str(viewer_dict["viewerProcess"]) \
                    not in v["viewerProcess"].value():
            if viewer_process not in v["viewerProcess"].value():
                copy_inputs = v.dependencies()
                copy_knobs = {k: v[k].value() for k in v.knobs()
                              if k not in filter_knobs}

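For context: the two helpers used above convert between the `display/view` notation stored in AYON settings and the string shown on a Viewer node's `viewerProcess` knob. A rough sketch of the expected round trip; the exact output format lives in the Nuke host lib and may differ:

    # Assumed behaviour, for illustration only.
    display, viewer = get_viewer_config_from_string("ACES/sRGB")
    # -> display == "ACES", viewer == "sRGB"
    viewer_process = create_viewer_profile_string(viewer, display, path_like=False)
    # -> a knob-style string such as "sRGB (ACES)", set on every Viewer node
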
@@ -1524,11 +1534,11 @@ class WorkfileSettings(object):

                # set copied knobs
                for k, v in copy_knobs.items():
                    print(k, v)
                    nv[k].setValue(v)

                # set viewerProcess
                nv["viewerProcess"].setValue(str(viewer_dict["viewerProcess"]))
                nv["viewerProcess"].setValue(viewer_process)
                nv["monitorOutOutputTransform"].setValue(output_transform)

        if erased_viewers:
            log.warning(

@@ -1547,7 +1557,6 @@ class WorkfileSettings(object):
            host_name="nuke"
        )

        viewer_process_settings = imageio_host["viewer"]["viewerProcess"]
        workfile_settings = imageio_host["workfile"]
        color_management = workfile_settings["color_management"]
        native_ocio_config = workfile_settings["native_ocio_config"]

@@ -1574,29 +1583,6 @@ class WorkfileSettings(object):
                residual_path
            ))

        # get monitor lut from settings respecting Nuke version differences
        monitor_lut = workfile_settings["thumbnail_space"]
        monitor_lut_data = self._get_monitor_settings(
            viewer_process_settings, monitor_lut
        )
        monitor_lut_data["workingSpaceLUT"] = (
            workfile_settings["working_space"]
        )

        # then set the rest
        for knob, value_ in monitor_lut_data.items():
            # skip unfilled ocio config path
            # it will be dict in value
            if isinstance(value_, dict):
                continue
            # skip empty values
            if not value_:
                continue
            if self._root_node[knob].value() not in value_:
                self._root_node[knob].setValue(str(value_))
                log.debug("nuke.root()['{}'] changed to: {}".format(
                    knob, value_))

        # set ocio config path
        if config_data:
            config_path = config_data["path"].replace("\\", "/")

@@ -1611,6 +1597,31 @@ class WorkfileSettings(object):
        if correct_settings:
            self._set_ocio_config_path_to_workfile(config_data)

        # get monitor lut from settings respecting Nuke version differences
        monitor_lut_data = self._get_monitor_settings(
            workfile_settings["monitor_out_lut"],
            workfile_settings["monitor_lut"]
        )
        monitor_lut_data.update({
            "workingSpaceLUT": workfile_settings["working_space"],
            "int8Lut": workfile_settings["int_8_lut"],
            "int16Lut": workfile_settings["int_16_lut"],
            "logLut": workfile_settings["log_lut"],
            "floatLut": workfile_settings["float_lut"]
        })

        # then set the rest
        for knob, value_ in monitor_lut_data.items():
            # skip unfilled ocio config path
            # it will be dict in value
            if isinstance(value_, dict):
                continue
            # skip empty values
            if not value_:
                continue
            self._root_node[knob].setValue(str(value_))
            log.debug("nuke.root()['{}'] changed to: {}".format(knob, value_))

    def _get_monitor_settings(self, viewer_lut, monitor_lut):
        """Get monitor settings from viewer and monitor lut.

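The keys of `monitor_lut_data` are Nuke root knob names; a value that is still a dict (an unfilled OCIO config placeholder in the settings) or empty is skipped, everything else is written as a string. An illustrative example with made-up settings values:

    # Hypothetical settings values, for illustration only.
    monitor_lut_data = {
        "workingSpaceLUT": "scene_linear",
        "int8Lut": "matte_paint",
        "int16Lut": "texture_paint",
        "logLut": "compositing_log",
        "floatLut": "scene_linear",
    }
    # each entry ends up as nuke.root()[knob].setValue(str(value_))
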
@@ -18,6 +18,7 @@ from ayon_core.pipeline import (
    register_loader_plugin_path,
    register_creator_plugin_path,
    register_inventory_action_path,
    register_workfile_build_plugin_path,
    AYON_INSTANCE_ID,
    AVALON_INSTANCE_ID,
    AVALON_CONTAINER_ID,

@@ -52,8 +53,6 @@ from .lib import (
    MENU_LABEL,
)
from .workfile_template_builder import (
    NukePlaceholderLoadPlugin,
    NukePlaceholderCreatePlugin,
    build_workfile_template,
    create_placeholder,
    update_placeholder,

@@ -76,6 +75,7 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build")

# registering pyblish gui regarding settings in presets
if os.getenv("PYBLISH_GUI", None):

@@ -105,18 +105,11 @@ class NukeHost(
    def get_workfile_extensions(self):
        return file_extensions()

    def get_workfile_build_placeholder_plugins(self):
        return [
            NukePlaceholderLoadPlugin,
            NukePlaceholderCreatePlugin
        ]

    def get_containers(self):
        return ls()

    def install(self):
        ''' Installing all requarements for Nuke host
        '''
        """Installing all requirements for Nuke host"""

        pyblish.api.register_host("nuke")

@@ -125,6 +118,7 @@ class NukeHost(
        register_loader_plugin_path(LOAD_PATH)
        register_creator_plugin_path(CREATE_PATH)
        register_inventory_action_path(INVENTORY_PATH)
        register_workfile_build_plugin_path(WORKFILE_BUILD_PATH)

        # Register AYON event for workfiles loading.
        register_event_callback("workio.open_file", check_inventory_versions)

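With `register_workfile_build_plugin_path`, the placeholder plugins are discovered from the registered directory instead of being returned by the host's `get_workfile_build_placeholder_plugins` (removed above). A hypothetical sketch of the resulting layout; the module names are assumptions, while the class names match the new files added later in this diff:

    # plugins/workfile_build/load_placeholder.py    -> NukePlaceholderLoadPlugin
    # plugins/workfile_build/create_placeholder.py  -> NukePlaceholderCreatePlugin
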
@@ -178,7 +172,6 @@ def add_nuke_callbacks():
    # set apply all workfile settings on script load and save
    nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)

    if nuke_settings["dirmap"]["enabled"]:
        log.info("Added Nuke's dir-mapping callback ...")
        # Add dirmap for file paths.

@@ -1151,7 +1151,6 @@ def _remove_old_knobs(node):
        "OpenpypeDataGroup", "OpenpypeDataGroup_End", "deadlinePriority",
        "deadlineChunkSize", "deadlineConcurrentTasks", "Deadline"
    ]
    print(node.name())

    # remove all old knobs
    for knob in node.allKnobs():

@@ -1,30 +1,17 @@
import collections
import nuke

from ayon_core.pipeline import registered_host
from ayon_core.pipeline.workfile.workfile_template_builder import (
    AbstractTemplateBuilder,
    PlaceholderPlugin,
    LoadPlaceholderItem,
    CreatePlaceholderItem,
    PlaceholderLoadMixin,
    PlaceholderCreateMixin,
)
from ayon_core.tools.workfile_template_build import (
    WorkfileBuildPlaceholderDialog,
)
from .lib import (
    find_free_space_to_paste_nodes,
    get_extreme_positions,
    get_group_io_nodes,
    imprint,
    refresh_node,
    refresh_nodes,
    reset_selection,
    get_names_from_nodes,
    get_nodes_by_names,
    select_nodes,
    duplicate_node,
    node_tempfile,
    get_main_window,
    WorkfileSettings,
)

@@ -54,6 +41,7 @@ class NukeTemplateBuilder(AbstractTemplateBuilder):

        return True


class NukePlaceholderPlugin(PlaceholderPlugin):
    node_color = 4278190335

@@ -120,843 +108,6 @@ class NukePlaceholderPlugin(PlaceholderPlugin):
        nuke.delete(placeholder_node)


class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
    identifier = "nuke.load"
    label = "Nuke load"

    def _parse_placeholder_node_data(self, node):
        placeholder_data = super(
            NukePlaceholderLoadPlugin, self
        )._parse_placeholder_node_data(node)

        node_knobs = node.knobs()
        nb_children = 0
        if "nb_children" in node_knobs:
            nb_children = int(node_knobs["nb_children"].getValue())
        placeholder_data["nb_children"] = nb_children

        siblings = []
        if "siblings" in node_knobs:
            siblings = node_knobs["siblings"].values()
        placeholder_data["siblings"] = siblings

        node_full_name = node.fullName()
        placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
        placeholder_data["last_loaded"] = []
        placeholder_data["delete"] = False
        return placeholder_data

    def _get_loaded_repre_ids(self):
        loaded_representation_ids = self.builder.get_shared_populate_data(
            "loaded_representation_ids"
        )
        if loaded_representation_ids is None:
            loaded_representation_ids = set()
            for node in nuke.allNodes():
                if "repre_id" in node.knobs():
                    loaded_representation_ids.add(
                        node.knob("repre_id").getValue()
                    )

            self.builder.set_shared_populate_data(
                "loaded_representation_ids", loaded_representation_ids
            )
        return loaded_representation_ids

    def _before_placeholder_load(self, placeholder):
        placeholder.data["nodes_init"] = nuke.allNodes()

    def _before_repre_load(self, placeholder, representation):
        placeholder.data["last_repre_id"] = representation["id"]

    def collect_placeholders(self):
        output = []
        scene_placeholders = self._collect_scene_placeholders()
        for node_name, node in scene_placeholders.items():
            plugin_identifier_knob = node.knob("plugin_identifier")
            if (
                plugin_identifier_knob is None
                or plugin_identifier_knob.getValue() != self.identifier
            ):
                continue

            placeholder_data = self._parse_placeholder_node_data(node)
            # TODO do data validations and maybe upgrades if they are invalid
            output.append(
                LoadPlaceholderItem(node_name, placeholder_data, self)
            )

        return output

    def populate_placeholder(self, placeholder):
        self.populate_load_placeholder(placeholder)

    def repopulate_placeholder(self, placeholder):
        repre_ids = self._get_loaded_repre_ids()
        self.populate_load_placeholder(placeholder, repre_ids)

    def get_placeholder_options(self, options=None):
        return self.get_load_plugin_options(options)

    def post_placeholder_process(self, placeholder, failed):
        """Cleanup placeholder after load of its corresponding representations.

        Args:
            placeholder (PlaceholderItem): Item which was just used to load
                representation.
            failed (bool): Loading of representation failed.
        """
        # deselect all selected nodes
        placeholder_node = nuke.toNode(placeholder.scene_identifier)

        # getting the latest nodes added
        # TODO get from shared populate data!
        nodes_init = placeholder.data["nodes_init"]
        nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init))
        self.log.debug("Loaded nodes: {}".format(nodes_loaded))
        if not nodes_loaded:
            return

        placeholder.data["delete"] = True

        nodes_loaded = self._move_to_placeholder_group(
            placeholder, nodes_loaded
        )
        placeholder.data["last_loaded"] = nodes_loaded
        refresh_nodes(nodes_loaded)

        # positioning of the loaded nodes
        min_x, min_y, _, _ = get_extreme_positions(nodes_loaded)
        for node in nodes_loaded:
            xpos = (node.xpos() - min_x) + placeholder_node.xpos()
            ypos = (node.ypos() - min_y) + placeholder_node.ypos()
            node.setXYpos(xpos, ypos)
        refresh_nodes(nodes_loaded)

        # fix the problem of z_order for backdrops
        self._fix_z_order(placeholder)

        if placeholder.data.get("keep_placeholder"):
            self._imprint_siblings(placeholder)

        if placeholder.data["nb_children"] == 0:
            # save initial nodes positions and dimensions, update them
            # and set inputs and outputs of loaded nodes
            if placeholder.data.get("keep_placeholder"):
                self._imprint_inits()
                self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded)

            self._set_loaded_connections(placeholder)

        elif placeholder.data["siblings"]:
            # create copies of placeholder siblings for the new loaded nodes,
            # set their inputs and outputs and update all nodes positions and
            # dimensions and siblings names

            siblings = get_nodes_by_names(placeholder.data["siblings"])
            refresh_nodes(siblings)
            copies = self._create_sib_copies(placeholder)
            new_nodes = list(copies.values())  # copies nodes
            self._update_nodes(placeholder, new_nodes, nodes_loaded)
            placeholder_node.removeKnob(placeholder_node.knob("siblings"))
            new_nodes_name = get_names_from_nodes(new_nodes)
            imprint(placeholder_node, {"siblings": new_nodes_name})
            self._set_copies_connections(placeholder, copies)

            self._update_nodes(
                placeholder,
                nuke.allNodes(),
                new_nodes + nodes_loaded,
                20
            )

            new_siblings = get_names_from_nodes(new_nodes)
            placeholder.data["siblings"] = new_siblings

        else:
            # if the placeholder doesn't have siblings, the loaded
            # nodes will be placed in a free space

            xpointer, ypointer = find_free_space_to_paste_nodes(
                nodes_loaded, direction="bottom", offset=200
            )
            node = nuke.createNode("NoOp")
            reset_selection()
            nuke.delete(node)
            for node in nodes_loaded:
                xpos = (node.xpos() - min_x) + xpointer
                ypos = (node.ypos() - min_y) + ypointer
                node.setXYpos(xpos, ypos)

        placeholder.data["nb_children"] += 1
        reset_selection()

        # go back to root group
        nuke.root().begin()

    def _move_to_placeholder_group(self, placeholder, nodes_loaded):
        """Open the placeholder's group and copy loaded nodes into it.

        Returns:
            nodes_loaded (list): the new list of pasted nodes
        """
        groups_name = placeholder.data["group_name"]
        reset_selection()
        select_nodes(nodes_loaded)
        if groups_name:
            with node_tempfile() as filepath:
                nuke.nodeCopy(filepath)
                for node in nuke.selectedNodes():
                    nuke.delete(node)
                group = nuke.toNode(groups_name)
                group.begin()
                nuke.nodePaste(filepath)
                nodes_loaded = nuke.selectedNodes()
        return nodes_loaded

    def _fix_z_order(self, placeholder):
        """Fix the problem of z_order when a backdrop is loaded."""
        nodes_loaded = placeholder.data["last_loaded"]
        loaded_backdrops = []
        bd_orders = set()
        for node in nodes_loaded:
            if isinstance(node, nuke.BackdropNode):
                loaded_backdrops.append(node)
                bd_orders.add(node.knob("z_order").getValue())

        if not bd_orders:
            return

        sib_orders = set()
        for node_name in placeholder.data["siblings"]:
            node = nuke.toNode(node_name)
            if isinstance(node, nuke.BackdropNode):
                sib_orders.add(node.knob("z_order").getValue())

        if not sib_orders:
            return

        min_order = min(bd_orders)
        max_order = max(sib_orders)
        for backdrop_node in loaded_backdrops:
            z_order = backdrop_node.knob("z_order").getValue()
            backdrop_node.knob("z_order").setValue(
                z_order + max_order - min_order + 1)

    def _imprint_siblings(self, placeholder):
        """
        - add siblings names to placeholder attributes (nodes loaded with it)
        - add id to the attributes of all the other nodes
        """
        loaded_nodes = placeholder.data["last_loaded"]
        loaded_nodes_set = set(loaded_nodes)
        data = {"repre_id": str(placeholder.data["last_repre_id"])}

        for node in loaded_nodes:
            node_knobs = node.knobs()
            if "builder_type" not in node_knobs:
                # save the id of representation for all imported nodes
                imprint(node, data)
                node.knob("repre_id").setVisible(False)
                refresh_node(node)
                continue

            if (
                "is_placeholder" not in node_knobs
                or (
                    "is_placeholder" in node_knobs
                    and node.knob("is_placeholder").value()
                )
            ):
                siblings = list(loaded_nodes_set - {node})
                siblings_name = get_names_from_nodes(siblings)
                siblings = {"siblings": siblings_name}
                imprint(node, siblings)

    def _imprint_inits(self):
        """Add initial positions and dimensions to the attributes."""
        for node in nuke.allNodes():
            refresh_node(node)
            imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
            node.knob("x_init").setVisible(False)
            node.knob("y_init").setVisible(False)
            width = node.screenWidth()
            height = node.screenHeight()
            if "bdwidth" in node.knobs():
                imprint(node, {"w_init": width, "h_init": height})
                node.knob("w_init").setVisible(False)
                node.knob("h_init").setVisible(False)
            refresh_node(node)

    def _update_nodes(
        self, placeholder, nodes, considered_nodes, offset_y=None
    ):
        """Adjust backdrop nodes dimensions and positions.

        Takes the size of the considered nodes into account.

        Args:
            placeholder (PlaceholderItem): the placeholder being populated
            nodes (list): list of nodes to update
            considered_nodes (list): list of nodes to consider while updating
                positions and dimensions
            offset_y (int): distance between copies
        """
        placeholder_node = nuke.toNode(placeholder.scene_identifier)

        min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)

        diff_x = diff_y = 0
        contained_nodes = []  # for backdrops

        if offset_y is None:
            width_ph = placeholder_node.screenWidth()
            height_ph = placeholder_node.screenHeight()
            diff_y = max_y - min_y - height_ph
            diff_x = max_x - min_x - width_ph
            contained_nodes = [placeholder_node]
            min_x = placeholder_node.xpos()
            min_y = placeholder_node.ypos()
        else:
            siblings = get_nodes_by_names(placeholder.data["siblings"])
            minX, _, maxX, _ = get_extreme_positions(siblings)
            diff_y = max_y - min_y + 20
            diff_x = abs(max_x - min_x - maxX + minX)
            contained_nodes = considered_nodes

        if diff_y <= 0 and diff_x <= 0:
            return

        for node in nodes:
            refresh_node(node)

            if (
                node == placeholder_node
                or node in considered_nodes
            ):
                continue

            if (
                not isinstance(node, nuke.BackdropNode)
                or (
                    isinstance(node, nuke.BackdropNode)
                    and not set(contained_nodes) <= set(node.getNodes())
                )
            ):
                if offset_y is None and node.xpos() >= min_x:
                    node.setXpos(node.xpos() + diff_x)

                if node.ypos() >= min_y:
                    node.setYpos(node.ypos() + diff_y)

            else:
                width = node.screenWidth()
                height = node.screenHeight()
                node.knob("bdwidth").setValue(width + diff_x)
                node.knob("bdheight").setValue(height + diff_y)

            refresh_node(node)

    def _set_loaded_connections(self, placeholder):
        """Set inputs and outputs of loaded nodes."""
        placeholder_node = nuke.toNode(placeholder.scene_identifier)
        input_node, output_node = get_group_io_nodes(
            placeholder.data["last_loaded"]
        )
        for node in placeholder_node.dependent():
            for idx in range(node.inputs()):
                if node.input(idx) == placeholder_node and output_node:
                    node.setInput(idx, output_node)

        for node in placeholder_node.dependencies():
            for idx in range(placeholder_node.inputs()):
                if placeholder_node.input(idx) == node and input_node:
                    input_node.setInput(0, node)

    def _create_sib_copies(self, placeholder):
        """Create copies of the placeholder siblings (the ones that were
        loaded with it) for the new nodes added.

        Returns:
            copies (dict): copied nodes names and their copies
        """
        copies = {}
        siblings = get_nodes_by_names(placeholder.data["siblings"])
        for node in siblings:
            new_node = duplicate_node(node)

            x_init = int(new_node.knob("x_init").getValue())
            y_init = int(new_node.knob("y_init").getValue())
            new_node.setXYpos(x_init, y_init)
            if isinstance(new_node, nuke.BackdropNode):
                w_init = new_node.knob("w_init").getValue()
                h_init = new_node.knob("h_init").getValue()
                new_node.knob("bdwidth").setValue(w_init)
                new_node.knob("bdheight").setValue(h_init)
                refresh_node(node)

            if "repre_id" in node.knobs().keys():
                node.removeKnob(node.knob("repre_id"))
            copies[node.name()] = new_node
        return copies

    def _set_copies_connections(self, placeholder, copies):
        """Set inputs and outputs of the copies.

        Args:
            copies (dict): Copied nodes by their names.
        """
        last_input, last_output = get_group_io_nodes(
            placeholder.data["last_loaded"]
        )
        siblings = get_nodes_by_names(placeholder.data["siblings"])
        siblings_input, siblings_output = get_group_io_nodes(siblings)
        copy_input = copies[siblings_input.name()]
        copy_output = copies[siblings_output.name()]

        for node_init in siblings:
            if node_init == siblings_output:
                continue

            node_copy = copies[node_init.name()]
            for node in node_init.dependent():
                for idx in range(node.inputs()):
                    if node.input(idx) != node_init:
                        continue

                    if node in siblings:
                        copies[node.name()].setInput(idx, node_copy)
                    else:
                        last_input.setInput(0, node_copy)

            for node in node_init.dependencies():
                for idx in range(node_init.inputs()):
                    if node_init.input(idx) != node:
                        continue

                    if node_init == siblings_input:
                        copy_input.setInput(idx, node)
                    elif node in siblings:
                        node_copy.setInput(idx, copies[node.name()])
                    else:
                        node_copy.setInput(idx, last_output)

        siblings_input.setInput(0, copy_output)


class NukePlaceholderCreatePlugin(
    NukePlaceholderPlugin, PlaceholderCreateMixin
):
    identifier = "nuke.create"
    label = "Nuke create"

    def _parse_placeholder_node_data(self, node):
        placeholder_data = super(
            NukePlaceholderCreatePlugin, self
        )._parse_placeholder_node_data(node)

        node_knobs = node.knobs()
        nb_children = 0
        if "nb_children" in node_knobs:
            nb_children = int(node_knobs["nb_children"].getValue())
        placeholder_data["nb_children"] = nb_children

        siblings = []
        if "siblings" in node_knobs:
            siblings = node_knobs["siblings"].values()
        placeholder_data["siblings"] = siblings

        node_full_name = node.fullName()
        placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
        placeholder_data["last_loaded"] = []
        placeholder_data["delete"] = False
        return placeholder_data

    def _before_instance_create(self, placeholder):
        placeholder.data["nodes_init"] = nuke.allNodes()

    def collect_placeholders(self):
        output = []
        scene_placeholders = self._collect_scene_placeholders()
        for node_name, node in scene_placeholders.items():
            plugin_identifier_knob = node.knob("plugin_identifier")
            if (
                plugin_identifier_knob is None
                or plugin_identifier_knob.getValue() != self.identifier
            ):
                continue

            placeholder_data = self._parse_placeholder_node_data(node)

            output.append(
                CreatePlaceholderItem(node_name, placeholder_data, self)
            )

        return output

    def populate_placeholder(self, placeholder):
        self.populate_create_placeholder(placeholder)

    def repopulate_placeholder(self, placeholder):
        self.populate_create_placeholder(placeholder)

    def get_placeholder_options(self, options=None):
        return self.get_create_plugin_options(options)

    def post_placeholder_process(self, placeholder, failed):
        """Cleanup placeholder after load of its corresponding representations.

        Args:
            placeholder (PlaceholderItem): Item which was just used to load
                representation.
            failed (bool): Loading of representation failed.
        """
        # deselect all selected nodes
        placeholder_node = nuke.toNode(placeholder.scene_identifier)

        # getting the latest nodes added
        nodes_init = placeholder.data["nodes_init"]
        nodes_created = list(set(nuke.allNodes()) - set(nodes_init))
        self.log.debug("Created nodes: {}".format(nodes_created))
        if not nodes_created:
            return

        placeholder.data["delete"] = True

        nodes_created = self._move_to_placeholder_group(
            placeholder, nodes_created
        )
        placeholder.data["last_created"] = nodes_created
        refresh_nodes(nodes_created)

        # positioning of the created nodes
        min_x, min_y, _, _ = get_extreme_positions(nodes_created)
        for node in nodes_created:
            xpos = (node.xpos() - min_x) + placeholder_node.xpos()
            ypos = (node.ypos() - min_y) + placeholder_node.ypos()
            node.setXYpos(xpos, ypos)
        refresh_nodes(nodes_created)

        # fix the problem of z_order for backdrops
        self._fix_z_order(placeholder)

        if placeholder.data.get("keep_placeholder"):
            self._imprint_siblings(placeholder)

        if placeholder.data["nb_children"] == 0:
            # save initial nodes positions and dimensions, update them
            # and set inputs and outputs of created nodes

            if placeholder.data.get("keep_placeholder"):
                self._imprint_inits()
                self._update_nodes(placeholder, nuke.allNodes(), nodes_created)

            self._set_created_connections(placeholder)

        elif placeholder.data["siblings"]:
            # create copies of placeholder siblings for the new created nodes,
            # set their inputs and outputs and update all nodes positions and
            # dimensions and siblings names

            siblings = get_nodes_by_names(placeholder.data["siblings"])
            refresh_nodes(siblings)
            copies = self._create_sib_copies(placeholder)
            new_nodes = list(copies.values())  # copies nodes
            self._update_nodes(placeholder, new_nodes, nodes_created)
            placeholder_node.removeKnob(placeholder_node.knob("siblings"))
            new_nodes_name = get_names_from_nodes(new_nodes)
            imprint(placeholder_node, {"siblings": new_nodes_name})
            self._set_copies_connections(placeholder, copies)

            self._update_nodes(
                placeholder,
                nuke.allNodes(),
                new_nodes + nodes_created,
                20
            )

            new_siblings = get_names_from_nodes(new_nodes)
            placeholder.data["siblings"] = new_siblings

        else:
            # if the placeholder doesn't have siblings, the created
            # nodes will be placed in a free space

            xpointer, ypointer = find_free_space_to_paste_nodes(
                nodes_created, direction="bottom", offset=200
            )
            node = nuke.createNode("NoOp")
            reset_selection()
            nuke.delete(node)
            for node in nodes_created:
                xpos = (node.xpos() - min_x) + xpointer
                ypos = (node.ypos() - min_y) + ypointer
                node.setXYpos(xpos, ypos)

        placeholder.data["nb_children"] += 1
        reset_selection()

        # go back to root group
        nuke.root().begin()

    def _move_to_placeholder_group(self, placeholder, nodes_created):
        """Open the placeholder's group and copy created nodes into it.

        Returns:
            nodes_created (list): the new list of pasted nodes
        """
        groups_name = placeholder.data["group_name"]
        reset_selection()
        select_nodes(nodes_created)
        if groups_name:
            with node_tempfile() as filepath:
                nuke.nodeCopy(filepath)
                for node in nuke.selectedNodes():
                    nuke.delete(node)
                group = nuke.toNode(groups_name)
                group.begin()
                nuke.nodePaste(filepath)
                nodes_created = nuke.selectedNodes()
        return nodes_created

    def _fix_z_order(self, placeholder):
        """Fix the problem of z_order when a backdrop is created."""
        nodes_created = placeholder.data["last_created"]
        created_backdrops = []
        bd_orders = set()
        for node in nodes_created:
            if isinstance(node, nuke.BackdropNode):
                created_backdrops.append(node)
                bd_orders.add(node.knob("z_order").getValue())

        if not bd_orders:
            return

        sib_orders = set()
        for node_name in placeholder.data["siblings"]:
            node = nuke.toNode(node_name)
            if isinstance(node, nuke.BackdropNode):
                sib_orders.add(node.knob("z_order").getValue())

        if not sib_orders:
            return

        min_order = min(bd_orders)
        max_order = max(sib_orders)
        for backdrop_node in created_backdrops:
            z_order = backdrop_node.knob("z_order").getValue()
            backdrop_node.knob("z_order").setValue(
                z_order + max_order - min_order + 1)

    def _imprint_siblings(self, placeholder):
        """
        - add siblings names to placeholder attributes (nodes created with it)
        - add id to the attributes of all the other nodes
        """
        created_nodes = placeholder.data["last_created"]
        created_nodes_set = set(created_nodes)

        for node in created_nodes:
            node_knobs = node.knobs()

            if (
                "is_placeholder" not in node_knobs
                or (
                    "is_placeholder" in node_knobs
                    and node.knob("is_placeholder").value()
                )
            ):
                siblings = list(created_nodes_set - {node})
                siblings_name = get_names_from_nodes(siblings)
                siblings = {"siblings": siblings_name}
                imprint(node, siblings)

    def _imprint_inits(self):
        """Add initial positions and dimensions to the attributes."""
        for node in nuke.allNodes():
            refresh_node(node)
            imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
            node.knob("x_init").setVisible(False)
            node.knob("y_init").setVisible(False)
            width = node.screenWidth()
            height = node.screenHeight()
            if "bdwidth" in node.knobs():
                imprint(node, {"w_init": width, "h_init": height})
                node.knob("w_init").setVisible(False)
                node.knob("h_init").setVisible(False)
            refresh_node(node)

    def _update_nodes(
        self, placeholder, nodes, considered_nodes, offset_y=None
    ):
        """Adjust backdrop nodes dimensions and positions.

        Takes the size of the considered nodes into account.

        Args:
            placeholder (PlaceholderItem): the placeholder being populated
            nodes (list): list of nodes to update
            considered_nodes (list): list of nodes to consider while updating
                positions and dimensions
            offset_y (int): distance between copies
        """
        placeholder_node = nuke.toNode(placeholder.scene_identifier)

        min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)

        diff_x = diff_y = 0
        contained_nodes = []  # for backdrops

        if offset_y is None:
            width_ph = placeholder_node.screenWidth()
            height_ph = placeholder_node.screenHeight()
            diff_y = max_y - min_y - height_ph
            diff_x = max_x - min_x - width_ph
            contained_nodes = [placeholder_node]
            min_x = placeholder_node.xpos()
            min_y = placeholder_node.ypos()
        else:
            siblings = get_nodes_by_names(placeholder.data["siblings"])
            minX, _, maxX, _ = get_extreme_positions(siblings)
            diff_y = max_y - min_y + 20
            diff_x = abs(max_x - min_x - maxX + minX)
            contained_nodes = considered_nodes

        if diff_y <= 0 and diff_x <= 0:
            return

        for node in nodes:
            refresh_node(node)

            if (
                node == placeholder_node
                or node in considered_nodes
            ):
                continue

            if (
                not isinstance(node, nuke.BackdropNode)
                or (
                    isinstance(node, nuke.BackdropNode)
                    and not set(contained_nodes) <= set(node.getNodes())
                )
            ):
                if offset_y is None and node.xpos() >= min_x:
                    node.setXpos(node.xpos() + diff_x)

                if node.ypos() >= min_y:
                    node.setYpos(node.ypos() + diff_y)

            else:
                width = node.screenWidth()
                height = node.screenHeight()
                node.knob("bdwidth").setValue(width + diff_x)
                node.knob("bdheight").setValue(height + diff_y)

            refresh_node(node)

    def _set_created_connections(self, placeholder):
        """Set inputs and outputs of created nodes."""
        placeholder_node = nuke.toNode(placeholder.scene_identifier)
        input_node, output_node = get_group_io_nodes(
            placeholder.data["last_created"]
        )
        for node in placeholder_node.dependent():
            for idx in range(node.inputs()):
                if node.input(idx) == placeholder_node and output_node:
                    node.setInput(idx, output_node)

        for node in placeholder_node.dependencies():
            for idx in range(placeholder_node.inputs()):
                if placeholder_node.input(idx) == node and input_node:
                    input_node.setInput(0, node)

    def _create_sib_copies(self, placeholder):
        """Create copies of the placeholder siblings (the ones that were
        created with it) for the new nodes added.

        Returns:
            copies (dict): copied nodes names and their copies
        """
        copies = {}
        siblings = get_nodes_by_names(placeholder.data["siblings"])
        for node in siblings:
            new_node = duplicate_node(node)

            x_init = int(new_node.knob("x_init").getValue())
            y_init = int(new_node.knob("y_init").getValue())
            new_node.setXYpos(x_init, y_init)
            if isinstance(new_node, nuke.BackdropNode):
                w_init = new_node.knob("w_init").getValue()
                h_init = new_node.knob("h_init").getValue()
                new_node.knob("bdwidth").setValue(w_init)
                new_node.knob("bdheight").setValue(h_init)
                refresh_node(node)

            if "repre_id" in node.knobs().keys():
                node.removeKnob(node.knob("repre_id"))
            copies[node.name()] = new_node
        return copies

    def _set_copies_connections(self, placeholder, copies):
        """Set inputs and outputs of the copies.

        Args:
            copies (dict): Copied nodes by their names.
        """
        last_input, last_output = get_group_io_nodes(
            placeholder.data["last_created"]
        )
        siblings = get_nodes_by_names(placeholder.data["siblings"])
        siblings_input, siblings_output = get_group_io_nodes(siblings)
        copy_input = copies[siblings_input.name()]
        copy_output = copies[siblings_output.name()]

        for node_init in siblings:
            if node_init == siblings_output:
                continue

            node_copy = copies[node_init.name()]
            for node in node_init.dependent():
                for idx in range(node.inputs()):
                    if node.input(idx) != node_init:
                        continue

                    if node in siblings:
                        copies[node.name()].setInput(idx, node_copy)
                    else:
                        last_input.setInput(0, node_copy)

            for node in node_init.dependencies():
                for idx in range(node_init.inputs()):
                    if node_init.input(idx) != node:
                        continue

                    if node_init == siblings_input:
                        copy_input.setInput(idx, node)
                    elif node in siblings:
                        node_copy.setInput(idx, copies[node.name()])
                    else:
                        node_copy.setInput(idx, last_output)

        siblings_input.setInput(0, copy_output)


def build_workfile_template(*args, **kwargs):
    builder = NukeTemplateBuilder(registered_host())
    builder.build_template(*args, **kwargs)

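`build_workfile_template` stays as the module-level entry point. A minimal usage sketch, e.g. from a menu callback, assuming the Nuke host was already installed so `registered_host()` resolves:

    from ayon_core.hosts.nuke.api.workfile_template_builder import (
        build_workfile_template,
    )

    # Build (or rebuild) the current workfile from the configured template.
    build_workfile_template()
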
@@ -0,0 +1,428 @@
import nuke

from ayon_core.pipeline.workfile.workfile_template_builder import (
    CreatePlaceholderItem,
    PlaceholderCreateMixin,
)
from ayon_core.hosts.nuke.api.lib import (
    find_free_space_to_paste_nodes,
    get_extreme_positions,
    get_group_io_nodes,
    imprint,
    refresh_node,
    refresh_nodes,
    reset_selection,
    get_names_from_nodes,
    get_nodes_by_names,
    select_nodes,
    duplicate_node,
    node_tempfile,
)
from ayon_core.hosts.nuke.api.workfile_template_builder import (
    NukePlaceholderPlugin
)


class NukePlaceholderCreatePlugin(
    NukePlaceholderPlugin, PlaceholderCreateMixin
):
    identifier = "nuke.create"
    label = "Nuke create"

    def _parse_placeholder_node_data(self, node):
        placeholder_data = super(
            NukePlaceholderCreatePlugin, self
        )._parse_placeholder_node_data(node)

        node_knobs = node.knobs()
        nb_children = 0
        if "nb_children" in node_knobs:
            nb_children = int(node_knobs["nb_children"].getValue())
        placeholder_data["nb_children"] = nb_children

        siblings = []
        if "siblings" in node_knobs:
            siblings = node_knobs["siblings"].values()
        placeholder_data["siblings"] = siblings

        node_full_name = node.fullName()
        placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
        placeholder_data["last_loaded"] = []
        placeholder_data["delete"] = False
        return placeholder_data

    def _before_instance_create(self, placeholder):
        placeholder.data["nodes_init"] = nuke.allNodes()

    def collect_placeholders(self):
        output = []
        scene_placeholders = self._collect_scene_placeholders()
        for node_name, node in scene_placeholders.items():
            plugin_identifier_knob = node.knob("plugin_identifier")
            if (
                plugin_identifier_knob is None
                or plugin_identifier_knob.getValue() != self.identifier
            ):
                continue

            placeholder_data = self._parse_placeholder_node_data(node)

            output.append(
                CreatePlaceholderItem(node_name, placeholder_data, self)
            )

        return output

    def populate_placeholder(self, placeholder):
        self.populate_create_placeholder(placeholder)

    def repopulate_placeholder(self, placeholder):
        self.populate_create_placeholder(placeholder)

    def get_placeholder_options(self, options=None):
        return self.get_create_plugin_options(options)

    def post_placeholder_process(self, placeholder, failed):
        """Cleanup placeholder after load of its corresponding representations.

        Args:
            placeholder (PlaceholderItem): Item which was just used to load
                representation.
            failed (bool): Loading of representation failed.
        """
        # deselect all selected nodes
        placeholder_node = nuke.toNode(placeholder.scene_identifier)

        # getting the latest nodes added
        nodes_init = placeholder.data["nodes_init"]
        nodes_created = list(set(nuke.allNodes()) - set(nodes_init))
        self.log.debug("Created nodes: {}".format(nodes_created))
        if not nodes_created:
            return

        placeholder.data["delete"] = True

        nodes_created = self._move_to_placeholder_group(
            placeholder, nodes_created
        )
        placeholder.data["last_created"] = nodes_created
        refresh_nodes(nodes_created)

        # positioning of the created nodes
        min_x, min_y, _, _ = get_extreme_positions(nodes_created)
        for node in nodes_created:
            xpos = (node.xpos() - min_x) + placeholder_node.xpos()
            ypos = (node.ypos() - min_y) + placeholder_node.ypos()
            node.setXYpos(xpos, ypos)
        refresh_nodes(nodes_created)

        # fix the problem of z_order for backdrops
        self._fix_z_order(placeholder)

        if placeholder.data.get("keep_placeholder"):
            self._imprint_siblings(placeholder)

        if placeholder.data["nb_children"] == 0:
            # save initial nodes positions and dimensions, update them
            # and set inputs and outputs of created nodes

            if placeholder.data.get("keep_placeholder"):
                self._imprint_inits()
                self._update_nodes(placeholder, nuke.allNodes(), nodes_created)

            self._set_created_connections(placeholder)

        elif placeholder.data["siblings"]:
            # create copies of placeholder siblings for the new created nodes,
            # set their inputs and outputs and update all nodes positions and
            # dimensions and siblings names

            siblings = get_nodes_by_names(placeholder.data["siblings"])
            refresh_nodes(siblings)
            copies = self._create_sib_copies(placeholder)
            new_nodes = list(copies.values())  # copies nodes
            self._update_nodes(placeholder, new_nodes, nodes_created)
            placeholder_node.removeKnob(placeholder_node.knob("siblings"))
            new_nodes_name = get_names_from_nodes(new_nodes)
            imprint(placeholder_node, {"siblings": new_nodes_name})
            self._set_copies_connections(placeholder, copies)

            self._update_nodes(
                placeholder,
                nuke.allNodes(),
                new_nodes + nodes_created,
                20
            )

            new_siblings = get_names_from_nodes(new_nodes)
            placeholder.data["siblings"] = new_siblings

        else:
            # if the placeholder doesn't have siblings, the created
            # nodes will be placed in a free space

            xpointer, ypointer = find_free_space_to_paste_nodes(
                nodes_created, direction="bottom", offset=200
            )
            node = nuke.createNode("NoOp")
            reset_selection()
            nuke.delete(node)
            for node in nodes_created:
                xpos = (node.xpos() - min_x) + xpointer
                ypos = (node.ypos() - min_y) + ypointer
                node.setXYpos(xpos, ypos)

        placeholder.data["nb_children"] += 1
        reset_selection()

        # go back to root group
        nuke.root().begin()

    def _move_to_placeholder_group(self, placeholder, nodes_created):
        """Open the placeholder's group and copy created nodes into it.

        Returns:
            nodes_created (list): the new list of pasted nodes
        """
        groups_name = placeholder.data["group_name"]
        reset_selection()
        select_nodes(nodes_created)
        if groups_name:
            with node_tempfile() as filepath:
                nuke.nodeCopy(filepath)
                for node in nuke.selectedNodes():
                    nuke.delete(node)
                group = nuke.toNode(groups_name)
                group.begin()
                nuke.nodePaste(filepath)
                nodes_created = nuke.selectedNodes()
        return nodes_created

    def _fix_z_order(self, placeholder):
        """Fix the problem of z_order when a backdrop is created."""
        nodes_created = placeholder.data["last_created"]
        created_backdrops = []
        bd_orders = set()
        for node in nodes_created:
            if isinstance(node, nuke.BackdropNode):
                created_backdrops.append(node)
                bd_orders.add(node.knob("z_order").getValue())

        if not bd_orders:
            return

        sib_orders = set()
        for node_name in placeholder.data["siblings"]:
            node = nuke.toNode(node_name)
            if isinstance(node, nuke.BackdropNode):
                sib_orders.add(node.knob("z_order").getValue())

        if not sib_orders:
            return

        min_order = min(bd_orders)
        max_order = max(sib_orders)
        for backdrop_node in created_backdrops:
            z_order = backdrop_node.knob("z_order").getValue()
            backdrop_node.knob("z_order").setValue(
                z_order + max_order - min_order + 1)

    def _imprint_siblings(self, placeholder):
        """
        - add siblings names to placeholder attributes (nodes created with it)
        - add id to the attributes of all the other nodes
        """
        created_nodes = placeholder.data["last_created"]
        created_nodes_set = set(created_nodes)

        for node in created_nodes:
            node_knobs = node.knobs()

            if (
                "is_placeholder" not in node_knobs
                or (
                    "is_placeholder" in node_knobs
                    and node.knob("is_placeholder").value()
                )
            ):
                siblings = list(created_nodes_set - {node})
                siblings_name = get_names_from_nodes(siblings)
                siblings = {"siblings": siblings_name}
                imprint(node, siblings)

    def _imprint_inits(self):
        """Add initial positions and dimensions to the attributes."""
        for node in nuke.allNodes():
            refresh_node(node)
            imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
            node.knob("x_init").setVisible(False)
            node.knob("y_init").setVisible(False)
            width = node.screenWidth()
            height = node.screenHeight()
            if "bdwidth" in node.knobs():
                imprint(node, {"w_init": width, "h_init": height})
                node.knob("w_init").setVisible(False)
                node.knob("h_init").setVisible(False)
            refresh_node(node)

    def _update_nodes(
        self, placeholder, nodes, considered_nodes, offset_y=None
    ):
        """Adjust backdrop nodes dimensions and positions.

        Takes the size of the considered nodes into account.

        Args:
            placeholder (PlaceholderItem): the placeholder being populated
            nodes (list): list of nodes to update
            considered_nodes (list): list of nodes to consider while updating
                positions and dimensions
            offset_y (int): distance between copies
        """
        placeholder_node = nuke.toNode(placeholder.scene_identifier)

        min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)

        diff_x = diff_y = 0
        contained_nodes = []  # for backdrops

        if offset_y is None:
            width_ph = placeholder_node.screenWidth()
            height_ph = placeholder_node.screenHeight()
            diff_y = max_y - min_y - height_ph
            diff_x = max_x - min_x - width_ph
            contained_nodes = [placeholder_node]
            min_x = placeholder_node.xpos()
            min_y = placeholder_node.ypos()
        else:
            siblings = get_nodes_by_names(placeholder.data["siblings"])
            minX, _, maxX, _ = get_extreme_positions(siblings)
            diff_y = max_y - min_y + 20
            diff_x = abs(max_x - min_x - maxX + minX)
            contained_nodes = considered_nodes

        if diff_y <= 0 and diff_x <= 0:
            return

        for node in nodes:
            refresh_node(node)

            if (
                node == placeholder_node
                or node in considered_nodes
            ):
                continue

            if (
                not isinstance(node, nuke.BackdropNode)
                or (
                    isinstance(node, nuke.BackdropNode)
                    and not set(contained_nodes) <= set(node.getNodes())
                )
            ):
                if offset_y is None and node.xpos() >= min_x:
                    node.setXpos(node.xpos() + diff_x)

                if node.ypos() >= min_y:
                    node.setYpos(node.ypos() + diff_y)

            else:
                width = node.screenWidth()
                height = node.screenHeight()
                node.knob("bdwidth").setValue(width + diff_x)
                node.knob("bdheight").setValue(height + diff_y)

            refresh_node(node)

    def _set_created_connections(self, placeholder):
        """Set inputs and outputs of created nodes."""
        placeholder_node = nuke.toNode(placeholder.scene_identifier)
        input_node, output_node = get_group_io_nodes(
            placeholder.data["last_created"]
        )
        for node in placeholder_node.dependent():
            for idx in range(node.inputs()):
                if node.input(idx) == placeholder_node and output_node:
                    node.setInput(idx, output_node)

        for node in placeholder_node.dependencies():
            for idx in range(placeholder_node.inputs()):
                if placeholder_node.input(idx) == node and input_node:
                    input_node.setInput(0, node)

    def _create_sib_copies(self, placeholder):
        """Create copies of the placeholder siblings (the ones that were
        created with it) for the new nodes added.

        Returns:
            copies (dict): copied nodes names and their copies
        """
        copies = {}
        siblings = get_nodes_by_names(placeholder.data["siblings"])
        for node in siblings:
            new_node = duplicate_node(node)

            x_init = int(new_node.knob("x_init").getValue())
            y_init = int(new_node.knob("y_init").getValue())
            new_node.setXYpos(x_init, y_init)
            if isinstance(new_node, nuke.BackdropNode):
                w_init = new_node.knob("w_init").getValue()
                h_init = new_node.knob("h_init").getValue()
                new_node.knob("bdwidth").setValue(w_init)
                new_node.knob("bdheight").setValue(h_init)
                refresh_node(node)

            if "repre_id" in node.knobs().keys():
                node.removeKnob(node.knob("repre_id"))
            copies[node.name()] = new_node
        return copies

    def _set_copies_connections(self, placeholder, copies):
        """Set inputs and outputs of the copies.

        Args:
            copies (dict): Copied nodes by their names.
        """
        last_input, last_output = get_group_io_nodes(
            placeholder.data["last_created"]
        )
        siblings = get_nodes_by_names(placeholder.data["siblings"])
        siblings_input, siblings_output = get_group_io_nodes(siblings)
        copy_input = copies[siblings_input.name()]
        copy_output = copies[siblings_output.name()]

        for node_init in siblings:
            if node_init == siblings_output:
                continue

            node_copy = copies[node_init.name()]
            for node in node_init.dependent():
                for idx in range(node.inputs()):
                    if node.input(idx) != node_init:
                        continue

                    if node in siblings:
                        copies[node.name()].setInput(idx, node_copy)
                    else:
                        last_input.setInput(0, node_copy)

            for node in node_init.dependencies():
                for idx in range(node_init.inputs()):
                    if node_init.input(idx) != node:
                        continue

                    if node_init == siblings_input:
                        copy_input.setInput(idx, node)
                    elif node in siblings:
                        node_copy.setInput(idx, copies[node.name()])
                    else:
                        node_copy.setInput(idx, last_output)

        siblings_input.setInput(0, copy_output)

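A worked example of the offset arithmetic in `_update_nodes` above, assuming a placeholder 80 px wide and 18 px high and considered nodes spanning (min_x, min_y, max_x, max_y) = (0, 0, 300, 200):

    # offset_y is None -> make room where the placeholder sits:
    diff_x = 300 - 0 - 80   # 220 px: nodes right of the placeholder shift right
    diff_y = 200 - 0 - 18   # 182 px: nodes below the placeholder shift down
    # backdrops that contain the placeholder grow by the same amounts instead
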
@ -0,0 +1,455 @@
|
|||
import nuke
|
||||
|
||||
from ayon_core.pipeline.workfile.workfile_template_builder import (
|
||||
LoadPlaceholderItem,
|
||||
PlaceholderLoadMixin,
|
||||
)
|
||||
from ayon_core.hosts.nuke.api.lib import (
|
||||
find_free_space_to_paste_nodes,
|
||||
get_extreme_positions,
|
||||
get_group_io_nodes,
|
||||
imprint,
|
||||
refresh_node,
|
||||
refresh_nodes,
|
||||
reset_selection,
|
||||
get_names_from_nodes,
|
||||
get_nodes_by_names,
|
||||
select_nodes,
|
||||
duplicate_node,
|
||||
node_tempfile,
|
||||
)
|
||||
from ayon_core.hosts.nuke.api.workfile_template_builder import (
|
||||
NukePlaceholderPlugin
|
||||
)
|
||||
|
||||
|
||||
class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
|
||||
identifier = "nuke.load"
|
||||
label = "Nuke load"
|
||||
|
||||
def _parse_placeholder_node_data(self, node):
|
||||
placeholder_data = super(
|
||||
NukePlaceholderLoadPlugin, self
|
||||
)._parse_placeholder_node_data(node)
|
||||
|
||||
node_knobs = node.knobs()
|
||||
nb_children = 0
|
||||
if "nb_children" in node_knobs:
|
||||
nb_children = int(node_knobs["nb_children"].getValue())
|
||||
placeholder_data["nb_children"] = nb_children
|
||||
|
||||
siblings = []
|
||||
if "siblings" in node_knobs:
|
||||
siblings = node_knobs["siblings"].values()
|
||||
placeholder_data["siblings"] = siblings
|
||||
|
||||
node_full_name = node.fullName()
|
||||
placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
|
||||
placeholder_data["last_loaded"] = []
|
||||
placeholder_data["delete"] = False
|
||||
return placeholder_data
|
||||
|
||||
def _get_loaded_repre_ids(self):
|
||||
loaded_representation_ids = self.builder.get_shared_populate_data(
|
||||
"loaded_representation_ids"
|
||||
)
|
||||
if loaded_representation_ids is None:
|
||||
loaded_representation_ids = set()
|
||||
for node in nuke.allNodes():
|
||||
if "repre_id" in node.knobs():
|
||||
loaded_representation_ids.add(
|
||||
node.knob("repre_id").getValue()
|
||||
)
|
||||
|
||||
self.builder.set_shared_populate_data(
|
||||
"loaded_representation_ids", loaded_representation_ids
|
||||
)
|
||||
return loaded_representation_ids
|
||||
|
||||
def _before_placeholder_load(self, placeholder):
|
||||
placeholder.data["nodes_init"] = nuke.allNodes()
|
||||
|
||||
def _before_repre_load(self, placeholder, representation):
|
||||
placeholder.data["last_repre_id"] = representation["id"]
|
||||
|
||||
def collect_placeholders(self):
|
||||
output = []
|
||||
scene_placeholders = self._collect_scene_placeholders()
|
||||
for node_name, node in scene_placeholders.items():
|
||||
plugin_identifier_knob = node.knob("plugin_identifier")
|
||||
if (
|
||||
plugin_identifier_knob is None
|
||||
or plugin_identifier_knob.getValue() != self.identifier
|
||||
):
|
||||
continue
|
||||
|
||||
placeholder_data = self._parse_placeholder_node_data(node)
|
||||
# TODO: do data validations and maybe upgrades if invalid
|
||||
output.append(
|
||||
LoadPlaceholderItem(node_name, placeholder_data, self)
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
def populate_placeholder(self, placeholder):
|
||||
self.populate_load_placeholder(placeholder)
|
||||
|
||||
def repopulate_placeholder(self, placeholder):
|
||||
repre_ids = self._get_loaded_repre_ids()
|
||||
self.populate_load_placeholder(placeholder, repre_ids)
|
||||
|
||||
def get_placeholder_options(self, options=None):
|
||||
return self.get_load_plugin_options(options)
|
||||
|
||||
def post_placeholder_process(self, placeholder, failed):
|
||||
"""Cleanup placeholder after load of its corresponding representations.
|
||||
|
||||
Args:
|
||||
placeholder (PlaceholderItem): Item which was just used to load
|
||||
representation.
|
||||
failed (bool): Loading of representation failed.
|
||||
"""
|
||||
# get the placeholder node from the scene
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
|
||||
# getting the latest nodes added
|
||||
# TODO get from shared populate data!
|
||||
nodes_init = placeholder.data["nodes_init"]
|
||||
nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init))
|
||||
self.log.debug("Loaded nodes: {}".format(nodes_loaded))
|
||||
if not nodes_loaded:
|
||||
return
|
||||
|
||||
placeholder.data["delete"] = True
|
||||
|
||||
nodes_loaded = self._move_to_placeholder_group(
|
||||
placeholder, nodes_loaded
|
||||
)
|
||||
placeholder.data["last_loaded"] = nodes_loaded
|
||||
refresh_nodes(nodes_loaded)
|
||||
|
||||
# positioning of the loaded nodes
|
||||
min_x, min_y, _, _ = get_extreme_positions(nodes_loaded)
|
||||
for node in nodes_loaded:
|
||||
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
|
||||
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
|
||||
node.setXYpos(xpos, ypos)
|
||||
refresh_nodes(nodes_loaded)
|
||||
|
||||
# fix the problem of z_order for backdrops
|
||||
self._fix_z_order(placeholder)
|
||||
|
||||
if placeholder.data.get("keep_placeholder"):
|
||||
self._imprint_siblings(placeholder)
|
||||
|
||||
if placeholder.data["nb_children"] == 0:
|
||||
# save initial nodes positions and dimensions, update them
|
||||
# and set inputs and outputs of loaded nodes
|
||||
if placeholder.data.get("keep_placeholder"):
|
||||
self._imprint_inits()
|
||||
self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded)
|
||||
|
||||
self._set_loaded_connections(placeholder)
|
||||
|
||||
elif placeholder.data["siblings"]:
|
||||
# create copies of placeholder siblings for the new loaded nodes,
|
||||
# set their inputs and outputs and update all nodes positions and
|
||||
# dimensions and siblings names
|
||||
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
refresh_nodes(siblings)
|
||||
copies = self._create_sib_copies(placeholder)
|
||||
new_nodes = list(copies.values())  # copied nodes
|
||||
self._update_nodes(placeholder, new_nodes, nodes_loaded)
|
||||
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
|
||||
new_nodes_name = get_names_from_nodes(new_nodes)
|
||||
imprint(placeholder_node, {"siblings": new_nodes_name})
|
||||
self._set_copies_connections(placeholder, copies)
|
||||
|
||||
self._update_nodes(
placeholder,
|
||||
nuke.allNodes(),
|
||||
new_nodes + nodes_loaded,
|
||||
20
|
||||
)
|
||||
|
||||
new_siblings = get_names_from_nodes(new_nodes)
|
||||
placeholder.data["siblings"] = new_siblings
|
||||
|
||||
else:
|
||||
# if the placeholder doesn't have siblings, the loaded
|
||||
# nodes will be placed in a free space
|
||||
|
||||
xpointer, ypointer = find_free_space_to_paste_nodes(
|
||||
nodes_loaded, direction="bottom", offset=200
|
||||
)
|
||||
node = nuke.createNode("NoOp")
|
||||
reset_selection()
|
||||
nuke.delete(node)
|
||||
for node in nodes_loaded:
|
||||
xpos = (node.xpos() - min_x) + xpointer
|
||||
ypos = (node.ypos() - min_y) + ypointer
|
||||
node.setXYpos(xpos, ypos)
|
||||
|
||||
placeholder.data["nb_children"] += 1
|
||||
reset_selection()
|
||||
|
||||
# go back to root group
|
||||
nuke.root().begin()
|
||||
|
||||
def _move_to_placeholder_group(self, placeholder, nodes_loaded):
|
||||
"""
|
||||
opening the placeholder's group and copying loaded nodes in it.
|
||||
|
||||
Returns :
|
||||
nodes_loaded (list): the new list of pasted nodes
|
||||
"""
|
||||
|
||||
groups_name = placeholder.data["group_name"]
|
||||
reset_selection()
|
||||
select_nodes(nodes_loaded)
|
||||
if groups_name:
|
||||
with node_tempfile() as filepath:
|
||||
nuke.nodeCopy(filepath)
|
||||
for node in nuke.selectedNodes():
|
||||
nuke.delete(node)
|
||||
group = nuke.toNode(groups_name)
|
||||
group.begin()
|
||||
nuke.nodePaste(filepath)
|
||||
nodes_loaded = nuke.selectedNodes()
|
||||
return nodes_loaded
|
||||
|
||||
def _fix_z_order(self, placeholder):
|
||||
"""Fix the problem of z_order when a backdrop is loaded."""
|
||||
|
||||
nodes_loaded = placeholder.data["last_loaded"]
|
||||
loaded_backdrops = []
|
||||
bd_orders = set()
|
||||
for node in nodes_loaded:
|
||||
if isinstance(node, nuke.BackdropNode):
|
||||
loaded_backdrops.append(node)
|
||||
bd_orders.add(node.knob("z_order").getValue())
|
||||
|
||||
if not bd_orders:
|
||||
return
|
||||
|
||||
sib_orders = set()
|
||||
for node_name in placeholder.data["siblings"]:
|
||||
node = nuke.toNode(node_name)
|
||||
if isinstance(node, nuke.BackdropNode):
|
||||
sib_orders.add(node.knob("z_order").getValue())
|
||||
|
||||
if not sib_orders:
|
||||
return
|
||||
|
||||
min_order = min(bd_orders)
|
||||
max_order = max(sib_orders)
|
||||
for backdrop_node in loaded_backdrops:
|
||||
z_order = backdrop_node.knob("z_order").getValue()
|
||||
backdrop_node.knob("z_order").setValue(
|
||||
z_order + max_order - min_order + 1)
|
||||
|
||||
def _imprint_siblings(self, placeholder):
|
||||
"""
|
||||
- add siblings names to placeholder attributes (nodes loaded with it)
|
||||
- add Id to the attributes of all the other nodes
|
||||
"""
|
||||
|
||||
loaded_nodes = placeholder.data["last_loaded"]
|
||||
loaded_nodes_set = set(loaded_nodes)
|
||||
data = {"repre_id": str(placeholder.data["last_repre_id"])}
|
||||
|
||||
for node in loaded_nodes:
|
||||
node_knobs = node.knobs()
|
||||
if "builder_type" not in node_knobs:
|
||||
# save the id of representation for all imported nodes
|
||||
imprint(node, data)
|
||||
node.knob("repre_id").setVisible(False)
|
||||
refresh_node(node)
|
||||
continue
|
||||
|
||||
if (
|
||||
"is_placeholder" not in node_knobs
|
||||
or (
|
||||
"is_placeholder" in node_knobs
|
||||
and node.knob("is_placeholder").value()
|
||||
)
|
||||
):
|
||||
siblings = list(loaded_nodes_set - {node})
|
||||
siblings_name = get_names_from_nodes(siblings)
|
||||
siblings = {"siblings": siblings_name}
|
||||
imprint(node, siblings)
|
||||
|
||||
def _imprint_inits(self):
|
||||
"""Add initial positions and dimensions to the attributes"""
|
||||
|
||||
for node in nuke.allNodes():
|
||||
refresh_node(node)
|
||||
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
|
||||
node.knob("x_init").setVisible(False)
|
||||
node.knob("y_init").setVisible(False)
|
||||
width = node.screenWidth()
|
||||
height = node.screenHeight()
|
||||
if "bdwidth" in node.knobs():
|
||||
imprint(node, {"w_init": width, "h_init": height})
|
||||
node.knob("w_init").setVisible(False)
|
||||
node.knob("h_init").setVisible(False)
|
||||
refresh_node(node)
|
||||
|
||||
def _update_nodes(
|
||||
self, placeholder, nodes, considered_nodes, offset_y=None
|
||||
):
|
||||
"""Adjust backdrop nodes dimensions and positions.
|
||||
|
||||
Considering some nodes sizes.
|
||||
|
||||
Args:
|
||||
nodes (list): list of nodes to update
|
||||
considered_nodes (list): list of nodes to consider while updating
|
||||
positions and dimensions
|
||||
offset (int): distance between copies
|
||||
"""
|
||||
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
|
||||
min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)
|
||||
|
||||
diff_x = diff_y = 0
|
||||
contained_nodes = [] # for backdrops
|
||||
|
||||
if offset_y is None:
|
||||
width_ph = placeholder_node.screenWidth()
|
||||
height_ph = placeholder_node.screenHeight()
|
||||
diff_y = max_y - min_y - height_ph
|
||||
diff_x = max_x - min_x - width_ph
|
||||
contained_nodes = [placeholder_node]
|
||||
min_x = placeholder_node.xpos()
|
||||
min_y = placeholder_node.ypos()
|
||||
else:
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
minX, _, maxX, _ = get_extreme_positions(siblings)
|
||||
diff_y = max_y - min_y + 20
|
||||
diff_x = abs(max_x - min_x - maxX + minX)
|
||||
contained_nodes = considered_nodes
|
||||
|
||||
if diff_y <= 0 and diff_x <= 0:
|
||||
return
|
||||
|
||||
for node in nodes:
|
||||
refresh_node(node)
|
||||
|
||||
if (
|
||||
node == placeholder_node
|
||||
or node in considered_nodes
|
||||
):
|
||||
continue
|
||||
|
||||
if (
|
||||
not isinstance(node, nuke.BackdropNode)
|
||||
or (
|
||||
isinstance(node, nuke.BackdropNode)
|
||||
and not set(contained_nodes) <= set(node.getNodes())
|
||||
)
|
||||
):
|
||||
if offset_y is None and node.xpos() >= min_x:
|
||||
node.setXpos(node.xpos() + diff_x)
|
||||
|
||||
if node.ypos() >= min_y:
|
||||
node.setYpos(node.ypos() + diff_y)
|
||||
|
||||
else:
|
||||
width = node.screenWidth()
|
||||
height = node.screenHeight()
|
||||
node.knob("bdwidth").setValue(width + diff_x)
|
||||
node.knob("bdheight").setValue(height + diff_y)
|
||||
|
||||
refresh_node(node)
|
||||
|
||||
def _set_loaded_connections(self, placeholder):
|
||||
"""
|
||||
set inputs and outputs of loaded nodes"""
|
||||
|
||||
placeholder_node = nuke.toNode(placeholder.scene_identifier)
|
||||
input_node, output_node = get_group_io_nodes(
|
||||
placeholder.data["last_loaded"]
|
||||
)
|
||||
for node in placeholder_node.dependent():
|
||||
for idx in range(node.inputs()):
|
||||
if node.input(idx) == placeholder_node and output_node:
|
||||
node.setInput(idx, output_node)
|
||||
|
||||
for node in placeholder_node.dependencies():
|
||||
for idx in range(placeholder_node.inputs()):
|
||||
if placeholder_node.input(idx) == node and input_node:
|
||||
input_node.setInput(0, node)
|
||||
|
||||
def _create_sib_copies(self, placeholder):
|
||||
""" creating copies of the palce_holder siblings (the ones who were
|
||||
loaded with it) for the new nodes added
|
||||
|
||||
Returns :
|
||||
copies (dict) : with copied nodes names and their copies
|
||||
"""
|
||||
|
||||
copies = {}
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
for node in siblings:
|
||||
new_node = duplicate_node(node)
|
||||
|
||||
x_init = int(new_node.knob("x_init").getValue())
|
||||
y_init = int(new_node.knob("y_init").getValue())
|
||||
new_node.setXYpos(x_init, y_init)
|
||||
if isinstance(new_node, nuke.BackdropNode):
|
||||
w_init = new_node.knob("w_init").getValue()
|
||||
h_init = new_node.knob("h_init").getValue()
|
||||
new_node.knob("bdwidth").setValue(w_init)
|
||||
new_node.knob("bdheight").setValue(h_init)
|
||||
refresh_node(node)
|
||||
|
||||
if "repre_id" in node.knobs().keys():
|
||||
node.removeKnob(node.knob("repre_id"))
|
||||
copies[node.name()] = new_node
|
||||
return copies
|
||||
|
||||
def _set_copies_connections(self, placeholder, copies):
|
||||
"""Set inputs and outputs of the copies.
|
||||
|
||||
Args:
|
||||
copies (dict): Copied nodes by their names.
|
||||
"""
|
||||
|
||||
last_input, last_output = get_group_io_nodes(
|
||||
placeholder.data["last_loaded"]
|
||||
)
|
||||
siblings = get_nodes_by_names(placeholder.data["siblings"])
|
||||
siblings_input, siblings_output = get_group_io_nodes(siblings)
|
||||
copy_input = copies[siblings_input.name()]
|
||||
copy_output = copies[siblings_output.name()]
|
||||
|
||||
for node_init in siblings:
|
||||
if node_init == siblings_output:
|
||||
continue
|
||||
|
||||
node_copy = copies[node_init.name()]
|
||||
for node in node_init.dependent():
|
||||
for idx in range(node.inputs()):
|
||||
if node.input(idx) != node_init:
|
||||
continue
|
||||
|
||||
if node in siblings:
|
||||
copies[node.name()].setInput(idx, node_copy)
|
||||
else:
|
||||
last_input.setInput(0, node_copy)
|
||||
|
||||
for node in node_init.dependencies():
|
||||
for idx in range(node_init.inputs()):
|
||||
if node_init.input(idx) != node:
|
||||
continue
|
||||
|
||||
if node_init == siblings_input:
|
||||
copy_input.setInput(idx, node)
|
||||
elif node in siblings:
|
||||
node_copy.setInput(idx, copies[node.name()])
|
||||
else:
|
||||
node_copy.setInput(idx, last_output)
|
||||
|
||||
siblings_input.setInput(0, copy_output)
|
||||
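For orientation, a minimal sketch (not part of the diff) of creating a node that the NukePlaceholderLoadPlugin above would pick up in collect_placeholders(); the knob names mirror the ones the plugin reads and the values are illustrative only.

    import nuke
    from ayon_core.hosts.nuke.api.lib import imprint

    # hypothetical placeholder setup; "nuke.load" matches the plugin identifier
    placeholder_node = nuke.createNode("NoOp", inpanel=False)
    imprint(placeholder_node, {
        "plugin_identifier": "nuke.load",
        "is_placeholder": True,
        "nb_children": 0,
    })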
|
|
@ -1,5 +1,6 @@
|
|||
import os
|
||||
|
||||
from pathlib import Path
|
||||
from ayon_core.lib import get_ayon_launcher_args
|
||||
from ayon_core.lib.execute import run_detached_process
|
||||
from ayon_core.addon import (
|
||||
|
|
@ -57,3 +58,62 @@ def launch():
|
|||
from ayon_core.tools import traypublisher
|
||||
|
||||
traypublisher.main()
|
||||
|
||||
|
||||
@cli_main.command()
|
||||
@click_wrap.option(
|
||||
"--filepath",
|
||||
help="Full path to CSV file with data",
|
||||
type=str,
|
||||
required=True
|
||||
)
|
||||
@click_wrap.option(
|
||||
"--project",
|
||||
help="Project name in which the context will be used",
|
||||
type=str,
|
||||
required=True
|
||||
)
|
||||
@click_wrap.option(
|
||||
"--folder-path",
|
||||
help="Asset name in which the context will be used",
|
||||
type=str,
|
||||
required=True
|
||||
)
|
||||
@click_wrap.option(
|
||||
"--task",
|
||||
help="Task name under Asset in which the context will be used",
|
||||
type=str,
|
||||
required=False
|
||||
)
|
||||
@click_wrap.option(
|
||||
"--ignore-validators",
|
||||
help="Option to ignore validators",
|
||||
type=bool,
|
||||
is_flag=True,
|
||||
required=False
|
||||
)
|
||||
def ingestcsv(
|
||||
filepath,
|
||||
project,
|
||||
folder_path,
|
||||
task,
|
||||
ignore_validators
|
||||
):
|
||||
"""Ingest CSV file into project.
|
||||
|
||||
This command ingests a CSV file into the project. The CSV file must be
in a specific format. See the documentation for more information.
|
||||
"""
|
||||
from .csv_publish import csvpublish
|
||||
|
||||
# use Path to check if the given CSV filepath exists
|
||||
if not Path(filepath).exists():
|
||||
raise FileNotFoundError(f"File {filepath} does not exist.")
|
||||
|
||||
csvpublish(
|
||||
filepath,
|
||||
project,
|
||||
folder_path,
|
||||
task,
|
||||
ignore_validators
|
||||
)
|
||||
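A hedged usage sketch for the new command, launched through the AYON launcher the same way the addon's launch() helper does; the argument values below are hypothetical.

    from ayon_core.lib import get_ayon_launcher_args
    from ayon_core.lib.execute import run_detached_process

    args = get_ayon_launcher_args(
        "addon", "traypublisher", "ingestcsv",
        "--filepath", "/ingest/manifest.csv",   # hypothetical CSV path
        "--project", "demo_Commercial",
        "--folder-path", "/shots/sh010",
        "--task", "compositing",
    )
    run_detached_process(args)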
|
|
|
|||
86
client/ayon_core/hosts/traypublisher/csv_publish.py
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
import pyblish.util
|
||||
|
||||
from ayon_api import get_folder_by_path, get_task_by_name
|
||||
from ayon_core.lib.attribute_definitions import FileDefItem
|
||||
from ayon_core.pipeline import install_host
|
||||
from ayon_core.pipeline.create import CreateContext
|
||||
|
||||
from ayon_core.hosts.traypublisher.api import TrayPublisherHost
|
||||
|
||||
|
||||
def csvpublish(
|
||||
filepath,
|
||||
project_name,
|
||||
folder_path,
|
||||
task_name=None,
|
||||
ignore_validators=False
|
||||
):
|
||||
"""Publish CSV file.
|
||||
|
||||
Args:
|
||||
filepath (str): Path to CSV file.
|
||||
project_name (str): Project name.
|
||||
folder_path (str): Folder path.
|
||||
task_name (Optional[str]): Task name.
|
||||
ignore_validators (Optional[bool]): Option to ignore validators.
|
||||
"""
|
||||
|
||||
# initialization of host
|
||||
host = TrayPublisherHost()
|
||||
install_host(host)
|
||||
|
||||
# setting host context into project
|
||||
host.set_project_name(project_name)
|
||||
|
||||
# form precreate data with field values
|
||||
file_field = FileDefItem.from_paths([filepath], False).pop().to_dict()
|
||||
precreate_data = {
|
||||
"csv_filepath_data": file_field,
|
||||
}
|
||||
|
||||
# create context initialization
|
||||
create_context = CreateContext(host, headless=True)
|
||||
folder_entity = get_folder_by_path(
|
||||
project_name,
|
||||
folder_path=folder_path,
|
||||
)
|
||||
|
||||
if not folder_entity:
|
||||
raise ValueError(
f"Folder path '{folder_path}' doesn't "
f"exist in project '{project_name}'."
|
||||
)
|
||||
|
||||
task_entity = get_task_by_name(
|
||||
project_name,
|
||||
folder_entity["id"],
|
||||
task_name,
|
||||
)
|
||||
|
||||
if not task_entity:
|
||||
raise ValueError(
f"Task name '{task_name}' doesn't "
f"exist in folder '{folder_path}'."
|
||||
)
|
||||
|
||||
create_context.create(
|
||||
"io.ayon.creators.traypublisher.csv_ingest",
|
||||
"Main",
|
||||
folder_entity=folder_entity,
|
||||
task_entity=task_entity,
|
||||
pre_create_data=precreate_data,
|
||||
)
|
||||
|
||||
# publishing context initialization
|
||||
pyblish_context = pyblish.api.Context()
|
||||
pyblish_context.data["create_context"] = create_context
|
||||
|
||||
# default targets keep 'local' so validators run;
# redefine targets (skip 'local') to disable validators
targets = ["local", "ingest"]
|
||||
if ignore_validators:
|
||||
targets = ["default", "ingest"]
|
||||
|
||||
# publishing
|
||||
pyblish.util.publish(context=pyblish_context, targets=targets)
|
||||
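A minimal sketch of calling csvpublish() directly from Python; all context values below are hypothetical and must exist in the target AYON project.

    from ayon_core.hosts.traypublisher.csv_publish import csvpublish

    csvpublish(
        filepath="/ingest/manifest.csv",   # hypothetical CSV path
        project_name="demo_Commercial",
        folder_path="/shots/sh010",
        task_name="compositing",
        ignore_validators=False,           # keep validators enabled
    )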
|
|
@ -0,0 +1,741 @@
|
|||
import os
|
||||
import re
|
||||
import csv
|
||||
import clique
|
||||
from io import StringIO
|
||||
from copy import deepcopy, copy
|
||||
|
||||
from ayon_api import get_folder_by_path, get_task_by_name
|
||||
from ayon_core.pipeline.create import get_product_name
|
||||
from ayon_core.pipeline import CreatedInstance
|
||||
from ayon_core.lib import FileDef, BoolDef
|
||||
from ayon_core.lib.transcoding import (
|
||||
VIDEO_EXTENSIONS, IMAGE_EXTENSIONS
|
||||
)
|
||||
from ayon_core.pipeline.create import CreatorError
|
||||
from ayon_core.hosts.traypublisher.api.plugin import (
|
||||
TrayPublishCreator
|
||||
)
|
||||
|
||||
|
||||
class IngestCSV(TrayPublishCreator):
|
||||
"""CSV ingest creator class"""
|
||||
|
||||
icon = "fa.file"
|
||||
|
||||
label = "CSV Ingest"
|
||||
product_type = "csv_ingest_file"
|
||||
identifier = "io.ayon.creators.traypublisher.csv_ingest"
|
||||
|
||||
default_variants = ["Main"]
|
||||
|
||||
description = "Ingest products' data from CSV file"
|
||||
detailed_description = """
|
||||
Ingest products' data from CSV file following column and representation
|
||||
configuration in project settings.
|
||||
"""
|
||||
|
||||
# Position in the list of creators.
|
||||
order = 10
|
||||
|
||||
# settings for this creator
|
||||
columns_config = {}
|
||||
representations_config = {}
|
||||
|
||||
def create(self, subset_name, instance_data, pre_create_data):
|
||||
"""Create an product from each row found in the CSV.
|
||||
|
||||
Args:
|
||||
subset_name (str): The subset name.
|
||||
instance_data (dict): The instance data.
|
||||
pre_create_data (dict): The pre-create data.
|
||||
"""
|
||||
|
||||
csv_filepath_data = pre_create_data.get("csv_filepath_data", {})
|
||||
|
||||
folder = csv_filepath_data.get("directory", "")
|
||||
if not os.path.exists(folder):
|
||||
raise CreatorError(
|
||||
f"Directory '{folder}' does not exist."
|
||||
)
|
||||
filenames = csv_filepath_data.get("filenames", [])
|
||||
self._process_csv_file(subset_name, instance_data, folder, filenames[0])
|
||||
|
||||
def _process_csv_file(
|
||||
self, subset_name, instance_data, staging_dir, filename):
|
||||
"""Process CSV file.
|
||||
|
||||
Args:
|
||||
subset_name (str): The subset name.
|
||||
instance_data (dict): The instance data.
|
||||
staging_dir (str): The staging directory.
|
||||
filename (str): The filename.
|
||||
"""
|
||||
|
||||
# pass CSV file data into the instance data
|
||||
self._pass_data_to_csv_instance(
|
||||
instance_data,
|
||||
staging_dir,
|
||||
filename
|
||||
)
|
||||
|
||||
csv_instance = CreatedInstance(
|
||||
self.product_type, subset_name, instance_data, self
|
||||
)
|
||||
self._store_new_instance(csv_instance)
|
||||
|
||||
csv_instance["csvFileData"] = {
|
||||
"filename": filename,
|
||||
"staging_dir": staging_dir,
|
||||
}
|
||||
|
||||
# gather all data from the csv file and convert it
# to new instances
|
||||
csv_data_for_instances = self._get_data_from_csv(
|
||||
staging_dir, filename)
|
||||
|
||||
# create instances from csv data via self function
|
||||
self._create_instances_from_csv_data(
|
||||
csv_data_for_instances, staging_dir
|
||||
)
|
||||
|
||||
def _create_instances_from_csv_data(
|
||||
self,
|
||||
csv_data_for_instances,
|
||||
staging_dir
|
||||
):
|
||||
"""Create instances from csv data"""
|
||||
|
||||
for folder_path, prepared_data in csv_data_for_instances.items():
|
||||
project_name = self.create_context.get_current_project_name()
|
||||
products = prepared_data["products"]
|
||||
|
||||
for instance_name, product_data in products.items():
|
||||
# get important instance variables
|
||||
task_name = product_data["task_name"]
|
||||
task_type = product_data["task_type"]
|
||||
variant = product_data["variant"]
|
||||
product_type = product_data["product_type"]
|
||||
version = product_data["version"]
|
||||
|
||||
# create subset/product name
|
||||
product_name = get_product_name(
|
||||
project_name,
|
||||
task_name,
|
||||
task_type,
|
||||
self.host_name,
|
||||
product_type,
|
||||
variant
|
||||
)
|
||||
|
||||
# make sure frame start/end is inherited from csv columns
|
||||
# expected frame range data excludes handles
|
||||
for _, repre_data in product_data["representations"].items(): # noqa: E501
|
||||
frame_start = repre_data["frameStart"]
|
||||
frame_end = repre_data["frameEnd"]
|
||||
handle_start = repre_data["handleStart"]
|
||||
handle_end = repre_data["handleEnd"]
|
||||
fps = repre_data["fps"]
|
||||
break
|
||||
|
||||
# try to find any version comment in representation data
|
||||
version_comment = next(
|
||||
iter(
|
||||
repre_data["comment"]
|
||||
for repre_data in product_data["representations"].values() # noqa: E501
|
||||
if repre_data["comment"]
|
||||
),
|
||||
None
|
||||
)
|
||||
|
||||
# try to find any slate switch in representation data
|
||||
slate_exists = any(
|
||||
repre_data["slate"]
|
||||
for _, repre_data in product_data["representations"].items() # noqa: E501
|
||||
)
|
||||
|
||||
# get representations from product data
|
||||
representations = product_data["representations"]
|
||||
label = f"{folder_path}_{product_name}_v{version:>03}"
|
||||
|
||||
families = ["csv_ingest"]
|
||||
if slate_exists:
|
||||
# adding slate to families mainly for loaders to be able
|
||||
# to filter out slates
|
||||
families.append("slate")
|
||||
|
||||
# make product data
|
||||
product_data = {
|
||||
"name": instance_name,
|
||||
"folderPath": folder_path,
|
||||
"families": families,
|
||||
"label": label,
|
||||
"task": task_name,
|
||||
"variant": variant,
|
||||
"source": "csv",
|
||||
"frameStart": frame_start,
|
||||
"frameEnd": frame_end,
|
||||
"handleStart": handle_start,
|
||||
"handleEnd": handle_end,
|
||||
"fps": fps,
|
||||
"version": version,
|
||||
"comment": version_comment,
|
||||
}
|
||||
|
||||
# create new instance
|
||||
new_instance = CreatedInstance(
|
||||
product_type, product_name, product_data, self
|
||||
)
|
||||
self._store_new_instance(new_instance)
|
||||
|
||||
if not new_instance.get("prepared_data_for_repres"):
|
||||
new_instance["prepared_data_for_repres"] = []
|
||||
|
||||
base_thumbnail_repre_data = {
|
||||
"name": "thumbnail",
|
||||
"ext": None,
|
||||
"files": None,
|
||||
"stagingDir": None,
|
||||
"stagingDir_persistent": True,
|
||||
"tags": ["thumbnail", "delete"],
|
||||
}
|
||||
# need to populate all thumbnails for all representations
|
||||
# so we can check if unique thumbnail per representation
|
||||
# is needed
|
||||
thumbnails = [
|
||||
repre_data["thumbnailPath"]
|
||||
for repre_data in representations.values()
|
||||
if repre_data["thumbnailPath"]
|
||||
]
|
||||
multiple_thumbnails = len(set(thumbnails)) > 1
|
||||
explicit_output_name = None
|
||||
thumbnails_processed = False
|
||||
for filepath, repre_data in representations.items():
|
||||
# check if any review derivate tag is present
|
||||
reviewable = any(
|
||||
tag for tag in repre_data.get("tags", [])
|
||||
# tag can be `ftrackreview` or `review`
|
||||
if "review" in tag
|
||||
)
|
||||
# multiple thumbnails need to be populated as representations
# with an outputName so the Ftrack instance integrator can
# pair them with reviewable video representations
|
||||
if (
|
||||
thumbnails
|
||||
and multiple_thumbnails
|
||||
and reviewable
|
||||
):
|
||||
# multiple unique thumbnails per representation needs
|
||||
# grouping by outputName
|
||||
# mainly used in Ftrack instance integrator
|
||||
explicit_output_name = repre_data["representationName"]
|
||||
relative_thumbnail_path = repre_data["thumbnailPath"]
|
||||
# representation might not have thumbnail path
|
||||
# so ignore this one
|
||||
if not relative_thumbnail_path:
|
||||
continue
|
||||
thumb_dir, thumb_file = \
|
||||
self._get_refactor_thumbnail_path(
|
||||
staging_dir, relative_thumbnail_path)
|
||||
filename, ext = os.path.splitext(thumb_file)
|
||||
thumbnail_repr_data = deepcopy(
|
||||
base_thumbnail_repre_data)
|
||||
thumbnail_repr_data.update({
|
||||
"name": "thumbnail_{}".format(filename),
|
||||
"ext": ext[1:],
|
||||
"files": thumb_file,
|
||||
"stagingDir": thumb_dir,
|
||||
"outputName": explicit_output_name,
|
||||
})
|
||||
new_instance["prepared_data_for_repres"].append({
|
||||
"type": "thumbnail",
|
||||
"colorspace": None,
|
||||
"representation": thumbnail_repr_data,
|
||||
})
|
||||
# also add thumbnailPath for ayon to integrate
|
||||
if not new_instance.get("thumbnailPath"):
|
||||
new_instance["thumbnailPath"] = (
|
||||
os.path.join(thumb_dir, thumb_file)
|
||||
)
|
||||
elif (
|
||||
thumbnails
|
||||
and not multiple_thumbnails
|
||||
and not thumbnails_processed
|
||||
or not reviewable
|
||||
):
|
||||
"""
|
||||
For case where we have only one thumbnail
|
||||
and not reviewable medias. This needs to be processed
|
||||
only once per instance.
|
||||
"""
|
||||
if not thumbnails:
|
||||
continue
|
||||
# here we will use only one thumbnail for
|
||||
# all representations
|
||||
relative_thumbnail_path = repre_data["thumbnailPath"]
|
||||
# popping last thumbnail from list since it is only one
|
||||
# and we do not need to iterate again over it
|
||||
if not relative_thumbnail_path:
|
||||
relative_thumbnail_path = thumbnails.pop()
|
||||
thumb_dir, thumb_file = \
|
||||
self._get_refactor_thumbnail_path(
|
||||
staging_dir, relative_thumbnail_path)
|
||||
_, ext = os.path.splitext(thumb_file)
|
||||
thumbnail_repr_data = deepcopy(
|
||||
base_thumbnail_repre_data)
|
||||
thumbnail_repr_data.update({
|
||||
"ext": ext[1:],
|
||||
"files": thumb_file,
|
||||
"stagingDir": thumb_dir
|
||||
})
|
||||
new_instance["prepared_data_for_repres"].append({
|
||||
"type": "thumbnail",
|
||||
"colorspace": None,
|
||||
"representation": thumbnail_repr_data,
|
||||
})
|
||||
# also add thumbnailPath for ayon to integrate
|
||||
if not new_instance.get("thumbnailPath"):
|
||||
new_instance["thumbnailPath"] = (
|
||||
os.path.join(thumb_dir, thumb_file)
|
||||
)
|
||||
|
||||
thumbnails_processed = True
|
||||
|
||||
# get representation data
|
||||
representation_data = self._get_representation_data(
|
||||
filepath, repre_data, staging_dir,
|
||||
explicit_output_name
|
||||
)
|
||||
|
||||
new_instance["prepared_data_for_repres"].append({
|
||||
"type": "media",
|
||||
"colorspace": repre_data["colorspace"],
|
||||
"representation": representation_data,
|
||||
})
|
||||
|
||||
def _get_refactor_thumbnail_path(
|
||||
self, staging_dir, relative_thumbnail_path):
|
||||
thumbnail_abs_path = os.path.join(
|
||||
staging_dir, relative_thumbnail_path)
|
||||
return os.path.split(
|
||||
thumbnail_abs_path)
|
||||
|
||||
def _get_representation_data(
|
||||
self, filepath, repre_data, staging_dir, explicit_output_name=None
|
||||
):
|
||||
"""Get representation data
|
||||
|
||||
Args:
|
||||
filepath (str): Filepath to representation file.
|
||||
repre_data (dict): Representation data from CSV file.
|
||||
staging_dir (str): Staging directory.
|
||||
explicit_output_name (Optional[str]): Explicit output name.
|
||||
For grouping purposes with reviewable components.
|
||||
Defaults to None.
|
||||
"""
|
||||
|
||||
# get extension of file
|
||||
basename = os.path.basename(filepath)
|
||||
extension = os.path.splitext(filepath)[-1].lower()
|
||||
|
||||
# validate that the filepath has a correct extension for the output
|
||||
repre_name = repre_data["representationName"]
|
||||
repre_config_data = None
|
||||
for repre in self.representations_config["representations"]:
|
||||
if repre["name"] == repre_name:
|
||||
repre_config_data = repre
|
||||
break
|
||||
|
||||
if not repre_config_data:
|
||||
raise CreatorError(
|
||||
f"Representation '{repre_name}' not found "
|
||||
"in config representation data."
|
||||
)
|
||||
|
||||
validate_extensions = repre_config_data["extensions"]
|
||||
if extension not in validate_extensions:
|
||||
raise CreatorError(
|
||||
f"File extension '{extension}' not valid for "
|
||||
f"output '{validate_extensions}'."
|
||||
)
|
||||
|
||||
is_sequence = (extension in IMAGE_EXTENSIONS)
|
||||
# convert ### string in file name to %03d
|
||||
# this is for correct frame range validation
|
||||
# example: file.###.exr -> file.%03d.exr
|
||||
if "#" in basename:
|
||||
padding = len(basename.split("#")) - 1
|
||||
basename = basename.replace("#" * padding, f"%0{padding}d")
|
||||
is_sequence = True
|
||||
|
||||
# make absolute path to file
|
||||
absfilepath = os.path.normpath(os.path.join(staging_dir, filepath))
|
||||
dirname = os.path.dirname(absfilepath)
|
||||
|
||||
# check if dirname exists
|
||||
if not os.path.isdir(dirname):
|
||||
raise CreatorError(
|
||||
f"Directory '{dirname}' does not exist."
|
||||
)
|
||||
|
||||
# collect all data from dirname
|
||||
paths_for_collection = []
|
||||
for file in os.listdir(dirname):
|
||||
filepath = os.path.join(dirname, file)
|
||||
paths_for_collection.append(filepath)
|
||||
|
||||
collections, _ = clique.assemble(paths_for_collection)
|
||||
|
||||
if collections:
|
||||
collections = collections[0]
|
||||
else:
|
||||
if is_sequence:
|
||||
raise CreatorError(
|
||||
f"No collections found in directory '{dirname}'."
|
||||
)
|
||||
|
||||
frame_start = None
|
||||
frame_end = None
|
||||
if is_sequence:
|
||||
files = [os.path.basename(file) for file in collections]
|
||||
frame_start = list(collections.indexes)[0]
|
||||
frame_end = list(collections.indexes)[-1]
|
||||
else:
|
||||
files = basename
|
||||
|
||||
tags = deepcopy(repre_data["tags"])
|
||||
# if slate in repre_data is True, tag the representation as having a slate
|
||||
if repre_data["slate"]:
|
||||
tags.append("has_slate")
|
||||
|
||||
# get representation data
|
||||
representation_data = {
|
||||
"name": repre_name,
|
||||
"ext": extension[1:],
|
||||
"files": files,
|
||||
"stagingDir": dirname,
|
||||
"stagingDir_persistent": True,
|
||||
"tags": tags,
|
||||
}
|
||||
if extension in VIDEO_EXTENSIONS:
|
||||
representation_data.update({
|
||||
"fps": repre_data["fps"],
|
||||
"outputName": repre_name,
|
||||
})
|
||||
|
||||
if explicit_output_name:
|
||||
representation_data["outputName"] = explicit_output_name
|
||||
|
||||
if frame_start is not None:
|
||||
representation_data["frameStart"] = frame_start
|
||||
if frame_end is not None:
|
||||
representation_data["frameEnd"] = frame_end
|
||||
|
||||
return representation_data
|
||||
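A standalone sketch of the padding and sequence handling above: '#' padding is rewritten to printf-style notation and clique groups a directory listing into sequences. The directory content is hypothetical; counting '#' characters is equivalent to the split-based count in the method.

    import os
    import clique

    basename = "sh010.####.exr"
    if "#" in basename:
        padding = basename.count("#")
        basename = basename.replace("#" * padding, f"%0{padding}d")
    # basename is now "sh010.%04d.exr"

    dirname = "/ingest/plates"  # hypothetical directory
    collections, remainder = clique.assemble(
        os.path.join(dirname, name) for name in os.listdir(dirname)
    )
    for collection in collections:
        indexes = list(collection.indexes)
        print(collection.head, indexes[0], indexes[-1])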
|
||||
def _get_data_from_csv(
|
||||
self, package_dir, filename
|
||||
):
|
||||
"""Generate instances from the csv file"""
|
||||
# get current project name and code from context.data
|
||||
project_name = self.create_context.get_current_project_name()
|
||||
|
||||
csv_file_path = os.path.join(
|
||||
package_dir, filename
|
||||
)
|
||||
|
||||
# make sure csv file contains columns from following list
|
||||
required_columns = [
|
||||
column["name"] for column in self.columns_config["columns"]
|
||||
if column["required_column"]
|
||||
]
|
||||
|
||||
# read csv file
|
||||
with open(csv_file_path, "r") as csv_file:
|
||||
csv_content = csv_file.read()
|
||||
|
||||
# read csv file with DictReader
|
||||
csv_reader = csv.DictReader(
|
||||
StringIO(csv_content),
|
||||
delimiter=self.columns_config["csv_delimiter"]
|
||||
)
|
||||
|
||||
# fix fieldnames
|
||||
# someone may leave extra spaces at the start or end of
# the column name
|
||||
all_columns = [
|
||||
" ".join(column.rsplit()) for column in csv_reader.fieldnames]
|
||||
|
||||
# return back fixed fieldnames
|
||||
csv_reader.fieldnames = all_columns
|
||||
|
||||
# check if csv file contains all required columns
|
||||
if any(column not in all_columns for column in required_columns):
|
||||
raise CreatorError(
|
||||
f"Missing required columns: {required_columns}"
|
||||
)
|
||||
|
||||
csv_data = {}
|
||||
# get data from csv file
|
||||
for row in csv_reader:
|
||||
# Get required columns first
|
||||
# TODO: will need to be folder path in CSV
|
||||
# TODO: `context_asset_name` is now `folder_path`
|
||||
folder_path = self._get_row_value_with_validation(
|
||||
"Folder Path", row)
|
||||
task_name = self._get_row_value_with_validation(
|
||||
"Task Name", row)
|
||||
version = self._get_row_value_with_validation(
|
||||
"Version", row)
|
||||
|
||||
# Get optional columns
|
||||
variant = self._get_row_value_with_validation(
|
||||
"Variant", row)
|
||||
product_type = self._get_row_value_with_validation(
|
||||
"Product Type", row)
|
||||
|
||||
pre_product_name = (
|
||||
f"{task_name}{variant}{product_type}"
|
||||
f"{version}".replace(" ", "").lower()
|
||||
)
|
||||
|
||||
# get representation data
|
||||
filename, representation_data = \
|
||||
self._get_representation_row_data(row)
|
||||
|
||||
# TODO: batch query of all folder paths and task names
|
||||
|
||||
# get folder entity from folder path
|
||||
folder_entity = get_folder_by_path(
|
||||
project_name, folder_path)
|
||||
|
||||
# make sure the folder exists
|
||||
if not folder_entity:
|
||||
raise CreatorError(
|
||||
f"Asset '{folder_path}' not found."
|
||||
)
|
||||
|
||||
# find the task entity under the folder entity
|
||||
task_entity = get_task_by_name(
|
||||
project_name, folder_entity["id"], task_name)
|
||||
|
||||
# check if task name is a valid task on the folder entity
|
||||
if not task_entity:
|
||||
raise CreatorError(
|
||||
f"Task '{task_name}' not found in asset doc."
|
||||
)
|
||||
|
||||
# Collect all csv data into one dict and make sure there are no
# duplicates. Data is validated and sorted under the correct
# existing folder, and representations are distributed under
# products following variants.
|
||||
if folder_path not in csv_data:
|
||||
csv_data[folder_path] = {
|
||||
"folder_entity": folder_entity,
|
||||
"products": {
|
||||
pre_product_name: {
|
||||
"task_name": task_name,
|
||||
"task_type": task_entity["taskType"],
|
||||
"variant": variant,
|
||||
"product_type": product_type,
|
||||
"version": version,
|
||||
"representations": {
|
||||
filename: representation_data,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
else:
|
||||
csv_products = csv_data[folder_path]["products"]
|
||||
if pre_product_name not in csv_products:
|
||||
csv_products[pre_product_name] = {
|
||||
"task_name": task_name,
|
||||
"task_type": task_entity["taskType"],
|
||||
"variant": variant,
|
||||
"product_type": product_type,
|
||||
"version": version,
|
||||
"representations": {
|
||||
filename: representation_data,
|
||||
},
|
||||
}
|
||||
else:
|
||||
csv_representations = \
|
||||
csv_products[pre_product_name]["representations"]
|
||||
if filename in csv_representations:
|
||||
raise CreatorError(
|
||||
f"Duplicate filename '{filename}' in csv file."
|
||||
)
|
||||
csv_representations[filename] = representation_data
|
||||
|
||||
return csv_data
|
||||
|
||||
def _get_representation_row_data(self, row_data):
|
||||
"""Get representation row data"""
|
||||
# Get required columns first
|
||||
file_path = self._get_row_value_with_validation(
|
||||
"File Path", row_data)
|
||||
frame_start = self._get_row_value_with_validation(
|
||||
"Frame Start", row_data)
|
||||
frame_end = self._get_row_value_with_validation(
|
||||
"Frame End", row_data)
|
||||
handle_start = self._get_row_value_with_validation(
|
||||
"Handle Start", row_data)
|
||||
handle_end = self._get_row_value_with_validation(
|
||||
"Handle End", row_data)
|
||||
fps = self._get_row_value_with_validation(
|
||||
"FPS", row_data)
|
||||
|
||||
# Get optional columns
|
||||
thumbnail_path = self._get_row_value_with_validation(
|
||||
"Version Thumbnail", row_data)
|
||||
colorspace = self._get_row_value_with_validation(
|
||||
"Representation Colorspace", row_data)
|
||||
comment = self._get_row_value_with_validation(
|
||||
"Version Comment", row_data)
|
||||
repre = self._get_row_value_with_validation(
|
||||
"Representation", row_data)
|
||||
slate_exists = self._get_row_value_with_validation(
|
||||
"Slate Exists", row_data)
|
||||
repre_tags = self._get_row_value_with_validation(
|
||||
"Representation Tags", row_data)
|
||||
|
||||
# convert tags value to list
|
||||
tags_list = copy(self.representations_config["default_tags"])
|
||||
if repre_tags:
|
||||
tags_list = []
|
||||
tags_delimiter = self.representations_config["tags_delimiter"]
|
||||
# strip spaces from repre_tags
|
||||
if tags_delimiter in repre_tags:
|
||||
tags = repre_tags.split(tags_delimiter)
|
||||
for _tag in tags:
|
||||
tags_list.append(("".join(_tag.strip())).lower())
|
||||
else:
|
||||
tags_list.append(repre_tags)
|
||||
|
||||
representation_data = {
|
||||
"colorspace": colorspace,
|
||||
"comment": comment,
|
||||
"representationName": repre,
|
||||
"slate": slate_exists,
|
||||
"tags": tags_list,
|
||||
"thumbnailPath": thumbnail_path,
|
||||
"frameStart": int(frame_start),
|
||||
"frameEnd": int(frame_end),
|
||||
"handleStart": int(handle_start),
|
||||
"handleEnd": int(handle_end),
|
||||
"fps": float(fps),
|
||||
}
|
||||
return file_path, representation_data
|
||||
|
||||
def _get_row_value_with_validation(
|
||||
self, column_name, row_data, default_value=None
|
||||
):
|
||||
"""Get row value with validation"""
|
||||
|
||||
# get column data from column config
|
||||
column_data = None
|
||||
for column in self.columns_config["columns"]:
|
||||
if column["name"] == column_name:
|
||||
column_data = column
|
||||
break
|
||||
|
||||
if not column_data:
|
||||
raise CreatorError(
|
||||
f"Column '{column_name}' not found in column config."
|
||||
)
|
||||
|
||||
# get column value from row
|
||||
column_value = row_data.get(column_name)
|
||||
column_required = column_data["required_column"]
|
||||
|
||||
# check if column value is not empty string and column is required
|
||||
if column_value == "" and column_required:
|
||||
raise CreatorError(
|
||||
f"Value in column '{column_name}' is required."
|
||||
)
|
||||
|
||||
# get column type
|
||||
column_type = column_data["type"]
|
||||
# get column validation regex
|
||||
column_validation = column_data["validation_pattern"]
|
||||
# get column default value
|
||||
column_default = default_value or column_data["default"]
|
||||
|
||||
if column_type in ["number", "decimal"] and column_default == 0:
|
||||
column_default = None
|
||||
|
||||
# check if column value is not empty string
|
||||
if column_value == "":
|
||||
# set default value if column value is empty string
|
||||
column_value = column_default
|
||||
|
||||
# set column value to correct type following column type
|
||||
if column_type == "number" and column_value is not None:
|
||||
column_value = int(column_value)
|
||||
elif column_type == "decimal" and column_value is not None:
|
||||
column_value = float(column_value)
|
||||
elif column_type == "bool":
|
||||
column_value = column_value in ["true", "True"]
|
||||
|
||||
# check if column value matches validation regex
|
||||
if (
|
||||
column_value is not None and
|
||||
not re.match(str(column_validation), str(column_value))
|
||||
):
|
||||
raise CreatorError(
|
||||
f"Column '{column_name}' value '{column_value}' "
|
||||
f"does not match validation regex '{column_validation}' \n"
|
||||
f"Row data: {row_data} \n"
|
||||
f"Column data: {column_data}"
|
||||
)
|
||||
|
||||
return column_value
|
||||
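For reference, a hedged example of a single column definition carrying the keys this method reads; the real values live in the project settings columns_config, so treat these as illustrative.

    columns_config = {
        "csv_delimiter": ",",
        "columns": [
            {
                "name": "Frame Start",
                "type": "number",            # e.g. "number", "decimal", "bool"
                "required_column": True,
                "validation_pattern": r"^\d+$",
                "default": 0,
            },
        ],
    }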
|
||||
def _pass_data_to_csv_instance(
|
||||
self, instance_data, staging_dir, filename
|
||||
):
|
||||
"""Pass CSV representation file to instance data"""
|
||||
|
||||
representation = {
|
||||
"name": "csv",
|
||||
"ext": "csv",
|
||||
"files": filename,
|
||||
"stagingDir": staging_dir,
|
||||
"stagingDir_persistent": True,
|
||||
}
|
||||
|
||||
instance_data.update({
|
||||
"label": f"CSV: {filename}",
|
||||
"representations": [representation],
|
||||
"stagingDir": staging_dir,
|
||||
"stagingDir_persistent": True,
|
||||
})
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
return [
|
||||
BoolDef(
|
||||
"add_review_family",
|
||||
default=True,
|
||||
label="Review"
|
||||
)
|
||||
]
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
"""Creating pre-create attributes at creator plugin.
|
||||
|
||||
Returns:
|
||||
list: list of attribute object instances
|
||||
"""
|
||||
# Use same attributes as for instance attributes
|
||||
attr_defs = [
|
||||
FileDef(
|
||||
"csv_filepath_data",
|
||||
folders=False,
|
||||
extensions=[".csv"],
|
||||
allow_sequences=False,
|
||||
single_item=True,
|
||||
label="CSV File",
|
||||
),
|
||||
]
|
||||
return attr_defs
|
||||
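To make the expected input concrete, a hedged sketch that writes a one-row CSV the creator above can read. The exact required and optional columns and the delimiter come from the project settings (columns_config), so treat the names and values below as illustrative.

    import csv

    row = {
        "Folder Path": "/shots/sh010",            # hypothetical context values
        "Task Name": "compositing",
        "Version": "1",
        "Variant": "Main",
        "Product Type": "plate",
        "File Path": "plates/sh010.####.exr",     # relative to the CSV directory
        "Frame Start": "1001",
        "Frame End": "1100",
        "Handle Start": "0",
        "Handle End": "0",
        "FPS": "25.0",
        "Representation": "exr",
        "Representation Colorspace": "ACES - ACEScg",
        "Representation Tags": "review",
        "Slate Exists": "false",
        "Version Comment": "first ingest",
        "Version Thumbnail": "",
    }
    with open("manifest.csv", "w", newline="") as stream:
        writer = csv.DictWriter(stream, fieldnames=list(row), delimiter=",")
        writer.writeheader()
        writer.writerow(row)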
|
|
@ -0,0 +1,47 @@
|
|||
from pprint import pformat
|
||||
import pyblish.api
|
||||
from ayon_core.pipeline import publish
|
||||
|
||||
|
||||
class CollectCSVIngestInstancesData(
|
||||
pyblish.api.InstancePlugin,
|
||||
publish.AYONPyblishPluginMixin,
|
||||
publish.ColormanagedPyblishPluginMixin
|
||||
):
|
||||
"""Collect CSV Ingest data from instance.
|
||||
"""
|
||||
|
||||
label = "Collect CSV Ingest instances data"
|
||||
order = pyblish.api.CollectorOrder + 0.1
|
||||
hosts = ["traypublisher"]
|
||||
families = ["csv_ingest"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
# expecting [{"type": ..., "colorspace": ..., "representation": ...}, ...]
|
||||
prepared_repres_data_items = instance.data[
|
||||
"prepared_data_for_repres"]
|
||||
|
||||
for prep_repre_data in prepared_repres_data_items:
|
||||
type = prep_repre_data["type"]
|
||||
colorspace = prep_repre_data["colorspace"]
|
||||
repre_data = prep_repre_data["representation"]
|
||||
|
||||
# thumbnails should be skipped
|
||||
if type == "media":
|
||||
# colorspace name is passed from CSV column
|
||||
self.set_representation_colorspace(
|
||||
repre_data, instance.context, colorspace
|
||||
)
|
||||
elif type == "media" and colorspace is None:
|
||||
# TODO: implement colorspace file rules file parsing
|
||||
self.log.warning(
|
||||
"Colorspace is not defined in csv for following"
|
||||
f" representation: {pformat(repre_data)}"
|
||||
)
|
||||
pass
|
||||
elif type == "thumbnail":
|
||||
# thumbnails should be skipped
|
||||
pass
|
||||
|
||||
instance.data["representations"].append(repre_data)
|
||||
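For clarity, the shape of the items this collector consumes, as built by the IngestCSV creator above; the values are illustrative.

    prep_repre_data = {
        "type": "media",                  # or "thumbnail"
        "colorspace": "ACES - ACEScg",    # None triggers the warning branch
        "representation": {
            "name": "exr",
            "ext": "exr",
            "files": ["sh010.1001.exr", "sh010.1002.exr"],
            "stagingDir": "/ingest/plates",
            "stagingDir_persistent": True,
            "tags": ["review"],
        },
    }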
|
|
@ -0,0 +1,31 @@
|
|||
import pyblish.api
|
||||
|
||||
from ayon_core.pipeline import publish
|
||||
|
||||
|
||||
class ExtractCSVFile(publish.Extractor):
|
||||
"""
|
||||
Extractor export CSV file
|
||||
"""
|
||||
|
||||
label = "Extract CSV file"
|
||||
order = pyblish.api.ExtractorOrder - 0.45
|
||||
families = ["csv_ingest_file"]
|
||||
hosts = ["traypublisher"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
csv_file_data = instance.data["csvFileData"]
|
||||
|
||||
representation_csv = {
|
||||
'name': "csv_data",
|
||||
'ext': "csv",
|
||||
'files': csv_file_data["filename"],
|
||||
"stagingDir": csv_file_data["staging_dir"],
|
||||
"stagingDir_persistent": True
|
||||
}
|
||||
|
||||
instance.data["representations"].append(representation_csv)
|
||||
|
||||
self.log.info("Added CSV file representation: {}".format(
|
||||
representation_csv))
|
||||
|
|
@ -16,6 +16,7 @@ class ValidateExistingVersion(
|
|||
order = ValidateContentsOrder
|
||||
|
||||
hosts = ["traypublisher"]
|
||||
targets = ["local"]
|
||||
|
||||
actions = [RepairAction]
|
||||
|
||||
|
|
|
|||
|
|
@ -16,6 +16,8 @@ class ValidateFrameRange(OptionalPyblishPluginMixin,
|
|||
label = "Validate Frame Range"
|
||||
hosts = ["traypublisher"]
|
||||
families = ["render", "plate"]
|
||||
targets = ["local"]
|
||||
|
||||
order = ValidateContentsOrder
|
||||
|
||||
optional = True
|
||||
|
|
|
|||
|
|
@ -97,6 +97,15 @@ from .context_tools import (
|
|||
get_current_folder_path,
|
||||
get_current_task_name
|
||||
)
|
||||
|
||||
from .workfile import (
|
||||
discover_workfile_build_plugins,
|
||||
register_workfile_build_plugin,
|
||||
deregister_workfile_build_plugin,
|
||||
register_workfile_build_plugin_path,
|
||||
deregister_workfile_build_plugin_path,
|
||||
)
|
||||
|
||||
install = install_host
|
||||
uninstall = uninstall_host
|
||||
|
||||
|
|
@ -198,6 +207,13 @@ __all__ = (
|
|||
"get_current_folder_path",
|
||||
"get_current_task_name",
|
||||
|
||||
# Workfile templates
|
||||
"discover_workfile_build_plugins",
|
||||
"register_workfile_build_plugin",
|
||||
"deregister_workfile_build_plugin",
|
||||
"register_workfile_build_plugin_path",
|
||||
"deregister_workfile_build_plugin_path",
|
||||
|
||||
# Backwards compatible function names
|
||||
"install",
|
||||
"uninstall",
|
||||
|
|
|
|||
|
|
@ -21,6 +21,15 @@ from .utils import (
|
|||
from .build_workfile import BuildWorkfile
|
||||
|
||||
|
||||
from .workfile_template_builder import (
|
||||
discover_workfile_build_plugins,
|
||||
register_workfile_build_plugin,
|
||||
deregister_workfile_build_plugin,
|
||||
register_workfile_build_plugin_path,
|
||||
deregister_workfile_build_plugin_path,
|
||||
)
|
||||
|
||||
|
||||
__all__ = (
|
||||
"get_workfile_template_key_from_context",
|
||||
"get_workfile_template_key",
|
||||
|
|
@ -39,4 +48,10 @@ __all__ = (
|
|||
"should_open_workfiles_tool_on_launch",
|
||||
|
||||
"BuildWorkfile",
|
||||
|
||||
"discover_workfile_build_plugins",
|
||||
"register_workfile_build_plugin",
|
||||
"deregister_workfile_build_plugin",
|
||||
"register_workfile_build_plugin_path",
|
||||
"deregister_workfile_build_plugin_path",
|
||||
)
|
||||
|
|
|
|||
|
|
@ -36,6 +36,7 @@ from ayon_core.lib import (
|
|||
filter_profiles,
|
||||
attribute_definitions,
|
||||
)
|
||||
from ayon_core.lib.events import EventSystem, EventCallback, Event
|
||||
from ayon_core.lib.attribute_definitions import get_attributes_keys
|
||||
from ayon_core.pipeline import Anatomy
|
||||
from ayon_core.pipeline.load import (
|
||||
|
|
@ -43,6 +44,13 @@ from ayon_core.pipeline.load import (
|
|||
get_representation_contexts,
|
||||
load_with_repre_context,
|
||||
)
|
||||
from ayon_core.pipeline.plugin_discover import (
|
||||
discover,
|
||||
register_plugin,
|
||||
register_plugin_path,
|
||||
deregister_plugin,
|
||||
deregister_plugin_path
|
||||
)
|
||||
|
||||
from ayon_core.pipeline.create import (
|
||||
discover_legacy_creator_plugins,
|
||||
|
|
@ -124,6 +132,8 @@ class AbstractTemplateBuilder(object):
|
|||
self._current_task_entity = _NOT_SET
|
||||
self._linked_folder_entities = _NOT_SET
|
||||
|
||||
self._event_system = EventSystem()
|
||||
|
||||
@property
|
||||
def project_name(self):
|
||||
if isinstance(self._host, HostBase):
|
||||
|
|
@ -211,10 +221,14 @@ class AbstractTemplateBuilder(object):
|
|||
Returns:
|
||||
List[PlaceholderPlugin]: Plugin classes available for host.
|
||||
"""
|
||||
plugins = []
|
||||
|
||||
# Backwards compatibility
|
||||
if hasattr(self._host, "get_workfile_build_placeholder_plugins"):
|
||||
return self._host.get_workfile_build_placeholder_plugins()
|
||||
return []
|
||||
|
||||
plugins.extend(discover(PlaceholderPlugin))
|
||||
return plugins
|
||||
|
||||
@property
|
||||
def host(self):
|
||||
|
|
@ -257,6 +271,8 @@ class AbstractTemplateBuilder(object):
|
|||
|
||||
self._project_settings = None
|
||||
|
||||
self._event_system = EventSystem()
|
||||
|
||||
self.clear_shared_data()
|
||||
self.clear_shared_populate_data()
|
||||
|
||||
|
|
@ -498,15 +514,21 @@ class AbstractTemplateBuilder(object):
|
|||
process if version is created
|
||||
|
||||
"""
|
||||
template_preset = self.get_template_preset()
|
||||
|
||||
if template_path is None:
|
||||
template_path = template_preset["path"]
|
||||
|
||||
if keep_placeholders is None:
|
||||
keep_placeholders = template_preset["keep_placeholder"]
|
||||
if create_first_version is None:
|
||||
create_first_version = template_preset["create_first_version"]
|
||||
if any(
|
||||
value is None
|
||||
for value in [
|
||||
template_path,
|
||||
keep_placeholders,
|
||||
create_first_version,
|
||||
]
|
||||
):
|
||||
template_preset = self.get_template_preset()
|
||||
if template_path is None:
|
||||
template_path = template_preset["path"]
|
||||
if keep_placeholders is None:
|
||||
keep_placeholders = template_preset["keep_placeholder"]
|
||||
if create_first_version is None:
|
||||
create_first_version = template_preset["create_first_version"]
|
||||
|
||||
# check if first version is created
|
||||
created_version_workfile = False
|
||||
|
|
@ -729,6 +751,16 @@ class AbstractTemplateBuilder(object):
|
|||
|
||||
placeholder.set_finished()
|
||||
|
||||
# Trigger on_depth_processed event
|
||||
self.emit_event(
|
||||
topic="template.depth_processed",
|
||||
data={
|
||||
"depth": iter_counter,
|
||||
"placeholders_by_scene_id": placeholder_by_scene_id
|
||||
},
|
||||
source="builder"
|
||||
)
|
||||
|
||||
# Clear shared data before getting new placeholders
|
||||
self.clear_shared_populate_data()
|
||||
|
||||
|
|
@ -747,6 +779,16 @@ class AbstractTemplateBuilder(object):
|
|||
placeholder_by_scene_id[identifier] = placeholder
|
||||
placeholders.append(placeholder)
|
||||
|
||||
# Trigger on_finished event
|
||||
self.emit_event(
|
||||
topic="template.finished",
|
||||
data={
|
||||
"depth": iter_counter,
|
||||
"placeholders_by_scene_id": placeholder_by_scene_id,
|
||||
},
|
||||
source="builder"
|
||||
)
|
||||
|
||||
self.refresh()
|
||||
|
||||
def _get_build_profiles(self):
|
||||
|
|
@ -772,12 +814,14 @@ class AbstractTemplateBuilder(object):
|
|||
- 'project_settings/{host name}/templated_workfile_build/profiles'
|
||||
|
||||
Returns:
|
||||
str: Path to a template file with placeholders.
|
||||
dict: Dictionary with `path`, `keep_placeholder` and
|
||||
`create_first_version` settings from the template preset
|
||||
for current context.
|
||||
|
||||
Raises:
|
||||
TemplateProfileNotFound: When profiles are not filled.
|
||||
TemplateLoadFailed: Profile was found but path is not set.
|
||||
TemplateNotFound: Path was set but file does not exists.
|
||||
TemplateNotFound: Path was set but file does not exist.
|
||||
"""
|
||||
|
||||
host_name = self.host_name
|
||||
|
|
@ -872,6 +916,30 @@ class AbstractTemplateBuilder(object):
|
|||
"create_first_version": create_first_version
|
||||
}
|
||||
|
||||
def emit_event(self, topic, data=None, source=None) -> Event:
|
||||
return self._event_system.emit(topic, data, source)
|
||||
|
||||
def add_event_callback(self, topic, callback, order=None):
|
||||
return self._event_system.add_callback(topic, callback, order=order)
|
||||
|
||||
def add_on_finished_callback(
|
||||
self, callback, order=None
|
||||
) -> EventCallback:
|
||||
return self.add_event_callback(
|
||||
topic="template.finished",
|
||||
callback=callback,
|
||||
order=order
|
||||
)
|
||||
|
||||
def add_on_depth_processed_callback(
|
||||
self, callback, order=None
|
||||
) -> EventCallback:
|
||||
return self.add_event_callback(
|
||||
topic="template.depth_processed",
|
||||
callback=callback,
|
||||
order=order
|
||||
)
|
||||
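A hedged usage sketch of the new builder events; HostTemplateBuilder stands in for a host-specific AbstractTemplateBuilder subclass and is hypothetical.

    from ayon_core.pipeline import registered_host

    def _on_depth_processed(event):
        print("processed depth:", event.data["depth"])

    def _on_finished(event):
        print("placeholders:", list(event.data["placeholders_by_scene_id"]))

    builder = HostTemplateBuilder(registered_host())  # hypothetical subclass
    builder.add_on_depth_processed_callback(_on_depth_processed)
    builder.add_on_finished_callback(_on_finished)
    builder.build_template()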
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class PlaceholderPlugin(object):
|
||||
|
|
@ -1904,3 +1972,23 @@ class CreatePlaceholderItem(PlaceholderItem):
|
|||
|
||||
def create_failed(self, creator_data):
|
||||
self._failed_created_publish_instances.append(creator_data)
|
||||
|
||||
|
||||
def discover_workfile_build_plugins(*args, **kwargs):
|
||||
return discover(PlaceholderPlugin, *args, **kwargs)
|
||||
|
||||
|
||||
def register_workfile_build_plugin(plugin: PlaceholderPlugin):
|
||||
register_plugin(PlaceholderPlugin, plugin)
|
||||
|
||||
|
||||
def deregister_workfile_build_plugin(plugin: PlaceholderPlugin):
|
||||
deregister_plugin(PlaceholderPlugin, plugin)
|
||||
|
||||
|
||||
def register_workfile_build_plugin_path(path: str):
|
||||
register_plugin_path(PlaceholderPlugin, path)
|
||||
|
||||
|
||||
def deregister_workfile_build_plugin_path(path: str):
|
||||
deregister_plugin_path(PlaceholderPlugin, path)
|
||||
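A minimal sketch of the new registration helpers with a custom plugin; MyPlaceholderPlugin is hypothetical and would need to implement the full abstract PlaceholderPlugin interface.

    from ayon_core.pipeline.workfile import (
        register_workfile_build_plugin,
        discover_workfile_build_plugins,
    )
    from ayon_core.pipeline.workfile.workfile_template_builder import (
        PlaceholderPlugin,
    )

    class MyPlaceholderPlugin(PlaceholderPlugin):  # hypothetical subclass
        identifier = "studio.custom"
        label = "Studio custom"

        def collect_placeholders(self):
            return []

        def populate_placeholder(self, placeholder):
            pass

    register_workfile_build_plugin(MyPlaceholderPlugin)
    print(discover_workfile_build_plugins())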
|
|
|
|||
|
|
@ -167,7 +167,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
|||
"uasset",
|
||||
"blendScene",
|
||||
"yeticacheUE",
|
||||
"tycache"
|
||||
"tycache",
|
||||
"csv_ingest_file",
|
||||
]
|
||||
|
||||
default_template_name = "publish"
|
||||
|
|
|
|||
|
|
@ -1,3 +1,10 @@
|
|||
name = "applications"
|
||||
title = "Applications"
|
||||
version = "0.2.0"
|
||||
|
||||
ayon_server_version = ">=1.0.7"
|
||||
ayon_launcher_version = ">=1.0.2"
|
||||
ayon_required_addons = {
|
||||
"core": ">0.3.0",
|
||||
}
|
||||
ayon_compatible_addons = {}
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ import shutil
|
|||
import argparse
|
||||
import zipfile
|
||||
import types
|
||||
import importlib
|
||||
import importlib.machinery
|
||||
import platform
|
||||
import collections
|
||||
from pathlib import Path
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
name = "hiero"
|
||||
title = "Hiero"
|
||||
version = "0.1.2"
|
||||
version = "0.1.3"
|
||||
|
|
|
|||
|
|
@@ -149,15 +149,15 @@ class ImageIOSettings(BaseSettingsModel):

DEFAULT_IMAGEIO_SETTINGS = {
    "workfile": {
-        "ocioConfigName": "nuke-default",
-        "workingSpace": "linear",
-        "viewerLut": "sRGB",
-        "eightBitLut": "sRGB",
-        "sixteenBitLut": "sRGB",
-        "logLut": "Cineon",
-        "floatLut": "linear",
-        "thumbnailLut": "sRGB",
-        "monitorOutLut": "sRGB"
+        "ocioConfigName": "aces_1.2",
+        "workingSpace": "role_scene_linear",
+        "viewerLut": "ACES/sRGB",
+        "eightBitLut": "role_matte_paint",
+        "sixteenBitLut": "role_texture_paint",
+        "logLut": "role_compositing_log",
+        "floatLut": "role_scene_linear",
+        "thumbnailLut": "ACES/sRGB",
+        "monitorOutLut": "ACES/sRGB"
    },
    "regexInputs": {
        "inputs": [

@@ -1,3 +1,3 @@
name = "nuke"
title = "Nuke"
-version = "0.1.10"
+version = "0.1.11"

@@ -97,8 +97,23 @@ class WorkfileColorspaceSettings(BaseSettingsModel):
    working_space: str = SettingsField(
        title="Working Space"
    )
-    thumbnail_space: str = SettingsField(
-        title="Thumbnail Space"
+    monitor_lut: str = SettingsField(
+        title="Thumbnails"
    )
+    monitor_out_lut: str = SettingsField(
+        title="Monitor Out"
+    )
+    int_8_lut: str = SettingsField(
+        title="8-bit Files"
+    )
+    int_16_lut: str = SettingsField(
+        title="16-bit Files"
+    )
+    log_lut: str = SettingsField(
+        title="Log Files"
+    )
+    float_lut: str = SettingsField(
+        title="Float Files"
+    )

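Each SettingsField title becomes the label rendered next to the field in the AYON server settings UI, while the attribute name is the key stored in the settings data. The same pattern in isolation, as a sketch (assumes ayon_server is importable; the model name is made up):

    # Minimal sketch of the settings-model pattern, not Nuke-specific code.
    from ayon_server.settings import BaseSettingsModel, SettingsField


    class ExampleColorspaceModel(BaseSettingsModel):
        # "title" is what the settings UI shows; the attribute name is the
        # key under which the value is stored in project settings
        working_space: str = SettingsField(title="Working Space")
        monitor_lut: str = SettingsField(title="Thumbnails")
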
@@ -120,6 +135,9 @@ class ViewProcessModel(BaseSettingsModel):
    viewerProcess: str = SettingsField(
        title="Viewer Process Name"
    )
+    output_transform: str = SettingsField(
+        title="Output Transform"
+    )


class ImageIOConfigModel(BaseSettingsModel):

@@ -214,16 +232,23 @@ class ImageIOSettings(BaseSettingsModel):

DEFAULT_IMAGEIO_SETTINGS = {
    "viewer": {
-        "viewerProcess": "sRGB (default)"
+        "viewerProcess": "ACES/sRGB",
+        "output_transform": "ACES/sRGB"
    },
    "baking": {
-        "viewerProcess": "rec709 (default)"
+        "viewerProcess": "ACES/Rec.709",
+        "output_transform": "ACES/Rec.709"
    },
    "workfile": {
        "color_management": "OCIO",
-        "native_ocio_config": "nuke-default",
-        "working_space": "scene_linear",
-        "thumbnail_space": "sRGB (default)",
+        "native_ocio_config": "aces_1.2",
+        "working_space": "role_scene_linear",
+        "monitor_lut": "ACES/sRGB",
+        "monitor_out_lut": "ACES/sRGB",
+        "int_8_lut": "role_matte_paint",
+        "int_16_lut": "role_texture_paint",
+        "log_lut": "role_compositing_log",
+        "float_lut": "role_scene_linear"
    },
    "nodes": {
        "required_nodes": [

@@ -1,4 +1,7 @@
+from pydantic import validator
from ayon_server.settings import BaseSettingsModel, SettingsField
+from ayon_server.settings.validators import ensure_unique_names
+from ayon_server.exceptions import BadRequestException


class BatchMovieCreatorPlugin(BaseSettingsModel):
@@ -22,11 +25,139 @@ class BatchMovieCreatorPlugin(BaseSettingsModel):
    )


+class ColumnItemModel(BaseSettingsModel):
+    """Definition of a single column expected in the ingested CSV file."""
+
+    name: str = SettingsField(
+        title="Name",
+        default=""
+    )
+
+    type: str = SettingsField(
+        title="Type",
+        default=""
+    )
+
+    default: str = SettingsField(
+        title="Default",
+        default=""
+    )
+
+    required_column: bool = SettingsField(
+        title="Required Column",
+        default=False
+    )
+
+    validation_pattern: str = SettingsField(
+        title="Validation Regex Pattern",
+        default="^(.*)$"
+    )
+
+
+class ColumnConfigModel(BaseSettingsModel):
+    """Layout and delimiter of the columns expected in the CSV file."""
+
+    csv_delimiter: str = SettingsField(
+        title="CSV delimiter",
+        default=","
+    )
+
+    columns: list[ColumnItemModel] = SettingsField(
+        title="Columns",
+        default_factory=list
+    )
+
+    @validator("columns")
+    def validate_unique_outputs(cls, value):
+        ensure_unique_names(value)
+        return value
+
+
+class RepresentationItemModel(BaseSettingsModel):
+    """Mapping of a representation name to its matching file extensions."""
+
+    name: str = SettingsField(
+        title="Name",
+        default=""
+    )
+
+    extensions: list[str] = SettingsField(
+        title="Extensions",
+        default_factory=list
+    )
+
+    @validator("extensions")
+    def validate_extension(cls, value):
+        for ext in value:
+            if not ext.startswith("."):
+                raise BadRequestException(
+                    f"Extension must start with '.': {ext}"
+                )
+        return value
+
+
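The validator runs when the settings are saved, so a bare extension is rejected before it can reach any publish logic. A sketch of that behavior outside the server (assumes ayon_server is importable and the model above is in scope; whether pydantic re-wraps the exception is not asserted here):

    # Sketch: exercising validate_extension directly.
    from ayon_server.exceptions import BadRequestException

    RepresentationItemModel(name="exr", extensions=[".exr"])  # accepted

    try:
        RepresentationItemModel(name="exr", extensions=["exr"])
    except BadRequestException:
        # rejected: "exr" does not start with "."
        print("extension without a leading dot was refused")
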
+class RepresentationConfigModel(BaseSettingsModel):
+    """Configuration of the representations created from the CSV rows."""
+
+    tags_delimiter: str = SettingsField(
+        title="Tags delimiter",
+        default=";"
+    )
+
+    default_tags: list[str] = SettingsField(
+        title="Default tags",
+        default_factory=list
+    )
+
+    representations: list[RepresentationItemModel] = SettingsField(
+        title="Representations",
+        default_factory=list
+    )
+
+    @validator("representations")
+    def validate_unique_outputs(cls, value):
+        ensure_unique_names(value)
+        return value
+
+
+class IngestCSVPluginModel(BaseSettingsModel):
+    """Publish multiple products in one go from the rows of a CSV file."""
+
+    enabled: bool = SettingsField(
+        title="Enabled",
+        default=False
+    )
+
+    columns_config: ColumnConfigModel = SettingsField(
+        title="Columns config",
+        default_factory=ColumnConfigModel
+    )
+
+    representations_config: RepresentationConfigModel = SettingsField(
+        title="Representations config",
+        default_factory=RepresentationConfigModel
+    )


class TrayPublisherCreatePluginsModel(BaseSettingsModel):
    BatchMovieCreator: BatchMovieCreatorPlugin = SettingsField(
        title="Batch Movie Creator",
        default_factory=BatchMovieCreatorPlugin
    )
+    IngestCSV: IngestCSVPluginModel = SettingsField(
+        title="Ingest CSV",
+        default_factory=IngestCSVPluginModel
+    )


DEFAULT_CREATORS = {

@@ -41,4 +172,170 @@ DEFAULT_CREATORS = {
            ".mov"
        ]
    },
+    "IngestCSV": {
+        "enabled": True,
+        "columns_config": {
+            "csv_delimiter": ",",
+            "columns": [
+                {
+                    "name": "File Path",
+                    "type": "text",
+                    "default": "",
+                    "required_column": True,
+                    "validation_pattern": "^([a-z0-9#._\\/]*)$"
+                },
+                {
+                    "name": "Folder Path",
+                    "type": "text",
+                    "default": "",
+                    "required_column": True,
+                    "validation_pattern": "^([a-zA-Z0-9_\\/]*)$"
+                },
+                {
+                    "name": "Task Name",
+                    "type": "text",
+                    "default": "",
+                    "required_column": True,
+                    "validation_pattern": "^(.*)$"
+                },
+                {
+                    "name": "Product Type",
+                    "type": "text",
+                    "default": "",
+                    "required_column": False,
+                    "validation_pattern": "^(.*)$"
+                },
+                {
+                    "name": "Variant",
+                    "type": "text",
+                    "default": "",
+                    "required_column": False,
+                    "validation_pattern": "^(.*)$"
+                },
+                {
+                    "name": "Version",
+                    "type": "number",
+                    "default": 1,
+                    "required_column": True,
+                    "validation_pattern": "^(\\d{1,3})$"
+                },
+                {
+                    "name": "Version Comment",
+                    "type": "text",
+                    "default": "",
+                    "required_column": False,
+                    "validation_pattern": "^(.*)$"
+                },
+                {
+                    "name": "Version Thumbnail",
+                    "type": "text",
+                    "default": "",
+                    "required_column": False,
+                    "validation_pattern": "^([a-zA-Z0-9#._\\/]*)$"
+                },
+                {
+                    "name": "Frame Start",
+                    "type": "number",
+                    "default": 0,
+                    "required_column": True,
+                    "validation_pattern": "^(\\d{1,8})$"
+                },
+                {
+                    "name": "Frame End",
+                    "type": "number",
+                    "default": 0,
+                    "required_column": True,
+                    "validation_pattern": "^(\\d{1,8})$"
+                },
+                {
+                    "name": "Handle Start",
+                    "type": "number",
+                    "default": 0,
+                    "required_column": True,
+                    "validation_pattern": "^(\\d)$"
+                },
+                {
+                    "name": "Handle End",
+                    "type": "number",
+                    "default": 0,
+                    "required_column": True,
+                    "validation_pattern": "^(\\d)$"
+                },
+                {
+                    "name": "FPS",
+                    "type": "decimal",
+                    "default": 0.0,
+                    "required_column": True,
+                    "validation_pattern": "^[0-9]*\\.[0-9]+$|^[0-9]+$"
+                },
+                {
+                    "name": "Slate Exists",
+                    "type": "bool",
+                    "default": True,
+                    "required_column": False,
+                    "validation_pattern": "(True|False)"
+                },
+                {
+                    "name": "Representation",
+                    "type": "text",
+                    "default": "",
+                    "required_column": False,
+                    "validation_pattern": "^(.*)$"
+                },
+                {
+                    "name": "Representation Colorspace",
+                    "type": "text",
+                    "default": "",
+                    "required_column": False,
+                    "validation_pattern": "^(.*)$"
+                },
+                {
+                    "name": "Representation Tags",
+                    "type": "text",
+                    "default": "",
+                    "required_column": False,
+                    "validation_pattern": "^(.*)$"
+                }
+            ]
+        },
+        "representations_config": {
+            "tags_delimiter": ";",
+            "default_tags": [
+                "review"
+            ],
+            "representations": [
+                {
+                    "name": "preview",
+                    "extensions": [
+                        ".mp4",
+                        ".mov"
+                    ]
+                },
+                {
+                    "name": "exr",
+                    "extensions": [
+                        ".exr"
+                    ]
+                },
+                {
+                    "name": "edit",
+                    "extensions": [
+                        ".mov"
+                    ]
+                },
+                {
+                    "name": "review",
+                    "extensions": [
+                        ".mov"
+                    ]
+                },
+                {
+                    "name": "nuke",
+                    "extensions": [
+                        ".nk"
+                    ]
+                }
+            ]
+        }
+    }
}

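To make the default column config concrete, here is a hypothetical row checked against a few of the validation patterns above with plain re; all values are made up:

    # Hypothetical values checked against the default column patterns.
    import re

    patterns = {
        "File Path": "^([a-z0-9#._\\/]*)$",
        "Version": "^(\\d{1,3})$",
        "FPS": "^[0-9]*\\.[0-9]+$|^[0-9]+$",
    }
    row = {
        "File Path": "renders/sh010/beauty.####.exr",
        "Version": "17",
        "FPS": "23.976",
    }
    for column, pattern in patterns.items():
        # re.match anchors at the start; the patterns carry their own "$"
        assert re.match(pattern, row[column]), f"invalid value in {column}"
    print("row passes the default column validation")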