Merge branch 'release/3.15.x' into feature/OP-4634_Use-qtpy-in-tools

# Conflicts:
#	poetry.lock
Commit 782957bd0a by Jakub Trllo, 2023-01-16 18:09:09 +01:00
140 changed files with 6915 additions and 4613 deletions

.pre-commit-config.yaml (new file, 16 lines)

@ -0,0 +1,16 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- id: no-commit-to-branch
args: [ '--pattern', '^(?!((enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-]+)$).*' ]
- repo: https://github.com/psf/black
rev: 22.12.0
hooks:
- id: black
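The no-commit-to-branch pattern above is what enforces the branch naming convention: it matches (and therefore blocks) any branch name that does not look like "feature/Some-Name". A minimal standalone sketch of how the pattern itself behaves (the branch names below are examples only, not taken from this commit):

import re

# Pattern copied verbatim from the hook args above.
PATTERN = r"^(?!((enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-]+)$).*"

for branch in ("bugfix/fix-crash", "develop", "feature/nested/name"):
    # re.match anchors at the start of the string, which is all the pattern needs
    blocked = re.match(PATTERN, branch) is not None
    print(branch, "-> blocked" if blocked else "-> allowed")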


@ -49,7 +49,6 @@ class UpdateWindow(QtWidgets.QDialog):
self._update_thread = None
self.resize(QtCore.QSize(self._width, self._height))
self._init_ui()
# Set stylesheet
@ -81,6 +80,16 @@ class UpdateWindow(QtWidgets.QDialog):
self._progress_bar = progress_bar
def showEvent(self, event):
super().showEvent(event)
current_size = self.size()
new_size = QtCore.QSize(
max(current_size.width(), self._width),
max(current_size.height(), self._height)
)
if current_size != new_size:
self.resize(new_size)
def _run_update(self):
"""Start install process.


@ -276,8 +276,8 @@ class CreatorWidget(QtWidgets.QDialog):
elif v["type"] == "QSpinBox":
data[k]["value"] = self.create_row(
content_layout, "QSpinBox", v["label"],
setRange=(1, 9999999), setValue=v["value"],
setToolTip=tool_tip)
setValue=v["value"], setMinimum=0,
setMaximum=100000, setToolTip=tool_tip)
return data


@ -41,7 +41,7 @@ class ExtractThumnail(publish.Extractor):
track_item_name, thumb_frame, ".png")
thumb_path = os.path.join(staging_dir, thumb_file)
thumbnail = track_item.thumbnail(thumb_frame).save(
thumbnail = track_item.thumbnail(thumb_frame, "colour").save(
thumb_path,
format='png'
)


@ -34,7 +34,7 @@ class CreateHDA(plugin.HoudiniCreator):
}
return subset_name.lower() in existing_subset_names_low
def _create_instance_node(
def create_instance_node(
self, node_name, parent, node_type="geometry"):
import hou


@ -25,7 +25,7 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin):
# Check bypass state and reverse
active = True
node = hou.node(instance.get("instance_node"))
node = hou.node(instance.data.get("instance_node"))
if hasattr(node, "isBypassed"):
active = not node.isBypassed()
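Most of the Houdini publish plugins touched by this commit make the same one-line change: the ROP path is now read from the instance's data dictionary instead of calling get() on the instance object itself. A minimal sketch of the updated pattern, assuming a pyblish collector running inside Houdini (the plugin name and log message are illustrative only):

import pyblish.api
import hou  # only importable inside a Houdini session


class CollectInstanceNodeSketch(pyblish.api.InstancePlugin):
    """Sketch only: resolve the instance's ROP node from instance.data."""

    order = pyblish.api.CollectorOrder
    hosts = ["houdini"]

    def process(self, instance):
        # "instance_node" is stored in instance.data by the creator
        rop = hou.node(instance.data.get("instance_node"))
        self.log.debug("Resolved instance node: %s", rop)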


@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
def process(self, instance):
rop = hou.node(instance.get("instance_node"))
rop = hou.node(instance.data.get("instance_node"))
# Collect chunkSize
chunk_size_parm = rop.parm("chunkSize")


@ -21,7 +21,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin):
self.log.debug("No output node found..")
return
rop_node = hou.node(instance.get("instance_node"))
rop_node = hou.node(instance.data["instance_node"])
save_layers = []
for layer in usdlib.get_configured_save_layers(rop_node):
@ -56,6 +56,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin):
layer_inst.data["subset"] = "__stub__"
layer_inst.data["label"] = label
layer_inst.data["asset"] = instance.data["asset"]
layer_inst.data["instance_node"] = instance.data["instance_node"]
# include same USD ROP
layer_inst.append(rop_node)
# include layer data


@ -17,7 +17,7 @@ class ExtractRedshiftProxy(publish.Extractor):
def process(self, instance):
ropnode = hou.node(instance.get("instance_node"))
ropnode = hou.node(instance.data.get("instance_node"))
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved


@ -18,7 +18,7 @@ class ExtractUSD(publish.Extractor):
def process(self, instance):
ropnode = hou.node(instance.get("instance_node"))
ropnode = hou.node(instance.data.get("instance_node"))
# Get the filename from the filename parameter
output = ropnode.evalParm("lopoutput")


@ -187,7 +187,7 @@ class ExtractUSDLayered(publish.Extractor):
# Main ROP node, either a USD Rop or ROP network with
# multiple USD ROPs
node = hou.node(instance.get("instance_node"))
node = hou.node(instance.data["instance_node"])
# Collect any output dependencies that have not been processed yet
# during extraction of other instances


@ -17,7 +17,7 @@ class ExtractVDBCache(publish.Extractor):
def process(self, instance):
ropnode = hou.node(instance.get("instance_node"))
ropnode = hou.node(instance.data["instance_node"])
# Get the filename from the filename parameter
# `.evalParm(parameter)` will make sure all tokens are resolved


@ -37,8 +37,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin):
@classmethod
def get_invalid(cls, instance):
node = hou.node(instance.get("instance_node"))
node = hou.node(instance.data["instance_node"])
# Check trange parm, 0 means Render Current Frame
frame_range = node.evalParm("trange")
if frame_range == 0:


@ -37,6 +37,6 @@ class ValidateBypassed(pyblish.api.InstancePlugin):
@classmethod
def get_invalid(cls, instance):
rop = hou.node(instance.get("instance_node"))
rop = hou.node(instance.data["instance_node"])
if hasattr(rop, "isBypassed") and rop.isBypassed():
return [rop]


@ -48,7 +48,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
)
if output_node is None:
node = hou.node(instance.get("instance_node"))
node = hou.node(instance.data.get("instance_node"))
cls.log.error(
"COP Output node in '%s' does not exist. "
"Ensure a valid COP output path is set." % node.path()


@ -37,8 +37,7 @@ class ValidateFrameToken(pyblish.api.InstancePlugin):
@classmethod
def get_invalid(cls, instance):
node = hou.node(instance.get("instance_node"))
node = hou.node(instance.data["instance_node"])
# Check trange parm, 0 means Render Current Frame
frame_range = node.evalParm("trange")
if frame_range == 0:


@ -38,7 +38,7 @@ class ValidateNoErrors(pyblish.api.InstancePlugin):
validate_nodes = []
if len(instance) > 0:
validate_nodes.append(hou.node(instance.get("instance_node")))
validate_nodes.append(hou.node(instance.data.get("instance_node")))
output_node = instance.data.get("output_node")
if output_node:
validate_nodes.append(output_node)


@ -28,7 +28,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin):
def process(self, instance):
rop = hou.node(instance.get("instance_node"))
rop = hou.node(instance.data.get("instance_node"))
lop_path = hou_usdlib.get_usd_rop_loppath(rop)
stage = lop_path.stage(apply_viewport_overrides=False)


@ -40,7 +40,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin):
def process(self, instance):
rop = hou.node(instance.get("instance_node"))
rop = hou.node(instance.data.get("instance_node"))
lop_path = hou_usdlib.get_usd_rop_loppath(rop)
stage = lop_path.stage(apply_viewport_overrides=False)


@ -36,7 +36,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin):
output_node = instance.data["output_node"]
if output_node is None:
node = hou.node(instance.get("instance_node"))
node = hou.node(instance.data.get("instance_node"))
cls.log.error(
"USD node '%s' LOP path does not exist. "
"Ensure a valid LOP path is set." % node.path()


@ -24,7 +24,7 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin):
from pxr import UsdGeom
import hou
rop = hou.node(instance.get("instance_node"))
rop = hou.node(instance.data.get("instance_node"))
lop_path = hou_usdlib.get_usd_rop_loppath(rop)
stage = lop_path.stage(apply_viewport_overrides=False)


@ -20,7 +20,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin):
def process(self, instance):
rop = hou.node(instance.get("instance_node"))
rop = hou.node(instance.data.get("instance_node"))
workspace = rop.parent()
definition = workspace.type().definition()


@ -38,7 +38,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
if node is None:
cls.log.error(
"SOP path is not correctly set on "
"ROP node '%s'." % instance.get("instance_node")
"ROP node '%s'." % instance.data.get("instance_node")
)
return [instance]


@ -28,7 +28,7 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):
Args:
path (str): A path to current template (usually given by
get_template_path implementation)
get_template_preset implementation)
Returns:
bool: Whether the template was successfully imported or not
@ -240,7 +240,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
cmds.setAttr(node + ".hiddenInOutliner", True)
def load_succeed(self, placeholder, container):
self._parent_in_hierarhchy(placeholder, container)
self._parent_in_hierarchy(placeholder, container)
def _parent_in_hierarchy(self, placeholder, container):
"""Parent loaded container to placeholder's parent.


@ -72,15 +72,19 @@ class CreateRender(plugin.Creator):
def __init__(self, *args, **kwargs):
"""Constructor."""
super(CreateRender, self).__init__(*args, **kwargs)
deadline_settings = get_system_settings()["modules"]["deadline"]
if not deadline_settings["enabled"]:
self.deadline_servers = {}
return
# Defaults
self._project_settings = get_project_settings(
legacy_io.Session["AVALON_PROJECT"])
if self._project_settings["maya"]["RenderSettings"]["apply_render_settings"]: # noqa
lib_rendersettings.RenderSettings().set_default_renderer_settings()
# Deadline-only
manager = ModulesManager()
deadline_settings = get_system_settings()["modules"]["deadline"]
if not deadline_settings["enabled"]:
self.deadline_servers = {}
return
self.deadline_module = manager.modules_by_name["deadline"]
try:
default_servers = deadline_settings["deadline_urls"]
@ -193,8 +197,6 @@ class CreateRender(plugin.Creator):
pool_names = []
default_priority = 50
self.server_aliases = list(self.deadline_servers.keys())
self.data["deadlineServers"] = self.server_aliases
self.data["suspendPublishJob"] = False
self.data["review"] = True
self.data["extendFrames"] = False
@ -233,6 +235,9 @@ class CreateRender(plugin.Creator):
raise RuntimeError("Both Deadline and Muster are enabled")
if deadline_enabled:
self.server_aliases = list(self.deadline_servers.keys())
self.data["deadlineServers"] = self.server_aliases
try:
deadline_url = self.deadline_servers["default"]
except KeyError:
@ -254,6 +259,19 @@ class CreateRender(plugin.Creator):
default_priority)
self.data["tile_priority"] = tile_priority
pool_setting = (self._project_settings["deadline"]
["publish"]
["CollectDeadlinePools"])
primary_pool = pool_setting["primary_pool"]
self.data["primaryPool"] = self._set_default_pool(pool_names,
primary_pool)
# We add a string "-" to allow the user to not
# set any secondary pools
pool_names = ["-"] + pool_names
secondary_pool = pool_setting["secondary_pool"]
self.data["secondaryPool"] = self._set_default_pool(pool_names,
secondary_pool)
if muster_enabled:
self.log.info(">>> Loading Muster credentials ...")
self._load_credentials()
@ -273,18 +291,6 @@ class CreateRender(plugin.Creator):
self.log.info(" - pool: {}".format(pool["name"]))
pool_names.append(pool["name"])
pool_setting = (self._project_settings["deadline"]
["publish"]
["CollectDeadlinePools"])
primary_pool = pool_setting["primary_pool"]
self.data["primaryPool"] = self._set_default_pool(pool_names,
primary_pool)
# We add a string "-" to allow the user to not
# set any secondary pools
pool_names = ["-"] + pool_names
secondary_pool = pool_setting["secondary_pool"]
self.data["secondaryPool"] = self._set_default_pool(pool_names,
secondary_pool)
self.options = {"useSelection": False} # Force no content
def _set_default_pool(self, pool_names, pool_value):


@ -23,8 +23,6 @@ class CameraWindow(QtWidgets.QDialog):
self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint)
self.camera = None
self.static_image_plane = False
self.show_in_all_views = False
self.widgets = {
"label": QtWidgets.QLabel("Select camera for image plane."),
@ -45,8 +43,6 @@ class CameraWindow(QtWidgets.QDialog):
for camera in cameras:
self.widgets["list"].addItem(camera)
self.widgets["staticImagePlane"].setText("Make Image Plane Static")
self.widgets["showInAllViews"].setText("Show Image Plane in All Views")
# Build buttons.
layout = QtWidgets.QHBoxLayout(self.widgets["buttons"])
@ -57,8 +53,6 @@ class CameraWindow(QtWidgets.QDialog):
layout = QtWidgets.QVBoxLayout(self)
layout.addWidget(self.widgets["label"])
layout.addWidget(self.widgets["list"])
layout.addWidget(self.widgets["staticImagePlane"])
layout.addWidget(self.widgets["showInAllViews"])
layout.addWidget(self.widgets["buttons"])
layout.addWidget(self.widgets["warning"])
@ -73,8 +67,6 @@ class CameraWindow(QtWidgets.QDialog):
if self.camera is None:
self.widgets["warning"].setVisible(True)
return
self.show_in_all_views = self.widgets["showInAllViews"].isChecked()
self.static_image_plane = self.widgets["staticImagePlane"].isChecked()
self.close()
@ -82,7 +74,6 @@ class CameraWindow(QtWidgets.QDialog):
self.camera = None
self.close()
class ImagePlaneLoader(load.LoaderPlugin):
"""Specific loader of plate for image planes on selected camera."""
@ -106,12 +97,10 @@ class ImagePlaneLoader(load.LoaderPlugin):
# Get camera from user selection.
camera = None
is_static_image_plane = None
is_in_all_views = None
# is_static_image_plane = None
# is_in_all_views = None
if data:
camera = pm.PyNode(data.get("camera"))
is_static_image_plane = data.get("static_image_plane")
is_in_all_views = data.get("in_all_views")
if not camera:
cameras = pm.ls(type="camera")
@ -119,11 +108,11 @@ class ImagePlaneLoader(load.LoaderPlugin):
camera_names["Create new camera."] = "create_camera"
window = CameraWindow(camera_names.keys())
window.exec_()
# Skip if no camera was selected (Dialog was closed)
if window.camera not in camera_names:
return
camera = camera_names[window.camera]
is_static_image_plane = window.static_image_plane
is_in_all_views = window.show_in_all_views
if camera == "create_camera":
camera = pm.createNode("camera")
@ -139,18 +128,14 @@ class ImagePlaneLoader(load.LoaderPlugin):
# Create image plane
image_plane_transform, image_plane_shape = pm.imagePlane(
fileName=context["representation"]["data"]["path"],
camera=camera, showInAllViews=is_in_all_views
)
camera=camera)
image_plane_shape.depth.set(image_plane_depth)
if is_static_image_plane:
image_plane_shape.detach()
image_plane_transform.setRotation(camera.getRotation())
start_frame = pm.playbackOptions(q=True, min=True)
end_frame = pm.playbackOptions(q=True, max=True)
image_plane_shape.frameOffset.set(1 - start_frame)
image_plane_shape.frameOffset.set(0)
image_plane_shape.frameIn.set(start_frame)
image_plane_shape.frameOut.set(end_frame)
image_plane_shape.frameCache.set(end_frame)
@ -180,9 +165,17 @@ class ImagePlaneLoader(load.LoaderPlugin):
QtWidgets.QMessageBox.Cancel
)
if reply == QtWidgets.QMessageBox.Ok:
pm.delete(
image_plane_shape.listConnections(type="expression")[0]
)
# find the input and output of frame extension
expressions = image_plane_shape.frameExtension.inputs()
frame_ext_output = image_plane_shape.frameExtension.outputs()
if expressions:
# the "time1" node is non-deletable attr
# in Maya, use disconnectAttr instead
pm.disconnectAttr(expressions, frame_ext_output)
if not image_plane_shape.frameExtension.isFreeToChange():
raise RuntimeError("Can't set frame extension for {}".format(image_plane_shape)) # noqa
# get the node of time instead and set the time for it.
image_plane_shape.frameExtension.set(start_frame)
new_nodes.extend(
@ -233,7 +226,8 @@ class ImagePlaneLoader(load.LoaderPlugin):
)
start_frame = asset["data"]["frameStart"]
end_frame = asset["data"]["frameEnd"]
image_plane_shape.frameOffset.set(1 - start_frame)
image_plane_shape.frameOffset.set(0)
image_plane_shape.frameIn.set(start_frame)
image_plane_shape.frameOut.set(end_frame)
image_plane_shape.frameCache.set(end_frame)


@ -174,9 +174,6 @@ class CollectInstances(pyblish.api.ContextPlugin):
if "handles" in data:
data["handleStart"] = data["handles"]
data["handleEnd"] = data["handles"]
else:
data["handleStart"] = 0
data["handleEnd"] = 0
data["frameStartHandle"] = data["frameStart"] - data["handleStart"] # noqa: E501
data["frameEndHandle"] = data["frameEnd"] + data["handleEnd"] # noqa: E501


@ -5,6 +5,11 @@ from openpype.pipeline.publish import (
RepairAction,
ValidateContentsOrder,
)
from openpype.hosts.maya.api.lib_rendersetup import (
get_attr_overrides,
get_attr_in_layer,
)
from maya.app.renderSetup.model.override import AbsOverride
class ValidateFrameRange(pyblish.api.InstancePlugin):
@ -92,10 +97,86 @@ class ValidateFrameRange(pyblish.api.InstancePlugin):
"""
Repair instance container to match asset data.
"""
cmds.setAttr(
"{}.frameStart".format(instance.data["name"]),
instance.context.data.get("frameStartHandle"))
cmds.setAttr(
"{}.frameEnd".format(instance.data["name"]),
instance.context.data.get("frameEndHandle"))
if "renderlayer" in instance.data.get("families"):
# Special behavior for renderlayers
cls.repair_renderlayer(instance)
return
node = instance.data["name"]
context = instance.context
frame_start_handle = int(context.data.get("frameStartHandle"))
frame_end_handle = int(context.data.get("frameEndHandle"))
handle_start = int(context.data.get("handleStart"))
handle_end = int(context.data.get("handleEnd"))
frame_start = int(context.data.get("frameStart"))
frame_end = int(context.data.get("frameEnd"))
# Start
if cmds.attributeQuery("handleStart", node=node, exists=True):
cmds.setAttr("{}.handleStart".format(node), handle_start)
cmds.setAttr("{}.frameStart".format(node), frame_start)
else:
# Include start handle in frame start if no separate handleStart
# attribute exists on the node
cmds.setAttr("{}.frameStart".format(node), frame_start_handle)
# End
if cmds.attributeQuery("handleEnd", node=node, exists=True):
cmds.setAttr("{}.handleEnd".format(node), handle_end)
cmds.setAttr("{}.frameEnd".format(node), frame_end)
else:
# Include end handle in frame end if no separate handleEnd
# attribute exists on the node
cmds.setAttr("{}.frameEnd".format(node), frame_end_handle)
@classmethod
def repair_renderlayer(cls, instance):
"""Apply frame range in render settings"""
layer = instance.data["setMembers"]
context = instance.context
start_attr = "defaultRenderGlobals.startFrame"
end_attr = "defaultRenderGlobals.endFrame"
frame_start_handle = int(context.data.get("frameStartHandle"))
frame_end_handle = int(context.data.get("frameEndHandle"))
cls._set_attr_in_layer(start_attr, layer, frame_start_handle)
cls._set_attr_in_layer(end_attr, layer, frame_end_handle)
@classmethod
def _set_attr_in_layer(cls, node_attr, layer, value):
if get_attr_in_layer(node_attr, layer=layer) == value:
# Already ok. This can happen if you have multiple renderlayers
# validated and there are no frame range overrides. The first
# layer's repair would have fixed the global value already
return
overrides = list(get_attr_overrides(node_attr, layer=layer))
if overrides:
# We set the last absolute override if it is an absolute override
# otherwise we'll add an Absolute override
last_override = overrides[-1][1]
if not isinstance(last_override, AbsOverride):
collection = last_override.parent()
node, attr = node_attr.split(".", 1)
last_override = collection.createAbsoluteOverride(node, attr)
cls.log.debug("Setting {attr} absolute override in "
"layer '{layer}': {value}".format(layer=layer,
attr=node_attr,
value=value))
cmds.setAttr(last_override.name() + ".attrValue", value)
else:
# Set the attribute directly
# (Note that this will set the global attribute)
cls.log.debug("Setting global {attr}: {value}".format(
attr=node_attr,
value=value
))
cmds.setAttr(node_attr, value)


@ -6,18 +6,26 @@ from .workio import (
current_file,
work_root,
)
from .command import (
viewer_update_and_undo_stop
)
from .plugin import OpenPypeCreator
from .plugin import (
NukeCreator,
NukeWriteCreator,
NukeCreatorError,
OpenPypeCreator,
get_instance_group_node_childs,
get_colorspace_from_node
)
from .pipeline import (
install,
uninstall,
NukeHost,
ls,
list_instances,
remove_instance,
select_instance,
containerise,
parse_container,
update_container,
@ -25,13 +33,19 @@ from .pipeline import (
get_workfile_build_placeholder_plugins,
)
from .lib import (
INSTANCE_DATA_KNOB,
ROOT_DATA_KNOB,
maintained_selection,
reset_selection,
select_nodes,
get_view_process_node,
duplicate_node,
convert_knob_value_to_correct_type
convert_knob_value_to_correct_type,
get_node_data,
set_node_data,
update_node_data,
create_write_node
)
from .utils import (
colorspace_exists_on_node,
get_colorspace_list
@ -47,23 +61,38 @@ __all__ = (
"viewer_update_and_undo_stop",
"NukeCreator",
"NukeWriteCreator",
"NukeCreatorError",
"OpenPypeCreator",
"install",
"uninstall",
"NukeHost",
"get_instance_group_node_childs",
"get_colorspace_from_node",
"ls",
"list_instances",
"remove_instance",
"select_instance",
"containerise",
"parse_container",
"update_container",
"get_workfile_build_placeholder_plugins",
"INSTANCE_DATA_KNOB",
"ROOT_DATA_KNOB",
"maintained_selection",
"reset_selection",
"select_nodes",
"get_view_process_node",
"duplicate_node",
"convert_knob_value_to_correct_type",
"get_node_data",
"set_node_data",
"update_node_data",
"create_write_node",
"colorspace_exists_on_node",
"get_colorspace_list"


@ -1,14 +1,15 @@
import os
from pprint import pformat
import re
import json
import six
import functools
import warnings
import platform
import tempfile
import contextlib
from collections import OrderedDict
import clique
import nuke
from qtpy import QtCore, QtWidgets
@ -64,6 +65,54 @@ EXCLUDED_KNOB_TYPE_ON_READ = (
26, # Text Knob (But for backward compatibility, still be read
# if value is not an empty string.)
)
JSON_PREFIX = "JSON:::"
ROOT_DATA_KNOB = "publish_context"
INSTANCE_DATA_KNOB = "publish_instance"
class DeprecatedWarning(DeprecationWarning):
pass
def deprecated(new_destination):
"""Mark functions as deprecated.
It will result in a warning being emitted when the function is used.
"""
func = None
if callable(new_destination):
func = new_destination
new_destination = None
def _decorator(decorated_func):
if new_destination is None:
warning_message = (
" Please check content of deprecated function to figure out"
" possible replacement."
)
else:
warning_message = " Please replace your usage with '{}'.".format(
new_destination
)
@functools.wraps(decorated_func)
def wrapper(*args, **kwargs):
warnings.simplefilter("always", DeprecatedWarning)
warnings.warn(
(
"Call to deprecated function '{}'"
"\nFunction was moved or removed.{}"
).format(decorated_func.__name__, warning_message),
category=DeprecatedWarning,
stacklevel=4
)
return decorated_func(*args, **kwargs)
return wrapper
if func is None:
return _decorator
return _decorator(func)
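A short sketch of how the decorator above is used; both forms appear further down in this same file (with and without an explicit replacement path):

@deprecated("openpype.hosts.nuke.api.lib.create_write_node")
def create_write_node_legacy(*args, **kwargs):
    """Calling this emits a DeprecatedWarning naming the replacement."""
    ...


@deprecated
def add_deadline_tab(node):
    """Bare form: the warning only says the function was moved or removed."""
    ...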
class Context:
@ -94,8 +143,78 @@ def get_main_window():
return Context.main_window
def set_node_data(node, knobname, data):
"""Write data to node invisible knob
Will create a new knob in case it doesn't exist,
or update the one already created.
Args:
node (nuke.Node): node object
knobname (str): knob name
data (dict): data to be stored in knob
"""
# if exists then update data
if knobname in node.knobs():
log.debug("Updating knobname `{}` on node `{}`".format(
knobname, node.name()
))
update_node_data(node, knobname, data)
return
log.debug("Creating knobname `{}` on node `{}`".format(
knobname, node.name()
))
# else create new
knob_value = JSON_PREFIX + json.dumps(data)
knob = nuke.String_Knob(knobname)
knob.setValue(knob_value)
knob.setFlag(nuke.INVISIBLE)
node.addKnob(knob)
def get_node_data(node, knobname):
"""Read data from node.
Args:
node (nuke.Node): node object
knobname (str): knob name
Returns:
dict: data stored in knob
"""
if knobname not in node.knobs():
return
rawdata = node[knobname].getValue()
if (
isinstance(rawdata, six.string_types)
and rawdata.startswith(JSON_PREFIX)
):
try:
return json.loads(rawdata[len(JSON_PREFIX):])
except json.JSONDecodeError:
return
def update_node_data(node, knobname, data):
"""Update already present data.
Args:
node (nuke.Node): node object
knobname (str): knob name
data (dict): data to update knob value
"""
knob = node[knobname]
node_data = get_node_data(node, knobname) or {}
node_data.update(data)
knob_value = JSON_PREFIX + json.dumps(node_data)
knob.setValue(knob_value)
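A short usage sketch for the three helpers above (the node and the stored values are illustrative); the dict is serialized as JSON behind the "JSON:::" prefix into an invisible string knob:

node = nuke.createNode("NoOp")

# creates the hidden knob on first call
set_node_data(node, INSTANCE_DATA_KNOB, {"subset": "renderMain", "family": "render"})

# reads it back as a dict (returns None if the knob is missing or not JSON)
data = get_node_data(node, INSTANCE_DATA_KNOB)
assert data["subset"] == "renderMain"

# merges new keys into the already stored data
update_node_data(node, INSTANCE_DATA_KNOB, {"active": True})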
class Knobby(object):
"""For creating knob which it's type isn't mapped in `create_knobs`
"""[DEPRICATED] For creating knob which it's type isn't
mapped in `create_knobs`
Args:
type (string): Nuke knob type name
@ -120,9 +239,15 @@ class Knobby(object):
knob.setFlag(flag)
return knob
@staticmethod
def nice_naming(key):
"""Convert camelCase name into UI Display Name"""
words = re.findall('[A-Z][^A-Z]*', key[0].upper() + key[1:])
return " ".join(words)
def create_knobs(data, tab=None):
"""Create knobs by data
"""[DEPRICATED] Create knobs by data
Depending on the type of each dict value and creates the correct Knob.
@ -216,7 +341,7 @@ def create_knobs(data, tab=None):
def imprint(node, data, tab=None):
"""Store attributes with value on node
"""[DEPRICATED] Store attributes with value on node
Parse user data into Node knobs.
Use `collections.OrderedDict` to ensure knob order.
@ -272,7 +397,7 @@ def imprint(node, data, tab=None):
def add_publish_knob(node):
"""Add Publish knob to node
"""[DEPRICATED] Add Publish knob to node
Arguments:
node (nuke.Node): nuke node to be processed
@ -290,7 +415,7 @@ def add_publish_knob(node):
def set_avalon_knob_data(node, data=None, prefix="avalon:"):
""" Sets data into nodes's avalon knob
"""[DEPRICATED] Sets data into nodes's avalon knob
Arguments:
node (nuke.Node): Nuke node to imprint with data,
@ -351,8 +476,8 @@ def set_avalon_knob_data(node, data=None, prefix="avalon:"):
return node
def get_avalon_knob_data(node, prefix="avalon:"):
""" Gets a data from nodes's avalon knob
def get_avalon_knob_data(node, prefix="avalon:", create=True):
"""[DEPRICATED] Gets a data from nodes's avalon knob
Arguments:
node (obj): Nuke node to search for data,
@ -380,8 +505,11 @@ def get_avalon_knob_data(node, prefix="avalon:"):
except NameError as e:
# if it doesn't then create it
log.debug("Creating avalon knob: `{}`".format(e))
node = set_avalon_knob_data(node)
return get_avalon_knob_data(node)
if create:
node = set_avalon_knob_data(node)
return get_avalon_knob_data(node)
else:
return {}
# get data from filtered knobs
data.update({k.replace(p, ''): node[k].value()
@ -392,7 +520,7 @@ def get_avalon_knob_data(node, prefix="avalon:"):
def fix_data_for_node_create(data):
"""Fixing data to be used for nuke knobs
"""[DEPRICATED] Fixing data to be used for nuke knobs
"""
for k, v in data.items():
if isinstance(v, six.text_type):
@ -403,7 +531,7 @@ def fix_data_for_node_create(data):
def add_write_node_legacy(name, **kwarg):
"""Adding nuke write node
"""[DEPRICATED] Adding nuke write node
Arguments:
name (str): nuke node name
kwarg (attrs): data for nuke knobs
@ -566,15 +694,16 @@ def get_nuke_imageio_settings():
Context.project_name)["nuke"]["imageio"]
# backward compatibility for project started before 3.10
# those are still having `__legacy__` knob types
# those are still having `__legacy__` knob types.
if not project_imageio["enabled"]:
return get_anatomy_settings(Context.project_name)["imageio"]["nuke"]
return get_project_settings(Context.project_name)["nuke"]["imageio"]
@deprecated("openpype.hosts.nuke.api.lib.get_nuke_imageio_settings")
def get_created_node_imageio_setting_legacy(nodeclass, creator, subset):
''' Get preset data for dataflow (fileType, compression, bitDepth)
'''[DEPRECATED] Get preset data for dataflow (fileType, compression, bitDepth)
'''
assert any([creator, nodeclass]), nuke.message(
@ -971,27 +1100,14 @@ def format_anatomy(data):
Return:
path (str)
'''
# TODO: perhaps should be nonPublic
anatomy = Anatomy()
log.debug("__ anatomy.templates: {}".format(anatomy.templates))
try:
# TODO: bck compatibility with old anatomy template
padding = int(
anatomy.templates["render"].get(
"frame_padding",
anatomy.templates["render"].get("padding")
)
padding = int(
anatomy.templates["render"].get(
"frame_padding"
)
except KeyError as e:
msg = ("`padding` key is not in `render` "
"or `frame_padding` on is not available in "
"Anatomy template. Please, add it there and restart "
"the pipeline (padding: \"4\"): `{}`").format(e)
log.error(msg)
nuke.message(msg)
)
version = data.get("version", None)
if not version:
@ -999,16 +1115,16 @@ def format_anatomy(data):
data["version"] = get_version_from_path(file)
project_name = anatomy.project_name
asset_name = data["avalon"]["asset"]
task_name = os.environ["AVALON_TASK"]
asset_name = data["asset"]
task_name = data["task"]
host_name = os.environ["AVALON_APP"]
context_data = get_template_data_with_names(
project_name, asset_name, task_name, host_name
)
data.update(context_data)
data.update({
"subset": data["avalon"]["subset"],
"family": data["avalon"]["family"],
"subset": data["subset"],
"family": data["family"],
"frame": "#" * padding,
})
return anatomy.format(data)
@ -1100,8 +1216,6 @@ def create_write_node(
data,
input=None,
prenodes=None,
review=True,
farm=True,
linked_knobs=None,
**kwargs
):
@ -1143,35 +1257,26 @@ def create_write_node(
'''
prenodes = prenodes or {}
# group node knob overrides
knob_overrides = data.pop("knobs", [])
# filtering variables
plugin_name = data["creator"]
subset = data["subset"]
# get knob settings for write node
imageio_writes = get_imageio_node_setting(
node_class=data["nodeclass"],
node_class="Write",
plugin_name=plugin_name,
subset=subset
)
for knob in imageio_writes["knobs"]:
if knob["name"] == "file_type":
representation = knob["value"]
ext = knob["value"]
try:
data.update({
"imageio_writes": imageio_writes,
"representation": representation,
})
anatomy_filled = format_anatomy(data)
except Exception as e:
msg = "problem with resolving anatomy template: {}".format(e)
log.error(msg)
nuke.message(msg)
data.update({
"imageio_writes": imageio_writes,
"ext": ext
})
anatomy_filled = format_anatomy(data)
# build file path to workfiles
fdir = str(anatomy_filled["work"]["folder"]).replace("\\", "/")
@ -1180,7 +1285,7 @@ def create_write_node(
version=data["version"],
subset=data["subset"],
frame=data["frame"],
ext=representation
ext=ext
)
# create directory
@ -1234,14 +1339,6 @@ def create_write_node(
# connect to previous node
now_node.setInput(0, prev_node)
# imprinting group node
set_avalon_knob_data(GN, data["avalon"])
add_publish_knob(GN)
add_rendering_knobs(GN, farm)
if review:
add_review_knob(GN)
# add divider
GN.addKnob(nuke.Text_Knob('', 'Rendering'))
@ -1287,11 +1384,7 @@ def create_write_node(
# adding write to read button
add_button_clear_rendered(GN, os.path.dirname(fpath))
# Deadline tab.
add_deadline_tab(GN)
# open the our Tab as default
GN[_NODE_TAB_NAME].setFlag(0)
GN.addKnob(nuke.Text_Knob('', ''))
# set tile color
tile_color = next(
@ -1303,12 +1396,10 @@ def create_write_node(
GN["tile_color"].setValue(
color_gui_to_int(tile_color))
# finally add knob overrides
set_node_knobs_from_settings(GN, knob_overrides, **kwargs)
return GN
@deprecated("openpype.hosts.nuke.api.lib.create_write_node")
def create_write_node_legacy(
name, data, input=None, prenodes=None,
review=True, linked_knobs=None, farm=True
@ -1599,6 +1690,13 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs):
if knob_name not in node.knobs():
continue
if knob_type == "expression":
knob_expression = knob["expression"]
node[knob_name].setExpression(
knob_expression
)
continue
# first deal with formatable knob settings
if knob_type == "formatable":
template = knob["template"]
@ -1607,7 +1705,6 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs):
_knob_value = template.format(
**kwargs
)
log.debug("__ knob_value0: {}".format(_knob_value))
except KeyError as msg:
log.warning("__ msg: {}".format(msg))
raise KeyError(msg)
@ -1661,6 +1758,7 @@ def color_gui_to_int(color_gui):
return int(hex_value, 16)
@deprecated
def add_rendering_knobs(node, farm=True):
''' Adds additional rendering knobs to given node
@ -1681,6 +1779,7 @@ def add_rendering_knobs(node, farm=True):
return node
@deprecated
def add_review_knob(node):
''' Adds additional review knob to given node
@ -1697,7 +1796,9 @@ def add_review_knob(node):
return node
@deprecated
def add_deadline_tab(node):
# TODO: remove this as it is only linked to legacy create
node.addKnob(nuke.Tab_Knob("Deadline"))
knob = nuke.Int_Knob("deadlinePriority", "Priority")
@ -1723,7 +1824,10 @@ def add_deadline_tab(node):
node.addKnob(knob)
@deprecated
def get_deadline_knob_names():
# TODO: remove this as it is only linked to legacy
# validate_write_deadline_tab
return [
"Deadline",
"deadlineChunkSize",
@ -2137,7 +2241,8 @@ class WorkfileSettings(object):
range = '{0}-{1}'.format(
int(data["frameStart"]),
int(data["frameEnd"]))
int(data["frameEnd"])
)
for node in nuke.allNodes(filter="Viewer"):
node['frame_range'].setValue(range)
@ -2145,12 +2250,14 @@ class WorkfileSettings(object):
node['frame_range'].setValue(range)
node['frame_range_lock'].setValue(True)
# adding handle_start/end to root avalon knob
if not set_avalon_knob_data(self._root_node, {
"handleStart": int(handle_start),
"handleEnd": int(handle_end)
}):
log.warning("Cannot set Avalon knob to Root node!")
set_node_data(
self._root_node,
INSTANCE_DATA_KNOB,
{
"handleStart": int(handle_start),
"handleEnd": int(handle_end)
}
)
def reset_resolution(self):
"""Set resolution to project resolution."""
@ -2264,29 +2371,25 @@ def get_write_node_template_attr(node):
''' Gets all defined data from presets
'''
# TODO: add identifiers to settings and rename settings key
plugin_names_mapping = {
"create_write_image": "CreateWriteImage",
"create_write_prerender": "CreateWritePrerender",
"create_write_render": "CreateWriteRender"
}
# get avalon data from node
avalon_knob_data = read_avalon_data(node)
# get template data
nuke_imageio_writes = get_imageio_node_setting(
node_class=avalon_knob_data["families"],
plugin_name=avalon_knob_data["creator"],
subset=avalon_knob_data["subset"]
node_data = get_node_data(node, INSTANCE_DATA_KNOB)
identifier = node_data["creator_identifier"]
# return template data
return get_imageio_node_setting(
node_class="Write",
plugin_name=plugin_names_mapping[identifier],
subset=node_data["subset"]
)
# collecting correct data
correct_data = OrderedDict()
# adding imageio knob presets
for k, v in nuke_imageio_writes.items():
if k in ["_id", "_previous"]:
continue
correct_data[k] = v
# fix badly encoded data
return fix_data_for_node_create(correct_data)
def get_dependent_nodes(nodes):
"""Get all dependent nodes connected to the list of nodes.
@ -2325,10 +2428,11 @@ def get_dependent_nodes(nodes):
def find_free_space_to_paste_nodes(
nodes,
group=nuke.root(),
direction="right",
offset=300):
nodes,
group=nuke.root(),
direction="right",
offset=300
):
"""
For getting coordinates in DAG (node graph) for placing new nodes
@ -2554,6 +2658,7 @@ def process_workfile_builder():
open_file(last_workfile_path)
@deprecated
def recreate_instance(origin_node, avalon_data=None):
"""Recreate input instance to different data
@ -2619,6 +2724,32 @@ def recreate_instance(origin_node, avalon_data=None):
return new_node
def add_scripts_menu():
try:
from scriptsmenu import launchfornuke
except ImportError:
log.warning(
"Skipping studio.menu install, because "
"'scriptsmenu' module seems unavailable."
)
return
# load configuration of custom menu
project_settings = get_project_settings(os.getenv("AVALON_PROJECT"))
config = project_settings["nuke"]["scriptsmenu"]["definition"]
_menu = project_settings["nuke"]["scriptsmenu"]["name"]
if not config:
log.warning("Skipping studio menu, no definition found.")
return
# run the launcher for Maya menu
studio_menu = launchfornuke.main(title=_menu.title())
# apply configuration
studio_menu.build_from_configuration(studio_menu, config)
def add_scripts_gizmo():
# load configuration of custom menu
@ -2799,48 +2930,6 @@ def dirmap_file_name_filter(file_name):
return file_name
# ------------------------------------
# This function seems to be deprecated
# ------------------------------------
def ls_img_sequence(path):
"""Listing all available coherent image sequence from path
Arguments:
path (str): A nuke's node object
Returns:
data (dict): with nuke formated path and frameranges
"""
file = os.path.basename(path)
dirpath = os.path.dirname(path)
base, ext = os.path.splitext(file)
name, padding = os.path.splitext(base)
# populate list of files
files = [
f for f in os.listdir(dirpath)
if name in f
if ext in f
]
# create collection from list of files
collections, reminder = clique.assemble(files)
if len(collections) > 0:
head = collections[0].format("{head}")
padding = collections[0].format("{padding}") % 1
padding = "#" * len(padding)
tail = collections[0].format("{tail}")
file = head + padding + tail
return {
"path": os.path.join(dirpath, file).replace("\\", "/"),
"frames": collections[0].format("[{ranges}]")
}
return False
def get_group_io_nodes(nodes):
"""Get the input and the output of a group of nodes."""
@ -2865,10 +2954,11 @@ def get_group_io_nodes(nodes):
break
if input_node is None:
raise ValueError("No Input found")
log.warning("No Input found")
if output_node is None:
raise ValueError("No Output found")
log.warning("No Output found")
return input_node, output_node


@ -1,21 +1,24 @@
import nuke
import os
import importlib
from collections import OrderedDict
import nuke
import pyblish.api
import openpype
from openpype.host import (
HostBase,
IWorkfileHost,
ILoadHost,
IPublishHost
)
from openpype.settings import get_current_project_settings
from openpype.lib import register_event_callback, Logger
from openpype.pipeline import (
register_loader_plugin_path,
register_creator_plugin_path,
register_inventory_action_path,
deregister_loader_plugin_path,
deregister_creator_plugin_path,
deregister_inventory_action_path,
AVALON_CONTAINER_ID,
)
from openpype.pipeline.workfile import BuildWorkfile
@ -24,6 +27,8 @@ from openpype.tools.utils import host_tools
from .command import viewer_update_and_undo_stop
from .lib import (
Context,
ROOT_DATA_KNOB,
INSTANCE_DATA_KNOB,
get_main_window,
add_publish_knob,
WorkfileSettings,
@ -32,14 +37,29 @@ from .lib import (
check_inventory_versions,
set_avalon_knob_data,
read_avalon_data,
on_script_load,
dirmap_file_name_filter,
add_scripts_menu,
add_scripts_gizmo,
get_node_data,
set_node_data
)
from .workfile_template_builder import (
NukePlaceholderLoadPlugin,
NukePlaceholderCreatePlugin,
build_workfile_template,
update_workfile_template,
create_placeholder,
update_placeholder,
)
from .workio import (
open_file,
save_file,
file_extensions,
has_unsaved_changes,
work_root,
current_file
)
log = Logger.get_logger(__name__)
@ -58,6 +78,95 @@ if os.getenv("PYBLISH_GUI", None):
pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None))
class NukeHost(
HostBase, IWorkfileHost, ILoadHost, IPublishHost
):
name = "nuke"
def open_workfile(self, filepath):
return open_file(filepath)
def save_workfile(self, filepath=None):
return save_file(filepath)
def work_root(self, session):
return work_root(session)
def get_current_workfile(self):
return current_file()
def workfile_has_unsaved_changes(self):
return has_unsaved_changes()
def get_workfile_extensions(self):
return file_extensions()
def get_containers(self):
return ls()
def install(self):
''' Installing all requirements for Nuke host
'''
pyblish.api.register_host("nuke")
self.log.info("Registering Nuke plug-ins..")
pyblish.api.register_plugin_path(PUBLISH_PATH)
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
register_inventory_action_path(INVENTORY_PATH)
# Register Avalon event for workfiles loading.
register_event_callback("workio.open_file", check_inventory_versions)
register_event_callback("taskChanged", change_context_label)
pyblish.api.register_callback(
"instanceToggled", on_pyblish_instance_toggled)
_install_menu()
# add script menu
add_scripts_menu()
add_scripts_gizmo()
add_nuke_callbacks()
launch_workfiles_app()
def get_context_data(self):
root_node = nuke.root()
return get_node_data(root_node, ROOT_DATA_KNOB)
def update_context_data(self, data, changes):
root_node = nuke.root()
set_node_data(root_node, ROOT_DATA_KNOB, data)
def add_nuke_callbacks():
""" Adding all available nuke callbacks
"""
workfile_settings = WorkfileSettings()
# Set context settings.
nuke.addOnCreate(
workfile_settings.set_context_settings, nodeClass="Root")
nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root")
nuke.addOnCreate(process_workfile_builder, nodeClass="Root")
# fix ffmpeg settings on script
nuke.addOnScriptLoad(on_script_load)
# set checker for last versions on loaded containers
nuke.addOnScriptLoad(check_inventory_versions)
nuke.addOnScriptSave(check_inventory_versions)
# # set apply all workfile settings on script load and save
nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)
nuke.addFilenameFilter(dirmap_file_name_filter)
log.info("Added Nuke callbacks ...")
def reload_config():
"""Attempt to reload pipeline at run-time.
@ -83,52 +192,6 @@ def reload_config():
reload(module)
def install():
''' Installing all requirements for Nuke host
'''
pyblish.api.register_host("nuke")
log.info("Registering Nuke plug-ins..")
pyblish.api.register_plugin_path(PUBLISH_PATH)
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
register_inventory_action_path(INVENTORY_PATH)
# Register Avalon event for workfiles loading.
register_event_callback("workio.open_file", check_inventory_versions)
register_event_callback("taskChanged", change_context_label)
pyblish.api.register_callback(
"instanceToggled", on_pyblish_instance_toggled)
workfile_settings = WorkfileSettings()
# Set context settings.
nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root")
nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root")
nuke.addOnCreate(process_workfile_builder, nodeClass="Root")
_install_menu()
launch_workfiles_app()
def uninstall():
'''Uninstalling host's integration
'''
log.info("Deregistering Nuke plug-ins..")
pyblish.deregister_host("nuke")
pyblish.api.deregister_plugin_path(PUBLISH_PATH)
deregister_loader_plugin_path(LOAD_PATH)
deregister_creator_plugin_path(CREATE_PATH)
deregister_inventory_action_path(INVENTORY_PATH)
pyblish.api.deregister_callback(
"instanceToggled", on_pyblish_instance_toggled)
reload_config()
_uninstall_menu()
def _show_workfiles():
# Make sure parent is not set
# - this makes Workfiles tool as separated window which
@ -139,7 +202,8 @@ def _show_workfiles():
def get_workfile_build_placeholder_plugins():
return [
NukePlaceholderLoadPlugin
NukePlaceholderLoadPlugin,
NukePlaceholderCreatePlugin
]
@ -165,7 +229,15 @@ def _install_menu():
menu.addSeparator()
menu.addCommand(
"Create...",
lambda: host_tools.show_creator(parent=main_window)
lambda: host_tools.show_publisher(
tab="create"
)
)
menu.addCommand(
"Publish...",
lambda: host_tools.show_publisher(
tab="publish"
)
)
menu.addCommand(
"Load...",
@ -174,14 +246,11 @@ def _install_menu():
use_context=True
)
)
menu.addCommand(
"Publish...",
lambda: host_tools.show_publish(parent=main_window)
)
menu.addCommand(
"Manage...",
lambda: host_tools.show_scene_inventory(parent=main_window)
)
menu.addSeparator()
menu.addCommand(
"Library...",
lambda: host_tools.show_library_loader(
@ -217,10 +286,6 @@ def _install_menu():
"Build Workfile from template",
lambda: build_workfile_template()
)
menu_template.addCommand(
"Update Workfile",
lambda: update_workfile_template()
)
menu_template.addSeparator()
menu_template.addCommand(
"Create Place Holder",
@ -235,7 +300,7 @@ def _install_menu():
"Experimental tools...",
lambda: host_tools.show_experimental_tools_dialog(parent=main_window)
)
menu.addSeparator()
# add reload pipeline only in debug mode
if bool(os.getenv("NUKE_DEBUG")):
menu.addSeparator()
@ -245,15 +310,6 @@ def _install_menu():
add_shortcuts_from_presets()
def _uninstall_menu():
menubar = nuke.menu("Nuke")
menu = menubar.findItem(MENU_LABEL)
for item in menu.items():
log.info("Removing menu item: {}".format(item.name()))
menu.removeItem(item.name())
def change_context_label():
menubar = nuke.menu("Nuke")
menu = menubar.findItem(MENU_LABEL)
@ -285,8 +341,8 @@ def add_shortcuts_from_presets():
if nuke_presets.get("menu"):
menu_label_mapping = {
"manage": "Manage...",
"create": "Create...",
"manage": "Manage...",
"load": "Load...",
"build_workfile": "Build Workfile",
"publish": "Publish..."
@ -304,7 +360,7 @@ def add_shortcuts_from_presets():
item_label = menu_label_mapping[command_name]
menuitem = menu.findItem(item_label)
menuitem.setShortcut(shortcut_str)
except AttributeError as e:
except (AttributeError, KeyError) as e:
log.error(e)
@ -436,11 +492,72 @@ def ls():
"""
all_nodes = nuke.allNodes(recurseGroups=False)
# TODO: add readgeo, readcamera, readimage
nodes = [n for n in all_nodes]
for n in nodes:
log.debug("name: `{}`".format(n.name()))
container = parse_container(n)
if container:
yield container
def list_instances(creator_id=None):
"""List all created instances to publish from current workfile.
For SubsetManager
Returns:
(list) of dictionaries matching instances format
"""
listed_instances = []
for node in nuke.allNodes(recurseGroups=True):
if node.Class() in ["Viewer", "Dot"]:
continue
try:
if node["disable"].value():
continue
except NameError:
# pass if disable knob doesn't exist
pass
# get data from avalon knob
instance_data = get_node_data(
node, INSTANCE_DATA_KNOB)
if not instance_data:
continue
if instance_data["id"] != "pyblish.avalon.instance":
continue
if creator_id and instance_data["creator_identifier"] != creator_id:
continue
listed_instances.append((node, instance_data))
return listed_instances
def remove_instance(instance):
"""Remove instance from current workfile metadata.
For SubsetManager
Args:
instance (dict): instance representation from subsetmanager model
"""
instance_node = instance.transient_data["node"]
instance_knob = instance_node.knobs()[INSTANCE_DATA_KNOB]
instance_node.removeKnob(instance_knob)
def select_instance(instance):
"""
Select instance in Node View
Args:
instance (dict): instance representation from subsetmanager model
"""
instance_node = instance.transient_data["node"]
instance_node["selected"].setValue(True)


@ -1,27 +1,373 @@
import nuke
import re
import os
import sys
import six
import random
import string
from collections import OrderedDict
from collections import OrderedDict, defaultdict
from abc import abstractmethod
import nuke
from openpype.client import (
get_asset_by_name,
get_subsets,
)
from openpype.settings import get_current_project_settings
from openpype.lib import (
BoolDef,
EnumDef
)
from openpype.pipeline import (
LegacyCreator,
LoaderPlugin,
CreatorError,
Creator as NewCreator,
CreatedInstance,
legacy_io
)
from .lib import (
INSTANCE_DATA_KNOB,
Knobby,
check_subsetname_exists,
maintained_selection,
get_avalon_knob_data,
set_avalon_knob_data,
add_publish_knob,
get_nuke_imageio_settings,
set_node_knobs_from_settings,
set_node_data,
get_node_data,
get_view_process_node,
get_viewer_config_from_string
get_viewer_config_from_string,
deprecated
)
from .pipeline import (
list_instances,
remove_instance
)
def _collect_and_cache_nodes(creator):
key = "openpype.nuke.nodes"
if key not in creator.collection_shared_data:
instances_by_identifier = defaultdict(list)
for item in list_instances():
_, instance_data = item
identifier = instance_data["creator_identifier"]
instances_by_identifier[identifier].append(item)
creator.collection_shared_data[key] = instances_by_identifier
return creator.collection_shared_data[key]
class NukeCreatorError(CreatorError):
pass
class NukeCreator(NewCreator):
selected_nodes = []
def pass_pre_attributes_to_instance(
self,
instance_data,
pre_create_data,
keys=None
):
if not keys:
keys = pre_create_data.keys()
creator_attrs = instance_data["creator_attributes"] = {}
for pass_key in keys:
creator_attrs[pass_key] = pre_create_data[pass_key]
def add_info_knob(self, node):
if "OP_info" in node.knobs().keys():
return
# add info text
info_knob = nuke.Text_Knob("OP_info", "")
info_knob.setValue("""
<span style=\"color:#fc0303\">
<p>This node is maintained by <b>OpenPype Publisher</b>.</p>
<p>To remove it use Publisher gui.</p>
</span>
""")
node.addKnob(info_knob)
def check_existing_subset(self, subset_name, instance_data):
"""Check if existing subset name already exists."""
exists = False
for node in nuke.allNodes(recurseGroups=True):
node_data = get_node_data(node, INSTANCE_DATA_KNOB)
if subset_name in node_data.get("subset"):
exists = True
return exists
def create_instance_node(
self,
node_name,
knobs=None,
parent=None,
node_type=None
):
"""Create node representing instance.
Arguments:
node_name (str): Name of the new node.
knobs (OrderedDict): node knobs name and values
parent (str): Name of the parent node.
node_type (str, optional): Nuke node Class.
Returns:
nuke.Node: Newly created instance node.
"""
node_type = node_type or "NoOp"
node_knobs = knobs or {}
# set parent node
parent_node = nuke.root()
if parent:
parent_node = nuke.toNode(parent)
try:
with parent_node:
created_node = nuke.createNode(node_type)
created_node["name"].setValue(node_name)
self.add_info_knob(created_node)
for key, values in node_knobs.items():
if key in created_node.knobs():
created_node["key"].setValue(values)
except Exception as _err:
raise NukeCreatorError("Creating have failed: {}".format(_err))
return created_node
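A minimal sketch of calling the method above from inside a NukeCreator subclass (the name, knob value and node type are illustrative only):

# e.g. inside a NukeCreator.create() implementation:
instance_node = self.create_instance_node(
    "renderMain",                      # node name, usually the subset name
    knobs={"tile_color": 4278190335},  # optional knob values (hypothetical)
    parent=None,                       # None -> node is created under nuke.root()
    node_type="NoOp",                  # default node class
)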
def set_selected_nodes(self, pre_create_data):
if pre_create_data.get("use_selection"):
self.selected_nodes = nuke.selectedNodes()
if self.selected_nodes == []:
raise NukeCreatorError("Creator error: No active selection")
else:
self.selected_nodes = []
def create(self, subset_name, instance_data, pre_create_data):
# make sure selected nodes are added
self.set_selected_nodes(pre_create_data)
# make sure subset name is unique
if self.check_existing_subset(subset_name, instance_data):
raise NukeCreatorError(
("subset {} is already published"
"definition.").format(subset_name))
try:
instance_node = self.create_instance_node(
subset_name,
node_type=instance_data.pop("node_type", None)
)
instance = CreatedInstance(
self.family,
subset_name,
instance_data,
self
)
instance.transient_data["node"] = instance_node
self._add_instance_to_context(instance)
set_node_data(
instance_node, INSTANCE_DATA_KNOB, instance.data_to_store())
return instance
except Exception as er:
six.reraise(
NukeCreatorError,
NukeCreatorError("Creator error: {}".format(er)),
sys.exc_info()[2])
def collect_instances(self):
cached_instances = _collect_and_cache_nodes(self)
for (node, data) in cached_instances[self.identifier]:
created_instance = CreatedInstance.from_existing(
data, self
)
created_instance.transient_data["node"] = node
self._add_instance_to_context(created_instance)
def update_instances(self, update_list):
for created_inst, _changes in update_list:
instance_node = created_inst.transient_data["node"]
# in case node is not existing anymore (user erased it manually)
try:
instance_node.fullName()
except ValueError:
self.remove_instances([created_inst])
continue
set_node_data(
instance_node,
INSTANCE_DATA_KNOB,
created_inst.data_to_store()
)
def remove_instances(self, instances):
for instance in instances:
remove_instance(instance)
self._remove_instance_from_context(instance)
def get_pre_create_attr_defs(self):
return [
BoolDef("use_selection", label="Use selection")
]
def get_creator_settings(self, project_settings, settings_key=None):
if not settings_key:
settings_key = self.__class__.__name__
return project_settings["nuke"]["create"][settings_key]
class NukeWriteCreator(NukeCreator):
"""Add Publishable Write node"""
identifier = "create_write"
label = "Create Write"
family = "write"
icon = "sign-out"
def integrate_links(self, node, outputs=True):
# skip if no selection
if not self.selected_node:
return
# collect dependencies
input_nodes = [self.selected_node]
dependent_nodes = self.selected_node.dependent() if outputs else []
# relinking to collected connections
for i, input in enumerate(input_nodes):
node.setInput(i, input)
# make it nicer in graph
node.autoplace()
# relink also dependent nodes
for dep_nodes in dependent_nodes:
dep_nodes.setInput(0, node)
def set_selected_nodes(self, pre_create_data):
if pre_create_data.get("use_selection"):
selected_nodes = nuke.selectedNodes()
if selected_nodes == []:
raise NukeCreatorError("Creator error: No active selection")
elif len(selected_nodes) > 1:
NukeCreatorError("Creator error: Select only one camera node")
self.selected_node = selected_nodes[0]
else:
self.selected_node = None
def get_pre_create_attr_defs(self):
attr_defs = [
BoolDef("use_selection", label="Use selection"),
self._get_render_target_enum()
]
return attr_defs
def get_instance_attr_defs(self):
attr_defs = [
self._get_render_target_enum(),
self._get_reviewable_bool()
]
return attr_defs
def _get_render_target_enum(self):
rendering_targets = {
"local": "Local machine rendering",
"frames": "Use existing frames"
}
if ("farm_rendering" in self.instance_attributes):
rendering_targets["farm"] = "Farm rendering"
return EnumDef(
"render_target",
items=rendering_targets,
label="Render target"
)
def _get_reviewable_bool(self):
return BoolDef(
"review",
default=("reviewable" in self.instance_attributes),
label="Review"
)
def create(self, subset_name, instance_data, pre_create_data):
# make sure selected nodes are added
self.set_selected_nodes(pre_create_data)
# make sure subset name is unique
if self.check_existing_subset(subset_name, instance_data):
raise NukeCreatorError(
("subset {} is already published"
"definition.").format(subset_name))
instance_node = self.create_instance_node(
subset_name,
instance_data
)
try:
instance = CreatedInstance(
self.family,
subset_name,
instance_data,
self
)
instance.transient_data["node"] = instance_node
self._add_instance_to_context(instance)
set_node_data(
instance_node, INSTANCE_DATA_KNOB, instance.data_to_store())
return instance
except Exception as er:
six.reraise(
NukeCreatorError,
NukeCreatorError("Creator error: {}".format(er)),
sys.exc_info()[2]
)
def apply_settings(
self,
project_settings,
system_settings
):
"""Method called on initialization of plugin to apply settings."""
# plugin settings
plugin_settings = self.get_creator_settings(project_settings)
# individual attributes
self.instance_attributes = plugin_settings.get(
"instance_attributes") or self.instance_attributes
self.prenodes = plugin_settings["prenodes"]
self.default_variants = plugin_settings.get(
"default_variants") or self.default_variants
self.temp_rendering_path_template = (
plugin_settings.get("temp_rendering_path_template")
or self.temp_rendering_path_template
)
class OpenPypeCreator(LegacyCreator):
@ -72,6 +418,41 @@ class OpenPypeCreator(LegacyCreator):
return instance
def get_instance_group_node_childs(instance):
"""Return list of instance group node children
Args:
instance (pyblish.Instance): pyblish instance
Returns:
list: [nuke.Node]
"""
node = instance.data["transientData"]["node"]
if node.Class() != "Group":
return
# collect child nodes
child_nodes = []
# iterate all nodes
for node in nuke.allNodes(group=node):
# add contained nodes to instance's node list
child_nodes.append(node)
return child_nodes
def get_colorspace_from_node(node):
# Add version data to instance
colorspace = node["colorspace"].value()
# remove default part of the string
if "default (" in colorspace:
colorspace = re.sub(r"default.\(|\)", "", colorspace)
return colorspace
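The helper above only strips Nuke's "default (...)" wrapper from the colorspace knob value, e.g.:

# illustrative values only
re.sub(r"default.\(|\)", "", "default (sRGB)")   # -> "sRGB"
re.sub(r"default.\(|\)", "", "ACES - ACEScg")    # -> unchanged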
def get_review_presets_config():
settings = get_current_project_settings()
review_profiles = (
@ -173,7 +554,6 @@ class ExporterReview(object):
def get_file_info(self):
if self.collection:
self.log.debug("Collection: `{}`".format(self.collection))
# get path
self.fname = os.path.basename(self.collection.format(
"{head}{padding}{tail}"))
@ -308,7 +688,6 @@ class ExporterReviewLut(ExporterReview):
# connect
self._temp_nodes.append(cms_node)
self.previous_node = cms_node
self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes))
if bake_viewer_process:
# Node View Process
@ -341,8 +720,6 @@ class ExporterReviewLut(ExporterReview):
# connect
gen_lut_node.setInput(0, self.previous_node)
self._temp_nodes.append(gen_lut_node)
self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes))
# ---------- end nodes creation
# Export lut file
@ -356,8 +733,6 @@ class ExporterReviewLut(ExporterReview):
# ---------- generate representation data
self.get_representation_data()
self.log.debug("Representation... `{}`".format(self.data))
# ---------- Clean up
self.clean_nodes()
@ -583,6 +958,7 @@ class ExporterReviewMov(ExporterReview):
return self.data
@deprecated("openpype.hosts.nuke.api.plugin.NukeWriteCreator")
class AbstractWriteRender(OpenPypeCreator):
"""Abstract creator to gather similar implementation for Write creators"""
name = ""
@ -609,7 +985,6 @@ class AbstractWriteRender(OpenPypeCreator):
self.data = data
self.nodes = nuke.selectedNodes()
self.log.debug("_ self.data: '{}'".format(self.data))
def process(self):
@ -734,3 +1109,149 @@ class AbstractWriteRender(OpenPypeCreator):
node (nuke.Node): group node with data as Knobs
"""
pass
def convert_to_valid_instaces():
""" Check and convert to latest publisher instances
Also save as new minor version of workfile.
"""
def family_to_identifier(family):
mapping = {
"render": "create_write_render",
"prerender": "create_write_prerender",
"still": "create_write_image",
"model": "create_model",
"camera": "create_camera",
"nukenodes": "create_backdrop",
"gizmo": "create_gizmo",
"source": "create_source"
}
return mapping[family]
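# Note: only the legacy families listed in the mapping above are
# convertible; an unmapped legacy family would raise a KeyError here.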
from openpype.hosts.nuke.api import workio
task_name = legacy_io.Session["AVALON_TASK"]
# save into new workfile
current_file = workio.current_file()
# add file suffix if not already present
if "_publisherConvert" not in current_file:
new_workfile = (
current_file[:-3]
+ "_publisherConvert"
+ current_file[-3:]
)
else:
new_workfile = current_file
path = new_workfile.replace("\\", "/")
nuke.scriptSaveAs(new_workfile, overwrite=1)
nuke.Root()["name"].setValue(path)
nuke.Root()["project_directory"].setValue(os.path.dirname(path))
nuke.Root().setModified(False)
_remove_old_knobs(nuke.Root())
# loop all nodes and convert
for node in nuke.allNodes(recurseGroups=True):
transfer_data = {
"creator_attributes": {}
}
creator_attr = transfer_data["creator_attributes"]
if node.Class() in ["Viewer", "Dot"]:
continue
if get_node_data(node, INSTANCE_DATA_KNOB):
continue
# get data from avalon knob
avalon_knob_data = get_avalon_knob_data(
node, ["avalon:", "ak:"])
if not avalon_knob_data:
continue
if avalon_knob_data["id"] != "pyblish.avalon.instance":
continue
transfer_data.update({
k: v for k, v in avalon_knob_data.items()
if k not in ["families", "creator"]
})
transfer_data["task"] = task_name
family = avalon_knob_data["family"]
# establish families
families_ak = avalon_knob_data.get("families", [])
if "suspend_publish" in node.knobs():
creator_attr["suspended_publish"] = (
node["suspend_publish"].value())
# get review knob value
if "review" in node.knobs():
creator_attr["review"] = (
node["review"].value())
if "publish" in node.knobs():
transfer_data["active"] = (
node["publish"].value())
# add identifier
transfer_data["creator_identifier"] = family_to_identifier(family)
# Add all nodes in group instances.
if node.Class() == "Group":
# only alter families for render family
if families_ak and "write" in families_ak.lower():
target = node["render"].value()
if target == "Use existing frames":
creator_attr["render_target"] = "frames"
elif target == "Local":
# Local rendering
creator_attr["render_target"] = "local"
elif target == "On farm":
# Farm rendering
creator_attr["render_target"] = "farm"
if "deadlinePriority" in node.knobs():
transfer_data["farm_priority"] = (
node["deadlinePriority"].value())
if "deadlineChunkSize" in node.knobs():
creator_attr["farm_chunk"] = (
node["deadlineChunkSize"].value())
if "deadlineConcurrentTasks" in node.knobs():
creator_attr["farm_concurency"] = (
node["deadlineConcurrentTasks"].value())
_remove_old_knobs(node)
# add new instance knob with transfer data
set_node_data(
node, INSTANCE_DATA_KNOB, transfer_data)
nuke.scriptSave()
def _remove_old_knobs(node):
remove_knobs = [
"review", "publish", "render", "suspend_publish", "warn", "divd",
"OpenpypeDataGroup", "OpenpypeDataGroup_End", "deadlinePriority",
"deadlineChunkSize", "deadlineConcurrentTasks", "Deadline"
]
print(node.name())
# remove all old knobs
for knob in node.allKnobs():
try:
if knob.name() in remove_knobs:
node.removeKnob(knob)
elif "avalon" in knob.name():
node.removeKnob(knob)
except ValueError:
pass
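# A minimal usage sketch (assuming a Nuke session with legacy instance
# nodes; the conversion is normally triggered through the LegacyConverted
# subset convertor further below):
#
#     from openpype.hosts.nuke.api.plugin import convert_to_valid_instaces
#     convert_to_valid_instaces()
#
# This saves a "_publisherConvert" copy of the workfile and re-imprints
# every legacy instance node with the new INSTANCE_DATA_KNOB data.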

View file

@ -7,7 +7,9 @@ from openpype.pipeline.workfile.workfile_template_builder import (
AbstractTemplateBuilder,
PlaceholderPlugin,
LoadPlaceholderItem,
CreatePlaceholderItem,
PlaceholderLoadMixin,
PlaceholderCreateMixin
)
from openpype.tools.workfile_template_build import (
WorkfileBuildPlaceholderDialog,
@ -32,7 +34,7 @@ PLACEHOLDER_SET = "PLACEHOLDERS_SET"
class NukeTemplateBuilder(AbstractTemplateBuilder):
"""Concrete implementation of AbstractTemplateBuilder for maya"""
"""Concrete implementation of AbstractTemplateBuilder for nuke"""
def import_template(self, path):
"""Import template into current scene.
@ -40,7 +42,7 @@ class NukeTemplateBuilder(AbstractTemplateBuilder):
Args:
path (str): A path to current template (usually given by
get_template_path implementation)
get_template_preset implementation)
Returns:
bool: Whether the template was successfully imported or not
@ -74,8 +76,7 @@ class NukePlaceholderPlugin(PlaceholderPlugin):
node_knobs = node.knobs()
if (
"builder_type" not in node_knobs
or "is_placeholder" not in node_knobs
"is_placeholder" not in node_knobs
or not node.knob("is_placeholder").value()
):
continue
@ -273,6 +274,15 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
placeholder.data["nb_children"] += 1
reset_selection()
# remove placeholders marked as delete
if (
placeholder.data.get("delete")
and not placeholder.data.get("keep_placeholder")
):
self.log.debug("Deleting node: {}".format(placeholder_node.name()))
nuke.delete(placeholder_node)
# go back to root group
nuke.root().begin()
@ -454,12 +464,12 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
)
for node in placeholder_node.dependent():
for idx in range(node.inputs()):
if node.input(idx) == placeholder_node:
if node.input(idx) == placeholder_node and output_node:
node.setInput(idx, output_node)
for node in placeholder_node.dependencies():
for idx in range(placeholder_node.inputs()):
if placeholder_node.input(idx) == node:
if placeholder_node.input(idx) == node and input_node:
input_node.setInput(0, node)
def _create_sib_copies(self, placeholder):
@ -535,6 +545,408 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin):
siblings_input.setInput(0, copy_output)
class NukePlaceholderCreatePlugin(
NukePlaceholderPlugin, PlaceholderCreateMixin
):
identifier = "nuke.create"
label = "Nuke create"
def _parse_placeholder_node_data(self, node):
placeholder_data = super(
NukePlaceholderCreatePlugin, self
)._parse_placeholder_node_data(node)
node_knobs = node.knobs()
nb_children = 0
if "nb_children" in node_knobs:
nb_children = int(node_knobs["nb_children"].getValue())
placeholder_data["nb_children"] = nb_children
siblings = []
if "siblings" in node_knobs:
siblings = node_knobs["siblings"].values()
placeholder_data["siblings"] = siblings
node_full_name = node.fullName()
placeholder_data["group_name"] = node_full_name.rpartition(".")[0]
placeholder_data["last_loaded"] = []
placeholder_data["delete"] = False
return placeholder_data
def _before_instance_create(self, placeholder):
placeholder.data["nodes_init"] = nuke.allNodes()
def collect_placeholders(self):
output = []
scene_placeholders = self._collect_scene_placeholders()
for node_name, node in scene_placeholders.items():
plugin_identifier_knob = node.knob("plugin_identifier")
if (
plugin_identifier_knob is None
or plugin_identifier_knob.getValue() != self.identifier
):
continue
placeholder_data = self._parse_placeholder_node_data(node)
output.append(
CreatePlaceholderItem(node_name, placeholder_data, self)
)
return output
def populate_placeholder(self, placeholder):
self.populate_create_placeholder(placeholder)
def repopulate_placeholder(self, placeholder):
self.populate_create_placeholder(placeholder)
def get_placeholder_options(self, options=None):
return self.get_create_plugin_options(options)
def cleanup_placeholder(self, placeholder, failed):
# deselect all selected nodes
placeholder_node = nuke.toNode(placeholder.scene_identifier)
# getting the latest nodes added
nodes_init = placeholder.data["nodes_init"]
nodes_created = list(set(nuke.allNodes()) - set(nodes_init))
self.log.debug("Created nodes: {}".format(nodes_created))
if not nodes_created:
return
placeholder.data["delete"] = True
nodes_created = self._move_to_placeholder_group(
placeholder, nodes_created
)
placeholder.data["last_created"] = nodes_created
refresh_nodes(nodes_created)
# positioning of the created nodes
min_x, min_y, _, _ = get_extreme_positions(nodes_created)
for node in nodes_created:
xpos = (node.xpos() - min_x) + placeholder_node.xpos()
ypos = (node.ypos() - min_y) + placeholder_node.ypos()
node.setXYpos(xpos, ypos)
refresh_nodes(nodes_created)
# fix the problem of z_order for backdrops
self._fix_z_order(placeholder)
self._imprint_siblings(placeholder)
if placeholder.data["nb_children"] == 0:
# save initial node positions and dimensions, update them
# and set inputs and outputs of created nodes
self._imprint_inits()
self._update_nodes(placeholder, nuke.allNodes(), nodes_created)
self._set_created_connections(placeholder)
elif placeholder.data["siblings"]:
# create copies of placeholder siblings for the newly created nodes,
# set their inputs and outputs and update all node positions,
# dimensions and sibling names
siblings = get_nodes_by_names(placeholder.data["siblings"])
refresh_nodes(siblings)
copies = self._create_sib_copies(placeholder)
new_nodes = list(copies.values()) # copies nodes
self._update_nodes(placeholder, new_nodes, nodes_created)
placeholder_node.removeKnob(placeholder_node.knob("siblings"))
new_nodes_name = get_names_from_nodes(new_nodes)
imprint(placeholder_node, {"siblings": new_nodes_name})
self._set_copies_connections(placeholder, copies)
self._update_nodes(
placeholder,
nuke.allNodes(),
new_nodes + nodes_created,
20
)
new_siblings = get_names_from_nodes(new_nodes)
placeholder.data["siblings"] = new_siblings
else:
# if the placeholder doesn't have siblings, the created
# nodes will be placed in a free space
xpointer, ypointer = find_free_space_to_paste_nodes(
nodes_created, direction="bottom", offset=200
)
node = nuke.createNode("NoOp")
reset_selection()
nuke.delete(node)
for node in nodes_created:
xpos = (node.xpos() - min_x) + xpointer
ypos = (node.ypos() - min_y) + ypointer
node.setXYpos(xpos, ypos)
placeholder.data["nb_children"] += 1
reset_selection()
# remove placeholders marked as delete
if (
placeholder.data.get("delete")
and not placeholder.data.get("keep_placeholder")
):
self.log.debug("Deleting node: {}".format(placeholder_node.name()))
nuke.delete(placeholder_node)
# go back to root group
nuke.root().begin()
def _move_to_placeholder_group(self, placeholder, nodes_created):
"""
Open the placeholder's group and copy the created nodes into it.
Returns:
nodes_created (list): the new list of pasted nodes
"""
groups_name = placeholder.data["group_name"]
reset_selection()
select_nodes(nodes_created)
if groups_name:
with node_tempfile() as filepath:
nuke.nodeCopy(filepath)
for node in nuke.selectedNodes():
nuke.delete(node)
group = nuke.toNode(groups_name)
group.begin()
nuke.nodePaste(filepath)
nodes_created = nuke.selectedNodes()
return nodes_created
def _fix_z_order(self, placeholder):
"""Fix the problem of z_order when a backdrop is create."""
nodes_created = placeholder.data["last_created"]
created_backdrops = []
bd_orders = set()
for node in nodes_created:
if isinstance(node, nuke.BackdropNode):
created_backdrops.append(node)
bd_orders.add(node.knob("z_order").getValue())
if not bd_orders:
return
sib_orders = set()
for node_name in placeholder.data["siblings"]:
node = nuke.toNode(node_name)
if isinstance(node, nuke.BackdropNode):
sib_orders.add(node.knob("z_order").getValue())
if not sib_orders:
return
min_order = min(bd_orders)
max_order = max(sib_orders)
for backdrop_node in created_backdrops:
z_order = backdrop_node.knob("z_order").getValue()
backdrop_node.knob("z_order").setValue(
z_order + max_order - min_order + 1)
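# Worked example (hypothetical values): if the created backdrops carry
# z_order values {2, 3} and the sibling backdrops peak at 5, the shift is
# 5 - 2 + 1 = 4, moving the created backdrops to {6, 7} so they stack
# above their siblings.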
def _imprint_siblings(self, placeholder):
"""
- add siblings names to placeholder attributes (nodes created with it)
- add Id to the attributes of all the other nodes
"""
created_nodes = placeholder.data["last_created"]
created_nodes_set = set(created_nodes)
for node in created_nodes:
node_knobs = node.knobs()
if (
"is_placeholder" not in node_knobs
or (
"is_placeholder" in node_knobs
and node.knob("is_placeholder").value()
)
):
siblings = list(created_nodes_set - {node})
siblings_name = get_names_from_nodes(siblings)
siblings = {"siblings": siblings_name}
imprint(node, siblings)
def _imprint_inits(self):
"""Add initial positions and dimensions to the attributes"""
for node in nuke.allNodes():
refresh_node(node)
imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
node.knob("x_init").setVisible(False)
node.knob("y_init").setVisible(False)
width = node.screenWidth()
height = node.screenHeight()
if "bdwidth" in node.knobs():
imprint(node, {"w_init": width, "h_init": height})
node.knob("w_init").setVisible(False)
node.knob("h_init").setVisible(False)
refresh_node(node)
def _update_nodes(
self, placeholder, nodes, considered_nodes, offset_y=None
):
"""Adjust backdrop nodes dimensions and positions.
Considering some nodes sizes.
Args:
nodes (list): list of nodes to update
considered_nodes (list): list of nodes to consider while updating
positions and dimensions
offset (int): distance between copies
"""
placeholder_node = nuke.toNode(placeholder.scene_identifier)
min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)
diff_x = diff_y = 0
contained_nodes = [] # for backdrops
if offset_y is None:
width_ph = placeholder_node.screenWidth()
height_ph = placeholder_node.screenHeight()
diff_y = max_y - min_y - height_ph
diff_x = max_x - min_x - width_ph
contained_nodes = [placeholder_node]
min_x = placeholder_node.xpos()
min_y = placeholder_node.ypos()
else:
siblings = get_nodes_by_names(placeholder.data["siblings"])
minX, _, maxX, _ = get_extreme_positions(siblings)
diff_y = max_y - min_y + 20
diff_x = abs(max_x - min_x - maxX + minX)
contained_nodes = considered_nodes
if diff_y <= 0 and diff_x <= 0:
return
for node in nodes:
refresh_node(node)
if (
node == placeholder_node
or node in considered_nodes
):
continue
if (
not isinstance(node, nuke.BackdropNode)
or (
isinstance(node, nuke.BackdropNode)
and not set(contained_nodes) <= set(node.getNodes())
)
):
if offset_y is None and node.xpos() >= min_x:
node.setXpos(node.xpos() + diff_x)
if node.ypos() >= min_y:
node.setYpos(node.ypos() + diff_y)
else:
width = node.screenWidth()
height = node.screenHeight()
node.knob("bdwidth").setValue(width + diff_x)
node.knob("bdheight").setValue(height + diff_y)
refresh_node(node)
def _set_created_connections(self, placeholder):
"""
set inputs and outputs of created nodes"""
placeholder_node = nuke.toNode(placeholder.scene_identifier)
input_node, output_node = get_group_io_nodes(
placeholder.data["last_created"]
)
for node in placeholder_node.dependent():
for idx in range(node.inputs()):
if node.input(idx) == placeholder_node and output_node:
node.setInput(idx, output_node)
for node in placeholder_node.dependencies():
for idx in range(placeholder_node.inputs()):
if placeholder_node.input(idx) == node and input_node:
input_node.setInput(0, node)
def _create_sib_copies(self, placeholder):
""" creating copies of the palce_holder siblings (the ones who were
created with it) for the new nodes added
Returns :
copies (dict) : with copied nodes names and their copies
"""
copies = {}
siblings = get_nodes_by_names(placeholder.data["siblings"])
for node in siblings:
new_node = duplicate_node(node)
x_init = int(new_node.knob("x_init").getValue())
y_init = int(new_node.knob("y_init").getValue())
new_node.setXYpos(x_init, y_init)
if isinstance(new_node, nuke.BackdropNode):
w_init = new_node.knob("w_init").getValue()
h_init = new_node.knob("h_init").getValue()
new_node.knob("bdwidth").setValue(w_init)
new_node.knob("bdheight").setValue(h_init)
refresh_node(node)
if "repre_id" in node.knobs().keys():
node.removeKnob(node.knob("repre_id"))
copies[node.name()] = new_node
return copies
def _set_copies_connections(self, placeholder, copies):
"""Set inputs and outputs of the copies.
Args:
copies (dict): Copied nodes by their names.
"""
last_input, last_output = get_group_io_nodes(
placeholder.data["last_created"]
)
siblings = get_nodes_by_names(placeholder.data["siblings"])
siblings_input, siblings_output = get_group_io_nodes(siblings)
copy_input = copies[siblings_input.name()]
copy_output = copies[siblings_output.name()]
for node_init in siblings:
if node_init == siblings_output:
continue
node_copy = copies[node_init.name()]
for node in node_init.dependent():
for idx in range(node.inputs()):
if node.input(idx) != node_init:
continue
if node in siblings:
copies[node.name()].setInput(idx, node_copy)
else:
last_input.setInput(0, node_copy)
for node in node_init.dependencies():
for idx in range(node_init.inputs()):
if node_init.input(idx) != node:
continue
if node_init == siblings_input:
copy_input.setInput(idx, node)
elif node in siblings:
node_copy.setInput(idx, copies[node.name()])
else:
node_copy.setInput(idx, last_output)
siblings_input.setInput(0, copy_output)
def build_workfile_template(*args):
builder = NukeTemplateBuilder(registered_host())
builder.build_template()

View file

@ -0,0 +1,49 @@
from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin
from openpype.hosts.nuke.api.lib import (
INSTANCE_DATA_KNOB,
get_node_data,
get_avalon_knob_data
)
from openpype.hosts.nuke.api.plugin import convert_to_valid_instaces
import nuke
class LegacyConverted(SubsetConvertorPlugin):
identifier = "legacy.converter"
def find_instances(self):
legacy_found = False
# search for first available legacy item
for node in nuke.allNodes(recurseGroups=True):
if node.Class() in ["Viewer", "Dot"]:
continue
if get_node_data(node, INSTANCE_DATA_KNOB):
continue
# get data from avalon knob
avalon_knob_data = get_avalon_knob_data(
node, ["avalon:", "ak:"], create=False)
if not avalon_knob_data:
continue
if avalon_knob_data["id"] != "pyblish.avalon.instance":
continue
# catch and break
legacy_found = True
break
if legacy_found:
# add the convertor item only when a legacy instance was found
self.add_convertor_item("Convert legacy instances")
def convert(self):
# loop all instances and convert them
convert_to_valid_instaces()
# remove legacy item if all is fine
self.remove_convertor_item()
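# Note: find_instances only checks whether at least one legacy node exists
# (it breaks on the first match); the per-node conversion itself happens in
# convert_to_valid_instaces() once the convertor item is triggered from the
# publisher.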

View file

@ -1,56 +1,57 @@
import nuke
from openpype.hosts.nuke.api import plugin
from openpype.hosts.nuke.api.lib import (
select_nodes,
set_avalon_knob_data
from nukescripts import autoBackdrop
from openpype.hosts.nuke.api import (
NukeCreator,
NukeCreatorError,
maintained_selection,
select_nodes
)
class CreateBackdrop(plugin.OpenPypeCreator):
class CreateBackdrop(NukeCreator):
"""Add Publishable Backdrop"""
name = "nukenodes"
label = "Create Backdrop"
identifier = "create_backdrop"
label = "Nukenodes (backdrop)"
family = "nukenodes"
icon = "file-archive-o"
defaults = ["Main"]
maintain_selection = True
def __init__(self, *args, **kwargs):
super(CreateBackdrop, self).__init__(*args, **kwargs)
self.nodes = nuke.selectedNodes()
self.node_color = "0xdfea5dff"
return
# plugin attributes
node_color = "0xdfea5dff"
def process(self):
from nukescripts import autoBackdrop
nodes = list()
if (self.options or {}).get("useSelection"):
nodes = self.nodes
def create_instance_node(
self,
node_name,
knobs=None,
parent=None,
node_type=None
):
with maintained_selection():
if len(self.selected_nodes) >= 1:
select_nodes(self.selected_nodes)
if len(nodes) >= 1:
select_nodes(nodes)
bckd_node = autoBackdrop()
bckd_node["name"].setValue("{}_BDN".format(self.name))
bckd_node["tile_color"].setValue(int(self.node_color, 16))
bckd_node["note_font_size"].setValue(24)
bckd_node["label"].setValue("[{}]".format(self.name))
# add avalon knobs
instance = set_avalon_knob_data(bckd_node, self.data)
created_node = autoBackdrop()
created_node["name"].setValue(node_name)
created_node["tile_color"].setValue(int(self.node_color, 16))
created_node["note_font_size"].setValue(24)
created_node["label"].setValue("[{}]".format(node_name))
return instance
else:
msg = str("Please select nodes you "
"wish to add to a container")
self.log.error(msg)
nuke.message(msg)
return
else:
bckd_node = autoBackdrop()
bckd_node["name"].setValue("{}_BDN".format(self.name))
bckd_node["tile_color"].setValue(int(self.node_color, 16))
bckd_node["note_font_size"].setValue(24)
bckd_node["label"].setValue("[{}]".format(self.name))
# add avalon knobs
instance = set_avalon_knob_data(bckd_node, self.data)
self.add_info_knob(created_node)
return instance
return created_node
def create(self, subset_name, instance_data, pre_create_data):
if self.check_existing_subset(subset_name, instance_data):
raise NukeCreatorError(
("Subset name '{}' is already used. "
"Please specify different Variant.").format(subset_name))
instance = super(CreateBackdrop, self).create(
subset_name,
instance_data,
pre_create_data
)
return instance

View file

@ -1,55 +1,70 @@
import nuke
from openpype.hosts.nuke.api import plugin
from openpype.hosts.nuke.api.lib import (
set_avalon_knob_data
from openpype.hosts.nuke.api import (
NukeCreator,
NukeCreatorError,
maintained_selection
)
class CreateCamera(plugin.OpenPypeCreator):
"""Add Publishable Backdrop"""
class CreateCamera(NukeCreator):
"""Add Publishable Camera"""
name = "camera"
label = "Create 3d Camera"
identifier = "create_camera"
label = "Camera (3d)"
family = "camera"
icon = "camera"
defaults = ["Main"]
def __init__(self, *args, **kwargs):
super(CreateCamera, self).__init__(*args, **kwargs)
self.nodes = nuke.selectedNodes()
self.node_color = "0xff9100ff"
return
# plugin attributes
node_color = "0xff9100ff"
def process(self):
nodes = list()
if (self.options or {}).get("useSelection"):
nodes = self.nodes
if len(nodes) >= 1:
# loop selected nodes
for n in nodes:
data = self.data.copy()
if len(nodes) > 1:
# rename subset name only if more
# than one node is selected
subset = self.family + n["name"].value().capitalize()
data["subset"] = subset
# change node color
n["tile_color"].setValue(int(self.node_color, 16))
# add avalon knobs
set_avalon_knob_data(n, data)
return True
def create_instance_node(
self,
node_name,
knobs=None,
parent=None,
node_type=None
):
with maintained_selection():
if self.selected_nodes:
node = self.selected_nodes[0]
if node.Class() != "Camera3":
raise NukeCreatorError(
"Creator error: Select only camera node type")
created_node = self.selected_nodes[0]
else:
msg = str("Please select nodes you "
"wish to add to a container")
self.log.error(msg)
nuke.message(msg)
return
created_node = nuke.createNode("Camera2")
created_node["tile_color"].setValue(
int(self.node_color, 16))
created_node["name"].setValue(node_name)
self.add_info_knob(created_node)
return created_node
def create(self, subset_name, instance_data, pre_create_data):
if self.check_existing_subset(subset_name, instance_data):
raise NukeCreatorError(
("Subset name '{}' is already used. "
"Please specify different Variant.").format(subset_name))
instance = super(CreateCamera, self).create(
subset_name,
instance_data,
pre_create_data
)
return instance
def set_selected_nodes(self, pre_create_data):
if pre_create_data.get("use_selection"):
self.selected_nodes = nuke.selectedNodes()
if self.selected_nodes == []:
raise NukeCreatorError(
"Creator error: No active selection")
elif len(self.selected_nodes) > 1:
raise NukeCreatorError(
"Creator error: Select only one camera node")
else:
# if selected is off then create one node
camera_node = nuke.createNode("Camera2")
camera_node["tile_color"].setValue(int(self.node_color, 16))
# add avalon knobs
instance = set_avalon_knob_data(camera_node, self.data)
return instance
self.selected_nodes = []

View file

@ -1,87 +1,69 @@
import nuke
from openpype.hosts.nuke.api import plugin
from openpype.hosts.nuke.api.lib import (
maintained_selection,
select_nodes,
set_avalon_knob_data
from openpype.hosts.nuke.api import (
NukeCreator,
NukeCreatorError,
maintained_selection
)
class CreateGizmo(plugin.OpenPypeCreator):
"""Add Publishable "gizmo" group
class CreateGizmo(NukeCreator):
"""Add Publishable Group as gizmo"""
The name "gizmo" is used symbolically, as it is presumably
familiar to Nuke users as a group of nodes
distributed downstream in the workflow.
"""
name = "gizmo"
label = "Gizmo"
identifier = "create_gizmo"
label = "Gizmo (group)"
family = "gizmo"
icon = "file-archive-o"
defaults = ["ViewerInput", "Lut", "Effect"]
default_variants = ["ViewerInput", "Lut", "Effect"]
def __init__(self, *args, **kwargs):
super(CreateGizmo, self).__init__(*args, **kwargs)
self.nodes = nuke.selectedNodes()
self.node_color = "0x7533c1ff"
return
def process(self):
if (self.options or {}).get("useSelection"):
nodes = self.nodes
self.log.info(len(nodes))
if len(nodes) == 1:
select_nodes(nodes)
node = nodes[-1]
# check if Group node
if node.Class() in "Group":
node["name"].setValue("{}_GZM".format(self.name))
node["tile_color"].setValue(int(self.node_color, 16))
return set_avalon_knob_data(node, self.data)
else:
msg = ("Please select a group node "
"you wish to publish as the gizmo")
self.log.error(msg)
nuke.message(msg)
if len(nodes) >= 2:
select_nodes(nodes)
nuke.makeGroup()
gizmo_node = nuke.selectedNode()
gizmo_node["name"].setValue("{}_GZM".format(self.name))
gizmo_node["tile_color"].setValue(int(self.node_color, 16))
# add sticky node with guide
with gizmo_node:
sticky = nuke.createNode("StickyNote")
sticky["label"].setValue(
"Add following:\n- set Input"
" nodes\n- set one Output1\n"
"- create User knobs on the group")
# add avalon knobs
return set_avalon_knob_data(gizmo_node, self.data)
# plugin attributes
node_color = "0x7533c1ff"
def create_instance_node(
self,
node_name,
knobs=None,
parent=None,
node_type=None
):
with maintained_selection():
if self.selected_nodes:
node = self.selected_nodes[0]
if node.Class() != "Group":
raise NukeCreatorError(
"Creator error: Select only 'Group' node type")
created_node = node
else:
msg = "Please select nodes you wish to add to the gizmo"
self.log.error(msg)
nuke.message(msg)
return
created_node = nuke.collapseToGroup()
created_node["tile_color"].setValue(
int(self.node_color, 16))
created_node["name"].setValue(node_name)
self.add_info_knob(created_node)
return created_node
def create(self, subset_name, instance_data, pre_create_data):
if self.check_existing_subset(subset_name, instance_data):
raise NukeCreatorError(
("Subset name '{}' is already used. "
"Please specify different Variant.").format(subset_name))
instance = super(CreateGizmo, self).create(
subset_name,
instance_data,
pre_create_data
)
return instance
def set_selected_nodes(self, pre_create_data):
if pre_create_data.get("use_selection"):
self.selected_nodes = nuke.selectedNodes()
if self.selected_nodes == []:
raise NukeCreatorError("Creator error: No active selection")
elif len(self.selected_nodes) > 1:
NukeCreatorError("Creator error: Select only one 'Group' node")
else:
with maintained_selection():
gizmo_node = nuke.createNode("Group")
gizmo_node["name"].setValue("{}_GZM".format(self.name))
gizmo_node["tile_color"].setValue(int(self.node_color, 16))
# add sticky node with guide
with gizmo_node:
sticky = nuke.createNode("StickyNote")
sticky["label"].setValue(
"Add following:\n- add Input"
" nodes\n- add one Output1\n"
"- create User knobs on the group")
# add avalon knobs
return set_avalon_knob_data(gizmo_node, self.data)
self.selected_nodes = []

View file

@ -1,87 +1,69 @@
import nuke
from openpype.hosts.nuke.api import plugin
from openpype.hosts.nuke.api.lib import (
set_avalon_knob_data
from openpype.hosts.nuke.api import (
NukeCreator,
NukeCreatorError,
maintained_selection
)
class CreateModel(plugin.OpenPypeCreator):
"""Add Publishable Model Geometry"""
class CreateModel(NukeCreator):
"""Add Publishable Camera"""
name = "model"
label = "Create 3d Model"
identifier = "create_model"
label = "Model (3d)"
family = "model"
icon = "cube"
defaults = ["Main"]
default_variants = ["Main"]
def __init__(self, *args, **kwargs):
super(CreateModel, self).__init__(*args, **kwargs)
self.nodes = nuke.selectedNodes()
self.node_color = "0xff3200ff"
return
# plugin attributes
node_color = "0xff3200ff"
def process(self):
nodes = list()
if (self.options or {}).get("useSelection"):
nodes = self.nodes
for n in nodes:
n['selected'].setValue(0)
end_nodes = list()
# get the latest nodes in tree for selection
for n in nodes:
x = n
end = 0
while end == 0:
try:
x = x.dependent()[0]
except:
end_node = x
end = 1
end_nodes.append(end_node)
# set end_nodes
end_nodes = list(set(end_nodes))
# check if nodes are 3d nodes
for n in end_nodes:
n['selected'].setValue(1)
sn = nuke.createNode("Scene")
if not sn.input(0):
end_nodes.remove(n)
nuke.delete(sn)
# loop over end nodes
for n in end_nodes:
n['selected'].setValue(1)
self.nodes = nuke.selectedNodes()
nodes = self.nodes
if len(nodes) >= 1:
# loop selected nodes
for n in nodes:
data = self.data.copy()
if len(nodes) > 1:
# rename subset name only if more
# than one node is selected
subset = self.family + n["name"].value().capitalize()
data["subset"] = subset
# change node color
n["tile_color"].setValue(int(self.node_color, 16))
# add avalon knobs
set_avalon_knob_data(n, data)
return True
def create_instance_node(
self,
node_name,
knobs=None,
parent=None,
node_type=None
):
with maintained_selection():
if self.selected_nodes:
node = self.selected_nodes[0]
if node.Class() != "Scene":
raise NukeCreatorError(
"Creator error: Select only 'Scene' node type")
created_node = node
else:
msg = str("Please select nodes you "
"wish to add to a container")
self.log.error(msg)
nuke.message(msg)
return
created_node = nuke.createNode("Scene")
created_node["tile_color"].setValue(
int(self.node_color, 16))
created_node["name"].setValue(node_name)
self.add_info_knob(created_node)
return created_node
def create(self, subset_name, instance_data, pre_create_data):
if self.check_existing_subset(subset_name, instance_data):
raise NukeCreatorError(
("Subset name '{}' is already used. "
"Please specify different Variant.").format(subset_name))
instance = super(CreateModel, self).create(
subset_name,
instance_data,
pre_create_data
)
return instance
def set_selected_nodes(self, pre_create_data):
if pre_create_data.get("use_selection"):
self.selected_nodes = nuke.selectedNodes()
if self.selected_nodes == []:
raise NukeCreatorError("Creator error: No active selection")
elif len(self.selected_nodes) > 1:
NukeCreatorError("Creator error: Select only one 'Scene' node")
else:
# if selected is off then create one node
model_node = nuke.createNode("WriteGeo")
model_node["tile_color"].setValue(int(self.node_color, 16))
# add avalon knobs
instance = set_avalon_knob_data(model_node, self.data)
return instance
self.selected_nodes = []

View file

@ -1,57 +0,0 @@
from collections import OrderedDict
import nuke
from openpype.hosts.nuke.api import plugin
from openpype.hosts.nuke.api.lib import (
set_avalon_knob_data
)
class CrateRead(plugin.OpenPypeCreator):
# change this to template preset
name = "ReadCopy"
label = "Create Read Copy"
hosts = ["nuke"]
family = "source"
families = family
icon = "film"
defaults = ["Effect", "Backplate", "Fire", "Smoke"]
def __init__(self, *args, **kwargs):
super(CrateRead, self).__init__(*args, **kwargs)
self.nodes = nuke.selectedNodes()
data = OrderedDict()
data['family'] = self.family
data['families'] = self.families
for k, v in self.data.items():
if k not in data.keys():
data.update({k: v})
self.data = data
def process(self):
self.name = self.data["subset"]
nodes = self.nodes
if not nodes or len(nodes) == 0:
msg = "Please select Read node"
self.log.error(msg)
nuke.message(msg)
else:
count_reads = 0
for node in nodes:
if node.Class() != 'Read':
continue
avalon_data = self.data
avalon_data['subset'] = "{}".format(self.name)
set_avalon_knob_data(node, avalon_data)
node['tile_color'].setValue(16744935)
count_reads += 1
if count_reads < 1:
msg = "Please select Read node"
self.log.error(msg)
nuke.message(msg)
return

View file

@ -0,0 +1,91 @@
import nuke
import six
import sys
from openpype.hosts.nuke.api import (
INSTANCE_DATA_KNOB,
NukeCreator,
NukeCreatorError,
set_node_data
)
from openpype.pipeline import (
CreatedInstance
)
class CreateSource(NukeCreator):
"""Add Publishable Read with source"""
identifier = "create_source"
label = "Source (read)"
family = "source"
icon = "film"
default_variants = ["Effect", "Backplate", "Fire", "Smoke"]
# plugin attributes
node_color = "0xff9100ff"
def create_instance_node(
self,
node_name,
read_node
):
read_node["tile_color"].setValue(
int(self.node_color, 16))
read_node["name"].setValue(node_name)
self.add_info_knob(read_node)
return read_node
def create(self, subset_name, instance_data, pre_create_data):
# make sure selected nodes are added
self.set_selected_nodes(pre_create_data)
try:
for read_node in self.selected_nodes:
if read_node.Class() != 'Read':
continue
node_name = read_node.name()
_subset_name = subset_name + node_name
# make sure subset name is unique
if self.check_existing_subset(_subset_name, instance_data):
raise NukeCreatorError(
("subset {} is already published"
"definition.").format(_subset_name))
instance_node = self.create_instance_node(
_subset_name,
read_node
)
instance = CreatedInstance(
self.family,
_subset_name,
instance_data,
self
)
instance.transient_data["node"] = instance_node
self._add_instance_to_context(instance)
set_node_data(
instance_node,
INSTANCE_DATA_KNOB,
instance.data_to_store()
)
except Exception as er:
six.reraise(
NukeCreatorError,
NukeCreatorError("Creator error: {}".format(er)),
sys.exc_info()[2])
def set_selected_nodes(self, pre_create_data):
if pre_create_data.get("use_selection"):
self.selected_nodes = nuke.selectedNodes()
if self.selected_nodes == []:
raise NukeCreatorError("Creator error: No active selection")
else:
raise NukeCreatorError(
"Creator error: only supported with active selection")

View file

@ -0,0 +1,176 @@
import nuke
import sys
import six
from openpype.pipeline import (
CreatedInstance
)
from openpype.lib import (
BoolDef,
NumberDef,
UISeparatorDef,
EnumDef
)
from openpype.hosts.nuke import api as napi
class CreateWriteImage(napi.NukeWriteCreator):
identifier = "create_write_image"
label = "Image (write)"
family = "image"
icon = "sign-out"
instance_attributes = [
"use_range_limit"
]
default_variants = [
"StillFrame",
"MPFrame",
"LayoutFrame"
]
temp_rendering_path_template = (
"{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")
def get_pre_create_attr_defs(self):
attr_defs = [
BoolDef(
"use_selection",
default=True,
label="Use selection"
),
self._get_render_target_enum(),
UISeparatorDef(),
self._get_frame_source_number()
]
return attr_defs
def _get_render_target_enum(self):
rendering_targets = {
"local": "Local machine rendering",
"frames": "Use existing frames"
}
return EnumDef(
"render_target",
items=rendering_targets,
label="Render target"
)
def _get_frame_source_number(self):
return NumberDef(
"active_frame",
label="Active frame",
default=nuke.frame()
)
def get_instance_attr_defs(self):
attr_defs = [
self._get_render_target_enum(),
self._get_reviewable_bool()
]
return attr_defs
def create_instance_node(self, subset_name, instance_data):
linked_knobs_ = []
if "use_range_limit" in self.instance_attributes:
linked_knobs_ = ["channels", "___", "first", "last", "use_limit"]
# add fpath_template
write_data = {
"creator": self.__class__.__name__,
"subset": subset_name,
"fpath_template": self.temp_rendering_path_template
}
write_data.update(instance_data)
created_node = napi.create_write_node(
subset_name,
write_data,
input=self.selected_node,
prenodes=self.prenodes,
linked_knobs=linked_knobs_,
**{
"frame": nuke.frame()
}
)
self.add_info_knob(created_node)
self._add_frame_range_limit(created_node, instance_data)
self.integrate_links(created_node, outputs=True)
return created_node
def create(self, subset_name, instance_data, pre_create_data):
subset_name = subset_name.format(**pre_create_data)
# pass values from precreate to instance
self.pass_pre_attributes_to_instance(
instance_data,
pre_create_data,
[
"active_frame",
"render_target"
]
)
# make sure selected nodes are added
self.set_selected_nodes(pre_create_data)
# make sure subset name is unique
if self.check_existing_subset(subset_name, instance_data):
raise napi.NukeCreatorError(
("subset {} is already published"
"definition.").format(subset_name))
instance_node = self.create_instance_node(
subset_name,
instance_data,
)
try:
instance = CreatedInstance(
self.family,
subset_name,
instance_data,
self
)
instance.transient_data["node"] = instance_node
self._add_instance_to_context(instance)
napi.set_node_data(
instance_node,
napi.INSTANCE_DATA_KNOB,
instance.data_to_store()
)
return instance
except Exception as er:
six.reraise(
napi.NukeCreatorError,
napi.NukeCreatorError("Creator error: {}".format(er)),
sys.exc_info()[2]
)
def _add_frame_range_limit(self, write_node, instance_data):
if "use_range_limit" not in self.instance_attributes:
return
active_frame = (
instance_data["creator_attributes"].get("active_frame"))
write_node.begin()
for n in nuke.allNodes():
# get write node
if n.Class() in "Write":
w_node = n
write_node.end()
w_node["use_limit"].setValue(True)
w_node["first"].setValue(active_frame or nuke.frame())
w_node["last"].setExpression("first")
return write_node
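# Note: the internal Write node is limited to a single frame here: "first"
# is driven by the instance's active_frame (falling back to the current
# frame) and "last" is expression-linked to "first", so the still image
# renders exactly one frame.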

View file

@ -1,56 +1,179 @@
import nuke
import sys
import six
from openpype.hosts.nuke.api import plugin
from openpype.hosts.nuke.api.lib import (
create_write_node, create_write_node_legacy)
from openpype.pipeline import (
CreatedInstance
)
from openpype.lib import (
BoolDef,
NumberDef,
UISeparatorDef,
UILabelDef
)
from openpype.hosts.nuke import api as napi
class CreateWritePrerender(plugin.AbstractWriteRender):
# change this to template preset
name = "WritePrerender"
label = "Create Write Prerender"
hosts = ["nuke"]
n_class = "Write"
class CreateWritePrerender(napi.NukeWriteCreator):
identifier = "create_write_prerender"
label = "Prerender (write)"
family = "prerender"
icon = "sign-out"
# settings
fpath_template = "{work}/render/nuke/{subset}/{subset}.{frame}.{ext}"
defaults = ["Key01", "Bg01", "Fg01", "Branch01", "Part01"]
reviewable = False
use_range_limit = True
instance_attributes = [
"use_range_limit"
]
default_variants = [
"Key01",
"Bg01",
"Fg01",
"Branch01",
"Part01"
]
temp_rendering_path_template = (
"{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")
def __init__(self, *args, **kwargs):
super(CreateWritePrerender, self).__init__(*args, **kwargs)
def get_pre_create_attr_defs(self):
attr_defs = [
BoolDef(
"use_selection",
default=True,
label="Use selection"
),
self._get_render_target_enum()
]
return attr_defs
def get_instance_attr_defs(self):
attr_defs = [
self._get_render_target_enum(),
self._get_reviewable_bool()
]
if "farm_rendering" in self.instance_attributes:
attr_defs.extend([
UISeparatorDef(),
UILabelDef("Farm rendering attributes"),
BoolDef("suspended_publish", label="Suspended publishing"),
NumberDef(
"farm_priority",
label="Priority",
minimum=1,
maximum=99,
default=50
),
NumberDef(
"farm_chunk",
label="Chunk size",
minimum=1,
maximum=99,
default=10
),
NumberDef(
"farm_concurency",
label="Concurent tasks",
minimum=1,
maximum=10,
default=1
)
])
return attr_defs
def create_instance_node(self, subset_name, instance_data):
linked_knobs_ = []
if "use_range_limit" in self.instance_attributes:
linked_knobs_ = ["channels", "___", "first", "last", "use_limit"]
def _create_write_node(self, selected_node, inputs, outputs, write_data):
# add fpath_template
write_data["fpath_template"] = self.fpath_template
write_data["use_range_limit"] = self.use_range_limit
write_data["frame_range"] = (
nuke.root()["first_frame"].value(),
nuke.root()["last_frame"].value()
write_data = {
"creator": self.__class__.__name__,
"subset": subset_name,
"fpath_template": self.temp_rendering_path_template
}
write_data.update(instance_data)
# get width and height
if self.selected_node:
width, height = (
self.selected_node.width(), self.selected_node.height())
else:
actual_format = nuke.root().knob('format').value()
width, height = (actual_format.width(), actual_format.height())
created_node = napi.create_write_node(
subset_name,
write_data,
input=self.selected_node,
prenodes=self.prenodes,
linked_knobs=linked_knobs_,
**{
"width": width,
"height": height
}
)
self.add_info_knob(created_node)
self._add_frame_range_limit(created_node)
self.integrate_links(created_node, outputs=True)
return created_node
def create(self, subset_name, instance_data, pre_create_data):
# pass values from precreate to instance
self.pass_pre_attributes_to_instance(
instance_data,
pre_create_data,
[
"render_target"
]
)
if not self.is_legacy():
return create_write_node(
self.data["subset"],
write_data,
input=selected_node,
review=self.reviewable,
linked_knobs=["channels", "___", "first", "last", "use_limit"]
)
else:
return create_write_node_legacy(
self.data["subset"],
write_data,
input=selected_node,
review=self.reviewable,
linked_knobs=["channels", "___", "first", "last", "use_limit"]
# make sure selected nodes are added
self.set_selected_nodes(pre_create_data)
# make sure subset name is unique
if self.check_existing_subset(subset_name, instance_data):
raise napi.NukeCreatorError(
("subset {} is already published"
"definition.").format(subset_name))
instance_node = self.create_instance_node(
subset_name,
instance_data
)
try:
instance = CreatedInstance(
self.family,
subset_name,
instance_data,
self
)
def _modify_write_node(self, write_node):
# open group node
instance.transient_data["node"] = instance_node
self._add_instance_to_context(instance)
napi.set_node_data(
instance_node,
napi.INSTANCE_DATA_KNOB,
instance.data_to_store()
)
return instance
except Exception as er:
six.reraise(
napi.NukeCreatorError,
napi.NukeCreatorError("Creator error: {}".format(er)),
sys.exc_info()[2]
)
def _add_frame_range_limit(self, write_node):
if "use_range_limit" not in self.instance_attributes:
return
write_node.begin()
for n in nuke.allNodes():
# get write node
@ -58,9 +181,8 @@ class CreateWritePrerender(plugin.AbstractWriteRender):
w_node = n
write_node.end()
if self.use_range_limit:
w_node["use_limit"].setValue(True)
w_node["first"].setValue(nuke.root()["first_frame"].value())
w_node["last"].setValue(nuke.root()["last_frame"].value())
w_node["use_limit"].setValue(True)
w_node["first"].setValue(nuke.root()["first_frame"].value())
w_node["last"].setValue(nuke.root()["last_frame"].value())
return write_node
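# Note: unlike the image creator above, the prerender variant limits the
# internal Write node to the script's full root frame range
# (first_frame..last_frame) rather than to a single frame.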

View file

@ -1,86 +1,160 @@
import nuke
import sys
import six
from openpype.hosts.nuke.api import plugin
from openpype.hosts.nuke.api.lib import (
create_write_node, create_write_node_legacy)
from openpype.pipeline import (
CreatedInstance
)
from openpype.lib import (
BoolDef,
NumberDef,
UISeparatorDef,
UILabelDef
)
from openpype.hosts.nuke import api as napi
class CreateWriteRender(plugin.AbstractWriteRender):
# change this to template preset
name = "WriteRender"
label = "Create Write Render"
hosts = ["nuke"]
n_class = "Write"
class CreateWriteRender(napi.NukeWriteCreator):
identifier = "create_write_render"
label = "Render (write)"
family = "render"
icon = "sign-out"
# settings
fpath_template = "{work}/render/nuke/{subset}/{subset}.{frame}.{ext}"
defaults = ["Main", "Mask"]
prenodes = {
"Reformat01": {
"nodeclass": "Reformat",
"dependent": None,
"knobs": [
{
"type": "text",
"name": "resize",
"value": "none"
},
{
"type": "bool",
"name": "black_outside",
"value": True
}
]
}
}
instance_attributes = [
"reviewable"
]
default_variants = [
"Main",
"Mask"
]
temp_rendering_path_template = (
"{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")
def __init__(self, *args, **kwargs):
super(CreateWriteRender, self).__init__(*args, **kwargs)
def get_pre_create_attr_defs(self):
attr_defs = [
BoolDef(
"use_selection",
default=True,
label="Use selection"
),
self._get_render_target_enum()
]
return attr_defs
def _create_write_node(self, selected_node, inputs, outputs, write_data):
def get_instance_attr_defs(self):
attr_defs = [
self._get_render_target_enum(),
self._get_reviewable_bool()
]
if "farm_rendering" in self.instance_attributes:
attr_defs.extend([
UISeparatorDef(),
UILabelDef("Farm rendering attributes"),
BoolDef("suspended_publish", label="Suspended publishing"),
NumberDef(
"farm_priority",
label="Priority",
minimum=1,
maximum=99,
default=50
),
NumberDef(
"farm_chunk",
label="Chunk size",
minimum=1,
maximum=99,
default=10
),
NumberDef(
"farm_concurency",
label="Concurent tasks",
minimum=1,
maximum=10,
default=1
)
])
return attr_defs
def create_instance_node(self, subset_name, instance_data):
# add fpath_template
write_data["fpath_template"] = self.fpath_template
write_data = {
"creator": self.__class__.__name__,
"subset": subset_name,
"fpath_template": self.temp_rendering_path_template
}
write_data.update(instance_data)
# add reformat node to cut off all outside of format bounding box
# get width and height
try:
width, height = (selected_node.width(), selected_node.height())
except AttributeError:
if self.selected_node:
width, height = (
self.selected_node.width(), self.selected_node.height())
else:
actual_format = nuke.root().knob('format').value()
width, height = (actual_format.width(), actual_format.height())
if not self.is_legacy():
return create_write_node(
self.data["subset"],
write_data,
input=selected_node,
prenodes=self.prenodes,
**{
"width": width,
"height": height
}
)
else:
_prenodes = [
{
"name": "Reformat01",
"class": "Reformat",
"knobs": [
("resize", 0),
("black_outside", 1),
],
"dependent": None
}
created_node = napi.create_write_node(
subset_name,
write_data,
input=self.selected_node,
prenodes=self.prenodes,
**{
"width": width,
"height": height
}
)
self.add_info_knob(created_node)
self.integrate_links(created_node, outputs=False)
return created_node
def create(self, subset_name, instance_data, pre_create_data):
# pass values from precreate to instance
self.pass_pre_attributes_to_instance(
instance_data,
pre_create_data,
[
"render_target"
]
)
# make sure selected nodes are added
self.set_selected_nodes(pre_create_data)
return create_write_node_legacy(
self.data["subset"],
write_data,
input=selected_node,
prenodes=_prenodes
# make sure subset name is unique
if self.check_existing_subset(subset_name, instance_data):
raise napi.NukeCreatorError(
("subset {} is already published"
"definition.").format(subset_name))
instance_node = self.create_instance_node(
subset_name,
instance_data
)
try:
instance = CreatedInstance(
self.family,
subset_name,
instance_data,
self
)
def _modify_write_node(self, write_node):
return write_node
instance.transient_data["node"] = instance_node
self._add_instance_to_context(instance)
napi.set_node_data(
instance_node,
napi.INSTANCE_DATA_KNOB,
instance.data_to_store()
)
return instance
except Exception as er:
six.reraise(
napi.NukeCreatorError,
napi.NukeCreatorError("Creator error: {}".format(er)),
sys.exc_info()[2]
)

View file

@ -1,105 +0,0 @@
import nuke
from openpype.hosts.nuke.api import plugin
from openpype.hosts.nuke.api.lib import (
create_write_node,
create_write_node_legacy,
get_created_node_imageio_setting_legacy
)
# HACK: just to disable still image on projects which
# do not have an anatomy imageio preset for CreateWriteStill
# TODO: remove this code as soon as it is obsolete
imageio_writes = get_created_node_imageio_setting_legacy(
"Write",
"CreateWriteStill",
"stillMain"
)
print(imageio_writes["knobs"])
class CreateWriteStill(plugin.AbstractWriteRender):
# change this to template preset
name = "WriteStillFrame"
label = "Create Write Still Image"
hosts = ["nuke"]
n_class = "Write"
family = "still"
icon = "image"
# settings
fpath_template = "{work}/render/nuke/{subset}/{subset}.{ext}"
defaults = [
"ImageFrame",
"MPFrame",
"LayoutFrame"
]
prenodes = {
"FrameHold01": {
"nodeclass": "FrameHold",
"dependent": None,
"knobs": [
{
"type": "formatable",
"name": "first_frame",
"template": "{frame}",
"to_type": "number"
}
]
}
}
def __init__(self, *args, **kwargs):
super(CreateWriteStill, self).__init__(*args, **kwargs)
def _create_write_node(self, selected_node, inputs, outputs, write_data):
# add fpath_template
write_data["fpath_template"] = self.fpath_template
if not self.is_legacy():
return create_write_node(
self.name,
write_data,
input=selected_node,
review=False,
prenodes=self.prenodes,
farm=False,
linked_knobs=["channels", "___", "first", "last", "use_limit"],
**{
"frame": nuke.frame()
}
)
else:
_prenodes = [
{
"name": "FrameHold01",
"class": "FrameHold",
"knobs": [
("first_frame", nuke.frame())
],
"dependent": None
}
]
return create_write_node_legacy(
self.name,
write_data,
input=selected_node,
review=False,
prenodes=_prenodes,
farm=False,
linked_knobs=["channels", "___", "first", "last", "use_limit"]
)
def _modify_write_node(self, write_node):
write_node.begin()
for n in nuke.allNodes():
# get write node
if n.Class() in "Write":
w_node = n
write_node.end()
w_node["use_limit"].setValue(True)
w_node["first"].setValue(nuke.frame())
w_node["last"].setValue(nuke.frame())
return write_node

View file

@ -0,0 +1,69 @@
import openpype.hosts.nuke.api as api
from openpype.client import get_asset_by_name
from openpype.pipeline import (
AutoCreator,
CreatedInstance,
legacy_io,
)
from openpype.hosts.nuke.api import (
INSTANCE_DATA_KNOB,
set_node_data
)
import nuke
class WorkfileCreator(AutoCreator):
identifier = "workfile"
family = "workfile"
default_variant = "Main"
def get_instance_attr_defs(self):
return []
def collect_instances(self):
root_node = nuke.root()
instance_data = api.get_node_data(
root_node, api.INSTANCE_DATA_KNOB
)
project_name = legacy_io.Session["AVALON_PROJECT"]
asset_name = legacy_io.Session["AVALON_ASSET"]
task_name = legacy_io.Session["AVALON_TASK"]
host_name = legacy_io.Session["AVALON_APP"]
asset_doc = get_asset_by_name(project_name, asset_name)
subset_name = self.get_subset_name(
self.default_variant, task_name, asset_doc,
project_name, host_name
)
instance_data.update({
"asset": asset_name,
"task": task_name,
"variant": self.default_variant
})
instance_data.update(self.get_dynamic_data(
self.default_variant, task_name, asset_doc,
project_name, host_name, instance_data
))
instance = CreatedInstance(
self.family, subset_name, instance_data, self
)
instance.transient_data["node"] = root_node
self._add_instance_to_context(instance)
def update_instances(self, update_list):
for created_inst, _changes in update_list:
instance_node = created_inst.transient_data["node"]
set_node_data(
instance_node,
INSTANCE_DATA_KNOB,
created_inst.data_to_store()
)
def create(self, options=None):
# no need to create if it is created
# in `collect_instances`
pass

View file

@ -28,7 +28,7 @@ class LoadBackdropNodes(load.LoaderPlugin):
representations = ["nk"]
families = ["workfile", "nukenodes"]
label = "Iport Nuke Nodes"
label = "Import Nuke Nodes"
order = 0
icon = "eye"
color = "white"

View file

@ -1,9 +1,9 @@
from pprint import pformat
import pyblish.api
from openpype.hosts.nuke.api import lib as pnlib
import nuke
@pyblish.api.log
class CollectBackdrops(pyblish.api.InstancePlugin):
"""Collect Backdrop node instance and its content
"""
@ -14,8 +14,9 @@ class CollectBackdrops(pyblish.api.InstancePlugin):
families = ["nukenodes"]
def process(self, instance):
self.log.debug(pformat(instance.data))
bckn = instance[0]
bckn = instance.data["transientData"]["node"]
# define size of the backdrop
left = bckn.xpos()
@ -23,6 +24,7 @@ class CollectBackdrops(pyblish.api.InstancePlugin):
right = left + bckn['bdwidth'].value()
bottom = top + bckn['bdheight'].value()
instance.data["transientData"]["childNodes"] = []
# iterate all nodes
for node in nuke.allNodes():
@ -37,17 +39,17 @@ class CollectBackdrops(pyblish.api.InstancePlugin):
and (node.ypos() + node.screenHeight() < bottom):
# add contained nodes to instance's node list
instance.append(node)
instance.data["transientData"]["childNodes"].append(node)
# get all connections from outside of backdrop
nodes = instance[1:]
nodes = instance.data["transientData"]["childNodes"]
connections_in, connections_out = pnlib.get_dependent_nodes(nodes)
instance.data["nodeConnectionsIn"] = connections_in
instance.data["nodeConnectionsOut"] = connections_out
instance.data["transientData"]["nodeConnectionsIn"] = connections_in
instance.data["transientData"]["nodeConnectionsOut"] = connections_out
# make label nicer
instance.data["label"] = "{0} ({1} nodes)".format(
bckn.name(), len(instance) - 1)
bckn.name(), len(instance.data["transientData"]["childNodes"]))
instance.data["families"].append(instance.data["family"])
@ -83,5 +85,4 @@ class CollectBackdrops(pyblish.api.InstancePlugin):
"frameStart": first_frame,
"frameEnd": last_frame
})
self.log.info("Backdrop content collected: `{}`".format(instance[:]))
self.log.info("Backdrop instance collected: `{}`".format(instance))

View file

@ -0,0 +1,69 @@
import os
import nuke
import pyblish.api
import openpype.api as api
import openpype.hosts.nuke.api as napi
from openpype.pipeline import KnownPublishError
class CollectContextData(pyblish.api.ContextPlugin):
"""Collect current context publish."""
order = pyblish.api.CollectorOrder - 0.499
label = "Collect context data"
hosts = ['nuke']
def process(self, context): # sourcery skip: avoid-builtin-shadow
root_node = nuke.root()
current_file = os.path.normpath(root_node.name())
if current_file.lower() == "root":
raise KnownPublishError(
"Workfile is not correct file name. \n"
"Use workfile tool to manage the name correctly."
)
# Get frame range
first_frame = int(root_node["first_frame"].getValue())
last_frame = int(root_node["last_frame"].getValue())
# get instance data from root
root_instance_context = napi.get_node_data(
root_node, napi.INSTANCE_DATA_KNOB
)
handle_start = root_instance_context["handleStart"]
handle_end = root_instance_context["handleEnd"]
# Get format
format = root_node['format'].value()
resolution_width = format.width()
resolution_height = format.height()
pixel_aspect = format.pixelAspect()
script_data = {
"frameStart": first_frame + handle_start,
"frameEnd": last_frame - handle_end,
"resolutionWidth": resolution_width,
"resolutionHeight": resolution_height,
"pixelAspect": pixel_aspect,
# backward compatibility handles
"handles": handle_start,
"handleStart": handle_start,
"handleEnd": handle_end,
"step": 1,
"fps": root_node['fps'].value(),
"currentFile": current_file,
"version": int(api.get_version_from_path(current_file)),
"host": pyblish.api.current_host(),
"hostVersion": nuke.NUKE_VERSION_STRING
}
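# Worked example (hypothetical values): with root first_frame=991,
# last_frame=1110 and handleStart/handleEnd of 10 stored on the root
# instance data, the collected content range becomes frameStart=1001 and
# frameEnd=1100, while the handles stay available via handleStart/handleEnd.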
context.data["scriptData"] = script_data
context.data.update(script_data)
self.log.info('Context from Nuke script collected')

View file

@ -2,25 +2,23 @@ import pyblish.api
import nuke
@pyblish.api.log
class CollectGizmo(pyblish.api.InstancePlugin):
"""Collect Gizmo (group) node instance and its content
"""
order = pyblish.api.CollectorOrder + 0.22
label = "Collect Gizmo (Group)"
label = "Collect Gizmo (group)"
hosts = ["nuke"]
families = ["gizmo"]
def process(self, instance):
grpn = instance[0]
gizmo_node = instance.data["transientData"]["node"]
# add family to familiess
instance.data["families"].insert(0, instance.data["family"])
# make label nicer
instance.data["label"] = "{0} ({1} nodes)".format(
grpn.name(), len(instance) - 1)
instance.data["label"] = gizmo_node.name()
# Get frame range
handle_start = instance.context.data["handleStart"]
@ -46,5 +44,4 @@ class CollectGizmo(pyblish.api.InstancePlugin):
"frameStart": first_frame,
"frameEnd": last_frame
})
self.log.info("Gizmo content collected: `{}`".format(instance[:]))
self.log.info("Gizmo instance collected: `{}`".format(instance))

View file

@ -0,0 +1,44 @@
import nuke
import pyblish.api
class CollectInstanceData(pyblish.api.InstancePlugin):
"""Collect all nodes with Avalon knob."""
order = pyblish.api.CollectorOrder - 0.49
label = "Collect Instance Data"
hosts = ["nuke", "nukeassist"]
# presets
sync_workfile_version_on_families = []
def process(self, instance):
family = instance.data["family"]
# Get format
root = nuke.root()
format_ = root['format'].value()
resolution_width = format_.width()
resolution_height = format_.height()
pixel_aspect = format_.pixelAspect()
# sync workfile version
if family in self.sync_workfile_version_on_families:
self.log.debug(
"Syncing version with workfile for '{}'".format(
family
)
)
# get version to instance for integration
instance.data['version'] = instance.context.data['version']
instance.data.update({
"step": 1,
"fps": root['fps'].value(),
"resolutionWidth": resolution_width,
"resolutionHeight": resolution_height,
"pixelAspect": pixel_aspect
})
self.log.debug("Collected instance: {}".format(
instance.data))

View file

@ -2,7 +2,6 @@ import pyblish.api
import nuke
@pyblish.api.log
class CollectModel(pyblish.api.InstancePlugin):
"""Collect Model node instance and its content
"""
@ -14,12 +13,12 @@ class CollectModel(pyblish.api.InstancePlugin):
def process(self, instance):
grpn = instance[0]
geo_node = instance.data["transientData"]["node"]
# add family to familiess
instance.data["families"].insert(0, instance.data["family"])
# make label nicer
instance.data["label"] = grpn.name()
instance.data["label"] = geo_node.name()
# Get frame range
handle_start = instance.context.data["handleStart"]
@ -45,5 +44,4 @@ class CollectModel(pyblish.api.InstancePlugin):
"frameStart": first_frame,
"frameEnd": last_frame
})
self.log.info("Model content collected: `{}`".format(instance[:]))
self.log.info("Model instance collected: `{}`".format(instance))

View file

@ -2,12 +2,10 @@ import os
import re
import nuke
import pyblish.api
from openpype.client import get_asset_by_name
from openpype.pipeline import legacy_io
@pyblish.api.log
class CollectNukeReads(pyblish.api.InstancePlugin):
"""Collect all read nodes."""
@ -17,6 +15,8 @@ class CollectNukeReads(pyblish.api.InstancePlugin):
families = ["source"]
def process(self, instance):
node = instance.data["transientData"]["node"]
project_name = legacy_io.active_project()
asset_name = legacy_io.Session["AVALON_ASSET"]
asset_doc = get_asset_by_name(project_name, asset_name)
@ -25,7 +25,6 @@ class CollectNukeReads(pyblish.api.InstancePlugin):
self.log.debug("checking instance: {}".format(instance))
node = instance[0]
if node.Class() != "Read":
return
@ -99,10 +98,7 @@ class CollectNukeReads(pyblish.api.InstancePlugin):
}
instance.data["representations"].append(representation)
transfer = False
if "publish" in node.knobs():
transfer = node["publish"]
transfer = node["publish"] if "publish" in node.knobs() else False
instance.data['transfer'] = transfer
# Add version data to instance

View file

@ -8,10 +8,10 @@ class CollectSlate(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.09
label = "Collect Slate Node"
hosts = ["nuke"]
families = ["render", "render.local", "render.farm"]
families = ["render"]
def process(self, instance):
node = instance[0]
node = instance.data["transientData"]["node"]
slate = next((n for n in nuke.allNodes()
if "slate" in n.name().lower()
@ -35,7 +35,6 @@ class CollectSlate(pyblish.api.InstancePlugin):
instance.data["slateNode"] = slate_node
instance.data["slate"] = True
instance.data["families"].append("slate")
instance.data["versionData"]["families"].append("slate")
self.log.info(
"Slate node is in node graph: `{}`".format(slate.name()))
self.log.debug(

View file

@ -0,0 +1,40 @@
import os
import nuke
import pyblish.api
class CollectWorkfile(pyblish.api.InstancePlugin):
"""Collect current script for publish."""
order = pyblish.api.CollectorOrder
label = "Collect Workfile"
hosts = ['nuke']
families = ["workfile"]
def process(self, instance): # sourcery skip: avoid-builtin-shadow
script_data = instance.context.data["scriptData"]
current_file = os.path.normpath(nuke.root().name())
# getting workfile staging dir and base name
staging_dir = os.path.dirname(current_file)
base_name = os.path.basename(current_file)
# creating representation
representation = {
'name': 'nk',
'ext': 'nk',
'files': base_name,
"stagingDir": staging_dir,
}
# creating instance data
instance.data.update({
"name": base_name,
"representations": [representation]
})
# adding basic script data
instance.data.update(script_data)
self.log.info("Collect script version")

View file

@ -0,0 +1,186 @@
import os
from pprint import pformat
import nuke
import pyblish.api
from openpype.hosts.nuke import api as napi
class CollectNukeWrites(pyblish.api.InstancePlugin):
"""Collect all write nodes."""
order = pyblish.api.CollectorOrder - 0.48
label = "Collect Writes"
hosts = ["nuke", "nukeassist"]
families = ["render", "prerender", "image"]
def process(self, instance):
self.log.debug(pformat(instance.data))
creator_attributes = instance.data["creator_attributes"]
instance.data.update(creator_attributes)
group_node = instance.data["transientData"]["node"]
render_target = instance.data["render_target"]
family = instance.data["family"]
families = instance.data["families"]
# add targeted family to families
instance.data["families"].append(
"{}.{}".format(family, render_target)
)
if instance.data.get("review"):
instance.data["families"].append("review")
child_nodes = napi.get_instance_group_node_childs(instance)
instance.data["transientData"]["childNodes"] = child_nodes
write_node = None
for x in child_nodes:
if x.Class() == "Write":
write_node = x
if write_node is None:
self.log.warning(
"Created node '{}' is missing write node!".format(
group_node.name()
)
)
return
instance.data["writeNode"] = write_node
self.log.debug("checking instance: {}".format(instance))
# Determine defined file type
ext = write_node["file_type"].value()
# Get frame range
handle_start = instance.context.data["handleStart"]
handle_end = instance.context.data["handleEnd"]
first_frame = int(nuke.root()["first_frame"].getValue())
last_frame = int(nuke.root()["last_frame"].getValue())
frame_length = int(last_frame - first_frame + 1)
if write_node["use_limit"].getValue():
first_frame = int(write_node["first"].getValue())
last_frame = int(write_node["last"].getValue())
write_file_path = nuke.filename(write_node)
output_dir = os.path.dirname(write_file_path)
self.log.debug('output dir: {}'.format(output_dir))
if render_target == "frames":
representation = {
'name': ext,
'ext': ext,
"stagingDir": output_dir,
"tags": []
}
# get file path knob
node_file_knob = write_node["file"]
# list file paths based on input frames
expected_paths = list(sorted({
node_file_knob.evaluate(frame)
for frame in range(first_frame, last_frame + 1)
}))
# convert only to base names
expected_filenames = [
os.path.basename(filepath)
for filepath in expected_paths
]
# make sure files are existing at folder
collected_frames = [
filename
for filename in os.listdir(output_dir)
if filename in expected_filenames
]
if collected_frames:
collected_frames_len = len(collected_frames)
frame_start_str = "%0{}d".format(
len(str(last_frame))) % first_frame
representation['frameStart'] = frame_start_str
# in case slate is expected and not yet rendered
self.log.debug("_ frame_length: {}".format(frame_length))
self.log.debug("_ collected_frames_len: {}".format(
collected_frames_len))
# this will only run if slate frame is not already
# rendered from previous publishes
if (
"slate" in families
and frame_length == collected_frames_len
and family == "render"
):
frame_slate_str = (
"{{:0{}d}}".format(len(str(last_frame)))
).format(first_frame - 1)
slate_frame = collected_frames[0].replace(
frame_start_str, frame_slate_str)
collected_frames.insert(0, slate_frame)
if collected_frames_len == 1:
representation['files'] = collected_frames.pop()
else:
representation['files'] = collected_frames
instance.data["representations"].append(representation)
self.log.info("Publishing rendered frames ...")
elif render_target == "farm":
farm_priority = creator_attributes.get("farm_priority")
farm_chunk = creator_attributes.get("farm_chunk")
farm_concurency = creator_attributes.get("farm_concurency")
instance.data.update({
"deadlineChunkSize": farm_chunk or 1,
"deadlinePriority": farm_priority or 50,
"deadlineConcurrentTasks": farm_concurency or 0
})
# Farm rendering
instance.data["transfer"] = False
instance.data["farm"] = True
self.log.info("Farm rendering ON ...")
# get colorspace and add to version data
colorspace = napi.get_colorspace_from_node(write_node)
version_data = {
"colorspace": colorspace
}
instance.data.update({
"versionData": version_data,
"path": write_file_path,
"outputDir": output_dir,
"ext": ext,
"colorspace": colorspace
})
if family == "render":
instance.data.update({
"handleStart": handle_start,
"handleEnd": handle_end,
"frameStart": first_frame + handle_start,
"frameEnd": last_frame - handle_end,
"frameStartHandle": first_frame,
"frameEndHandle": last_frame,
})
else:
instance.data.update({
"handleStart": 0,
"handleEnd": 0,
"frameStart": first_frame,
"frameEnd": last_frame,
"frameStartHandle": first_frame,
"frameEndHandle": last_frame,
})
# make sure rendered sequence on farm will
# be used for extract review
if not instance.data["review"]:
instance.data["useSequenceForReview"] = False
self.log.debug("instance.data: {}".format(pformat(instance.data)))

View file

@ -26,8 +26,14 @@ class ExtractBackdropNode(publish.Extractor):
families = ["nukenodes"]
def process(self, instance):
tmp_nodes = list()
nodes = instance[1:]
tmp_nodes = []
child_nodes = instance.data["transientData"]["childNodes"]
# all connections outside of backdrop
connections_in = instance.data["transientData"]["nodeConnectionsIn"]
connections_out = instance.data["transientData"]["nodeConnectionsOut"]
self.log.debug("_ connections_in: `{}`".format(connections_in))
self.log.debug("_ connections_out: `{}`".format(connections_out))
# Define extract output file path
stagingdir = self.staging_dir(instance)
filename = "{0}.nk".format(instance.name)
@ -35,20 +41,14 @@ class ExtractBackdropNode(publish.Extractor):
# maintain selection
with maintained_selection():
# all connections outside of backdrop
connections_in = instance.data["nodeConnectionsIn"]
connections_out = instance.data["nodeConnectionsOut"]
self.log.debug("_ connections_in: `{}`".format(connections_in))
self.log.debug("_ connections_out: `{}`".format(connections_out))
# create input nodes and name them as passing node (*_INP)
# create input child_nodes and name them as passing node (*_INP)
for n, inputs in connections_in.items():
for i, input in inputs:
inpn = nuke.createNode("Input")
inpn["name"].setValue("{}_{}_INP".format(n.name(), i))
n.setInput(i, inpn)
inpn.setXYpos(input.xpos(), input.ypos())
nodes.append(inpn)
child_nodes.append(inpn)
tmp_nodes.append(inpn)
reset_selection()
@ -63,13 +63,13 @@ class ExtractBackdropNode(publish.Extractor):
if d.name() in n.name()), 0), opn)
opn.setInput(0, n)
opn.autoplace()
nodes.append(opn)
child_nodes.append(opn)
tmp_nodes.append(opn)
reset_selection()
# select nodes to copy
# select child_nodes to copy
reset_selection()
select_nodes(nodes)
select_nodes(child_nodes)
# create tmp nk file
# save file to the path
nuke.nodeCopy(path)
@ -104,6 +104,3 @@ class ExtractBackdropNode(publish.Extractor):
self.log.info("Extracted instance '{}' to: {}".format(
instance.name, path))
self.log.info("Data {}".format(
instance.data))
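
A minimal sketch of the select-and-copy step using plain Nuke calls; the extractor itself relies on the select_nodes/reset_selection helpers shown above, and `child_nodes` and `path` are the values already prepared by this plugin:

import nuke

for node in nuke.selectedNodes():        # clear current selection
    node["selected"].setValue(False)
for node in child_nodes:                 # select only the backdrop content
    node["selected"].setValue(True)
nuke.nodeCopy(path)                      # write selected nodes into the staging .nk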

View file

@ -28,6 +28,7 @@ class ExtractCamera(publish.Extractor):
]
def process(self, instance):
camera_node = instance.data["transientData"]["node"]
handle_start = instance.context.data["handleStart"]
handle_end = instance.context.data["handleEnd"]
first_frame = int(nuke.root()["first_frame"].getValue())
@ -38,7 +39,7 @@ class ExtractCamera(publish.Extractor):
self.log.info("instance.data: `{}`".format(
pformat(instance.data)))
rm_nodes = list()
rm_nodes = []
self.log.info("Crating additional nodes")
subset = instance.data["subset"]
staging_dir = self.staging_dir(instance)
@ -58,7 +59,7 @@ class ExtractCamera(publish.Extractor):
with maintained_selection():
# bake camera with axes onto world coordinate XYZ
rm_n = bakeCameraWithAxeses(
nuke.toNode(instance.data["name"]), output_range)
camera_node, output_range)
rm_nodes.append(rm_n)
# create scene node

View file

@ -19,13 +19,14 @@ class ExtractGizmo(publish.Extractor):
"""
order = pyblish.api.ExtractorOrder
label = "Extract Gizmo (Group)"
label = "Extract Gizmo (group)"
hosts = ["nuke"]
families = ["gizmo"]
def process(self, instance):
tmp_nodes = list()
orig_grpn = instance[0]
tmp_nodes = []
orig_grpn = instance.data["transientData"]["node"]
# Define extract output file path
stagingdir = self.staging_dir(instance)
filename = "{0}.nk".format(instance.name)
@ -54,15 +55,6 @@ class ExtractGizmo(publish.Extractor):
# convert gizmos to groups
pnutils.bake_gizmos_recursively(copy_grpn)
# remove avalonknobs
knobs = copy_grpn.knobs()
avalon_knobs = [k for k in knobs.keys()
for ak in ["avalon:", "ak:"]
if ak in k]
avalon_knobs.append("publish")
for ak in avalon_knobs:
copy_grpn.removeKnob(knobs[ak])
# add to temporary nodes
tmp_nodes.append(copy_grpn)

View file

@ -36,8 +36,9 @@ class ExtractModel(publish.Extractor):
self.log.info("instance.data: `{}`".format(
pformat(instance.data)))
rm_nodes = list()
model_node = instance[0]
rm_nodes = []
model_node = instance.data["transientData"]["node"]
self.log.info("Crating additional nodes")
subset = instance.data["subset"]
staging_dir = self.staging_dir(instance)

View file

@ -16,13 +16,17 @@ class CreateOutputNode(pyblish.api.ContextPlugin):
def process(self, context):
# capture selection state
with maintained_selection():
active_node = [node for inst in context
for node in inst
if "ak:family" in node.knobs()]
active_node = [
inst.data.get("transientData", {}).get("node")
for inst in context
if inst.data.get("transientData", {}).get("node")
if inst.data.get(
"transientData", {}).get("node").Class() != "Root"
]
if active_node:
self.log.info(active_node)
active_node = active_node[0]
active_node = active_node.pop()
self.log.info(active_node)
active_node['selected'].setValue(True)

View file

@ -23,9 +23,13 @@ class NukeRenderLocal(publish.Extractor):
def process(self, instance):
families = instance.data["families"]
child_nodes = (
instance.data.get("transientData", {}).get("childNodes")
or instance
)
node = None
for x in instance:
for x in child_nodes:
if x.Class() == "Write":
node = x

View file

@ -4,11 +4,7 @@ import nuke
import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.nuke.api import (
maintained_selection,
get_view_process_node
)
from openpype.hosts.nuke import api as napi
if sys.version_info[0] >= 3:
unicode = str
@ -38,7 +34,7 @@ class ExtractThumbnail(publish.Extractor):
if "render.farm" in instance.data["families"]:
return
with maintained_selection():
with napi.maintained_selection():
self.log.debug("instance: {}".format(instance))
self.log.debug("instance.data[families]: {}".format(
instance.data["families"]))
@ -69,7 +65,7 @@ class ExtractThumbnail(publish.Extractor):
bake_viewer_input_process_node = kwargs[
"bake_viewer_input_process"]
node = instance[0] # group node
node = instance.data["transientData"]["node"] # group node
self.log.info("Creating staging dir...")
if "representations" not in instance.data:
@ -144,7 +140,7 @@ class ExtractThumbnail(publish.Extractor):
if bake_viewer_process:
if bake_viewer_input_process_node:
# get input process and connect it to baking
ipn = get_view_process_node()
ipn = napi.get_view_process_node()
if ipn is not None:
ipn.setInput(0, previous_node)
previous_node = ipn

View file

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Shot/Asset mame</title>
<title>Shot/Asset name</title>
<description>
## Invalid Shot/Asset name in subset

View file

@ -3,16 +3,30 @@
<error id="main">
<title>Knobs values</title>
<description>
## Invalid node's knobs values
## Invalid node's knobs values
Following write node knobs needs to be repaired:
The following write node knobs need to be repaired:
{xml_msg}
{xml_msg}
### How to repair?
### How to repair?
1. Use Repair button.
2. Hit Reload button on the publisher.
1. Use Repair button.
2. Hit Reload button on the publisher.
</description>
</error>
<error id="legacy">
<title>Legacy knob types</title>
<description>
## Knobs are in an obsolete configuration
Settings need to be fixed.
### How to repair?
Contact your supervisor or fix it in project settings at
'project_settings/nuke/imageio/nodes/requiredNodes' under knobs.
Each '__legacy__' type has to be defined according to its knob type.
</description>
</error>
</root>

View file

@ -1,158 +0,0 @@
import nuke
import pyblish.api
from openpype.hosts.nuke.api.lib import (
add_publish_knob,
get_avalon_knob_data
)
@pyblish.api.log
class PreCollectNukeInstances(pyblish.api.ContextPlugin):
"""Collect all nodes with Avalon knob."""
order = pyblish.api.CollectorOrder - 0.49
label = "Pre-collect Instances"
hosts = ["nuke", "nukeassist"]
# presets
sync_workfile_version_on_families = []
def process(self, context):
instances = []
root = nuke.root()
self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes()))
for node in nuke.allNodes():
if node.Class() in ["Viewer", "Dot"]:
continue
try:
if node["disable"].value():
continue
except Exception as E:
self.log.warning(E)
# get data from avalon knob
avalon_knob_data = get_avalon_knob_data(
node, ["avalon:", "ak:"])
self.log.debug("avalon_knob_data: {}".format(avalon_knob_data))
if not avalon_knob_data:
continue
if avalon_knob_data["id"] != "pyblish.avalon.instance":
continue
# establish families
family = avalon_knob_data["family"]
families_ak = avalon_knob_data.get("families", [])
families = []
# except disabled nodes but exclude backdrops in test
if ("nukenodes" not in family) and (node["disable"].value()):
continue
subset = avalon_knob_data.get(
"subset", None) or node["name"].value()
# Create instance
instance = context.create_instance(subset)
instance.append(node)
suspend_publish = False
if "suspend_publish" in node.knobs():
suspend_publish = node["suspend_publish"].value()
instance.data["suspend_publish"] = suspend_publish
# get review knob value
review = False
if "review" in node.knobs():
review = node["review"].value()
if review:
families.append("review")
# Add all nodes in group instances.
if node.Class() == "Group":
# only alter families for render family
if families_ak and "write" in families_ak.lower():
target = node["render"].value()
if target == "Use existing frames":
# Local rendering
self.log.info("flagged for no render")
families.append(families_ak.lower())
elif target == "Local":
# Local rendering
self.log.info("flagged for local render")
families.append("{}.local".format(family))
family = families_ak.lower()
elif target == "On farm":
# Farm rendering
self.log.info("flagged for farm render")
instance.data["transfer"] = False
instance.data["farm"] = True
families.append("{}.farm".format(family))
family = families_ak.lower()
node.begin()
for i in nuke.allNodes():
instance.append(i)
node.end()
if not families and families_ak and family not in [
"render", "prerender"]:
families.append(families_ak.lower())
self.log.debug("__ family: `{}`".format(family))
self.log.debug("__ families: `{}`".format(families))
# Get format
format_ = root['format'].value()
resolution_width = format_.width()
resolution_height = format_.height()
pixel_aspect = format_.pixelAspect()
# get publish knob value
if "publish" not in node.knobs():
add_publish_knob(node)
# sync workfile version
_families_test = [family] + families
self.log.debug("__ _families_test: `{}`".format(_families_test))
for family_test in _families_test:
if family_test in self.sync_workfile_version_on_families:
self.log.debug(
"Syncing version with workfile for '{}'".format(
family_test
)
)
# get version to instance for integration
instance.data['version'] = instance.context.data['version']
instance.data.update({
"subset": subset,
"asset": avalon_knob_data["asset"],
"label": node.name(),
"name": node.name(),
"subset": subset,
"family": family,
"families": families,
"avalonKnob": avalon_knob_data,
"step": 1,
"publish": node.knob('publish').value(),
"fps": nuke.root()['fps'].value(),
"resolutionWidth": resolution_width,
"resolutionHeight": resolution_height,
"pixelAspect": pixel_aspect,
"review": review,
"representations": []
})
self.log.info("collected instance: {}".format(instance.data))
instances.append(instance)
self.log.debug("context: {}".format(context))

View file

@ -1,107 +0,0 @@
import os
import nuke
import pyblish.api
from openpype.lib import get_version_from_path
from openpype.hosts.nuke.api.lib import (
add_publish_knob,
get_avalon_knob_data
)
from openpype.pipeline import KnownPublishError
class CollectWorkfile(pyblish.api.ContextPlugin):
"""Collect current script for publish."""
order = pyblish.api.CollectorOrder - 0.50
label = "Pre-collect Workfile"
hosts = ['nuke']
def process(self, context): # sourcery skip: avoid-builtin-shadow
root = nuke.root()
current_file = os.path.normpath(nuke.root().name())
if current_file.lower() == "root":
raise KnownPublishError(
"Workfile is not correct file name. \n"
"Use workfile tool to manage the name correctly."
)
knob_data = get_avalon_knob_data(root)
add_publish_knob(root)
family = "workfile"
task = os.getenv("AVALON_TASK", None)
# creating instances per write node
staging_dir = os.path.dirname(current_file)
base_name = os.path.basename(current_file)
subset = family + task.capitalize()
# Get frame range
first_frame = int(root["first_frame"].getValue())
last_frame = int(root["last_frame"].getValue())
handle_start = int(knob_data.get("handleStart", 0))
handle_end = int(knob_data.get("handleEnd", 0))
# Get format
format = root['format'].value()
resolution_width = format.width()
resolution_height = format.height()
pixel_aspect = format.pixelAspect()
# Create instance
instance = context.create_instance(subset)
instance.add(root)
script_data = {
"asset": os.getenv("AVALON_ASSET", None),
"frameStart": first_frame + handle_start,
"frameEnd": last_frame - handle_end,
"resolutionWidth": resolution_width,
"resolutionHeight": resolution_height,
"pixelAspect": pixel_aspect,
# backward compatibility
"handles": handle_start,
"handleStart": handle_start,
"handleEnd": handle_end,
"step": 1,
"fps": root['fps'].value(),
"currentFile": current_file,
"version": int(get_version_from_path(current_file)),
"host": pyblish.api.current_host(),
"hostVersion": nuke.NUKE_VERSION_STRING
}
context.data.update(script_data)
# creating representation
representation = {
'name': 'nk',
'ext': 'nk',
'files': base_name,
"stagingDir": staging_dir,
}
# creating instance data
instance.data.update({
"subset": subset,
"label": base_name,
"name": base_name,
"publish": root.knob('publish').value(),
"family": family,
"families": [family],
"representations": [representation]
})
# adding basic script data
instance.data.update(script_data)
self.log.info('Publishing script version')

View file

@ -1,207 +0,0 @@
import os
import re
from pprint import pformat
import nuke
import pyblish.api
from openpype.client import (
get_last_version_by_subset_name,
get_representations,
)
from openpype.pipeline import (
legacy_io,
get_representation_path,
)
@pyblish.api.log
class CollectNukeWrites(pyblish.api.InstancePlugin):
"""Collect all write nodes."""
order = pyblish.api.CollectorOrder - 0.48
label = "Pre-collect Writes"
hosts = ["nuke", "nukeassist"]
families = ["write"]
def process(self, instance):
_families_test = [instance.data["family"]] + instance.data["families"]
self.log.debug("_families_test: {}".format(_families_test))
node = None
for x in instance:
if x.Class() == "Write":
node = x
if node is None:
return
instance.data["writeNode"] = node
self.log.debug("checking instance: {}".format(instance))
# Determine defined file type
ext = node["file_type"].value()
# Determine output type
output_type = "img"
if ext == "mov":
output_type = "mov"
# Get frame range
handle_start = instance.context.data["handleStart"]
handle_end = instance.context.data["handleEnd"]
first_frame = int(nuke.root()["first_frame"].getValue())
last_frame = int(nuke.root()["last_frame"].getValue())
frame_length = int(last_frame - first_frame + 1)
if node["use_limit"].getValue():
first_frame = int(node["first"].getValue())
last_frame = int(node["last"].getValue())
# Prepare expected output paths by evaluating each frame of write node
# - paths are first collected to set to avoid duplicated paths, then
# sorted and converted to list
node_file = node["file"]
expected_paths = list(sorted({
node_file.evaluate(frame)
for frame in range(first_frame, last_frame + 1)
}))
expected_filenames = [
os.path.basename(filepath)
for filepath in expected_paths
]
path = nuke.filename(node)
output_dir = os.path.dirname(path)
self.log.debug('output dir: {}'.format(output_dir))
# create label
name = node.name()
# Include start and end render frame in label
label = "{0} ({1}-{2})".format(
name,
int(first_frame),
int(last_frame)
)
if [fm for fm in _families_test
if fm in ["render", "prerender", "still"]]:
if "representations" not in instance.data:
instance.data["representations"] = list()
representation = {
'name': ext,
'ext': ext,
"stagingDir": output_dir,
"tags": list()
}
try:
collected_frames = [
filename
for filename in os.listdir(output_dir)
if filename in expected_filenames
]
if collected_frames:
collected_frames_len = len(collected_frames)
frame_start_str = "%0{}d".format(
len(str(last_frame))) % first_frame
representation['frameStart'] = frame_start_str
# in case slate is expected and not yet rendered
self.log.debug("_ frame_length: {}".format(frame_length))
self.log.debug(
"_ collected_frames_len: {}".format(
collected_frames_len))
# this will only run if slate frame is not already
# rendered from previews publishes
if "slate" in _families_test \
and (frame_length == collected_frames_len) \
and ("prerender" not in _families_test):
frame_slate_str = "%0{}d".format(
len(str(last_frame))) % (first_frame - 1)
slate_frame = collected_frames[0].replace(
frame_start_str, frame_slate_str)
collected_frames.insert(0, slate_frame)
if collected_frames_len == 1:
representation['files'] = collected_frames.pop()
if "still" in _families_test:
instance.data['family'] = 'image'
instance.data["families"].remove('still')
else:
representation['files'] = collected_frames
instance.data["representations"].append(representation)
except Exception:
instance.data["representations"].append(representation)
self.log.debug("couldn't collect frames: {}".format(label))
# Add version data to instance
colorspace = node["colorspace"].value()
# remove default part of the string
if "default (" in colorspace:
colorspace = re.sub(r"default.\(|\)", "", colorspace)
self.log.debug("colorspace: `{}`".format(colorspace))
version_data = {
"families": [
_f.replace(".local", "").replace(".farm", "")
for _f in _families_test if "write" != _f
],
"colorspace": colorspace
}
group_node = [x for x in instance if x.Class() == "Group"][0]
dl_chunk_size = 1
if "deadlineChunkSize" in group_node.knobs():
dl_chunk_size = group_node["deadlineChunkSize"].value()
dl_priority = 50
if "deadlinePriority" in group_node.knobs():
dl_priority = group_node["deadlinePriority"].value()
dl_concurrent_tasks = 0
if "deadlineConcurrentTasks" in group_node.knobs():
dl_concurrent_tasks = group_node["deadlineConcurrentTasks"].value()
instance.data.update({
"versionData": version_data,
"path": path,
"outputDir": output_dir,
"ext": ext,
"label": label,
"outputType": output_type,
"colorspace": colorspace,
"deadlineChunkSize": dl_chunk_size,
"deadlinePriority": dl_priority,
"deadlineConcurrentTasks": dl_concurrent_tasks
})
if self.is_prerender(_families_test):
instance.data.update({
"handleStart": 0,
"handleEnd": 0,
"frameStart": first_frame,
"frameEnd": last_frame,
"frameStartHandle": first_frame,
"frameEndHandle": last_frame,
})
else:
instance.data.update({
"handleStart": handle_start,
"handleEnd": handle_end,
"frameStart": first_frame + handle_start,
"frameEnd": last_frame - handle_end,
"frameStartHandle": first_frame,
"frameEndHandle": last_frame,
})
# make sure rendered sequence on farm will
# be used for exctract review
if not instance.data["review"]:
instance.data["useSequenceForReview"] = False
self.log.debug("instance.data: {}".format(pformat(instance.data)))
def is_prerender(self, families):
return next((f for f in families if "prerender" in f), None)

View file

@ -2,11 +2,10 @@
"""Validate if instance asset is the same as context asset."""
from __future__ import absolute_import
import nuke
import pyblish.api
import openpype.hosts.nuke.api.lib as nlib
import openpype.hosts.nuke.api as nuke_api
from openpype.pipeline.publish import (
ValidateContentsOrder,
PublishXmlValidationError,
@ -51,9 +50,10 @@ class SelectInvalidInstances(pyblish.api.Action):
self.deselect()
def select(self, instances):
nlib.select_nodes(
[nuke.toNode(str(x)) for x in instances]
)
for inst in instances:
if inst.data.get("transientData", {}).get("node"):
select_node = inst.data["transientData"]["node"]
select_node["selected"].setValue(True)
def deselect(self):
nlib.reset_selection()
@ -82,13 +82,14 @@ class RepairSelectInvalidInstances(pyblish.api.Action):
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(failed, plugin)
self.log.debug(instances)
context_asset = context.data["assetEntity"]["name"]
for instance in instances:
origin_node = instance[0]
nuke_api.lib.recreate_instance(
origin_node, avalon_data={"asset": context_asset}
)
node = instance.data["transientData"]["node"]
node_data = nlib.get_node_data(node, nlib.INSTANCE_DATA_KNOB)
node_data["asset"] = context_asset
nlib.set_node_data(node, nlib.INSTANCE_DATA_KNOB, node_data)
class ValidateCorrectAssetName(pyblish.api.InstancePlugin):
@ -112,6 +113,7 @@ class ValidateCorrectAssetName(pyblish.api.InstancePlugin):
def process(self, instance):
asset = instance.data.get("asset")
context_asset = instance.context.data["assetEntity"]["name"]
node = instance.data["transientData"]["node"]
msg = (
"Instance `{}` has wrong shot/asset name:\n"
@ -123,7 +125,7 @@ class ValidateCorrectAssetName(pyblish.api.InstancePlugin):
if asset != context_asset:
raise PublishXmlValidationError(
self, msg, formatting_data={
"node_name": instance[0]["name"].value(),
"node_name": node.name(),
"wrong_name": asset,
"correct_name": context_asset
}

View file

@ -1,6 +1,6 @@
import nuke
import pyblish
from openpype.hosts.nuke.api.lib import maintained_selection
from openpype.hosts.nuke import api as napi
from openpype.pipeline import PublishXmlValidationError
@ -25,14 +25,14 @@ class SelectCenterInNodeGraph(pyblish.api.Action):
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(failed, plugin)
all_xC = list()
all_yC = list()
all_xC = []
all_yC = []
# maintain selection
with maintained_selection():
with napi.maintained_selection():
# collect all failed nodes xpos and ypos
for instance in instances:
bdn = instance[0]
bdn = instance.data["transientData"]["node"]
xC = bdn.xpos() + bdn.screenWidth() / 2
yC = bdn.ypos() + bdn.screenHeight() / 2
@ -46,7 +46,6 @@ class SelectCenterInNodeGraph(pyblish.api.Action):
nuke.zoom(2, [min(all_xC), min(all_yC)])
@pyblish.api.log
class ValidateBackdrop(pyblish.api.InstancePlugin):
""" Validate amount of nodes on backdrop node in case user
forgoten to add nodes above the publishing backdrop node.
@ -60,7 +59,8 @@ class ValidateBackdrop(pyblish.api.InstancePlugin):
actions = [SelectCenterInNodeGraph]
def process(self, instance):
connections_out = instance.data["nodeConnectionsOut"]
child_nodes = instance.data["transientData"]["childNodes"]
connections_out = instance.data["transientData"]["nodeConnectionsOut"]
msg_multiple_outputs = (
"Only one outcoming connection from "
@ -78,10 +78,10 @@ class ValidateBackdrop(pyblish.api.InstancePlugin):
self.log.debug(
"Amount of nodes on instance: {}".format(
len(instance))
len(child_nodes))
)
if len(instance) == 1:
if child_nodes == []:
raise PublishXmlValidationError(
self,
msg_no_nodes,

View file

@ -1,6 +1,6 @@
import pyblish
from openpype.pipeline import PublishXmlValidationError
from openpype.hosts.nuke.api import maintained_selection
from openpype.hosts.nuke import api as napi
import nuke
@ -26,45 +26,44 @@ class OpenFailedGroupNode(pyblish.api.Action):
instances = pyblish.api.instances_by_plugin(failed, plugin)
# maintain selection
with maintained_selection():
with napi.maintained_selection():
# collect all failed nodes xpos and ypos
for instance in instances:
grpn = instance[0]
grpn = instance.data["transientData"]["node"]
nuke.showDag(grpn)
@pyblish.api.log
class ValidateGizmo(pyblish.api.InstancePlugin):
"""Validate amount of output nodes in gizmo (group) node"""
order = pyblish.api.ValidatorOrder
optional = True
families = ["gizmo"]
label = "Validate Gizmo (Group)"
label = "Validate Gizmo (group)"
hosts = ["nuke"]
actions = [OpenFailedGroupNode]
def process(self, instance):
grpn = instance[0]
grpn = instance.data["transientData"]["node"]
with grpn:
connections_out = nuke.allNodes('Output')
msg_multiple_outputs = (
"Only one outcoming connection from "
"\"{}\" is allowed").format(instance.data["name"])
if len(connections_out) > 1:
msg_multiple_outputs = (
"Only one outcoming connection from "
"\"{}\" is allowed").format(instance.data["name"])
raise PublishXmlValidationError(
self, msg_multiple_outputs, "multiple_outputs",
{"node_name": grpn["name"].value()}
)
connections_in = nuke.allNodes('Input')
msg_missing_inputs = (
"At least one Input node has to be inside Group: "
"\"{}\"").format(instance.data["name"])
if len(connections_in) == 0:
msg_missing_inputs = (
"At least one Input node has to be inside Group: "
"\"{}\"").format(instance.data["name"])
raise PublishXmlValidationError(
self, msg_missing_inputs, "no_inputs",
{"node_name": grpn["name"].value()}

View file

@ -61,17 +61,11 @@ class ValidateKnobs(pyblish.api.ContextPlugin):
invalid_knobs = []
for instance in context:
# Filter publisable instances.
if not instance.data["publish"]:
continue
# Filter families.
families = [instance.data["family"]]
families += instance.data.get("families", [])
if not families:
continue
# Get all knobs to validate.
knobs = {}
for family in families:

View file

@ -1,12 +1,19 @@
import pyblish.api
from openpype.hosts.nuke.api import maintained_selection
from openpype.pipeline import PublishXmlValidationError
from openpype.hosts.nuke import api as napi
from openpype.pipeline.publish import RepairAction
from openpype.pipeline import (
PublishXmlValidationError,
OptionalPyblishPluginMixin
)
import nuke
class ValidateOutputResolution(pyblish.api.InstancePlugin):
class ValidateOutputResolution(
OptionalPyblishPluginMixin,
pyblish.api.InstancePlugin
):
"""Validates Output Resolution.
It is making sure the resolution of write's input is the same as
@ -15,7 +22,7 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin):
order = pyblish.api.ValidatorOrder
optional = True
families = ["render", "render.local", "render.farm"]
families = ["render"]
label = "Write Resolution"
hosts = ["nuke"]
actions = [RepairAction]
@ -24,14 +31,22 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin):
resolution_msg = "Reformat is set to wrong format"
def process(self, instance):
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:
raise PublishXmlValidationError(self, invalid)
@classmethod
def get_reformat(cls, instance):
child_nodes = (
instance.data.get("transientData", {}).get("childNodes")
or instance
)
reformat = None
for inode in instance:
for inode in child_nodes:
if inode.Class() != "Reformat":
continue
reformat = inode
@ -64,21 +79,26 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin):
@classmethod
def repair(cls, instance):
child_nodes = (
instance.data.get("transientData", {}).get("childNodes")
or instance
)
invalid = cls.get_invalid(instance)
grp_node = instance[0]
grp_node = instance.data["transientData"]["node"]
if cls.missing_msg == invalid:
# make sure we are inside of the group node
with grp_node:
# find input node and select it
_input = None
for inode in instance:
for inode in child_nodes:
if inode.Class() != "Input":
continue
_input = inode
# add reformat node under it
with maintained_selection():
with napi.maintained_selection():
_input['selected'].setValue(True)
_rfn = nuke.createNode("Reformat", "name Reformat01")
_rfn["resize"].setValue(0)

View file

@ -17,7 +17,6 @@ class FixProxyMode(pyblish.api.Action):
rootNode["proxy"].setValue(False)
@pyblish.api.log
class ValidateProxyMode(pyblish.api.ContextPlugin):
"""Validate active proxy mode"""

View file

@ -1,87 +0,0 @@
import os
import nuke
import toml
import pyblish.api
from bson.objectid import ObjectId
from openpype.pipeline import (
discover_loader_plugins,
load_container,
)
class RepairReadLegacyAction(pyblish.api.Action):
label = "Repair"
icon = "wrench"
on = "failed"
def process(self, context, plugin):
# Get the errored instances
failed = []
for result in context.data["results"]:
if (result["error"] is not None and result["instance"] is not None
and result["instance"] not in failed):
failed.append(result["instance"])
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(failed, plugin)
for instance in instances:
data = toml.loads(instance[0]["avalon"].value())
data["name"] = instance[0].name()
data["xpos"] = instance[0].xpos()
data["ypos"] = instance[0].ypos()
data["extension"] = os.path.splitext(
instance[0]["file"].value()
)[1][1:]
data["connections"] = []
for d in instance[0].dependent():
for i in range(d.inputs()):
if d.input(i) == instance[0]:
data["connections"].append([i, d])
nuke.delete(instance[0])
loader_name = "LoadSequence"
if data["extension"] == "mov":
loader_name = "LoadMov"
loader_plugin = None
for Loader in discover_loader_plugins():
if Loader.__name__ != loader_name:
continue
loader_plugin = Loader
load_container(
Loader=loader_plugin,
representation=ObjectId(data["representation"])
)
node = nuke.toNode(data["name"])
for connection in data["connections"]:
connection[1].setInput(connection[0], node)
node.setXYpos(data["xpos"], data["ypos"])
class ValidateReadLegacy(pyblish.api.InstancePlugin):
"""Validate legacy read instance[0]s."""
order = pyblish.api.ValidatorOrder
optional = True
families = ["read.legacy"]
label = "Read Legacy"
hosts = ["nuke"]
actions = [RepairReadLegacyAction]
def process(self, instance):
msg = "Clean up legacy read node \"{}\"".format(instance)
assert False, msg

View file

@ -4,7 +4,6 @@ import clique
from openpype.pipeline import PublishXmlValidationError
@pyblish.api.log
class RepairActionBase(pyblish.api.Action):
on = "failed"
icon = "wrench"
@ -23,6 +22,7 @@ class RepairActionBase(pyblish.api.Action):
def repair_knob(self, instances, state):
for instance in instances:
node = instance.data["transientData"]["node"]
files_remove = [os.path.join(instance.data["outputDir"], f)
for r in instance.data.get("representations", [])
for f in r.get("files", [])
@ -31,7 +31,7 @@ class RepairActionBase(pyblish.api.Action):
for f in files_remove:
os.remove(f)
self.log.debug("removing file: {}".format(f))
instance[0]["render"].setValue(state)
node["render"].setValue(state)
self.log.info("Rendering toggled to `{}`".format(state))
@ -62,9 +62,10 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
actions = [RepairCollectionActionToLocal, RepairCollectionActionToFarm]
def process(self, instance):
node = instance.data["transientData"]["node"]
f_data = {
"node_name": instance[0]["name"].value()
"node_name": node.name()
}
for repre in instance.data["representations"]:

View file

@ -1,17 +1,19 @@
from pprint import pformat
from copy import deepcopy
import pyblish.api
from openpype.pipeline import PublishXmlValidationError
from openpype.pipeline import (
PublishXmlValidationError,
OptionalPyblishPluginMixin
)
from openpype.pipeline.publish import RepairAction
from openpype.hosts.nuke.api.lib import (
get_avalon_knob_data,
WorkfileSettings
)
import nuke
@pyblish.api.log
class ValidateScriptAttributes(pyblish.api.InstancePlugin):
class ValidateScriptAttributes(
OptionalPyblishPluginMixin,
pyblish.api.InstancePlugin
):
""" Validates file output. """
order = pyblish.api.ValidatorOrder + 0.1
@ -22,14 +24,12 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin):
actions = [RepairAction]
def process(self, instance):
root = nuke.root()
knob_data = get_avalon_knob_data(root)
if not self.is_active(instance.data):
return
script_data = deepcopy(instance.context.data["scriptData"])
asset = instance.data["assetEntity"]
# get asset data frame values
frame_start = asset["data"]["frameStart"]
frame_end = asset["data"]["frameEnd"]
handle_start = asset["data"]["handleStart"]
handle_end = asset["data"]["handleEnd"]
# These attributes will be checked
attributes = [
@ -48,37 +48,11 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin):
for attr in attributes
if attr in asset["data"]
}
# fix float to max 4 digints (only for evaluating)
fps_data = float("{0:.4f}".format(
asset_attributes["fps"]))
# fix frame values to include handles
asset_attributes.update({
"frameStart": frame_start - handle_start,
"frameEnd": frame_end + handle_end,
"fps": fps_data
})
self.log.debug(pformat(
asset_attributes
))
# Get format
_format = root["format"].value()
# Get values from nukescript
script_attributes = {
"handleStart": int(knob_data["handleStart"]),
"handleEnd": int(knob_data["handleEnd"]),
"fps": float("{0:.4f}".format(root['fps'].value())),
"frameStart": int(root["first_frame"].getValue()),
"frameEnd": int(root["last_frame"].getValue()),
"resolutionWidth": _format.width(),
"resolutionHeight": _format.height(),
"pixelAspect": _format.pixelAspect()
}
self.log.debug(pformat(
script_attributes
))
asset_attributes["fps"] = float("{0:.4f}".format(
asset_attributes["fps"]))
script_data["fps"] = float("{0:.4f}".format(
script_data["fps"]))
# Compare asset's values Nukescript X Database
not_matching = []
@ -87,14 +61,14 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin):
"Asset vs Script attribute \"{}\": {}, {}".format(
attr,
asset_attributes[attr],
script_attributes[attr]
script_data[attr]
)
)
if asset_attributes[attr] != script_attributes[attr]:
if asset_attributes[attr] != script_data[attr]:
not_matching.append({
"name": attr,
"expected": asset_attributes[attr],
"actual": script_attributes[attr]
"actual": script_data[attr]
})
# Raise error if not matching
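
A worked example (assumed values) of the 4-digit fps rounding applied to both sides before comparison, so tiny float differences between the asset database and the script do not fail validation:

fps_from_asset = 23.976023976023978          # hypothetical asset/database value
fps_from_script = 23.976                     # hypothetical nuke.root()['fps'] value
rounded_asset = float("{0:.4f}".format(fps_from_asset))    # 23.976
rounded_script = float("{0:.4f}".format(fps_from_script))  # 23.976
assert rounded_asset == rounded_script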

View file

@ -1,53 +0,0 @@
import pyblish.api
import openpype.hosts.nuke.lib
class RepairNukeWriteDeadlineTab(pyblish.api.Action):
label = "Repair"
icon = "wrench"
on = "failed"
def process(self, context, plugin):
# Get the errored instances
failed = []
for result in context.data["results"]:
if (result["error"] is not None and result["instance"] is not None
and result["instance"] not in failed):
failed.append(result["instance"])
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(failed, plugin)
for instance in instances:
group_node = [x for x in instance if x.Class() == "Group"][0]
# Remove existing knobs.
knob_names = openpype.hosts.nuke.lib.get_deadline_knob_names()
for name, knob in group_node.knobs().items():
if name in knob_names:
group_node.removeKnob(knob)
openpype.hosts.nuke.lib.add_deadline_tab(group_node)
class ValidateNukeWriteDeadlineTab(pyblish.api.InstancePlugin):
"""Ensure Deadline tab is present and current."""
order = pyblish.api.ValidatorOrder
label = "Deadline Tab"
hosts = ["nuke"]
optional = True
families = ["render"]
actions = [RepairNukeWriteDeadlineTab]
def process(self, instance):
group_node = [x for x in instance if x.Class() == "Group"][0]
knob_names = openpype.hosts.nuke.lib.get_deadline_knob_names()
missing_knobs = []
for name in knob_names:
if name not in group_node.knobs().keys():
missing_knobs.append(name)
assert not missing_knobs, "Missing knobs: {}".format(missing_knobs)

View file

@ -1,108 +0,0 @@
import toml
import nuke
import pyblish.api
from openpype.pipeline import discover_creator_plugins
from openpype.pipeline.publish import RepairAction
from openpype.hosts.nuke.api.lib import get_avalon_knob_data
class ValidateWriteLegacy(pyblish.api.InstancePlugin):
"""Validate legacy write nodes."""
order = pyblish.api.ValidatorOrder
optional = True
families = ["write"]
label = "Validate Write Legacy"
hosts = ["nuke"]
actions = [RepairAction]
def process(self, instance):
node = instance[0]
msg = "Clean up legacy write node \"{}\"".format(instance)
if node.Class() not in ["Group", "Write"]:
return
# test avalon knobs
family_knobs = ["ak:family", "avalon:family"]
family_test = [k for k in node.knobs().keys() if k in family_knobs]
self.log.debug("_ family_test: {}".format(family_test))
# test if render in family test knob
# and only one item should be available
assert len(family_test) == 1, msg + " > More avalon attributes"
assert "render" in node[family_test[0]].value() \
or "still" in node[family_test[0]].value(), msg + \
" > Not correct family"
# test if `file` knob in node, this way old
# non-group-node write could be detected
assert "file" not in node.knobs(), msg + \
" > file knob should not be present"
# check if write node is having old render targeting
assert "render_farm" not in node.knobs(), msg + \
" > old way of setting render target"
@classmethod
def repair(cls, instance):
node = instance[0]
if "Write" in node.Class():
data = toml.loads(node["avalon"].value())
else:
data = get_avalon_knob_data(node)
# collect reusable data
data["XYpos"] = (node.xpos(), node.ypos())
data["input"] = node.input(0)
data["publish"] = node["publish"].value()
data["render"] = node["render"].value()
data["render_farm"] = node["render_farm"].value()
data["review"] = node["review"].value()
data["use_limit"] = node["use_limit"].value()
data["first"] = node["first"].value()
data["last"] = node["last"].value()
family = data["family"]
cls.log.debug("_ orig node family: {}".format(family))
# define what family of write node should be recreated
if family == "render":
Create_name = "CreateWriteRender"
elif family == "prerender":
Create_name = "CreateWritePrerender"
elif family == "still":
Create_name = "CreateWriteStill"
# get appropriate plugin class
creator_plugin = None
for Creator in discover_creator_plugins():
if Creator.__name__ != Create_name:
continue
creator_plugin = Creator
# delete the legaci write node
nuke.delete(node)
# create write node with creator
new_node_name = data["subset"]
creator_plugin(new_node_name, data["asset"]).process()
node = nuke.toNode(new_node_name)
node.setXYpos(*data["XYpos"])
node.setInput(0, data["input"])
node["publish"].setValue(data["publish"])
node["review"].setValue(data["review"])
node["use_limit"].setValue(data["use_limit"])
node["first"].setValue(data["first"])
node["last"].setValue(data["last"])
# recreate render targets
if data["render"]:
node["render"].setValue("Local")
if data["render_farm"]:
node["render"].setValue("On farm")

View file

@ -5,10 +5,13 @@ from openpype.hosts.nuke.api.lib import (
set_node_knobs_from_settings,
color_gui_to_int
)
from openpype.pipeline import PublishXmlValidationError
from openpype.pipeline.publish import (
PublishXmlValidationError,
OptionalPyblishPluginMixin,
)
@pyblish.api.log
class RepairNukeWriteNodeAction(pyblish.api.Action):
label = "Repair"
on = "failed"
@ -18,10 +21,15 @@ class RepairNukeWriteNodeAction(pyblish.api.Action):
instances = get_errored_instances_from_context(context)
for instance in instances:
write_group_node = instance[0]
child_nodes = (
instance.data.get("transientData", {}).get("childNodes")
or instance
)
write_group_node = instance.data["transientData"]["node"]
# get write node from inside of group
write_node = None
for x in instance:
for x in child_nodes:
if x.Class() == "Write":
write_node = x
@ -32,7 +40,10 @@ class RepairNukeWriteNodeAction(pyblish.api.Action):
self.log.info("Node attributes were fixed")
class ValidateNukeWriteNode(pyblish.api.InstancePlugin):
class ValidateNukeWriteNode(
OptionalPyblishPluginMixin,
pyblish.api.InstancePlugin
):
""" Validate Write node's knobs.
Compare knobs on write node inside the render group
@ -42,16 +53,24 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin):
order = pyblish.api.ValidatorOrder
optional = True
families = ["render"]
label = "Write Node"
label = "Validate write node"
actions = [RepairNukeWriteNodeAction]
hosts = ["nuke"]
def process(self, instance):
write_group_node = instance[0]
if not self.is_active(instance.data):
return
child_nodes = (
instance.data.get("transientData", {}).get("childNodes")
or instance
)
write_group_node = instance.data["transientData"]["node"]
# get write node from inside of group
write_node = None
for x in instance:
for x in child_nodes:
if x.Class() == "Write":
write_node = x
@ -60,17 +79,31 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin):
correct_data = get_write_node_template_attr(write_group_node)
if correct_data:
check_knobs = correct_data["knobs"]
else:
return
check = []
self.log.debug("__ write_node: {}".format(
write_node
))
self.log.debug("__ correct_data: {}".format(
correct_data
))
for knob_data in correct_data["knobs"]:
knob_type = knob_data["type"]
self.log.debug("__ knob_type: {}".format(
knob_type
))
if (
knob_type == "__legacy__"
):
raise PublishXmlValidationError(
self, (
"Please update data in settings 'project_settings"
"/nuke/imageio/nodes/requiredNodes'"
),
key="legacy"
)
for knob_data in check_knobs:
key = knob_data["name"]
value = knob_data["value"]
node_value = write_node[key].value()
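
A sketch of the per-knob comparison under an assumed settings payload; the entry names and values below are hypothetical, only the 'type'/'name'/'value' keys come from the code above, and `write_node` is the node found inside the group:

check_knobs = [
    {"type": "text", "name": "file_type", "value": "exr"},       # hypothetical entry
    {"type": "bool", "name": "create_directories", "value": True},
]
invalid = []
for knob_data in check_knobs:
    key, value = knob_data["name"], knob_data["value"]
    node_value = write_node[key].value()
    if node_value != value:
        invalid.append({"name": key, "expected": value, "actual": node_value})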

View file

@ -1,64 +1,5 @@
import nuke
import os
from openpype.lib import Logger
from openpype.pipeline import install_host
from openpype.hosts.nuke import api
from openpype.hosts.nuke.api.lib import (
on_script_load,
check_inventory_versions,
WorkfileSettings,
dirmap_file_name_filter,
add_scripts_gizmo
)
from openpype.settings import get_project_settings
from openpype.hosts.nuke.api import NukeHost
log = Logger.get_logger(__name__)
install_host(api)
# fix ffmpeg settings on script
nuke.addOnScriptLoad(on_script_load)
# set checker for last versions on loaded containers
nuke.addOnScriptLoad(check_inventory_versions)
nuke.addOnScriptSave(check_inventory_versions)
# # set apply all workfile settings on script load and save
nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)
nuke.addFilenameFilter(dirmap_file_name_filter)
log.info('Automatic syncing of write file knob to script version')
def add_scripts_menu():
try:
from scriptsmenu import launchfornuke
except ImportError:
log.warning(
"Skipping studio.menu install, because "
"'scriptsmenu' module seems unavailable."
)
return
# load configuration of custom menu
project_settings = get_project_settings(os.getenv("AVALON_PROJECT"))
config = project_settings["nuke"]["scriptsmenu"]["definition"]
_menu = project_settings["nuke"]["scriptsmenu"]["name"]
if not config:
log.warning("Skipping studio menu, no definition found.")
return
# run the launcher for Maya menu
studio_menu = launchfornuke.main(title=_menu.title())
# apply configuration
studio_menu.build_from_configuration(studio_menu, config)
add_scripts_menu()
add_scripts_gizmo()
host = NukeHost()
install_host(host)

View file

@ -0,0 +1,2 @@
[/Script/OpenPype.OpenPypeSettings]
FolderColor=(R=91,G=197,B=220,A=255)

View file

@ -42,6 +42,7 @@ public class OpenPype : ModuleRules
"Engine",
"Slate",
"SlateCore",
"AssetTools"
// ... add private dependencies that you statically link with here ...
}
);

View file

@ -1,6 +1,11 @@
#include "OpenPype.h"
#include "ISettingsContainer.h"
#include "ISettingsModule.h"
#include "ISettingsSection.h"
#include "LevelEditor.h"
#include "OpenPypePythonBridge.h"
#include "OpenPypeSettings.h"
#include "OpenPypeStyle.h"
@ -11,7 +16,6 @@ static const FName OpenPypeTabName("OpenPype");
// This function is triggered when the plugin is staring up
void FOpenPypeModule::StartupModule()
{
FOpenPypeStyle::Initialize();
FOpenPypeStyle::SetIcon("Logo", "openpype40");
@ -37,6 +41,7 @@ void FOpenPypeModule::StartupModule()
LevelEditorModule.GetMenuExtensibilityManager()->AddExtender(MenuExtender);
LevelEditorModule.GetToolBarExtensibilityManager()->AddExtender(ToolbarExtender);
RegisterSettings();
}
void FOpenPypeModule::ShutdownModule()
@ -64,7 +69,6 @@ void FOpenPypeModule::AddMenuEntry(FMenuBuilder& MenuBuilder)
FSlateIcon(FOpenPypeStyle::GetStyleSetName(), "OpenPype.Logo"),
FUIAction(FExecuteAction::CreateRaw(this, &FOpenPypeModule::MenuDialog))
);
}
MenuBuilder.EndSection();
}
@ -89,13 +93,58 @@ void FOpenPypeModule::AddToobarEntry(FToolBarBuilder& ToolbarBuilder)
ToolbarBuilder.EndSection();
}
void FOpenPypeModule::RegisterSettings()
{
ISettingsModule& SettingsModule = FModuleManager::LoadModuleChecked<ISettingsModule>("Settings");
void FOpenPypeModule::MenuPopup() {
// Create the new category
// TODO: After the plugin is moved from the game module to the editor module, this might need to move as well!
ISettingsContainerPtr SettingsContainer = SettingsModule.GetContainer("Project");
UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
// Register the settings
ISettingsSectionPtr SettingsSection = SettingsModule.RegisterSettings("Project", "OpenPype", "General",
LOCTEXT("RuntimeGeneralSettingsName",
"General"),
LOCTEXT("RuntimeGeneralSettingsDescription",
"Base configuration for Open Pype Module"),
Settings
);
// Register the save handler for the settings; it can be used to
// validate them or react to settings changes.
if (SettingsSection.IsValid())
{
SettingsSection->OnModified().BindRaw(this, &FOpenPypeModule::HandleSettingsSaved);
}
}
bool FOpenPypeModule::HandleSettingsSaved()
{
UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
bool ResaveSettings = false;
// You can put any validation code in here and resave the settings in case an invalid
// value has been entered
if (ResaveSettings)
{
Settings->SaveConfig();
}
return true;
}
void FOpenPypeModule::MenuPopup()
{
UOpenPypePythonBridge* bridge = UOpenPypePythonBridge::Get();
bridge->RunInPython_Popup();
}
void FOpenPypeModule::MenuDialog() {
void FOpenPypeModule::MenuDialog()
{
UOpenPypePythonBridge* bridge = UOpenPypePythonBridge::Get();
bridge->RunInPython_Dialog();
}

View file

@ -1,4 +1,6 @@
#include "OpenPypeLib.h"
#include "AssetViewUtils.h"
#include "Misc/Paths.h"
#include "Misc/ConfigCacheIni.h"
#include "UObject/UnrealType.h"
@ -10,21 +12,23 @@
* @warning This color will appear only after Editor restart. Is there a better way?
*/
void UOpenPypeLib::CSetFolderColor(FString FolderPath, FLinearColor FolderColor, bool bForceAdd)
bool UOpenPypeLib::SetFolderColor(const FString& FolderPath, const FLinearColor& FolderColor, const bool& bForceAdd)
{
auto SaveColorInternal = [](FString InPath, FLinearColor InFolderColor)
if (AssetViewUtils::DoesFolderExist(FolderPath))
{
// Saves the color of the folder to the config
if (FPaths::FileExists(GEditorPerProjectIni))
{
GConfig->SetString(TEXT("PathColor"), *InPath, *InFolderColor.ToString(), GEditorPerProjectIni);
}
const TSharedPtr<FLinearColor> LinearColor = MakeShared<FLinearColor>(FolderColor);
};
SaveColorInternal(FolderPath, FolderColor);
AssetViewUtils::SaveColor(FolderPath, LinearColor, true);
UE_LOG(LogAssetData, Display, TEXT("A color {%s} has been set to folder \"%s\""), *LinearColor->ToString(),
*FolderPath)
return true;
}
UE_LOG(LogAssetData, Display, TEXT("Setting a color {%s} to folder \"%s\" has failed! Directory doesn't exist!"),
*FolderColor.ToString(), *FolderPath)
return false;
}
/**
* Returns all properties on given object
* @param cls - class

View file

@ -3,6 +3,8 @@
#include "OpenPypePublishInstance.h"
#include "AssetRegistryModule.h"
#include "NotificationManager.h"
#include "OpenPypeLib.h"
#include "OpenPypeSettings.h"
#include "SNotificationList.h"
//Moves all the invalid pointers to the end to prepare them for the shrinking
@ -36,6 +38,11 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj
AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated);
AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved);
AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated);
#ifdef WITH_EDITOR
ColorOpenPypeDirs();
#endif
}
void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData)
@ -58,7 +65,7 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData)
if (AssetDataInternal.Emplace(Asset).IsValidId())
{
UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"),
*this->GetName(), *Asset->GetName());
*this->GetName(), *Asset->GetName());
}
}
}
@ -96,6 +103,48 @@ bool UOpenPypePublishInstance::IsUnderSameDir(const UObject* InAsset) const
#ifdef WITH_EDITOR
void UOpenPypePublishInstance::ColorOpenPypeDirs()
{
FString PathName = this->GetPathName();
//Check whether the path contains the defined OpenPype folder
if (!PathName.Contains(TEXT("OpenPype"))) return;
//Get the base path for open pype
FString PathLeft, PathRight;
PathName.Split(FString("OpenPype"), &PathLeft, &PathRight);
if (PathLeft.IsEmpty() || PathRight.IsEmpty())
{
UE_LOG(LogAssetData, Error, TEXT("Failed to retrieve the base OpenPype directory!"))
return;
}
PathName.RemoveFromEnd(PathRight, ESearchCase::CaseSensitive);
//Get the current settings
const UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
//Color the base folder
UOpenPypeLib::SetFolderColor(PathName, Settings->GetFolderFColor(), false);
//Get Sub paths, iterate through them and color them according to the folder color in UOpenPypeSettings
const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>(
"AssetRegistry");
TArray<FString> PathList;
AssetRegistryModule.Get().GetSubPaths(PathName, PathList, true);
if (PathList.Num() > 0)
{
for (const FString& Path : PathList)
{
UOpenPypeLib::SetFolderColor(Path, Settings->GetFolderFColor(), false);
}
}
}
void UOpenPypePublishInstance::SendNotification(const FString& Text) const
{
FNotificationInfo Info{FText::FromString(Text)};

View file

@ -0,0 +1,21 @@
// Fill out your copyright notice in the Description page of Project Settings.
#include "OpenPypeSettings.h"
#include "IPluginManager.h"
#include "UObjectGlobals.h"
/**
* Mainly used to initialize default values when the DefaultOpenPypeSettings.ini file does not exist in the saved config
*/
UOpenPypeSettings::UOpenPypeSettings(const FObjectInitializer& ObjectInitializer)
{
const FString ConfigFilePath = OPENPYPE_SETTINGS_FILEPATH;
// This should probably be set via the UE reflection system in the future
FColor Color;
GConfig->GetColor(TEXT("/Script/OpenPype.OpenPypeSettings"), TEXT("FolderColor"), Color, ConfigFilePath);
FolderColor = Color;
}

View file

@ -12,10 +12,11 @@ public:
virtual void ShutdownModule() override;
private:
void RegisterSettings();
bool HandleSettingsSaved();
void AddMenuEntry(FMenuBuilder& MenuBuilder);
void AddToobarEntry(FToolBarBuilder& ToolbarBuilder);
void MenuPopup();
void MenuDialog();
};

View file

@ -5,14 +5,14 @@
UCLASS(Blueprintable)
class OPENPYPE_API UOpenPypeLib : public UObject
class OPENPYPE_API UOpenPypeLib : public UBlueprintFunctionLibrary
{
GENERATED_BODY()
public:
UFUNCTION(BlueprintCallable, Category = Python)
static void CSetFolderColor(FString FolderPath, FLinearColor FolderColor, bool bForceAdd);
static bool SetFolderColor(const FString& FolderPath, const FLinearColor& FolderColor, const bool& bForceAdd);
UFUNCTION(BlueprintCallable, Category = Python)
static TArray<FString> GetAllProperties(UClass* cls);
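A short usage sketch for GetAllProperties (the inspected class and log category are illustrative only, not taken from this commit):
	// Dump every reflected property name of UStaticMesh to the output log.
	const TArray<FString> PropertyNames = UOpenPypeLib::GetAllProperties(UStaticMesh::StaticClass());
	for (const FString& Name : PropertyNames)
	{
		UE_LOG(LogTemp, Display, TEXT("Property: %s"), *Name);
	}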

View file

@ -10,8 +10,6 @@ class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset
GENERATED_UCLASS_BODY()
public:
/**
/**
* Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is
* placed in)
@ -58,7 +56,9 @@ public:
UFUNCTION(BlueprintCallable, BlueprintPure)
TSet<UObject*> GetAllAssets() const
{
const TSet<TSoftObjectPtr<UObject>>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal;
const TSet<TSoftObjectPtr<UObject>>& IteratedSet = bAddExternalAssets
? AssetDataInternal.Union(AssetDataExternal)
: AssetDataInternal;
//Create a new TSet only with raw pointers.
TSet<UObject*> ResultSet;
@ -69,9 +69,7 @@ public:
return ResultSet;
}
private:
UPROPERTY(VisibleAnywhere, Category="Assets")
TSet<TSoftObjectPtr<UObject>> AssetDataInternal;
@ -94,10 +92,10 @@ private:
#ifdef WITH_EDITOR
void ColorOpenPypeDirs();
void SendNotification(const FString& Text) const;
virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override;
#endif
};

View file

@ -0,0 +1,32 @@
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "Object.h"
#include "OpenPypeSettings.generated.h"
#define OPENPYPE_SETTINGS_FILEPATH IPluginManager::Get().FindPlugin("OpenPype")->GetBaseDir() / TEXT("Config") / TEXT("DefaultOpenPypeSettings.ini")
UCLASS(Config=OpenPypeSettings, DefaultConfig)
class OPENPYPE_API UOpenPypeSettings : public UObject
{
GENERATED_UCLASS_BODY()
UFUNCTION(BlueprintCallable, BlueprintPure, Category = Settings)
FColor GetFolderFColor() const
{
return FolderColor;
}
UFUNCTION(BlueprintCallable, BlueprintPure, Category = Settings)
FLinearColor GetFolderFLinearColor() const
{
return FLinearColor(FolderColor);
}
protected:
UPROPERTY(config, EditAnywhere, Category = Folders)
FColor FolderColor = FColor(25,45,223);
};
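For context, a hedged sketch of how other editor code can consume these settings (the call site is hypothetical; both getters come from the class above):
	const UOpenPypeSettings* Settings = GetDefault<UOpenPypeSettings>();
	const FColor FolderColor = Settings->GetFolderFColor();                    // as stored in the ini
	const FLinearColor LinearFolderColor = Settings->GetFolderFLinearColor();  // converted for linear-color APIs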

View file

@ -0,0 +1,2 @@
[/Script/OpenPype.OpenPypeSettings]
FolderColor=(R=91,G=197,B=220,A=255)

View file

@ -48,6 +48,7 @@ public class OpenPype : ModuleRules
"Engine",
"Slate",
"SlateCore",
"AssetTools"
// ... add private dependencies that you statically link with here ...
}
);

View file

@ -1,8 +1,12 @@
#include "OpenPype.h"
#include "ISettingsContainer.h"
#include "ISettingsModule.h"
#include "ISettingsSection.h"
#include "OpenPypeStyle.h"
#include "OpenPypeCommands.h"
#include "OpenPypePythonBridge.h"
#include "LevelEditor.h"
#include "OpenPypeSettings.h"
#include "Misc/MessageDialog.h"
#include "ToolMenus.h"
@ -29,7 +33,10 @@ void FOpenPypeModule::StartupModule()
FExecuteAction::CreateRaw(this, &FOpenPypeModule::MenuDialog),
FCanExecuteAction());
UToolMenus::RegisterStartupCallback(FSimpleMulticastDelegate::FDelegate::CreateRaw(this, &FOpenPypeModule::RegisterMenus));
UToolMenus::RegisterStartupCallback(
FSimpleMulticastDelegate::FDelegate::CreateRaw(this, &FOpenPypeModule::RegisterMenus));
RegisterSettings();
}
void FOpenPypeModule::ShutdownModule()
@ -43,6 +50,50 @@ void FOpenPypeModule::ShutdownModule()
FOpenPypeCommands::Unregister();
}
void FOpenPypeModule::RegisterSettings()
{
ISettingsModule& SettingsModule = FModuleManager::LoadModuleChecked<ISettingsModule>("Settings");
// Create the new category
// TODO: Once the plugin moves from the game module to an editor module, this registration might need to move as well!
ISettingsContainerPtr SettingsContainer = SettingsModule.GetContainer("Project");
UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
// Register the settings
ISettingsSectionPtr SettingsSection = SettingsModule.RegisterSettings("Project", "OpenPype", "General",
LOCTEXT("RuntimeGeneralSettingsName",
"General"),
LOCTEXT("RuntimeGeneralSettingsDescription",
"Base configuration for Open Pype Module"),
Settings
);
// Register the save handler for the settings; it can be used to validate them
// or to react to settings changes.
if (SettingsSection.IsValid())
{
SettingsSection->OnModified().BindRaw(this, &FOpenPypeModule::HandleSettingsSaved);
}
}
bool FOpenPypeModule::HandleSettingsSaved()
{
UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
bool ResaveSettings = false;
// You can put any validation code in here and resave the settings in case an invalid
// value has been entered
if (ResaveSettings)
{
Settings->SaveConfig();
}
return true;
}
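As a hedged illustration of the validation hook mentioned in the comments above, the handler body could host a rule like the following (the checked property is purely hypothetical - the shipped UOpenPypeSettings only exposes read-only getters):
	// Sketch only: how a validation rule could look if UOpenPypeSettings
	// gained a writable "ThumbnailSize" int property (hypothetical member).
	bool FOpenPypeModule::HandleSettingsSaved()
	{
		UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
		bool ResaveSettings = false;
		// if (Settings->ThumbnailSize <= 0)      // hypothetical property
		// {
		//     Settings->ThumbnailSize = 256;     // clamp to a sane default
		//     ResaveSettings = true;
		// }
		if (ResaveSettings)
		{
			Settings->SaveConfig();
		}
		return true;
	}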
void FOpenPypeModule::RegisterMenus()
{
// Owner will be used for cleanup in call to UToolMenus::UnregisterOwner
@ -64,7 +115,8 @@ void FOpenPypeModule::RegisterMenus()
{
FToolMenuSection& Section = ToolbarMenu->FindOrAddSection("PluginTools");
{
FToolMenuEntry& Entry = Section.AddEntry(FToolMenuEntry::InitToolBarButton(FOpenPypeCommands::Get().OpenPypeTools));
FToolMenuEntry& Entry = Section.AddEntry(
FToolMenuEntry::InitToolBarButton(FOpenPypeCommands::Get().OpenPypeTools));
Entry.SetCommandList(PluginCommands);
}
}
@ -72,12 +124,14 @@ void FOpenPypeModule::RegisterMenus()
}
void FOpenPypeModule::MenuPopup() {
void FOpenPypeModule::MenuPopup()
{
UOpenPypePythonBridge* bridge = UOpenPypePythonBridge::Get();
bridge->RunInPython_Popup();
}
void FOpenPypeModule::MenuDialog() {
void FOpenPypeModule::MenuDialog()
{
UOpenPypePythonBridge* bridge = UOpenPypePythonBridge::Get();
bridge->RunInPython_Dialog();
}

View file

@ -1,4 +1,6 @@
#include "OpenPypeLib.h"
#include "AssetViewUtils.h"
#include "Misc/Paths.h"
#include "Misc/ConfigCacheIni.h"
#include "UObject/UnrealType.h"
@ -10,21 +12,23 @@
* @warning This color will appear only after Editor restart. Is there a better way?
*/
void UOpenPypeLib::CSetFolderColor(FString FolderPath, FLinearColor FolderColor, bool bForceAdd)
bool UOpenPypeLib::SetFolderColor(const FString& FolderPath, const FLinearColor& FolderColor, const bool& bForceAdd)
{
auto SaveColorInternal = [](FString InPath, FLinearColor InFolderColor)
if (AssetViewUtils::DoesFolderExist(FolderPath))
{
// Saves the color of the folder to the config
if (FPaths::FileExists(GEditorPerProjectIni))
{
GConfig->SetString(TEXT("PathColor"), *InPath, *InFolderColor.ToString(), GEditorPerProjectIni);
}
const TSharedPtr<FLinearColor> LinearColor = MakeShared<FLinearColor>(FolderColor);
};
SaveColorInternal(FolderPath, FolderColor);
AssetViewUtils::SaveColor(FolderPath, LinearColor, true);
UE_LOG(LogAssetData, Display, TEXT("A color {%s} has been set to folder \"%s\""), *LinearColor->ToString(),
*FolderPath)
return true;
}
UE_LOG(LogAssetData, Display, TEXT("Setting a color {%s} to folder \"%s\" has failed! Directory doesn't exist!"),
*FolderColor.ToString(), *FolderPath)
return false;
}
/**
* Returns all properties on the given object
* @param cls - class

View file

@ -4,8 +4,11 @@
#include "AssetRegistry/AssetRegistryModule.h"
#include "AssetToolsModule.h"
#include "Framework/Notifications/NotificationManager.h"
#include "OpenPypeLib.h"
#include "OpenPypeSettings.h"
#include "Widgets/Notifications/SNotificationList.h"
//Moves all the invalid pointers to the end to prepare them for the shrinking
#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \
VAR.Shrink();
@ -16,8 +19,11 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj
const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<
FAssetRegistryModule>("AssetRegistry");
const FPropertyEditorModule& PropertyEditorModule = FModuleManager::LoadModuleChecked<FPropertyEditorModule>(
"PropertyEditor");
FString Left, Right;
GetPathName().Split(GetName(), &Left, &Right);
GetPathName().Split("/" + GetName(), &Left, &Right);
FARFilter Filter;
Filter.PackagePaths.Emplace(FName(Left));
@ -35,14 +41,16 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj
AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved);
AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated);
#ifdef WITH_EDITOR
ColorOpenPypeDirs();
#endif
}
void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData)
{
TArray<FString> split;
const TObjectPtr<UObject> Asset = InAssetData.GetAsset();
UObject* Asset = InAssetData.GetAsset();
if (!IsValid(Asset))
{
@ -58,7 +66,7 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData)
if (AssetDataInternal.Emplace(Asset).IsValidId())
{
UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"),
*this->GetName(), *Asset->GetName());
*this->GetName(), *Asset->GetName());
}
}
}
@ -86,7 +94,7 @@ void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData)
REMOVE_INVALID_ENTRIES(AssetDataExternal);
}
bool UOpenPypePublishInstance::IsUnderSameDir(const TObjectPtr<UObject>& InAsset) const
bool UOpenPypePublishInstance::IsUnderSameDir(const UObject* InAsset) const
{
FString ThisLeft, ThisRight;
this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight);
@ -96,6 +104,48 @@ bool UOpenPypePublishInstance::IsUnderSameDir(const TObjectPtr<UObject>& InAsset
#ifdef WITH_EDITOR
void UOpenPypePublishInstance::ColorOpenPypeDirs()
{
FString PathName = this->GetPathName();
//Check whether the path contains the defined OpenPype folder
if (!PathName.Contains(TEXT("OpenPype"))) return;
//Get the base path for OpenPype
FString PathLeft, PathRight;
PathName.Split(FString("OpenPype"), &PathLeft, &PathRight);
if (PathLeft.IsEmpty() || PathRight.IsEmpty())
{
UE_LOG(LogAssetData, Error, TEXT("Failed to retrieve the base OpenPype directory!"))
return;
}
PathName.RemoveFromEnd(PathRight, ESearchCase::CaseSensitive);
//Get the current settings
const UOpenPypeSettings* Settings = GetMutableDefault<UOpenPypeSettings>();
//Color the base folder
UOpenPypeLib::SetFolderColor(PathName, Settings->GetFolderFColor(), false);
//Get Sub paths, iterate through them and color them according to the folder color in UOpenPypeSettings
const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>(
"AssetRegistry");
TArray<FString> PathList;
AssetRegistryModule.Get().GetSubPaths(PathName, PathList, true);
if (PathList.Num() > 0)
{
for (const FString& Path : PathList)
{
UOpenPypeLib::SetFolderColor(Path, Settings->GetFolderFColor(), false);
}
}
}
void UOpenPypePublishInstance::SendNotification(const FString& Text) const
{
FNotificationInfo Info{FText::FromString(Text)};
@ -125,16 +175,15 @@ void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& Pro
PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED(
UOpenPypePublishInstance, AssetDataExternal))
{
// Check for duplicated assets
for (const auto& Asset : AssetDataInternal)
{
if (AssetDataExternal.Contains(Asset))
{
AssetDataExternal.Remove(Asset);
return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!");
return SendNotification(
"You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!");
}
}
// Check if no UOpenPypePublishInstance type assets are included

View file

@ -0,0 +1,21 @@
// Fill out your copyright notice in the Description page of Project Settings.
#include "OpenPypeSettings.h"
#include "Interfaces/IPluginManager.h"
#include "UObject/UObjectGlobals.h"
/**
* Mainly used to initialize default values when the DefaultOpenPypeSettings.ini file does not exist in the saved config
*/
UOpenPypeSettings::UOpenPypeSettings(const FObjectInitializer& ObjectInitializer)
{
const FString ConfigFilePath = OPENPYPE_SETTINGS_FILEPATH;
// This should probably be set via the UE reflection system in the future
FColor Color;
GConfig->GetColor(TEXT("/Script/OpenPype.OpenPypeSettings"), TEXT("FolderColor"), Color, ConfigFilePath);
FolderColor = Color;
}

View file

@ -14,6 +14,8 @@ public:
private:
void RegisterMenus();
void RegisterSettings();
bool HandleSettingsSaved();
void MenuPopup();
void MenuDialog();

View file

@ -5,14 +5,14 @@
UCLASS(Blueprintable)
class OPENPYPE_API UOpenPypeLib : public UObject
class OPENPYPE_API UOpenPypeLib : public UBlueprintFunctionLibrary
{
GENERATED_BODY()
public:
UFUNCTION(BlueprintCallable, Category = Python)
static void CSetFolderColor(FString FolderPath, FLinearColor FolderColor, bool bForceAdd);
static bool SetFolderColor(const FString& FolderPath, const FLinearColor& FolderColor, const bool& bForceAdd);
UFUNCTION(BlueprintCallable, Category = Python)
static TArray<FString> GetAllProperties(UClass* cls);

View file

@ -8,7 +8,9 @@ UCLASS(Blueprintable)
class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset
{
GENERATED_UCLASS_BODY()
public:
/**
/**
* Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is
* placed in)
@ -55,7 +57,9 @@ public:
UFUNCTION(BlueprintCallable, BlueprintPure)
TSet<UObject*> GetAllAssets() const
{
const TSet<TSoftObjectPtr<UObject>>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal;
const TSet<TSoftObjectPtr<UObject>>& IteratedSet = bAddExternalAssets
? AssetDataInternal.Union(AssetDataExternal)
: AssetDataInternal;
//Create a new TSet only with raw pointers.
TSet<UObject*> ResultSet;
@ -71,24 +75,26 @@ private:
TSet<TSoftObjectPtr<UObject>> AssetDataInternal;
/**
* This property allows the instance to include other assets from any other directory than what it's currently
* monitoring.
* @attention assets have to be added manually! They are not automatically registered or added!
* This property allows exposing the array to include other assets from any other directory than what it's currently
* monitoring. NOTE: these assets have to be added manually! They are not automatically registered or added!
*/
UPROPERTY(EditAnywhere, Category="Assets")
UPROPERTY(EditAnywhere, Category = "Assets")
bool bAddExternalAssets = false;
UPROPERTY(EditAnywhere, Category="Assets", meta=(EditCondition="bAddExternalAssets"))
UPROPERTY(EditAnywhere, meta=(EditCondition="bAddExternalAssets"), Category="Assets")
TSet<TSoftObjectPtr<UObject>> AssetDataExternal;
void OnAssetCreated(const FAssetData& InAssetData);
void OnAssetRemoved(const FAssetData& InAssetData);
void OnAssetUpdated(const FAssetData& InAssetData);
bool IsUnderSameDir(const TObjectPtr<UObject>& InAsset) const;
bool IsUnderSameDir(const UObject* InAsset) const;
#ifdef WITH_EDITOR
void ColorOpenPypeDirs();
void SendNotification(const FString& Text) const;
virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override;

View file

@ -0,0 +1,32 @@
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "UObject/Object.h"
#include "OpenPypeSettings.generated.h"
#define OPENPYPE_SETTINGS_FILEPATH IPluginManager::Get().FindPlugin("OpenPype")->GetBaseDir() / TEXT("Config") / TEXT("DefaultOpenPypeSettings.ini")
UCLASS(Config=OpenPypeSettings, DefaultConfig)
class OPENPYPE_API UOpenPypeSettings : public UObject
{
GENERATED_UCLASS_BODY()
UFUNCTION(BlueprintCallable, BlueprintPure, Category = Settings)
FColor GetFolderFColor() const
{
return FolderColor;
}
UFUNCTION(BlueprintCallable, BlueprintPure, Category = Settings)
FLinearColor GetFolderFLinearColor() const
{
return FLinearColor(FolderColor);
}
protected:
UPROPERTY(config, EditAnywhere, Category = Folders)
FColor FolderColor = FColor(25,45,223);
};

Some files were not shown because too many files have changed in this diff.