mirror of
https://github.com/ynput/ayon-core.git
synced 2026-01-01 16:34:53 +01:00
Merged in feature/PYPE-589-publishing-gizmos-and-importing (pull request #383)
[DRAFT] - Publish and Load workflow for Gizmo family Approved-by: Milan Kolar <milan@orbi.tools>
This commit is contained in:
commit
a884b18536
17 changed files with 974 additions and 30 deletions
|
|
@ -1,16 +1,50 @@
|
|||
from avalon.nuke.pipeline import Creator
|
||||
|
||||
from avalon.nuke import lib as anlib
|
||||
import nuke
|
||||
|
||||
class CreateBackdrop(Creator):
    """Add Publishable Backdrop.

    Creates a backdrop node around the stored node selection (or an
    empty one when no selection is requested) and imprints it with
    Avalon instance data so it can be published as "nukenodes".
    """

    # NOTE(review): the original defined name/label/family/icon twice;
    # only the second assignment of each took effect, so the dead first
    # set ("backdrop" / "Backdrop" / "group" / "cube") was removed.
    name = "nukenodes"
    label = "Create Backdrop"
    family = "nukenodes"
    icon = "file-archive-o"
    defaults = ["Main"]

    def __init__(self, *args, **kwargs):
        super(CreateBackdrop, self).__init__(*args, **kwargs)

        # snapshot of the selection at creator-instantiation time
        self.nodes = nuke.selectedNodes()
        # yellow-ish tile color used for publishable backdrops
        self.node_color = "0xdfea5dff"

    def _create_backdrop_instance(self):
        """Create the backdrop, style it and imprint Avalon data.

        ``autoBackdrop`` wraps whatever is currently selected, so the
        caller is responsible for setting the selection beforehand.

        Returns:
            The imprinted backdrop instance from ``anlib.imprint``.
        """
        from nukescripts import autoBackdrop

        bckd_node = autoBackdrop()
        bckd_node["name"].setValue("{}_BDN".format(self.name))
        bckd_node["tile_color"].setValue(int(self.node_color, 16))
        bckd_node["note_font_size"].setValue(24)
        bckd_node["label"].setValue("[{}]".format(self.name))

        # add avalon knobs
        return anlib.imprint(bckd_node, self.data)

    def process(self):
        nodes = list()
        if (self.options or {}).get("useSelection"):
            nodes = self.nodes

            if len(nodes) >= 1:
                # wrap the stored selection into the new backdrop
                anlib.select_nodes(nodes)
                return self._create_backdrop_instance()
            else:
                nuke.message("Please select nodes you "
                             "wish to add to a container")
                return
        else:
            # no selection requested - backdrop is created around
            # whatever happens to be selected (original behavior)
            return self._create_backdrop_instance()
|
|
|||
79
pype/plugins/nuke/create/create_gizmo.py
Normal file
79
pype/plugins/nuke/create/create_gizmo.py
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
from avalon.nuke.pipeline import Creator
|
||||
from avalon.nuke import lib as anlib
|
||||
import nuke
|
||||
import nukescripts
|
||||
|
||||
class CreateGizmo(Creator):
    """Add Publishable "gizmo" group.

    The name is symbolically gizmo as presumably it is something
    familiar to nuke users as group of nodes distributed downstream
    in workflow.

    Behavior by selection size (when "useSelection" is on):
    - exactly one node: it must already be a Group; it is renamed,
      recolored and imprinted,
    - two or more nodes: they are collapsed into a new Group,
    - nothing selected: the user is asked to select nodes.
    Without "useSelection" an empty Group with a guide note is created.
    """

    name = "gizmo"
    label = "Gizmo"
    family = "gizmo"
    icon = "file-archive-o"
    defaults = ["ViewerInput", "Lut", "Effect"]

    def __init__(self, *args, **kwargs):
        super(CreateGizmo, self).__init__(*args, **kwargs)
        # snapshot of the selection at creator-instantiation time
        self.nodes = nuke.selectedNodes()
        # purple tile color used for gizmo groups
        self.node_color = "0x7533c1ff"

    def process(self):
        if (self.options or {}).get("useSelection"):
            nodes = self.nodes
            self.log.info(len(nodes))

            if len(nodes) == 1:
                anlib.select_nodes(nodes)
                node = nodes[-1]
                # check if Group node
                # FIX: original used `node.Class() in "Group"` which is a
                # substring test; exact comparison is intended here
                if node.Class() == "Group":
                    node["name"].setValue("{}_GZM".format(self.name))
                    node["tile_color"].setValue(int(self.node_color, 16))
                    return anlib.imprint(node, self.data)
                else:
                    nuke.message("Please select a group node "
                                 "you wish to publish as the gizmo")
                    return
            elif len(nodes) >= 2:
                # FIX: original used a plain `if` here, so after the
                # single-node branch showed its message the trailing
                # `else` also fired a second, misleading message
                anlib.select_nodes(nodes)
                nuke.makeGroup()
                gizmo_node = nuke.selectedNode()
                gizmo_node["name"].setValue("{}_GZM".format(self.name))
                gizmo_node["tile_color"].setValue(int(self.node_color, 16))

                # add sticky node with guide
                with gizmo_node:
                    sticky = nuke.createNode("StickyNote")
                    sticky["label"].setValue(
                        "Add following:\n- set Input"
                        " nodes\n- set one Output1\n"
                        "- create User knobs on the group")

                # add avalon knobs
                return anlib.imprint(gizmo_node, self.data)
            else:
                nuke.message("Please select nodes you "
                             "wish to add to the gizmo")
                return
        else:
            with anlib.maintained_selection():
                gizmo_node = nuke.createNode("Group")
                gizmo_node["name"].setValue("{}_GZM".format(self.name))
                gizmo_node["tile_color"].setValue(int(self.node_color, 16))

                # add sticky node with guide
                with gizmo_node:
                    sticky = nuke.createNode("StickyNote")
                    sticky["label"].setValue(
                        "Add following:\n- add Input"
                        " nodes\n- add one Output1\n"
                        "- create User knobs on the group")

                # add avalon knobs
                return anlib.imprint(gizmo_node, self.data)
|
@ -24,8 +24,6 @@ class CreateWriteRender(plugin.PypeCreator):
|
|||
def __init__(self, *args, **kwargs):
|
||||
super(CreateWriteRender, self).__init__(*args, **kwargs)
|
||||
|
||||
self.name = self.data["subset"]
|
||||
|
||||
data = OrderedDict()
|
||||
|
||||
data["family"] = self.family
|
||||
|
|
|
|||
239
pype/plugins/nuke/load/load_gizmo_ip.py
Normal file
239
pype/plugins/nuke/load/load_gizmo_ip.py
Normal file
|
|
@ -0,0 +1,239 @@
|
|||
from avalon import api, style, io
|
||||
import nuke
|
||||
from pype.nuke import lib as pnlib
|
||||
from avalon.nuke import lib as anlib
|
||||
from avalon.nuke import containerise, update_container
|
||||
|
||||
|
||||
class LoadGizmoInputProcess(api.Loader):
    """Loading colorspace soft effect exported from nukestudio.

    Pastes a published gizmo (.nk) group into the script and attaches
    it as the Input Process of the active Viewer1 node.
    """

    # loader filtering
    representations = ["gizmo"]
    families = ["gizmo"]

    # UI presentation
    label = "Load Gizmo - Input Process"
    order = 0
    icon = "eye"
    color = style.colors.alert
    # purple tile color applied when the loaded version is the latest
    node_color = "0x7533c1ff"

    def load(self, context, name, namespace, data):
        """
        Loading function to get Gizmo as Input Process on viewer

        Arguments:
            context (dict): context of version
            name (str): name of the version
            namespace (str): asset name
            data (dict): compulsory attribute > not used

        Returns:
            nuke node: containerised nuke node object
        """

        # get main variables
        version = context['version']
        version_data = version.get("data", {})
        vname = version.get("name", None)
        first = version_data.get("frameStart", None)
        last = version_data.get("frameEnd", None)
        namespace = namespace or context['asset']['name']
        colorspace = version_data.get("colorspace", None)
        object_name = "{}_{}".format(name, namespace)

        # prepare data for imprinting
        # add additional metadata from the version to imprint to Avalon knob
        # NOTE(review): these keys are read with version_data[k] below and
        # will raise KeyError if missing from the version document
        add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
                    "source", "author", "fps"]

        data_imprint = {"frameStart": first,
                        "frameEnd": last,
                        "version": vname,
                        "colorspaceInput": colorspace,
                        "objectName": object_name}

        for k in add_keys:
            data_imprint.update({k: version_data[k]})

        # getting file path
        # ("file" shadows the py2 builtin, kept for byte-compatibility)
        file = self.fname.replace("\\", "/")

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        with anlib.maintained_selection():
            # add group from nk; nodePaste leaves the pasted node selected
            nuke.nodePaste(file)

            GN = nuke.selectedNode()

            GN["name"].setValue(object_name)

            # try to place it under Viewer1; bail out (and clean up the
            # pasted node) when no viewer exists
            if not self.connect_active_viewer(GN):
                nuke.delete(GN)
                return

            return containerise(
                node=GN,
                name=name,
                namespace=namespace,
                context=context,
                loader=self.__class__.__name__,
                data=data_imprint)

    def update(self, container, representation):
        """Update the Loader's path

        Nuke automatically tries to reset some variables when changing
        the loader's path to a new file. These automatic changes are to its
        inputs:

        """

        # get main variables
        # Get version from io
        version = io.find_one({
            "type": "version",
            "_id": representation["parent"]
        })
        # get corresponding node
        GN = nuke.toNode(container['objectName'])

        file = api.get_representation_path(representation).replace("\\", "/")
        # NOTE(review): `context` is collected but never used below
        context = representation["context"]
        name = container['name']
        version_data = version.get("data", {})
        vname = version.get("name", None)
        first = version_data.get("frameStart", None)
        last = version_data.get("frameEnd", None)
        namespace = container['namespace']
        colorspace = version_data.get("colorspace", None)
        object_name = "{}_{}".format(name, namespace)

        # keys read with version_data[k] below (KeyError when absent)
        add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
                    "source", "author", "fps"]

        data_imprint = {"representation": str(representation["_id"]),
                        "frameStart": first,
                        "frameEnd": last,
                        "version": vname,
                        "colorspaceInput": colorspace,
                        "objectName": object_name}

        for k in add_keys:
            data_imprint.update({k: version_data[k]})

        # adding nodes to node graph
        # just in case we are in group lets jump out of it
        nuke.endGroup()

        with anlib.maintained_selection():
            # remember placement and avalon metadata, then replace the
            # old group with a freshly pasted one from the new file
            xpos = GN.xpos()
            ypos = GN.ypos()
            avalon_data = anlib.get_avalon_knob_data(GN)
            nuke.delete(GN)
            # add group from nk
            nuke.nodePaste(file)

            GN = nuke.selectedNode()
            anlib.set_avalon_knob_data(GN, avalon_data)
            GN.setXYpos(xpos, ypos)
            GN["name"].setValue(object_name)

        # get all versions in list
        versions = io.find({
            "type": "version",
            "parent": version["parent"]
        }).distinct('name')

        max_version = max(versions)

        # change color of node: orange-ish when an outdated version is
        # loaded, the loader's own color when it is the latest
        if version.get("name") not in [max_version]:
            GN["tile_color"].setValue(int("0xd88467ff", 16))
        else:
            GN["tile_color"].setValue(int(self.node_color, 16))

        self.log.info("udated to version: {}".format(version.get("name")))

        return update_container(GN, data_imprint)

    def connect_active_viewer(self, group_node):
        """
        Finds Active viewer and
        place the node under it, also adds
        name of group into Input Process of the viewer

        Arguments:
            group_node (nuke node): nuke group node object

        Returns:
            True on success, None when no Viewer1 node exists.
        """
        group_node_name = group_node["name"].value()

        viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()]
        if len(viewer) > 0:
            viewer = viewer[0]
        else:
            self.log.error("Please create Viewer node before you "
                           "run this action again")
            return None

        # get coordinates of Viewer1
        xpos = viewer["xpos"].value()
        ypos = viewer["ypos"].value()

        # push the viewer down to make room for the group above it
        ypos += 150

        viewer["ypos"].setValue(ypos)

        # set coordinates to group node
        group_node["xpos"].setValue(xpos)
        group_node["ypos"].setValue(ypos + 50)

        # add group node name to Viewer Input Process
        viewer["input_process_node"].setValue(group_node_name)

        # put backdrop under
        pnlib.create_backdrop(label="Input Process", layer=2,
                              nodes=[viewer, group_node], color="0x7c7faaff")

        return True

    def get_item(self, data, trackIndex, subTrackIndex):
        # NOTE(review): appears to be carried over from a track-based
        # loader; nothing in this class calls it
        return {key: val for key, val in data.items()
                if subTrackIndex == val["subTrackIndex"]
                if trackIndex == val["trackIndex"]}

    def byteify(self, input):
        """
        Converts unicode strings to strings
        It goes through all dictionary

        NOTE(review): Python 2 only - relies on dict.iteritems() and the
        unicode builtin, both removed in Python 3.

        Arguments:
            input (dict/str): input

        Returns:
            dict: with fixed values and keys

        """

        if isinstance(input, dict):
            return {self.byteify(key): self.byteify(value)
                    for key, value in input.iteritems()}
        elif isinstance(input, list):
            return [self.byteify(element) for element in input]
        elif isinstance(input, unicode):
            return input.encode('utf-8')
        else:
            return input

    def switch(self, container, representation):
        # switching a representation is the same operation as updating
        self.update(container, representation)

    def remove(self, container):
        # delete the containerised group with viewer refresh suppressed
        from avalon.nuke import viewer_update_and_undo_stop
        node = nuke.toNode(container['objectName'])
        with viewer_update_and_undo_stop():
            nuke.delete(node)
|
|
@ -7,7 +7,7 @@ class LinkAsGroup(api.Loader):
|
|||
"""Copy the published file to be pasted at the desired location"""
|
||||
|
||||
representations = ["nk"]
|
||||
families = ["workfile"]
|
||||
families = ["workfile", "nukenodes"]
|
||||
|
||||
label = "Load Precomp"
|
||||
order = 0
|
||||
|
|
@ -63,8 +63,6 @@ class LinkAsGroup(api.Loader):
|
|||
colorspace = context["version"]["data"].get("colorspace", None)
|
||||
self.log.info("colorspace: {}\n".format(colorspace))
|
||||
|
||||
# ['version', 'file', 'reading', 'output', 'useOutput']
|
||||
|
||||
P["name"].setValue("{}_{}".format(name, namespace))
|
||||
P["useOutput"].setValue(True)
|
||||
|
||||
|
|
@ -74,14 +72,15 @@ class LinkAsGroup(api.Loader):
|
|||
if n.Class() == "Group"
|
||||
if get_avalon_knob_data(n)]
|
||||
|
||||
# create panel for selecting output
|
||||
panel_choices = " ".join(writes)
|
||||
panel_label = "Select write node for output"
|
||||
p = nuke.Panel("Select Write Node")
|
||||
p.addEnumerationPulldown(
|
||||
panel_label, panel_choices)
|
||||
p.show()
|
||||
P["output"].setValue(p.value(panel_label))
|
||||
if writes:
|
||||
# create panel for selecting output
|
||||
panel_choices = " ".join(writes)
|
||||
panel_label = "Select write node for output"
|
||||
p = nuke.Panel("Select Write Node")
|
||||
p.addEnumerationPulldown(
|
||||
panel_label, panel_choices)
|
||||
p.show()
|
||||
P["output"].setValue(p.value(panel_label))
|
||||
|
||||
P["tile_color"].setValue(0xff0ff0ff)
|
||||
|
||||
|
|
|
|||
83
pype/plugins/nuke/publish/collect_backdrop.py
Normal file
83
pype/plugins/nuke/publish/collect_backdrop.py
Normal file
|
|
@ -0,0 +1,83 @@
|
|||
import pyblish.api
|
||||
import pype.api as pype
|
||||
from pype.nuke import lib as pnlib
|
||||
import nuke
|
||||
|
||||
@pyblish.api.log
class CollectBackdrops(pyblish.api.InstancePlugin):
    """Collect a Backdrop node instance together with the nodes it
    encloses, and fill in the instance's publish/version data."""

    order = pyblish.api.CollectorOrder + 0.22
    label = "Collect Backdrop"
    hosts = ["nuke"]
    families = ["nukenodes"]

    def process(self, instance):
        backdrop = instance[0]

        # bounding box of the backdrop in node-graph coordinates
        x_min = backdrop.xpos()
        y_min = backdrop.ypos()
        x_max = x_min + backdrop['bdwidth'].value()
        y_max = y_min + backdrop['bdheight'].value()

        for candidate in nuke.allNodes():
            # viewers are never part of the published content
            if candidate.Class() == "Viewer":
                continue

            fits_horizontally = (
                candidate.xpos() > x_min
                and candidate.xpos() + candidate.screenWidth() < x_max)
            fits_vertically = (
                candidate.ypos() > y_min
                and candidate.ypos() + candidate.screenHeight() < y_max)

            # every node fully enclosed by the backdrop becomes content
            if fits_horizontally and fits_vertically:
                instance.append(candidate)

        # resolve connections that cross the backdrop boundary
        content_nodes = instance[1:]
        connections_in, connections_out = \
            pnlib.get_dependent_nodes(content_nodes)
        instance.data["connections_in"] = connections_in
        instance.data["connections_out"] = connections_out

        # make label nicer
        instance.data["label"] = "{0} ({1} nodes)".format(
            backdrop.name(), len(instance) - 1)

        instance.data["families"].append(instance.data["family"])

        # frame range and handles from the current context / script root
        context_data = instance.context.data
        handle_start = context_data["handleStart"]
        handle_end = context_data["handleEnd"]
        root = nuke.root()
        first_frame = int(root["first_frame"].getValue())
        last_frame = int(root["last_frame"].getValue())

        # workfile version parsed from the script path
        version = pype.get_version_from_path(root.name())
        instance.data['version'] = version

        # version metadata published alongside the representation
        version_data = {
            "handles": handle_start,
            "handleStart": handle_start,
            "handleEnd": handle_end,
            "frameStart": first_frame + handle_start,
            "frameEnd": last_frame - handle_end,
            "version": int(version),
            "families": [instance.data["family"]] + instance.data["families"],
            "subset": instance.data["subset"],
            "fps": context_data["fps"]
        }

        instance.data["versionData"] = version_data
        instance.data["frameStart"] = first_frame
        instance.data["frameEnd"] = last_frame

        self.log.info("Backdrop content collected: `{}`".format(instance[:]))
        self.log.info("Backdrop instance collected: `{}`".format(instance))
||||
56
pype/plugins/nuke/publish/collect_gizmo.py
Normal file
56
pype/plugins/nuke/publish/collect_gizmo.py
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
import pyblish.api
|
||||
import pype.api as pype
|
||||
import nuke
|
||||
|
||||
|
||||
@pyblish.api.log
class CollectGizmo(pyblish.api.InstancePlugin):
    """Collect a Gizmo (Group) node instance and fill in its
    publish/version data."""

    order = pyblish.api.CollectorOrder + 0.22
    label = "Collect Gizmo (Group)"
    hosts = ["nuke"]
    families = ["gizmo"]

    def process(self, instance):
        group_node = instance[0]

        # promote the instance family to the front of its families list
        instance.data["families"].insert(0, instance.data["family"])

        # make label nicer
        instance.data["label"] = "{0} ({1} nodes)".format(
            group_node.name(), len(instance) - 1)

        # frame range and handles from the current context / script root
        context_data = instance.context.data
        handle_start = context_data["handleStart"]
        handle_end = context_data["handleEnd"]
        root = nuke.root()
        first_frame = int(root["first_frame"].getValue())
        last_frame = int(root["last_frame"].getValue())

        # workfile version parsed from the script path
        version = pype.get_version_from_path(root.name())
        instance.data['version'] = version

        # version metadata published alongside the representation
        version_data = {
            "handles": handle_start,
            "handleStart": handle_start,
            "handleEnd": handle_end,
            "frameStart": first_frame + handle_start,
            "frameEnd": last_frame - handle_end,
            "colorspace": root.knob('workingSpaceLUT').value(),
            "version": int(version),
            "families": [instance.data["family"]] + instance.data["families"],
            "subset": instance.data["subset"],
            "fps": context_data["fps"]
        }

        instance.data["versionData"] = version_data
        instance.data["frameStart"] = first_frame
        instance.data["frameEnd"] = last_frame

        self.log.info("Gizmo content collected: `{}`".format(instance[:]))
        self.log.info("Gizmo instance collected: `{}`".format(instance))
||||
|
|
@ -21,7 +21,6 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
|
|||
|
||||
self.log.debug("asset_data: {}".format(asset_data["data"]))
|
||||
instances = []
|
||||
# creating instances per write node
|
||||
|
||||
self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes()))
|
||||
for node in nuke.allNodes():
|
||||
|
|
@ -45,6 +44,14 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
|
|||
if avalon_knob_data["id"] != "pyblish.avalon.instance":
|
||||
continue
|
||||
|
||||
# establish families
|
||||
family = avalon_knob_data["family"]
|
||||
families = list()
|
||||
|
||||
# except disabled nodes but exclude backdrops in test
|
||||
if ("nukenodes" not in family) and (node["disable"].value()):
|
||||
continue
|
||||
|
||||
subset = avalon_knob_data.get(
|
||||
"subset", None) or node["name"].value()
|
||||
|
||||
|
|
@ -54,6 +61,23 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
|
|||
|
||||
# Add all nodes in group instances.
|
||||
if node.Class() == "Group":
|
||||
# only alter families for render family
|
||||
if ("render" in family):
|
||||
# check if node is not disabled
|
||||
families.append(avalon_knob_data["families"])
|
||||
if node["render"].value():
|
||||
self.log.info("flagged for render")
|
||||
add_family = "render.local"
|
||||
# dealing with local/farm rendering
|
||||
if node["render_farm"].value():
|
||||
self.log.info("adding render farm family")
|
||||
add_family = "render.farm"
|
||||
instance.data["transfer"] = False
|
||||
families.append(add_family)
|
||||
else:
|
||||
# add family into families
|
||||
families.insert(0, family)
|
||||
|
||||
node.begin()
|
||||
for i in nuke.allNodes():
|
||||
instance.append(i)
|
||||
|
|
@ -61,7 +85,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
|
|||
|
||||
family = avalon_knob_data["family"]
|
||||
families = [avalon_knob_data["families"]]
|
||||
|
||||
|
||||
if node.Class() not in "Read":
|
||||
if node["render"].value():
|
||||
self.log.info("flagged for render")
|
||||
|
|
|
|||
|
|
@ -24,7 +24,8 @@ class CollectWriteLegacy(pyblish.api.InstancePlugin):
|
|||
self.log.info("render")
|
||||
return
|
||||
|
||||
instance.data.update(
|
||||
{"family": "write.legacy",
|
||||
"families": []}
|
||||
)
|
||||
if "render" in node.knobs():
|
||||
instance.data.update(
|
||||
{"family": "write.legacy",
|
||||
"families": []}
|
||||
)
|
||||
|
|
|
|||
103
pype/plugins/nuke/publish/extract_backdrop.py
Normal file
103
pype/plugins/nuke/publish/extract_backdrop.py
Normal file
|
|
@ -0,0 +1,103 @@
|
|||
import pyblish.api
|
||||
from avalon.nuke import lib as anlib
|
||||
from pype.nuke import lib as pnlib
|
||||
import nuke
|
||||
import os
|
||||
import pype
|
||||
reload(pnlib)
|
||||
|
||||
class ExtractBackdropNode(pype.api.Extractor):
    """Extracting content of backdrop nodes

    Will create nuke script only with containing nodes.
    Also it will solve Input and Output nodes.

    """

    order = pyblish.api.ExtractorOrder
    label = "Extract Backdrop"
    hosts = ["nuke"]
    families = ["nukenodes"]

    def process(self, instance):
        # temporary Input/Output nodes created below; deleted after export
        tmp_nodes = list()
        # instance[0] is the backdrop itself; its content nodes follow
        nodes = instance[1:]
        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = "{0}.nk".format(instance.name)
        path = os.path.join(stagingdir, filename)

        # maintain selection
        with anlib.maintained_selection():
            # all connections outside of backdrop
            # (gathered earlier by the CollectBackdrops plugin)
            connections_in = instance.data["connections_in"]
            connections_out = instance.data["connections_out"]
            self.log.debug("_ connections_in: `{}`".format(connections_in))
            self.log.debug("_ connections_out: `{}`".format(connections_out))

            # create input nodes and name them as passing node (*_INP)
            # NOTE(review): assumes connections_in maps node -> iterable of
            # (input_index, upstream_node) pairs; confirm against
            # pnlib.get_dependent_nodes ("input" also shadows the builtin)
            for n, inputs in connections_in.items():
                for i, input in inputs:
                    inpn = nuke.createNode("Input")
                    inpn["name"].setValue("{}_{}_INP".format(n.name(), i))
                    n.setInput(i, inpn)
                    inpn.setXYpos(input.xpos(), input.ypos())
                    nodes.append(inpn)
                    tmp_nodes.append(inpn)

            anlib.reset_selection()

            # connect output node
            # NOTE(review): assumes connections_out maps inner node ->
            # single downstream node; the `next(...)` picks the downstream
            # input whose dependency name matches, defaulting to input 0
            for n, output in connections_out.items():
                opn = nuke.createNode("Output")
                self.log.info(n.name())
                self.log.info(output.name())
                output.setInput(
                    next((i for i, d in enumerate(output.dependencies())
                          if d.name() in n.name()), 0), opn)
                opn.setInput(0, n)
                opn.autoplace()
                nodes.append(opn)
                tmp_nodes.append(opn)
                anlib.reset_selection()

            # select nodes to copy
            anlib.reset_selection()
            anlib.select_nodes(nodes)
            # create tmp nk file
            # save file to the path
            nuke.nodeCopy(path)

            # Clean up
            for tn in tmp_nodes:
                nuke.delete(tn)

            # restore original connections
            # NOTE(review): no try/finally - if the export above raises,
            # the script is left with rewired inputs and temp nodes
            # reconnect input node
            for n, inputs in connections_in.items():
                for i, input in inputs:
                    n.setInput(i, input)

            # reconnect output node
            for n, output in connections_out.items():
                output.setInput(
                    next((i for i, d in enumerate(output.dependencies())
                          if d.name() in n.name()), 0), n)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        # create representation
        representation = {
            'name': 'nk',
            'ext': 'nk',
            'files': filename,
            "stagingDir": stagingdir
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance '{}' to: {}".format(
            instance.name, path))

        self.log.info("Data {}".format(
            instance.data))
||||
95
pype/plugins/nuke/publish/extract_gizmo.py
Normal file
95
pype/plugins/nuke/publish/extract_gizmo.py
Normal file
|
|
@ -0,0 +1,95 @@
|
|||
import pyblish.api
|
||||
from avalon.nuke import lib as anlib
|
||||
from pype.nuke import lib as pnlib
|
||||
from pype.nuke import utils as pnutils
|
||||
import nuke
|
||||
import os
|
||||
import pype
|
||||
|
||||
|
||||
class ExtractGizmo(pype.api.Extractor):
    """Extracting Gizmo (Group) node

    Will create nuke script only with the Gizmo node.
    """

    order = pyblish.api.ExtractorOrder
    label = "Extract Gizmo (Group)"
    hosts = ["nuke"]
    families = ["gizmo"]

    def process(self, instance):
        # temporary copies created below; deleted after export
        tmp_nodes = list()
        # instance[0] is the gizmo (Group) node collected for publishing
        orig_grpn = instance[0]
        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = "{0}.nk".format(instance.name)
        path = os.path.join(stagingdir, filename)

        # maintain selection
        with anlib.maintained_selection():
            orig_grpn_name = orig_grpn.name()
            tmp_grpn_name = orig_grpn_name + "_tmp"
            # select original group node
            anlib.select_nodes([orig_grpn])

            # copy to clipboard
            nuke.nodeCopy("%clipboard%")

            # reset selection to none
            anlib.reset_selection()

            # paste clipboard; the pasted node becomes the selection
            nuke.nodePaste("%clipboard%")

            # assign pasted node
            copy_grpn = nuke.selectedNode()
            copy_grpn.setXYpos((orig_grpn.xpos() + 120), orig_grpn.ypos())

            # convert gizmos to groups
            pnutils.bake_gizmos_recursively(copy_grpn)

            # remove avalonknobs so the published copy carries no
            # instance metadata
            knobs = copy_grpn.knobs()
            avalon_knobs = [k for k in knobs.keys()
                            for ak in ["avalon:", "ak:"]
                            if ak in k]
            # NOTE(review): assumes a "publish" knob always exists on the
            # copy; knobs["publish"] raises KeyError otherwise - confirm
            avalon_knobs.append("publish")
            for ak in avalon_knobs:
                copy_grpn.removeKnob(knobs[ak])

            # add to temporary nodes
            tmp_nodes.append(copy_grpn)

            # swap names so the exported copy carries the original name
            # NOTE(review): no try/finally - an exception before the
            # rename below leaves the original node named "*_tmp"
            orig_grpn.setName(tmp_grpn_name)
            copy_grpn.setName(orig_grpn_name)

            # create tmp nk file
            # save file to the path (exports the current selection)
            nuke.nodeCopy(path)

            # Clean up
            for tn in tmp_nodes:
                nuke.delete(tn)

            # rename back to original
            orig_grpn.setName(orig_grpn_name)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        # create representation
        representation = {
            'name': 'gizmo',
            'ext': 'nk',
            'files': filename,
            "stagingDir": stagingdir
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance '{}' to: {}".format(
            instance.name, path))

        self.log.info("Data {}".format(
            instance.data))
||||
69
pype/plugins/nuke/publish/validate_backdrop.py
Normal file
69
pype/plugins/nuke/publish/validate_backdrop.py
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
import pyblish
|
||||
from avalon.nuke import lib as anlib
|
||||
import nuke
|
||||
|
||||
|
||||
class SelectCenterInNodeGraph(pyblish.api.Action):
    """
    Centering failed instance node in node graph
    """

    label = "Center node in node graph"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        """Zoom the node graph onto the failed backdrop node(s)."""

        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (result["error"] is not None and result["instance"] is not None
                    and result["instance"] not in failed):
                failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)

        all_xC = list()
        all_yC = list()

        # maintain selection
        with anlib.maintained_selection():
            # collect all failed nodes' center coordinates
            for instance in instances:
                bdn = instance[0]
                xC = bdn.xpos() + bdn.screenWidth() / 2
                yC = bdn.ypos() + bdn.screenHeight() / 2

                all_xC.append(xC)
                all_yC.append(yC)

        self.log.info("all_xC: `{}`".format(all_xC))
        self.log.info("all_yC: `{}`".format(all_yC))

        # FIX: guard against no matching instances - min() on an empty
        # sequence would raise ValueError
        if not all_xC:
            self.log.info("No failed instances to center on")
            return

        # zoom to nodes in node graph
        nuke.zoom(2, [min(all_xC), min(all_yC)])
||||
|
||||
@pyblish.api.log
class ValidateBackdrop(pyblish.api.InstancePlugin):
    """Validate amount of nodes on backdrop node in case user
    forgoten to add nodes above the publishing backdrop node"""

    order = pyblish.api.ValidatorOrder
    optional = True
    families = ["nukenodes"]
    label = "Validate Backdrop"
    hosts = ["nuke"]
    actions = [SelectCenterInNodeGraph]

    def process(self, instance):
        instance_name = instance.data["name"]
        connections_out = instance.data["connections_out"]

        # the backdrop content may feed at most one downstream node
        assert len(connections_out.keys()) <= 1, (
            "Only one outcoming connection from \"{}\" is allowed".format(
                instance_name))

        # instance[0] is the backdrop itself, so more than one element
        # means it actually encloses some content
        assert len(instance) > 1, (
            "No content on backdrop node: \"{}\"".format(instance_name))
||||
58
pype/plugins/nuke/publish/validate_gizmo.py
Normal file
58
pype/plugins/nuke/publish/validate_gizmo.py
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
import pyblish
|
||||
from avalon.nuke import lib as anlib
|
||||
import nuke
|
||||
|
||||
|
||||
class OpenFailedGroupNode(pyblish.api.Action):
    """Open each failed gizmo group in the node graph."""

    label = "Open Gizmo in Node Graph"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        # gather unique instances that errored during publishing
        failed = []
        for result in context.data["results"]:
            if result["error"] is None:
                continue
            candidate = result["instance"]
            if candidate is None or candidate in failed:
                continue
            failed.append(candidate)

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)

        # keep the user's selection untouched while opening groups
        with anlib.maintained_selection():
            for instance in instances:
                # instance[0] is the gizmo group node
                nuke.showDag(instance[0])
||||
|
||||
@pyblish.api.log
class ValidateGizmo(pyblish.api.InstancePlugin):
    """Validate amount of output nodes in gizmo (group) node"""

    order = pyblish.api.ValidatorOrder
    optional = True
    families = ["gizmo"]
    label = "Validate Gizmo (Group)"
    hosts = ["nuke"]
    actions = [OpenFailedGroupNode]

    def process(self, instance):
        grpn = instance[0]

        with grpn:
            # FIX: the original split each message over two lines without
            # parentheses, so the second string literal was a discarded
            # expression statement, .format() was applied to the orphan
            # string, and the assert messages were truncated/unformatted
            connections_out = nuke.allNodes('Output')
            msg_multiple_outputs = (
                "Only one outcoming connection from "
                "\"{}\" is allowed").format(instance.data["name"])
            assert len(connections_out) <= 1, msg_multiple_outputs

            connections_in = nuke.allNodes('Input')
            msg_missing_inputs = (
                "At least one Input node has to be used in: "
                "\"{}\"").format(instance.data["name"])
            assert len(connections_in) >= 1, msg_missing_inputs
||||
Loading…
Add table
Add a link
Reference in a new issue