Mirror of https://github.com/ynput/ayon-core.git
Create/Validate Deadline tab with ChunkSize.
commit 62bee8eafa (parent ae3496ceea)
4 changed files with 61 additions and 4 deletions
@@ -378,6 +378,14 @@ def add_rendering_knobs(node):
     return node


+def add_deadline_tab(node):
+    node.addKnob(nuke.Tab_Knob("Deadline"))
+
+    knob = nuke.Int_Knob("deadlineChunkSize", "Chunk Size")
+    knob.setValue(1)
+    node.addKnob(knob)
+
+
 def set_viewers_colorspace(viewer):
     ''' Adds correct colorspace to viewer

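For context, this is how the new helper would typically be exercised in a Nuke Python session; a minimal sketch, assuming a Write group already created by pype (the node name "WriteRender_main" is made up for the example):

import nuke
from pype.nuke.lib import add_deadline_tab

# Hypothetical Write group created earlier by the creator plug-in.
group_node = nuke.toNode("WriteRender_main")

add_deadline_tab(group_node)

# The tab and the chunk-size knob now exist on the node; the value defaults to 1.
assert "Deadline" in group_node.knobs()
print(group_node["deadlineChunkSize"].value())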
@@ -1,7 +1,7 @@
 from collections import OrderedDict
 import avalon.api
 import avalon.nuke
-from pype.nuke.lib import create_write_node
+from pype.nuke.lib import create_write_node, add_deadline_tab
 from pype import api as pype
 from pypeapp import config

@@ -51,7 +51,7 @@ class CreateWriteRender(avalon.nuke.Creator):
         node = 'write'

         instance = nuke.toNode(self.data["subset"])

+        node = None
         if not instance:
             write_data = {
                 "class": node,
@@ -69,9 +69,10 @@ class CreateWriteRender(avalon.nuke.Creator):
             write_data.update({
                 "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}"})

-            create_write_node(self.data["subset"], write_data)
+            node = create_write_node(self.data["subset"], write_data)

-            return
+        # Deadline tab.
+        add_deadline_tab(node)


 class CreateWritePrerender(avalon.nuke.Creator):
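Pieced together, the creator's process() now follows roughly this flow; a simplified sketch of the two hunks above, with the presets and the remaining write_data keys trimmed for brevity:

import nuke
from pype.nuke.lib import create_write_node, add_deadline_tab

def process(self):  # simplified body of CreateWriteRender.process
    instance = nuke.toNode(self.data["subset"])

    node = None
    if not instance:
        write_data = {"class": node}  # remaining keys omitted in this sketch
        write_data.update({
            "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}"})
        node = create_write_node(self.data["subset"], write_data)

    # Deadline tab.
    add_deadline_tab(node)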
@@ -101,6 +101,11 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             "fps": instance.context.data["fps"]
         }

+        group_node = [x for x in instance if x.Class() == "Group"][0]
+        deadlineChunkSize = 1
+        if "deadlineChunkSize" in group_node.knobs():
+            deadlineChunkSize = group_node["deadlineChunkSize"].value()
+
         instance.data.update({
             "versionData": version_data,
             "path": path,
@@ -112,6 +117,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             "frameEnd": last_frame,
             "outputType": output_type,
             "colorspace": node["colorspace"].value(),
+            "deadlineChunkSize": deadlineChunkSize
         })

         self.log.debug("instance.data: {}".format(instance.data))
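The Deadline submitter itself is not touched by this commit; purely as a hypothetical illustration, a downstream submission plug-in could map the collected value onto Deadline's standard ChunkSize job-info key along these lines (the "frameStart" and "subset" keys are assumed to be collected elsewhere):

# Hypothetical downstream use of the collected value; not part of this commit.
def build_job_info(instance):
    """Assemble a minimal Deadline JobInfo dict from collected instance data."""
    return {
        "Plugin": "Nuke",
        "Name": instance.data["subset"],  # assumed key
        "Frames": "{}-{}".format(instance.data["frameStart"],  # assumed key
                                 instance.data["frameEnd"]),
        # ChunkSize controls how many frames each Deadline task renders.
        "ChunkSize": int(instance.data.get("deadlineChunkSize", 1)),
    }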
pype/plugins/nuke/publish/validate_write_deadline_tab.py (new file, 42 lines added)
@@ -0,0 +1,42 @@
+import pyblish.api
+import pype.nuke.lib
+
+
+class RepairNukeWriteDeadlineTab(pyblish.api.Action):
+
+    label = "Repair"
+    icon = "wrench"
+    on = "failed"
+
+    def process(self, context, plugin):
+
+        # Get the errored instances
+        failed = []
+        for result in context.data["results"]:
+            if (result["error"] is not None and result["instance"] is not None
+                    and result["instance"] not in failed):
+                failed.append(result["instance"])
+
+        # Apply pyblish.logic to get the instances for the plug-in
+        instances = pyblish.api.instances_by_plugin(failed, plugin)
+
+        for instance in instances:
+            group_node = [x for x in instance if x.Class() == "Group"][0]
+            pype.nuke.lib.add_deadline_tab(group_node)
+
+
+class ValidateNukeWriteDeadlineTab(pyblish.api.InstancePlugin):
+    """Ensure Deadline tab is present and current."""
+
+    order = pyblish.api.ValidatorOrder
+    label = "Deadline Tab"
+    hosts = ["nuke"]
+    optional = True
+    families = ["write"]
+    actions = [RepairNukeWriteDeadlineTab]
+
+    def process(self, instance):
+        group_node = [x for x in instance if x.Class() == "Group"][0]
+
+        msg = "Deadline tab missing on \"{}\"".format(group_node.name())
+        assert "Deadline" in group_node.knobs(), msg
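For reference, a minimal sketch of wiring a validator like this into a pyblish run (register_plugin_path and publish are standard pyblish calls; the plug-in directory below is only illustrative):

import pyblish.api
import pyblish.util

# Register the directory holding the publish plug-ins (path is illustrative).
pyblish.api.register_plugin_path("pype/plugins/nuke/publish")

# publish() collects instances and then runs validators such as
# ValidateNukeWriteDeadlineTab; a failed result exposes the Repair action in the UI.
context = pyblish.util.publish()

for result in context.data["results"]:
    if result["error"] is not None:
        print("Failed:", result["plugin"].__name__)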