Merged in hotfix/nuke_publish_create_load (pull request #253)

Hotfix/nuke publish create load

Approved-by: Milan Kolar <milan@orbi.tools>
Jakub Jezek 2019-08-04 20:34:26 +00:00 committed by Milan Kolar
commit d1eae52a3e
19 changed files with 162 additions and 142 deletions

View file

@ -197,7 +197,7 @@ def version_up(filepath):
dirname = os.path.dirname(filepath)
basename, ext = os.path.splitext(os.path.basename(filepath))
regex = "[._]v\d+"
regex = r"[._]v\d+"
matches = re.findall(regex, str(basename), re.IGNORECASE)
if not matches:
log.info("Creating version...")
@ -205,7 +205,7 @@ def version_up(filepath):
new_basename = "{}{}".format(basename, new_label)
else:
label = matches[-1]
version = re.search("\d+", label).group()
version = re.search(r"\d+", label).group()
padding = len(version)
new_version = int(version) + 1
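The hunk above only switches the version-token patterns to raw strings, so \d is read as a regex digit class rather than a Python string escape. A minimal, self-contained sketch of the same bump logic, with a hypothetical bump_version helper standing in for the full version_up:

import os
import re

def bump_version(filepath):
    """Return filepath with its trailing v-token incremented, padding kept."""
    dirname = os.path.dirname(filepath)
    basename, ext = os.path.splitext(os.path.basename(filepath))
    # raw string: \d stays a regex digit class on every Python version
    matches = re.findall(r"[._]v\d+", basename, re.IGNORECASE)
    if not matches:
        return os.path.join(dirname, "{}_v001{}".format(basename, ext))
    label = matches[-1]
    version = re.search(r"\d+", label).group()
    new_label = label.replace(version, str(int(version) + 1).zfill(len(version)))
    return os.path.join(dirname, basename.replace(label, new_label) + ext)

# bump_version("shot010_comp_v012.nk") -> "shot010_comp_v013.nk"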

View file

@ -321,16 +321,7 @@ def create_write_node(name, data, prenodes=None):
lnk.makeLink(write_node.name(), "Render")
lnk.setName("Render")
GN.addKnob(lnk)
# linking knobs to group property panel
linking_knobs = ["first", "last", "use_limit"]
for k in linking_knobs:
lnk = nuke.Link_Knob(k)
lnk.makeLink(write_node.name(), k)
lnk.setName(k.replace('_', ' ').capitalize())
lnk.clearFlag(nuke.STARTLINE)
GN.addKnob(lnk)
return GN

View file

@ -410,6 +410,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
Args:
instance: the instance to integrate
"""
transfers = instance.data.get("transfers", list())
for src, dest in transfers:
if os.path.normpath(src) != os.path.normpath(dest):
self.copy_file(src, dest)
transfers = instance.data.get("transfers", list())
for src, dest in transfers:
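The integrator hunk adds a guard so transfers whose source and destination normalize to the same path are not copied onto themselves. A hedged sketch of the same check outside the plugin; the copy itself is simplified to shutil.copy2, whereas the real plugin calls its own self.copy_file:

import os
import shutil

def integrate_transfers(transfers):
    """Copy (src, dest) pairs, skipping pairs that resolve to the same path."""
    for src, dest in transfers:
        # normalize separators and redundant segments before comparing
        if os.path.normpath(src) == os.path.normpath(dest):
            continue
        dest_dir = os.path.dirname(dest)
        if not os.path.isdir(dest_dir):
            os.makedirs(dest_dir)
        shutil.copy2(src, dest)  # stand-in for the plugin's self.copy_file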

View file

@ -125,6 +125,24 @@ class CreateWritePrerender(avalon.nuke.Creator):
write_data.update({
"fpath_template": "{work}/prerenders/{subset}/{subset}.{frame}.{ext}"})
create_write_node(self.data["subset"], write_data)
# get group node
group_node = create_write_node(self.data["subset"], write_data)
# open group node
group_node.begin()
for n in nuke.allNodes():
# get write node
if n.Class() in "Write":
write_node = n
group_node.end()
# linking knobs to group property panel
linking_knobs = ["first", "last", "use_limit"]
for k in linking_knobs:
lnk = nuke.Link_Knob(k)
lnk.makeLink(write_node.name(), k)
lnk.setName(k.replace('_', ' ').capitalize())
lnk.clearFlag(nuke.STARTLINE)
group_node.addKnob(lnk)
return
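The creator now keeps the group returned by create_write_node, steps inside it to find the wrapped Write node, and exposes its first/last/use_limit knobs on the group's property panel. A condensed sketch of that linking pattern, written with an explicit == class check instead of the in test used above (the helper name is an assumption):

import nuke

def expose_write_knobs(group_node, knob_names=("first", "last", "use_limit")):
    """Link chosen knobs of the inner Write node onto the group's panel."""
    write_node = None
    group_node.begin()                  # enter the group context
    for n in nuke.allNodes():
        if n.Class() == "Write":
            write_node = n
    group_node.end()                    # leave the group context
    if write_node is None:
        return
    for name in knob_names:
        link = nuke.Link_Knob(name)
        link.makeLink(write_node.name(), name)
        link.setName(name.replace("_", " ").capitalize())
        link.clearFlag(nuke.STARTLINE)  # keep the linked knobs on one row
        group_node.addKnob(link)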

View file

@ -76,7 +76,7 @@ class LoadMov(api.Loader):
"""Load mov file into Nuke"""
families = ["write", "source", "plate", "render", "review"]
representations = ["mov", "preview", "review", "mp4"]
representations = ["wipmov", "h264", "mov", "preview", "review", "mp4"]
label = "Load mov"
order = -10

View file

@ -1,22 +1,18 @@
from avalon import api, style, io
from pype.nuke.lib import get_avalon_knob_data
from avalon.nuke import get_avalon_knob_data
import nuke
import os
from pype.api import Logger
log = Logger().get_logger(__name__, "nuke")
class LinkAsGroup(api.Loader):
"""Copy the published file to be pasted at the desired location"""
representations = ["nk"]
families = ["*"]
families = ["workfile"]
label = "Load Precomp"
order = 10
order = 0
icon = "file"
color = style.colors.dark
color = style.colors.alert
def load(self, context, name, namespace, data):
@ -41,12 +37,12 @@ class LinkAsGroup(api.Loader):
self.log.info("versionData: {}\n".format(context["version"]["data"]))
# add additional metadata from the version to imprint to Avalon knob
add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", "source", "author", "fps"]
add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
"source", "author", "fps"]
data_imprint = {
"start_frame": first,
"startingFrame": first,
"frameStart": first,
"frameEnd": last,
"version": vname
@ -67,7 +63,6 @@ class LinkAsGroup(api.Loader):
colorspace = context["version"]["data"].get("colorspace", None)
self.log.info("colorspace: {}\n".format(colorspace))
# ['version', 'file', 'reading', 'output', 'useOutput']
P["name"].setValue("{}_{}".format(name, namespace))
@ -76,7 +71,7 @@ class LinkAsGroup(api.Loader):
with P:
# iterate through all nodes in the group node and find pype writes
writes = [n.name() for n in nuke.allNodes()
if n.Class() == "Write"
if n.Class() == "Group"
if get_avalon_knob_data(n)]
# create panel for selecting output
@ -84,7 +79,7 @@ class LinkAsGroup(api.Loader):
panel_label = "Select write node for output"
p = nuke.Panel("Select Write Node")
p.addEnumerationPulldown(
panel_label, panel_choices)
panel_label, panel_choices)
p.show()
P["output"].setValue(p.value(panel_label))
@ -116,7 +111,7 @@ class LinkAsGroup(api.Loader):
node = nuke.toNode(container['objectName'])
root = api.get_representation_path(representation).replace("\\","/")
root = api.get_representation_path(representation).replace("\\", "/")
# Get start frame from version data
version = io.find_one({
@ -159,8 +154,7 @@ class LinkAsGroup(api.Loader):
else:
node["tile_color"].setValue(int("0xff0ff0ff", 16))
log.info("udated to version: {}".format(version.get("name")))
self.log.info("udated to version: {}".format(version.get("name")))
def remove(self, container):
from avalon.nuke import viewer_update_and_undo_stop
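Because writes are now wrapped in pype groups (see the creator change above), the precomp loader looks for Group nodes carrying Avalon knob data rather than bare Write nodes, and asks the artist which one should drive the Precomp output. A hedged sketch of that selection step; precomp_node stands for the Precomp the loader created, and the space-separated choice string follows what nuke.Panel expects:

import nuke
from avalon.nuke import get_avalon_knob_data

def pick_precomp_output(precomp_node):
    """Return the name of a pype write group inside the loaded script."""
    with precomp_node:
        # only groups imprinted with Avalon data are pype write groups
        writes = [n.name() for n in nuke.allNodes()
                  if n.Class() == "Group" and get_avalon_knob_data(n)]
    if not writes:
        return None
    panel_label = "Select write node for output"
    panel = nuke.Panel("Select Write Node")
    panel.addEnumerationPulldown(panel_label, " ".join(writes))
    panel.show()
    return panel.value(panel_label)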

View file

@ -92,6 +92,8 @@ class LoadSequence(api.Loader):
version = context['version']
version_data = version.get("data", {})
log.info("version_data: {}\n".format(version_data))
first = version_data.get("frameStart", None)
last = version_data.get("frameEnd", None)
handles = version_data.get("handles", 0)
@ -103,9 +105,9 @@ class LoadSequence(api.Loader):
handle_start = handles
handle_end = handles
# create handles offset
first -= handle_start
last += handle_end
# # create handles offset
# first -= handle_start
# last += handle_end
# Fallback to asset name when namespace is None
if namespace is None:
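The loader stops adding handles on top of frameStart/frameEnd, on the assumption that the published range already contains them. A small sketch of applying that range to a Read node as-is (the helper and its arguments are assumptions, not the loader's actual code):

def set_read_range(read_node, version_data):
    """Apply the published frame range without re-adding handles."""
    first = version_data.get("frameStart")
    last = version_data.get("frameEnd")
    if first is None or last is None:
        return
    # frameStart/frameEnd are assumed to already include the handles
    read_node["first"].setValue(int(first))
    read_node["last"].setValue(int(last))
    read_node["origfirst"].setValue(int(first))
    read_node["origlast"].setValue(int(last))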

View file

@ -12,3 +12,4 @@ class CollectActiveViewer(pyblish.api.ContextPlugin):
def process(self, context):
context.data["ViewerProcess"] = nuke.ViewerProcess.node()
context.data["ActiveViewer"] = nuke.activeViewer()

View file

@ -19,3 +19,5 @@ class CollectAssetInfo(pyblish.api.ContextPlugin):
self.log.info("asset_data: {}".format(asset_data))
context.data['handles'] = int(asset_data["data"].get("handles", 0))
context.data["handleStart"] = int(asset_data["data"].get("handleStart", 0))
context.data["handleEnd"] = int(asset_data["data"].get("handleEnd", 0))

View file

@ -3,7 +3,7 @@ import os
import nuke
import pyblish.api
from avalon import io, api
from avalon.nuke.lib import get_avalon_knob_data
from avalon.nuke import get_avalon_knob_data
@pyblish.api.log
@ -18,23 +18,26 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
asset_data = io.find_one({"type": "asset",
"name": api.Session["AVALON_ASSET"]})
# add handles into context
context.data['handles'] = context.data['handles']
self.log.debug("asset_data: {}".format(asset_data["data"]))
instances = []
# creating instances per write node
for node in nuke.allNodes():
self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes()))
for node in nuke.allNodes():
try:
if node["disable"].value():
continue
except Exception:
except Exception as E:
self.log.warning(E)
continue
# get data from avalon knob
self.log.debug("node[name]: {}".format(node['name'].value()))
avalon_knob_data = get_avalon_knob_data(node)
self.log.debug("avalon_knob_data: {}".format(avalon_knob_data))
if not avalon_knob_data:
continue

View file

@ -14,17 +14,24 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
families = ["render", "render.local", "render.farm"]
def process(self, instance):
# if not instance.data["publish"]:
# continue
node = instance[0]
group = instance[0]
if node.Class() != "Write":
if group.Class() != "Group":
return
self.log.debug("checking instance: {}".format(instance))
group.begin()
for n in nuke.allNodes():
if n.Class() != "Write":
continue
node = n
group.end()
# Determine defined file type
ext = node["file_type"].value()
@ -34,7 +41,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
output_type = "mov"
# Get frame range
handles = instance.context.data.get('handles', 0)
handles = instance.context.data['handles']
handle_start = instance.context.data["handleStart"]
handle_end = instance.context.data["handleEnd"]
first_frame = int(nuke.root()["first_frame"].getValue())
last_frame = int(nuke.root()["last_frame"].getValue())
@ -85,7 +94,22 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
if 'render.local' in instance.data['families']:
instance.data['families'].append('ftrack')
# Add version data to instance
version_data = {
"handles": handle_start,
"handleStart": handle_start,
"handleEnd": handle_end,
"frameStart": first_frame,
"frameEnd": last_frame,
"version": int(version),
"colorspace": node["colorspace"].value(),
"families": [instance.data["family"]],
"subset": instance.data["subset"],
"fps": instance.context.data["fps"]
}
instance.data.update({
"versionData": version_data,
"path": path,
"outputDir": output_dir,
"ext": ext,
@ -96,5 +120,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
"outputType": output_type,
"colorspace": node["colorspace"].value(),
})
instance.insert(1, instance[0])
instance[0] = node
self.log.debug("instance.data: {}".format(instance.data))

View file

@ -0,0 +1,42 @@
import nuke
import pyblish.api
from avalon.nuke import maintained_selection
class CreateOutputNode(pyblish.api.ContextPlugin):
"""Adding output node for each ouput write node
So when latly user will want to Load .nk as LifeGroup or Precomp
Nuke will not complain about missing Output node
"""
label = 'Output Node Create'
order = pyblish.api.ExtractorOrder + 0.4
families = ["workfile"]
hosts = ['nuke']
def process(self, context):
# capture selection state
with maintained_selection():
# deselect all nodes
self.log.info(context.data["ActiveViewer"])
active_viewer = context.data["ActiveViewer"]
active_input = active_viewer.activeInput()
active_node = active_viewer.node()
last_viewer_node = active_node.input(active_input)
name = last_viewer_node.name()
self.log.info("Node name: {}".format(name))
# select only instance render node
last_viewer_node['selected'].setValue(True)
output_node = nuke.createNode("Output")
# deselect all and select the original selection
output_node['selected'].setValue(False)
# save script
nuke.scriptSave()
# add node to instance node list
context.data["outputNode"] = output_node

View file

@ -1,91 +0,0 @@
import os
import json
import datetime
import time
import clique
from pprint import pformat
import pyblish.api
class ExtractJSON(pyblish.api.ContextPlugin):
""" Extract all instances to a serialized json file. """
order = pyblish.api.IntegratorOrder + 1
label = "Extract to JSON"
families = ["write"]
def process(self, context):
workspace = os.path.join(
os.path.dirname(context.data["currentFile"]), "workspace",
"instances")
if not os.path.exists(workspace):
os.makedirs(workspace)
context_data = context.data.copy()
unwrapped_instance = []
for i in context_data["instances"]:
unwrapped_instance.append(i.data)
context_data["instances"] = unwrapped_instance
timestamp = datetime.datetime.fromtimestamp(
time.time()).strftime("%Y%m%d-%H%M%S")
filename = timestamp + "_instances.json"
with open(os.path.join(workspace, filename), "w") as outfile:
outfile.write(pformat(context_data, depth=20))
def serialize(self, data):
"""
Convert all nested content to serialized objects
Args:
data (dict): nested data
Returns:
dict: nested data
"""
def encoding_obj(value):
try:
value = str(value).replace("\\", "/")
# value = getattr(value, '__dict__', str(value))
except Exception:
pass
return value
for key, value in dict(data).items():
if key in ["records", "instances", "results"]:
# escape all record objects
data[key] = None
continue
if hasattr(value, '__module__'):
# only deals with module objects
if "plugins" in value.__module__:
# only dealing with plugin objects
data[key] = str(value.__module__)
else:
if ".lib." in value.__module__:
# will allow only anatomy dict
data[key] = self.serialize(value)
else:
data[key] = None
continue
continue
if isinstance(value, dict):
# loops if dictionary
data[key] = self.serialize(value)
if isinstance(value, (list or tuple)):
# loops if list or tuple
for i, item in enumerate(value):
value[i] = self.serialize(item)
data[key] = value
data[key] = encoding_obj(value)
return data

View file

@ -0,0 +1,22 @@
import nuke
import pyblish.api
class RemoveOutputNode(pyblish.api.ContextPlugin):
"""Removing output node for each ouput write node
"""
label = 'Output Node Remove'
order = pyblish.api.IntegratorOrder
families = ["workfile"]
hosts = ['nuke']
def process(self, context):
try:
output_node = context.data["outputNode"]
name = output_node["name"].value()
self.log.info("Removing output node: '{}'".format(name))
nuke.delete(output_node)
except Exception:
return

View file

@ -29,7 +29,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
# root = nuke.root()
# node_subset_name = instance.data.get("name", None)
node = instance[0]
node = instance[1]
DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
"http://localhost:8082")

View file

@ -11,16 +11,16 @@ class RepairCollectionAction(pyblish.api.Action):
icon = "wrench"
def process(self, context, plugin):
self.log.info(context[0])
self.log.info(context[0][1])
files_remove = [os.path.join(context[0].data["outputDir"], f)
for r in context[0].data.get("representations", [])
for f in r.get("files", [])
]
self.log.info(files_remove)
self.log.info("Files to be removed: {}".format(files_remove))
for f in files_remove:
os.remove(f)
self.log.debug("removing file: {}".format(f))
context[0][0]["render"].setValue(True)
context[0][1]["render"].setValue(True)
self.log.info("Rendering toggled ON")

View file

@ -14,7 +14,7 @@ class RepairNukeWriteNodeAction(pyblish.api.Action):
instances = pype.utils.filter_instances(context, plugin)
for instance in instances:
node = instance[0]
node = instance[1]
correct_data = nukelib.get_write_node_template_attr(node)
for k, v in correct_data.items():
node[k].setValue(v)
@ -33,7 +33,7 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin):
def process(self, instance):
node = instance[0]
node = instance[1]
correct_data = nukelib.get_write_node_template_attr(node)
check = []

View file

@ -258,6 +258,8 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
"handleEnd": handle_end,
"frameStart": instance.data["frameStart"],
"frameEnd": instance.data["frameEnd"],
"clipIn": instance.data["clipIn"],
"clipOut": instance.data["clipOut"],
'fps': instance.context.data["fps"]
}

View file

@ -18,7 +18,10 @@ class CollectClipTagFrameStart(api.InstancePlugin):
t_family = t_metadata.get("tag.family", "")
# gets only task family tags and collect labels
if "startingFrame" in t_family:
if "frameStart" in t_family:
t_number = t_metadata.get("tag.number", "")
start_frame = int(t_number)
instance.data["startingFrame"] = start_frame
self.log.info("Start frame on `{0}` set to `{1}`".format(
instance, start_frame
))
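The tag collector now keys on the renamed frameStart family when reading a Hiero tag's number into the instance. A minimal sketch of that parse, using the metadata keys visible above (the helper name is an assumption):

def collect_start_frame(instance, tag_metadata):
    """Copy a frameStart tag's number onto the instance as startingFrame."""
    if "frameStart" not in tag_metadata.get("tag.family", ""):
        return
    t_number = tag_metadata.get("tag.number", "")
    if not t_number:
        return
    instance.data["startingFrame"] = int(t_number)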