mirror of
https://github.com/ynput/ayon-core.git
synced 2026-01-01 16:34:53 +01:00
Merge branch 'feature/PYPE-468-nukestudio-retiming-tag-for-pub' into feature/PYPE-468-476-MERGE
This commit is contained in:
commit
d1ef27f0b7
8 changed files with 342 additions and 9 deletions
|
|
@ -195,6 +195,13 @@ def script_name():
|
|||
'''
|
||||
return nuke.root().knob('name').value()
|
||||
|
||||
def add_button_write_to_read(node):
    """Attach a 'Create Read' PyScript button knob to *node*.

    Pressing the button runs ``write_to_read.write_to_read`` on the node,
    which creates a Read node pointing at the node's rendered output.
    """
    script = "import write_to_read;write_to_read.write_to_read(nuke.thisNode())"
    button = nuke.PyScript_Knob("createReadNode", "Create Read", script)
    # 0x1000 is presumably nuke.STARTLINE (put the knob on its own row
    # in the properties panel) -- TODO confirm against the Nuke API docs
    button.setFlag(0x1000)
    node.addKnob(button)
|
||||
|
||||
def create_write_node(name, data, prenodes=None):
|
||||
''' Creating write node which is group node
|
||||
|
|
@ -324,11 +331,15 @@ def create_write_node(name, data, prenodes=None):
|
|||
# imprinting group node
|
||||
GN = avalon.nuke.imprint(GN, data["avalon"])
|
||||
|
||||
|
||||
divider = nuke.Text_Knob('')
|
||||
GN.addKnob(divider)
|
||||
|
||||
add_rendering_knobs(GN)
|
||||
|
||||
# adding write to read button
|
||||
add_button_write_to_read(GN)
|
||||
|
||||
divider = nuke.Text_Knob('')
|
||||
GN.addKnob(divider)
|
||||
|
||||
|
|
|
|||
|
|
@ -16,7 +16,6 @@ def subset_to_families(subset, family, families):
|
|||
new_subset = families + subset_sufx
|
||||
return "{}.{}".format(family, new_subset)
|
||||
|
||||
|
||||
class CreateWriteRender(avalon.nuke.Creator):
|
||||
# change this to template preset
|
||||
preset = "render"
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
import nuke
|
||||
from avalon import api, io
|
||||
import pyblish.api
|
||||
|
||||
|
|
@ -19,5 +18,6 @@ class CollectAssetInfo(pyblish.api.ContextPlugin):
|
|||
self.log.info("asset_data: {}".format(asset_data))
|
||||
|
||||
context.data['handles'] = int(asset_data["data"].get("handles", 0))
|
||||
context.data["handleStart"] = int(asset_data["data"].get("handleStart", 0))
|
||||
context.data["handleStart"] = int(asset_data["data"].get(
|
||||
"handleStart", 0))
|
||||
context.data["handleEnd"] = int(asset_data["data"].get("handleEnd", 0))
|
||||
|
|
|
|||
121
pype/plugins/nukestudio/publish/collect_calculate_retime.py
Normal file
121
pype/plugins/nukestudio/publish/collect_calculate_retime.py
Normal file
|
|
@ -0,0 +1,121 @@
|
|||
from pyblish import api
|
||||
import hiero
|
||||
import math
|
||||
|
||||
|
||||
class CollectCalculateRetime(api.InstancePlugin):
    """Calculate Retiming of selected track items.

    Reads the retime margins and handles collected earlier, inspects the
    track item's playback speed and any linked, enabled TimeWarp soft
    effects, and rewrites the instance's source range
    (``sourceIn/Out``, ``sourceInH/OutH``) and ``speed`` accordingly.
    """

    # runs right after the collectors that fill in handles and margins
    order = api.CollectorOrder + 0.02
    label = "Collect Calculate Retiming"
    hosts = ["nukestudio"]
    families = ['retime']

    def process(self, instance):
        # margins set by the retiming-tag collector
        margin_in = instance.data["retimeMarginIn"]
        margin_out = instance.data["retimeMarginOut"]
        self.log.debug("margin_in: '{0}', margin_out: '{1}'".format(margin_in, margin_out))

        handle_start = instance.data["handleStart"]
        handle_end = instance.data["handleEnd"]

        # the track item being published (hiero track item -- it exposes
        # timelineIn/Out, sourceIn/Out, playbackSpeed, linkedItems)
        track_item = instance.data["item"]

        # define basic clip frame range variables
        timeline_in = int(track_item.timelineIn())
        timeline_out = int(track_item.timelineOut())
        source_in = int(track_item.sourceIn())
        source_out = int(track_item.sourceOut())
        speed = track_item.playbackSpeed()
        self.log.debug("_BEFORE: \n timeline_in: `{0}`,\n timeline_out: `{1}`,\
\n source_in: `{2}`,\n source_out: `{3}`,\n speed: `{4}`,\n handle_start: `{5}`,\n handle_end: `{6}`".format(
            timeline_in,
            timeline_out,
            source_in,
            source_out,
            speed,
            handle_start,
            handle_end
        ))

        # loop within subtrack items: accumulate how much each animated
        # TimeWarp shifts the clip's source in/out points
        source_in_change = 0
        source_out_change = 0
        for s_track_item in track_item.linkedItems():
            if isinstance(s_track_item, hiero.core.EffectTrackItem) \
                    and "TimeWarp" in s_track_item.node().Class():

                # adding timewarp attribute to instance
                if not instance.data.get("timeWarpNodes", None):
                    instance.data["timeWarpNodes"] = list()

                # ignore item if not enabled
                if s_track_item.isEnabled():
                    node = s_track_item.node()
                    name = node["name"].value()
                    look_up = node["lookup"].value()
                    animated = node["lookup"].isAnimated()
                    if animated:
                        # bake the animated lookup curve into per-frame
                        # offsets relative to the frame index itself
                        look_up = [((node["lookup"].getValueAt(i)) - i)
                                   for i in range((timeline_in - handle_start), (timeline_out + handle_end) + 1)
                                   ]
                        # calculate difference between warped and raw
                        # frame at both clip boundaries
                        diff_in = (node["lookup"].getValueAt(
                            timeline_in)) - timeline_in
                        diff_out = (node["lookup"].getValueAt(
                            timeline_out)) - timeline_out

                        # calculate source shift (accumulates across
                        # multiple stacked TimeWarps)
                        source_in_change += diff_in
                        source_out_change += diff_out

                        # calculate speed
                        # NOTE(review): (x / (t * .01)) * .01 reduces to
                        # x / t -- ratio of warped to raw frame; raises
                        # ZeroDivisionError when timeline_in/out is 0 --
                        # confirm clips never start at frame 0
                        speed_in = (node["lookup"].getValueAt(timeline_in) / (
                            float(timeline_in) * .01)) * .01
                        speed_out = (node["lookup"].getValueAt(timeline_out) / (
                            float(timeline_out) * .01)) * .01

                        # calculate handles, scaled by the local warp
                        # speed; note handle_start/end are intentionally
                        # overwritten and reused below
                        handle_start = int(
                            math.ceil(
                                (handle_start * speed_in * 1000) / 1000.0)
                        )

                        handle_end = int(
                            math.ceil(
                                (handle_end * speed_out * 1000) / 1000.0)
                        )
                        self.log.debug(
                            ("diff_in, diff_out", diff_in, diff_out))
                        self.log.debug(
                            ("source_in_change, source_out_change", source_in_change, source_out_change))

                    # record the TimeWarp (static value or baked curve)
                    instance.data["timeWarpNodes"].append({"Class": "TimeWarp",
                                                           "name": name,
                                                           "lookup": look_up})

        self.log.debug((source_in_change, source_out_change))
        # recalculate handles by the speed
        handle_start *= speed
        handle_end *= speed
        self.log.debug("speed: handle_start: '{0}', handle_end: '{1}'".format(handle_start, handle_end))

        source_in += int(source_in_change)
        source_out += int(source_out_change * speed)
        handle_start += (margin_in)
        handle_end += (margin_out)
        self.log.debug("margin: handle_start: '{0}', handle_end: '{1}'".format(handle_start, handle_end))

        # add all data to Instance
        instance.data["sourceIn"] = source_in
        instance.data["sourceOut"] = source_out
        instance.data["sourceInH"] = int(source_in - math.ceil(
            (handle_start * 1000) / 1000.0))
        instance.data["sourceOutH"] = int(source_out + math.ceil(
            (handle_end * 1000) / 1000.0))
        instance.data["speed"] = speed

        # NOTE(review): "timeWarpNodes" is only created when a TimeWarp
        # effect is linked -- this debug line raises KeyError for a
        # speed-only retime; verify
        self.log.debug("timeWarpNodes: {}".format(instance.data["timeWarpNodes"]))
        self.log.debug("sourceIn: {}".format(instance.data["sourceIn"]))
        self.log.debug("sourceOut: {}".format(instance.data["sourceOut"]))
        self.log.debug("speed: {}".format(instance.data["speed"]))
|
||||
|
|
@ -1,5 +1,6 @@
|
|||
import pyblish.api
|
||||
|
||||
|
||||
class CollectClipFrameRanges(pyblish.api.InstancePlugin):
|
||||
"""Collect all frame range data: source(In,Out), timeline(In,Out), edit_(in, out), f(start, end)"""
|
||||
|
||||
|
|
@ -15,8 +16,10 @@ class CollectClipFrameRanges(pyblish.api.InstancePlugin):
|
|||
handle_start = instance.data["handleStart"]
|
||||
handle_end = instance.data["handleEnd"]
|
||||
|
||||
source_in_h = instance.data["sourceIn"] - handle_start
|
||||
source_out_h = instance.data["sourceOut"] + handle_end
|
||||
source_in_h = instance.data("sourceInH",
|
||||
instance.data("sourceIn") - handle_start)
|
||||
source_out_h = instance.data("sourceOutH",
|
||||
instance.data("sourceOut") + handle_end)
|
||||
|
||||
timeline_in = instance.data["clipIn"]
|
||||
timeline_out = instance.data["clipOut"]
|
||||
|
|
|
|||
|
|
@ -137,7 +137,6 @@ class CollectPlatesData(api.InstancePlugin):
|
|||
"subset": name,
|
||||
"fps": instance.context.data["fps"]
|
||||
})
|
||||
instance.data["versionData"] = version_data
|
||||
|
||||
try:
|
||||
basename, ext = os.path.splitext(source_file)
|
||||
|
|
@ -156,9 +155,11 @@ class CollectPlatesData(api.InstancePlugin):
|
|||
start_frame = source_first_frame + instance.data["sourceInH"]
|
||||
duration = instance.data["sourceOutH"] - instance.data["sourceInH"]
|
||||
end_frame = start_frame + duration
|
||||
self.log.debug("start_frame: `{}`".format(start_frame))
|
||||
self.log.debug("end_frame: `{}`".format(end_frame))
|
||||
files = [file % i for i in range(start_frame, (end_frame + 1), 1)]
|
||||
except Exception as e:
|
||||
self.log.debug("Exception in file: {}".format(e))
|
||||
self.log.warning("Exception in file: {}".format(e))
|
||||
head, ext = os.path.splitext(source_file)
|
||||
ext = ext[1:]
|
||||
files = source_file
|
||||
|
|
@ -207,16 +208,41 @@ class CollectPlatesData(api.InstancePlugin):
|
|||
thumb_representation)
|
||||
|
||||
# adding representation for plates
|
||||
frame_start = instance.data["frameStart"] - \
|
||||
instance.data["handleStart"]
|
||||
frame_end = instance.data["frameEnd"] + instance.data["handleEnd"]
|
||||
|
||||
# exception for retimes
|
||||
if instance.data.get("retime"):
|
||||
source_in_h = instance.data["sourceInH"]
|
||||
source_in = instance.data["sourceIn"]
|
||||
source_handle_start = source_in_h - source_in
|
||||
frame_start = instance.data["frameStart"] + source_handle_start
|
||||
duration = instance.data["sourceOutH"] - instance.data["sourceInH"]
|
||||
frame_end = frame_start + duration
|
||||
|
||||
plates_representation = {
|
||||
'files': files,
|
||||
'stagingDir': staging_dir,
|
||||
'name': ext,
|
||||
'ext': ext,
|
||||
"frameStart": instance.data["frameStart"] - instance.data["handleStart"],
|
||||
"frameEnd": instance.data["frameEnd"] + instance.data["handleEnd"],
|
||||
"frameStart": frame_start,
|
||||
"frameEnd": frame_end,
|
||||
}
|
||||
instance.data["representations"].append(plates_representation)
|
||||
|
||||
# deal with retimed clip
|
||||
if instance.data.get("retime"):
|
||||
version_data.update({
|
||||
"retime": True,
|
||||
"speed": instance.data.get("speed", 1),
|
||||
"timewarps": instance.data.get("timeWarpNodes", []),
|
||||
"frameStart": frame_start,
|
||||
"frameEnd": frame_end,
|
||||
})
|
||||
|
||||
instance.data["versionData"] = version_data
|
||||
|
||||
# testing families
|
||||
family = instance.data["family"]
|
||||
families = instance.data["families"]
|
||||
|
|
|
|||
32
pype/plugins/nukestudio/publish/collect_tag_retime.py
Normal file
32
pype/plugins/nukestudio/publish/collect_tag_retime.py
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
from pyblish import api
|
||||
|
||||
|
||||
class CollectTagRetime(api.InstancePlugin):
    """Collect Retiming from Tags of selected track items.

    Scans the instance's tags for one whose ``tag.family`` metadata
    contains "retiming", stores its in/out margins on the instance and
    marks the instance (and its families) as retimed.
    """

    order = api.CollectorOrder + 0.014
    label = "Collect Retiming Tag"
    hosts = ["nukestudio"]
    families = ['clip']

    def process(self, instance):
        # gets tags
        tags = instance.data["tags"]

        for t in tags:
            t_metadata = dict(t["metadata"])
            t_family = t_metadata.get("tag.family", "")

            # gets only retiming family tags and collect margins
            if "retiming" not in t_family:
                continue

            # fall back to "0" so a tag missing the margin metadata
            # (or carrying an empty value) does not crash int("")
            margin_in = t_metadata.get("tag.marginIn", "0") or "0"
            margin_out = t_metadata.get("tag.marginOut", "0") or "0"

            instance.data["retimeMarginIn"] = int(margin_in)
            instance.data["retimeMarginOut"] = int(margin_out)
            instance.data["retime"] = True

            self.log.info("retimeMarginIn: `{}`".format(margin_in))
            self.log.info("retimeMarginOut: `{}`".format(margin_out))

            # add the family only once even if several retiming tags exist
            if "retime" not in instance.data["families"]:
                instance.data["families"] += ["retime"]
|
||||
141
setup/nuke/nuke_path/write_to_read.py
Normal file
141
setup/nuke/nuke_path/write_to_read.py
Normal file
|
|
@ -0,0 +1,141 @@
|
|||
import re
|
||||
import os
|
||||
import glob
|
||||
import nuke
|
||||
from pype import api as pype
|
||||
log = pype.Logger().get_logger(__name__, "nuke")
|
||||
|
||||
# container formats that hold the whole clip in one file (no frame padding)
SINGLE_FILE_FORMATS = ['avi', 'mp4', 'mxf', 'mov', 'mpg', 'mpeg', 'wmv', 'm4v',
                       'm2v']


def evaluate_filepath_new(k_value, k_eval, project_dir, first_frame):
    """Resolve a Write node's file knob to an on-disk path and frame range.

    Args:
        k_value (str): raw knob value, may contain a printf-style
            frame pattern (e.g. ``%04d``).
        k_eval (str): the knob evaluated by Nuke, or None.
        project_dir (str): project directory for resolving relative
            paths, or None.
        first_frame (int): frame substituted into ``k_value``.

    Returns:
        tuple(str, int, int): (filepath, firstframe, lastframe) where
        image sequences get a hash-padded path (``name.####.ext``),
        or None when no rendered file could be located.
    """
    # get combined relative path (only meaningful when both an evaluated
    # path and a project directory are available)
    combined_relative_path = None
    if k_eval is not None and project_dir is not None:
        combined_relative_path = os.path.abspath(
            os.path.join(project_dir, k_eval))
        combined_relative_path = combined_relative_path.replace('\\', '/')
        filetype = combined_relative_path.split('.')[-1]
        frame_number = re.findall(r'\d+', combined_relative_path)[-1]
        basename = combined_relative_path[: combined_relative_path.rfind(
            frame_number)]
        filepath_glob = basename + '*' + filetype
        # discard the candidate when nothing matching exists on disk
        if not glob.glob(filepath_glob):
            combined_relative_path = None

    try:
        k_value = k_value % first_frame
        if os.path.exists(k_value):
            filepath = k_value
        elif k_eval is not None and os.path.exists(k_eval):
            filepath = k_eval
        elif combined_relative_path is not None:
            filepath = combined_relative_path
        else:
            # nothing found on disk -- handled by the except below
            raise IOError("no rendered file found on disk")

        filepath = os.path.abspath(filepath)
    except Exception as E:
        log.error("Cannot create Read node. Perhaps it needs to be "
                  "rendered first :) Error: `{}`".format(E))
        return

    filepath = filepath.replace('\\', '/')
    # last run of digits in the path is the frame number of this file
    current_frame = re.findall(r'\d+', filepath)[-1]
    padding = len(current_frame)
    basename = filepath[: filepath.rfind(current_frame)]
    filetype = filepath.split('.')[-1]

    # sequence or not?
    if filetype not in SINGLE_FILE_FORMATS:
        # Image sequence needs hashes
        filepath = basename + '#' * padding + '.' + filetype

    # relative path? make it relative again
    if project_dir is not None:
        filepath = filepath.replace(project_dir, '.')

    # get first and last frame from disk; frames are converted to int so
    # that sorting is numeric (string sort misorders mixed paddings) and
    # the values compare sanely downstream
    frames = []
    for f in glob.glob(basename + '*' + filetype):
        frames.append(int(re.findall(r'\d+', f)[-1]))
    frames.sort()
    if frames:
        firstframe = frames[0]
        lastframe = frames[-1]
    else:
        # defensive: the resolved file itself should always glob-match
        firstframe = lastframe = 0
    if lastframe < 0:
        lastframe = firstframe

    return filepath, firstframe, lastframe
|
||||
|
||||
|
||||
def create_read_node(ndata, comp_start):
    """Create a Read node configured from the *ndata* dict.

    *ndata* carries filepath, first/last frame, colorspace, raw flag and
    the xpos/ypos where the node should be placed; *comp_start* is the
    comp's first frame, used to decide the frame mode.
    """
    node = nuke.createNode('Read', 'file ' + ndata['filepath'])
    first = int(ndata['firstframe'])
    last = int(ndata['lastframe'])

    node.knob('colorspace').setValue(int(ndata['colorspace']))
    node.knob('raw').setValue(ndata['rawdata'])
    for knob_name, frame in (('first', first), ('last', last),
                             ('origfirst', first), ('origlast', last)):
        node.knob(knob_name).setValue(frame)

    if comp_start == first:
        # sequence starts at the comp start: pin frame mode to "start at"
        node.knob('frame_mode').setValue("1")
        node.knob('frame').setValue(str(comp_start))
    else:
        node.knob('frame_mode').setValue("0")

    node.knob('xpos').setValue(ndata['new_xpos'])
    node.knob('ypos').setValue(ndata['new_ypos'])
    nuke.inputs(node, 0)
    return
|
||||
|
||||
|
||||
def write_to_read(gn):
    """Create a Read node for the rendered output of the Write node
    inside group node *gn*.

    Locates the (single) Write node within the group, resolves its file
    knob to what is actually on disk via ``evaluate_filepath_new`` and
    spawns a matching Read node just below the group.
    """
    comp_start = nuke.Root().knob('first_frame').value()
    project_dir = nuke.Root().knob('project_directory').getValue()
    # getValue() may return an unexpanded (or empty) expression --
    # guard before the exists() check and fall back to evaluate()
    if not project_dir or not os.path.exists(project_dir):
        project_dir = nuke.Root().knob('project_directory').evaluate()

    group_read_nodes = []

    with gn:
        # place the new Read just below the group node
        height = gn.screenHeight()
        new_xpos = int(gn.knob('xpos').value())
        new_ypos = int(gn.knob('ypos').value()) + height + 20
        group_writes = [n for n in nuke.allNodes() if n.Class() == "Write"]
        print("__ group_writes: {}".format(group_writes))
        if group_writes:
            # there can be only 1 write node, taking first
            n = group_writes[0]

            if n.knob('file') is not None:
                evaluated = evaluate_filepath_new(
                    n.knob('file').getValue(),
                    n.knob('file').evaluate(),
                    project_dir,
                    comp_start
                )
                # evaluate_filepath_new returns None when nothing has
                # been rendered yet; bail out instead of raising a
                # TypeError on unpacking
                if evaluated is None:
                    return
                myfiletranslated, firstFrame, lastFrame = evaluated

                # get node data
                ndata = {
                    'filepath': myfiletranslated,
                    'firstframe': firstFrame,
                    'lastframe': lastFrame,
                    'new_xpos': new_xpos,
                    'new_ypos': new_ypos,
                    'colorspace': n.knob('colorspace').getValue(),
                    'rawdata': n.knob('raw').value(),
                    'write_frame_mode': str(n.knob('frame_mode').value()),
                    'write_frame': n.knob('frame').value()
                }
                group_read_nodes.append(ndata)

    # create reads in one go
    for oneread in group_read_nodes:
        # create read node
        create_read_node(oneread, comp_start)
|
||||
Loading…
Add table
Add a link
Reference in a new issue