Merge branch 'release/2.7.0' into hotfix/master_integrate_loops

Jakub Trllo 2020-03-26 19:03:42 +01:00 committed by GitHub
commit 2e42820a98
21 changed files with 279 additions and 223 deletions


@ -215,14 +215,14 @@ def script_name():
def add_button_write_to_read(node):
name = "createReadNode"
label = "Create Read"
label = "[ Create Read ]"
value = "import write_to_read;write_to_read.write_to_read(nuke.thisNode())"
k = nuke.PyScript_Knob(name, label, value)
k.setFlag(0x1000)
node.addKnob(k)
def create_write_node(name, data, input=None, prenodes=None):
def create_write_node(name, data, input=None, prenodes=None, review=True):
''' Creating write node which is group node
Arguments:
@ -231,6 +231,7 @@ def create_write_node(name, data, input=None, prenodes=None):
input (node): selected node to connect to
prenodes (list, optional): list of lists, definitions for nodes
to be created before write
review (bool): adding review knob
Example:
prenodes = [(
@ -389,15 +390,8 @@ def create_write_node(name, data, input=None, prenodes=None):
add_rendering_knobs(GN)
# adding write to read button
add_button_write_to_read(GN)
divider = nuke.Text_Knob('')
GN.addKnob(divider)
# set tile color
tile_color = _data.get("tile_color", "0xff0000ff")
GN["tile_color"].setValue(tile_color)
if review:
add_review_knob(GN)
# add render button
lnk = nuke.Link_Knob("Render")
@ -405,9 +399,20 @@ def create_write_node(name, data, input=None, prenodes=None):
lnk.setName("Render")
GN.addKnob(lnk)
divider = nuke.Text_Knob('')
GN.addKnob(divider)
# adding write to read button
add_button_write_to_read(GN)
# Deadline tab.
add_deadline_tab(GN)
# set tile color
tile_color = _data.get("tile_color", "0xff0000ff")
GN["tile_color"].setValue(tile_color)
return GN
@ -429,6 +434,17 @@ def add_rendering_knobs(node):
knob = nuke.Boolean_Knob("render_farm", "Render on Farm")
knob.setValue(False)
node.addKnob(knob)
return node
def add_review_knob(node):
''' Adds additional review knob to given node
Arguments:
node (obj): nuke node object to be fixed
Return:
node (obj): with added knob
'''
if "review" not in node.knobs():
knob = nuke.Boolean_Knob("review", "Review")
knob.setValue(True)
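For context, a minimal usage sketch of the new review flag, mirroring what the prerender creator further down does. It assumes a running Nuke session with exactly one node selected; the subset name and data payload are illustrative, not taken from the repository:

from pype.nuke import lib as pnlib
import nuke

# payload shaped like the creators below build it; subset name is made up
write_data = {
    "class": "write",
    "families": ["prerender"],
    "avalon": {"subset": "prerenderMain"},
    "fpath_template": ("{work}/prerenders/nuke/{subset}"
                       "/{subset}.{frame}.{ext}"),
}

# review=False skips add_review_knob(), so the group gets no "review" checkbox;
# the default (review=True) keeps the behaviour the render creator relies on.
group_node = pnlib.create_write_node(
    "prerenderMain", write_data,
    input=nuke.selectedNode(),   # assumes one node is selected
    prenodes=[],
    review=False)
assert "review" not in group_node.knobs()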
@ -1135,7 +1151,7 @@ class BuildWorkfile(WorkfileSettings):
regex_filter=None,
version=None,
representations=["exr", "dpx", "lutJson", "mov",
"preview", "png"]):
"preview", "png", "jpeg", "jpg"]):
"""
A short description.


@ -22,6 +22,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
'setdress': 'setdress',
'pointcache': 'cache',
'render': 'render',
'render2d': 'render',
'nukescript': 'comp',
'write': 'render',
'review': 'mov',


@ -11,7 +11,9 @@ class ExtractReviewSlate(pype.api.Extractor):
label = "Review with Slate frame"
order = pyblish.api.ExtractorOrder + 0.031
families = ["slate"]
families = ["slate", "review"]
match = pyblish.api.Subset
hosts = ["nuke", "maya", "shell"]
optional = True
@ -34,7 +36,8 @@ class ExtractReviewSlate(pype.api.Extractor):
fps = inst_data.get("fps")
# defining image ratios
resolution_ratio = (float(resolution_width) * pixel_aspect) / resolution_height
resolution_ratio = ((float(resolution_width) * pixel_aspect) /
resolution_height)
delivery_ratio = float(to_width) / float(to_height)
self.log.debug("__ resolution_ratio: `{}`".format(resolution_ratio))
self.log.debug("__ delivery_ratio: `{}`".format(delivery_ratio))
@ -89,7 +92,7 @@ class ExtractReviewSlate(pype.api.Extractor):
input_args.extend([
"-r {}".format(fps),
"-t 0.04"]
)
)
# output args
codec_args = repre["_profile"].get('codec', [])
@ -111,7 +114,7 @@ class ExtractReviewSlate(pype.api.Extractor):
self.log.debug("lower then delivery")
width_scale = int(to_width * scale_factor)
width_half_pad = int((
to_width - width_scale)/2)
to_width - width_scale) / 2)
height_scale = to_height
height_half_pad = 0
else:
@ -124,7 +127,7 @@ class ExtractReviewSlate(pype.api.Extractor):
height_scale = int(
resolution_height * scale_factor)
height_half_pad = int(
(to_height - height_scale)/2)
(to_height - height_scale) / 2)
self.log.debug(
"__ width_scale: `{}`".format(width_scale))
@ -135,8 +138,10 @@ class ExtractReviewSlate(pype.api.Extractor):
self.log.debug(
"__ height_half_pad: `{}`".format(height_half_pad))
scaling_arg = "scale={0}x{1}:flags=lanczos,pad={2}:{3}:{4}:{5}:black,setsar=1".format(
width_scale, height_scale, to_width, to_height, width_half_pad, height_half_pad
scaling_arg = ("scale={0}x{1}:flags=lanczos,"
"pad={2}:{3}:{4}:{5}:black,setsar=1").format(
width_scale, height_scale, to_width, to_height,
width_half_pad, height_half_pad
)
vf_back = self.add_video_filter_args(
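To make the letterboxing arithmetic above concrete, a worked example with assumed numbers (a 2048x1556 scan with square pixels delivered to 1920x1080). Only the string assembly from this hunk is reproduced; the branch that picks the scale factors sits outside the excerpt:

width_scale, height_scale = 1421, 1080           # 2048x1556 fitted to the 1080 height
to_width, to_height = 1920, 1080
width_half_pad = (to_width - width_scale) // 2   # 249 px of black on each side
height_half_pad = 0

scaling_arg = ("scale={0}x{1}:flags=lanczos,"
               "pad={2}:{3}:{4}:{5}:black,setsar=1").format(
    width_scale, height_scale, to_width, to_height,
    width_half_pad, height_half_pad)
# scale=1421x1080:flags=lanczos,pad=1920:1080:249:0:black,setsar=1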


@ -64,6 +64,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"scene",
"vrayproxy",
"render",
"prerender",
"imagesequence",
"review",
"rendersetup",


@ -141,7 +141,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
hosts = ["fusion", "maya", "nuke"]
families = ["render.farm", "renderlayer", "imagesequence"]
families = ["render.farm", "prerener", "renderlayer", "imagesequence"]
aov_filter = {"maya": ["beauty"]}
@ -168,8 +168,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
instance_transfer = {
"slate": ["slateFrame"],
"review": ["lutPath"],
"render.farm": ["bakeScriptPath", "bakeRenderPath",
"bakeWriteNodeName", "version"]
"render2d": ["bakeScriptPath", "bakeRenderPath",
"bakeWriteNodeName", "version"]
}
# list of family names to transfer to new family if present
@ -586,11 +586,23 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"resolutionHeight": data.get("resolutionHeight", 1080),
}
if "prerender" in instance.data["families"]:
instance_skeleton_data.update({
"family": "prerender",
"families": []})
# transfer specific families from original instance to new render
for item in self.families_transfer:
if item in instance.data.get("families", []):
instance_skeleton_data["families"] += [item]
if "render.farm" in instance.data["families"]:
instance_skeleton_data.update({
"family": "render2d",
"families": ["render"] + [f for f in instance.data["families"]
if "render.farm" not in f]
})
# transfer specific properties from original instance based on
# mapping dictionary `instance_transfer`
for key, values in self.instance_transfer.items():
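Condensed into a standalone sketch, the family juggling above works roughly like this (plain Python; the families_transfer pass is omitted and the default family is an assumption, not taken from the plugin):

def build_skeleton_families(source_families, default_family="render"):
    # mirrors the two branches above: prerender claims the plain family slot,
    # render.farm instances get re-published as render2d
    skeleton = {"family": default_family, "families": []}
    if "prerender" in source_families:
        skeleton.update({"family": "prerender", "families": []})
    if "render.farm" in source_families:
        skeleton.update({
            "family": "render2d",
            "families": ["render"] + [f for f in source_families
                                      if "render.farm" not in f],
        })
    return skeleton

build_skeleton_families(["render.farm", "slate", "review"])
# {'family': 'render2d', 'families': ['render', 'slate', 'review']}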


@ -13,13 +13,17 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
"""Validates the global render settings
* File Name Prefix must start with: `maya/<Scene>`
all other token are customizable but sane values are:
all other tokens are customizable, but sane values for Arnold are:
`maya/<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>`
<Camera> token is supported also, usefull for multiple renderable
<Camera> token is supported also, useful for multiple renderable
cameras per render layer.
For Redshift, omit the <RenderPass> token. Redshift will append it
automatically if AOVs are enabled, and if you use multipart EXR
it doesn't make much sense to include it.
* Frame Padding must be:
* default: 4
@ -127,8 +131,13 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
# no vray checks implemented yet
pass
elif renderer == "redshift":
# no redshift check implemented yet
pass
if re.search(cls.R_AOV_TOKEN, prefix):
invalid = True
cls.log.error("Do not use AOV token [ {} ] - "
"Redshift automatically append AOV name and "
"it doesn't make much sense with "
"Multipart EXR".format(prefix))
elif renderer == "renderman":
file_prefix = cmds.getAttr("rmanGlobals.imageFileFormat")
dir_prefix = cmds.getAttr("rmanGlobals.imageOutputDir")
@ -143,8 +152,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
dir_prefix))
else:
multichannel = cmds.getAttr("defaultArnoldDriver.mergeAOVs")
if multichannel:
multipart = cmds.getAttr("defaultArnoldDriver.mergeAOVs")
if multipart:
if re.search(cls.R_AOV_TOKEN, prefix):
invalid = True
cls.log.error("Wrong image prefix [ {} ] - "


@ -1,103 +1,10 @@
from collections import OrderedDict
from pype.nuke import plugin
from pype.nuke import (
plugin,
lib as pnlib)
import nuke
class CreateWriteRender(plugin.PypeCreator):
# change this to template preset
name = "WriteRender"
label = "Create Write Render"
hosts = ["nuke"]
n_class = "write"
family = "render"
icon = "sign-out"
defaults = ["Main", "Mask"]
def __init__(self, *args, **kwargs):
super(CreateWriteRender, self).__init__(*args, **kwargs)
data = OrderedDict()
data["family"] = self.family
data["families"] = self.n_class
for k, v in self.data.items():
if k not in data.keys():
data.update({k: v})
self.data = data
self.nodes = nuke.selectedNodes()
self.log.debug("_ self.data: '{}'".format(self.data))
def process(self):
from pype.nuke import lib as pnlib
inputs = []
outputs = []
instance = nuke.toNode(self.data["subset"])
selected_node = None
# use selection
if (self.options or {}).get("useSelection"):
nodes = self.nodes
if not (len(nodes) < 2):
msg = ("Select only one node. The node you want to connect to, "
"or tick off `Use selection`")
log.error(msg)
nuke.message(msg)
selected_node = nodes[0]
inputs = [selected_node]
outputs = selected_node.dependent()
if instance:
if (instance.name() in selected_node.name()):
selected_node = instance.dependencies()[0]
# if node already exist
if instance:
# collect input / outputs
inputs = instance.dependencies()
outputs = instance.dependent()
selected_node = inputs[0]
# remove old one
nuke.delete(instance)
# recreate new
write_data = {
"class": self.n_class,
"families": [self.family],
"avalon": self.data
}
if self.presets.get('fpath_template'):
self.log.info("Adding template path from preset")
write_data.update(
{"fpath_template": self.presets["fpath_template"]}
)
else:
self.log.info("Adding template path from plugin")
write_data.update({
"fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}"})
write_node = pnlib.create_write_node(
self.data["subset"],
write_data,
input=selected_node)
# relinking to collected connections
for i, input in enumerate(inputs):
write_node.setInput(i, input)
write_node.autoplace()
for output in outputs:
output.setInput(0, write_node)
return write_node
class CreateWritePrerender(plugin.PypeCreator):
# change this to template preset
name = "WritePrerender"
@ -125,8 +32,6 @@ class CreateWritePrerender(plugin.PypeCreator):
self.log.debug("_ self.data: '{}'".format(self.data))
def process(self):
from pype.nuke import lib as pnlib
inputs = []
outputs = []
instance = nuke.toNode(self.data["subset"])
@ -137,8 +42,9 @@ class CreateWritePrerender(plugin.PypeCreator):
nodes = self.nodes
if not (len(nodes) < 2):
msg = ("Select only one node. The node you want to connect to, "
"or tick off `Use selection`")
msg = ("Select only one node. The node "
"you want to connect to, "
"or tick off `Use selection`")
self.log.error(msg)
nuke.message(msg)
@ -174,13 +80,15 @@ class CreateWritePrerender(plugin.PypeCreator):
else:
self.log.info("Adding template path from plugin")
write_data.update({
"fpath_template": "{work}/prerenders/nuke/{subset}/{subset}.{frame}.{ext}"})
"fpath_template": ("{work}/prerenders/nuke/{subset}"
"/{subset}.{frame}.{ext}")})
write_node = pnlib.create_write_node(
self.data["subset"],
write_data,
input=selected_node,
prenodes=[])
prenodes=[],
review=False)
# relinking to collected connections
for i, input in enumerate(inputs):


@ -0,0 +1,101 @@
from collections import OrderedDict
from pype.nuke import (
plugin,
lib as pnlib)
import nuke
class CreateWriteRender(plugin.PypeCreator):
# change this to template preset
name = "WriteRender"
label = "Create Write Render"
hosts = ["nuke"]
n_class = "write"
family = "render"
icon = "sign-out"
defaults = ["Main", "Mask"]
def __init__(self, *args, **kwargs):
super(CreateWriteRender, self).__init__(*args, **kwargs)
data = OrderedDict()
data["family"] = self.family
data["families"] = self.n_class
for k, v in self.data.items():
if k not in data.keys():
data.update({k: v})
self.data = data
self.nodes = nuke.selectedNodes()
self.log.debug("_ self.data: '{}'".format(self.data))
def process(self):
inputs = []
outputs = []
instance = nuke.toNode(self.data["subset"])
selected_node = None
# use selection
if (self.options or {}).get("useSelection"):
nodes = self.nodes
if not (len(nodes) < 2):
msg = ("Select only one node. "
"The node you want to connect to, "
"or tick off `Use selection`")
self.log.error(msg)
nuke.message(msg)
selected_node = nodes[0]
inputs = [selected_node]
outputs = selected_node.dependent()
if instance:
if (instance.name() in selected_node.name()):
selected_node = instance.dependencies()[0]
# if node already exist
if instance:
# collect input / outputs
inputs = instance.dependencies()
outputs = instance.dependent()
selected_node = inputs[0]
# remove old one
nuke.delete(instance)
# recreate new
write_data = {
"class": self.n_class,
"families": [self.family],
"avalon": self.data
}
if self.presets.get('fpath_template'):
self.log.info("Adding template path from preset")
write_data.update(
{"fpath_template": self.presets["fpath_template"]}
)
else:
self.log.info("Adding template path from plugin")
write_data.update({
"fpath_template": ("{work}/renders/nuke/{subset}"
"/{subset}.{frame}.{ext}")})
write_node = pnlib.create_write_node(
self.data["subset"],
write_data,
input=selected_node)
# relinking to collected connections
for i, input in enumerate(inputs):
write_node.setInput(i, input)
write_node.autoplace()
for output in outputs:
output.setInput(0, write_node)
return write_node


@ -92,6 +92,7 @@ class LoadMov(api.Loader):
"source",
"plate",
"render",
"prerender",
"review"] + presets["families"]
representations = [


@ -70,7 +70,7 @@ def loader_shift(node, frame, relative=True):
class LoadSequence(api.Loader):
"""Load image sequence into Nuke"""
families = ["render2d", "source", "plate", "render"]
families = ["render2d", "source", "plate", "render", "prerender"]
representations = ["exr", "dpx", "jpg", "jpeg", "png"]
label = "Load sequence"
@ -87,7 +87,7 @@ class LoadSequence(api.Loader):
version = context['version']
version_data = version.get("data", {})
repr_id = context["representation"]["_id"]
self.log.info("version_data: {}\n".format(version_data))
self.log.debug(
"Representation id `{}` ".format(repr_id))


@ -52,6 +52,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
# establish families
family = avalon_knob_data["family"]
families_ak = avalon_knob_data.get("families")
families = list()
# except disabled nodes but exclude backdrops in test
@ -68,16 +69,14 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
# Add all nodes in group instances.
if node.Class() == "Group":
# only alter families for render family
if ("render" in family):
# check if node is not disabled
families.append(avalon_knob_data["families"])
if "write" in families_ak:
if node["render"].value():
self.log.info("flagged for render")
add_family = "render.local"
add_family = "{}.local".format(family)
# dealing with local/farm rendering
if node["render_farm"].value():
self.log.info("adding render farm family")
add_family = "render.farm"
add_family = "{}.farm".format(family)
instance.data["transfer"] = False
families.append(add_family)
else:
@ -89,9 +88,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
instance.append(i)
node.end()
family = avalon_knob_data["family"]
families = list()
families_ak = avalon_knob_data.get("families")
self.log.debug("__ families: `{}`".format(families))
if families_ak:
families.append(families_ak)
@ -104,22 +101,6 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
resolution_height = format.height()
pixel_aspect = format.pixelAspect()
if node.Class() not in "Read":
if "render" not in node.knobs().keys():
pass
elif node["render"].value():
self.log.info("flagged for render")
add_family = "render.local"
# dealing with local/farm rendering
if node["render_farm"].value():
self.log.info("adding render farm family")
add_family = "render.farm"
instance.data["transfer"] = False
families.append(add_family)
else:
# add family into families
families.insert(0, family)
instance.data.update({
"subset": subset,
"asset": os.environ["AVALON_ASSET"],


@ -8,7 +8,7 @@ class CollectSlate(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.09
label = "Collect Slate Node"
hosts = ["nuke"]
families = ["write"]
families = ["render", "render.local", "render.farm"]
def process(self, instance):
node = instance[0]


@ -1,7 +1,6 @@
import os
import nuke
import pyblish.api
import pype.api as pype
@pyblish.api.log
@ -13,9 +12,11 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
hosts = ["nuke", "nukeassist"]
families = ["write"]
# preset attributes
sync_workfile_version = True
def process(self, instance):
# adding 2d focused rendering
instance.data["families"].append("render2d")
families = instance.data["families"]
node = None
for x in instance:
@ -53,10 +54,13 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
output_dir = os.path.dirname(path)
self.log.debug('output dir: {}'.format(output_dir))
# get version to instance for integration
instance.data['version'] = instance.context.data["version"]
if not next((f for f in families
if "prerender" in f),
None) and self.sync_workfile_version:
# get version to instance for integration
instance.data['version'] = instance.context.data["version"]
self.log.debug('Write Version: %s' % instance.data('version'))
self.log.debug('Write Version: %s' % instance.data('version'))
# create label
name = node.name()
@ -67,7 +71,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
int(last_frame)
)
if 'render' in instance.data['families']:
if [fm for fm in families
if fm in ["render", "prerender"]]:
if "representations" not in instance.data:
instance.data["representations"] = list()
@ -95,7 +100,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
# this will only run if slate frame is not already
# rendered from previews publishes
if "slate" in instance.data["families"] \
and (frame_length == collected_frames_len):
and (frame_length == collected_frames_len) \
and ("prerender" not in instance.data["families"]):
frame_slate_str = "%0{}d".format(
len(str(last_frame))) % (first_frame - 1)
slate_frame = collected_frames[0].replace(
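A worked example of the slate frame naming above, with assumed frame numbers; the truncated replace() presumably swaps the first frame number in the collected file name:

first_frame, last_frame = 1001, 1100
frame_slate_str = "%0{}d".format(len(str(last_frame))) % (first_frame - 1)   # "1000"

collected_frames = ["sh010_beauty.1001.exr"]
slate_frame = collected_frames[0].replace("1001", frame_slate_str)
# "sh010_beauty.1000.exr" - the slate sits one frame before the first render frame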
@ -124,6 +130,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
deadlinePriority = group_node["deadlinePriority"].value()
families = [f for f in instance.data["families"] if "write" not in f]
instance.data.update({
"versionData": version_data,
"path": path,
@ -144,4 +151,12 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
"deadlinePriority": deadlinePriority
})
if "prerender" in families:
instance.data.update({
"family": "prerender",
"families": []
})
self.log.debug("families: {}".format(families))
self.log.debug("instance.data: {}".format(instance.data))


@ -17,9 +17,11 @@ class NukeRenderLocal(pype.api.Extractor):
order = pyblish.api.ExtractorOrder
label = "Render Local"
hosts = ["nuke"]
families = ["render.local"]
families = ["render.local", "prerender.local"]
def process(self, instance):
families = instance.data["families"]
node = None
for x in instance:
if x.Class() == "Write":
@ -30,7 +32,7 @@ class NukeRenderLocal(pype.api.Extractor):
first_frame = instance.data.get("frameStartHandle", None)
# exception for slate workflow
if "slate" in instance.data["families"]:
if "slate" in families:
first_frame -= 1
last_frame = instance.data.get("frameEndHandle", None)
@ -53,7 +55,7 @@ class NukeRenderLocal(pype.api.Extractor):
)
# exception for slate workflow
if "slate" in instance.data["families"]:
if "slate" in families:
first_frame += 1
path = node['file'].value()
@ -79,8 +81,16 @@ class NukeRenderLocal(pype.api.Extractor):
out_dir
))
instance.data['family'] = 'render'
instance.data['families'].append('render')
# redefinition of families
if "render.local" in families:
instance.data['family'] = 'render2d'
families.remove('render.local')
families.insert(0, "render")
elif "prerender.local" in families:
instance.data['family'] = 'prerender'
families.remove('prerender.local')
families.insert(0, "prerender")
instance.data["families"] = families
collections, remainder = clique.assemble(collected_frames)
self.log.info('collections: {}'.format(str(collections)))


@ -3,7 +3,7 @@ import pyblish.api
from avalon.nuke import lib as anlib
from pype.nuke import lib as pnlib
import pype
reload(pnlib)
class ExtractReviewDataMov(pype.api.Extractor):
"""Extracts movie and thumbnail with baked in luts
@ -15,7 +15,7 @@ class ExtractReviewDataMov(pype.api.Extractor):
order = pyblish.api.ExtractorOrder + 0.01
label = "Extract Review Data Mov"
families = ["review", "render", "render.local"]
families = ["review"]
hosts = ["nuke"]
# presets


@ -9,6 +9,7 @@ class IncrementScriptVersion(pyblish.api.ContextPlugin):
order = pyblish.api.IntegratorOrder + 0.9
label = "Increment Script Version"
optional = True
families = ["workfile", "render", "render.local", "render.farm"]
hosts = ['nuke']
def process(self, context):
@ -16,19 +17,7 @@ class IncrementScriptVersion(pyblish.api.ContextPlugin):
assert all(result["success"] for result in context.data["results"]), (
"Publishing not succesfull so version is not increased.")
instances = context[:]
prerender_check = list()
families_check = list()
for instance in instances:
if ("prerender" in str(instance)) and instance.data.get("families", None):
prerender_check.append(instance)
if instance.data.get("families", None):
families_check.append(True)
if len(prerender_check) != len(families_check):
from pype.lib import version_up
path = context.data["currentFile"]
nuke.scriptSaveAs(version_up(path))
self.log.info('Incrementing script version')
from pype.lib import version_up
path = context.data["currentFile"]
nuke.scriptSaveAs(version_up(path))
self.log.info('Incrementing script version')


@ -19,7 +19,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
label = "Submit to Deadline"
order = pyblish.api.IntegratorOrder + 0.1
hosts = ["nuke", "nukestudio"]
families = ["render.farm"]
families = ["render.farm", "prerender.farm"]
optional = True
deadline_priority = 50
@ -28,6 +28,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
deadline_chunk_size = 1
def process(self, instance):
families = instance.data["families"]
node = instance[0]
context = instance.context
@ -82,6 +83,15 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
instance.data["deadlineSubmissionJob"] = resp.json()
instance.data["publishJobState"] = "Suspended"
# redefinition of families
if "render.farm" in families:
instance.data['family'] = 'write'
families.insert(0, "render2d")
elif "prerender.farm" in families:
instance.data['family'] = 'write'
families.insert(0, "prerender")
instance.data["families"] = families
def payload_submit(self,
instance,
script_path,


@ -28,7 +28,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
""" Validates file output. """
order = pyblish.api.ValidatorOrder + 0.1
families = ["render"]
families = ["render", "prerender"]
label = "Validate rendered frame"
hosts = ["nuke", "nukestudio"]


@ -74,17 +74,14 @@ class ValidateScript(pyblish.api.InstancePlugin):
if "handleEnd" in asset_attributes:
handle_end = asset_attributes["handleEnd"]
# Set frame range with handles
# asset_attributes["frameStart"] -= handle_start
# asset_attributes["frameEnd"] += handle_end
if len(str(asset_attributes["fps"])) > 4:
asset_attributes["fps"] = float("{0:.8f}".format(asset_attributes["fps"]))
asset_attributes["fps"] = float("{0:.4f}".format(
asset_attributes["fps"]))
# Get values from nukescript
script_attributes = {
"handleStart": ctx_data["handleStart"],
"handleEnd": ctx_data["handleEnd"],
"fps": ctx_data["fps"],
"fps": float("{0:.4f}".format(ctx_data["fps"])),
"frameStart": ctx_data["frameStart"],
"frameEnd": ctx_data["frameEnd"],
"resolutionWidth": ctx_data["resolutionWidth"],


@ -47,6 +47,16 @@ class CollectClips(api.ContextPlugin):
track = item.parent()
source = item.source().mediaSource()
source_path = source.firstpath()
file_head = source.filenameHead()
file_info = next((f for f in source.fileinfos()), None)
source_first_frame = file_info.startFrame()
is_sequence = False
if not source.singleFile():
self.log.info("Single file")
is_sequence = True
source_path = file_info.filename()
effects = [f for f in item.linkedItems()
if f.isEnabled()
if isinstance(f, hiero.core.EffectTrackItem)]
@ -78,12 +88,6 @@ class CollectClips(api.ContextPlugin):
)
)
try:
head, padding, ext = os.path.basename(source_path).split(".")
source_first_frame = int(padding)
except Exception:
source_first_frame = 0
data.update({
"name": "{0}_{1}".format(track.name(), item.name()),
"item": item,
@ -91,6 +95,8 @@ class CollectClips(api.ContextPlugin):
"timecodeStart": str(source.timecodeStart()),
"timelineTimecodeStart": str(sequence.timecodeStart()),
"sourcePath": source_path,
"sourceFileHead": file_head,
"isSequence": is_sequence,
"track": track.name(),
"trackIndex": track_index,
"sourceFirst": source_first_frame,
@ -101,8 +107,9 @@ class CollectClips(api.ContextPlugin):
int(item.sourceIn())) + 1,
"clipIn": int(item.timelineIn()),
"clipOut": int(item.timelineOut()),
"clipDuration": (int(item.timelineOut()) -
int(item.timelineIn())) + 1,
"clipDuration": (
int(item.timelineOut()) - int(
item.timelineIn())) + 1,
"asset": asset,
"family": "clip",
"families": [],


@ -147,22 +147,15 @@ class CollectPlatesData(api.InstancePlugin):
"version": version
})
source_first_frame = instance.data.get("sourceFirst")
source_file_head = instance.data.get("sourceFileHead")
try:
basename, ext = os.path.splitext(source_file)
head, padding = os.path.splitext(basename)
ext = ext[1:]
padding = padding[1:]
self.log.debug("_ padding: `{}`".format(padding))
# head, padding, ext = source_file.split('.')
source_first_frame = int(padding)
padding = len(padding)
file = "{head}.%0{padding}d.{ext}".format(
head=head,
padding=padding,
ext=ext
)
if instance.data.get("isSequence", False):
self.log.info("Is sequence of files")
file = os.path.basename(source_file)
ext = os.path.splitext(file)[-1][1:]
self.log.debug("source_file_head: `{}`".format(source_file_head))
head = source_file_head[:-1]
start_frame = int(source_first_frame + instance.data["sourceInH"])
duration = int(
instance.data["sourceOutH"] - instance.data["sourceInH"])
@ -170,10 +163,10 @@ class CollectPlatesData(api.InstancePlugin):
self.log.debug("start_frame: `{}`".format(start_frame))
self.log.debug("end_frame: `{}`".format(end_frame))
files = [file % i for i in range(start_frame, (end_frame + 1), 1)]
except Exception as e:
self.log.warning("Exception in file: {}".format(e))
head, ext = os.path.splitext(source_file)
ext = ext[1:]
else:
self.log.info("Is single file")
ext = os.path.splitext(source_file)[-1][1:]
head = source_file_head
files = source_file
start_frame = instance.data["sourceInH"]
end_frame = instance.data["sourceOutH"]