Merge pull request #916 from ynput/bugfix/#895_fix_extract_otio_review

Fix extract_otio_review extraction logic for corner-case media
Jakub Trllo 2024-10-02 11:30:19 +02:00 committed by GitHub
commit 1072fc963f
8 changed files with 2280 additions and 106 deletions


@@ -88,7 +88,7 @@ def trim_media_range(media_range, source_range):
"""
rw_media_start = _ot.RationalTime(
media_range.start_time.value + source_range.start_time.value,
source_range.start_time.value,
media_range.start_time.rate
)
rw_media_duration = _ot.RationalTime(
@@ -173,6 +173,80 @@ def _sequence_resize(source, length):
yield (1 - ratio) * source[int(low)] + ratio * source[int(high)]
def is_clip_from_media_sequence(otio_clip):
"""
Args:
otio_clip (otio.schema.Clip): The OTIO clip to check.
Returns:
bool: True if the provided clip is from an input media sequence.
"""
media_ref = otio_clip.media_reference
metadata = media_ref.metadata
# OpenTimelineIO 0.13 and newer
is_input_sequence = (
hasattr(otio.schema, "ImageSequenceReference") and
isinstance(media_ref, otio.schema.ImageSequenceReference)
)
# OpenTimelineIO 0.12 and older
is_input_sequence_legacy = bool(metadata.get("padding"))
return is_input_sequence or is_input_sequence_legacy
def remap_range_on_file_sequence(otio_clip, in_out_range):
"""
Args:
otio_clip (otio.schema.Clip): The OTIO clip to check.
in_out_range (tuple[float, float]): The in-out range to remap.
Returns:
tuple(int, int): The remapped range as discrete frame numbers.
Raises:
ValueError: When the otio_clip or the provided range is invalid.
"""
if not is_clip_from_media_sequence(otio_clip):
raise ValueError(f"Cannot map on non-file sequence clip {otio_clip}.")
try:
media_in_trimmed, media_out_trimmed = in_out_range
except ValueError as error:
raise ValueError("Invalid in_out_range provided.") from error
media_ref = otio_clip.media_reference
available_range = otio_clip.available_range()
source_range = otio_clip.source_range
available_range_rate = available_range.start_time.rate
media_in = available_range.start_time.value
# Temporary.
# Some AYON custom OTIO exporters were implemented with a relative
# source range for image sequences. The following code maintains
# backward compatibility by adjusting media_in
# until those exporters are updated.
if (
is_clip_from_media_sequence(otio_clip)
and otio_clip.available_range().start_time.to_frames() == media_ref.start_frame
and source_range.start_time.to_frames() < media_ref.start_frame
):
media_in = 0
frame_in = otio.opentime.RationalTime.from_frames(
media_in_trimmed - media_in + media_ref.start_frame,
rate=available_range_rate,
).to_frames()
frame_out = otio.opentime.RationalTime.from_frames(
media_out_trimmed - media_in + media_ref.start_frame,
rate=available_range_rate,
).to_frames()
return frame_in, frame_out
def get_media_range_with_retimes(otio_clip, handle_start, handle_end):
source_range = otio_clip.source_range
available_range = otio_clip.available_range()
@@ -182,10 +256,7 @@ def get_media_range_with_retimes(otio_clip, handle_start, handle_end):
# mediaIn/mediaOut have to correspond
# to frame numbers from source sequence.
media_ref = otio_clip.media_reference
is_input_sequence = (
hasattr(otio.schema, "ImageSequenceReference")
and isinstance(media_ref, otio.schema.ImageSequenceReference)
)
is_input_sequence = is_clip_from_media_sequence(otio_clip)
# Temporary.
# Some AYON custom OTIO exporters were implemented with a relative
@@ -296,17 +367,15 @@ def get_media_range_with_retimes(otio_clip, handle_start, handle_end):
media_in = available_range.start_time.value
media_out = available_range.end_time_inclusive().value
# If media source is an image sequence, returned
# mediaIn/mediaOut have to correspond
# to frame numbers from source sequence.
if is_input_sequence:
# preserve discrete frame numbers
media_in_trimmed = otio.opentime.RationalTime.from_frames(
media_in_trimmed - media_in + media_ref.start_frame,
rate=available_range_rate,
).to_frames()
media_out_trimmed = otio.opentime.RationalTime.from_frames(
media_out_trimmed - media_in + media_ref.start_frame,
rate=available_range_rate,
).to_frames()
media_in_trimmed, media_out_trimmed = remap_range_on_file_sequence(
otio_clip,
(media_in_trimmed, media_out_trimmed)
)
media_in = media_ref.start_frame
media_out = media_in + available_range.duration.to_frames() - 1
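For illustration, a minimal usage sketch of the two helpers added above (is_clip_from_media_sequence and remap_range_on_file_sequence). The fixture path and the trimmed in/out values are hypothetical; it assumes a clip backed by an ImageSequenceReference starting at frame 1000 with an embedded one-hour timecode at 24 fps:

import opentimelineio as otio
from ayon_core.pipeline.editorial import (
    is_clip_from_media_sequence,
    remap_range_on_file_sequence,
)

# Clip backed by output.1000.exr ... output.1100.exr, available range 87399-87500.
clip = otio.schema.Clip.from_json_file("img_seq_embedded_tc_review.json")

if is_clip_from_media_sequence(clip):
    # Remap a trimmed media range (expressed in the available range time base)
    # to the discrete frame numbers that exist on disk.
    first, last = remap_range_on_file_sequence(clip, (87409.0, 87490.0))
    # first == 1010, last == 1091 (87409 - 87399 + 1000, 87490 - 87399 + 1000)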


@@ -58,7 +58,9 @@ class ExtractOTIOReview(publish.Extractor):
import opentimelineio as otio
from ayon_core.pipeline.editorial import (
otio_range_to_frame_range,
make_sequence_collection
make_sequence_collection,
remap_range_on_file_sequence,
is_clip_from_media_sequence
)
# TODO refactor from using instance variable
@@ -108,39 +110,61 @@ class ExtractOTIOReview(publish.Extractor):
# get from media reference metadata source
# TODO 'openpype' prefix should be removed (added 24/09/03)
# NOTE it looks like it is set only in hiero integration
for key in {"ayon.source.width", "openpype.source.width"}:
value = media_metadata.get(key)
if value is not None:
width = int(value)
break
for key in {"ayon.source.height", "openpype.source.height"}:
value = media_metadata.get(key)
if value is not None:
height = int(value)
break
# compare and reset
if width != self.to_width:
self.to_width = width
if height != self.to_height:
self.to_height = height
res_data = {"width": self.to_width, "height": self.to_height}
for key in res_data:
for meta_prefix in ("ayon.source.", "openpype.source."):
meta_key = f"{meta_prefix}{key}"
value = media_metadata.get(meta_key)
if value is not None:
res_data[key] = value
break
self.to_width, self.to_height = res_data["width"], res_data["height"]
self.log.debug("> self.to_width x self.to_height: {} x {}".format(
self.to_width, self.to_height
))
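A small standalone sketch of the resolution lookup above; the metadata dictionary contents are hypothetical and the defaults stand in for the plugin's to_width/to_height:

# Metadata keys as written by the Hiero integration (see the NOTE above).
media_metadata = {
    "ayon.source.width": 956,
    "ayon.source.height": 720,
}

res_data = {"width": 1280, "height": 720}
for key in res_data:
    for meta_prefix in ("ayon.source.", "openpype.source."):
        value = media_metadata.get(f"{meta_prefix}{key}")
        if value is not None:
            res_data[key] = value
            break
# res_data == {"width": 956, "height": 720}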
# get frame range values
# Clip: compute process range from available media range.
src_range = r_otio_cl.source_range
start = src_range.start_time.value
duration = src_range.duration.value
available_range = None
self.actual_fps = src_range.duration.rate
# add available range only if not gap
if isinstance(r_otio_cl, otio.schema.Clip):
available_range = r_otio_cl.available_range()
processing_range = None
self.actual_fps = available_range.duration.rate
start = src_range.start_time.rescaled_to(self.actual_fps)
duration = src_range.duration.rescaled_to(self.actual_fps)
# Temporary.
# Some AYON custom OTIO exporters were implemented with a relative
# source range for image sequences. The following code maintains
# backward compatibility by adjusting the available range
# until those exporters are updated.
media_ref = r_otio_cl.media_reference
if (
is_clip_from_media_sequence(r_otio_cl)
and available_range.start_time.to_frames() == media_ref.start_frame
and src_range.start_time.to_frames() < media_ref.start_frame
):
available_range = otio.opentime.TimeRange(
otio.opentime.RationalTime(0, rate=self.actual_fps),
available_range.duration,
)
# Gap: no media, generate range based on source range
else:
available_range = processing_range = None
self.actual_fps = src_range.duration.rate
start = src_range.start_time
duration = src_range.duration
# Create handle offsets.
handle_start = otio.opentime.RationalTime(
handle_start,
rate=self.actual_fps,
)
handle_end = otio.opentime.RationalTime(
handle_end,
rate=self.actual_fps,
)
# reframing handles conditions
if (len(otio_review_clips) > 1) and (index == 0):
@@ -157,35 +181,33 @@ class ExtractOTIOReview(publish.Extractor):
duration += (handle_start + handle_end)
if available_range:
available_range = self._trim_available_range(
available_range, start, duration, self.actual_fps)
processing_range = self._trim_available_range(
available_range, start, duration)
# process all track items of the track
if isinstance(r_otio_cl, otio.schema.Clip):
# process Clip
media_ref = r_otio_cl.media_reference
metadata = media_ref.metadata
is_sequence = None
# check in two way if it is sequence
if hasattr(otio.schema, "ImageSequenceReference"):
# for OpenTimelineIO 0.13 and newer
if isinstance(media_ref,
otio.schema.ImageSequenceReference):
is_sequence = True
else:
# for OpenTimelineIO 0.12 and older
if metadata.get("padding"):
is_sequence = True
is_sequence = is_clip_from_media_sequence(r_otio_cl)
# File sequence way
if is_sequence:
# file sequence way
# Remap processing range to input file sequence.
processing_range_as_frames = (
processing_range.start_time.to_frames(),
processing_range.end_time_inclusive().to_frames()
)
first, last = remap_range_on_file_sequence(
r_otio_cl,
processing_range_as_frames,
)
input_fps = processing_range.start_time.rate
if hasattr(media_ref, "target_url_base"):
dirname = media_ref.target_url_base
head = media_ref.name_prefix
tail = media_ref.name_suffix
first, last = otio_range_to_frame_range(
available_range)
collection = clique.Collection(
head=head,
tail=tail,
@@ -195,7 +217,7 @@ class ExtractOTIOReview(publish.Extractor):
[i for i in range(first, (last + 1))])
# render segment
self._render_seqment(
sequence=[dirname, collection])
sequence=[dirname, collection, input_fps])
# generate used frames
self._generate_used_frames(
len(collection.indexes))
@@ -204,24 +226,38 @@ class ExtractOTIOReview(publish.Extractor):
# `ImageSequenceReference`
path = media_ref.target_url
collection_data = make_sequence_collection(
path, available_range, metadata)
path, processing_range, metadata)
dir_path, collection = collection_data
# render segment
self._render_seqment(
sequence=[dir_path, collection])
sequence=[dir_path, collection, input_fps])
# generate used frames
self._generate_used_frames(
len(collection.indexes))
# Single video way.
# Extraction via FFmpeg.
else:
# single video file way
path = media_ref.target_url
# Set extract range from 0 (FFmpeg ignores embedded timecode).
extract_range = otio.opentime.TimeRange(
otio.opentime.RationalTime(
(
processing_range.start_time.value
- available_range.start_time.value
),
rate=available_range.start_time.rate,
),
duration=processing_range.duration,
)
# render video file to sequence
self._render_seqment(
video=[path, available_range])
video=[path, extract_range])
# generate used frames
self._generate_used_frames(
available_range.duration.value)
processing_range.duration.value)
# QUESTION: what if nested track composition is in place?
else:
# at last process a Gap
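To illustrate the extract-range offset above: with the qt_embedded_tc_review.json fixture the available range starts at the embedded one-hour timecode, so the processing range is shifted back to zero before being handed to FFmpeg. The variable names below exist only for this sketch:

import opentimelineio as otio

available_start = otio.opentime.RationalTime(86400, 24)   # 1h embedded timecode
processing_start = otio.opentime.RationalTime(86404, 24)  # source in 86414 minus 10 handle frames
duration = otio.opentime.RationalTime(88, 24)             # 68 source frames + 20 handle frames

extract_range = otio.opentime.TimeRange(
    otio.opentime.RationalTime(
        processing_start.value - available_start.value,
        rate=available_start.rate,
    ),
    duration=duration,
)
# extract_range starts at frame 4 (~0.1667 s), which matches the
# "-ss 0.1666... -t 3.6666..." arguments asserted in the new tests.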
@@ -276,7 +312,7 @@ class ExtractOTIOReview(publish.Extractor):
})
return representation_data
def _trim_available_range(self, avl_range, start, duration, fps):
def _trim_available_range(self, avl_range, start, duration):
"""
Trim available media range to source range.
@@ -285,57 +321,60 @@ class ExtractOTIOReview(publish.Extractor):
Args:
avl_range (otio.time.TimeRange): media available time range
start (int): start frame
duration (int): duration frames
fps (float): frame rate
start (otio.time.RationalTime): start
duration (otio.time.RationalTime): duration
Returns:
otio.time.TimeRange: trimmed available range
"""
# Not all hosts can import these modules.
import opentimelineio as otio
from ayon_core.pipeline.editorial import (
trim_media_range,
range_from_frames
)
avl_start = int(avl_range.start_time.value)
src_start = int(avl_start + start)
avl_durtation = int(avl_range.duration.value)
avl_start = avl_range.start_time
avl_duration = avl_range.duration
self.need_offset = bool(avl_start != 0 and src_start != 0)
# if media start is les then clip requires
if src_start < avl_start:
# calculate gap
gap_duration = avl_start - src_start
# create gap data to disk
self._render_seqment(gap=gap_duration)
# generate used frames
self._generate_used_frames(gap_duration)
# fix start and end to correct values
start = 0
# An additional gap is required before the available
# range to conform to the source start point and head handles.
if start < avl_start:
gap_duration = avl_start - start
start = avl_start
duration -= gap_duration
# if media duration is shorter then clip requirement
if duration > avl_durtation:
# calculate gap
gap_start = int(src_start + avl_durtation)
gap_end = int(src_start + duration)
gap_duration = gap_end - gap_start
# create gap data to disk
self._render_seqment(gap=gap_duration.round().to_frames())
# generate used frames
self._generate_used_frames(gap_duration.round().to_frames())
# An additional gap is required after the available
# range to conform to source end point + tail handles
# (media duration is shorter than the clip requirement).
end_point = start + duration
avl_end_point = avl_range.end_time_exclusive()
if end_point > avl_end_point:
gap_duration = end_point - avl_end_point
duration -= gap_duration
# create gap data to disk
self._render_seqment(gap=gap_duration, end_offset=avl_durtation)
self._render_seqment(
gap=gap_duration.round().to_frames(),
end_offset=duration.to_frames()
)
# generate used frames
self._generate_used_frames(gap_duration, end_offset=avl_durtation)
# fix duration lenght
duration = avl_durtation
self._generate_used_frames(
gap_duration.round().to_frames(),
end_offset=duration.to_frames()
)
# return correct trimmed range
return trim_media_range(
avl_range, range_from_frames(start, duration, fps)
avl_range,
otio.opentime.TimeRange(
start,
duration
)
)
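A worked example of the head/tail gap logic above, using the numbers from the img_seq_review.json fixture (91 source frames at 24 fps over a 25 fps available range of 0-101, padded with 10-frame handles). This is only a sketch, not part of the plugin:

import opentimelineio as otio

fps = 25.0
avl_range = otio.opentime.TimeRange(
    otio.opentime.RationalTime(0, fps),
    otio.opentime.RationalTime(101, fps),
)
start = otio.opentime.RationalTime(5, 24.0).rescaled_to(fps)      # ~5.21
duration = otio.opentime.RationalTime(91, 24.0).rescaled_to(fps)  # ~94.79
start -= otio.opentime.RationalTime(10, fps)     # head handle
duration += otio.opentime.RationalTime(20, fps)  # head + tail handles

head_gap = avl_range.start_time - start                          # ~4.79 -> 5 black head frames
tail_gap = (start + duration) - avl_range.end_time_exclusive()   # 9.0  -> 9 black tail frames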
def _render_seqment(self, sequence=None,
@@ -347,7 +386,7 @@ class ExtractOTIOReview(publish.Extractor):
to defined image sequence format.
Args:
sequence (list): input dir path string, collection object in list
sequence (list): input dir path string, collection object, fps in list
video (list)[optional]: video_path string, otio_range in list
gap (int)[optional]: gap duration
end_offset (int)[optional]: offset gap frame start in frames
@@ -369,7 +408,7 @@ class ExtractOTIOReview(publish.Extractor):
input_extension = None
if sequence:
input_dir, collection = sequence
input_dir, collection, sequence_fps = sequence
in_frame_start = min(collection.indexes)
# converting image sequence to image sequence
@@ -377,9 +416,22 @@ class ExtractOTIOReview(publish.Extractor):
input_path = os.path.join(input_dir, input_file)
input_extension = os.path.splitext(input_path)[-1]
# form command for rendering gap files
# form command for rendering sequence files
# (the input frame rate needs to be set explicitly
# in case the input sequence carries framerate metadata,
# to preserve the frame range and avoid silently dropped
# frames caused by an input mismatch with the FFmpeg default
# rate of 25.0 fps; for more info refer to the FFmpeg image2 demuxer)
#
# Implicit
# [Input 100 frames (24fps from metadata)] -> [Demuxer video 25fps] -> [Output 98 frames, dropped 2]
#
# Explicit with "-framerate"
# [Input 100 frames (24fps from metadata)] -> [Demuxer video 24fps] -> [Output 100 frames]
command.extend([
"-start_number", str(in_frame_start),
"-framerate", str(sequence_fps),
"-i", input_path
])
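As a sketch of the resulting input arguments for the exr fixture (executable path abbreviated, values taken from the new test expectations):

in_frame_start = 1000
sequence_fps = 24.0
input_path = "C:\\exr_embedded_tc\\output.%04d.exr"

command = ["/path/to/ffmpeg"]
command.extend([
    "-start_number", str(in_frame_start),
    "-framerate", str(sequence_fps),  # without this, image2 assumes 25 fps and silently drops frames
    "-i", input_path,
])
# command[1:] == ["-start_number", "1000", "-framerate", "24.0", "-i", input_path]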
@@ -454,16 +506,11 @@ class ExtractOTIOReview(publish.Extractor):
padding = "{{:0{}d}}".format(self.padding)
# create frame offset
offset = 0
if self.need_offset:
offset = 1
if end_offset:
new_frames = list()
start_frame = self.used_frames[-1]
for index in range((end_offset + offset),
(int(end_offset + duration) + offset)):
for index in range(end_offset,
(int(end_offset + duration))):
seq_number = padding.format(start_frame + index)
self.log.debug(
"index: `{}` | seq_number: `{}`".format(index, seq_number))


@@ -0,0 +1,363 @@
{
"OTIO_SCHEMA": "Clip.2",
"metadata": {
"Resolve_OTIO": {}
},
"name": "output.[1000-1100].exr",
"source_range": {
"OTIO_SCHEMA": "TimeRange.1",
"duration": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 101.0
},
"start_time": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 87399.0
}
},
"effects": [
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Transform",
"Enabled": true,
"Name": "Transform",
"Parameters": [],
"Type": 2
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Cropping",
"Enabled": true,
"Name": "Cropping",
"Parameters": [],
"Type": 3
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Dynamic Zoom",
"Enabled": false,
"Name": "Dynamic Zoom",
"Parameters": [
{
"Default Parameter Value": [
0.0,
0.0
],
"Key Frames": {
"0": {
"Value": [
0.0,
0.0
],
"Variant Type": "POINTF"
},
"1000": {
"Value": [
0.0,
0.0
],
"Variant Type": "POINTF"
}
},
"Parameter ID": "dynamicZoomCenter",
"Parameter Value": [
0.0,
0.0
],
"Variant Type": "POINTF"
},
{
"Default Parameter Value": 1.0,
"Key Frames": {
"0": {
"Value": 0.8,
"Variant Type": "Double"
},
"1000": {
"Value": 1.0,
"Variant Type": "Double"
}
},
"Parameter ID": "dynamicZoomScale",
"Parameter Value": 1.0,
"Variant Type": "Double",
"maxValue": 100.0,
"minValue": 0.01
}
],
"Type": 59
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Composite",
"Enabled": true,
"Name": "Composite",
"Parameters": [],
"Type": 1
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Lens Correction",
"Enabled": true,
"Name": "Lens Correction",
"Parameters": [],
"Type": 43
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Retime and Scaling",
"Enabled": true,
"Name": "Retime and Scaling",
"Parameters": [],
"Type": 22
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Video Faders",
"Enabled": true,
"Name": "Video Faders",
"Parameters": [],
"Type": 36
}
},
"name": "",
"effect_name": "Resolve Effect"
}
],
"markers": [
{
"OTIO_SCHEMA": "Marker.2",
"metadata": {
"Resolve_OTIO": {
"Keywords": [],
"Note": "{\"resolve_sub_products\": {\"io.ayon.creators.resolve.shot\": {\"id\": \"pyblish.avalon.instance\", \"productType\": \"shot\", \"productName\": \"shotMain\", \"active\": true, \"creator_identifier\": \"io.ayon.creators.resolve.shot\", \"variant\": \"Main\", \"folderPath\": \"/shots/seq_img_tc_handles_out/sh010\", \"task\": \"Generic\", \"clip_variant\": \"<track_name>\", \"clip_index\": \"adca7e5b-b53c-48ab-8469-abe4db3c276a\", \"clip_source_resolution\": {\"width\": \"956\", \"height\": \"720\", \"pixelAspect\": 1.0}, \"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_img_tc_handles_out\", \"track\": \"{_track_}\", \"shot\": \"sh###\", \"hierarchy\": \"shots/seq_img_tc_handles_out\", \"sourceResolution\": true, \"workfileFrameStart\": 1001, \"handleStart\": 10, \"handleEnd\": 10, \"parents\": [{\"folder_type\": \"folder\", \"entity_name\": \"shots\"}, {\"folder_type\": \"sequence\", \"entity_name\": \"seq_img_tc_handles_out\"}], \"hierarchyData\": {\"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_img_tc_handles_out\", \"track\": \"Video1\", \"shot\": \"sh010\"}, \"heroTrack\": true, \"uuid\": \"fca94ed7-1e74-4ddc-8d56-05696e8c472a\", \"reviewTrack\": \"Video1\", \"label\": \"/shots/seq_img_tc_handles_out/sh010 shot\", \"has_promised_context\": true, \"newHierarchyIntegration\": true, \"newAssetPublishing\": true, \"instance_id\": \"6c2baba3-183c-41f0-b9a9-596d315fd162\", \"creator_attributes\": {\"folderPath\": \"/shots/seq_img_tc_handles_out/sh010\", \"workfileFrameStart\": 1001, \"handleStart\": 10, \"handleEnd\": 10, \"frameStart\": 1001, \"frameEnd\": 1102, \"clipIn\": 86524, \"clipOut\": 86625, \"clipDuration\": 101, \"sourceIn\": 0, \"sourceOut\": 101, \"fps\": \"from_selection\"}, \"publish_attributes\": {\"CollectSlackFamilies\": {\"additional_message\": \"\"}}}, \"io.ayon.creators.resolve.plate\": {\"id\": \"pyblish.avalon.instance\", \"productType\": \"plate\", \"productName\": \"plateVideo1\", \"active\": true, \"creator_identifier\": \"io.ayon.creators.resolve.plate\", \"variant\": \"Video1\", \"folderPath\": \"/shots/seq_img_tc_handles_out/sh010\", \"task\": \"Generic\", \"clip_variant\": \"<track_name>\", \"clip_index\": \"adca7e5b-b53c-48ab-8469-abe4db3c276a\", \"clip_source_resolution\": {\"width\": \"956\", \"height\": \"720\", \"pixelAspect\": 1.0}, \"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_img_tc_handles_out\", \"track\": \"{_track_}\", \"shot\": \"sh###\", \"hierarchy\": \"shots/seq_img_tc_handles_out\", \"sourceResolution\": true, \"workfileFrameStart\": 1001, \"handleStart\": 10, \"handleEnd\": 10, \"parents\": [{\"folder_type\": \"folder\", \"entity_name\": \"shots\"}, {\"folder_type\": \"sequence\", \"entity_name\": \"seq_img_tc_handles_out\"}], \"hierarchyData\": {\"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_img_tc_handles_out\", \"track\": \"Video1\", \"shot\": \"sh010\"}, \"heroTrack\": true, \"uuid\": \"fca94ed7-1e74-4ddc-8d56-05696e8c472a\", \"reviewTrack\": \"Video1\", \"parent_instance_id\": \"6c2baba3-183c-41f0-b9a9-596d315fd162\", \"label\": \"/shots/seq_img_tc_handles_out/sh010 plate\", \"has_promised_context\": true, \"newHierarchyIntegration\": true, \"newAssetPublishing\": true, \"instance_id\": \"8b1f1e6f-699a-4481-b9be-92d819bc4096\", \"creator_attributes\": {\"parentInstance\": \"/shots/seq_img_tc_handles_out/sh010 shot\", \"vSyncOn\": true, \"vSyncTrack\": \"Video1\"}, \"publish_attributes\": {\"CollectSlackFamilies\": {\"additional_message\": \"\"}}}}, 
\"clip_index\": \"adca7e5b-b53c-48ab-8469-abe4db3c276a\", \"publish\": true}"
},
"clip_index": "adca7e5b-b53c-48ab-8469-abe4db3c276a",
"publish": true,
"resolve_sub_products": {
"io.ayon.creators.resolve.plate": {
"active": true,
"clip_index": "adca7e5b-b53c-48ab-8469-abe4db3c276a",
"clip_source_resolution": {
"height": "720",
"pixelAspect": 1.0,
"width": "956"
},
"clip_variant": "<track_name>",
"creator_attributes": {
"parentInstance": "/shots/seq_img_tc_handles_out/sh010 shot",
"vSyncOn": true,
"vSyncTrack": "Video1"
},
"creator_identifier": "io.ayon.creators.resolve.plate",
"episode": "ep01",
"folder": "shots",
"folderPath": "/shots/seq_img_tc_handles_out/sh010",
"handleEnd": 10,
"handleStart": 10,
"has_promised_context": true,
"heroTrack": true,
"hierarchy": "shots/seq_img_tc_handles_out",
"hierarchyData": {
"episode": "ep01",
"folder": "shots",
"sequence": "seq_img_tc_handles_out",
"shot": "sh010",
"track": "Video1"
},
"id": "pyblish.avalon.instance",
"instance_id": "8b1f1e6f-699a-4481-b9be-92d819bc4096",
"label": "/shots/seq_img_tc_handles_out/sh010 plate",
"newAssetPublishing": true,
"newHierarchyIntegration": true,
"parent_instance_id": "6c2baba3-183c-41f0-b9a9-596d315fd162",
"parents": [
{
"entity_name": "shots",
"folder_type": "folder"
},
{
"entity_name": "seq_img_tc_handles_out",
"folder_type": "sequence"
}
],
"productName": "plateVideo1",
"productType": "plate",
"publish_attributes": {
"CollectSlackFamilies": {
"additional_message": ""
}
},
"reviewTrack": "Video1",
"sequence": "seq_img_tc_handles_out",
"shot": "sh###",
"sourceResolution": true,
"task": "Generic",
"track": "{_track_}",
"uuid": "fca94ed7-1e74-4ddc-8d56-05696e8c472a",
"variant": "Video1",
"workfileFrameStart": 1001
},
"io.ayon.creators.resolve.shot": {
"active": true,
"clip_index": "adca7e5b-b53c-48ab-8469-abe4db3c276a",
"clip_source_resolution": {
"height": "720",
"pixelAspect": 1.0,
"width": "956"
},
"clip_variant": "<track_name>",
"creator_attributes": {
"clipDuration": 101,
"clipIn": 86524,
"clipOut": 86625,
"folderPath": "/shots/seq_img_tc_handles_out/sh010",
"fps": "from_selection",
"frameEnd": 1102,
"frameStart": 1001,
"handleEnd": 10,
"handleStart": 10,
"sourceIn": 0,
"sourceOut": 101,
"workfileFrameStart": 1001
},
"creator_identifier": "io.ayon.creators.resolve.shot",
"episode": "ep01",
"folder": "shots",
"folderPath": "/shots/seq_img_tc_handles_out/sh010",
"handleEnd": 10,
"handleStart": 10,
"has_promised_context": true,
"heroTrack": true,
"hierarchy": "shots/seq_img_tc_handles_out",
"hierarchyData": {
"episode": "ep01",
"folder": "shots",
"sequence": "seq_img_tc_handles_out",
"shot": "sh010",
"track": "Video1"
},
"id": "pyblish.avalon.instance",
"instance_id": "6c2baba3-183c-41f0-b9a9-596d315fd162",
"label": "/shots/seq_img_tc_handles_out/sh010 shot",
"newAssetPublishing": true,
"newHierarchyIntegration": true,
"parents": [
{
"entity_name": "shots",
"folder_type": "folder"
},
{
"entity_name": "seq_img_tc_handles_out",
"folder_type": "sequence"
}
],
"productName": "shotMain",
"productType": "shot",
"publish_attributes": {
"CollectSlackFamilies": {
"additional_message": ""
}
},
"reviewTrack": "Video1",
"sequence": "seq_img_tc_handles_out",
"shot": "sh###",
"sourceResolution": true,
"task": "Generic",
"track": "{_track_}",
"uuid": "fca94ed7-1e74-4ddc-8d56-05696e8c472a",
"variant": "Main",
"workfileFrameStart": 1001
}
}
},
"name": "AyonData",
"color": "GREEN",
"marked_range": {
"OTIO_SCHEMA": "TimeRange.1",
"duration": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 1.0
},
"start_time": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 87449.0
}
},
"comment": ""
}
],
"enabled": true,
"media_references": {
"DEFAULT_MEDIA": {
"OTIO_SCHEMA": "ImageSequenceReference.1",
"metadata": {},
"name": "output.[1000-1100].exr",
"available_range": {
"OTIO_SCHEMA": "TimeRange.1",
"duration": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 101.0
},
"start_time": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 87399.0
}
},
"available_image_bounds": null,
"target_url_base": "C:\\exr_embedded_tc",
"name_prefix": "output.",
"name_suffix": ".exr",
"start_frame": 1000,
"frame_step": 1,
"rate": 24.0,
"frame_zero_padding": 4,
"missing_frame_policy": "error"
}
},
"active_media_reference_key": "DEFAULT_MEDIA"
}


@@ -0,0 +1,363 @@
{
"OTIO_SCHEMA": "Clip.2",
"metadata": {
"Resolve_OTIO": {}
},
"name": "output.[1000-1100].tif",
"source_range": {
"OTIO_SCHEMA": "TimeRange.1",
"duration": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 91.0
},
"start_time": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 5.0
}
},
"effects": [
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Transform",
"Enabled": true,
"Name": "Transform",
"Parameters": [],
"Type": 2
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Cropping",
"Enabled": true,
"Name": "Cropping",
"Parameters": [],
"Type": 3
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Dynamic Zoom",
"Enabled": false,
"Name": "Dynamic Zoom",
"Parameters": [
{
"Default Parameter Value": [
0.0,
0.0
],
"Key Frames": {
"-5": {
"Value": [
0.0,
0.0
],
"Variant Type": "POINTF"
},
"955": {
"Value": [
0.0,
0.0
],
"Variant Type": "POINTF"
}
},
"Parameter ID": "dynamicZoomCenter",
"Parameter Value": [
0.0,
0.0
],
"Variant Type": "POINTF"
},
{
"Default Parameter Value": 1.0,
"Key Frames": {
"-5": {
"Value": 0.8,
"Variant Type": "Double"
},
"955": {
"Value": 1.0,
"Variant Type": "Double"
}
},
"Parameter ID": "dynamicZoomScale",
"Parameter Value": 1.0,
"Variant Type": "Double",
"maxValue": 100.0,
"minValue": 0.01
}
],
"Type": 59
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Composite",
"Enabled": true,
"Name": "Composite",
"Parameters": [],
"Type": 1
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Lens Correction",
"Enabled": true,
"Name": "Lens Correction",
"Parameters": [],
"Type": 43
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Retime and Scaling",
"Enabled": true,
"Name": "Retime and Scaling",
"Parameters": [],
"Type": 22
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Video Faders",
"Enabled": true,
"Name": "Video Faders",
"Parameters": [],
"Type": 36
}
},
"name": "",
"effect_name": "Resolve Effect"
}
],
"markers": [
{
"OTIO_SCHEMA": "Marker.2",
"metadata": {
"Resolve_OTIO": {
"Keywords": [],
"Note": "{\"resolve_sub_products\": {\"io.ayon.creators.resolve.shot\": {\"id\": \"pyblish.avalon.instance\", \"productType\": \"shot\", \"productName\": \"shotMain\", \"active\": true, \"creator_identifier\": \"io.ayon.creators.resolve.shot\", \"variant\": \"Main\", \"folderPath\": \"/shots/seq_img_notc_blackhandles/sh010\", \"task\": \"Generic\", \"clip_variant\": \"<track_name>\", \"clip_index\": \"a82520bd-f231-4a23-9cb7-8823141232db\", \"clip_source_resolution\": {\"width\": \"1920\", \"height\": \"1080\", \"pixelAspect\": 1.0}, \"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_img_notc_blackhandles\", \"track\": \"{_track_}\", \"shot\": \"sh###\", \"hierarchy\": \"shots/seq_img_notc_blackhandles\", \"sourceResolution\": false, \"workfileFrameStart\": 1001, \"handleStart\": 10, \"handleEnd\": 10, \"parents\": [{\"folder_type\": \"folder\", \"entity_name\": \"shots\"}, {\"folder_type\": \"sequence\", \"entity_name\": \"seq_img_notc_blackhandles\"}], \"hierarchyData\": {\"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_img_notc_blackhandles\", \"track\": \"Video1\", \"shot\": \"sh010\"}, \"heroTrack\": true, \"uuid\": \"5d6be326-f1d0-4416-b6aa-780d05a8dd6d\", \"reviewTrack\": \"Video1\", \"label\": \"/shots/seq_img_notc_blackhandles/sh010 shot\", \"has_promised_context\": true, \"newHierarchyIntegration\": true, \"newAssetPublishing\": true, \"instance_id\": \"e196263f-c584-40b4-bc27-018051a3bc92\", \"creator_attributes\": {\"folderPath\": \"/shots/seq_img_notc_blackhandles/sh010\", \"workfileFrameStart\": 1001, \"handleStart\": 10, \"handleEnd\": 10, \"frameStart\": 1001, \"frameEnd\": 1092, \"clipIn\": 86511, \"clipOut\": 86602, \"clipDuration\": 91, \"sourceIn\": 5, \"sourceOut\": 96, \"fps\": \"from_selection\"}, \"publish_attributes\": {\"CollectSlackFamilies\": {\"additional_message\": \"\"}}}, \"io.ayon.creators.resolve.plate\": {\"id\": \"pyblish.avalon.instance\", \"productType\": \"plate\", \"productName\": \"plateVideo1\", \"active\": true, \"creator_identifier\": \"io.ayon.creators.resolve.plate\", \"variant\": \"Video1\", \"folderPath\": \"/shots/seq_img_notc_blackhandles/sh010\", \"task\": \"Generic\", \"clip_variant\": \"<track_name>\", \"clip_index\": \"a82520bd-f231-4a23-9cb7-8823141232db\", \"clip_source_resolution\": {\"width\": \"1920\", \"height\": \"1080\", \"pixelAspect\": 1.0}, \"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_img_notc_blackhandles\", \"track\": \"{_track_}\", \"shot\": \"sh###\", \"hierarchy\": \"shots/seq_img_notc_blackhandles\", \"sourceResolution\": false, \"workfileFrameStart\": 1001, \"handleStart\": 10, \"handleEnd\": 10, \"parents\": [{\"folder_type\": \"folder\", \"entity_name\": \"shots\"}, {\"folder_type\": \"sequence\", \"entity_name\": \"seq_img_notc_blackhandles\"}], \"hierarchyData\": {\"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_img_notc_blackhandles\", \"track\": \"Video1\", \"shot\": \"sh010\"}, \"heroTrack\": true, \"uuid\": \"5d6be326-f1d0-4416-b6aa-780d05a8dd6d\", \"reviewTrack\": \"Video1\", \"parent_instance_id\": \"e196263f-c584-40b4-bc27-018051a3bc92\", \"label\": \"/shots/seq_img_notc_blackhandles/sh010 plate\", \"has_promised_context\": true, \"newHierarchyIntegration\": true, \"newAssetPublishing\": true, \"instance_id\": \"ced7e9b8-721a-4377-a827-15fbf7f2831a\", \"creator_attributes\": {\"parentInstance\": \"/shots/seq_img_notc_blackhandles/sh010 shot\", \"vSyncOn\": true, \"vSyncTrack\": \"Video1\"}, \"publish_attributes\": 
{\"CollectSlackFamilies\": {\"additional_message\": \"\"}}}}, \"clip_index\": \"a82520bd-f231-4a23-9cb7-8823141232db\", \"publish\": true}"
},
"clip_index": "a82520bd-f231-4a23-9cb7-8823141232db",
"publish": true,
"resolve_sub_products": {
"io.ayon.creators.resolve.plate": {
"active": true,
"clip_index": "a82520bd-f231-4a23-9cb7-8823141232db",
"clip_source_resolution": {
"height": "1080",
"pixelAspect": 1.0,
"width": "1920"
},
"clip_variant": "<track_name>",
"creator_attributes": {
"parentInstance": "/shots/seq_img_notc_blackhandles/sh010 shot",
"vSyncOn": true,
"vSyncTrack": "Video1"
},
"creator_identifier": "io.ayon.creators.resolve.plate",
"episode": "ep01",
"folder": "shots",
"folderPath": "/shots/seq_img_notc_blackhandles/sh010",
"handleEnd": 10,
"handleStart": 10,
"has_promised_context": true,
"heroTrack": true,
"hierarchy": "shots/seq_img_notc_blackhandles",
"hierarchyData": {
"episode": "ep01",
"folder": "shots",
"sequence": "seq_img_notc_blackhandles",
"shot": "sh010",
"track": "Video1"
},
"id": "pyblish.avalon.instance",
"instance_id": "ced7e9b8-721a-4377-a827-15fbf7f2831a",
"label": "/shots/seq_img_notc_blackhandles/sh010 plate",
"newAssetPublishing": true,
"newHierarchyIntegration": true,
"parent_instance_id": "e196263f-c584-40b4-bc27-018051a3bc92",
"parents": [
{
"entity_name": "shots",
"folder_type": "folder"
},
{
"entity_name": "seq_img_notc_blackhandles",
"folder_type": "sequence"
}
],
"productName": "plateVideo1",
"productType": "plate",
"publish_attributes": {
"CollectSlackFamilies": {
"additional_message": ""
}
},
"reviewTrack": "Video1",
"sequence": "seq_img_notc_blackhandles",
"shot": "sh###",
"sourceResolution": false,
"task": "Generic",
"track": "{_track_}",
"uuid": "5d6be326-f1d0-4416-b6aa-780d05a8dd6d",
"variant": "Video1",
"workfileFrameStart": 1001
},
"io.ayon.creators.resolve.shot": {
"active": true,
"clip_index": "a82520bd-f231-4a23-9cb7-8823141232db",
"clip_source_resolution": {
"height": "1080",
"pixelAspect": 1.0,
"width": "1920"
},
"clip_variant": "<track_name>",
"creator_attributes": {
"clipDuration": 91,
"clipIn": 86511,
"clipOut": 86602,
"folderPath": "/shots/seq_img_notc_blackhandles/sh010",
"fps": "from_selection",
"frameEnd": 1092,
"frameStart": 1001,
"handleEnd": 10,
"handleStart": 10,
"sourceIn": 5,
"sourceOut": 96,
"workfileFrameStart": 1001
},
"creator_identifier": "io.ayon.creators.resolve.shot",
"episode": "ep01",
"folder": "shots",
"folderPath": "/shots/seq_img_notc_blackhandles/sh010",
"handleEnd": 10,
"handleStart": 10,
"has_promised_context": true,
"heroTrack": true,
"hierarchy": "shots/seq_img_notc_blackhandles",
"hierarchyData": {
"episode": "ep01",
"folder": "shots",
"sequence": "seq_img_notc_blackhandles",
"shot": "sh010",
"track": "Video1"
},
"id": "pyblish.avalon.instance",
"instance_id": "e196263f-c584-40b4-bc27-018051a3bc92",
"label": "/shots/seq_img_notc_blackhandles/sh010 shot",
"newAssetPublishing": true,
"newHierarchyIntegration": true,
"parents": [
{
"entity_name": "shots",
"folder_type": "folder"
},
{
"entity_name": "seq_img_notc_blackhandles",
"folder_type": "sequence"
}
],
"productName": "shotMain",
"productType": "shot",
"publish_attributes": {
"CollectSlackFamilies": {
"additional_message": ""
}
},
"reviewTrack": "Video1",
"sequence": "seq_img_notc_blackhandles",
"shot": "sh###",
"sourceResolution": false,
"task": "Generic",
"track": "{_track_}",
"uuid": "5d6be326-f1d0-4416-b6aa-780d05a8dd6d",
"variant": "Main",
"workfileFrameStart": 1001
}
}
},
"name": "AyonData",
"color": "GREEN",
"marked_range": {
"OTIO_SCHEMA": "TimeRange.1",
"duration": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 1.0
},
"start_time": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 50.0
}
},
"comment": ""
}
],
"enabled": true,
"media_references": {
"DEFAULT_MEDIA": {
"OTIO_SCHEMA": "ImageSequenceReference.1",
"metadata": {},
"name": "output.[1000-1100].tif",
"available_range": {
"OTIO_SCHEMA": "TimeRange.1",
"duration": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 25.0,
"value": 101.0
},
"start_time": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 25.0,
"value": 0.0
}
},
"available_image_bounds": null,
"target_url_base": "C:\\tif_seq",
"name_prefix": "output.",
"name_suffix": ".tif",
"start_frame": 1000,
"frame_step": 1,
"rate": 25.0,
"frame_zero_padding": 4,
"missing_frame_policy": "error"
}
},
"active_media_reference_key": "DEFAULT_MEDIA"
}


@@ -0,0 +1,356 @@
{
"OTIO_SCHEMA": "Clip.2",
"metadata": {
"Resolve_OTIO": {}
},
"name": "qt_embedded_tc.mov",
"source_range": {
"OTIO_SCHEMA": "TimeRange.1",
"duration": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 68.0
},
"start_time": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 86414.0
}
},
"effects": [
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Transform",
"Enabled": true,
"Name": "Transform",
"Parameters": [],
"Type": 2
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Cropping",
"Enabled": true,
"Name": "Cropping",
"Parameters": [],
"Type": 3
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Dynamic Zoom",
"Enabled": false,
"Name": "Dynamic Zoom",
"Parameters": [
{
"Default Parameter Value": [
0.0,
0.0
],
"Key Frames": {
"-14": {
"Value": [
0.0,
0.0
],
"Variant Type": "POINTF"
},
"986": {
"Value": [
0.0,
0.0
],
"Variant Type": "POINTF"
}
},
"Parameter ID": "dynamicZoomCenter",
"Parameter Value": [
0.0,
0.0
],
"Variant Type": "POINTF"
},
{
"Default Parameter Value": 1.0,
"Key Frames": {
"-14": {
"Value": 0.8,
"Variant Type": "Double"
},
"986": {
"Value": 1.0,
"Variant Type": "Double"
}
},
"Parameter ID": "dynamicZoomScale",
"Parameter Value": 1.0,
"Variant Type": "Double",
"maxValue": 100.0,
"minValue": 0.01
}
],
"Type": 59
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Composite",
"Enabled": true,
"Name": "Composite",
"Parameters": [],
"Type": 1
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Lens Correction",
"Enabled": true,
"Name": "Lens Correction",
"Parameters": [],
"Type": 43
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Retime and Scaling",
"Enabled": true,
"Name": "Retime and Scaling",
"Parameters": [],
"Type": 22
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Video Faders",
"Enabled": true,
"Name": "Video Faders",
"Parameters": [],
"Type": 36
}
},
"name": "",
"effect_name": "Resolve Effect"
}
],
"markers": [
{
"OTIO_SCHEMA": "Marker.2",
"metadata": {
"Resolve_OTIO": {
"Keywords": [],
"Note": "{\"resolve_sub_products\": {\"io.ayon.creators.resolve.shot\": {\"id\": \"pyblish.avalon.instance\", \"productType\": \"shot\", \"productName\": \"shotMain\", \"active\": true, \"creator_identifier\": \"io.ayon.creators.resolve.shot\", \"variant\": \"Main\", \"folderPath\": \"/shots/seq_qt_tc/sh010\", \"task\": \"Generic\", \"clip_variant\": \"<track_name>\", \"clip_index\": \"12cce00c-eadf-4abd-ac80-0816a24506ab\", \"clip_source_resolution\": {\"width\": \"956\", \"height\": \"720\", \"pixelAspect\": 1.0}, \"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_qt_tc\", \"track\": \"{_track_}\", \"shot\": \"sh###\", \"hierarchy\": \"shots/seq_qt_tc\", \"sourceResolution\": false, \"workfileFrameStart\": 1001, \"handleStart\": 10, \"handleEnd\": 10, \"parents\": [{\"folder_type\": \"folder\", \"entity_name\": \"shots\"}, {\"folder_type\": \"sequence\", \"entity_name\": \"seq_qt_tc\"}], \"hierarchyData\": {\"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_qt_tc\", \"track\": \"Video1\", \"shot\": \"sh010\"}, \"heroTrack\": true, \"uuid\": \"5dc397e0-1142-4a35-969d-d4c35c512f0f\", \"reviewTrack\": \"Video1\", \"label\": \"/shots/seq_qt_tc/sh010 shot\", \"has_promised_context\": true, \"newHierarchyIntegration\": true, \"newAssetPublishing\": true, \"instance_id\": \"6f4bbf76-6638-4645-9059-0f516c0c12c2\", \"creator_attributes\": {\"folderPath\": \"/shots/seq_qt_tc/sh010\", \"workfileFrameStart\": 1001, \"handleStart\": 10, \"handleEnd\": 10, \"frameStart\": 1001, \"frameEnd\": 1069, \"clipIn\": 86516, \"clipOut\": 86584, \"clipDuration\": 68, \"sourceIn\": 14, \"sourceOut\": 82, \"fps\": \"from_selection\"}, \"publish_attributes\": {\"CollectSlackFamilies\": {\"additional_message\": \"\"}}}, \"io.ayon.creators.resolve.plate\": {\"id\": \"pyblish.avalon.instance\", \"productType\": \"plate\", \"productName\": \"plateVideo1\", \"active\": true, \"creator_identifier\": \"io.ayon.creators.resolve.plate\", \"variant\": \"Video1\", \"folderPath\": \"/shots/seq_qt_tc/sh010\", \"task\": \"Generic\", \"clip_variant\": \"<track_name>\", \"clip_index\": \"12cce00c-eadf-4abd-ac80-0816a24506ab\", \"clip_source_resolution\": {\"width\": \"956\", \"height\": \"720\", \"pixelAspect\": 1.0}, \"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_qt_tc\", \"track\": \"{_track_}\", \"shot\": \"sh###\", \"hierarchy\": \"shots/seq_qt_tc\", \"sourceResolution\": false, \"workfileFrameStart\": 1001, \"handleStart\": 10, \"handleEnd\": 10, \"parents\": [{\"folder_type\": \"folder\", \"entity_name\": \"shots\"}, {\"folder_type\": \"sequence\", \"entity_name\": \"seq_qt_tc\"}], \"hierarchyData\": {\"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_qt_tc\", \"track\": \"Video1\", \"shot\": \"sh010\"}, \"heroTrack\": true, \"uuid\": \"5dc397e0-1142-4a35-969d-d4c35c512f0f\", \"reviewTrack\": \"Video1\", \"parent_instance_id\": \"6f4bbf76-6638-4645-9059-0f516c0c12c2\", \"label\": \"/shots/seq_qt_tc/sh010 plate\", \"has_promised_context\": true, \"newHierarchyIntegration\": true, \"newAssetPublishing\": true, \"instance_id\": \"1d11a6b5-cc2b-49d8-8bcb-35187c785b22\", \"creator_attributes\": {\"parentInstance\": \"/shots/seq_qt_tc/sh010 shot\", \"vSyncOn\": true, \"vSyncTrack\": \"Video1\"}, \"publish_attributes\": {\"CollectSlackFamilies\": {\"additional_message\": \"\"}}}}, \"clip_index\": \"12cce00c-eadf-4abd-ac80-0816a24506ab\", \"publish\": true}"
},
"clip_index": "12cce00c-eadf-4abd-ac80-0816a24506ab",
"publish": true,
"resolve_sub_products": {
"io.ayon.creators.resolve.plate": {
"active": true,
"clip_index": "12cce00c-eadf-4abd-ac80-0816a24506ab",
"clip_source_resolution": {
"height": "720",
"pixelAspect": 1.0,
"width": "956"
},
"clip_variant": "<track_name>",
"creator_attributes": {
"parentInstance": "/shots/seq_qt_tc/sh010 shot",
"vSyncOn": true,
"vSyncTrack": "Video1"
},
"creator_identifier": "io.ayon.creators.resolve.plate",
"episode": "ep01",
"folder": "shots",
"folderPath": "/shots/seq_qt_tc/sh010",
"handleEnd": 10,
"handleStart": 10,
"has_promised_context": true,
"heroTrack": true,
"hierarchy": "shots/seq_qt_tc",
"hierarchyData": {
"episode": "ep01",
"folder": "shots",
"sequence": "seq_qt_tc",
"shot": "sh010",
"track": "Video1"
},
"id": "pyblish.avalon.instance",
"instance_id": "1d11a6b5-cc2b-49d8-8bcb-35187c785b22",
"label": "/shots/seq_qt_tc/sh010 plate",
"newAssetPublishing": true,
"newHierarchyIntegration": true,
"parent_instance_id": "6f4bbf76-6638-4645-9059-0f516c0c12c2",
"parents": [
{
"entity_name": "shots",
"folder_type": "folder"
},
{
"entity_name": "seq_qt_tc",
"folder_type": "sequence"
}
],
"productName": "plateVideo1",
"productType": "plate",
"publish_attributes": {
"CollectSlackFamilies": {
"additional_message": ""
}
},
"reviewTrack": "Video1",
"sequence": "seq_qt_tc",
"shot": "sh###",
"sourceResolution": false,
"task": "Generic",
"track": "{_track_}",
"uuid": "5dc397e0-1142-4a35-969d-d4c35c512f0f",
"variant": "Video1",
"workfileFrameStart": 1001
},
"io.ayon.creators.resolve.shot": {
"active": true,
"clip_index": "12cce00c-eadf-4abd-ac80-0816a24506ab",
"clip_source_resolution": {
"height": "720",
"pixelAspect": 1.0,
"width": "956"
},
"clip_variant": "<track_name>",
"creator_attributes": {
"clipDuration": 68,
"clipIn": 86516,
"clipOut": 86584,
"folderPath": "/shots/seq_qt_tc/sh010",
"fps": "from_selection",
"frameEnd": 1069,
"frameStart": 1001,
"handleEnd": 10,
"handleStart": 10,
"sourceIn": 14,
"sourceOut": 82,
"workfileFrameStart": 1001
},
"creator_identifier": "io.ayon.creators.resolve.shot",
"episode": "ep01",
"folder": "shots",
"folderPath": "/shots/seq_qt_tc/sh010",
"handleEnd": 10,
"handleStart": 10,
"has_promised_context": true,
"heroTrack": true,
"hierarchy": "shots/seq_qt_tc",
"hierarchyData": {
"episode": "ep01",
"folder": "shots",
"sequence": "seq_qt_tc",
"shot": "sh010",
"track": "Video1"
},
"id": "pyblish.avalon.instance",
"instance_id": "6f4bbf76-6638-4645-9059-0f516c0c12c2",
"label": "/shots/seq_qt_tc/sh010 shot",
"newAssetPublishing": true,
"newHierarchyIntegration": true,
"parents": [
{
"entity_name": "shots",
"folder_type": "folder"
},
{
"entity_name": "seq_qt_tc",
"folder_type": "sequence"
}
],
"productName": "shotMain",
"productType": "shot",
"publish_attributes": {
"CollectSlackFamilies": {
"additional_message": ""
}
},
"reviewTrack": "Video1",
"sequence": "seq_qt_tc",
"shot": "sh###",
"sourceResolution": false,
"task": "Generic",
"track": "{_track_}",
"uuid": "5dc397e0-1142-4a35-969d-d4c35c512f0f",
"variant": "Main",
"workfileFrameStart": 1001
}
}
},
"name": "AyonData",
"color": "GREEN",
"marked_range": {
"OTIO_SCHEMA": "TimeRange.1",
"duration": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 1.0
},
"start_time": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 86448.0
}
},
"comment": ""
}
],
"enabled": true,
"media_references": {
"DEFAULT_MEDIA": {
"OTIO_SCHEMA": "ExternalReference.1",
"metadata": {},
"name": "qt_embedded_tc.mov",
"available_range": {
"OTIO_SCHEMA": "TimeRange.1",
"duration": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 100.0
},
"start_time": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 86400.0
}
},
"available_image_bounds": null,
"target_url": "C:\\data\\qt_embedded_tc.mov"
}
},
"active_media_reference_key": "DEFAULT_MEDIA"
}

File diff suppressed because one or more lines are too long


@@ -0,0 +1,356 @@
{
"OTIO_SCHEMA": "Clip.2",
"metadata": {
"Resolve_OTIO": {}
},
"name": "3 jours dans les coulisses du ZEvent 2024.mp4",
"source_range": {
"OTIO_SCHEMA": "TimeRange.1",
"duration": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 25.0,
"value": 50.0
},
"start_time": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 25.0,
"value": 0.0
}
},
"effects": [
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Transform",
"Enabled": true,
"Name": "Transform",
"Parameters": [],
"Type": 2
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Cropping",
"Enabled": true,
"Name": "Cropping",
"Parameters": [],
"Type": 3
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Dynamic Zoom",
"Enabled": false,
"Name": "Dynamic Zoom",
"Parameters": [
{
"Default Parameter Value": [
0.0,
0.0
],
"Key Frames": {
"0": {
"Value": [
0.0,
0.0
],
"Variant Type": "POINTF"
},
"1000": {
"Value": [
0.0,
0.0
],
"Variant Type": "POINTF"
}
},
"Parameter ID": "dynamicZoomCenter",
"Parameter Value": [
0.0,
0.0
],
"Variant Type": "POINTF"
},
{
"Default Parameter Value": 1.0,
"Key Frames": {
"0": {
"Value": 0.8,
"Variant Type": "Double"
},
"1000": {
"Value": 1.0,
"Variant Type": "Double"
}
},
"Parameter ID": "dynamicZoomScale",
"Parameter Value": 1.0,
"Variant Type": "Double",
"maxValue": 100.0,
"minValue": 0.01
}
],
"Type": 59
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Composite",
"Enabled": true,
"Name": "Composite",
"Parameters": [],
"Type": 1
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Lens Correction",
"Enabled": true,
"Name": "Lens Correction",
"Parameters": [],
"Type": 43
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Retime and Scaling",
"Enabled": true,
"Name": "Retime and Scaling",
"Parameters": [],
"Type": 22
}
},
"name": "",
"effect_name": "Resolve Effect"
},
{
"OTIO_SCHEMA": "Effect.1",
"metadata": {
"Resolve_OTIO": {
"Effect Name": "Video Faders",
"Enabled": true,
"Name": "Video Faders",
"Parameters": [],
"Type": 36
}
},
"name": "",
"effect_name": "Resolve Effect"
}
],
"markers": [
{
"OTIO_SCHEMA": "Marker.2",
"metadata": {
"Resolve_OTIO": {
"Keywords": [],
"Note": "{\"resolve_sub_products\": {\"io.ayon.creators.resolve.shot\": {\"id\": \"pyblish.avalon.instance\", \"productType\": \"shot\", \"productName\": \"shotMain\", \"active\": true, \"creator_identifier\": \"io.ayon.creators.resolve.shot\", \"variant\": \"Main\", \"folderPath\": \"/shots/seq_qt_no_tc/sh010\", \"task\": \"Generic\", \"clip_variant\": \"<track_name>\", \"clip_index\": \"c3d9fb4f-afdf-49e3-9733-bf80e40e0de3\", \"clip_source_resolution\": {\"width\": \"640\", \"height\": \"360\", \"pixelAspect\": 1.0}, \"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_qt_no_tc\", \"track\": \"{_track_}\", \"shot\": \"sh###\", \"hierarchy\": \"shots/seq_qt_no_tc\", \"sourceResolution\": true, \"workfileFrameStart\": 1001, \"handleStart\": 10, \"handleEnd\": 10, \"parents\": [{\"folder_type\": \"folder\", \"entity_name\": \"shots\"}, {\"folder_type\": \"sequence\", \"entity_name\": \"seq_qt_no_tc\"}], \"hierarchyData\": {\"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_qt_no_tc\", \"track\": \"Video1\", \"shot\": \"sh010\"}, \"heroTrack\": true, \"uuid\": \"5ab44838-a173-422a-8750-d5265e5a4ab5\", \"reviewTrack\": \"Video1\", \"label\": \"/shots/seq_qt_no_tc/sh010 shot\", \"has_promised_context\": true, \"newHierarchyIntegration\": true, \"newAssetPublishing\": true, \"instance_id\": \"ba8e76cd-7319-449d-93b5-93fd65cf3e83\", \"creator_attributes\": {\"folderPath\": \"/shots/seq_qt_no_tc/sh010\", \"workfileFrameStart\": 1001, \"handleStart\": 10, \"handleEnd\": 10, \"frameStart\": 1001, \"frameEnd\": 1051, \"clipIn\": 86477, \"clipOut\": 86527, \"clipDuration\": 50, \"sourceIn\": 0, \"sourceOut\": 50, \"fps\": \"from_selection\"}, \"publish_attributes\": {\"CollectSlackFamilies\": {\"additional_message\": \"\"}}}, \"io.ayon.creators.resolve.plate\": {\"id\": \"pyblish.avalon.instance\", \"productType\": \"plate\", \"productName\": \"plateVideo1\", \"active\": true, \"creator_identifier\": \"io.ayon.creators.resolve.plate\", \"variant\": \"Video1\", \"folderPath\": \"/shots/seq_qt_no_tc/sh010\", \"task\": \"Generic\", \"clip_variant\": \"<track_name>\", \"clip_index\": \"c3d9fb4f-afdf-49e3-9733-bf80e40e0de3\", \"clip_source_resolution\": {\"width\": \"640\", \"height\": \"360\", \"pixelAspect\": 1.0}, \"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_qt_no_tc\", \"track\": \"{_track_}\", \"shot\": \"sh###\", \"hierarchy\": \"shots/seq_qt_no_tc\", \"sourceResolution\": true, \"workfileFrameStart\": 1001, \"handleStart\": 10, \"handleEnd\": 10, \"parents\": [{\"folder_type\": \"folder\", \"entity_name\": \"shots\"}, {\"folder_type\": \"sequence\", \"entity_name\": \"seq_qt_no_tc\"}], \"hierarchyData\": {\"folder\": \"shots\", \"episode\": \"ep01\", \"sequence\": \"seq_qt_no_tc\", \"track\": \"Video1\", \"shot\": \"sh010\"}, \"heroTrack\": true, \"uuid\": \"5ab44838-a173-422a-8750-d5265e5a4ab5\", \"reviewTrack\": \"Video1\", \"parent_instance_id\": \"ba8e76cd-7319-449d-93b5-93fd65cf3e83\", \"label\": \"/shots/seq_qt_no_tc/sh010 plate\", \"has_promised_context\": true, \"newHierarchyIntegration\": true, \"newAssetPublishing\": true, \"instance_id\": \"4a1cd220-c638-4e77-855c-cebd43b5dbc3\", \"creator_attributes\": {\"parentInstance\": \"/shots/seq_qt_no_tc/sh010 shot\", \"vSyncOn\": true, \"vSyncTrack\": \"Video1\"}, \"publish_attributes\": {\"CollectSlackFamilies\": {\"additional_message\": \"\"}}}}, \"clip_index\": \"c3d9fb4f-afdf-49e3-9733-bf80e40e0de3\", \"publish\": true}"
},
"clip_index": "c3d9fb4f-afdf-49e3-9733-bf80e40e0de3",
"publish": true,
"resolve_sub_products": {
"io.ayon.creators.resolve.plate": {
"active": true,
"clip_index": "c3d9fb4f-afdf-49e3-9733-bf80e40e0de3",
"clip_source_resolution": {
"height": "360",
"pixelAspect": 1.0,
"width": "640"
},
"clip_variant": "<track_name>",
"creator_attributes": {
"parentInstance": "/shots/seq_qt_no_tc/sh010 shot",
"vSyncOn": true,
"vSyncTrack": "Video1"
},
"creator_identifier": "io.ayon.creators.resolve.plate",
"episode": "ep01",
"folder": "shots",
"folderPath": "/shots/seq_qt_no_tc/sh010",
"handleEnd": 10,
"handleStart": 10,
"has_promised_context": true,
"heroTrack": true,
"hierarchy": "shots/seq_qt_no_tc",
"hierarchyData": {
"episode": "ep01",
"folder": "shots",
"sequence": "seq_qt_no_tc",
"shot": "sh010",
"track": "Video1"
},
"id": "pyblish.avalon.instance",
"instance_id": "4a1cd220-c638-4e77-855c-cebd43b5dbc3",
"label": "/shots/seq_qt_no_tc/sh010 plate",
"newAssetPublishing": true,
"newHierarchyIntegration": true,
"parent_instance_id": "ba8e76cd-7319-449d-93b5-93fd65cf3e83",
"parents": [
{
"entity_name": "shots",
"folder_type": "folder"
},
{
"entity_name": "seq_qt_no_tc",
"folder_type": "sequence"
}
],
"productName": "plateVideo1",
"productType": "plate",
"publish_attributes": {
"CollectSlackFamilies": {
"additional_message": ""
}
},
"reviewTrack": "Video1",
"sequence": "seq_qt_no_tc",
"shot": "sh###",
"sourceResolution": true,
"task": "Generic",
"track": "{_track_}",
"uuid": "5ab44838-a173-422a-8750-d5265e5a4ab5",
"variant": "Video1",
"workfileFrameStart": 1001
},
"io.ayon.creators.resolve.shot": {
"active": true,
"clip_index": "c3d9fb4f-afdf-49e3-9733-bf80e40e0de3",
"clip_source_resolution": {
"height": "360",
"pixelAspect": 1.0,
"width": "640"
},
"clip_variant": "<track_name>",
"creator_attributes": {
"clipDuration": 50,
"clipIn": 86477,
"clipOut": 86527,
"folderPath": "/shots/seq_qt_no_tc/sh010",
"fps": "from_selection",
"frameEnd": 1051,
"frameStart": 1001,
"handleEnd": 10,
"handleStart": 10,
"sourceIn": 0,
"sourceOut": 50,
"workfileFrameStart": 1001
},
"creator_identifier": "io.ayon.creators.resolve.shot",
"episode": "ep01",
"folder": "shots",
"folderPath": "/shots/seq_qt_no_tc/sh010",
"handleEnd": 10,
"handleStart": 10,
"has_promised_context": true,
"heroTrack": true,
"hierarchy": "shots/seq_qt_no_tc",
"hierarchyData": {
"episode": "ep01",
"folder": "shots",
"sequence": "seq_qt_no_tc",
"shot": "sh010",
"track": "Video1"
},
"id": "pyblish.avalon.instance",
"instance_id": "ba8e76cd-7319-449d-93b5-93fd65cf3e83",
"label": "/shots/seq_qt_no_tc/sh010 shot",
"newAssetPublishing": true,
"newHierarchyIntegration": true,
"parents": [
{
"entity_name": "shots",
"folder_type": "folder"
},
{
"entity_name": "seq_qt_no_tc",
"folder_type": "sequence"
}
],
"productName": "shotMain",
"productType": "shot",
"publish_attributes": {
"CollectSlackFamilies": {
"additional_message": ""
}
},
"reviewTrack": "Video1",
"sequence": "seq_qt_no_tc",
"shot": "sh###",
"sourceResolution": true,
"task": "Generic",
"track": "{_track_}",
"uuid": "5ab44838-a173-422a-8750-d5265e5a4ab5",
"variant": "Main",
"workfileFrameStart": 1001
}
}
},
"name": "AyonData",
"color": "GREEN",
"marked_range": {
"OTIO_SCHEMA": "TimeRange.1",
"duration": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 1.0
},
"start_time": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 24.0,
"value": 25.0
}
},
"comment": ""
}
],
"enabled": true,
"media_references": {
"DEFAULT_MEDIA": {
"OTIO_SCHEMA": "ExternalReference.1",
"metadata": {},
"name": "3 jours dans les coulisses du ZEvent 2024.mp4",
"available_range": {
"OTIO_SCHEMA": "TimeRange.1",
"duration": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 25.0,
"value": 30822.0
},
"start_time": {
"OTIO_SCHEMA": "RationalTime.1",
"rate": 25.0,
"value": 0.0
}
},
"available_image_bounds": null,
"target_url": "C:\\data\\movie.mp4"
}
},
"active_media_reference_key": "DEFAULT_MEDIA"
}


@@ -0,0 +1,203 @@
import mock
import os
import pytest
from typing import NamedTuple
import opentimelineio as otio
import ayon_core.lib
from ayon_core.plugins.publish import extract_otio_review
_RESOURCE_DIR = os.path.join(
os.path.dirname(__file__),
"resources"
)
class MockInstance():
""" Mock pyblish instance for testing purpose.
"""
def __init__(self, data: dict):
self.data = data
self.context = self
class CaptureFFmpegCalls():
""" Mock calls made to ffmpeg subprocess.
"""
def __init__(self):
self.calls = []
def append_call(self, *args, **kwargs):
ffmpeg_args_list, = args
self.calls.append(" ".join(ffmpeg_args_list))
return True
def get_fmpeg_executable(self, _):
return ["/path/to/ffmpeg"]
def run_process(file_name: str):
"""
"""
# Get OTIO review data from serialized file_name
file_path = os.path.join(_RESOURCE_DIR, file_name)
clip = otio.schema.Clip.from_json_file(file_path)
# Prepare dummy instance and capture call object
capture_call = CaptureFFmpegCalls()
processor = extract_otio_review.ExtractOTIOReview()
instance = MockInstance({
"otioReviewClips": [clip],
"handleStart": 10,
"handleEnd": 10,
"workfileFrameStart": 1001,
"folderPath": "/dummy/path",
"anatomy": NamedTuple("Anatomy", [('project_name', "test_project")])
})
# Mock calls to extern and run plugins.
with mock.patch.object(
extract_otio_review,
"get_ffmpeg_tool_args",
side_effect=capture_call.get_fmpeg_executable,
):
with mock.patch.object(
extract_otio_review,
"run_subprocess",
side_effect=capture_call.append_call,
):
with mock.patch.object(
processor,
"_get_folder_name_based_prefix",
return_value="C:/result/output."
):
processor.process(instance)
# return all calls made to ffmpeg subprocess
return capture_call.calls
def test_image_sequence_with_embedded_tc_and_handles_out_of_range():
"""
Img sequence clip (embedded timecode 1h/24fps)
available_files = 1000-1100
available_range = 87399-87500 24fps
source_range = 87399-87500 24fps
"""
calls = run_process("img_seq_embedded_tc_review.json")
expected = [
# 10 head black handles generated from gap (991-1000)
"/path/to/ffmpeg -t 0.4166666666666667 -r 24.0 -f lavfi -i "
"color=c=black:s=1280x720 -tune stillimage -start_number 991 "
"C:/result/output.%03d.jpg",
# 10 tail black handles generated from gap (1102-1111)
"/path/to/ffmpeg -t 0.4166666666666667 -r 24.0 -f lavfi -i "
"color=c=black:s=1280x720 -tune stillimage -start_number 1102 "
"C:/result/output.%03d.jpg",
# Report from source exr (1001-1101) with enforced framerate
"/path/to/ffmpeg -start_number 1000 -framerate 24.0 -i "
"C:\\exr_embedded_tc\\output.%04d.exr -start_number 1001 "
"C:/result/output.%03d.jpg"
]
assert calls == expected
def test_image_sequence_and_handles_out_of_range():
"""
Img sequence clip (no timecode)
available_files = 1000-1100
available_range = 0-101 25fps
source_range = 5-91 24fps
"""
calls = run_process("img_seq_review.json")
expected = [
# 5 head black frames generated from gap (991-995)
"/path/to/ffmpeg -t 0.2 -r 25.0 -f lavfi -i color=c=black:s=1280x720 -tune "
"stillimage -start_number 991 C:/result/output.%03d.jpg",
# 9 tail black frames generated from gap (1097-1105)
"/path/to/ffmpeg -t 0.36 -r 25.0 -f lavfi -i color=c=black:s=1280x720 -tune "
"stillimage -start_number 1097 C:/result/output.%03d.jpg",
# Report from source tiff (996-1096)
# 996-1000 = additional 5 head frames
# 1001-1095 = source range conformed to 25fps
# 1096-1096 = additional 1 tail frame
"/path/to/ffmpeg -start_number 1000 -framerate 25.0 -i "
"C:\\tif_seq\\output.%04d.tif -start_number 996 C:/result/output.%03d.jpg"
]
assert calls == expected
def test_movie_with_embedded_tc_no_gap_handles():
"""
Qt movie clip (embedded timecode 1h/24fps)
available_range = 86400-86500 24fps
source_range = 86414-86482 24fps
"""
calls = run_process("qt_embedded_tc_review.json")
expected = [
# Handles are all included in media available range.
# Extract source range from Qt
# - first_frame = 14 src - 10 (head handle) = frame 4 = 0.1666s
# - duration = 68fr (source) + 20fr (handles) = 88 frames = 3.666s
"/path/to/ffmpeg -ss 0.16666666666666666 -t 3.6666666666666665 "
"-i C:\\data\\qt_embedded_tc.mov -start_number 991 "
"C:/result/output.%03d.jpg"
]
assert calls == expected
def test_short_movie_head_gap_handles():
"""
Qt movie clip.
available_range = 0-30822 25fps
source_range = 0-50 24fps
"""
calls = run_process("qt_review.json")
expected = [
# 10 head black frames generated from gap (991-1000)
"/path/to/ffmpeg -t 0.4 -r 25.0 -f lavfi -i color=c=black:s=1280x720 -tune "
"stillimage -start_number 991 C:/result/output.%03d.jpg",
# source range + 10 tail frames
# duration = 50fr (source) + 10fr (tail handle) = 60 fr = 2.4s
"/path/to/ffmpeg -ss 0.0 -t 2.4 -i C:\\data\\movie.mp4 -start_number 1001 "
"C:/result/output.%03d.jpg"
]
assert calls == expected
def test_short_movie_tail_gap_handles():
"""
Qt movie clip.
available_range = 0-101 24fps
source_range = 35-101 24fps
"""
calls = run_process("qt_handle_tail_review.json")
expected = [
# 10 tail black frames generated from gap (1067-1076)
"/path/to/ffmpeg -t 0.4166666666666667 -r 24.0 -f lavfi -i "
"color=c=black:s=1280x720 -tune stillimage -start_number 1067 "
"C:/result/output.%03d.jpg",
# 10 head frames + source range
# duration = 10fr (head handle) + 66fr (source) = 76fr = 3.16s
"/path/to/ffmpeg -ss 1.0416666666666667 -t 3.1666666666666665 -i "
"C:\\data\\qt_no_tc_24fps.mov -start_number 991 C:/result/output.%03d.jpg"
]
assert calls == expected
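New corner cases can follow the same pattern; a hypothetical sketch (the resource file name and the expected command list are placeholders):

def test_new_corner_case():
    """
    Describe the clip layout of the serialized resource here.
    """
    calls = run_process("my_corner_case_review.json")  # hypothetical resource
    expected = [
        # FFmpeg command lines captured by CaptureFFmpegCalls.
    ]
    assert calls == expected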