Mirror of https://github.com/ynput/ayon-core.git

Merge pull request #2209 from pypeclub/feature/PYPE-1897_publishing-tvp-with-studio-rendering

TVPaint: Workers rendering

Commit afbbc5f20e: 28 changed files with 3751 additions and 456 deletions
openpype/hosts/tvpaint/lib.py (new file, 682 lines)
@@ -0,0 +1,682 @@
import os
import shutil
import collections

from PIL import Image, ImageDraw


def backwards_id_conversion(data_by_layer_id):
    """Convert layer ids from integers to strings."""
    for key in tuple(data_by_layer_id.keys()):
        if not isinstance(key, str):
            data_by_layer_id[str(key)] = data_by_layer_id.pop(key)


def get_frame_filename_template(frame_end, filename_prefix=None, ext=None):
    """Get file template with frame key for rendered files.

    This simple template contains `{frame}{ext}` for sequential outputs
    and `single_file{ext}` for single file output. Output is rendered to a
    temporary folder so the filename does not matter, as the integrator
    renames the files.
    """
    frame_padding = 4
    frame_end_str_len = len(str(frame_end))
    if frame_end_str_len > frame_padding:
        frame_padding = frame_end_str_len

    ext = ext or ".png"
    filename_prefix = filename_prefix or ""

    return "{}{{frame:0>{}}}{}".format(filename_prefix, frame_padding, ext)

def get_layer_pos_filename_template(range_end, filename_prefix=None, ext=None):
    filename_prefix = filename_prefix or ""
    new_filename_prefix = filename_prefix + "pos_{pos}."
    return get_frame_filename_template(range_end, new_filename_prefix, ext)


def _calculate_pre_behavior_copy(
    range_start, exposure_frames, pre_beh,
    layer_frame_start, layer_frame_end,
    output_idx_by_frame_idx
):
    """Calculate frames before first exposure frame based on pre behavior.

    Function may skip whole processing if layer's first frame is before
    the range start. In that case pre behavior does not make sense.

    Args:
        range_start(int): First frame of range which should be rendered.
        exposure_frames(list): List of all exposure frames on layer.
        pre_beh(str): Pre behavior of layer (enum of 4 strings).
        layer_frame_start(int): First frame of layer.
        layer_frame_end(int): Last frame of layer.
        output_idx_by_frame_idx(dict): References to already prepared frames
            and where result will be stored.
    """
    # Skip if layer's first frame is before range start
    if layer_frame_start < range_start:
        return

    first_exposure_frame = min(exposure_frames)
    # Skip if first exposure frame is before range start
    if first_exposure_frame < range_start:
        return

    # Calculate frame count of layer
    frame_count = layer_frame_end - layer_frame_start + 1

    if pre_beh == "none":
        # Fill all frames from range start to layer's first frame with None
        for frame_idx in range(range_start, layer_frame_start):
            output_idx_by_frame_idx[frame_idx] = None

    elif pre_beh == "hold":
        # Keep the first exposure frame for the whole time
        for frame_idx in range(range_start, layer_frame_start):
            output_idx_by_frame_idx[frame_idx] = first_exposure_frame

    elif pre_beh in ("loop", "repeat"):
        # Loop backwards from the last frame of the layer
        for frame_idx in reversed(range(range_start, layer_frame_start)):
            eq_frame_idx_offset = (
                (layer_frame_end - frame_idx) % frame_count
            )
            eq_frame_idx = layer_frame_end - eq_frame_idx_offset
            output_idx_by_frame_idx[frame_idx] = eq_frame_idx

    elif pre_beh == "pingpong":
        half_seq_len = frame_count - 1
        seq_len = half_seq_len * 2
        for frame_idx in reversed(range(range_start, layer_frame_start)):
            eq_frame_idx_offset = (layer_frame_start - frame_idx) % seq_len
            if eq_frame_idx_offset > half_seq_len:
                eq_frame_idx_offset = (seq_len - eq_frame_idx_offset)
            eq_frame_idx = layer_frame_start + eq_frame_idx_offset
            output_idx_by_frame_idx[frame_idx] = eq_frame_idx
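A worked example of the pingpong arithmetic above (hypothetical numbers): a layer spanning frames 10-13 rendered from range_start 4 bounces backwards through 11, 12, 13, 12, 11, 10.

    refs = {}
    _calculate_pre_behavior_copy(
        range_start=4, exposure_frames=[10], pre_beh="pingpong",
        layer_frame_start=10, layer_frame_end=13,
        output_idx_by_frame_idx=refs
    )
    # refs == {9: 11, 8: 12, 7: 13, 6: 12, 5: 11, 4: 10}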

def _calculate_post_behavior_copy(
    range_end, exposure_frames, post_beh,
    layer_frame_start, layer_frame_end,
    output_idx_by_frame_idx
):
    """Calculate frames after last frame of layer based on post behavior.

    Function may skip whole processing if layer's last frame is at or after
    the range end. In that case post behavior does not make sense.

    Args:
        range_end(int): Last frame of range which should be rendered.
        exposure_frames(list): List of all exposure frames on layer.
        post_beh(str): Post behavior of layer (enum of 4 strings).
        layer_frame_start(int): First frame of layer.
        layer_frame_end(int): Last frame of layer.
        output_idx_by_frame_idx(dict): References to already prepared frames
            and where result will be stored.
    """
    # Skip if layer's last frame is at or after range end
    if layer_frame_end >= range_end:
        return

    last_exposure_frame = max(exposure_frames)
    # Skip if last exposure frame is at or after range end
    # - this is probably irrelevant with the layer frame end check?
    if last_exposure_frame >= range_end:
        return

    # Calculate frame count of layer
    frame_count = layer_frame_end - layer_frame_start + 1

    if post_beh == "none":
        # Fill all frames from layer's last frame to range end with None
        for frame_idx in range(layer_frame_end + 1, range_end + 1):
            output_idx_by_frame_idx[frame_idx] = None

    elif post_beh == "hold":
        # Keep the last exposure frame to the end
        for frame_idx in range(layer_frame_end + 1, range_end + 1):
            output_idx_by_frame_idx[frame_idx] = last_exposure_frame

    elif post_beh in ("loop", "repeat"):
        # Repeat layer frames after the layer's end
        for frame_idx in range(layer_frame_end + 1, range_end + 1):
            eq_frame_idx = frame_idx % frame_count
            output_idx_by_frame_idx[frame_idx] = eq_frame_idx

    elif post_beh == "pingpong":
        half_seq_len = frame_count - 1
        seq_len = half_seq_len * 2
        for frame_idx in range(layer_frame_end + 1, range_end + 1):
            eq_frame_idx_offset = (frame_idx - layer_frame_end) % seq_len
            if eq_frame_idx_offset > half_seq_len:
                eq_frame_idx_offset = seq_len - eq_frame_idx_offset
            eq_frame_idx = layer_frame_end - eq_frame_idx_offset
            output_idx_by_frame_idx[frame_idx] = eq_frame_idx

def _calculate_in_range_frames(
    range_start, range_end,
    exposure_frames, layer_frame_end,
    output_idx_by_frame_idx
):
    """Calculate frame references in defined range.

    Fills references for exposure frames inside the range and for the held
    frames that follow them, up to the range end or the layer's last frame
    (post behavior takes over from there).

    Args:
        range_start(int): First frame of range which should be rendered.
        range_end(int): Last frame of range which should be rendered.
        exposure_frames(list): List of all exposure frames on layer.
        layer_frame_end(int): Last frame of layer.
        output_idx_by_frame_idx(dict): References to already prepared frames
            and where result will be stored.
    """
    # Calculate in range frames
    in_range_frames = []
    for frame_idx in exposure_frames:
        if range_start <= frame_idx <= range_end:
            output_idx_by_frame_idx[frame_idx] = frame_idx
            in_range_frames.append(frame_idx)

    if in_range_frames:
        first_in_range_frame = min(in_range_frames)
        # Calculate frames from first in-range exposure frame to range end
        # or last frame of layer (post behavior is calculated from there)
        previous_exposure = first_in_range_frame
        for frame_idx in range(first_in_range_frame, range_end + 1):
            if frame_idx > layer_frame_end:
                break

            if frame_idx in exposure_frames:
                previous_exposure = frame_idx
            else:
                output_idx_by_frame_idx[frame_idx] = previous_exposure

    # There can be frames before the first exposure frame in range
    # First check if the first range frame is not already filled
    if range_start in output_idx_by_frame_idx:
        return

    first_exposure_frame = min(exposure_frames)
    last_exposure_frame = max(exposure_frames)
    # Check if the first exposure frame is before the defined range,
    # if not then skip
    if first_exposure_frame >= range_start:
        return

    # Check if the last exposure frame is also before range start;
    # in that case we can't fill frames before the range
    if last_exposure_frame < range_start:
        return

    closest_exposure_frame = first_exposure_frame
    for frame_idx in exposure_frames:
        if frame_idx >= range_start:
            break
        if frame_idx > closest_exposure_frame:
            closest_exposure_frame = frame_idx

    output_idx_by_frame_idx[closest_exposure_frame] = closest_exposure_frame
    for frame_idx in range(range_start, range_end + 1):
        if frame_idx in output_idx_by_frame_idx:
            break
        output_idx_by_frame_idx[frame_idx] = closest_exposure_frame

def _cleanup_frame_references(output_idx_by_frame_idx):
    """Replace chained frame references with direct references.

    Any reference that points to another referencing frame is redirected
    to the frame that will actually be rendered.
    ```
    // Example input
    {
        1: 1,
        2: 1,
        3: 2
    }
    // Result
    {
        1: 1,
        2: 1,
        3: 1 // Changed reference to final rendered frame
    }
    ```
    Result is a dictionary where each key leads to a frame that should be
    rendered.
    """
    for frame_idx in tuple(output_idx_by_frame_idx.keys()):
        reference_idx = output_idx_by_frame_idx[frame_idx]
        # Skip transparent frames and self references
        if reference_idx is None or reference_idx == frame_idx:
            continue

        real_reference_idx = reference_idx
        _tmp_reference_idx = reference_idx
        while True:
            _temp = output_idx_by_frame_idx[_tmp_reference_idx]
            if _temp == _tmp_reference_idx:
                real_reference_idx = _tmp_reference_idx
                break
            _tmp_reference_idx = _temp

        if real_reference_idx != reference_idx:
            output_idx_by_frame_idx[frame_idx] = real_reference_idx


def _cleanup_out_range_frames(output_idx_by_frame_idx, range_start, range_end):
    """Cleanup frame references to frames out of passed range.

    The first available frame in range is used instead.
    ```
    // Example input. Range 2-3
    {
        1: 1,
        2: 1,
        3: 1
    }
    // Result
    {
        2: 2, // Redirected to self as the first frame referencing out of range
        3: 2  // Redirected to the first redirected frame
    }
    ```
    Result is a dictionary where each key leads to a frame that should be
    rendered.
    """
    in_range_frames_by_out_frames = collections.defaultdict(set)
    out_range_frames = set()
    for frame_idx in tuple(output_idx_by_frame_idx.keys()):
        # Remember frames that are out of range
        if frame_idx < range_start or frame_idx > range_end:
            out_range_frames.add(frame_idx)
            continue

        reference_idx = output_idx_by_frame_idx[frame_idx]
        # Skip transparent frames
        if reference_idx is None:
            continue

        # Collect in-range frames that reference an out-of-range frame
        if reference_idx < range_start or reference_idx > range_end:
            in_range_frames_by_out_frames[reference_idx].add(frame_idx)

    for reference_idx in tuple(in_range_frames_by_out_frames.keys()):
        frame_indexes = in_range_frames_by_out_frames.pop(reference_idx)
        new_reference = None
        for frame_idx in frame_indexes:
            if new_reference is None:
                new_reference = frame_idx
            output_idx_by_frame_idx[frame_idx] = new_reference

    # Finally remove out of range frames
    for frame_idx in out_range_frames:
        output_idx_by_frame_idx.pop(frame_idx)

def calculate_layer_frame_references(
    range_start, range_end,
    layer_frame_start,
    layer_frame_end,
    exposure_frames,
    pre_beh, post_beh
):
    """Calculate frame references for one layer based on its data.

    Output is a dictionary where each key is a frame index referencing the
    rendered frame index. A frame index that should be rendered references
    itself.

    ```
    // Example output
    {
        1: 1,    // Reference to self - will be rendered
        2: 1,    // Reference to frame 1 - will be copied
        3: 1,    // Reference to frame 1 - will be copied
        4: 4,    // Reference to self - will be rendered
        ...
        20: 4,   // Reference to frame 4 - will be copied
        21: None // Reference to None - transparent image
    }
    ```

    Args:
        range_start(int): First frame of range which should be rendered.
        range_end(int): Last frame of range which should be rendered.
        layer_frame_start(int): First frame of layer.
        layer_frame_end(int): Last frame of layer.
        exposure_frames(list): List of all exposure frames on layer.
        pre_beh(str): Pre behavior of layer (enum of 4 strings).
        post_beh(str): Post behavior of layer (enum of 4 strings).
    """
    # Output variable
    output_idx_by_frame_idx = {}
    # Skip if layer does not have any exposure frames
    if not exposure_frames:
        return output_idx_by_frame_idx

    # First calculate in range frames
    _calculate_in_range_frames(
        range_start, range_end,
        exposure_frames, layer_frame_end,
        output_idx_by_frame_idx
    )
    # Calculate frames by pre behavior of layer
    _calculate_pre_behavior_copy(
        range_start, exposure_frames, pre_beh,
        layer_frame_start, layer_frame_end,
        output_idx_by_frame_idx
    )
    # Calculate frames by post behavior of layer
    _calculate_post_behavior_copy(
        range_end, exposure_frames, post_beh,
        layer_frame_start, layer_frame_end,
        output_idx_by_frame_idx
    )
    # Cleanup of referenced frames
    _cleanup_frame_references(output_idx_by_frame_idx)

    # Remove frames out of range
    _cleanup_out_range_frames(output_idx_by_frame_idx, range_start, range_end)

    return output_idx_by_frame_idx
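A small end-to-end sketch (hypothetical values): exposure frames 0 and 2 on a layer spanning frames 0-3, rendered over range 0-5 with "hold" behaviors.

    refs = calculate_layer_frame_references(
        range_start=0, range_end=5,
        layer_frame_start=0, layer_frame_end=3,
        exposure_frames=[0, 2],
        pre_beh="hold", post_beh="hold"
    )
    # refs == {0: 0, 1: 0, 2: 2, 3: 2, 4: 2, 5: 2}
    # Only frames 0 and 2 are rendered; the rest are copied from them.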

def calculate_layers_extraction_data(
    layers_data,
    exposure_frames_by_layer_id,
    behavior_by_layer_id,
    range_start,
    range_end,
    skip_not_visible=True,
    filename_prefix=None,
    ext=None
):
    """Calculate extraction data for passed layers data.

    ```
    {
        <layer_id>: {
            "frame_references": {...},
            "filenames_by_frame_index": {...}
        },
        ...
    }
    ```

    Frame references map a frame index to the rendered frame index.

    Filenames by frame index represent the filename under which each frame
    should be stored. The directory is not handled here because each usage
    may need a different approach.

    Args:
        layers_data(list): Layers data loaded from TVPaint.
        exposure_frames_by_layer_id(dict): Exposure frames of layers stored
            by layer id.
        behavior_by_layer_id(dict): Pre and post behavior of layers stored
            by layer id.
        range_start(int): First frame of rendered range.
        range_end(int): Last frame of rendered range.
        skip_not_visible(bool): Skip calculations for hidden layers (skipped
            by default).
        filename_prefix(str): Prefix before filename.
        ext(str): Extension which filenames will have ('.png' is default).

    Returns:
        dict: Prepared data for rendering by layer position.
    """
    # Make sure layer ids are strings
    # - backwards compatibility from when layer ids were integers
    backwards_id_conversion(exposure_frames_by_layer_id)
    backwards_id_conversion(behavior_by_layer_id)

    layer_template = get_layer_pos_filename_template(
        range_end, filename_prefix, ext
    )
    output = {}
    for layer_data in layers_data:
        if skip_not_visible and not layer_data["visible"]:
            continue

        orig_layer_id = layer_data["layer_id"]
        layer_id = str(orig_layer_id)

        # Skip if layer does not have any exposure frames (empty layer)
        exposure_frames = exposure_frames_by_layer_id[layer_id]
        if not exposure_frames:
            continue

        layer_position = layer_data["position"]
        layer_frame_start = layer_data["frame_start"]
        layer_frame_end = layer_data["frame_end"]

        layer_behavior = behavior_by_layer_id[layer_id]

        pre_behavior = layer_behavior["pre"]
        post_behavior = layer_behavior["post"]

        frame_references = calculate_layer_frame_references(
            range_start, range_end,
            layer_frame_start,
            layer_frame_end,
            exposure_frames,
            pre_behavior, post_behavior
        )
        # All values in 'frame_references' reference a frame that must be
        # rendered out
        frames_to_render = set(frame_references.values())
        # Remove 'None' reference (transparent image)
        if None in frames_to_render:
            frames_to_render.remove(None)

        # Skip layer if it has nothing to render
        if not frames_to_render:
            continue

        # All filenames that should exist as output (not final output)
        filename_frames = (
            set(range(range_start, range_end + 1))
            | frames_to_render
        )
        filenames_by_frame_index = {}
        for frame_idx in filename_frames:
            filenames_by_frame_index[frame_idx] = layer_template.format(
                pos=layer_position,
                frame=frame_idx
            )

        # Store objects under the layer id
        output[orig_layer_id] = {
            "frame_references": frame_references,
            "filenames_by_frame_index": filenames_by_frame_index
        }
    return output
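A hedged usage sketch with made-up layer data (real data comes from TVPaint through the george calls used in the plugin below):

    layers_data = [{
        "layer_id": 1, "position": 0, "visible": True,
        "frame_start": 0, "frame_end": 3, "name": "L1"
    }]
    extraction_data = calculate_layers_extraction_data(
        layers_data,
        exposure_frames_by_layer_id={"1": [0, 2]},
        behavior_by_layer_id={"1": {"pre": "hold", "post": "hold"}},
        range_start=0,
        range_end=5
    )
    # extraction_data[1]["frame_references"]         -> {0: 0, 1: 0, 2: 2, ...}
    # extraction_data[1]["filenames_by_frame_index"] -> {0: "pos_0.0000.png", ...}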

def create_transparent_image_from_source(src_filepath, dst_filepath):
    """Create a transparent image of the same type and size as the source."""
    img_obj = Image.open(src_filepath)
    painter = ImageDraw.Draw(img_obj)
    painter.rectangle((0, 0, *img_obj.size), fill=(0, 0, 0, 0))
    img_obj.save(dst_filepath)


def fill_reference_frames(frame_references, filepaths_by_frame):
    """Fill referencing frames with hardlinks (or copies) of their source."""
    for frame_idx, ref_idx in frame_references.items():
        # A frame referencing itself will be rendered and used as source,
        # and reference indexes with None can't be filled
        if ref_idx is None or frame_idx == ref_idx:
            continue

        # Get source and destination filepaths
        src_filepath = filepaths_by_frame[ref_idx]
        dst_filepath = filepaths_by_frame[frame_idx]

        # Create a hardlink of the image instead of copying if possible
        if hasattr(os, "link"):
            os.link(src_filepath, dst_filepath)
        else:
            shutil.copy(src_filepath, dst_filepath)

def copy_render_file(src_path, dst_path):
    """Create a copy of a rendered image."""
    # Create a hardlink of the image instead of copying if possible
    if hasattr(os, "link"):
        os.link(src_path, dst_path)
    else:
        shutil.copy(src_path, dst_path)


def cleanup_rendered_layers(filepaths_by_layer_id):
    """Delete all per-layer files after compositing."""
    # Collect all filepaths from data
    all_filepaths = []
    for filepaths_by_frame in filepaths_by_layer_id.values():
        all_filepaths.extend(filepaths_by_frame.values())

    # Remove each existing file only once
    for filepath in set(all_filepaths):
        if filepath is not None and os.path.exists(filepath):
            os.remove(filepath)

def composite_rendered_layers(
    layers_data, filepaths_by_layer_id,
    range_start, range_end,
    dst_filepaths_by_frame, cleanup=True
):
    """Composite multiple rendered layers by their position.

    Result is a single frame sequence with transparency matching the content
    created in TVPaint. Missing source filepaths are replaced with
    transparent images, but at least one image must be rendered and exist.

    Function can be used even if a single layer was rendered, to fill
    transparent filepaths.

    Args:
        layers_data(list): Layers data loaded from TVPaint.
        filepaths_by_layer_id(dict): Rendered filepaths stored by frame index
            per layer id. Used as source for compositing.
        range_start(int): First frame of rendered range.
        range_end(int): Last frame of rendered range.
        dst_filepaths_by_frame(dict): Output filepaths by frame where the
            final image after compositing will be stored. Paths must not
            clash with source filepaths.
        cleanup(bool): Remove all source filepaths when done with
            compositing.
    """
    # Prepare layers by their position
    # - position tells in which order compositing happens
    layer_ids_by_position = {}
    for layer in layers_data:
        layer_position = layer["position"]
        layer_ids_by_position[layer_position] = layer["layer_id"]

    # Sort layer positions
    sorted_positions = tuple(sorted(layer_ids_by_position.keys()))
    # Prepare variable for filepaths without any rendered content
    # - transparent images will be created for them
    transparent_filepaths = set()
    # Store first final filepath
    first_dst_filepath = None
    for frame_idx in range(range_start, range_end + 1):
        dst_filepath = dst_filepaths_by_frame[frame_idx]
        src_filepaths = []
        for layer_position in sorted_positions:
            layer_id = layer_ids_by_position[layer_position]
            filepaths_by_frame = filepaths_by_layer_id[layer_id]
            src_filepath = filepaths_by_frame.get(frame_idx)
            if src_filepath is not None:
                src_filepaths.append(src_filepath)

        if not src_filepaths:
            transparent_filepaths.add(dst_filepath)
            continue

        # Store first destination filepath to be used for transparent images
        if first_dst_filepath is None:
            first_dst_filepath = dst_filepath

        if len(src_filepaths) == 1:
            src_filepath = src_filepaths[0]
            if cleanup:
                os.rename(src_filepath, dst_filepath)
            else:
                copy_render_file(src_filepath, dst_filepath)

        else:
            composite_images(src_filepaths, dst_filepath)

    # Store first transparent filepath to be able to copy it
    transparent_filepath = None
    for dst_filepath in transparent_filepaths:
        if transparent_filepath is None:
            create_transparent_image_from_source(
                first_dst_filepath, dst_filepath
            )
            transparent_filepath = dst_filepath
        else:
            copy_render_file(transparent_filepath, dst_filepath)

    # Remove all files that were used as source for compositing
    if cleanup:
        cleanup_rendered_layers(filepaths_by_layer_id)

def composite_images(input_image_paths, output_filepath):
    """Composite images in order from the passed list.

    Raises:
        ValueError: When the passed list is empty.
    """
    if not input_image_paths:
        raise ValueError("Nothing to composite.")

    img_obj = None
    for image_filepath in input_image_paths:
        _img_obj = Image.open(image_filepath)
        if img_obj is None:
            img_obj = _img_obj
        else:
            img_obj.alpha_composite(_img_obj)
    img_obj.save(output_filepath)
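Usage is straightforward; the paths here are illustrative. The first path is the base image and later images are alpha-composited over it, so the sources must be RGBA files of equal size.

    composite_images(
        ["/tmp/render/pos_1.0001.png", "/tmp/render/pos_0.0001.png"],
        "/tmp/render/0001.png"
    )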

def rename_filepaths_by_frame_start(
    filepaths_by_frame, range_start, range_end, new_frame_start
):
    """Change frames in filenames of finished images to new frame start."""
    # Skip if source first frame is same as destination first frame
    # - return the source mapping unchanged so callers always get a dict
    if range_start == new_frame_start:
        return filepaths_by_frame

    # Calculate new frame end
    new_frame_end = range_end + (new_frame_start - range_start)
    # Create filename template
    filename_template = get_frame_filename_template(
        max(range_end, new_frame_end)
    )

    # Use different iteration order based on Mark In and output Frame Start
    # values - this makes sure that renaming won't overwrite files that
    # are not renamed yet
    if range_start < new_frame_start:
        source_range = range(range_end, range_start - 1, -1)
        output_range = range(new_frame_end, new_frame_start - 1, -1)
    else:
        # This is the less likely situation as frame start will be in most
        # cases higher than Mark In.
        source_range = range(range_start, range_end + 1)
        output_range = range(new_frame_start, new_frame_end + 1)

    new_dst_filepaths = {}
    for src_frame, dst_frame in zip(source_range, output_range):
        src_filepath = filepaths_by_frame[src_frame]
        src_dirpath = os.path.dirname(src_filepath)
        dst_filename = filename_template.format(frame=dst_frame)
        dst_filepath = os.path.join(src_dirpath, dst_filename)

        os.rename(src_filepath, dst_filepath)

        new_dst_filepaths[dst_frame] = dst_filepath
    return new_dst_filepaths
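For example (hypothetical numbers), moving a rendered range 0-2 to start at frame 1001 computes new_frame_end = 2 + (1001 - 0) = 1003 and renames in descending order so no source file is overwritten before it is processed:

    filepaths = {
        0: "/tmp/render/0000.png",
        1: "/tmp/render/0001.png",
        2: "/tmp/render/0002.png"
    }
    # The files must exist; os.rename is actually performed
    new_paths = rename_filepaths_by_frame_start(filepaths, 0, 2, 1001)
    # new_paths == {1003: ".../1003.png", 1002: ".../1002.png", 1001: ".../1001.png"}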
openpype/hosts/tvpaint/plugins/publish/extract_sequence.py
@@ -1,12 +1,18 @@
 import os
 import shutil
 import copy
 import tempfile

 import pyblish.api
 from avalon.tvpaint import lib
-from openpype.hosts.tvpaint.api.lib import composite_images
-from PIL import Image, ImageDraw
+from openpype.hosts.tvpaint.lib import (
+    calculate_layers_extraction_data,
+    get_frame_filename_template,
+    fill_reference_frames,
+    composite_rendered_layers,
+    rename_filepaths_by_frame_start
+)
+from PIL import Image


 class ExtractSequence(pyblish.api.Extractor):

@@ -111,14 +117,6 @@ class ExtractSequence(pyblish.api.Extractor):
         # -------------------------------------------------------------------

-        filename_template = self._get_filename_template(
-            # Use the biggest number
-            max(mark_out, frame_end)
-        )
-        ext = os.path.splitext(filename_template)[1].replace(".", "")
-
-        self.log.debug("Using file template \"{}\"".format(filename_template))
-
         # Save to staging dir
         output_dir = instance.data.get("stagingDir")
         if not output_dir:

@@ -133,30 +131,30 @@ class ExtractSequence(pyblish.api.Extractor):
         )

         if instance.data["family"] == "review":
-            output_filenames, thumbnail_fullpath = self.render_review(
-                filename_template, output_dir, mark_in, mark_out,
-                scene_bg_color
+            result = self.render_review(
+                output_dir, mark_in, mark_out, scene_bg_color
             )
         else:
             # Render output
-            output_filenames, thumbnail_fullpath = self.render(
-                filename_template, output_dir,
-                mark_in, mark_out,
-                filtered_layers
+            result = self.render(
+                output_dir, mark_in, mark_out, filtered_layers
             )
+
+        output_filepaths_by_frame_idx, thumbnail_fullpath = result

         # Change scene frame Start back to previous value
         lib.execute_george("tv_startframe {}".format(scene_start_frame))

         # Sequence of one frame
-        if not output_filenames:
+        if not output_filepaths_by_frame_idx:
             self.log.warning("Extractor did not create any output.")
             return

         repre_files = self._rename_output_files(
-            filename_template, output_dir,
-            mark_in, mark_out,
-            output_frame_start, output_frame_end
+            output_filepaths_by_frame_idx,
+            mark_in,
+            mark_out,
+            output_frame_start
         )

         # Fill tags and new families

@@ -169,9 +167,11 @@ class ExtractSequence(pyblish.api.Extractor):
         if single_file:
             repre_files = repre_files[0]

+        # Extension is hardcoded
+        # - changing the extension would require code changes
         new_repre = {
-            "name": ext,
-            "ext": ext,
+            "name": "png",
+            "ext": "png",
             "files": repre_files,
             "stagingDir": output_dir,
             "tags": tags

@@ -206,69 +206,28 @@ class ExtractSequence(pyblish.api.Extractor):
         }
         instance.data["representations"].append(thumbnail_repre)

-    def _get_filename_template(self, frame_end):
-        """Get file template for rendered files.
-
-        This simple template contains `{frame}{ext}` for sequential outputs
-        and `single_file{ext}` for single file output. Output is rendered to
-        a temporary folder so the filename does not matter, as the integrator
-        renames the files.
-        """
-        frame_padding = 4
-        frame_end_str_len = len(str(frame_end))
-        if frame_end_str_len > frame_padding:
-            frame_padding = frame_end_str_len
-
-        return "{{frame:0>{}}}".format(frame_padding) + ".png"
-
     def _rename_output_files(
-        self, filename_template, output_dir,
-        mark_in, mark_out, output_frame_start, output_frame_end
+        self, filepaths_by_frame, mark_in, mark_out, output_frame_start
     ):
-        # Use different ranges based on Mark In and output Frame Start values
-        # - this is to make sure that filename renaming won't affect files
-        #   that are not renamed yet
-        mark_start_is_less = bool(mark_in < output_frame_start)
-        if mark_start_is_less:
-            marks_range = range(mark_out, mark_in - 1, -1)
-            frames_range = range(output_frame_end, output_frame_start - 1, -1)
-        else:
-            # This is the less likely situation as frame start will be in
-            # most cases higher than Mark In.
-            marks_range = range(mark_in, mark_out + 1)
-            frames_range = range(output_frame_start, output_frame_end + 1)
+        new_filepaths_by_frame = rename_filepaths_by_frame_start(
+            filepaths_by_frame, mark_in, mark_out, output_frame_start
+        )

-        repre_filepaths = []
-        for mark, frame in zip(marks_range, frames_range):
-            new_filename = filename_template.format(frame=frame)
-            new_filepath = os.path.join(output_dir, new_filename)
+        repre_filenames = []
+        for filepath in new_filepaths_by_frame.values():
+            repre_filenames.append(os.path.basename(filepath))

-            repre_filepaths.append(new_filepath)
+        if mark_in < output_frame_start:
+            repre_filenames = list(reversed(repre_filenames))

-            if mark != frame:
-                old_filename = filename_template.format(frame=mark)
-                old_filepath = os.path.join(output_dir, old_filename)
-                os.rename(old_filepath, new_filepath)
-
-        # Reverse repre files order if marks were processed backwards
-        if mark_start_is_less:
-            repre_filepaths = list(reversed(repre_filepaths))
-
-        return [
-            os.path.basename(path)
-            for path in repre_filepaths
-        ]
+        return repre_filenames

     def render_review(
-        self, filename_template, output_dir, mark_in, mark_out, scene_bg_color
+        self, output_dir, mark_in, mark_out, scene_bg_color
     ):
         """ Export images from TVPaint using `tv_savesequence` command.

         Args:
-            filename_template (str): Filename template of an output. Template
-                should already contain extension. Template may contain only
-                keyword argument `{frame}` or index argument (for same value).
-                Extension in template must match `save_mode`.
             output_dir (str): Directory where files will be stored.
             mark_in (int): Starting frame index from which export will begin.
             mark_out (int): On which frame index export will end.

@@ -279,6 +238,8 @@ class ExtractSequence(pyblish.api.Extractor):
             tuple: With 2 items, first is list of filenames, second is path
                 to thumbnail.
         """
+        filename_template = get_frame_filename_template(mark_out)
+
         self.log.debug("Preparing data for rendering.")
         first_frame_filepath = os.path.join(
             output_dir,

@@ -313,12 +274,13 @@ class ExtractSequence(pyblish.api.Extractor):
         lib.execute_george_through_file("\n".join(george_script_lines))

-        first_frame_filepath = None
-        output_filenames = []
-        for frame in range(mark_in, mark_out + 1):
-            filename = filename_template.format(frame=frame)
-            output_filenames.append(filename)
-
+        output_filepaths_by_frame_idx = {}
+        for frame_idx in range(mark_in, mark_out + 1):
+            filename = filename_template.format(frame=frame_idx)
             filepath = os.path.join(output_dir, filename)
+
+            output_filepaths_by_frame_idx[frame_idx] = filepath
+
             if not os.path.exists(filepath):
                 raise AssertionError(
                     "Output was not rendered. File was not found {}".format(

@@ -337,16 +299,12 @@ class ExtractSequence(pyblish.api.Extractor):
             source_img = source_img.convert("RGB")
             source_img.save(thumbnail_filepath)

-        return output_filenames, thumbnail_filepath
+        return output_filepaths_by_frame_idx, thumbnail_filepath

-    def render(self, filename_template, output_dir, mark_in, mark_out, layers):
+    def render(self, output_dir, mark_in, mark_out, layers):
         """ Export images from TVPaint.

         Args:
-            filename_template (str): Filename template of an output. Template
-                should already contain extension. Template may contain only
-                keyword argument `{frame}` or index argument (for same value).
-                Extension in template must match `save_mode`.
             output_dir (str): Directory where files will be stored.
             mark_in (int): Starting frame index from which export will begin.
             mark_out (int): On which frame index export will end.

@@ -360,12 +318,15 @@ class ExtractSequence(pyblish.api.Extractor):
         # Map layers by position
         layers_by_position = {}
+        layers_by_id = {}
         layer_ids = []
         for layer in layers:
+            layer_id = layer["layer_id"]
             position = layer["position"]
             layers_by_position[position] = layer
+            layers_by_id[layer_id] = layer

-            layer_ids.append(layer["layer_id"])
+            layer_ids.append(layer_id)

         # Sort layer positions in reverse order
         sorted_positions = list(reversed(sorted(layers_by_position.keys())))

@@ -374,59 +335,45 @@ class ExtractSequence(pyblish.api.Extractor):
         self.log.debug("Collecting pre/post behavior of individual layers.")
         behavior_by_layer_id = lib.get_layers_pre_post_behavior(layer_ids)
-
-        tmp_filename_template = "pos_{pos}." + filename_template
-
-        files_by_position = {}
-        for position in sorted_positions:
-            layer = layers_by_position[position]
-            behavior = behavior_by_layer_id[layer["layer_id"]]
-
-            files_by_frames = self._render_layer(
-                layer,
-                tmp_filename_template,
-                output_dir,
-                behavior,
-                mark_in,
-                mark_out
-            )
-            if files_by_frames:
-                files_by_position[position] = files_by_frames
-            else:
-                self.log.warning((
-                    "Skipped layer \"{}\". Probably out of Mark In/Out range."
-                ).format(layer["name"]))
-
-        if not files_by_position:
-            layer_names = set(layer["name"] for layer in layers)
-            joined_names = ", ".join(
-                ["\"{}\"".format(name) for name in layer_names]
-            )
-            self.log.warning(
-                "Layers {} do not have content in range {} - {}".format(
-                    joined_names, mark_in, mark_out
-                )
-            )
-            return [], None
-
-        output_filepaths = self._composite_files(
-            files_by_position,
-            mark_in,
-            mark_out,
-            filename_template,
-            output_dir
-        )
-        self._cleanup_tmp_files(files_by_position)
-
-        output_filenames = [
-            os.path.basename(filepath)
-            for filepath in output_filepaths
-        ]
+        exposure_frames_by_layer_id = lib.get_layers_exposure_frames(
+            layer_ids, layers
+        )
+        extraction_data_by_layer_id = calculate_layers_extraction_data(
+            layers,
+            exposure_frames_by_layer_id,
+            behavior_by_layer_id,
+            mark_in,
+            mark_out
+        )
+        # Render layers
+        filepaths_by_layer_id = {}
+        for layer_id, render_data in extraction_data_by_layer_id.items():
+            layer = layers_by_id[layer_id]
+            filepaths_by_layer_id[layer_id] = self._render_layer(
+                render_data, layer, output_dir
+            )

+        # Prepare final filepaths where compositing should store result
+        output_filepaths_by_frame = {}
         thumbnail_src_filepath = None
-        if output_filepaths:
-            thumbnail_src_filepath = output_filepaths[0]
+        finale_template = get_frame_filename_template(mark_out)
+        for frame_idx in range(mark_in, mark_out + 1):
+            filename = finale_template.format(frame=frame_idx)
+
+            filepath = os.path.join(output_dir, filename)
+            output_filepaths_by_frame[frame_idx] = filepath
+
+            if thumbnail_src_filepath is None:
+                thumbnail_src_filepath = filepath
+
+        self.log.info("Started compositing of layer frames.")
+        composite_rendered_layers(
+            layers, filepaths_by_layer_id,
+            mark_in, mark_out,
+            output_filepaths_by_frame
+        )
+
+        self.log.info("Compositing finished")
         thumbnail_filepath = None
         if thumbnail_src_filepath and os.path.exists(thumbnail_src_filepath):
             source_img = Image.open(thumbnail_src_filepath)

@@ -449,7 +396,7 @@ class ExtractSequence(pyblish.api.Extractor):
             ).format(source_img.mode))
             source_img.save(thumbnail_filepath)

-        return output_filenames, thumbnail_filepath
+        return output_filepaths_by_frame, thumbnail_filepath

     def _get_review_bg_color(self):
         red = green = blue = 255

@@ -460,338 +407,43 @@ class ExtractSequence(pyblish.api.Extractor):
             red, green, blue = self.review_bg
         return (red, green, blue)

-    def _render_layer(
-        self,
-        layer,
-        tmp_filename_template,
-        output_dir,
-        behavior,
-        mark_in_index,
-        mark_out_index
-    ):
+    def _render_layer(self, render_data, layer, output_dir):
+        frame_references = render_data["frame_references"]
+        filenames_by_frame_index = render_data["filenames_by_frame_index"]
+
         layer_id = layer["layer_id"]
-        frame_start_index = layer["frame_start"]
-        frame_end_index = layer["frame_end"]
-
-        pre_behavior = behavior["pre"]
-        post_behavior = behavior["post"]
-
-        # Check if layer is before mark in
-        if frame_end_index < mark_in_index:
-            # Skip layer if post behavior is "none"
-            if post_behavior == "none":
-                return {}
-
-        # Check if layer is after mark out
-        elif frame_start_index > mark_out_index:
-            # Skip layer if pre behavior is "none"
-            if pre_behavior == "none":
-                return {}
-
-        exposure_frames = lib.get_exposure_frames(
-            layer_id, frame_start_index, frame_end_index
-        )
-
-        if frame_start_index not in exposure_frames:
-            exposure_frames.append(frame_start_index)
-
-        layer_files_by_frame = {}
         george_script_lines = [
             "tv_layerset {}".format(layer_id),
             "tv_SaveMode \"PNG\""
         ]
-        layer_position = layer["position"]
-
-        for frame_idx in exposure_frames:
-            filename = tmp_filename_template.format(
-                pos=layer_position,
-                frame=frame_idx
-            )
+        filepaths_by_frame = {}
+        frames_to_render = []
+        for frame_idx, ref_idx in frame_references.items():
+            # None reference is skipped because it does not have a source
+            if ref_idx is None:
+                filepaths_by_frame[frame_idx] = None
+                continue
+            filename = filenames_by_frame_index[frame_idx]
             dst_path = "/".join([output_dir, filename])
-            layer_files_by_frame[frame_idx] = os.path.normpath(dst_path)
+            filepaths_by_frame[frame_idx] = dst_path
+            if frame_idx != ref_idx:
+                continue

+            frames_to_render.append(str(frame_idx))
             # Go to frame
             george_script_lines.append("tv_layerImage {}".format(frame_idx))
             # Store image to output
             george_script_lines.append("tv_saveimage \"{}\"".format(dst_path))

         self.log.debug("Rendering Exposure frames {} of layer {} ({})".format(
-            str(exposure_frames), layer_id, layer["name"]
+            ",".join(frames_to_render), layer_id, layer["name"]
         ))
         # Let TVPaint render layer's image
         lib.execute_george_through_file("\n".join(george_script_lines))

-        # Fill frames between `frame_start_index` and `frame_end_index`
-        self.log.debug((
-            "Filling frames between first and last frame of layer ({} - {})."
-        ).format(frame_start_index + 1, frame_end_index + 1))
+        self.log.debug("Filling frames that were not rendered.")
+        fill_reference_frames(frame_references, filepaths_by_frame)

-        _debug_filled_frames = []
-        prev_filepath = None
-        for frame_idx in range(frame_start_index, frame_end_index + 1):
-            if frame_idx in layer_files_by_frame:
-                prev_filepath = layer_files_by_frame[frame_idx]
-                continue
-
-            if prev_filepath is None:
-                raise ValueError("BUG: First frame of layer was not rendered!")
-            _debug_filled_frames.append(frame_idx)
-            filename = tmp_filename_template.format(
-                pos=layer_position,
-                frame=frame_idx
-            )
-            new_filepath = "/".join([output_dir, filename])
-            self._copy_image(prev_filepath, new_filepath)
-            layer_files_by_frame[frame_idx] = new_filepath
-
-        self.log.debug("Filled frames {}".format(str(_debug_filled_frames)))
-
-        # Fill frames by pre/post behavior of layer
-        self.log.debug((
-            "Completing image sequence of layer by pre/post behavior."
-            " PRE: {} | POST: {}"
-        ).format(pre_behavior, post_behavior))
-
-        # Pre behavior
-        self._fill_frame_by_pre_behavior(
-            layer,
-            pre_behavior,
-            mark_in_index,
-            layer_files_by_frame,
-            tmp_filename_template,
-            output_dir
-        )
-        self._fill_frame_by_post_behavior(
-            layer,
-            post_behavior,
-            mark_out_index,
-            layer_files_by_frame,
-            tmp_filename_template,
-            output_dir
-        )
-        return layer_files_by_frame
-
-    def _fill_frame_by_pre_behavior(
-        self,
-        layer,
-        pre_behavior,
-        mark_in_index,
-        layer_files_by_frame,
-        filename_template,
-        output_dir
-    ):
-        layer_position = layer["position"]
-        frame_start_index = layer["frame_start"]
-        frame_end_index = layer["frame_end"]
-        frame_count = frame_end_index - frame_start_index + 1
-        if mark_in_index >= frame_start_index:
-            self.log.debug((
-                "Skipping pre-behavior."
-                " All frames after Mark In are rendered."
-            ))
-            return
-
-        if pre_behavior == "none":
-            # Empty frames are handled during `_composite_files`
-            pass
-
-        elif pre_behavior == "hold":
-            # Keep first frame for whole time
-            eq_frame_filepath = layer_files_by_frame[frame_start_index]
-            for frame_idx in range(mark_in_index, frame_start_index):
-                filename = filename_template.format(
-                    pos=layer_position,
-                    frame=frame_idx
-                )
-                new_filepath = "/".join([output_dir, filename])
-                self._copy_image(eq_frame_filepath, new_filepath)
-                layer_files_by_frame[frame_idx] = new_filepath
-
-        elif pre_behavior in ("loop", "repeat"):
-            # Loop backwards from last frame of layer
-            for frame_idx in reversed(range(mark_in_index, frame_start_index)):
-                eq_frame_idx_offset = (
-                    (frame_end_index - frame_idx) % frame_count
-                )
-                eq_frame_idx = frame_end_index - eq_frame_idx_offset
-                eq_frame_filepath = layer_files_by_frame[eq_frame_idx]
-
-                filename = filename_template.format(
-                    pos=layer_position,
-                    frame=frame_idx
-                )
-                new_filepath = "/".join([output_dir, filename])
-                self._copy_image(eq_frame_filepath, new_filepath)
-                layer_files_by_frame[frame_idx] = new_filepath
-
-        elif pre_behavior == "pingpong":
-            half_seq_len = frame_count - 1
-            seq_len = half_seq_len * 2
-            for frame_idx in reversed(range(mark_in_index, frame_start_index)):
-                eq_frame_idx_offset = (frame_start_index - frame_idx) % seq_len
-                if eq_frame_idx_offset > half_seq_len:
-                    eq_frame_idx_offset = (seq_len - eq_frame_idx_offset)
-                eq_frame_idx = frame_start_index + eq_frame_idx_offset
-
-                eq_frame_filepath = layer_files_by_frame[eq_frame_idx]
-
-                filename = filename_template.format(
-                    pos=layer_position,
-                    frame=frame_idx
-                )
-                new_filepath = "/".join([output_dir, filename])
-                self._copy_image(eq_frame_filepath, new_filepath)
-                layer_files_by_frame[frame_idx] = new_filepath
-
-    def _fill_frame_by_post_behavior(
-        self,
-        layer,
-        post_behavior,
-        mark_out_index,
-        layer_files_by_frame,
-        filename_template,
-        output_dir
-    ):
-        layer_position = layer["position"]
-        frame_start_index = layer["frame_start"]
-        frame_end_index = layer["frame_end"]
-        frame_count = frame_end_index - frame_start_index + 1
-        if mark_out_index <= frame_end_index:
-            self.log.debug((
-                "Skipping post-behavior."
-                " All frames up to Mark Out are rendered."
-            ))
-            return
-
-        if post_behavior == "none":
-            # Empty frames are handled during `_composite_files`
-            pass
-
-        elif post_behavior == "hold":
-            # Keep last frame for whole time
-            eq_frame_filepath = layer_files_by_frame[frame_end_index]
-            for frame_idx in range(frame_end_index + 1, mark_out_index + 1):
-                filename = filename_template.format(
-                    pos=layer_position,
-                    frame=frame_idx
-                )
-                new_filepath = "/".join([output_dir, filename])
-                self._copy_image(eq_frame_filepath, new_filepath)
-                layer_files_by_frame[frame_idx] = new_filepath
-
-        elif post_behavior in ("loop", "repeat"):
-            # Repeat frames from the start of the layer
-            for frame_idx in range(frame_end_index + 1, mark_out_index + 1):
-                eq_frame_idx = frame_idx % frame_count
-                eq_frame_filepath = layer_files_by_frame[eq_frame_idx]
-
-                filename = filename_template.format(
-                    pos=layer_position,
-                    frame=frame_idx
-                )
-                new_filepath = "/".join([output_dir, filename])
-                self._copy_image(eq_frame_filepath, new_filepath)
-                layer_files_by_frame[frame_idx] = new_filepath
-
-        elif post_behavior == "pingpong":
-            half_seq_len = frame_count - 1
-            seq_len = half_seq_len * 2
-            for frame_idx in range(frame_end_index + 1, mark_out_index + 1):
-                eq_frame_idx_offset = (frame_idx - frame_end_index) % seq_len
-                if eq_frame_idx_offset > half_seq_len:
-                    eq_frame_idx_offset = seq_len - eq_frame_idx_offset
-                eq_frame_idx = frame_end_index - eq_frame_idx_offset
-
-                eq_frame_filepath = layer_files_by_frame[eq_frame_idx]
-
-                filename = filename_template.format(
-                    pos=layer_position,
-                    frame=frame_idx
-                )
-                new_filepath = "/".join([output_dir, filename])
-                self._copy_image(eq_frame_filepath, new_filepath)
-                layer_files_by_frame[frame_idx] = new_filepath
-
-    def _composite_files(
-        self, files_by_position, frame_start, frame_end,
-        filename_template, output_dir
-    ):
-        """Composite frames when more than one layer was exported.
-
-        This method is used when more than one layer is rendered out, so the
-        output should be a composition of each frame of the rendered layers.
-        Missing frames are filled with transparent images.
-        """
-        self.log.debug("Preparing files for compositing.")
-        # Prepare paths to images by frame into a list where they are stored
-        # in order of compositing.
-        images_by_frame = {}
-        for frame_idx in range(frame_start, frame_end + 1):
-            images_by_frame[frame_idx] = []
-            for position in sorted(files_by_position.keys(), reverse=True):
-                position_data = files_by_position[position]
-                if frame_idx in position_data:
-                    filepath = position_data[frame_idx]
-                    images_by_frame[frame_idx].append(filepath)
-
-        output_filepaths = []
-        missing_frame_paths = []
-        random_frame_path = None
-        for frame_idx in sorted(images_by_frame.keys()):
-            image_filepaths = images_by_frame[frame_idx]
-            output_filename = filename_template.format(frame=frame_idx)
-            output_filepath = os.path.join(output_dir, output_filename)
-            output_filepaths.append(output_filepath)
-
-            # Store information about missing frame and skip
-            if not image_filepaths:
-                missing_frame_paths.append(output_filepath)
-                continue
-
-            # Just rename the file if there is no need for compositing
-            if len(image_filepaths) == 1:
-                os.rename(image_filepaths[0], output_filepath)
-
-            # Composite images
-            else:
-                composite_images(image_filepaths, output_filepath)
-
-            # Store path of a random output image that will surely exist
-            # after all processing, as a mockup for missing frames
-            if random_frame_path is None:
-                random_frame_path = output_filepath
-
-        self.log.debug(
-            "Creating transparent images for frames without render {}.".format(
-                str(missing_frame_paths)
-            )
-        )
-        # Fill the sequence with transparent frames
-        transparent_filepath = None
-        for filepath in missing_frame_paths:
-            if transparent_filepath is None:
-                img_obj = Image.open(random_frame_path)
-                painter = ImageDraw.Draw(img_obj)
-                painter.rectangle((0, 0, *img_obj.size), fill=(0, 0, 0, 0))
-                img_obj.save(filepath)
-                transparent_filepath = filepath
-            else:
-                self._copy_image(transparent_filepath, filepath)
-        return output_filepaths
-
-    def _cleanup_tmp_files(self, files_by_position):
-        """Remove temporary files that were used for compositing."""
-        for data in files_by_position.values():
-            for filepath in data.values():
-                if os.path.exists(filepath):
-                    os.remove(filepath)
-
-    def _copy_image(self, src_path, dst_path):
-        """Create a copy of an image.
-
-        This was added to make it easier to change the copy method.
-        """
-        # Create hardlink of image instead of copying if possible
-        if hasattr(os, "link"):
-            os.link(src_path, dst_path)
-        else:
-            shutil.copy(src_path, dst_path)
+        return filepaths_by_frame
openpype/hosts/tvpaint/worker/__init__.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from .worker_job import (
    JobFailed,
    ExecuteSimpleGeorgeScript,
    ExecuteGeorgeScript,
    CollectSceneData,
    SenderTVPaintCommands,
    ProcessTVPaintCommands
)

from .worker import main

__all__ = (
    "JobFailed",
    "ExecuteSimpleGeorgeScript",
    "ExecuteGeorgeScript",
    "CollectSceneData",
    "SenderTVPaintCommands",
    "ProcessTVPaintCommands",

    "main"
)
openpype/hosts/tvpaint/worker/worker.py (new file, 133 lines)
@@ -0,0 +1,133 @@
import signal
import time
import asyncio

from avalon.tvpaint.communication_server import (
    BaseCommunicator,
    CommunicationWrapper
)
from openpype_modules.job_queue.job_workers import WorkerJobsConnection

from .worker_job import ProcessTVPaintCommands


class TVPaintWorkerCommunicator(BaseCommunicator):
    """Modified communicator which takes care of processing jobs.

    Received jobs are sent to TVPaint by parsing 'ProcessTVPaintCommands'.
    """
    def __init__(self, server_url):
        super().__init__()

        self.return_code = 1
        self._server_url = server_url
        self._worker_connection = None

    def _start_webserver(self):
        """Create connection to workers server before TVPaint server."""
        loop = self.websocket_server.loop
        self._worker_connection = WorkerJobsConnection(
            self._server_url, "tvpaint", loop
        )
        asyncio.ensure_future(
            self._worker_connection.main_loop(register_worker=False),
            loop=loop
        )

        super()._start_webserver()

    def _on_client_connect(self, *args, **kwargs):
        super()._on_client_connect(*args, **kwargs)
        # Register as "ready to work" worker
        self._worker_connection.register_as_worker()

    def stop(self):
        """Stop worker connection and TVPaint server."""
        self._worker_connection.stop()
        self.return_code = 0
        super().stop()

    @property
    def current_job(self):
        """Retrieve job which should be processed."""
        if self._worker_connection:
            return self._worker_connection.current_job
        return None

    def _check_process(self):
        if self.process is None:
            return True

        if self.process.poll() is not None:
            asyncio.ensure_future(
                self._worker_connection.disconnect(),
                loop=self.websocket_server.loop
            )
            self._exit()
            return False
        return True

    def _process_job(self):
        job = self.current_job
        if job is None:
            return

        # Prepare variables used for sending the result
        success = False
        message = "Unknown function"
        data = None
        job_data = job["data"]
        workfile = job_data["workfile"]
        # Currently only the "commands" function can be processed
        if job_data.get("function") == "commands":
            try:
                commands = ProcessTVPaintCommands(
                    workfile, job_data["commands"], self
                )
                commands.execute()
                data = commands.response_data()
                success = True
                message = "Executed"

            except Exception as exc:
                message = "Error on worker: {}".format(str(exc))

        self._worker_connection.finish_job(success, message, data)

    def main_loop(self):
        """Main loop where jobs are processed.

        Server is stopped by killing this process or the TVPaint process.
        """
        while self.server_is_running:
            if self._check_process():
                self._process_job()
            time.sleep(1)

        return self.return_code


def _start_tvpaint(tvpaint_executable_path, server_url):
    communicator = TVPaintWorkerCommunicator(server_url)
    CommunicationWrapper.set_communicator(communicator)
    communicator.launch([tvpaint_executable_path])


def main(tvpaint_executable_path, server_url):
    # Register termination signal handler
    def signal_handler(*_args):
        print("Termination signal received. Stopping.")
        if CommunicationWrapper.communicator is not None:
            CommunicationWrapper.communicator.stop()

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    _start_tvpaint(tvpaint_executable_path, server_url)

    communicator = CommunicationWrapper.communicator
    if communicator is None:
        print("Communicator is not set")
        return 1

    return communicator.main_loop()
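A sketch of how a worker process might be started; the executable path and server URL below are illustrative, not values from this commit.

    from openpype.hosts.tvpaint.worker import main

    return_code = main(
        "C:/Program Files/TVPaint/tvpaint.exe",  # hypothetical executable path
        "ws://localhost:8079"                    # hypothetical job queue server URL
    )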
openpype/hosts/tvpaint/worker/worker_job.py (new file, 537 lines)
@@ -0,0 +1,537 @@
import os
import tempfile
import inspect
import copy
import json
import time
from uuid import uuid4
from abc import ABCMeta, abstractmethod, abstractproperty

import six

from openpype.api import PypeLogger
from openpype.modules import ModulesManager


TMP_FILE_PREFIX = "opw_tvp_"


class JobFailed(Exception):
    """Raised when a job was sent and finished unsuccessfully."""
    def __init__(self, job_status):
        job_state = job_status["state"]
        job_message = job_status["message"] or "Unknown issue"
        error_msg = (
            "Job didn't finish properly."
            " Job state: \"{}\" | Job message: \"{}\""
        ).format(job_state, job_message)

        self.job_status = job_status

        super().__init__(error_msg)
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class BaseCommand:
|
||||
"""Abstract TVPaint command which can be executed through worker.
|
||||
|
||||
Each command must have unique name and implemented 'execute' and
|
||||
'from_existing' methods.
|
||||
|
||||
Command also have id which is created on command creation.
|
||||
|
||||
The idea is that command is just a data container on sender side send
|
||||
througth server to a worker where is replicated one by one, executed and
|
||||
result sent back to sender through server.
|
||||
"""
|
||||
@abstractproperty
|
||||
def name(self):
|
||||
"""Command name (must be unique)."""
|
||||
pass
|
||||
|
||||
def __init__(self, data=None):
|
||||
if data is None:
|
||||
data = {}
|
||||
else:
|
||||
data = copy.deepcopy(data)
|
||||
|
||||
# Use 'id' from data when replicating on process side
|
||||
command_id = data.get("id")
|
||||
if command_id is None:
|
||||
command_id = str(uuid4())
|
||||
data["id"] = command_id
|
||||
data["command"] = self.name
|
||||
|
||||
self._parent = None
|
||||
self._result = None
|
||||
self._command_data = data
|
||||
self._done = False
|
||||
|
||||
def job_queue_root(self):
|
||||
"""Access to job queue root.
|
||||
|
||||
Job queue root is shared access point to files shared across senders
|
||||
and workers.
|
||||
"""
|
||||
if self._parent is None:
|
||||
return None
|
||||
return self._parent.job_queue_root()
|
||||
|
||||
def set_parent(self, parent):
|
||||
self._parent = parent
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
"""Command id."""
|
||||
return self._command_data["id"]
|
||||
|
||||
@property
|
||||
def parent(self):
|
||||
"""Parent of command expected type of 'TVPaintCommands'."""
|
||||
return self._parent
|
||||
|
||||
@property
|
||||
def communicator(self):
|
||||
"""TVPaint communicator.
|
||||
|
||||
Available only on worker side.
|
||||
"""
|
||||
return self._parent.communicator
|
||||
|
||||
@property
|
||||
def done(self):
|
||||
"""Is command done."""
|
||||
return self._done
|
||||
|
||||
def set_done(self):
|
||||
"""Change state of done."""
|
||||
self._done = True
|
||||
|
||||
def set_result(self, result):
|
||||
"""Set result of executed command."""
|
||||
self._result = result
|
||||
|
||||
def result(self):
|
||||
"""Result of command."""
|
||||
return copy.deepcopy(self._result)
|
||||
|
||||
def response_data(self):
|
||||
"""Data send as response to sender."""
|
||||
return {
|
||||
"id": self.id,
|
||||
"result": self._result,
|
||||
"done": self._done
|
||||
}
|
||||
|
||||
def command_data(self):
|
||||
"""Raw command data."""
|
||||
return copy.deepcopy(self._command_data)
|
||||
|
||||
@abstractmethod
|
||||
def execute(self):
|
||||
"""Execute command on worker side."""
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def from_existing(cls, data):
|
||||
"""Recreate object based on passed data."""
|
||||
pass
|
||||
|
||||
def execute_george(self, george_script):
|
||||
"""Execute george script in TVPaint."""
|
||||
return self.parent.execute_george(george_script)
|
||||
|
||||
def execute_george_through_file(self, george_script):
|
||||
"""Execute george script through temp file in TVPaint."""
|
||||
return self.parent.execute_george_through_file(george_script)
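
# A minimal concrete command, as a sketch of what an implementation of the
# 'execute'/'from_existing' pair looks like. The class below is illustrative
# and is not part of this changeset:
#
#     class GetTVPaintVersion(BaseCommand):
#         name = "get_tvpaint_version"
#
#         def execute(self):
#             # On the worker side the george return value becomes the result
#             self._result = self.execute_george("tv_version")
#
#         @classmethod
#         def from_existing(cls, data):
#             return cls(data)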

class ExecuteSimpleGeorgeScript(BaseCommand):
    """Execute a simple george script in TVPaint.

    Args:
        script(str): Script that will be executed.
    """
    name = "execute_george_simple"

    def __init__(self, script, data=None):
        data = data or {}
        data["script"] = script
        self._script = script
        super().__init__(data)

    def execute(self):
        self._result = self.execute_george(self._script)

    @classmethod
    def from_existing(cls, data):
        script = data.pop("script")
        return cls(script, data)


class ExecuteGeorgeScript(BaseCommand):
    """Execute a multiline george script in TVPaint.

    Args:
        script_lines(list): Lines that will be executed as a george script
            through a temp george file.
        tmp_file_keys(list): List of formatting keys in the george script
            that require replacement with a path to a temp file where the
            result will be stored. The content of the file is stored to the
            result by the key.
        root_dir_key(str): Formatting key that will be replaced in the george
            script with the job queue root, which can differ on worker side.
        data(dict): Raw data about the command.
    """
    name = "execute_george_through_file"

    def __init__(
        self, script_lines, tmp_file_keys=None, root_dir_key=None, data=None
    ):
        data = data or {}
        if not tmp_file_keys:
            tmp_file_keys = data.get("tmp_file_keys") or []

        data["script_lines"] = script_lines
        data["tmp_file_keys"] = tmp_file_keys
        data["root_dir_key"] = root_dir_key
        self._script_lines = script_lines
        self._tmp_file_keys = tmp_file_keys
        self._root_dir_key = root_dir_key
        super().__init__(data)

    def execute(self):
        filepath_by_key = {}
        script = self._script_lines
        if isinstance(script, list):
            script = "\n".join(script)

        # Replace temporary files in george script
        for key in self._tmp_file_keys:
            output_file = tempfile.NamedTemporaryFile(
                mode="w", prefix=TMP_FILE_PREFIX, suffix=".txt", delete=False
            )
            output_file.close()
            format_key = "{" + key + "}"
            output_path = output_file.name.replace("\\", "/")
            script = script.replace(format_key, output_path)
            filepath_by_key[key] = output_path

        # Replace job queue root in script
        if self._root_dir_key:
            job_queue_root = self.job_queue_root()
            format_key = "{" + self._root_dir_key + "}"
            script = script.replace(
                format_key, job_queue_root.replace("\\", "/")
            )

        # Execute the script
        self.execute_george_through_file(script)

        # Store result of temporary files
        result = {}
        for key, filepath in filepath_by_key.items():
            with open(filepath, "r") as stream:
                data = stream.read()
            result[key] = data
            os.remove(filepath)

        self._result = result

    @classmethod
    def from_existing(cls, data):
        """Recreate the object from data."""
        script_lines = data.pop("script_lines")
        tmp_file_keys = data.pop("tmp_file_keys", None)
        root_dir_key = data.pop("root_dir_key", None)
        return cls(script_lines, tmp_file_keys, root_dir_key, data)
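
# Sketch of how 'tmp_file_keys' is meant to be used; the george lines and
# the "output_path" key are illustrative, not taken from this changeset:
#
#     command = ExecuteGeorgeScript(
#         [
#             "tv_version",
#             "version_info = result",
#             # '{output_path}' is replaced with a temp file on the worker
#             "tv_writetextfile \"strict\" \"append\" \"{output_path}\" version_info"
#         ],
#         tmp_file_keys=["output_path"]
#     )
#     # After execution, command.result()["output_path"] holds the file
#     # content that the script wrote.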

class CollectSceneData(BaseCommand):
    """Helper command which will collect all useful info about the workfile.

    Result is a dictionary with all layers data, exposure frames by layer
    ids, pre/post behavior of layers by their ids, group information and
    scene data.
    """
    name = "collect_scene_data"

    def execute(self):
        from avalon.tvpaint.lib import (
            get_layers_data,
            get_groups_data,
            get_layers_pre_post_behavior,
            get_layers_exposure_frames,
            get_scene_data
        )

        groups_data = get_groups_data(communicator=self.communicator)
        layers_data = get_layers_data(communicator=self.communicator)
        layer_ids = [
            layer_data["layer_id"]
            for layer_data in layers_data
        ]
        pre_post_beh_by_layer_id = get_layers_pre_post_behavior(
            layer_ids, communicator=self.communicator
        )
        exposure_frames_by_layer_id = get_layers_exposure_frames(
            layer_ids, layers_data, communicator=self.communicator
        )

        self._result = {
            "layers_data": layers_data,
            "exposure_frames_by_layer_id": exposure_frames_by_layer_id,
            "pre_post_beh_by_layer_id": pre_post_beh_by_layer_id,
            "groups_data": groups_data,
            "scene_data": get_scene_data(self.communicator)
        }

    @classmethod
    def from_existing(cls, data):
        return cls(data)

@six.add_metaclass(ABCMeta)
class TVPaintCommands:
    """Wrapper around TVPaint commands to be able to send multiple commands.

    May send one or multiple commands at once. Also gives api access to
    commands info.

    Base for sender and receiver, which extend the logic for their own
    purposes. One of the differences is the preparation of the workfile path.

    Args:
        workfile(str): Path to workfile.
        job_queue_module(JobQueueModule): Object of OpenPype module JobQueue.
    """
    def __init__(self, workfile, job_queue_module=None):
        self._log = None
        self._commands = []
        self._command_classes_by_name = None
        if job_queue_module is None:
            manager = ModulesManager()
            job_queue_module = manager.modules_by_name["job_queue"]
        self._job_queue_module = job_queue_module

        self._workfile = self._prepare_workfile(workfile)

    @abstractmethod
    def _prepare_workfile(self, workfile):
        """Modify workfile path on initialization to match the platform."""
        pass

    def job_queue_root(self):
        """Job queue root for current platform using current settings."""
        return self._job_queue_module.get_jobs_root_from_settings()

    @property
    def log(self):
        """Access to logger object."""
        if self._log is None:
            self._log = PypeLogger.get_logger(self.__class__.__name__)
        return self._log

    @property
    def classes_by_name(self):
        """Prepare command classes for validation and recreation of commands.

        It is expected that all commands are defined in this python file so
        we're looking for all implementations of BaseCommand in globals.
        """
        if self._command_classes_by_name is None:
            command_classes_by_name = {}
            for attr in globals().values():
                if (
                    not inspect.isclass(attr)
                    or not issubclass(attr, BaseCommand)
                    or attr is BaseCommand
                ):
                    continue

                if inspect.isabstract(attr):
                    self.log.debug(
                        "Skipping abstract class {}".format(attr.__name__)
                    )
                    # Abstract classes can't be instantiated, skip them
                    continue
                command_classes_by_name[attr.name] = attr
            self._command_classes_by_name = command_classes_by_name

        return self._command_classes_by_name

    def add_command(self, command):
        """Add command to process."""
        command.set_parent(self)
        self._commands.append(command)

    def result(self):
        """Results of commands in the order in which they were processed."""
        return [
            command.result()
            for command in self._commands
        ]

    def response_data(self):
        """Data which should be sent from the worker."""
        return [
            command.response_data()
            for command in self._commands
        ]

class SenderTVPaintCommands(TVPaintCommands):
    """Sender implementation of TVPaint Commands."""
    def _prepare_workfile(self, workfile):
        """Remove job queue root from workfile path.

        It is expected that the worker will add its own root before the
        passed workfile.
        """
        new_workfile = workfile.replace("\\", "/")
        job_queue_root = self.job_queue_root().replace("\\", "/")
        if job_queue_root not in new_workfile:
            raise ValueError((
                "Workfile is not located in JobQueue root."
                " Workfile path: \"{}\". JobQueue root: \"{}\""
            ).format(workfile, job_queue_root))
        return new_workfile.replace(job_queue_root, "")

    def commands_data(self):
        """Commands data so that they can be recreated."""
        return [
            command.command_data()
            for command in self._commands
        ]

    def to_job_data(self):
        """Convert commands to job data before sending to workers server."""
        return {
            "workfile": self._workfile,
            "function": "commands",
            "commands": self.commands_data()
        }

    def set_result(self, result):
        commands_by_id = {
            command.id: command
            for command in self._commands
        }

        for item in result:
            command = commands_by_id[item["id"]]
            command.set_result(item["result"])
            command.set_done()

    def _send_job(self):
        """Send job to a workers server."""
        # Send job data to job queue server
        job_data = self.to_job_data()
        self.log.debug("Sending job to JobQueue server.\n{}".format(
            json.dumps(job_data, indent=4)
        ))
        job_id = self._job_queue_module.send_job("tvpaint", job_data)
        self.log.info((
            "Job sent to JobQueue server and got id \"{}\"."
            " Waiting for the job to finish."
        ).format(job_id))

        return job_id

    def send_job_and_wait(self):
        """Send job to workers server and wait for the response.

        Result of the job is stored into the object.

        Raises:
            JobFailed: When job finished but not successfully.
        """
        job_id = self._send_job()
        while True:
            job_status = self._job_queue_module.get_job_status(job_id)
            if job_status["done"]:
                break
            time.sleep(1)

        # Check if job state is done
        if job_status["state"] != "done":
            raise JobFailed(job_status)

        self.set_result(job_status["result"])

        self.log.debug("Job is done and result is stored.")
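
# End-to-end sender-side usage, as a sketch. The workfile path is an
# assumption; it must live under the configured job queue root:
#
#     commands = SenderTVPaintCommands("<jobs_root>/some_folder/scene.tvpp")
#     version_command = ExecuteSimpleGeorgeScript("tv_version")
#     commands.add_command(version_command)
#     try:
#         commands.send_job_and_wait()
#         print(version_command.result())
#     except JobFailed as exc:
#         print(exc.job_status["message"])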

class ProcessTVPaintCommands(TVPaintCommands):
    """Worker side of TVPaint Commands.

    It is expected this object is created only on the worker's side from
    existing data loaded from a job.

    Workfile path logic is based on 'SenderTVPaintCommands'.
    """
    def __init__(self, workfile, commands, communicator):
        super(ProcessTVPaintCommands, self).__init__(workfile)

        self._communicator = communicator

        self.commands_from_data(commands)

    def _prepare_workfile(self, workfile):
        """Prepend job queue root before the passed workfile."""
        workfile = workfile.replace("\\", "/")
        job_queue_root = self.job_queue_root().replace("\\", "/")
        new_workfile = "/".join([job_queue_root, workfile])
        while "//" in new_workfile:
            new_workfile = new_workfile.replace("//", "/")
        return os.path.normpath(new_workfile)

    @property
    def communicator(self):
        """Access to TVPaint communicator."""
        return self._communicator

    def commands_from_data(self, commands_data):
        """Recreate commands from passed data."""
        for command_data in commands_data:
            command_name = command_data["command"]

            klass = self.classes_by_name[command_name]
            command = klass.from_existing(command_data)
            self.add_command(command)

    def execute_george(self, george_script):
        """Helper method to execute a george script."""
        return self.communicator.execute_george(george_script)

    def execute_george_through_file(self, george_script):
        """Helper method to execute a george script through a temp file."""
        temporary_file = tempfile.NamedTemporaryFile(
            mode="w", prefix=TMP_FILE_PREFIX, suffix=".grg", delete=False
        )
        temporary_file.write(george_script)
        temporary_file.close()
        temp_file_path = temporary_file.name.replace("\\", "/")
        self.execute_george("tv_runscript {}".format(temp_file_path))
        os.remove(temp_file_path)

    def _open_workfile(self):
        """Open workfile in TVPaint."""
        workfile = self._workfile
        print("Opening workfile {}".format(workfile))
        george_script = "tv_LoadProject '\"'\"{}\"'\"'".format(workfile)
        self.execute_george_through_file(george_script)

    def _close_workfile(self):
        """Close workfile in TVPaint."""
        print("Closing workfile")
        self.execute_george_through_file("tv_projectclose")

    def execute(self):
        """Execute commands."""
        # First open the workfile
        self._open_workfile()
        # Execute commands one by one
        # TODO maybe stop processing when a command fails?
        print("Commands execution started ({})".format(len(self._commands)))
        for command in self._commands:
            command.execute()
            command.set_done()
        # Finally close the workfile
        self._close_workfile()
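
# Worker-side replication, as a sketch: 'job_data' is the payload received
# from the job queue server (see '_process_job' in the worker communicator):
#
#     commands = ProcessTVPaintCommands(
#         job_data["workfile"], job_data["commands"], communicator
#     )
#     commands.execute()
#     response = commands.response_data()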

@ -0,0 +1,255 @@
"""
Requires:
    CollectTVPaintWorkfileData

Provides:
    Instances
"""
import os
import re
import copy
import pyblish.api

from openpype.lib import get_subset_name_with_asset_doc


class CollectTVPaintInstances(pyblish.api.ContextPlugin):
    label = "Collect TVPaint Instances"
    order = pyblish.api.CollectorOrder + 0.2
    hosts = ["webpublisher"]
    targets = ["tvpaint_worker"]

    workfile_family = "workfile"
    workfile_variant = ""
    review_family = "review"
    review_variant = "Main"
    render_pass_family = "renderPass"
    render_layer_family = "renderLayer"
    render_layer_pass_name = "beauty"

    # Set by settings
    # Regex must contain 'layer' and 'pass' groups which are extracted from
    # the layer name when instances are created
    layer_name_regex = r"(?P<layer>L[0-9]{3}_\w+)_(?P<pass>.+)"
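
    # Sketch of how the default regex splits a layer name; the sample name
    # "L010_hero_shadow" is illustrative:
    #   >>> match = re.compile(layer_name_regex).search("L010_hero_shadow")
    #   >>> match.group("layer"), match.group("pass")
    #   ('L010_hero', 'shadow')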

    def process(self, context):
        # Prepare compiled regex
        layer_name_regex = re.compile(self.layer_name_regex)

        layers_data = context.data["layersData"]

        host_name = "tvpaint"
        task_name = context.data.get("task")
        asset_doc = context.data["assetEntity"]
        project_doc = context.data["projectEntity"]
        project_name = project_doc["name"]

        new_instances = []

        # Workfile instance
        workfile_subset_name = get_subset_name_with_asset_doc(
            self.workfile_family,
            self.workfile_variant,
            task_name,
            asset_doc,
            project_name,
            host_name
        )
        workfile_instance = self._create_workfile_instance(
            context, workfile_subset_name
        )
        new_instances.append(workfile_instance)

        # Review instance
        review_subset_name = get_subset_name_with_asset_doc(
            self.review_family,
            self.review_variant,
            task_name,
            asset_doc,
            project_name,
            host_name
        )
        review_instance = self._create_review_instance(
            context, review_subset_name
        )
        new_instances.append(review_instance)

        # Get render layers and passes from TVPaint layers
        # - it's based on regex extraction
        layers_by_layer_and_pass = {}
        for layer in layers_data:
            # Filter only visible layers
            if not layer["visible"]:
                continue

            result = layer_name_regex.search(layer["name"])
            # Layer name does not match the layer name regex
            # - should this raise an exception?
            if result is None:
                continue
            render_layer = result.group("layer")
            render_pass = result.group("pass")

            render_pass_mapping = layers_by_layer_and_pass.get(
                render_layer
            )
            if render_pass_mapping is None:
                render_pass_mapping = {}
                layers_by_layer_and_pass[render_layer] = render_pass_mapping

            if render_pass not in render_pass_mapping:
                render_pass_mapping[render_pass] = []
            render_pass_mapping[render_pass].append(copy.deepcopy(layer))

        layers_by_render_layer = {}
        for render_layer, render_passes in layers_by_layer_and_pass.items():
            render_layer_layers = []
            layers_by_render_layer[render_layer] = render_layer_layers
            for render_pass, layers in render_passes.items():
                render_layer_layers.extend(copy.deepcopy(layers))
                dynamic_data = {
                    "render_pass": render_pass,
                    "render_layer": render_layer,
                    # Override family for subset name
                    "family": "render"
                }

                subset_name = get_subset_name_with_asset_doc(
                    self.render_pass_family,
                    render_pass,
                    task_name,
                    asset_doc,
                    project_name,
                    host_name,
                    dynamic_data=dynamic_data
                )

                instance = self._create_render_pass_instance(
                    context, layers, subset_name
                )
                new_instances.append(instance)

        for render_layer, layers in layers_by_render_layer.items():
            variant = render_layer
            dynamic_data = {
                "render_pass": self.render_layer_pass_name,
                "render_layer": render_layer,
                # Override family for subset name
                "family": "render"
            }
            subset_name = get_subset_name_with_asset_doc(
                self.render_pass_family,
                variant,
                task_name,
                asset_doc,
                project_name,
                host_name,
                dynamic_data=dynamic_data
            )
            instance = self._create_render_layer_instance(
                context, layers, subset_name
            )
            new_instances.append(instance)

        # Set data same for all instances
        frame_start = context.data.get("frameStart")
        frame_end = context.data.get("frameEnd")

        for instance in new_instances:
            if (
                instance.data.get("frameStart") is None
                or instance.data.get("frameEnd") is None
            ):
                instance.data["frameStart"] = frame_start
                instance.data["frameEnd"] = frame_end

            if instance.data.get("asset") is None:
                instance.data["asset"] = asset_doc["name"]

            if instance.data.get("task") is None:
                instance.data["task"] = task_name

            if "representations" not in instance.data:
                instance.data["representations"] = []

            if "source" not in instance.data:
                instance.data["source"] = "webpublisher"

    def _create_workfile_instance(self, context, subset_name):
        workfile_path = context.data["workfilePath"]
        staging_dir = os.path.dirname(workfile_path)
        filename = os.path.basename(workfile_path)
        ext = os.path.splitext(filename)[-1]

        return context.create_instance(**{
            "name": subset_name,
            "label": subset_name,
            "subset": subset_name,
            "family": self.workfile_family,
            "families": [],
            "stagingDir": staging_dir,
            "representations": [{
                "name": ext.lstrip("."),
                "ext": ext.lstrip("."),
                "files": filename,
                "stagingDir": staging_dir
            }]
        })

    def _create_review_instance(self, context, subset_name):
        staging_dir = self._create_staging_dir(context, subset_name)
        layers_data = context.data["layersData"]
        # Filter hidden layers
        filtered_layers_data = [
            copy.deepcopy(layer)
            for layer in layers_data
            if layer["visible"]
        ]
        return context.create_instance(**{
            "name": subset_name,
            "label": subset_name,
            "subset": subset_name,
            "family": self.review_family,
            "families": [],
            "layers": filtered_layers_data,
            "stagingDir": staging_dir
        })

    def _create_render_pass_instance(self, context, layers, subset_name):
        staging_dir = self._create_staging_dir(context, subset_name)
        # Global instance data modifications
        # Fill families
        return context.create_instance(**{
            "name": subset_name,
            "subset": subset_name,
            "label": subset_name,
            "family": self.render_pass_family,
            # Add `review` family for thumbnail integration
            "families": [self.render_pass_family, "review"],
            "representations": [],
            "layers": layers,
            "stagingDir": staging_dir
        })

    def _create_render_layer_instance(self, context, layers, subset_name):
        staging_dir = self._create_staging_dir(context, subset_name)
        # Global instance data modifications
        # Fill families
        return context.create_instance(**{
            "name": subset_name,
            "subset": subset_name,
            "label": subset_name,
            "family": self.render_layer_family,
            # Add `review` family for thumbnail integration
            "families": [self.render_layer_family, "review"],
            "representations": [],
            "layers": layers,
            "stagingDir": staging_dir
        })

    def _create_staging_dir(self, context, subset_name):
        context_staging_dir = context.data["contextStagingDir"]
        staging_dir = os.path.join(context_staging_dir, subset_name)
        if not os.path.exists(staging_dir):
            os.makedirs(staging_dir)
        return staging_dir

@ -0,0 +1,142 @@
"""
Requires:
    CollectPublishedFiles
    CollectModules

Provides:
    workfilePath - Path to the TVPaint workfile
    sceneData - Scene data loaded from the workfile
    groupsData - Raw groups data loaded from the workfile
    layersData - Raw layers data loaded from the workfile
    layersExposureFrames - Exposure frames by layer id
    layersPrePostBehavior - Pre/post behavior by layer id
"""
import os
import uuid
import json
import shutil
import pyblish.api
from openpype.lib.plugin_tools import parse_json
from openpype.hosts.tvpaint.worker import (
    SenderTVPaintCommands,
    CollectSceneData
)


class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin):
    label = "Collect TVPaint Workfile data"
    order = pyblish.api.CollectorOrder - 0.4
    hosts = ["webpublisher"]
    targets = ["tvpaint_worker"]

    def process(self, context):
        # Get JobQueue module
        modules = context.data["openPypeModules"]
        job_queue_module = modules["job_queue"]
        jobs_root = job_queue_module.get_jobs_root()
        if not jobs_root:
            raise ValueError("Job Queue root is not set.")

        context.data["jobsRoot"] = jobs_root

        context_staging_dir = self._create_context_staging_dir(jobs_root)
        workfile_path = self._extract_workfile_path(
            context, context_staging_dir
        )
        context.data["contextStagingDir"] = context_staging_dir
        context.data["workfilePath"] = workfile_path

        # Prepare tvpaint command
        collect_scene_data_command = CollectSceneData()
        # Create TVPaint sender commands
        commands = SenderTVPaintCommands(workfile_path, job_queue_module)
        commands.add_command(collect_scene_data_command)

        # Send job and wait for the answer
        commands.send_job_and_wait()

        collected_data = collect_scene_data_command.result()
        layers_data = collected_data["layers_data"]
        groups_data = collected_data["groups_data"]
        scene_data = collected_data["scene_data"]
        exposure_frames_by_layer_id = (
            collected_data["exposure_frames_by_layer_id"]
        )
        pre_post_beh_by_layer_id = (
            collected_data["pre_post_beh_by_layer_id"]
        )

        # Store results
        # - scene data are stored the same way as in the TVPaint collector
        scene_data = {
            "sceneWidth": scene_data["width"],
            "sceneHeight": scene_data["height"],
            "scenePixelAspect": scene_data["pixel_aspect"],
            "sceneFps": scene_data["fps"],
            "sceneFieldOrder": scene_data["field_order"],
            "sceneMarkIn": scene_data["mark_in"],
            # scene_data["mark_in_state"],
            "sceneMarkInState": scene_data["mark_in_set"],
            "sceneMarkOut": scene_data["mark_out"],
            # scene_data["mark_out_state"],
            "sceneMarkOutState": scene_data["mark_out_set"],
            "sceneStartFrame": scene_data["start_frame"],
            "sceneBgColor": scene_data["bg_color"]
        }
        context.data["sceneData"] = scene_data
        # Store only raw data
        context.data["groupsData"] = groups_data
        context.data["layersData"] = layers_data
        context.data["layersExposureFrames"] = exposure_frames_by_layer_id
        context.data["layersPrePostBehavior"] = pre_post_beh_by_layer_id

        self.log.debug(
            (
                "Collected data"
                "\nScene data: {}"
                "\nLayers data: {}"
                "\nExposure frames: {}"
                "\nPre/Post behavior: {}"
            ).format(
                json.dumps(scene_data, indent=4),
                json.dumps(layers_data, indent=4),
                json.dumps(exposure_frames_by_layer_id, indent=4),
                json.dumps(pre_post_beh_by_layer_id, indent=4)
            )
        )

    def _create_context_staging_dir(self, jobs_root):
        if not os.path.exists(jobs_root):
            os.makedirs(jobs_root)

        random_folder_name = str(uuid.uuid4())
        full_path = os.path.join(jobs_root, random_folder_name)
        if not os.path.exists(full_path):
            os.makedirs(full_path)
        return full_path

    def _extract_workfile_path(self, context, context_staging_dir):
        """Find the first TVPaint file in tasks and use it."""
        batch_dir = context.data["batchDir"]
        batch_data = context.data["batchData"]
        src_workfile_path = None
        for task_id in batch_data["tasks"]:
            if src_workfile_path is not None:
                break
            task_dir = os.path.join(batch_dir, task_id)
            task_manifest_path = os.path.join(task_dir, "manifest.json")
            task_data = parse_json(task_manifest_path)
            task_files = task_data["files"]
            for filename in task_files:
                _, ext = os.path.splitext(filename)
                if ext.lower() == ".tvpp":
                    src_workfile_path = os.path.join(task_dir, filename)
                    break

        if src_workfile_path is None:
            raise ValueError(
                "Batch does not contain any TVPaint workfile (.tvpp)."
            )

        # Copy workfile to job queue work root
        new_workfile_path = os.path.join(
            context_staging_dir, os.path.basename(src_workfile_path)
        )
        shutil.copy(src_workfile_path, new_workfile_path)

        return new_workfile_path

@ -0,0 +1,535 @@
import os
import copy

from openpype.hosts.tvpaint.worker import (
    SenderTVPaintCommands,
    ExecuteSimpleGeorgeScript,
    ExecuteGeorgeScript
)

import pyblish.api
from openpype.hosts.tvpaint.lib import (
    calculate_layers_extraction_data,
    get_frame_filename_template,
    fill_reference_frames,
    composite_rendered_layers,
    rename_filepaths_by_frame_start
)
from PIL import Image


class ExtractTVPaintSequences(pyblish.api.Extractor):
    label = "Extract TVPaint Sequences"
    hosts = ["webpublisher"]
    targets = ["tvpaint_worker"]

    # Context plugins do not have families filtering
    families_filter = ["review", "renderPass", "renderLayer"]

    job_queue_root_key = "jobs_root"

    # Modifiable with settings
    review_bg = [255, 255, 255, 255]

    def process(self, context):
        # Get workfile path
        workfile_path = context.data["workfilePath"]
        jobs_root = context.data["jobsRoot"]
        jobs_root_slashed = jobs_root.replace("\\", "/")

        # Prepare scene data
        scene_data = context.data["sceneData"]
        scene_mark_in = scene_data["sceneMarkIn"]
        scene_mark_out = scene_data["sceneMarkOut"]
        scene_start_frame = scene_data["sceneStartFrame"]
        scene_bg_color = scene_data["sceneBgColor"]

        # Prepare layers behavior
        behavior_by_layer_id = context.data["layersPrePostBehavior"]
        exposure_frames_by_layer_id = context.data["layersExposureFrames"]

        # Handles are not stored per instance but on the Context
        handle_start = context.data["handleStart"]
        handle_end = context.data["handleEnd"]

        # Get JobQueue module
        modules = context.data["openPypeModules"]
        job_queue_module = modules["job_queue"]

        tvpaint_commands = SenderTVPaintCommands(
            workfile_path, job_queue_module
        )

        # Change scene Start Frame to 0 to prevent frame index issues
        # - the issue is that TVPaint versions deal with frame indexes in a
        #   different way when Start Frame is not `0`
        # NOTE It will be set back after rendering
        tvpaint_commands.add_command(
            ExecuteSimpleGeorgeScript("tv_startframe 0")
        )

        root_key_replacement = "{" + self.job_queue_root_key + "}"
        after_render_instances = []
        for instance in context:
            instance_families = set(instance.data.get("families", []))
            instance_families.add(instance.data["family"])
            valid = False
            for family in instance_families:
                if family in self.families_filter:
                    valid = True
                    break

            if not valid:
                continue

            self.log.info("* Preparing commands for instance \"{}\"".format(
                instance.data["label"]
            ))
            # Get all layers and filter out the invisible ones
            layers = instance.data["layers"]
            filtered_layers = [layer for layer in layers if layer["visible"]]
            if not filtered_layers:
                self.log.info(
                    "None of the layers from the instance"
                    " are visible. Extraction skipped."
                )
                continue

            joined_layer_names = ", ".join([
                "\"{}\"".format(str(layer["name"]))
                for layer in filtered_layers
            ])
            self.log.debug(
                "Instance has {} layers with names: {}".format(
                    len(filtered_layers), joined_layer_names
                )
            )

            # Staging dir must be created during collection
            staging_dir = instance.data["stagingDir"].replace("\\", "/")

            job_root_template = staging_dir.replace(
                jobs_root_slashed, root_key_replacement
            )

            # Frame start/end may be stored as float
            frame_start = int(instance.data["frameStart"])
            frame_end = int(instance.data["frameEnd"])

            # Prepare output frames
            output_frame_start = frame_start - handle_start
            output_frame_end = frame_end + handle_end

            # Change output frame start to 0 if handles make it a negative
            # number
            if output_frame_start < 0:
                self.log.warning((
                    "Frame start with handles has negative value."
                    " Changed to \"0\". Frame start: {}, Handle Start: {}"
                ).format(frame_start, handle_start))
                output_frame_start = 0

            # Create copy of scene Mark In/Out
            mark_in, mark_out = scene_mark_in, scene_mark_out

            # Fix possible changes of output frame
            mark_out, output_frame_end = self._fix_range_changes(
                mark_in, mark_out, output_frame_start, output_frame_end
            )
            filename_template = get_frame_filename_template(
                max(scene_mark_out, output_frame_end)
            )

            # -----------------------------------------------------------------
            self.log.debug(
                "Files will be rendered to folder: {}".format(staging_dir)
            )

            output_filepaths_by_frame_idx = {}
            for frame_idx in range(mark_in, mark_out + 1):
                filename = filename_template.format(frame=frame_idx)
                filepath = os.path.join(staging_dir, filename)
                output_filepaths_by_frame_idx[frame_idx] = filepath

            # Prepare data for post render processing
            post_render_data = {
                "output_dir": staging_dir,
                "layers": filtered_layers,
                "output_filepaths_by_frame_idx": output_filepaths_by_frame_idx,
                "instance": instance,
                "is_layers_render": False,
                "output_frame_start": output_frame_start,
                "output_frame_end": output_frame_end
            }
            # Store them to a list
            after_render_instances.append(post_render_data)

            # Review rendering
            if instance.data["family"] == "review":
                self.add_render_review_command(
                    tvpaint_commands, mark_in, mark_out, scene_bg_color,
                    job_root_template, filename_template
                )
                continue

            # Layers rendering
            extraction_data_by_layer_id = calculate_layers_extraction_data(
                filtered_layers,
                exposure_frames_by_layer_id,
                behavior_by_layer_id,
                mark_in,
                mark_out
            )
            filepaths_by_layer_id = self.add_render_command(
                tvpaint_commands,
                job_root_template,
                staging_dir,
                filtered_layers,
                extraction_data_by_layer_id
            )
            # Add more data to post render processing
            post_render_data.update({
                "is_layers_render": True,
                "extraction_data_by_layer_id": extraction_data_by_layer_id,
                "filepaths_by_layer_id": filepaths_by_layer_id
            })

        # Change scene frame Start back to the previous value
        tvpaint_commands.add_command(
            ExecuteSimpleGeorgeScript(
                "tv_startframe {}".format(scene_start_frame)
            )
        )
        self.log.info("Sending the job and waiting for response...")
        tvpaint_commands.send_job_and_wait()
        self.log.info("Render job finished")

        for post_render_data in after_render_instances:
            self._post_render_processing(post_render_data, mark_in, mark_out)

    def _fix_range_changes(
        self, mark_in, mark_out, output_frame_start, output_frame_end
    ):
        # Check Marks range and output range
        output_range = output_frame_end - output_frame_start
        marks_range = mark_out - mark_in

        # Lower Mark Out if the marks range is bigger than the output range
        # - do not render unused frames
        if output_range < marks_range:
            new_mark_out = mark_out - (marks_range - output_range)
            self.log.warning((
                "Lowering render range to {} frames. Changed Mark Out {} -> {}"
            ).format(output_range + 1, mark_out, new_mark_out))
            # Assign new mark out to variable
            mark_out = new_mark_out

        # Lower output frame end so the representation has the right
        # `frameEnd` value
        elif output_range > marks_range:
            new_output_frame_end = (
                output_frame_end - (output_range - marks_range)
            )
            self.log.warning((
                "Lowering representation range to {} frames."
                " Changed frame end {} -> {}"
            ).format(marks_range + 1, output_frame_end, new_output_frame_end))
            output_frame_end = new_output_frame_end
        return mark_out, output_frame_end
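
    # Worked example of the range fix above (numbers are illustrative):
    # with marks 0..20 (21 frames) and output 1001..1015 (15 frames),
    # output_range (14) < marks_range (20), so Mark Out is lowered to
    # 20 - (20 - 14) = 14 and only frames 0..14 are rendered.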

    def _post_render_processing(self, post_render_data, mark_in, mark_out):
        # Unpack values
        instance = post_render_data["instance"]
        output_filepaths_by_frame_idx = (
            post_render_data["output_filepaths_by_frame_idx"]
        )
        is_layers_render = post_render_data["is_layers_render"]
        output_dir = post_render_data["output_dir"]
        layers = post_render_data["layers"]
        output_frame_start = post_render_data["output_frame_start"]
        output_frame_end = post_render_data["output_frame_end"]

        # Trigger post processing of layers rendering
        # - only a few frames were rendered, this will complete the sequence
        # - multiple layers can be in a single instance and they must be
        #   composited over each other
        if is_layers_render:
            self._finish_layer_render(
                layers,
                post_render_data["extraction_data_by_layer_id"],
                post_render_data["filepaths_by_layer_id"],
                mark_in,
                mark_out,
                output_filepaths_by_frame_idx
            )

        # Create thumbnail
        thumbnail_filepath = os.path.join(output_dir, "thumbnail.jpg")
        thumbnail_src_path = output_filepaths_by_frame_idx[mark_in]
        self._create_thumbnail(thumbnail_src_path, thumbnail_filepath)

        # Rename filepaths to final frames
        repre_files = self._rename_output_files(
            output_filepaths_by_frame_idx,
            mark_in,
            mark_out,
            output_frame_start
        )

        # Fill tags and new families
        family_lowered = instance.data["family"].lower()
        tags = []
        if family_lowered in ("review", "renderlayer"):
            tags.append("review")

        # Sequence of one frame
        single_file = len(repre_files) == 1
        if single_file:
            repre_files = repre_files[0]

        # Extension is hardcoded
        # - changing the extension would require a code change
        new_repre = {
            "name": "png",
            "ext": "png",
            "files": repre_files,
            "stagingDir": output_dir,
            "tags": tags
        }

        if not single_file:
            new_repre["frameStart"] = output_frame_start
            new_repre["frameEnd"] = output_frame_end

        self.log.debug("Creating new representation: {}".format(new_repre))

        instance.data["representations"].append(new_repre)

        if family_lowered in ("renderpass", "renderlayer"):
            # Change family to render
            instance.data["family"] = "render"

        thumbnail_ext = os.path.splitext(thumbnail_filepath)[1]
        # Create thumbnail representation
        thumbnail_repre = {
            "name": "thumbnail",
            "ext": thumbnail_ext.replace(".", ""),
            "outputName": "thumb",
            "files": os.path.basename(thumbnail_filepath),
            "stagingDir": output_dir,
            "tags": ["thumbnail"]
        }
        instance.data["representations"].append(thumbnail_repre)

    def _rename_output_files(
        self, filepaths_by_frame, mark_in, mark_out, output_frame_start
    ):
        new_filepaths_by_frame = rename_filepaths_by_frame_start(
            filepaths_by_frame, mark_in, mark_out, output_frame_start
        )

        repre_filenames = []
        for filepath in new_filepaths_by_frame.values():
            repre_filenames.append(os.path.basename(filepath))

        if mark_in < output_frame_start:
            repre_filenames = list(reversed(repre_filenames))

        return repre_filenames

    def add_render_review_command(
        self,
        tvpaint_commands,
        mark_in,
        mark_out,
        scene_bg_color,
        job_root_template,
        filename_template
    ):
        """Export images from TVPaint using the `tv_savesequence` command.

        Args:
            tvpaint_commands (SenderTVPaintCommands): Commands wrapper to
                which render commands are added.
            mark_in (int): Starting frame index from which export will begin.
            mark_out (int): Frame index at which export will end.
            scene_bg_color (list): Bg color set in the scene. Result of the
                george script command `tv_background`.
            job_root_template (str): Output directory with the job queue root
                replaced by a formatting key.
            filename_template (str): Template for rendered file names.
        """
        self.log.debug("Preparing data for rendering.")
        bg_color = self._get_review_bg_color()
        first_frame_filepath = "/".join([
            job_root_template,
            filename_template.format(frame=mark_in)
        ])

        george_script_lines = [
            # Change bg color to color from settings
            "tv_background \"color\" {} {} {}".format(*bg_color),
            "tv_SaveMode \"PNG\"",
            "export_path = \"{}\"".format(
                first_frame_filepath.replace("\\", "/")
            ),
            "tv_savesequence '\"'export_path'\"' {} {}".format(
                mark_in, mark_out
            )
        ]
        if scene_bg_color:
            # Change bg color back to the previous scene bg color
            _scene_bg_color = copy.deepcopy(scene_bg_color)
            bg_type = _scene_bg_color.pop(0)
            orig_color_command = [
                "tv_background",
                "\"{}\"".format(bg_type)
            ]
            orig_color_command.extend(_scene_bg_color)

            george_script_lines.append(" ".join(orig_color_command))

        tvpaint_commands.add_command(
            ExecuteGeorgeScript(
                george_script_lines,
                root_dir_key=self.job_queue_root_key
            )
        )

    def add_render_command(
        self,
        tvpaint_commands,
        job_root_template,
        staging_dir,
        layers,
        extraction_data_by_layer_id
    ):
        """Export images from TVPaint.

        Args:
            tvpaint_commands (SenderTVPaintCommands): Commands wrapper to
                which render commands are added.
            job_root_template (str): Output directory with the job queue root
                replaced by a formatting key.
            staging_dir (str): Directory where files will be stored.
            layers (list): List of layers to be exported.
            extraction_data_by_layer_id (dict): Pre-calculated render data
                for each layer id.

        Returns:
            dict: Rendered filepaths by frame index, per layer id.
        """
        # Map layers by id
        layers_by_id = {
            layer["layer_id"]: layer
            for layer in layers
        }

        # Render layers
        filepaths_by_layer_id = {}
        for layer_id, render_data in extraction_data_by_layer_id.items():
            layer = layers_by_id[layer_id]
            frame_references = render_data["frame_references"]
            filenames_by_frame_index = render_data["filenames_by_frame_index"]

            filepaths_by_frame = {}
            command_filepath_by_frame = {}
            for frame_idx, ref_idx in frame_references.items():
                # None reference is skipped because it does not have a source
                if ref_idx is None:
                    filepaths_by_frame[frame_idx] = None
                    continue
                filename = filenames_by_frame_index[frame_idx]

                filepaths_by_frame[frame_idx] = os.path.join(
                    staging_dir, filename
                )
                if frame_idx == ref_idx:
                    command_filepath_by_frame[frame_idx] = "/".join(
                        [job_root_template, filename]
                    )

            self._add_render_layer_command(
                tvpaint_commands, layer, command_filepath_by_frame
            )
            filepaths_by_layer_id[layer_id] = filepaths_by_frame

        return filepaths_by_layer_id

    def _add_render_layer_command(
        self, tvpaint_commands, layer, filepaths_by_frame
    ):
        george_script_lines = [
            # Set current layer by position
            "tv_layergetid {}".format(layer["position"]),
            "layer_id = result",
            "tv_layerset layer_id",
            "tv_SaveMode \"PNG\""
        ]

        for frame_idx, filepath in filepaths_by_frame.items():
            if filepath is None:
                continue

            # Go to frame
            george_script_lines.append("tv_layerImage {}".format(frame_idx))
            # Store image to output
            george_script_lines.append(
                "tv_saveimage \"{}\"".format(filepath.replace("\\", "/"))
            )

        tvpaint_commands.add_command(
            ExecuteGeorgeScript(
                george_script_lines,
                root_dir_key=self.job_queue_root_key
            )
        )

    def _finish_layer_render(
        self,
        layers,
        extraction_data_by_layer_id,
        filepaths_by_layer_id,
        mark_in,
        mark_out,
        output_filepaths_by_frame_idx
    ):
        # Fill frames between `frame_start_index` and `frame_end_index`
        self.log.debug("Filling frames that were not rendered.")
        for layer_id, render_data in extraction_data_by_layer_id.items():
            frame_references = render_data["frame_references"]
            filepaths_by_frame = filepaths_by_layer_id[layer_id]
            fill_reference_frames(frame_references, filepaths_by_frame)

        # Composite layer frames into the final filepaths
        self.log.info("Started compositing of layer frames.")
        composite_rendered_layers(
            layers, filepaths_by_layer_id,
            mark_in, mark_out,
            output_filepaths_by_frame_idx
        )

    def _create_thumbnail(self, thumbnail_src_path, thumbnail_filepath):
        if not os.path.exists(thumbnail_src_path):
            return

        source_img = Image.open(thumbnail_src_path)

        # Composite background only on rgba images
        # - just making sure
        if source_img.mode.lower() == "rgba":
            bg_color = self._get_review_bg_color()
            self.log.debug("Adding thumbnail background color {}.".format(
                " ".join([str(val) for val in bg_color])
            ))
            bg_image = Image.new("RGBA", source_img.size, bg_color)
            thumbnail_obj = Image.alpha_composite(bg_image, source_img)
            thumbnail_obj.convert("RGB").save(thumbnail_filepath)

        else:
            self.log.info((
                "Source for thumbnail has mode \"{}\" (Expected: RGBA)."
                " Can't use thumbnail background color."
            ).format(source_img.mode))
            source_img.save(thumbnail_filepath)

    def _get_review_bg_color(self):
        red = green = blue = 255
        if self.review_bg:
            if len(self.review_bg) == 4:
                red, green, blue, _ = self.review_bg
            elif len(self.review_bg) == 3:
                red, green, blue = self.review_bg
        return (red, green, blue)

@ -0,0 +1,31 @@
# -*- coding: utf-8 -*-
"""Cleanup leftover files from publish."""
import os
import shutil
import pyblish.api


class CleanUpJobRoot(pyblish.api.ContextPlugin):
    """Cleans up the job root directory after a successful publish.

    Remove all files in the job root, as all of them should be published.
    """

    order = pyblish.api.IntegratorOrder + 1
    label = "Clean Up Job Root"
    optional = True
    active = True

    def process(self, context):
        context_staging_dir = context.data.get("contextStagingDir")
        if not context_staging_dir:
            self.log.info("Key 'contextStagingDir' is empty.")

        elif not os.path.exists(context_staging_dir):
            self.log.info((
                "Job root directory for this publish does not"
                " exist anymore \"{}\"."
            ).format(context_staging_dir))
        else:
            self.log.info("Deleting job root with all files.")
            shutil.rmtree(context_staging_dir)

@ -0,0 +1,35 @@
import pyblish.api


class ValidateWorkfileData(pyblish.api.ContextPlugin):
    """Validate that Mark In/Out are enabled and match the expected duration.

    Mark In/Out do not have to match frameStart and frameEnd but the
    duration is important.
    """

    label = "Validate Workfile Data"
    order = pyblish.api.ValidatorOrder

    def process(self, context):
        # Data collected in `CollectAvalonEntities`
        frame_start = context.data["frameStart"]
        frame_end = context.data["frameEnd"]
        handle_start = context.data["handleStart"]
        handle_end = context.data["handleEnd"]

        scene_data = context.data["sceneData"]
        scene_mark_in = scene_data["sceneMarkIn"]
        scene_mark_out = scene_data["sceneMarkOut"]

        expected_range = (
            (frame_end - frame_start + 1)
            + handle_start
            + handle_end
        )
        marks_range = scene_mark_out - scene_mark_in + 1
        if expected_range != marks_range:
            raise AssertionError((
                "Wrong Mark In/Out range."
                " Expected range is {} frames, got {} frames"
            ).format(expected_range, marks_range))
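
# Worked example of the duration check (numbers are illustrative):
# frameStart 1001, frameEnd 1010, handleStart 5, handleEnd 5 gives an
# expected range of (1010 - 1001 + 1) + 5 + 5 = 20 frames, so the scene
# must have e.g. Mark In 0 and Mark Out 19 (19 - 0 + 1 = 20 frames).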

@ -198,6 +198,15 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint):
        # - filter defines command and can extend arguments dictionary
        # This is used only if 'studio_processing' is enabled on batch
        studio_processing_filters = [
            # TVPaint filter
            {
                "extensions": [".tvpp"],
                "command": "remotepublish",
                "arguments": {
                    "targets": ["tvpaint_worker"]
                },
                "add_to_queue": False
            },
            # Photoshop filter
            {
                "extensions": [".psd", ".psb"],

@ -27,7 +27,7 @@ class CollectUsername(pyblish.api.ContextPlugin):
    order = pyblish.api.CollectorOrder - 0.488
    label = "Collect ftrack username"
    hosts = ["webpublisher", "photoshop"]
-   targets = ["remotepublish", "filespublish"]
+   targets = ["remotepublish", "filespublish", "tvpaint_worker"]

    _context = None

6
openpype/modules/default_modules/job_queue/__init__.py
Normal file

@ -0,0 +1,6 @@
from .module import JobQueueModule


__all__ = (
    "JobQueueModule",
)

@ -0,0 +1,8 @@
from .server import WebServerManager
from .utils import main


__all__ = (
    "WebServerManager",
    "main"
)

@ -0,0 +1,62 @@
import json

from aiohttp.web_response import Response


class JobQueueResource:
    def __init__(self, job_queue, server_manager):
        self.server_manager = server_manager

        self._prefix = "/api"

        self._job_queue = job_queue

        self.endpoint_defs = (
            ("POST", "/jobs", self.post_job),
            ("GET", "/jobs", self.get_jobs),
            ("GET", "/jobs/{job_id}", self.get_job)
        )

        self.register()

    def register(self):
        for methods, url, callback in self.endpoint_defs:
            final_url = self._prefix + url
            self.server_manager.add_route(
                methods, final_url, callback
            )

    async def get_jobs(self, request):
        jobs_data = []
        for job in self._job_queue.get_jobs():
            jobs_data.append(job.status())
        return Response(status=200, body=self.encode(jobs_data))

    async def post_job(self, request):
        data = await request.json()
        host_name = data.get("host_name")
        if not host_name:
            return Response(
                status=400, text="Key \"host_name\" not filled."
            )

        job = self._job_queue.create_job(host_name, data)
        return Response(status=201, text=job.id)

    async def get_job(self, request):
        job_id = request.match_info["job_id"]
        content = self._job_queue.get_job_status(job_id)
        if content is None:
            content = {}
        return Response(
            status=200,
            body=self.encode(content),
            content_type="application/json"
        )

    @classmethod
    def encode(cls, data):
        return json.dumps(
            data,
            indent=4
        ).encode("utf-8")
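
# HTTP usage sketch for the endpoints above; the server address is an
# assumption, routes are registered under the "/api" prefix:
#
#   POST /api/jobs with body {"host_name": "tvpaint", ...} -> 201, job id
#   GET  /api/jobs/<job_id>                                -> 200, job status
#   GET  /api/jobs                                         -> 200, all jobs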

240
openpype/modules/default_modules/job_queue/job_server/jobs.py
Normal file

@ -0,0 +1,240 @@
import datetime
import collections
from uuid import uuid4


class Job:
    """Job related to specific host name.

    Data must contain everything needed to finish the job.
    """
    # Remove done jobs each n days to clear memory
    keep_in_memory_days = 3

    def __init__(self, host_name, data, job_id=None, created_time=None):
        if job_id is None:
            job_id = str(uuid4())
        self._id = job_id
        if created_time is None:
            created_time = datetime.datetime.now()
        self._created_time = created_time
        self._started_time = None
        self._done_time = None
        self.host_name = host_name
        self.data = data
        self._result_data = None

        self._started = False
        self._done = False
        self._errored = False
        self._message = None
        self._deleted = False

        self._worker = None

    def keep_in_memory(self):
        if self._done_time is None:
            return True

        now = datetime.datetime.now()
        delta = now - self._done_time
        return delta.days < self.keep_in_memory_days

    @property
    def id(self):
        return self._id

    @property
    def done(self):
        return self._done

    def reset(self):
        self._started = False
        self._started_time = None
        self._done = False
        self._done_time = None
        self._errored = False
        self._message = None

        self._worker = None

    @property
    def started(self):
        return self._started

    @property
    def deleted(self):
        return self._deleted

    def set_deleted(self):
        self._deleted = True
        self.set_worker(None)

    def set_worker(self, worker):
        if worker is self._worker:
            return

        if self._worker is not None:
            self._worker.set_current_job(None)

        self._worker = worker
        if worker is not None:
            worker.set_current_job(self)

    def set_started(self):
        self._started_time = datetime.datetime.now()
        self._started = True

    def set_done(self, success=True, message=None, data=None):
        self._done = True
        self._done_time = datetime.datetime.now()
        self._errored = not success
        self._message = message
        self._result_data = data
        if self._worker is not None:
            self._worker.set_current_job(None)

    def status(self):
        worker_id = None
        if self._worker is not None:
            worker_id = self._worker.id
        output = {
            "id": self.id,
            "worker_id": worker_id,
            "done": self._done
        }
        output["message"] = self._message or None

        state = "waiting"
        if self._deleted:
            state = "deleted"
        elif self._errored:
            state = "error"
        elif self._done:
            state = "done"
        elif self._started:
            state = "started"

        output["result"] = self._result_data

        output["state"] = state

        return output


class JobQueue:
    """Queue holds jobs that should be done and workers that can do them.

    Also assigns jobs to workers.
    """
    old_jobs_check_minutes_interval = 30

    def __init__(self):
        self._last_old_jobs_check = datetime.datetime.now()
        self._jobs_by_id = {}
        self._job_queue_by_host_name = collections.defaultdict(
            collections.deque
        )
        self._workers_by_id = {}
        self._workers_by_host_name = collections.defaultdict(list)

    def workers(self):
        """All currently registered workers."""
        return self._workers_by_id.values()

    def add_worker(self, worker):
        host_name = worker.host_name
        print("Added new worker for \"{}\"".format(host_name))
        self._workers_by_id[worker.id] = worker
        self._workers_by_host_name[host_name].append(worker)

    def get_worker(self, worker_id):
        return self._workers_by_id.get(worker_id)

    def remove_worker(self, worker):
        # Look if worker had an assigned job to do
        job = worker.current_job
        if job is not None and not job.done:
            # Reset job
            job.set_worker(None)
            job.reset()
            # Add job back to queue
            self._job_queue_by_host_name[job.host_name].appendleft(job)

        # Remove worker from registered workers
        self._workers_by_id.pop(worker.id, None)
        host_name = worker.host_name
        if worker in self._workers_by_host_name[host_name]:
            self._workers_by_host_name[host_name].remove(worker)

        print("Removed worker for \"{}\"".format(host_name))

    def assign_jobs(self):
        """Try to assign a job to each idle worker.

        Jobs without an available worker are marked as errored.
        """
        available_host_names = set()
        for worker in self._workers_by_id.values():
            host_name = worker.host_name
            available_host_names.add(host_name)
            if worker.is_idle():
                jobs = self._job_queue_by_host_name[host_name]
                while jobs:
                    job = jobs.popleft()
                    if not job.deleted:
                        worker.set_current_job(job)
                        break

        for host_name in tuple(self._job_queue_by_host_name.keys()):
            if host_name in available_host_names:
                continue

            jobs_deque = self._job_queue_by_host_name[host_name]
            message = "No available workers for \"{}\"".format(host_name)
            while jobs_deque:
                job = jobs_deque.popleft()
                if not job.deleted:
                    job.set_done(False, message)
        self._remove_old_jobs()

    def get_jobs(self):
        return self._jobs_by_id.values()

    def get_job(self, job_id):
        """Job by its id."""
        return self._jobs_by_id.get(job_id)

    def create_job(self, host_name, job_data):
        """Create new job from passed data and add it to queue."""
        job = Job(host_name, job_data)
        self._jobs_by_id[job.id] = job
        self._job_queue_by_host_name[host_name].append(job)
        return job

    def _remove_old_jobs(self):
        """Once in a specific interval check if old finished jobs should be removed."""
        now = datetime.datetime.now()
        delta = now - self._last_old_jobs_check
        # The interval is in minutes, so compare against seconds
        if delta.seconds < self.old_jobs_check_minutes_interval * 60:
            return
        self._last_old_jobs_check = now

        for job_id in tuple(self._jobs_by_id.keys()):
            job = self._jobs_by_id[job_id]
            if not job.keep_in_memory():
                self._jobs_by_id.pop(job_id)

    def remove_job(self, job_id):
        """Delete job and eventually stop it."""
        job = self._jobs_by_id.get(job_id)
        if job is None:
            return

        job.set_deleted()
        self._jobs_by_id.pop(job.id)

    def get_job_status(self, job_id):
        """Job's status based on id."""
        job = self._jobs_by_id.get(job_id)
        if job is None:
            return {}
        return job.status()
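A short sketch of the lifecycle these two classes implement; the 'worker' object here is a placeholder, as it is normally created by the WebSocket route in workers_rpc_route.py further down in this diff.

queue = JobQueue()
job = queue.create_job("tvpaint", {"host_name": "tvpaint"})
print(job.status()["state"])    # "waiting" - no worker has picked it up

queue.add_worker(worker)        # a connected Worker for "tvpaint"
queue.assign_jobs()             # idle workers pick up queued jobs; jobs
                                # without a matching worker are errored

job.set_done(True, None, {"output": "..."})
print(job.status()["state"])    # "done"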
154
openpype/modules/default_modules/job_queue/job_server/server.py
Normal file

@@ -0,0 +1,154 @@
import threading
import asyncio
import logging

from aiohttp import web

from .jobs import JobQueue
from .job_queue_route import JobQueueResource
from .workers_rpc_route import WorkerRpc

log = logging.getLogger(__name__)


class WebServerManager:
    """Manager that takes care of the web server thread."""
    def __init__(self, port, host, loop=None):
        self.port = port
        self.host = host
        self.app = web.Application()
        if loop is None:
            loop = asyncio.new_event_loop()

        # add route with multiple methods for single "external app"
        self.webserver_thread = WebServerThread(self, loop)

    @property
    def url(self):
        return "http://{}:{}".format(self.host, self.port)

    def add_route(self, *args, **kwargs):
        self.app.router.add_route(*args, **kwargs)

    def add_static(self, *args, **kwargs):
        self.app.router.add_static(*args, **kwargs)

    def start_server(self):
        if self.webserver_thread and not self.webserver_thread.is_alive():
            self.webserver_thread.start()

    def stop_server(self):
        if not self.is_running:
            return

        try:
            log.debug("Stopping web server")
            self.webserver_thread.stop()

        except Exception as exc:
            print("Errored", str(exc))
            log.warning(
                "Error happened while stopping the web server",
                exc_info=True
            )

    @property
    def is_running(self):
        if self.webserver_thread is not None:
            return self.webserver_thread.is_running
        return False


class WebServerThread(threading.Thread):
    """Listener for requests in a thread."""
    def __init__(self, manager, loop):
        super(WebServerThread, self).__init__()

        self._is_running = False
        self._stopped = False
        self.manager = manager
        self.loop = loop
        self.runner = None
        self.site = None

        job_queue = JobQueue()
        self.job_queue_route = JobQueueResource(job_queue, manager)
        self.workers_route = WorkerRpc(job_queue, manager, loop=loop)

    @property
    def port(self):
        return self.manager.port

    @property
    def host(self):
        return self.manager.host

    @property
    def stopped(self):
        return self._stopped

    @property
    def is_running(self):
        return self._is_running

    def run(self):
        self._is_running = True

        try:
            log.info("Starting web server")
            asyncio.set_event_loop(self.loop)
            self.loop.run_until_complete(self.start_server())

            asyncio.ensure_future(self.check_shutdown(), loop=self.loop)
            self.loop.run_forever()

        except Exception:
            log.warning(
                "Web server service has failed", exc_info=True
            )
        finally:
            self.loop.close()

        self._is_running = False
        log.info("Web server stopped")

    async def start_server(self):
        """Start runner and TCPSite."""
        self.runner = web.AppRunner(self.manager.app)
        await self.runner.setup()
        self.site = web.TCPSite(self.runner, self.host, self.port)
        await self.site.start()

    def stop(self):
        """Set '_stopped' flag to True; 'check_shutdown' shuts server down."""
        self._stopped = True

    async def check_shutdown(self):
        """Future that periodically checks if the server should keep running."""
        while not self._stopped:
            await asyncio.sleep(0.5)

        print("Starting shutdown")
        if self.workers_route:
            await self.workers_route.stop()

        print("Stopping site")
        await self.site.stop()
        print("Site stopped")
        await self.runner.cleanup()

        print("Runner stopped")
        tasks = [
            task
            for task in asyncio.all_tasks()
            if task is not asyncio.current_task()
        ]
        list(map(lambda task: task.cancel(), tasks))  # cancel all the tasks
        results = await asyncio.gather(*tasks, return_exceptions=True)
        log.debug(f'Finished awaiting cancelled tasks, results: {results}...')
        await self.loop.shutdown_asyncgens()
        # to really make sure everything else has time to stop
        await asyncio.sleep(0.07)
        self.loop.stop()
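The manager/thread pair is meant to be driven from another thread; a hedged usage sketch (the join() call is an assumption, inherited from threading.Thread):

manager = WebServerManager(8079, "localhost")
manager.start_server()            # spawns WebServerThread, returns immediately

# ... serve jobs ...

manager.stop_server()             # sets the thread's '_stopped' flag;
                                  # 'check_shutdown' then tears down the
                                  # site, runner and event loop
manager.webserver_thread.join()   # wait until the loop has really stopped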

@@ -0,0 +1,51 @@
import sys
import signal
import time
import socket

from .server import WebServerManager


class SharedObjects:
    stopped = False

    @classmethod
    def stop(cls):
        cls.stopped = True


def main(port=None, host=None):
    def signal_handler(sig, frame):
        print("Signal to kill process received. Termination starts.")
        SharedObjects.stop()

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    port = int(port or 8079)
    host = str(host or "localhost")

    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as con:
        result_of_check = con.connect_ex((host, port))

    if result_of_check == 0:
        print((
            "Server {}:{} is already running or address is occupied."
        ).format(host, port))
        return 1

    print("Running server {}:{}".format(host, port))
    manager = WebServerManager(port, host)
    manager.start_server()

    stopped = False
    while manager.is_running:
        if not stopped and SharedObjects.stopped:
            stopped = True
            manager.stop_server()
        time.sleep(0.1)
    return 0


if __name__ == "__main__":
    sys.exit(main())
122
openpype/modules/default_modules/job_queue/job_server/workers.py
Normal file

@@ -0,0 +1,122 @@
import asyncio
from uuid import uuid4
from aiohttp import WSCloseCode
from aiohttp_json_rpc.protocol import encode_request


class WorkerState:
    IDLE = object()
    JOB_ASSIGNED = object()
    JOB_SENT = object()


class Worker:
    """Worker that can handle jobs of a specific host."""
    def __init__(self, host_name, http_request):
        self._id = None
        self.host_name = host_name
        self._http_request = http_request
        self._state = WorkerState.IDLE
        self._job = None

        # Give ability to send requests to worker
        http_request.request_id = str(uuid4())
        http_request.pending_requests = {}

    async def send_job(self):
        if self._job is not None:
            data = {
                "job_id": self._job.id,
                "worker_id": self.id,
                "data": self._job.data
            }
            return await self.call("start_job", data)
        return False

    async def call(self, method, params=None, timeout=None):
        """Call method on worker's side."""
        request_id = self._http_request.request_id
        self._http_request.request_id = str(uuid4())
        pending_requests = self._http_request.pending_requests
        pending_requests[request_id] = asyncio.Future()

        request = encode_request(method, id=request_id, params=params)

        await self._http_request.ws.send_str(request)

        if timeout:
            await asyncio.wait_for(
                pending_requests[request_id],
                timeout=timeout
            )

        else:
            await pending_requests[request_id]

        result = pending_requests[request_id].result()
        del pending_requests[request_id]

        return result

    async def close(self):
        return await self.ws.close(
            code=WSCloseCode.GOING_AWAY,
            message="Server shutdown"
        )

    @property
    def id(self):
        if self._id is None:
            self._id = str(uuid4())
        return self._id

    @property
    def state(self):
        return self._state

    @property
    def current_job(self):
        return self._job

    @property
    def http_request(self):
        return self._http_request

    @property
    def ws(self):
        return self.http_request.ws

    def connection_is_alive(self):
        if self.ws.closed or self.ws._writer.transport.is_closing():
            return False
        return True

    def is_idle(self):
        return self._state is WorkerState.IDLE

    def job_assigned(self):
        return (
            self._state is WorkerState.JOB_ASSIGNED
            or self._state is WorkerState.JOB_SENT
        )

    def is_working(self):
        return self._state is WorkerState.JOB_SENT

    def set_current_job(self, job):
        if job is self._job:
            return

        self._job = job
        if job is None:
            self._set_idle()
        else:
            self._state = WorkerState.JOB_ASSIGNED
            job.set_worker(self)

    def _set_idle(self):
        self._job = None
        self._state = WorkerState.IDLE

    def set_working(self):
        self._state = WorkerState.JOB_SENT
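The WorkerState sentinels form a small state machine; roughly (the 'job' and 'http_request' objects are placeholders):

worker = Worker("tvpaint", http_request)   # starts in IDLE
worker.set_current_job(job)                # IDLE -> JOB_ASSIGNED, also links job.set_worker()
worker.set_working()                       # JOB_ASSIGNED -> JOB_SENT, once the job was sent
worker.set_current_job(None)               # back to IDLE when the job finishes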

@@ -0,0 +1,124 @@
import asyncio

import aiohttp
from aiohttp_json_rpc import JsonRpc
from aiohttp_json_rpc.protocol import (
    encode_error, decode_msg, JsonRpcMsgTyp
)
from aiohttp_json_rpc.exceptions import RpcError
from .workers import Worker


class WorkerRpc(JsonRpc):
    def __init__(self, job_queue, manager, **kwargs):
        super().__init__(**kwargs)

        self._job_queue = job_queue
        self._manager = manager

        self._stopped = False

        # Register methods
        self.add_methods(
            ("", self.register_worker),
            ("", self.job_done)
        )
        asyncio.ensure_future(self._rpc_loop(), loop=self.loop)

        self._manager.add_route(
            "*", "/ws", self.handle_request
        )

    # Panel routes for tools
    async def register_worker(self, request, host_name):
        worker = Worker(host_name, request.http_request)
        self._job_queue.add_worker(worker)
        return worker.id

    async def _rpc_loop(self):
        while self.loop.is_running():
            if self._stopped:
                break

            for worker in tuple(self._job_queue.workers()):
                if not worker.connection_is_alive():
                    self._job_queue.remove_worker(worker)
            self._job_queue.assign_jobs()

            await self.send_jobs()
            await asyncio.sleep(5)

    async def job_done(self, worker_id, job_id, success, message, data):
        worker = self._job_queue.get_worker(worker_id)
        if worker is not None:
            worker.set_current_job(None)

        job = self._job_queue.get_job(job_id)
        if job is not None:
            job.set_done(success, message, data)
        return True

    async def send_jobs(self):
        invalid_workers = []
        for worker in self._job_queue.workers():
            if worker.job_assigned() and not worker.is_working():
                try:
                    await worker.send_job()

                except ConnectionResetError:
                    invalid_workers.append(worker)

        for worker in invalid_workers:
            self._job_queue.remove_worker(worker)

    async def handle_websocket_request(self, http_request):
        """Override this method to catch CLOSING messages."""
        http_request.msg_id = 0
        http_request.pending = {}

        # prepare and register websocket
        ws = aiohttp.web_ws.WebSocketResponse()
        await ws.prepare(http_request)
        http_request.ws = ws
        self.clients.append(http_request)

        while not ws.closed:
            self.logger.debug('waiting for messages')
            raw_msg = await ws.receive()

            if raw_msg.type == aiohttp.WSMsgType.TEXT:
                self.logger.debug('raw msg received: %s', raw_msg.data)
                self.loop.create_task(
                    self._handle_rpc_msg(http_request, raw_msg)
                )

            elif raw_msg.type == aiohttp.WSMsgType.CLOSING:
                break

        self.clients.remove(http_request)
        return ws

    async def _handle_rpc_msg(self, http_request, raw_msg):
        # This duplicates code from the parent class, but it is the only way
        #  to also handle server -> client requests
        try:
            _raw_message = raw_msg.data
            msg = decode_msg(_raw_message)

        except RpcError as error:
            await self._ws_send_str(http_request, encode_error(error))
            return

        if msg.type in (JsonRpcMsgTyp.RESULT, JsonRpcMsgTyp.ERROR):
            request_id = msg.data["id"]
            if request_id in http_request.pending_requests:
                future = http_request.pending_requests[request_id]
                future.set_result(msg.data["result"])
                return

        return await super()._handle_rpc_msg(http_request, raw_msg)

    async def stop(self):
        self._stopped = True
        for worker in tuple(self._job_queue.workers()):
            await worker.close()

@@ -0,0 +1,5 @@
from .base_worker import WorkerJobsConnection

__all__ = (
    "WorkerJobsConnection",
)

@@ -0,0 +1,190 @@
import sys
import datetime
import asyncio
import traceback

from aiohttp_json_rpc import JsonRpcClient


class WorkerClient(JsonRpcClient):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.add_methods(
            ("", self.start_job),
        )
        self.current_job = None
        self._id = None

    def set_id(self, worker_id):
        self._id = worker_id

    async def start_job(self, job_data):
        if self.current_job is not None:
            return False

        print("Got new job {}".format(str(job_data)))
        self.current_job = job_data
        return True

    def finish_job(self, success, message, data):
        asyncio.ensure_future(
            self._finish_job(success, message, data),
            loop=self._loop
        )

    async def _finish_job(self, success, message, data):
        print("Current job", self.current_job)
        job_id = self.current_job["job_id"]
        self.current_job = None

        return await self.call(
            "job_done", [self._id, job_id, success, message, data]
        )


class WorkerJobsConnection:
    """WS connection to the job server.

    Helper class to create a connection and process jobs from the job server.

    To receive jobs it is necessary to create a connection and then register
    as a worker for a specific host.
    """
    retry_time_seconds = 5

    def __init__(self, server_url, host_name, loop=None):
        self.client = None
        self._loop = loop

        self._host_name = host_name
        self._server_url = server_url

        self._is_running = False
        self._connecting = False
        self._connected = False
        self._stopped = False

    def stop(self):
        print("Stopping worker")
        self._stopped = True

    @property
    def is_running(self):
        return self._is_running

    @property
    def current_job(self):
        if self.client is not None:
            return self.client.current_job
        return None

    def finish_job(self, success=True, message=None, data=None):
        """Worker finished job and sets the result which is sent to server."""
        if self.client is None:
            print((
                "Couldn't send job status to server because"
                " client is not connected."
            ))
        else:
            self.client.finish_job(success, message, data)

    async def main_loop(self, register_worker=True):
        """Main loop of connection which keeps the connection to server alive."""
        self._is_running = True

        while not self._stopped:
            start_time = datetime.datetime.now()
            await self._connection_loop(register_worker)
            delta = datetime.datetime.now() - start_time
            print("Connection loop took {}s".format(str(delta)))
            # Check if was stopped and stop while loop in that case
            if self._stopped:
                break

            if delta.seconds < 60:
                print((
                    "Can't connect to server, will retry in {} seconds."
                ).format(self.retry_time_seconds))

                await asyncio.sleep(self.retry_time_seconds)
        self._is_running = False

    async def _connect(self):
        self.client = WorkerClient()
        print("Connecting to {}".format(self._server_url))
        try:
            await self.client.connect_url(self._server_url)
        except KeyboardInterrupt:
            raise
        except Exception:
            traceback.print_exception(*sys.exc_info())

    async def _connection_loop(self, register_worker):
        self._connecting = True
        future = asyncio.run_coroutine_threadsafe(
            self._connect(), loop=self._loop
        )

        while self._connecting:
            if not future.done():
                await asyncio.sleep(0.07)
                continue

            session = getattr(self.client, "_session", None)
            ws = getattr(self.client, "_ws", None)
            if session is not None:
                if session.closed:
                    self._connecting = False
                    self._connected = False
                    break

                elif ws is not None:
                    self._connecting = False
                    self._connected = True

            if self._stopped:
                break

            await asyncio.sleep(0.07)

        if not self._connected:
            self.client = None
            return

        print("Connected to job queue server")
        if register_worker:
            self.register_as_worker()

        while self._connected and self._loop.is_running():
            if self._stopped or ws.closed:
                break

            await asyncio.sleep(0.3)

        await self._stop_cleanup()

    def register_as_worker(self):
        """Register as worker ready to work on server side."""
        asyncio.ensure_future(self._register_as_worker(), loop=self._loop)

    async def _register_as_worker(self):
        worker_id = await self.client.call(
            "register_worker", [self._host_name]
        )
        self.client.set_id(worker_id)
        print(
            "Registered as worker with id {}".format(worker_id)
        )

    async def disconnect(self):
        await self._stop_cleanup()

    async def _stop_cleanup(self):
        print("Cleanup after stop")
        if self.client is not None and hasattr(self.client, "_ws"):
            await self.client.disconnect()

        self.client = None
        self._connecting = False
        self._connected = False
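A hedged sketch of how a worker process can drive this class; the real wiring for TVPaint lives in openpype.hosts.tvpaint.worker, so the loop and thread handling here is illustrative only:

import asyncio
import threading

loop = asyncio.new_event_loop()
connection = WorkerJobsConnection("ws://localhost:8079/ws", "tvpaint", loop)

# run the connection's main loop in a background thread
threading.Thread(
    target=loop.run_until_complete,
    args=(connection.main_loop(),),
    daemon=True
).start()

# ... when 'connection.current_job' appears, process it, then report back:
connection.finish_job(True, None, {"output": "..."})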
241
openpype/modules/default_modules/job_queue/module.py
Normal file

@@ -0,0 +1,241 @@
"""Job queue OpenPype module was created for remote execution of commands.
|
||||
|
||||
## Why is needed
|
||||
Primarily created for hosts which are not easilly controlled from command line
|
||||
or in headless mode and is easier to keep one process of host running listening
|
||||
for jobs to do.
|
||||
|
||||
### Example
|
||||
One of examples is TVPaint which does not have headless mode, can run only one
|
||||
process at one time and it's impossible to know what should be executed inside
|
||||
TVPaint before we know all data about the file that should be processed.
|
||||
|
||||
## Idea
|
||||
Idea is that there is a server, workers and workstation/s which need to process
|
||||
something on a worker.
|
||||
|
||||
Workers and workstation/s must have access to server through adress to it's
|
||||
running instance. Workers use WebSockets and workstations are using HTTP calls.
|
||||
Also both of them must have access to job queue root which is set in
|
||||
settings. Root is used as temp where files needed for job can be stored before
|
||||
sending the job or where result files are stored when job is done.
|
||||
|
||||
Server's address must be set in settings when is running so workers and
|
||||
workstations know where to send or receive jobs.
|
||||
|
||||
## Command line commands
|
||||
### start_server
|
||||
- start server which is handles jobs
|
||||
- it is possible to specify port and host address (default is localhost:8079)
|
||||
|
||||
### start_worker
|
||||
- start worker which will process jobs
|
||||
- has required possitional argument which is application name from OpenPype
|
||||
settings e.g. 'tvpaint/11-5' ('tvpaint' is group '11-5' is variant)
|
||||
- it is possible to specify server url but url from settings is used when not
|
||||
passed (this is added mainly for developing purposes)
|
||||
"""
|
||||
|
||||
import sys
|
||||
import json
|
||||
import copy
|
||||
import platform
|
||||
|
||||
import click
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.api import get_system_settings
|
||||
|
||||
|
||||
class JobQueueModule(OpenPypeModule):
|
||||
name = "job_queue"
|
||||
|
||||
def initialize(self, modules_settings):
|
||||
server_url = modules_settings.get("server_url") or ""
|
||||
|
||||
self._server_url = self.url_conversion(server_url)
|
||||
jobs_root_mapping = self._roots_mapping_conversion(
|
||||
modules_settings.get("jobs_root")
|
||||
)
|
||||
|
||||
self._jobs_root_mapping = jobs_root_mapping
|
||||
|
||||
# Is always enabled
|
||||
# - the module does nothing until is used
|
||||
self.enabled = True
|
||||
|
||||
@classmethod
|
||||
def _root_conversion(cls, root_path):
|
||||
"""Make sure root path does not end with slash."""
|
||||
# Return empty string if path is invalid
|
||||
if not root_path:
|
||||
return ""
|
||||
|
||||
# Remove all slashes
|
||||
while root_path.endswith("/") or root_path.endswith("\\"):
|
||||
root_path = root_path[:-1]
|
||||
return root_path
|
||||
|
||||
@classmethod
|
||||
def _roots_mapping_conversion(cls, roots_mapping):
|
||||
roots_mapping = roots_mapping or {}
|
||||
for platform_name in ("windows", "linux", "darwin"):
|
||||
roots_mapping[platform_name] = cls._root_conversion(
|
||||
roots_mapping.get(platform_name)
|
||||
)
|
||||
return roots_mapping
|
||||
|
||||
@staticmethod
|
||||
def url_conversion(url, ws=False):
|
||||
if sys.version_info[0] == 2:
|
||||
from urlparse import urlsplit, urlunsplit
|
||||
else:
|
||||
from urllib.parse import urlsplit, urlunsplit
|
||||
|
||||
if not url:
|
||||
return url
|
||||
|
||||
url_parts = list(urlsplit(url))
|
||||
scheme = url_parts[0]
|
||||
if not scheme:
|
||||
if ws:
|
||||
url = "ws://{}".format(url)
|
||||
else:
|
||||
url = "http://{}".format(url)
|
||||
url_parts = list(urlsplit(url))
|
||||
|
||||
elif ws:
|
||||
if scheme not in ("ws", "wss"):
|
||||
if scheme == "https":
|
||||
url_parts[0] = "wss"
|
||||
else:
|
||||
url_parts[0] = "ws"
|
||||
|
||||
elif scheme not in ("http", "https"):
|
||||
if scheme == "wss":
|
||||
url_parts[0] = "https"
|
||||
else:
|
||||
url_parts[0] = "http"
|
||||
|
||||
return urlunsplit(url_parts)
|
||||
|
||||
def get_jobs_root_mapping(self):
|
||||
return copy.deepcopy(self._jobs_root_mapping)
|
||||
|
||||
def get_jobs_root(self):
|
||||
return self._jobs_root_mapping.get(platform.system().lower())
|
||||
|
||||
@classmethod
|
||||
def get_jobs_root_from_settings(cls):
|
||||
module_settings = get_system_settings()["modules"]
|
||||
jobs_root_mapping = module_settings.get(cls.name, {}).get("jobs_root")
|
||||
converted_mapping = cls._roots_mapping_conversion(jobs_root_mapping)
|
||||
|
||||
return converted_mapping[platform.system().lower()]
|
||||
|
||||
@property
|
||||
def server_url(self):
|
||||
return self._server_url
|
||||
|
||||
def send_job(self, host_name, job_data):
|
||||
import requests
|
||||
|
||||
job_data = job_data or {}
|
||||
job_data["host_name"] = host_name
|
||||
api_path = "{}/api/jobs".format(self._server_url)
|
||||
post_request = requests.post(api_path, data=json.dumps(job_data))
|
||||
return str(post_request.content.decode())
|
||||
|
||||
def get_job_status(self, job_id):
|
||||
import requests
|
||||
|
||||
api_path = "{}/api/jobs/{}".format(self._server_url, job_id)
|
||||
return requests.get(api_path).json()
|
||||
|
||||
def cli(self, click_group):
|
||||
click_group.add_command(cli_main)
|
||||
|
||||
@classmethod
|
||||
def get_server_url_from_settings(cls):
|
||||
module_settings = get_system_settings()["modules"]
|
||||
return cls.url_conversion(
|
||||
module_settings
|
||||
.get(cls.name, {})
|
||||
.get("server_url")
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def start_server(cls, port=None, host=None):
|
||||
from .job_server import main
|
||||
|
||||
return main(port, host)
|
||||
|
||||
@classmethod
|
||||
def start_worker(cls, app_name, server_url=None):
|
||||
import requests
|
||||
from openpype.lib import ApplicationManager
|
||||
|
||||
if not server_url:
|
||||
server_url = cls.get_server_url_from_settings()
|
||||
|
||||
if not server_url:
|
||||
raise ValueError("Server url is not set.")
|
||||
|
||||
http_server_url = cls.url_conversion(server_url)
|
||||
|
||||
# Validate url
|
||||
requests.get(http_server_url)
|
||||
|
||||
ws_server_url = cls.url_conversion(server_url) + "/ws"
|
||||
|
||||
app_manager = ApplicationManager()
|
||||
app = app_manager.applications.get(app_name)
|
||||
if app is None:
|
||||
raise ValueError(
|
||||
"Didn't find application \"{}\" in settings.".format(app_name)
|
||||
)
|
||||
|
||||
if app.host_name == "tvpaint":
|
||||
return cls._start_tvpaint_worker(app, ws_server_url)
|
||||
raise ValueError("Unknown host \"{}\"".format(app.host_name))
|
||||
|
||||
@classmethod
|
||||
def _start_tvpaint_worker(cls, app, server_url):
|
||||
from openpype.hosts.tvpaint.worker import main
|
||||
|
||||
executable = app.find_executable()
|
||||
if not executable:
|
||||
raise ValueError((
|
||||
"Executable for app \"{}\" is not set"
|
||||
" or accessible on this workstation."
|
||||
).format(app.full_name))
|
||||
|
||||
return main(str(executable), server_url)
|
||||
|
||||
|
||||
@click.group(
|
||||
JobQueueModule.name,
|
||||
help="Application job server. Can be used as render farm."
|
||||
)
|
||||
def cli_main():
|
||||
pass
|
||||
|
||||
|
||||
@cli_main.command(
|
||||
"start_server",
|
||||
help="Start server handling workers and their jobs."
|
||||
)
|
||||
@click.option("--port", help="Server port")
|
||||
@click.option("--host", help="Server host (ip address)")
|
||||
def cli_start_server(port, host):
|
||||
JobQueueModule.start_server(port, host)
|
||||
|
||||
|
||||
@cli_main.command(
|
||||
"start_worker", help=(
|
||||
"Start a worker for a specific application. (e.g. \"tvpaint/11.5\")"
|
||||
)
|
||||
)
|
||||
@click.argument("app_name")
|
||||
@click.option("--server_url", help="Server url which handle workers and jobs.")
|
||||
def cli_start_worker(app_name, server_url):
|
||||
JobQueueModule.start_worker(app_name, server_url)
|
||||
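Putting the pieces together from the workstation side; access to the module through ModulesManager is an assumption about the surrounding codebase, and the job payload key is hypothetical:

import time
from openpype.modules import ModulesManager  # assumed access path

job_queue = ModulesManager().modules_by_name["job_queue"]

# url_conversion fills in a missing scheme:
#   url_conversion("localhost:8079")          -> "http://localhost:8079"
#   url_conversion("localhost:8079", ws=True) -> "ws://localhost:8079"

job_id = job_queue.send_job("tvpaint", {"function": "..."})
while True:
    status = job_queue.get_job_status(job_id)
    if status.get("state") in ("done", "error"):
        break
    time.sleep(1)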
@@ -7,7 +7,7 @@ import pyblish.api
class CollectModules(pyblish.api.ContextPlugin):
    """Collect OpenPype modules."""

-   order = pyblish.api.CollectorOrder
+   order = pyblish.api.CollectorOrder - 0.45
    label = "OpenPype Modules"

    def process(self, context):

@@ -115,6 +115,9 @@
                "default_task_type": "Default task type"
            }
        }
        },
        "CollectTVPaintInstances": {
            "layer_name_regex": "(?P<layer>L[0-9]{3}_\\w+)_(?P<pass>.+)"
        }
    }
}

@@ -188,5 +188,13 @@
    },
    "slack": {
        "enabled": false
    },
    "job_queue": {
        "server_url": "",
        "jobs_root": {
            "windows": "",
            "darwin": "",
            "linux": ""
        }
    }
}

@@ -62,8 +62,25 @@
                    }
                }
            ]
        },
        {
            "type": "dict",
            "collapsible": true,
            "key": "CollectTVPaintInstances",
            "label": "Collect TVPaint Instances",
            "children": [
                {
                    "type": "label",
                    "label": "Regex helps to extract render layer and pass names from TVPaint layer name.<br>The regex must contain named groups <b>'layer'</b> and <b>'pass'</b> which are used for creation of RenderPass instances.<hr><br>Example layer name: <b>\"L001_Person_Hand\"</b><br>Example regex: <b>\"(?P<layer>L[0-9]{3}_\\w+)_(?P<pass>.+)\"</b><br>Extracted layer: <b>\"L001_Person\"</b><br>Extracted pass: <b>\"Hand\"</b>"
                },
                {
                    "type": "text",
                    "key": "layer_name_regex",
                    "label": "Layer name regex"
                }
            ]
        }
    ]
}
]
}

@@ -262,6 +262,38 @@
            }
        ]
    },
    {
        "type": "dict",
        "key": "job_queue",
        "label": "Job Queue",
        "require_restart": true,
        "collapsible": true,
        "children": [
            {
                "type": "label",
                "label": "Address of machine where job queue server is running."
            },
            {
                "type": "text",
                "key": "server_url",
                "label": "Server Rest URL"
            },
            {
                "type": "separator"
            },
            {
                "type": "label",
                "label": "Jobs root is used as temporary directory for workers where source is copied and render output can be stored."
            },
            {
                "key": "jobs_root",
                "label": "Jobs root",
                "type": "path",
                "multipath": false,
                "multiplatform": true
            }
        ]
    },
    {
        "type": "dynamic_schema",
        "name": "system_settings/modules"