Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)
Merge branch 'release/2.13.5' into 2.x/develop
This commit is contained in: a13207df9a
6 changed files with 37 additions and 13 deletions
@@ -2,6 +2,7 @@ import os
 import re
 import json
 import copy
+import tempfile

 import pype.api
 import pyblish
@@ -227,12 +228,30 @@ class ExtractBurnin(pype.api.Extractor):
         # Dump data to string
         dumped_script_data = json.dumps(script_data)

+        # Store dumped json to temporary file
+        temporary_json_file = tempfile.NamedTemporaryFile(
+            mode="w", suffix=".json", delete=False
+        )
+        temporary_json_file.write(dumped_script_data)
+        temporary_json_file.close()
+        temporary_json_filepath = temporary_json_file.name.replace(
+            "\\", "/"
+        )
+
         # Prepare subprocess arguments
-        args = [executable, scriptpath, dumped_script_data]
-        self.log.debug("Executing: {}".format(args))
+        args = [
+            "\"{}\"".format(executable),
+            "\"{}\"".format(scriptpath),
+            "\"{}\"".format(temporary_json_filepath)
+        ]
+        subprcs_cmd = " ".join(args)
+        self.log.debug("Executing: {}".format(subprcs_cmd))

         # Run burnin script
-        pype.api.subprocess(args, shell=True, logger=self.log)
+        pype.api.subprocess(subprcs_cmd, shell=True, logger=self.log)
+
+        # Remove the temporary json
+        os.remove(temporary_json_filepath)

         for filepath in temp_data["full_input_paths"]:
             filepath = filepath.replace("\\", "/")
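The hunk above replaces passing the serialized settings directly as a command-line argument with a hand-off through a temporary JSON file, which avoids argument-length limits and quoting problems, especially on Windows. Below is a minimal standalone sketch of that pattern; the run_burnin_script name, the use of plain subprocess.run instead of pype.api.subprocess, and the try/finally cleanup are illustrative assumptions, not the plugin's actual code.

import json
import os
import subprocess
import tempfile


def run_burnin_script(executable, scriptpath, script_data):
    """Hypothetical helper: hand data to a script via a temporary JSON file."""
    # Write the payload to a temp file; delete=False so the subprocess
    # can open it after this handle is closed (required on Windows).
    temp_file = tempfile.NamedTemporaryFile(
        mode="w", suffix=".json", delete=False
    )
    temp_file.write(json.dumps(script_data))
    temp_file.close()
    temp_path = temp_file.name.replace("\\", "/")

    # Quote each part so paths with spaces survive shell=True.
    cmd = " ".join([
        "\"{}\"".format(executable),
        "\"{}\"".format(scriptpath),
        "\"{}\"".format(temp_path)
    ])
    try:
        subprocess.run(cmd, shell=True, check=True)
    finally:
        # Always remove the temporary file, even if the script fails.
        os.remove(temp_path)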
@@ -29,6 +29,6 @@ class ValidateFFmpegInstalled(pyblish.api.ContextPlugin):
     def process(self, context):
         ffmpeg_path = pype.lib.get_ffmpeg_tool_path("ffmpeg")
         self.log.info("ffmpeg path: `{}`".format(ffmpeg_path))
-        if self.is_tool("\"{}\"".format(ffmpeg_path)) is False:
+        if self.is_tool("{}".format(ffmpeg_path)) is False:
             self.log.error("ffmpeg not found in PATH")
             raise RuntimeError('ffmpeg not installed.')
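The validator's is_tool helper is not part of this diff; purely as an illustration of the kind of availability check it performs, a common standalone approach is sketched below. The shutil.which lookup and the "-version" probe are assumptions for the example, not the plugin's implementation.

import shutil
import subprocess


def ffmpeg_is_available(ffmpeg_path="ffmpeg"):
    """Illustrative check, not the plugin's is_tool implementation."""
    # Resolve the executable on PATH (or confirm an absolute path exists).
    if shutil.which(ffmpeg_path) is None:
        return False
    # Ask the binary for its version; a non-zero return code means trouble.
    result = subprocess.run(
        [ffmpeg_path, "-version"],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL
    )
    return result.returncode == 0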
@@ -54,7 +54,7 @@ class ExtractReview(pype.api.Extractor):
         # Generate thumbnail.
         thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg")
         args = [
-            "\"{}\"".format(ffmpeg_path), "-y",
+            "{}".format(ffmpeg_path), "-y",
             "-i", output_image_path,
             "-vf", "scale=300:-1",
             "-vframes", "1",
@@ -18,7 +18,7 @@ class CollectInstanceData(pyblish.api.InstancePlugin):

     label = "Collect instance data"
     order = pyblish.api.CollectorOrder + 0.49
-    families = ["render", "plate"]
+    families = ["render", "plate", "review"]
     hosts = ["standalonepublisher"]

     def process(self, instance):
@@ -56,7 +56,9 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):

         elif is_jpeg:
             # use first frame as thumbnail if is sequence of jpegs
-            full_thumbnail_path = file
+            full_thumbnail_path = os.path.join(
+                thumbnail_repre["stagingDir"], file
+            )
             self.log.info(
                 "For thumbnail is used file: {}".format(full_thumbnail_path)
             )
@@ -213,9 +213,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
         if frame_start is None:
             replacement_final = replacement_size = str(MISSING_KEY_VALUE)
         else:
-            replacement_final = "\\'{}\\'".format(
-                r'%%{eif\:n+%d\:d}' % frame_start
-            )
+            replacement_final = "%{eif:n+" + str(frame_start) + ":d}"
             replacement_size = str(frame_end)

         final_text = final_text.replace(
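The new replacement string relies on ffmpeg's drawtext text expansion: %{eif:EXPR:d} is evaluated once per frame, so n+<frame_start> prints a running frame counter offset by the clip's first frame number. A minimal sketch of that mechanism is below; the input and output file names and the styling options are made up, and the colons are escaped here because the expression is embedded directly in a -vf filter string (depending on the ffmpeg build, a fontfile option may also be required).

import subprocess

frame_start = 1001

# drawtext expansion: %{eif\:n+1001\:d} prints the integer value of n+1001
# for every frame n. Colons inside the expansion are escaped with "\:"
# so they are not treated as drawtext option separators.
drawtext = (
    "drawtext=text='%{{eif\\:n+{}\\:d}}'"
    ":x=10:y=10:fontsize=24:fontcolor=white"
).format(frame_start)

cmd = [
    "ffmpeg", "-y",
    "-i", "input.mov",           # hypothetical input
    "-vf", drawtext,
    "output_burnin.mov"          # hypothetical output
]
subprocess.run(cmd)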
@@ -328,11 +326,13 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):

         _stdout, _stderr = proc.communicate()
         if _stdout:
-            print(_stdout.decode("utf-8"))
+            for line in _stdout.split(b"\r\n"):
+                print(line.decode("utf-8"))

         # This will probably never happen as ffmpeg use stdout
         if _stderr:
-            print(_stderr.decode("utf-8"))
+            for line in _stderr.split(b"\r\n"):
+                print(line.decode("utf-8"))

         if proc.returncode != 0:
             raise RuntimeError(
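Splitting the captured bytes on b"\r\n" makes ffmpeg's carriage-return-separated progress output print one line at a time instead of as a single blob. A small self-contained sketch of the capture-and-split pattern, using a placeholder command rather than the actual burnin invocation:

import subprocess

# Run a command and capture both streams as bytes.
proc = subprocess.Popen(
    ["ffmpeg", "-version"],           # placeholder command
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE
)
stdout, stderr = proc.communicate()

# Print each captured line separately; if the output contains no
# "\r\n" sequences, the whole stream is printed as one chunk.
for stream in (stdout, stderr):
    if not stream:
        continue
    for line in stream.split(b"\r\n"):
        print(line.decode("utf-8"))

if proc.returncode != 0:
    raise RuntimeError("Command failed with code {}".format(proc.returncode))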
@@ -578,7 +578,10 @@ def burnins_from_data(

 if __name__ == "__main__":
     print("* Burnin script started")
-    in_data = json.loads(sys.argv[-1])
+    in_data_json_path = sys.argv[-1]
+    with open(in_data_json_path, "r") as file_stream:
+        in_data = json.load(file_stream)
+
     burnins_from_data(
         in_data["input"],
         in_data["output"],
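This is the consumer side of the temporary-JSON hand-off introduced in the ExtractBurnin hunk: instead of parsing the payload from the command line, the script now receives a file path and loads the data from disk. A runnable sketch of that entry-point pattern, with a hypothetical process_data function standing in for burnins_from_data:

import json
import sys


def process_data(data):
    # Placeholder for the real work (burnins_from_data in the actual script).
    print("input:", data.get("input"))
    print("output:", data.get("output"))


if __name__ == "__main__":
    # The last argument is the path to a JSON file written by the caller.
    json_path = sys.argv[-1]
    with open(json_path, "r") as file_stream:
        data = json.load(file_stream)
    process_data(data)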