Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)
Merge branch 'develop' into bugfix/PYPE-428-dazzle-feedback-publish-errors
# Conflicts:
#	pype/plugins/nuke/load/load_luts.py
#	pype/plugins/nuke/load/load_luts_ip.py
#	pype/plugins/nuke/publish/validate_active_viewer.py
Commit b13d2f717b
269 changed files with 9200 additions and 5676 deletions
pype/plugins/global/_publish_unused/extract_quicktime.py (new file, 86 lines)
@@ -0,0 +1,86 @@
import os
import pyblish.api
import subprocess
from pype.vendor import clique


class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
    """Resolve any dependency issues.

    This plug-in resolves any paths which, if not updated, might break
    the published file.

    The order of families is important: when working with lookdev you want to
    first publish the texture, update the texture paths in the nodes and then
    publish the shading network. The same goes for file-dependent assets.
    """

    label = "Extract Quicktime"
    order = pyblish.api.ExtractorOrder
    families = ["imagesequence", "render", "write", "source"]
    hosts = ["shell"]

    def process(self, instance):
        # fps = instance.data.get("fps")
        # start = instance.data.get("startFrame")
        # stagingdir = os.path.normpath(instance.data.get("stagingDir"))
        #
        # collected_frames = os.listdir(stagingdir)
        # collections, remainder = clique.assemble(collected_frames)
        #
        # full_input_path = os.path.join(
        #     stagingdir, collections[0].format('{head}{padding}{tail}')
        # )
        # self.log.info("input {}".format(full_input_path))
        #
        # filename = collections[0].format('{head}')
        # if not filename.endswith('.'):
        #     filename += "."
        # movFile = filename + "mov"
        # full_output_path = os.path.join(stagingdir, movFile)
        #
        # self.log.info("output {}".format(full_output_path))
        #
        # config_data = instance.context.data['output_repre_config']
        #
        # proj_name = os.environ.get('AVALON_PROJECT', '__default__')
        # profile = config_data.get(proj_name, config_data['__default__'])
        #
        # input_args = []
        # # overrides output file
        # input_args.append("-y")
        # # preset's input data
        # input_args.extend(profile.get('input', []))
        # # necessary input data
        # input_args.append("-start_number {}".format(start))
        # input_args.append("-i {}".format(full_input_path))
        # input_args.append("-framerate {}".format(fps))
        #
        # output_args = []
        # # preset's output data
        # output_args.extend(profile.get('output', []))
        # # output filename
        # output_args.append(full_output_path)
        # mov_args = [
        #     "ffmpeg",
        #     " ".join(input_args),
        #     " ".join(output_args)
        # ]
        # subprocess_mov = " ".join(mov_args)
        # sub_proc = subprocess.Popen(subprocess_mov)
        # sub_proc.wait()
        #
        # if not os.path.isfile(full_output_path):
        #     raise("Quicktime wasn't created successfully")
        #
        # if "representations" not in instance.data:
        #     instance.data["representations"] = []
        #
        # representation = {
        #     'name': 'mov',
        #     'ext': 'mov',
        #     'files': movFile,
        #     "stagingDir": stagingdir,
        #     "preview": True
        # }
        # instance.data["representations"].append(representation)
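For reference: both the old extractor and this parked copy lean on clique to turn a directory of frames into a single printf-style path that ffmpeg understands. A minimal sketch of that step, with hypothetical frame names:

from pype.vendor import clique

# Hypothetical staging directory listing.
collected_frames = ["beauty.1001.exr", "beauty.1002.exr", "beauty.1003.exr"]
collections, remainder = clique.assemble(collected_frames)

# '{head}{padding}{tail}' yields a printf pattern ffmpeg accepts as input.
pattern = collections[0].format('{head}{padding}{tail}')
print(pattern)  # beauty.%04d.exr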
@@ -1,22 +1,15 @@
import os
import subprocess
import json
from pype import lib as pypelib
from pypeapp import config
from avalon import api


def get_config_data():
    path_items = [pypelib.get_presets_path(), 'djv_view', 'config.json']
    filepath = os.path.sep.join(path_items)
    data = dict()
    with open(filepath) as data_file:
        data = json.load(data_file)
    return data


def get_families():
    families = []
    paths = get_config_data().get('djv_paths', [])
    paths = config.get_presets().get("djv_view", {}).get("config", {}).get(
        "djv_paths", []
    )
    for path in paths:
        if os.path.exists(path):
            families.append("*")

@@ -25,13 +18,15 @@ def get_families():

def get_representation():
    return get_config_data().get('file_ext', [])
    return config.get_presets().get("djv_view", {}).get("config", {}).get(
        'file_ext', []
    )


class OpenInDJV(api.Loader):
    """Open Image Sequence with system default"""

    config_data = get_config_data()
    config_data = config.get_presets().get("djv_view", {}).get("config", {})
    families = get_families()
    representations = get_representation()

@@ -42,7 +37,9 @@ class OpenInDJV(api.Loader):

    def load(self, context, name, namespace, data):
        self.djv_path = None
        paths = get_config_data().get('djv_paths', [])
        paths = config.get_presets().get("djv_view", {}).get("config", {}).get(
            "djv_paths", []
        )
        for path in paths:
            if os.path.exists(path):
                self.djv_path = path
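The loader probes each configured djv_paths entry and keeps the first one that exists on disk. A minimal sketch of that idiom, with hypothetical candidate locations:

import os

def first_existing_path(candidates):
    # Return the first candidate path present on this machine,
    # or None when DJV is not installed in any known location.
    for path in candidates:
        if os.path.exists(path):
            return path
    return None

djv_path = first_existing_path(
    ["/usr/local/bin/djv_view", "C:/Program Files/djv/bin/djv_view.exe"]
)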
@@ -3,11 +3,33 @@ import shutil
import pyblish.api


def clean_renders(instance):
    transfers = instance.data.get("transfers", list())

    current_families = instance.data.get("families", list())
    instance_family = instance.data.get("family", None)
    dirnames = []

    for src, dest in transfers:
        if os.path.normpath(src) != os.path.normpath(dest):
            if instance_family == 'render' or 'render' in current_families:
                os.remove(src)
                dirnames.append(os.path.dirname(src))

    # make unique set
    cleanup_dirs = set(dirnames)
    for dir in cleanup_dirs:
        try:
            os.rmdir(dir)
        except OSError:
            # directory is not empty, skipping
            continue


class CleanUp(pyblish.api.InstancePlugin):
    """Cleans up the staging directory after a successful publish.

    The removal will only happen for staging directories which are inside the
    temporary folder; otherwise the folder is ignored.
    This will also clean published renders and delete their parent
    directories.
    """

@@ -36,3 +58,5 @@ class CleanUp(pyblish.api.InstancePlugin):

        self.log.info("Removing temporary folder ...")
        shutil.rmtree(staging_dir)
        self.log.info("Cleaning renders ...")
        clean_renders(instance)
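A rough sketch of exercising clean_renders(), assuming the helper above is importable; pyblish instances expose a dict-like data attribute, so a stand-in object with hypothetical paths is enough:

import os
import tempfile
import types

# Stand-in for a pyblish instance: only the `data` dict is needed here.
staging = tempfile.mkdtemp()
src = os.path.join(staging, "shot010.0001.exr")
open(src, "w").close()

instance = types.SimpleNamespace(data={
    "family": "render",
    "families": ["render", "review"],
    # (src, dest) pairs; dest differs from src, so src gets removed.
    "transfers": [(src, "/published/shot010.0001.exr")],
})

clean_renders(instance)      # removes src, then rmdir's its now-empty dir
print(os.path.exists(src))   # False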
@@ -67,9 +67,9 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
        if isinstance(component['files'], list):
            collections, remainder = clique.assemble(component['files'])
            self.log.debug("collecting sequence: {}".format(collections))
            instance.data['startFrame'] = int(component['startFrame'])
            instance.data['endFrame'] = int(component['endFrame'])
            instance.data['frameRate'] = int(component['frameRate'])
            instance.data["frameStart"] = int(component["frameStart"])
            instance.data["frameEnd"] = int(component["frameEnd"])
            instance.data['fps'] = int(component['fps'])

        instance.data["representations"].append(component)
@@ -11,8 +11,8 @@ from avalon import api
def collect(root,
            regex=None,
            exclude_regex=None,
            startFrame=None,
            endFrame=None):
            frame_start=None,
            frame_end=None):
    """Collect sequence collections in root"""

    from avalon.vendor import clique

@@ -51,10 +51,10 @@ def collect(root,
    # Exclude any frames outside start and end frame.
    for collection in collections:
        for index in list(collection.indexes):
            if startFrame is not None and index < startFrame:
            if frame_start is not None and index < frame_start:
                collection.indexes.discard(index)
                continue
            if endFrame is not None and index > endFrame:
            if frame_end is not None and index > frame_end:
                collection.indexes.discard(index)
                continue

@@ -64,7 +64,7 @@ def collect(root,
    return collections


class CollectFileSequences(pyblish.api.ContextPlugin):
class CollectRenderedFrames(pyblish.api.ContextPlugin):
    """Gather file sequences from working directory

    When "FILESEQUENCE" environment variable is set these paths (folders or

@@ -76,8 +76,8 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
            api.Session["AVALON_ASSET"]
        subset (str): The subset to publish to. If not provided the sequence's
            head (up to frame number) will be used.
        startFrame (int): The start frame for the sequence
        endFrame (int): The end frame for the sequence
        frame_start (int): The start frame for the sequence
        frame_end (int): The end frame for the sequence
        root (str): The path to collect from (can be relative to the .json)
        regex (str): A regex for the sequence filename
        exclude_regex (str): A regex for filenames to exclude from collection

@@ -87,7 +87,7 @@ class CollectFileSequences(pyblish.api.ContextPlugin):

    order = pyblish.api.CollectorOrder
    targets = ["filesequence"]
    label = "File Sequences"
    label = "RenderedFrames"

    def process(self, context):
        if os.environ.get("PYPE_PUBLISH_PATHS"):

@@ -128,6 +128,7 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
            self.log.info("setting session using metadata")
            api.Session.update(session)
            os.environ.update(session)

        else:
            # Search in directory
            data = dict()

@@ -141,8 +142,8 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
            collections = collect(root=root,
                                  regex=regex,
                                  exclude_regex=data.get("exclude_regex"),
                                  startFrame=data.get("startFrame"),
                                  endFrame=data.get("endFrame"))
                                  frame_start=data.get("frameStart"),
                                  frame_end=data.get("frameEnd"))

            self.log.info("Found collections: {}".format(collections))

@@ -158,9 +159,13 @@ class CollectFileSequences(pyblish.api.ContextPlugin):

        # Get family from the data
        families = data.get("families", ["render"])
        assert isinstance(families, (list, tuple)), "Must be iterable"
        assert families, "Must have at least a single family"
        families.append("ftrack")
        if "render" not in families:
            families.append("render")
        if "ftrack" not in families:
            families.append("ftrack")
        if "review" not in families:
            families.append("review")

        for collection in collections:
            instance = context.create_instance(str(collection))
            self.log.info("Collection: %s" % list(collection))

@@ -173,8 +178,8 @@ class CollectFileSequences(pyblish.api.ContextPlugin):

            # If no start or end frame provided, get it from collection
            indices = list(collection.indexes)
            start = data.get("startFrame", indices[0])
            end = data.get("endFrame", indices[-1])
            start = data.get("frameStart", indices[0])
            end = data.get("frameEnd", indices[-1])

            # root = os.path.normpath(root)
            # self.log.info("Source: {}".format(data.get("source", "")))

@@ -188,8 +193,8 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
                "subset": subset,
                "asset": data.get("asset", api.Session["AVALON_ASSET"]),
                "stagingDir": root,
                "startFrame": start,
                "endFrame": end,
                "frameStart": start,
                "frameEnd": end,
                "fps": fps,
                "source": data.get('source', '')
            })

@@ -205,7 +210,8 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
                'files': list(collection),
                "stagingDir": root,
                "anatomy_template": "render",
                "frameRate": fps
                "fps": fps,
                "tags": ['review']
            }
            instance.data["representations"].append(representation)
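For reference, a rough sketch of how the renamed collect() arguments play together, using the avalon-vendored clique API over a hypothetical set of frames:

from avalon.vendor import clique

# Hypothetical render output; frames 995-1005 exist on disk.
files = ["shot010.%04d.exr" % i for i in range(995, 1006)]
collections, remainder = clique.assemble(files)

# Mimic collect()'s trimming: drop indexes outside frame_start/frame_end.
frame_start, frame_end = 1001, 1005
for collection in collections:
    for index in list(collection.indexes):
        if frame_start is not None and index < frame_start:
            collection.indexes.discard(index)
        elif frame_end is not None and index > frame_end:
            collection.indexes.discard(index)

print(collections[0].format("{head}{padding}{tail}"))  # shot010.%04d.exr
print(sorted(collections[0].indexes))                  # [1001, ..., 1005]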
@@ -1,7 +1,7 @@
import os
import json
import pyblish.api
from pype import lib as pypelib
from pypeapp import config


class CollectOutputRepreConfig(pyblish.api.ContextPlugin):

@@ -12,13 +12,5 @@ class CollectOutputRepreConfig(pyblish.api.ContextPlugin):
    hosts = ["shell"]

    def process(self, context):
        config_items = [
            pypelib.get_presets_path(),
            "ftrack",
            "output_representation.json"
        ]
        config_file = os.path.sep.join(config_items)
        with open(config_file) as data_file:
            config_data = json.load(data_file)

        config_data = config.get_presets()["ftrack"]["output_representation"]
        context.data['output_repre_config'] = config_data
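This commit repeatedly swaps ad-hoc JSON reads for config.get_presets() lookups. A sketch of the defensive nested-get pattern the other plugins use, shown over a plain dict standing in for the real presets:

# Stand-in for config.get_presets(); the real call returns nested presets.
presets = {
    "djv_view": {"config": {"djv_paths": ["/usr/local/bin/djv_view"]}}
}

# Chained .get() with empty-dict fallbacks never raises KeyError,
# so a missing preset section degrades to an empty default.
djv_paths = presets.get("djv_view", {}).get("config", {}).get("djv_paths", [])
print(djv_paths)  # ['/usr/local/bin/djv_view'], or [] when unset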
@@ -12,6 +12,6 @@ class CollectProjectData(pyblish.api.ContextPlugin):

    def process(self, context):
        # get project data from avalon db
        context.data["projectData"] = pype.get_project_data()
        context.data["projectData"] = pype.get_project()["data"]

        return
@@ -1,7 +1,7 @@
import os
import subprocess
import pype.api
import json

import pype.api
import pyblish


@@ -17,6 +17,7 @@ class ExtractBurnin(pype.api.Extractor):
    label = "Quicktime with burnins"
    order = pyblish.api.ExtractorOrder + 0.03
    families = ["review", "burnin"]
    hosts = ["nuke", "maya", "shell"]
    optional = True

    def process(self, instance):

@@ -24,14 +25,18 @@ class ExtractBurnin(pype.api.Extractor):
            raise RuntimeError("Burnin needs already created mov to work on.")

        # TODO: expand burnin data list to include all useful keys
        burnin_data = {
        version = ''
        if instance.context.data.get('version'):
            version = "v" + str(instance.context.data['version'])

        prep_data = {
            "username": instance.context.data['user'],
            "asset": os.environ['AVALON_ASSET'],
            "task": os.environ['AVALON_TASK'],
            "start_frame": int(instance.data['startFrame']),
            "version": "v" + str(instance.context.data['version'])
            "start_frame": int(instance.data["frameStart"]),
            "version": version
        }
        self.log.debug("__ burnin_data1: {}".format(burnin_data))
        self.log.debug("__ prep_data: {}".format(prep_data))
        for i, repre in enumerate(instance.data["representations"]):
            self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))

@@ -44,40 +49,69 @@ class ExtractBurnin(pype.api.Extractor):
            name = "_burnin"
            movieFileBurnin = filename.replace(".mov", "") + name + ".mov"

            full_movie_path = os.path.join(stagingdir, repre["files"])
            full_burnin_path = os.path.join(stagingdir, movieFileBurnin)
            full_movie_path = os.path.join(
                os.path.normpath(stagingdir), repre["files"])
            full_burnin_path = os.path.join(
                os.path.normpath(stagingdir), movieFileBurnin)
            self.log.debug("__ full_burnin_path: {}".format(full_burnin_path))

            burnin_data = {
                "input": full_movie_path.replace("\\", "/"),
                "output": full_burnin_path.replace("\\", "/"),
                "burnin_data": burnin_data
                "burnin_data": prep_data
            }

            self.log.debug("__ burnin_data2: {}".format(burnin_data))

            json_data = json.dumps(burnin_data)
            scriptpath = os.path.join(os.environ['PYPE_MODULE_ROOT'],
                                      "pype",
                                      "scripts",
                                      "otio_burnin.py")

            self.log.debug("Burnin scriptpath: {}".format(scriptpath))
            # Get script path.
            module_path = os.environ['PYPE_MODULE_ROOT']

            try:
                p = subprocess.Popen(
                    [os.getenv("PYPE_PYTHON_EXE"), scriptpath, json_data]
            # There can be multiple paths in PYPE_MODULE_ROOT, in which case
            # we just take the first one.
            if os.pathsep in module_path:
                module_path = module_path.split(os.pathsep)[0]

            scriptpath = os.path.normpath(
                os.path.join(
                    module_path,
                    "pype",
                    "scripts",
                    "otio_burnin.py"
                )
                p.wait()
                if not os.path.isfile(full_burnin_path):
                    self.log.error(
                        "Burnin file wasn't created successfully")
            except Exception as e:
                raise RuntimeError("Burnin script didn't work: `{}`".format(e))
            )

            if os.path.exists(full_burnin_path):
                repre_update = {
                    "files": movieFileBurnin,
                    "name": repre["name"] + name
                }
                instance.data["representations"][i].update(repre_update)
            self.log.debug("__ scriptpath: {}".format(scriptpath))

            # Get executable.
            executable = os.getenv("PYPE_PYTHON_EXE")

            # There can be multiple paths in PYPE_PYTHON_EXE, in which case
            # we just take the first one.
            if os.pathsep in executable:
                executable = executable.split(os.pathsep)[0]

            self.log.debug("__ EXE: {}".format(executable))

            args = [executable, scriptpath, json_data]
            self.log.debug("Executing: {}".format(args))
            output = pype.api.subprocess(args)
            self.log.debug("Output: {}".format(output))

            repre_update = {
                "files": movieFileBurnin,
                "name": repre["name"],
                "tags": [x for x in repre["tags"] if x != "delete"]
            }
            instance.data["representations"][i].update(repre_update)

            # removing the source mov file
            os.remove(full_movie_path)
            self.log.debug("Removed: `{}`".format(full_movie_path))

        # Remove any representations tagged for deletion.
        for repre in instance.data["representations"]:
            if "delete" in repre.get("tags", []):
                self.log.debug("Removing representation: {}".format(repre))
                instance.data["representations"].remove(repre)

        self.log.debug(instance.data["representations"])
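The burnin plugin hands a JSON payload to a helper script in a separate Python process. A minimal sketch of that pattern with a hypothetical script and payload, using stdlib subprocess.check_output in place of pype.api.subprocess (whose exact behavior is not shown in this diff):

import json
import subprocess
import sys

payload = {
    "input": "render/shot010.mov",
    "output": "render/shot010_burnin.mov",
    "burnin_data": {"username": "artist", "start_frame": 1001},
}

# The script receives the whole payload as a single JSON argv entry,
# so no temp files are needed to pass structured data across processes.
args = [sys.executable, "otio_burnin.py", json.dumps(payload)]
output = subprocess.check_output(args)  # raises CalledProcessError on failure
print(output)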
@@ -2,11 +2,11 @@ import pyblish.api
from avalon import io


class IntegrateHierarchyToAvalon(pyblish.api.ContextPlugin):
class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
    """Create entities in Avalon based on collected data."""

    order = pyblish.api.IntegratorOrder - 0.1
    label = "Integrate Hierarchy To Avalon"
    order = pyblish.api.ExtractorOrder - 0.01
    label = "Extract Hierarchy To Avalon"
    families = ["clip", "shot"]

    def process(self, context):
@@ -1,7 +1,8 @@
import os

import pyblish.api
import subprocess
from pype.vendor import clique
import pype.api


class ExtractJpegEXR(pyblish.api.InstancePlugin):

@@ -20,9 +21,8 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
    order = pyblish.api.ExtractorOrder
    families = ["imagesequence", "render", "write", "source"]

    def process(self, instance):
        start = instance.data.get("startFrame")
        start = instance.data.get("frameStart")
        stagingdir = os.path.normpath(instance.data.get("stagingDir"))

        collected_frames = os.listdir(stagingdir)

@@ -59,8 +59,10 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
        jpeg_items.append(full_output_path)

        subprocess_jpeg = " ".join(jpeg_items)
        sub_proc = subprocess.Popen(subprocess_jpeg)
        sub_proc.wait()

        # run subprocess
        self.log.debug("{}".format(subprocess_jpeg))
        pype.api.subprocess(subprocess_jpeg)

        if "representations" not in instance.data:
            instance.data["representations"] = []
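Several extractors in this commit replace bare subprocess.Popen(...).wait() with pype.api.subprocess(...). Its implementation is not part of this diff; a plausible minimal wrapper, labeled hypothetical, would capture output and fail loudly on a non-zero exit, roughly:

import subprocess

def run_subprocess(args):
    # Hypothetical stand-in for pype.api.subprocess: run the command,
    # capture combined output, and raise on a non-zero exit code.
    proc = subprocess.Popen(
        args,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        shell=isinstance(args, str),  # the diff passes both strings and lists
    )
    output = proc.communicate()[0].decode("utf-8", errors="replace")
    if proc.returncode != 0:
        raise RuntimeError(
            "Command failed ({}): {}".format(proc.returncode, output))
    return output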
@@ -1,86 +0,0 @@
import os
import pyblish.api
import subprocess
from pype.vendor import clique


class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
    """Resolve any dependency issues.

    This plug-in resolves any paths which, if not updated, might break
    the published file.

    The order of families is important: when working with lookdev you want to
    first publish the texture, update the texture paths in the nodes and then
    publish the shading network. The same goes for file-dependent assets.
    """

    label = "Extract Quicktime"
    order = pyblish.api.ExtractorOrder
    families = ["imagesequence", "render", "write", "source"]
    hosts = ["shell"]

    def process(self, instance):
        fps = instance.data.get("fps")
        start = instance.data.get("startFrame")
        stagingdir = os.path.normpath(instance.data.get("stagingDir"))

        collected_frames = os.listdir(stagingdir)
        collections, remainder = clique.assemble(collected_frames)

        full_input_path = os.path.join(
            stagingdir, collections[0].format('{head}{padding}{tail}')
        )
        self.log.info("input {}".format(full_input_path))

        filename = collections[0].format('{head}')
        if not filename.endswith('.'):
            filename += "."
        movFile = filename + "mov"
        full_output_path = os.path.join(stagingdir, movFile)

        self.log.info("output {}".format(full_output_path))

        config_data = instance.context.data['output_repre_config']

        proj_name = os.environ.get('AVALON_PROJECT', '__default__')
        profile = config_data.get(proj_name, config_data['__default__'])

        input_args = []
        # overrides output file
        input_args.append("-y")
        # preset's input data
        input_args.extend(profile.get('input', []))
        # necessary input data
        input_args.append("-start_number {}".format(start))
        input_args.append("-i {}".format(full_input_path))
        input_args.append("-framerate {}".format(fps))

        output_args = []
        # preset's output data
        output_args.extend(profile.get('output', []))
        # output filename
        output_args.append(full_output_path)
        mov_args = [
            "ffmpeg",
            " ".join(input_args),
            " ".join(output_args)
        ]
        subprocess_mov = " ".join(mov_args)
        sub_proc = subprocess.Popen(subprocess_mov)
        sub_proc.wait()

        if not os.path.isfile(full_output_path):
            raise("Quicktime wasn't created successfully")

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'mov',
            'ext': 'mov',
            'files': movFile,
            "stagingDir": stagingdir,
            "preview": True
        }
        instance.data["representations"].append(representation)
@@ -1,7 +1,8 @@
import os

import pyblish.api
import subprocess
from pype.vendor import clique
import pype.api
from pypeapp import config


@@ -19,6 +20,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
    label = "Extract Review"
    order = pyblish.api.ExtractorOrder + 0.02
    families = ["review"]
    hosts = ["nuke", "maya", "shell"]

    def process(self, instance):
        # adding plugin attributes from presets

@@ -28,7 +30,7 @@ class ExtractReview(pyblish.api.InstancePlugin):

        inst_data = instance.data
        fps = inst_data.get("fps")
        start_frame = inst_data.get("startFrame")
        start_frame = inst_data.get("frameStart")

        self.log.debug("Families In: `{}`".format(instance.data["families"]))
@@ -36,7 +38,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
        representations = instance.data["representations"]

        # filter out mov and img sequences
        representations_new = list()
        representations_new = representations[:]
        for repre in representations:
            if repre['ext'] in plugin_attrs["ext_filter"]:
                tags = repre.get("tags", [])
@@ -44,110 +46,158 @@ class ExtractReview(pyblish.api.InstancePlugin):
                self.log.info("Try repre: {}".format(repre))

                if "review" in tags:

                    staging_dir = repre["stagingDir"]

                    for name, profile in output_profiles.items():
                        self.log.debug("Profile name: {}".format(name))

                        ext = profile.get("ext", None)
                        if not ext:
                            ext = "mov"
                            self.log.warning("`ext` attribute not in output profile. Setting to default ext: `mov`")
                        if "mov" not in repre['ext']:
                            # get output presets and loop them
                            collections, remainder = clique.assemble(
                                repre["files"])
                            self.log.warning(
                                "`ext` attribute not in output profile. Setting to default ext: `mov`")

                            full_input_path = os.path.join(
                                staging_dir, collections[0].format(
                                    '{head}{padding}{tail}')
                            )
                        self.log.debug("instance.families: {}".format(instance.data['families']))
                        self.log.debug("profile.families: {}".format(profile['families']))

                            filename = collections[0].format('{head}')
                            if filename.endswith('.'):
                                filename = filename[:-1]
                        else:
                            full_input_path = os.path.join(
                                staging_dir, repre["files"])
                            filename = repre["files"].split(".")[0]
                        if any(item in instance.data['families'] for item in profile['families']):
                            if isinstance(repre["files"], list):
                                collections, remainder = clique.assemble(
                                    repre["files"])

                        repr_file = filename + "_{0}.{1}".format(name, ext)
                                full_input_path = os.path.join(
                                    staging_dir, collections[0].format(
                                        '{head}{padding}{tail}')
                                )

                        full_output_path = os.path.join(staging_dir, repr_file)
                                filename = collections[0].format('{head}')
                                if filename.endswith('.'):
                                    filename = filename[:-1]
                            else:
                                full_input_path = os.path.join(
                                    staging_dir, repre["files"])
                                filename = repre["files"].split(".")[0]

                        self.log.info("input {}".format(full_input_path))
                        self.log.info("output {}".format(full_output_path))
                            repr_file = filename + "_{0}.{1}".format(name, ext)

                        repre_new = repre.copy()
                            full_output_path = os.path.join(
                                staging_dir, repr_file)

                            self.log.debug("Profile name: {}".format(name))
                            self.log.info("input {}".format(full_input_path))
                            self.log.info("output {}".format(full_output_path))

                        new_tags = tags[:]
                        p_tags = profile.get('tags', [])
                        self.log.info("p_tags: `{}`".format(p_tags))
                        # add families
                        [instance.data["families"].append(t) for t in p_tags
                         if t not in instance.data["families"]]
                        # add to
                        [new_tags.append(t) for t in p_tags
                         if t not in new_tags]
                            repre_new = repre.copy()

                        self.log.info("new_tags: `{}`".format(new_tags))
                            new_tags = [x for x in tags if x != "delete"]
                            p_tags = profile.get('tags', [])
                            self.log.info("p_tags: `{}`".format(p_tags))
                            # add families
                            [instance.data["families"].append(t)
                             for t in p_tags
                             if t not in instance.data["families"]]
                            # add to
                            [new_tags.append(t) for t in p_tags
                             if t not in new_tags]

                        input_args = []
                            self.log.info("new_tags: `{}`".format(new_tags))

                        # overrides output file
                        input_args.append("-y")
                            input_args = []

                        # preset's input data
                        input_args.extend(profile.get('input', []))
                            # overrides output file
                            input_args.append("-y")

                        # necessary input data
                        # adds start arg only if image sequence
                        if "mov" not in repre_new['ext']:
                            input_args.append("-start_number {}".format(
                                start_frame))
                            # preset's input data
                            input_args.extend(profile.get('input', []))

                        input_args.append("-i {}".format(full_input_path))
                        input_args.append("-framerate {}".format(fps))
                            # necessary input data
                            # adds start arg only if image sequence
                            if isinstance(repre["files"], list):
                                input_args.append("-start_number {0} -framerate {1}".format(
                                    start_frame, fps))

                        output_args = []
                        # preset's output data
                        output_args.extend(profile.get('output', []))
                            input_args.append("-i {}".format(full_input_path))

                        # output filename
                        output_args.append(full_output_path)
                        mov_args = [
                            "ffmpeg",
                            " ".join(input_args),
                            " ".join(output_args)
                        ]
                        subprocess_mov = " ".join(mov_args)
                            for audio in instance.data.get("audio", []):
                                offset_frames = (
                                    instance.data.get("startFrameReview") -
                                    audio["offset"]
                                )
                                offset_seconds = offset_frames / fps

                        # run subprocess
                        sub_proc = subprocess.Popen(subprocess_mov)
                        sub_proc.wait()
                                if offset_seconds > 0:
                                    input_args.append("-ss")
                                else:
                                    input_args.append("-itsoffset")

                        if not os.path.isfile(full_output_path):
                            self.log.error(
                                "Quicktime wasn't created successfully")
                                input_args.append(str(abs(offset_seconds)))

                        # create representation data
                        repre_new.update({
                            'name': name,
                            'ext': ext,
                            'files': repr_file,
                            "tags": new_tags,
                            "outputName": name
                        })
                        repre_new.pop("preview")
                        repre_new.pop("thumbnail")
                                input_args.extend(
                                    ["-i", audio["filename"]]
                                )

                        # adding representation
                        representations_new.append(repre_new)
                                # Need to merge audio if there are more
                                # than 1 input.
                                if len(instance.data["audio"]) > 1:
                                    input_args.extend(
                                        [
                                            "-filter_complex",
                                            "amerge",
                                            "-ac",
                                            "2"
                                        ]
                                    )

                            output_args = []
                            # preset's output data
                            output_args.extend(profile.get('output', []))

                            # letter_box
                            # TODO: add to documentation
                            lb = profile.get('letter_box', None)
                            if lb:
                                output_args.append(
                                    "-filter:v drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb))

                            # In case audio is longer than video.
                            output_args.append("-shortest")

                            # output filename
                            output_args.append(full_output_path)
                            mov_args = [
                                "ffmpeg",
                                " ".join(input_args),
                                " ".join(output_args)
                            ]
                            subprcs_cmd = " ".join(mov_args)

                            # run subprocess
                            self.log.debug("Executing: {}".format(subprcs_cmd))
                            output = pype.api.subprocess(subprcs_cmd)
                            self.log.debug("Output: {}".format(output))

                            # create representation data
                            repre_new.update({
                                'name': name,
                                'ext': ext,
                                'files': repr_file,
                                "tags": new_tags,
                                "outputName": name
                            })
                            if repre_new.get('preview'):
                                repre_new.pop("preview")
                            if repre_new.get('thumbnail'):
                                repre_new.pop("thumbnail")

                            # adding representation
                            self.log.debug("Adding: {}".format(repre_new))
                            representations_new.append(repre_new)
                        else:
                            representations_new.append(repre)
                            continue
                else:
                    representations_new.append(repre)
                    continue

        for repre in representations_new:
            if "delete" in repre.get("tags", []):
                representations_new.remove(repre)

        self.log.debug(
            "new representations: {}".format(representations_new))
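The new audio branch converts a frame offset into seconds and picks between ffmpeg's -ss (seek into the audio) and -itsoffset (delay the audio) depending on the sign. A small standalone sketch of that decision, with hypothetical values:

fps = 25.0
start_frame_review = 1001
audio_offset = 995          # frame where this audio track starts

offset_frames = start_frame_review - audio_offset
offset_seconds = offset_frames / fps

audio_args = []
if offset_seconds > 0:
    audio_args.append("-ss")         # skip into the audio file
else:
    audio_args.append("-itsoffset")  # delay the audio against the video
audio_args.append(str(abs(offset_seconds)))
audio_args.extend(["-i", "audio/shot010.wav"])

print(" ".join(audio_args))  # -ss 0.24 -i audio/shot010.wav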
@@ -404,7 +404,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):

        # Include optional data if present in
        optionals = [
            "startFrame", "endFrame", "step", "handles", "sourceHashes"
            "frameStart", "frameEnd", "step", "handles", "sourceHashes"
        ]
        for key in optionals:
            if key in instance.data:
@@ -36,9 +36,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                    template from anatomy that should be used for
                    integrating this file. Only the first level can
                    be specified right now.
        'startFrame'
        'endFrame'
        'framerate'
        "frameStart"
        "frameEnd"
        'fps'
    """

    label = "Integrate Asset New"

@@ -63,6 +63,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        "rig",
        "plate",
        "look",
        "lut",
        "audio"
    ]
    exclude_families = ["clip"]
@@ -99,18 +100,18 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        #                 \ /
        #                  o __/
        #
        for result in context.data["results"]:
            if not result["success"]:
                self.log.debug(result)
                exc_type, exc_value, exc_traceback = result["error_info"]
                extracted_traceback = traceback.extract_tb(exc_traceback)[-1]
                self.log.debug(
                    "Error at line {}: \"{}\"".format(
                        extracted_traceback[1], result["error"]
                    )
                )
        assert all(result["success"] for result in context.data["results"]), (
            "Atomicity not held, aborting.")
        # for result in context.data["results"]:
        #     if not result["success"]:
        #         self.log.debug(result)
        #         exc_type, exc_value, exc_traceback = result["error_info"]
        #         extracted_traceback = traceback.extract_tb(exc_traceback)[-1]
        #         self.log.debug(
        #             "Error at line {}: \"{}\"".format(
        #                 extracted_traceback[1], result["error"]
        #             )
        #         )
        # assert all(result["success"] for result in context.data["results"]), (
        #     "Atomicity not held, aborting.")

        # Assemble
        #
@@ -225,17 +226,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        # hierarchy = os.path.sep.join(hierarchy)
        hierarchy = os.path.join(*parents)

        template_data = {"root": root,
                         "project": {"name": PROJECT,
                                     "code": project['data']['code']},
                         "silo": asset['silo'],
                         "task": TASK,
                         "asset": ASSET,
                         "family": instance.data['family'],
                         "subset": subset["name"],
                         "version": int(version["name"]),
                         "hierarchy": hierarchy}

        anatomy = instance.context.data['anatomy']

        # Find the representations to transfer amongst the files
@@ -257,6 +247,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
            #     |       ||
            #     |_______|
            #
            # create template data for Anatomy
            template_data = {"root": root,
                             "project": {"name": PROJECT,
                                         "code": project['data']['code']},
                             "silo": asset['silo'],
                             "task": TASK,
                             "asset": ASSET,
                             "family": instance.data['family'],
                             "subset": subset["name"],
                             "version": int(version["name"]),
                             "hierarchy": hierarchy}

            files = repre['files']
            if repre.get('stagingDir'):
@@ -271,22 +272,29 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                self.log.debug(
                    "src_tail_collections: {}".format(str(src_collections)))
                src_collection = src_collections[0]

                # Assert that each member has identical suffix
                src_head = src_collection.format("{head}")
                src_tail = src_collection.format("{tail}")

                # fix dst_padding
                valid_files = [x for x in files if src_collection.match(x)]
                padd_len = len(
                    valid_files[0].replace(src_head, "").replace(src_tail, "")
                )
                src_padding_exp = "%0{}d".format(padd_len)

                test_dest_files = list()
                for i in [1, 2]:
                    template_data["representation"] = repre['ext']
                    template_data["frame"] = src_collection.format(
                        "{padding}") % i
                    template_data["frame"] = src_padding_exp % i
                    anatomy_filled = anatomy.format(template_data)

                    test_dest_files.append(
                        os.path.normpath(
                            anatomy_filled[template_name]["path"])
                    )

                self.log.debug(
                    "test_dest_files: {}".format(str(test_dest_files)))
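The padding fix stops trusting clique's {padding} token and instead measures the frame-number width from an actual file name. A minimal sketch of that measurement, with hypothetical names:

# Hypothetical sequence member and its head/tail as reported by clique.
src_head = "shot010."
src_tail = ".exr"
valid_file = "shot010.1001.exr"

# Whatever sits between head and tail is the frame field; its length is
# the real zero-padding width, even when clique guessed differently.
padd_len = len(valid_file.replace(src_head, "").replace(src_tail, ""))
src_padding_exp = "%0{}d".format(padd_len)

print(src_padding_exp % 7)  # 0007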
@@ -295,24 +303,23 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                dst_head = dst_collection.format("{head}")
                dst_tail = dst_collection.format("{tail}")

                repre['published_path'] = dst_collection.format()

                index_frame_start = None
                if repre.get('startFrame'):
                if repre.get("frameStart"):
                    frame_start_padding = len(str(
                        repre.get('endFrame')))
                    index_frame_start = repre.get('startFrame')
                        repre.get("frameEnd")))
                    index_frame_start = repre.get("frameStart")

                dst_padding_exp = src_padding_exp
                for i in src_collection.indexes:
                    src_padding = src_collection.format("{padding}") % i
                    src_padding = src_padding_exp % i
                    src_file_name = "{0}{1}{2}".format(
                        src_head, src_padding, src_tail)

                    dst_padding = dst_collection.format("{padding}") % i
                    dst_padding = src_padding_exp % i

                    if index_frame_start:
                        dst_padding = "%0{}d".format(
                            frame_start_padding) % index_frame_start
                        dst_padding_exp = "%0{}d".format(frame_start_padding)
                        dst_padding = dst_padding_exp % index_frame_start
                        index_frame_start += 1

                    dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail)
@@ -321,6 +328,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                    self.log.debug("source: {}".format(src))
                    instance.data["transfers"].append([src, dst])

                repre['published_path'] = "{0}{1}{2}".format(
                    dst_head, dst_padding_exp, dst_tail)
                # for imagesequence version data
                hashes = '#' * len(dst_padding)
                dst = os.path.normpath("{0}{1}{2}".format(
@@ -380,7 +388,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                    "representation": repre['ext']
                }
            }
            self.log.debug("__ _representation: {}".format(representation))
            self.log.debug("__ representation: {}".format(representation))
            destination_list.append(dst)
            self.log.debug("__ destination_list: {}".format(destination_list))
            instance.data['destination_list'] = destination_list
@@ -396,20 +404,23 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        self.log.info("Registered {} items".format(len(representations)))

    def integrate(self, instance):
        """Move the files
        """ Move the files.

        Through `instance.data["transfers"]`

        Args:
            instance: the instance to integrate
        """

        transfers = instance.data.get("transfers", list())

        for src, dest in transfers:
            if os.path.normpath(src) != os.path.normpath(dest):
                self.copy_file(src, dest)

        transfers = instance.data.get("transfers", list())
        for src, dest in transfers:
            self.copy_file(src, dest)

        # Produce hardlinked copies
        # Note: hardlink can only be produced between two files on the same
        # server/disk and editing one of the two will edit both files at once.
@@ -545,8 +556,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

        # Include optional data if present in
        optionals = [
            "startFrame", "endFrame", "step", "handles",
            "handle_end", "handle_start", "sourceHashes"
            "frameStart", "frameEnd", "step", "handles",
            "handleEnd", "handleStart", "sourceHashes"
        ]
        for key in optionals:
            if key in instance.data:
@@ -408,7 +408,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
                "comment": context.data.get("comment")}

        # Include optional data if present in
        optionals = ["startFrame", "endFrame", "step",
        optionals = ["frameStart", "frameEnd", "step",
                     "handles", "colorspace", "fps", "outputDir"]

        for key in optionals:
@@ -121,7 +121,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
    - publishJobState (str, Optional): "Active" or "Suspended"
        This defaults to "Suspended"

    This requires a "startFrame" and "endFrame" to be present in instance.data
    This requires a "frameStart" and "frameEnd" to be present in instance.data
    or in context.data.

    """

@@ -138,6 +138,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        "imagesequence"
    ]

    enviro_filter = [
        "PATH",
        "PYTHONPATH",
        "FTRACK_API_USER",
        "FTRACK_API_KEY",
        "FTRACK_SERVER",
        "PYPE_ROOT"
    ]

    def _submit_deadline_post_job(self, instance, job):
        """
        Deadline specific code separated from :meth:`process` for sake of
@@ -181,13 +191,22 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

        # Transfer the environment from the original job to this dependent
        # job so they use the same environment

        environment = job["Props"].get("Env", {})
        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,
                value=environment[key]
            ) for index, key in enumerate(environment)
        })
        i = 0
        for index, key in enumerate(environment):
            self.log.info("KEY: {}".format(key))
            self.log.info("FILTER: {}".format(self.enviro_filter))

            if key.upper() in self.enviro_filter:
                payload["JobInfo"].update({
                    "EnvironmentKeyValue%d" % i: "{key}={value}".format(
                        key=key,
                        value=environment[key]
                    )
                })
                i += 1

        # Avoid copied pools and remove secondary pool
        payload["JobInfo"]["Pool"] = "none"
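The replacement loop only forwards whitelisted variables to the dependent Deadline job, numbering the surviving keys contiguously. A standalone sketch of that filtering, with a hypothetical environment:

enviro_filter = ["PATH", "PYTHONPATH", "FTRACK_API_USER"]
environment = {
    "PATH": "/usr/bin",
    "SECRET_TOKEN": "do-not-leak",   # filtered out below
    "FTRACK_API_USER": "artist",
}

job_info = {}
i = 0
for key in environment:
    if key.upper() in enviro_filter:
        # Deadline expects EnvironmentKeyValue0, EnvironmentKeyValue1, ...
        # with no gaps, hence the separate counter instead of enumerate().
        job_info["EnvironmentKeyValue%d" % i] = "{}={}".format(
            key, environment[key])
        i += 1

print(job_info)
# {'EnvironmentKeyValue0': 'PATH=/usr/bin',
#  'EnvironmentKeyValue1': 'FTRACK_API_USER=artist'}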
@@ -212,40 +231,36 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        """
        # Get a submission job
        data = instance.data.copy()
        job = instance.data.get("deadlineSubmissionJob")
        render_job = data.pop("deadlineSubmissionJob")
        submission_type = "deadline"

        if not job:
        if not render_job:
            # No deadline job. Try Muster: musterSubmissionJob
            job = data.pop("musterSubmissionJob")
            render_job = data.pop("musterSubmissionJob")
            submission_type = "muster"
        if not job:
        if not render_job:
            raise RuntimeError("Can't continue without valid Deadline "
                               "or Muster submission prior to this "
                               "plug-in.")

        if submission_type == "deadline":
            render_job = data.pop("deadlineSubmissionJob")
            self.DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
                                                    "http://localhost:8082")
            assert self.DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"

            self._submit_deadline_post_job(instance, job)

        if submission_type == "muster":
            render_job = data.pop("musterSubmissionJob")
            self._submit_deadline_post_job(instance, render_job)

        asset = data.get("asset") or api.Session["AVALON_ASSET"]
        subset = data["subset"]

        # Get start/end frame from instance, if not available get from context
        context = instance.context
        start = instance.data.get("startFrame")
        start = instance.data.get("frameStart")
        if start is None:
            start = context.data["startFrame"]
            start = context.data["frameStart"]
        end = instance.data.get("endFrame")
        end = instance.data.get("frameEnd")
        if end is None:
            end = context.data["endFrame"]
            end = context.data["frameEnd"]

        # Add in regex for sequence filename
        # This assumes the output files start with subset name and end with
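The reworked lookup pops the Deadline job from a copy of instance.data and falls back to Muster. Note that dict.pop raises KeyError when the key is missing unless a default is supplied; a defensive sketch of the same fallback chain:

data = {"musterSubmissionJob": {"id": 42}}  # hypothetical submission info

# .pop(key, None) keeps the fallback chain KeyError-free.
render_job = data.pop("deadlineSubmissionJob", None)
submission_type = "deadline"

if not render_job:
    render_job = data.pop("musterSubmissionJob", None)
    submission_type = "muster"

if not render_job:
    raise RuntimeError(
        "Can't continue without a valid Deadline or Muster submission.")

print(submission_type, render_job)  # muster {'id': 42}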
@@ -270,13 +285,13 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        metadata = {
            "asset": asset,
            "regex": regex,
            "startFrame": start,
            "endFrame": end,
            "frameStart": start,
            "frameEnd": end,
            "fps": context.data.get("fps", None),
            "families": ["render"],
            "source": source,
            "user": context.data["user"],

            "version": context.data["version"],
            # Optional metadata (for debugging)
            "metadata": {
                "instance": data,
@@ -319,8 +334,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

        # Set prev start / end frames for comparison
        if not prev_start and not prev_end:
            prev_start = version["data"]["startFrame"]
            prev_end = version["data"]["endFrame"]
            prev_start = version["data"]["frameStart"]
            prev_end = version["data"]["frameEnd"]

        subset_resources = get_resources(version, _ext)
        resource_files = get_resource_files(subset_resources,
@@ -356,12 +371,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        # Please do so when fixing this.

        # Start frame
        metadata["startFrame"] = updated_start
        metadata["metadata"]["instance"]["startFrame"] = updated_start
        metadata["frameStart"] = updated_start
        metadata["metadata"]["instance"]["frameStart"] = updated_start

        # End frame
        metadata["endFrame"] = updated_end
        metadata["metadata"]["instance"]["endFrame"] = updated_end
        metadata["frameEnd"] = updated_end
        metadata["metadata"]["instance"]["frameEnd"] = updated_end

        metadata_filename = "{}_metadata.json".format(subset)
pype/plugins/global/publish/validate_filesequences.py (new file, 12 lines)

@@ -0,0 +1,12 @@
import pyblish.api


class ValidateFileSequences(pyblish.api.ContextPlugin):
    """Validates whether any file sequences were collected."""

    order = pyblish.api.ValidatorOrder
    targets = ["filesequence"]
    label = "Validate File Sequences"

    def process(self, context):
        assert context, "Nothing collected."
@@ -22,8 +22,8 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin):
        frames = list(collection.indexes)

        current_range = (frames[0], frames[-1])
        required_range = (instance.data["startFrame"],
                          instance.data["endFrame"])
        required_range = (instance.data["frameStart"],
                          instance.data["frameEnd"])

        if current_range != required_range:
            raise ValueError("Invalid frame range: {0} - "
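The validator compares the collected sequence's first and last index against the declared frameStart/frameEnd. A tiny sketch of that check; note it catches wrong bounds but not holes in the middle of the range:

frames = sorted([1001, 1002, 1003, 1005])  # hypothetical collected indexes

current_range = (frames[0], frames[-1])
required_range = (1001, 1005)

if current_range != required_range:
    raise ValueError(
        "Invalid frame range: {0} - expected: {1}".format(
            current_range, required_range))

# Passes here even though 1004 is missing; a completeness check would
# also compare set(frames) against range(1001, 1006).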