feat(sp): editorial with multiple edl file instances

This commit is contained in:
Jakub Jezek 2020-08-06 21:22:23 +02:00
parent 7ed3c2686a
commit 4261972a8d
No known key found for this signature in database
GPG key ID: C4B96E101D2A47F3
4 changed files with 91 additions and 35 deletions
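Review note: the heart of this change is in CollectContextDataSAPublish below — an "editorial" publish payload whose representations list several EDL/XML files is now split into one instance per file, each with a unique subset name. A minimal sketch of that splitting, reduced to the editorial branch only (split_editorial, payload and taken are illustrative names, not the plugin API):

    def split_editorial(payload, taken):
        # taken plays the role of context.data["subsetNamesCheck"]
        subset = payload["subset"]
        for repr_data in payload.pop("representations"):
            ext = repr_data["ext"].lstrip(".")
            files = repr_data["files"]
            if not isinstance(files, list):
                files = [files]
            for index, file_name in enumerate(files, 1):
                new_subset = f"{ext}{index}{subset}"
                while new_subset in taken:  # same counter bump as the plugin
                    index += 1
                    new_subset = f"{ext}{index}{subset}"
                taken.append(new_subset)
                repr_copy = dict(repr_data, files=file_name, name=ext)
                yield dict(payload, subset=new_subset,
                           representations=[repr_copy])

Fed {"subset": "Main", "representations": [{"ext": ".edl", "files": ["a.edl", "b.edl"]}]}, it yields subsets edl1Main and edl2Main with one EDL file each — the same names the plugin logs.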

View file

@@ -5,8 +5,7 @@ import tempfile
import random
import string
from avalon import io, api
import importlib
from avalon import io
import pype
from pype.api import execute, Logger
@@ -62,8 +61,6 @@ def set_context(project, asset, task, app):
def publish(data, gui=True):
# cli pyblish seems like a better solution
return cli_publish(data, gui)
# # this uses avalon pyblish launch tool
# avalon_api_publish(data, gui)
def cli_publish(data, gui=True):
@@ -76,10 +73,6 @@ def cli_publish(data, gui=True):
chars = "".join([random.choice(string.ascii_letters) for i in range(15)])
staging_dir = tempfile.mkdtemp(chars)
# create json for return data
return_data_path = (
staging_dir + os.path.basename(staging_dir) + 'return.json'
)
# also create a json file and fill it with the data
json_data_path = staging_dir + os.path.basename(staging_dir) + '.json'
with open(json_data_path, 'w') as outfile:
@@ -88,7 +81,6 @@ def cli_publish(data, gui=True):
envcopy = os.environ.copy()
envcopy["PYBLISH_HOSTS"] = "standalonepublisher"
envcopy["SAPUBLISH_INPATH"] = json_data_path
envcopy["SAPUBLISH_OUTPATH"] = return_data_path
envcopy["PYBLISHGUI"] = "pyblish_pype"
envcopy["PUBLISH_PATHS"] = os.pathsep.join(PUBLISH_PATHS)

View file

@@ -36,16 +36,18 @@ class CollectClipInstances(pyblish.api.InstancePlugin):
custom_start_frame = None
def process(self, instance):
staging_dir = os.path.normpath(
tempfile.mkdtemp(prefix="pyblish_tmp_")
)
# get context
context = instance.context
# attribute for checking duplicate names during creation
if not context.data.get("assetNameCheck"):
context.data["assetNameCheck"] = list()
# create asset_names conversion table
if not context.data.get("assetsShared"):
self.log.debug("Created `assetsShared` in context")
context.data["assetsShared"] = dict()
# get timeline otio data
@@ -55,13 +57,11 @@ class CollectClipInstances(pyblish.api.InstancePlugin):
tracks = timeline.each_child(
descended_from_type=otio.schema.track.Track
)
self.log.debug(f"__ tracks: `{tracks}`")
# get data from avalon
asset_entity = instance.context.data["assetEntity"]
asset_data = asset_entity["data"]
asset_name = asset_entity["name"]
self.log.debug(f"__ asset_entity: `{asset_entity}`")
# Timeline data.
handle_start = int(asset_data["handleStart"])
@@ -69,7 +69,6 @@ class CollectClipInstances(pyblish.api.InstancePlugin):
instances = []
for track in tracks:
self.log.debug(f"__ track: `{track}`")
try:
track_start_frame = (
abs(track.source_range.start_time.value)
@@ -77,8 +76,6 @@ class CollectClipInstances(pyblish.api.InstancePlugin):
except AttributeError:
track_start_frame = 0
self.log.debug(f"__ track: `{track}`")
for clip in track.each_child():
# skip all generators, like black empty
if isinstance(
@@ -98,6 +95,11 @@ class CollectClipInstances(pyblish.api.InstancePlugin):
clip_name = os.path.splitext(clip.name)[0].lower()
name = f"{asset_name.split('_')[0]}_{clip_name}"
if name not in context.data["assetNameCheck"]:
context.data["assetNameCheck"].append(name)
else:
self.log.warning(f"duplicate shot name: {name}")
# frame ranges data
clip_in = clip.range_in_parent().start_time.value
clip_out = clip.range_in_parent().end_time_inclusive().value
@@ -171,7 +173,6 @@ class CollectClipInstances(pyblish.api.InstancePlugin):
})
instances.append(instance.context.create_instance(
**subset_instance_data))
self.log.debug(instance_data)
context.data["assetsShared"][name] = {
"_clipIn": clip_in,

View file

@@ -36,18 +36,6 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
def process(self, context):
# get json paths from the environment and load them
io.install()
input_json_path = os.environ.get("SAPUBLISH_INPATH")
output_json_path = os.environ.get("SAPUBLISH_OUTPATH")
# context.data["stagingDir"] = os.path.dirname(input_json_path)
context.data["returnJsonPath"] = output_json_path
with open(input_json_path, "r") as f:
in_data = json.load(f)
asset_name = in_data["asset"]
family = in_data["family"]
subset = in_data["subset"]
# Load presets
presets = context.data.get("presets")
@@ -57,19 +45,92 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
presets = config.get_presets()
project = io.find_one({"type": "project"})
asset = io.find_one({"type": "asset", "name": asset_name})
context.data["project"] = project
# get json file context
input_json_path = os.environ.get("SAPUBLISH_INPATH")
with open(input_json_path, "r") as f:
in_data = json.load(f)
self.log.debug(f"_ in_data: {in_data}")
self.asset_name = in_data["asset"]
self.family = in_data["family"]
asset = io.find_one({"type": "asset", "name": self.asset_name})
context.data["asset"] = asset
# exception for editorial
if "editorial" in self.family:
# avoid duplicate subset names
if not context.data.get("subsetNamesCheck"):
context.data["subsetNamesCheck"] = list()
in_data_list = list()
representations = in_data.pop("representations")
for repr in representations:
in_data_copy = in_data.copy()
ext = repr["ext"][1:]
subset = in_data_copy["subset"]
# non-editorial files become a single subset each
if ext not in ["edl", "xml"]:
in_data_copy["representations"] = [repr]
in_data_copy["subset"] = f"{ext}{subset}"
in_data_list.append(in_data_copy)
files = repr.pop("files")
# delete unneeded keys
delete_repr_keys = ["frameStart", "frameEnd"]
for k in delete_repr_keys:
if repr.get(k):
repr.pop(k)
# convert files to a list if it is not one already
if not isinstance(files, list):
files = [files]
self.log.debug(f"_ files: {files}")
for index, f in enumerate(files):
index += 1
# copy dictionaries
in_data_copy = in_data_copy.copy()
repr_new = repr.copy()
repr_new["files"] = f
repr_new["name"] = ext
in_data_copy["representations"] = [repr_new]
# create a unique subset name
new_subset = f"{ext}{index}{subset}"
while new_subset in context.data["subsetNamesCheck"]:
index += 1
new_subset = f"{ext}{index}{subset}"
context.data["subsetNamesCheck"].append(new_subset)
in_data_copy["subset"] = new_subset
in_data_list.append(in_data_copy)
self.log.info(f"Creating subset: {ext}{index}{subset}")
else:
in_data_list = [in_data]
self.log.debug(f"_ in_data_list: {in_data_list}")
for in_data in in_data_list:
# create instance
self.create_instance(context, in_data)
def create_instance(self, context, in_data):
subset = in_data["subset"]
instance = context.create_instance(subset)
instance.data.update(
{
"subset": subset,
"asset": asset_name,
"asset": self.asset_name,
"label": subset,
"name": subset,
"family": family,
"family": self.family,
"version": in_data.get("version", 1),
"frameStart": in_data.get("representations", [None])[0].get(
"frameStart", None
@@ -77,7 +138,7 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
"frameEnd": in_data.get("representations", [None])[0].get(
"frameEnd", None
),
"families": [family, "ftrack"],
"families": [self.family, "ftrack"],
}
)
self.log.info("collected instance: {}".format(instance.data))
@@ -105,5 +166,3 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
self.log.debug("Adding review family")
instance.data["representations"].append(component)
self.log.info(in_data)
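Side note for reviewers: in create_instance, in_data.get("representations", [None])[0].get(...) raises AttributeError whenever representations is absent, since [None][0] is None. A safer equivalent, as a sketch (first_repr_value is my name for it):

    def first_repr_value(in_data, key):
        # tolerate a missing or empty representations list
        representations = in_data.get("representations") or [{}]
        return representations[0].get(key)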

View file

@@ -35,6 +35,10 @@ class CollectEditorial(pyblish.api.InstancePlugin):
extensions = [".mov"]
def process(self, instance):
# remove the temporary context attribute
if instance.context.data.get("subsetNamesCheck"):
instance.context.data.pop("subsetNamesCheck")
self.log.debug(f"__ instance: `{instance}`")
# get representation with editorial file
for representation in instance.data["representations"]:
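Small suggestion for CollectEditorial: the guarded pop above can collapse to a one-liner with identical behaviour:

    # pop with a default returns None instead of raising when the key is absent
    instance.context.data.pop("subsetNamesCheck", None)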