mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-26 05:42:15 +01:00
collecting instances of clips from json example file
This commit is contained in:
parent
f6caee4c43
commit
5693f8a4d7
4 changed files with 337 additions and 4 deletions
|
|
@ -4,6 +4,7 @@ from avalon import (
|
|||
io,
|
||||
api as avalon
|
||||
)
|
||||
import json
|
||||
|
||||
|
||||
class CollectContextDataFromAport(pyblish.api.ContextPlugin):
|
||||
|
|
@ -19,18 +20,38 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
|
|||
"""
|
||||
|
||||
label = "Collect Aport Context"
|
||||
order = pyblish.api.CollectorOrder - 0.01
|
||||
order = pyblish.api.CollectorOrder - 0.1
|
||||
|
||||
def process(self, context):
    """Load the host's request json file and stage its data on context.

    Reads the file at ``context.data["rqst_json_data_path"]``, validates
    the required keys (``host``, ``hostVersion``, ``currentFile``,
    ``presets``) and publishes them into ``context.data`` and the shared
    avalon session.  Also prepares the staging directory next to the
    post json file so later plugins can write results back.

    Args:
        context (obj): pyblish context session

    Raises:
        AssertionError: if any of the required json keys is missing.
    """
    # Share the avalon session so the host/app keys can be updated below.
    context.data["avalonSession"] = session = avalon.session

    rqst_json_data_path = context.data['rqst_json_data_path']
    post_json_data_path = context.data['post_json_data_path']

    # Staging dir lives next to the returned (post) json file.
    context.data["stagingDir"] = \
        staging_dir = os.path.dirname(post_json_data_path)

    with open(rqst_json_data_path) as f:
        context.data['json_data'] = json_data = json.load(f)
    assert json_data, "No `data` in json file"

    host = json_data.get("host", None)
    host_version = json_data.get("hostVersion", None)
    assert host, "No `host` data in json file"
    assert host_version, "No `hostVersion` data in json file"
    context.data["host"] = session["AVALON_APP"] = host
    context.data["hostVersion"] = \
        session["AVALON_APP_VERSION"] = host_version

    # Make sure pyblish targets exactly the host named in the payload.
    pyblish.api.deregister_all_hosts()
    pyblish.api.register_host(host)

    current_file = json_data.get("currentFile", None)
    assert current_file, "No `currentFile` data in json file"
    context.data["currentFile"] = current_file

    presets = json_data.get("presets", None)
    assert presets, "No `presets` data in json file"
    context.data["presets"] = presets

    if not os.path.exists(staging_dir):
        os.makedirs(staging_dir)
|
||||
|
|
|
|||
84
pype/plugins/aport/publish/collect_instances.py
Normal file
84
pype/plugins/aport/publish/collect_instances.py
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
import os
|
||||
import json
|
||||
import pyblish.api
|
||||
from avalon import (
|
||||
io,
|
||||
api as avalon
|
||||
)
|
||||
|
||||
|
||||
class CollectInstancesFromJson(pyblish.api.ContextPlugin):
    """Collect pyblish instances from the json payload sent by a host.

    For every clip entry in ``context.data["json_data"]["instances"]``
    one pyblish instance is created per task/subset combination resolved
    from the ``rules_tasks`` presets.  Tasks come from the clip's tags,
    falling back to ``rules_tasks["defaultTasks"]``.

    Args:
        context (obj): pyblish context session

    """

    label = "Collect instances from JSON"
    order = pyblish.api.CollectorOrder - 0.05

    def process(self, context):
        # Guard against a missing payload before dereferencing it.
        json_data = context.data.get("json_data", None)
        assert json_data, "No `json_data` in context"
        instances_data = json_data.get("instances", None)
        assert instances_data, "No `instances` data in json file"

        presets = context.data["presets"]
        rules_tasks = presets["rules_tasks"]

        instances = []
        for inst in instances_data:
            name = asset = inst.get("name", None)
            assert name, "No `name` key in json_data.instance: {}".format(inst)

            family = inst.get("family", None)
            assert family, "No `family` key in json_data.instance: {}".format(inst)

            # Tasks are read from the clip tags; default from presets.
            tags = inst.get("tags", None)
            if tags:
                tasks = [t["task"] for t in tags
                         if t.get("task")]
            else:
                tasks = rules_tasks["defaultTasks"]
            self.log.debug("tasks: `{}`".format(tasks))

            for task in tasks:
                host = rules_tasks["taskHost"][task]
                subsets = rules_tasks["taskSubsets"][task]

                for subset in subsets:
                    subset_name = "{0}_{1}".format(task, subset)
                    instance = context.create_instance(subset_name)
                    instance.data.update({
                        "subset": subset_name,
                        "task": task,
                        "host": host,
                        "asset": asset,
                        "label": "{0} - {1} > {2}".format(name, task, subset),
                        "name": subset_name,
                        "family": inst["family"],
                        "families": [subset],
                        # keep the raw clip payload for later plugins
                        "jsonData": inst,
                        "publish": True,
                    })
                    self.log.info("collected instance: {}".format(instance.data))
                    instances.append(instance)

        context.data["instances"] = instances

        # Sort/grouped by task (preserving local index)
        # context[:] = sorted(context, key=self.sort_by_task)

        self.log.debug("context: {}".format(context))

    def sort_by_task(self, instance):
        """Sort key: the instance's task name."""
        return instance.data.get("task")
|
||||
107
pype/plugins/nuke/publish/extract_post_json.py
Normal file
107
pype/plugins/nuke/publish/extract_post_json.py
Normal file
|
|
@ -0,0 +1,107 @@
|
|||
import os
|
||||
import json
|
||||
import datetime
|
||||
import time
|
||||
|
||||
import clique
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class ExtractJSON(pyblish.api.ContextPlugin):
    """Extract all instances to a serialized json file.

    Writes a timestamped ``*_instances.json`` into a ``workspace/instances``
    directory next to ``context.data["currentFile"]``, containing the
    serialized context data plus one dict per instance (only the values
    that survive ``json.dumps``).
    """

    order = pyblish.api.IntegratorOrder + 1
    label = "Extract to JSON"
    families = ["write"]

    def process(self, context):
        workspace = os.path.join(
            os.path.dirname(context.data["currentFile"]), "workspace",
            "instances")

        if not os.path.exists(workspace):
            os.makedirs(workspace)

        context_data = context.data.copy()
        out_data = dict(self.serialize(context_data))

        instances_data = []
        for instance in context:

            data = {}
            for key, value in instance.data.items():
                if isinstance(value, clique.Collection):
                    value = value.format()

                try:
                    # Probe serializability; keep the value only if it
                    # round-trips through json.
                    json.dumps(value)
                    data[key] = value
                # json.dumps raises TypeError for unserializable objects
                # and ValueError for e.g. circular references — the
                # original `except KeyError` could never fire.
                except (TypeError, ValueError):
                    msg = "\"{0}\"".format(value)
                    msg += " in instance.data[\"{0}\"]".format(key)
                    msg += " could not be serialized."
                    self.log.debug(msg)

            instances_data.append(data)

        out_data["instances"] = instances_data

        timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime("%Y%m%d-%H%M%S")
        filename = timestamp + "_instances.json"

        with open(os.path.join(workspace, filename), "w") as outfile:
            outfile.write(json.dumps(out_data, indent=4, sort_keys=True))

    def serialize(self, data):
        """
        Convert all nested content to serialized objects

        Args:
            data (dict): nested data

        Returns:
            dict: nested data
        """

        def encoding_obj(value):
            # Best effort: stringify and normalize path separators.
            try:
                value = str(value).replace("\\", "/")
            except Exception:
                pass
            return value

        for key, value in dict(data).items():
            if key in ["records", "instances", "results"]:
                # escape all record objects
                data[key] = None
                continue

            if hasattr(value, '__module__'):
                # only deals with module objects
                if "plugins" in value.__module__:
                    # only dealing with plugin objects
                    data[key] = str(value.__module__)
                else:
                    if ".lib." in value.__module__:
                        # will allow only anatomy dict
                        data[key] = self.serialize(value)
                    else:
                        data[key] = None
                continue

            if isinstance(value, dict):
                # recurse into dictionaries; `continue` so the result is
                # not clobbered by the fallback stringification below.
                data[key] = self.serialize(value)
                continue

            # NOTE: original tested `(list or tuple)` which is just `list`,
            # so tuples were never handled (and item assignment would have
            # crashed on a tuple anyway).
            if isinstance(value, (list, tuple)):
                data[key] = [
                    self.serialize(item) if isinstance(item, dict)
                    else encoding_obj(item)
                    for item in value]
                continue

            data[key] = encoding_obj(value)

        return data
|
||||
121
pype/premiere/example_publish_reqst.json
Normal file
121
pype/premiere/example_publish_reqst.json
Normal file
|
|
@ -0,0 +1,121 @@
|
|||
{
|
||||
"time": "20190112T181028Z",
|
||||
"user": "jakub.jezek",
|
||||
"currentFile": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/editorial/e01/work/conform/jkprx_e01_conform_v001.prproj",
|
||||
"cwd": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/editorial/e01/work/conform",
|
||||
"date": "2019-01-12T17:10:28.377000Z",
|
||||
"framerate": "24.0",
|
||||
"host": "premiere",
|
||||
"hostVersion": "2019",
|
||||
"startFrame": 1001,
|
||||
"isRenderedReference": true,
|
||||
"referenceFile": "C:\\Users\\hubert\\_PYPE_testing\\projects\\jakub_projectx\\resources\\reference\\e01\\sequence01\\conform\\jkprx_e01_conform_v001.mov",
|
||||
"presets": {"rules_tasks": {
|
||||
"defaultTasks": ["compositing"],
|
||||
"taskHost":{
|
||||
"compositing": "nuke",
|
||||
"3d": "maya",
|
||||
"roto": "nukeAssist"
|
||||
},
|
||||
"taskSubsets": {
|
||||
"compositing": ["nukescript", "read", "write"],
|
||||
"3d": ["scene", "camera", "imageplane"],
|
||||
"roto": ["nukescript", "read", "write"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"instances": [
|
||||
{
|
||||
"publish": true,
|
||||
"family": "clip",
|
||||
"name": "e01_s010_0010",
|
||||
"filePath": "C:\\Users\\hubert\\_PYPE_testing\\projects\\jakub_projectx\\resources\\footage\\raw\\day01\\bbt_test_001_raw.mov",
|
||||
"tags": [
|
||||
{"task": "compositing"},
|
||||
{"task": "roto"},
|
||||
{"task": "3d"}
|
||||
],
|
||||
"layer": "V1",
|
||||
"sequence": "sequence01",
|
||||
"representation": "mov",
|
||||
"metadata": [
|
||||
{"colorspace": "BT.709"},
|
||||
{"fps": 24},
|
||||
{"hasAudio": true},
|
||||
{"format.width": 720},
|
||||
{"format.height": 404},
|
||||
{"format.pixelaspect": 1},
|
||||
{"source.start": "00:00:00:00"},
|
||||
{"source.end": "00:01:52:12"},
|
||||
{"source.duration": "00:01:52:13"},
|
||||
{"clip.start": "01:00:00:00"},
|
||||
{"clip.end": "01:00:42:07"},
|
||||
{"clip.duration": "00:00:42:08"},
|
||||
{"clip.audio": {
|
||||
"audioChannels": 2,
|
||||
"audioRate": 48000
|
||||
}},
|
||||
{"timeline.audio": [
|
||||
{"metadata": [
|
||||
{"audioChannels": 2},
|
||||
{"audioRate": 48000},
|
||||
{"source.start": "00:00:00:00"},
|
||||
{"source.end": "00:01:52:12"},
|
||||
{"source.duration": "00:01:52:13"},
|
||||
{"clip.start": "01:00:00:00"},
|
||||
{"clip.end": "01:00:42:07"},
|
||||
{"clip.duration": "00:00:42:08"}
|
||||
],
|
||||
"layer": "A2",
|
||||
"path": "file/path/to/audio.wav"}
|
||||
]}
|
||||
]
|
||||
},
|
||||
{
|
||||
"publish": true,
|
||||
"family": "clip",
|
||||
"name": "e01_s010_0020",
|
||||
"filePath": "C:\\Users\\hubert\\_PYPE_testing\\projects\\jakub_projectx\\resources\\footage\\raw\\day01\\bbt_test_001_raw.mov",
|
||||
"tags": [
|
||||
{"task": "compositing"},
|
||||
{"task": "roto"},
|
||||
{"task": "3d"}
|
||||
],
|
||||
"layer": "V1",
|
||||
"sequence": "sequence01",
|
||||
"representation": "mov",
|
||||
"metadata": [
|
||||
{"colorspace": "BT.709"},
|
||||
{"fps": 24},
|
||||
{"hasAudio": true},
|
||||
{"format.width": 720},
|
||||
{"format.height": 404},
|
||||
{"format.pixelaspect": 1},
|
||||
{"source.start": "00:00:00:00"},
|
||||
{"source.end": "00:01:52:12"},
|
||||
{"source.duration": "00:01:52:13"},
|
||||
{"clip.start": "01:00:00:00"},
|
||||
{"clip.end": "01:00:42:07"},
|
||||
{"clip.duration": "00:00:42:08"},
|
||||
{"clip.audio": {
|
||||
"audioChannels": 2,
|
||||
"audioRate": 48000
|
||||
}},
|
||||
{"timeline.audio": [
|
||||
{"metadata": [
|
||||
{"audioChannels": 2},
|
||||
{"audioRate": 48000},
|
||||
{"source.start": "00:00:00:00"},
|
||||
{"source.end": "00:01:52:12"},
|
||||
{"source.duration": "00:01:52:13"},
|
||||
{"clip.start": "01:00:00:00"},
|
||||
{"clip.end": "01:00:42:07"},
|
||||
{"clip.duration": "00:00:42:08"}
|
||||
],
|
||||
"layer": "A2",
|
||||
"path": "file/path/to/audio.wav"}
|
||||
]}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue