mirror of https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00
adding json export of all context and instances for debug, polishing integrate_render_frames
This commit is contained in:
parent 09c38da4e3
commit 3fd9170e1f

4 changed files with 129 additions and 21 deletions
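The new ExtractJSON plugin (full diff below) dumps the serialized publish context plus every instance to a timestamped JSON file next to the current workfile, so a publish can be inspected after the fact. A minimal sketch of reading such a dump back, assuming only the workspace/instances layout the plugin creates (the workfile path below is made up):

    import glob
    import json
    import os

    # Hypothetical Nuke script path; the plugin derives its dump folder
    # from context.data["currentFile"], and this mirrors that layout.
    workfile = "/projects/show/shot010/comp/shot010_comp_v012.nk"
    dump_dir = os.path.join(os.path.dirname(workfile), "workspace", "instances")

    # Pick the newest "<timestamp>_instances.json" dump and inspect it.
    latest = sorted(glob.glob(os.path.join(dump_dir, "*_instances.json")))[-1]
    with open(latest) as f:
        dump = json.load(f)

    print(sorted(dump))              # serialized context.data keys
    for inst in dump["instances"]:   # one dict per pyblish instance
        print(inst.get("name"), inst.get("family"))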
@@ -1,4 +1,5 @@
 import tempfile
+import os
 import pyblish.api

 ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05
@@ -28,7 +29,9 @@ class Extractor(pyblish.api.InstancePlugin):
         staging_dir = instance.data.get('stagingDir', None)

         if not staging_dir:
-            staging_dir = tempfile.mkdtemp(prefix="pyblish_tmp_")
+            staging_dir = os.path.normpath(
+                tempfile.mkdtemp(prefix="pyblish_tmp_")
+            )
             instance.data['stagingDir'] = staging_dir

         return staging_dir
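For context, the normpath wrapper added above is a pure standard-library change; a quick sketch of what it does to the temporary staging path (directory names below are illustrative):

    import os
    import tempfile

    # mkdtemp creates the directory and returns an absolute, platform-native
    # path; normpath collapses redundant separators and up-level references
    # before the value is cached in instance.data['stagingDir'].
    staging_dir = os.path.normpath(tempfile.mkdtemp(prefix="pyblish_tmp_"))
    print(staging_dir)  # e.g. /tmp/pyblish_tmp_ab12cd on POSIX

    # normpath itself is a pure string operation:
    print(os.path.normpath("/tmp//pyblish_tmp_x/./frames"))
    # -> /tmp/pyblish_tmp_x/frames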
@@ -39,8 +39,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin):

         self.register(instance)

-        self.log.info("Integrating Asset in to the database ...")
-        self.log.info("instance.data: {}".format(instance.data))
+        # self.log.info("Integrating Asset in to the database ...")
+        # self.log.info("instance.data: {}".format(instance.data))
         if instance.data.get('transfer', True):
             self.integrate(instance)
@@ -158,8 +158,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
         # Each should be a single representation (as such, a single extension)
         representations = []
        destination_list = []
-        self.log.debug("integrate_frames:instance.data[files]: {}".format(
-            instance.data["files"]))
+
        for files in instance.data["files"]:
            # Collection
            # _______
@@ -193,7 +192,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin):

            for i in src_collection.indexes:
                src_padding = src_collection.format("{padding}") % i
-                src_file_name = "{0}{1}{2}".format(src_head, src_padding, src_tail)
+                src_file_name = "{0}{1}{2}".format(src_head,
+                                                   src_padding, src_tail)
                dst_padding = dst_collection.format("{padding}") % i
                dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail)
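The head/padding/tail pieces above come from clique's frame-collection model (the same clique the new plugin below imports); a small standalone sketch of how they recombine into per-frame file names, with invented frame names:

    import clique

    # Assemble a collection from a few hypothetical rendered frames.
    collections, _ = clique.assemble(
        ["render.1001.exr", "render.1002.exr", "render.1003.exr"]
    )
    collection = collections[0]

    head, tail = collection.head, collection.tail  # "render." and ".exr"
    for i in collection.indexes:
        # "{padding}" expands to a printf-style pattern such as "%04d"
        padding = collection.format("{padding}") % i
        print("{0}{1}{2}".format(head, padding, tail))  # render.1001.exr ...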
@@ -244,17 +244,17 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
            # Imprint shortcut to context
            # for performance reasons.
            "context": {
-            "root": root,
-            "project": PROJECT,
-            "projectcode": project['data']['code'],
-            'task': api.Session["AVALON_TASK"],
-            "silo": asset['silo'],
-            "asset": ASSET,
-            "family": instance.data['family'],
-            "subset": subset["name"],
-            "VERSION": version["name"],
-            "hierarchy": hierarchy,
-            "representation": ext[1:]
+                "root": root,
+                "project": PROJECT,
+                "projectcode": project['data']['code'],
+                'task': api.Session["AVALON_TASK"],
+                "silo": asset['silo'],
+                "asset": ASSET,
+                "family": instance.data['family'],
+                "subset": subset["name"],
+                "VERSION": version["name"],
+                "hierarchy": hierarchy,
+                "representation": ext[1:]
            }
        }

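The "context" block above is the key set later pushed through path templates; a tiny sketch of the idea with a made-up template string (the real template comes from the project configuration, and all values below are placeholders):

    # Hypothetical template; only the placeholder names match the hunk above.
    template = ("{root}/{project}/{hierarchy}/{asset}/publish/"
                "{family}/{subset}/v{VERSION}")

    context = {
        "root": "/mnt/projects",
        "project": "demo",
        "projectcode": "dm",
        "task": "compositing",
        "silo": "film",
        "asset": "shot010",
        "family": "render",
        "subset": "renderMain",
        "VERSION": 3,
        "hierarchy": "film/seq01",
        "representation": "exr",
    }

    print(template.format(**context))
    # /mnt/projects/demo/film/seq01/shot010/publish/render/renderMain/v3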
107 pype/plugins/nuke/publish/extract_post_json.py Normal file

@@ -0,0 +1,107 @@
import os
import json
import datetime
import time

import clique
import pyblish.api


class ExtractJSON(pyblish.api.ContextPlugin):
    """ Extract all instances to a serialized json file. """

    order = pyblish.api.IntegratorOrder + 1
    label = "Extract to JSON"
    families = ["write"]

    def process(self, context):
        workspace = os.path.join(
            os.path.dirname(context.data["currentFile"]), "workspace",
            "instances")

        if not os.path.exists(workspace):
            os.makedirs(workspace)

        context_data = context.data.copy()
        out_data = dict(self.serialize(context_data))

        instances_data = []
        for instance in context:

            data = {}
            for key, value in instance.data.items():
                if isinstance(value, clique.Collection):
                    value = value.format()

                try:
                    json.dumps(value)
                    data[key] = value
                except (TypeError, ValueError):
                    # json.dumps raises TypeError (or ValueError for
                    # circular references) on unserializable values.
                    msg = "\"{0}\"".format(value)
                    msg += " in instance.data[\"{0}\"]".format(key)
                    msg += " could not be serialized."
                    self.log.debug(msg)

            instances_data.append(data)

        out_data["instances"] = instances_data

        timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime("%Y%m%d-%H%M%S")
        filename = timestamp + "_instances.json"

        with open(os.path.join(workspace, filename), "w") as outfile:
            outfile.write(json.dumps(out_data, indent=4, sort_keys=True))

    def serialize(self, data):
        """
        Convert all nested content to serialized objects

        Args:
            data (dict): nested data

        Returns:
            dict: nested data
        """

        def encoding_obj(value):
            try:
                value = str(value).replace("\\", "/")
            except Exception:
                pass
            return value

        for key, value in dict(data).items():
            if key in ["records", "instances", "results"]:
                # escape all record objects
                data[key] = None
                continue

            if hasattr(value, '__module__'):
                # only deal with module-level objects
                if "plugins" in value.__module__:
                    # keep only the plugin's module path
                    data[key] = str(value.__module__)
                else:
                    if ".lib." in value.__module__:
                        # will allow only the anatomy dict through
                        data[key] = self.serialize(value)
                    else:
                        data[key] = None
                continue

            if isinstance(value, dict):
                # recurse into dictionaries
                data[key] = self.serialize(value)
            elif isinstance(value, (list, tuple)):
                # recurse into sequences; note "(list or tuple)" would
                # only ever test against list
                data[key] = [
                    self.serialize(item) if isinstance(item, dict)
                    else encoding_obj(item) for item in value
                ]
            else:
                data[key] = encoding_obj(value)

        return data
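The try/except probe around json.dumps in process() above is a useful pattern on its own for reducing a dict to its JSON-safe subset; a standalone sketch, assuming nothing beyond the standard library:

    import json

    def json_safe_subset(data):
        """Return only the key/value pairs that json.dumps can encode."""
        safe = {}
        for key, value in data.items():
            try:
                json.dumps(value)  # TypeError/ValueError if not encodable
                safe[key] = value
            except (TypeError, ValueError):
                pass
        return safe

    print(json_safe_subset({"frames": [1001, 1002], "node": object()}))
    # -> {'frames': [1001, 1002]}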
@@ -35,6 +35,7 @@ class ExtractDataForReview(pype.api.Extractor):
         self.log.debug("here:")
+        self.log.debug("creating staging dir:")
         self.staging_dir(instance)

         self.render_review_representation(instance,
                                           representation="mov")
         self.log.debug("review mov:")
@@ -52,7 +53,7 @@ class ExtractDataForReview(pype.api.Extractor):
         staging_dir = instance.data["stagingDir"]
         file_name = collection.format("{head}mov")

-        review_mov = os.path.join(staging_dir, file_name)
+        review_mov = os.path.join(staging_dir, file_name).replace("\\", "/")

         if instance.data.get("baked_colorspace_movie"):
             args = [
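The .replace("\\", "/") change above matters mainly on Windows, where os.path.join produces backslash separators that downstream tools fed this path can mishandle; a minimal illustration (the movie name is invented):

    import ntpath  # the Windows flavour of os.path, importable anywhere

    staging_dir = "C:\\temp\\pyblish_tmp_ab12cd"
    review_mov = ntpath.join(staging_dir, "shot010_review.mov").replace("\\", "/")
    print(review_mov)  # C:/temp/pyblish_tmp_ab12cd/shot010_review.mov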
@@ -110,9 +111,6 @@ class ExtractDataForReview(pype.api.Extractor):
         first_frame = min(collection.indexes)
         last_frame = max(collection.indexes)

-        self.log.warning("first_frame: {}".format(first_frame))
-        self.log.warning("last_frame: {}".format(last_frame))
-
         node = previous_node = nuke.createNode("Read")

         node["file"].setValue(