feat(nuke): Deadline submitter. Minor fixes in connected collectors.

antirotor 2019-06-19 22:53:01 +02:00
parent a955bafc55
commit 73cf209570
7 changed files with 240 additions and 35 deletions


@@ -60,6 +60,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"nukescript",
"render",
"rendersetup",
"render.farm",
"write",
"rig",
"plate",


@@ -1,6 +1,8 @@
import os
import json
import re
from pprint import pprint
import logging
from avalon import api, io
from avalon.vendor import requests, clique
@@ -215,7 +217,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        if not job:
            # No deadline job. Try Muster: musterSubmissionJob
            job = instance.data.get("musterSubmissionJob")
            job = data.pop("musterSubmissionJob")
            submission_type = "muster"
            if not job:
                raise RuntimeError("Can't continue without valid Deadline "
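
The hunk above swaps `instance.data.get("musterSubmissionJob")` for `data.pop("musterSubmissionJob")`. An illustrative note on the behavioural difference, with `data` standing in for the dictionary the plugin reads from (not taken verbatim from the plugin):

# dict.get leaves the key in place and returns None when it is missing;
# dict.pop removes the key and, with no default given, raises KeyError when missing.
data = {"musterSubmissionJob": {"JobId": "123"}}

job = data.get("musterSubmissionJob")   # key stays in `data`
job = data.pop("musterSubmissionJob")   # key is removed from `data`
# data.pop("musterSubmissionJob", None) would never raise, mirroring .get()
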
@@ -362,7 +364,19 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        metadata["metadata"]["instance"]["endFrame"] = updated_end
        metadata_filename = "{}_metadata.json".format(subset)
        metadata_path = os.path.join(output_dir, metadata_filename)
        # convert log messages if they are `LogRecord` to their
        # string format to allow serializing as JSON later on.
        rendered_logs = []
        for log in metadata["metadata"]["instance"].get("_log", []):
            if isinstance(log, logging.LogRecord):
                rendered_logs.append(log.getMessage())
            else:
                rendered_logs.append(log)
        metadata["metadata"]["instance"]["_log"] = rendered_logs
        pprint(metadata)
        with open(metadata_path, "w") as f:
            json.dump(metadata, f, indent=4, sort_keys=True)
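
The block added above converts any logging.LogRecord entries in the instance's "_log" list to plain strings before the metadata is written, because LogRecord objects cannot be fed to json.dump. A small self-contained illustration, independent of the plugin code:

import json
import logging

# Build a LogRecord the same way the logging module does internally.
record = logging.LogRecord(
    name="collect", level=logging.INFO, pathname=__file__, lineno=1,
    msg="rendered %d frames", args=(10,), exc_info=None)

try:
    json.dumps({"_log": [record]})
except TypeError as exc:
    # LogRecord instances are not JSON serializable
    print("cannot serialize:", exc)

# getMessage() interpolates the args and returns a plain string,
# which json.dump handles without trouble.
print(json.dumps({"_log": [record.getMessage()]}))  # {"_log": ["rendered 10 frames"]}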