def create_publish_job(self, fname, user, comment, jobname, job, json_fpath):
    """Submit a dependent Deadline job that publishes the rendered frames.

    A Deadline "Python" plugin job is submitted which runs
    ``colorbleed.scripts.publish_imagesequence`` with the dumped publish
    data once the render job has finished.

    Args:
        fname (str): Scene file name, used as the Deadline batch name.
        user (str): Deadline user name, for display in the Monitor.
        comment (str): Comment taken from the publish context.
        jobname (str): Name of the render job; the publish job is named
            "<jobname> [publish]".
        job (dict): The render job data as returned by Deadline; its
            "_id" is used as the job dependency.
        json_fpath (str): File path to the json file holding the dumped
            publish data.

    Returns:
        dict or None: The submitted payload on success, or None when
            Deadline rejected the submission.

    """

    url = "{}/api/jobs".format(api.Session["AVALON_DEADLINE"])

    # The publish script must be importable here so that its file path
    # can be handed to the Deadline Python plugin.
    try:
        from colorbleed.scripts import publish_imagesequence
    except Exception as exc:
        raise RuntimeError("Expected module 'publish_imagesequence' "
                           "to be available: %s" % exc)

    # Under Python 2 ``__file__`` may point at the compiled ``.pyc``;
    # Deadline must run the ``.py`` source file.
    script_file = publish_imagesequence.__file__.replace(".pyc", ".py")

    payload = {
        "JobInfo": {
            "Plugin": "Python",
            "BatchName": fname,
            "Name": "{} [publish]".format(jobname),
            "JobType": "Normal",
            # Only start publishing once the render job has completed
            "JobDependency0": job["_id"],
            "UserName": user,
            "Comment": comment,
        },
        "PluginInfo": {
            "Version": "3.6",
            "ScriptFile": script_file,
            "Arguments": "--path {}".format(json_fpath),
            "SingleFrameOnly": "True"
        },

        # Mandatory for Deadline, may be empty
        "AuxFiles": []
    }

    response = requests.post(url, json=payload)
    if not response.ok:
        # The caller logs the failure; signal it explicitly with None
        return None

    return payload
"""Command line publishing of image sequences.

Due to its early integration this module might change location within the
config. It is also subject to change.

Contributors:
    Roy Nieterau
    Wijnand Koreman

Dependencies:
    Avalon
    Pyblish

"""

import os
import sys
import json
import logging

# Exported explicitly so the underscore-prefixed entry point is reachable
# via ``from ... import *`` as well.
__all__ = ["publish_data", "__main__"]

# basicConfig() returns None, so there is nothing useful to bind
logging.basicConfig()
log = logging.getLogger()
log.setLevel(logging.DEBUG)


def publish_data(json_file):
    """Publish rendered image sequences based on the job data.

    Args:
        json_file (str): the json file with the data dump of the
            submitted render job

    Returns:
        None

    """
    with open(json_file, "r") as fp:
        json_data = json.load(fp)

    # Get the render job's environment
    job = json_data["jobs"][0]
    job_env = job["Props"]["Env"]
    # json.load yields unicode; force plain str keys/values for os.environ
    job_env = {str(key): str(value) for key, value in job_env.items()}

    # Ensure the key exists
    os.environ["PYTHONPATH"] = os.environ.get("PYTHONPATH", "")

    # Add the python paths (also to sys.path for the local session)
    # NOTE(review): ';' assumes Windows — os.pathsep would be portable;
    # confirm target platforms before changing.
    pythonpaths = job_env.pop("PYTHONPATH", "").split(";")
    for path in pythonpaths:
        sys.path.append(path)

    os.environ["PYTHONPATH"] += ";" + ";".join(pythonpaths)

    # Use the rest of the job's environment
    os.environ.update(job_env)

    # Set the current pyblish host
    os.environ["PYBLISH_HOSTS"] = "shell"

    # Work relative to the publish data so relative paths resolve
    os.chdir(os.path.dirname(json_file))

    # Install Avalon with shell as the current host. Imported late on
    # purpose: the sys.path additions above must happen first.
    from avalon import api, shell
    api.install(shell)

    # Publish items; returns the processed context (falsy when nothing ran)
    import pyblish.util
    context = pyblish.util.publish()

    if not context:
        log.warning("Nothing published.")
        sys.exit(1)


def __main__():
    """Parse the command line and publish the given JSON data file."""
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--path", help="The filepath of the JSON")

    kwargs, args = parser.parse_known_args()

    if not kwargs.path:
        # Previously a silent no-op; make the missing argument explicit
        log.error("No --path given, nothing to publish")
        return

    filepath = os.path.normpath(kwargs.path)
    print("JSON File {}".format(filepath))
    if not filepath.endswith(".json"):
        raise RuntimeError("Wrong extension! Expecting publish data to be "
                           "stored in a .JSON file")

    publish_data(filepath)


if __name__ == "__main__":
    __main__()