From 4ae098e4d2a4434849c7f42152102d8bc59a31af Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Sat, 1 Dec 2018 01:39:38 +0100
Subject: [PATCH 1/3] minor cleanups

---
 .../_publish_unused/collect_deadline_user.py  |  60 ----
 .../global/publish/collect_templates.py       |   3 -
 pype/plugins/global/publish/integrate.py      |   1 +
 .../global/publish/submit_publish_job.py      | 330 ------------------
 .../global/publish/validate_templates.py      |  42 ---
 5 files changed, 1 insertion(+), 435 deletions(-)
 delete mode 100644 pype/plugins/global/_publish_unused/collect_deadline_user.py
 delete mode 100644 pype/plugins/global/publish/submit_publish_job.py
 delete mode 100644 pype/plugins/global/publish/validate_templates.py

diff --git a/pype/plugins/global/_publish_unused/collect_deadline_user.py b/pype/plugins/global/_publish_unused/collect_deadline_user.py
deleted file mode 100644
index f4d13a0545..0000000000
--- a/pype/plugins/global/_publish_unused/collect_deadline_user.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import os
-import subprocess
-
-import pyblish.api
-
-CREATE_NO_WINDOW = 0x08000000
-
-
-def deadline_command(cmd):
-    # Find Deadline
-    path = os.environ.get("DEADLINE_PATH", None)
-    assert path is not None, "Variable 'DEADLINE_PATH' must be set"
-
-    executable = os.path.join(path, "deadlinecommand")
-    if os.name == "nt":
-        executable += ".exe"
-    assert os.path.exists(
-        executable), "Deadline executable not found at %s" % executable
-    assert cmd, "Must have a command"
-
-    query = (executable, cmd)
-
-    process = subprocess.Popen(query, stdout=subprocess.PIPE,
-                               stderr=subprocess.PIPE,
-                               universal_newlines=True,
-                               creationflags=CREATE_NO_WINDOW)
-    out, err = process.communicate()
-
-    return out
-
-
-class CollectDeadlineUser(pyblish.api.ContextPlugin):
-    """Retrieve the local active Deadline user"""
-
-    order = pyblish.api.CollectorOrder + 0.499
-    label = "Deadline User"
-
-    hosts = ['maya', 'fusion', 'nuke']
-    families = [
-        "renderlayer",
-        "saver.deadline",
-        "imagesequence"
-    ]
-
-
-    def process(self, context):
-        """Inject the current working file"""
-        user = None
-        try:
-            user = deadline_command("GetCurrentUserName").strip()
-        except:
-            self.log.warning("Deadline command seems not to be working")
-
-        if not user:
-            self.log.warning("No Deadline user found. "
" - "Do you have Deadline installed?") - return - - self.log.info("Found Deadline user: {}".format(user)) - context.data['deadlineUser'] = user diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index f2a3da7df4..48b6c448e3 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -18,6 +18,3 @@ class CollectTemplates(pyblish.api.ContextPlugin): type=["anatomy"] ) context.data['anatomy'] = templates.anatomy - for key in templates.anatomy: - self.log.info(str(key) + ": " + str(templates.anatomy[key])) - # return diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index 87ffa2aaa3..e20f59133c 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -233,6 +233,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "root": root, "project": PROJECT, "projectcode": "prjX", + 'task': api.Session["AVALON_TASK"], "silo": asset['silo'], "asset": ASSET, "family": instance.data['family'], diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py deleted file mode 100644 index cb852f7c43..0000000000 --- a/pype/plugins/global/publish/submit_publish_job.py +++ /dev/null @@ -1,330 +0,0 @@ -import os -import json -import re - -from avalon import api, io -from avalon.vendor import requests, clique - -import pyblish.api - - -def _get_script(): - """Get path to the image sequence script""" - try: - from pype.fusion.scripts import publish_filesequence - except Exception as e: - raise RuntimeError("Expected module 'publish_imagesequence'" - "to be available") - - module_path = publish_filesequence.__file__ - if module_path.endswith(".pyc"): - module_path = module_path[:-len(".pyc")] + ".py" - - return module_path - - -# Logic to retrieve latest files concerning extendFrames -def get_latest_version(asset_name, subset_name, family): - # Get asset - asset_name = io.find_one({"type": "asset", - "name": asset_name}, - projection={"name": True}) - - subset = io.find_one({"type": "subset", - "name": subset_name, - "parent": asset_name["_id"]}, - projection={"_id": True, "name": True}) - - # Check if subsets actually exists (pre-run check) - assert subset, "No subsets found, please publish with `extendFrames` off" - - # Get version - version_projection = {"name": True, - "data.startFrame": True, - "data.endFrame": True, - "parent": True} - - version = io.find_one({"type": "version", - "parent": subset["_id"], - "data.families": family}, - projection=version_projection, - sort=[("name", -1)]) - - assert version, "No version found, this is a bug" - - return version - - -def get_resources(version, extension=None): - """ - Get the files from the specific version - """ - query = {"type": "representation", "parent": version["_id"]} - if extension: - query["name"] = extension - - representation = io.find_one(query) - assert representation, "This is a bug" - - directory = api.get_representation_path(representation) - print("Source: ", directory) - resources = sorted([os.path.normpath(os.path.join(directory, fname)) - for fname in os.listdir(directory)]) - - return resources - - -def get_resource_files(resources, frame_range, override=True): - - res_collections, _ = clique.assemble(resources) - assert len(res_collections) == 1, "Multiple collections found" - res_collection = res_collections[0] - - # Remove any frames - if override: - for frame in frame_range: 
-            if frame not in res_collection.indexes:
-                continue
-            res_collection.indexes.remove(frame)
-
-    return list(res_collection)
-
-
-class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
-    """Submit image sequence publish jobs to Deadline.
-
-    These jobs are dependent on a deadline job submission prior to this
-    plug-in.
-
-    Renders are submitted to a Deadline Web Service as
-    supplied via the environment variable AVALON_DEADLINE
-
-    Options in instance.data:
-        - deadlineSubmission (dict, Required): The returned .json
-          data from the job submission to deadline.
-
-        - outputDir (str, Required): The output directory where the metadata
-          file should be generated. It's assumed that this will also be
-          final folder containing the output files.
-
-        - ext (str, Optional): The extension (including `.`) that is required
-          in the output filename to be picked up for image sequence
-          publishing.
-
-        - publishJobState (str, Optional): "Active" or "Suspended"
-          This defaults to "Suspended"
-
-    This requires a "startFrame" and "endFrame" to be present in instance.data
-    or in context.data.
-
-    """
-
-    label = "Submit image sequence jobs to Deadline"
-    order = pyblish.api.IntegratorOrder + 0.1
-
-    hosts = ["fusion", "maya", "nuke"]
-
-    families = [
-        "render.deadline",
-        "renderlayer",
-        "imagesequence"
-    ]
-
-    def process(self, instance):
-
-        # AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
-        #                                   "http://localhost:8082")
-        # assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
-
-        try:
-            deadline_url = os.environ["DEADLINE_REST_URL"]
-        except KeyError:
-            self.log.error("Deadline REST API url not found.")
-
-        # Get a submission job
-        job = instance.data.get("deadlineSubmissionJob")
-        if not job:
-            raise RuntimeError("Can't continue without valid deadline "
-                               "submission prior to this plug-in.")
-
-        data = instance.data.copy()
-        subset = data["subset"]
-        state = data.get("publishJobState", "Suspended")
-        job_name = "{batch} - {subset} [publish image sequence]".format(
-            batch=job["Props"]["Name"],
-            subset=subset
-        )
-
-        # Add in start/end frame
-        context = instance.context
-        start = instance.data.get("startFrame", context.data["startFrame"])
-        end = instance.data.get("endFrame", context.data["endFrame"])
-        resources = []
-
-        # Add in regex for sequence filename
-        # This assumes the output files start with subset name and ends with
-        # a file extension.
-        if "ext" in instance.data:
-            ext = re.escape(instance.data["ext"])
-        else:
-            ext = "\.\D+"
-
-        regex = "^{subset}.*\d+{ext}$".format(subset=re.escape(subset),
-                                              ext=ext)
-
-        # Write metadata for publish job
-        render_job = data.pop("deadlineSubmissionJob")
-        metadata = {
-            "regex": regex,
-            "startFrame": start,
-            "endFrame": end,
-            "families": ["imagesequence"],
-
-            # Optional metadata (for debugging)
-            "metadata": {
-                "instance": data,
-                "job": job,
-                "session": api.Session.copy()
-            }
-        }
-
-        # Ensure output dir exists
-        output_dir = instance.data["outputDir"]
-        if not os.path.isdir(output_dir):
-            os.makedirs(output_dir)
-
-        if data.get("extendFrames", False):
-
-            family = "imagesequence"
-            override = data["overrideExistingFrame"]
-
-            # override = data.get("overrideExistingFrame", False)
-            out_file = render_job.get("OutFile")
-            if not out_file:
-                raise RuntimeError("OutFile not found in render job!")
-
-            extension = os.path.splitext(out_file[0])[1]
-            _ext = extension[1:]
-
-            # Frame comparison
-            prev_start = None
-            prev_end = None
-            resource_range = range(int(start), int(end)+1)
-
-            # Gather all the subset files (one subset per render pass!)
-            subset_names = [data["subset"]]
-            subset_names.extend(data.get("renderPasses", []))
-
-            for subset_name in subset_names:
-                version = get_latest_version(asset_name=data["asset"],
-                                             subset_name=subset_name,
-                                             family=family)
-
-                # Set prev start / end frames for comparison
-                if not prev_start and not prev_end:
-                    prev_start = version["data"]["startFrame"]
-                    prev_end = version["data"]["endFrame"]
-
-                subset_resources = get_resources(version, _ext)
-                resource_files = get_resource_files(subset_resources,
-                                                    resource_range,
-                                                    override)
-
-                resources.extend(resource_files)
-
-            updated_start = min(start, prev_start)
-            updated_end = max(end, prev_end)
-
-            # Update metadata and instance start / end frame
-            self.log.info("Updating start / end frame : "
-                          "{} - {}".format(updated_start, updated_end))
-
-            # TODO : Improve logic to get new frame range for the
-            # publish job (publish_filesequence.py)
-            # The current approach is not following Pyblish logic which is based
-            # on Collect / Validate / Extract.
-
-            # ---- Collect Plugins ---
-            # Collect Extend Frames - Only run if extendFrames is toggled
-            # # # Store in instance:
-            # # #    Previous rendered files per subset based on frames
-            # # #    --> Add to instance.data[resources]
-            # # #    Update publish frame range
-
-            # ---- Validate Plugins ---
-            # Validate Extend Frames
-            # # # Check if instance has the requirements to extend frames
-            # There might have been some things which can be added to the list
-            # Please do so when fixing this.
-
-            # Start frame
-            metadata["startFrame"] = updated_start
-            metadata["metadata"]["instance"]["startFrame"] = updated_start
-
-            # End frame
-            metadata["endFrame"] = updated_end
-            metadata["metadata"]["instance"]["endFrame"] = updated_end
-
-        metadata_filename = "{}_metadata.json".format(subset)
-        metadata_path = os.path.join(output_dir, metadata_filename)
-        with open(metadata_path, "w") as f:
-            json.dump(metadata, f, indent=4, sort_keys=True)
-
-        # Generate the payload for Deadline submission
-        payload = {
-            "JobInfo": {
-                "Plugin": "Python",
-                "BatchName": job["Props"]["Batch"],
-                "Name": job_name,
-                "JobType": "Normal",
-                "JobDependency0": job["_id"],
-                "UserName": job["Props"]["User"],
-                "Comment": instance.context.data.get("comment", ""),
-                "InitialStatus": state
-            },
-            "PluginInfo": {
-                "Version": "3.6",
-                "ScriptFile": _get_script(),
-                "Arguments": '--path "{}"'.format(metadata_path),
-                "SingleFrameOnly": "True"
-            },
-
-            # Mandatory for Deadline, may be empty
-            "AuxFiles": []
-        }
-
-        # Transfer the environment from the original job to this dependent
-        # job so they use the same environment
-        environment = job["Props"].get("Env", {})
-        payload["JobInfo"].update({
-            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
-                key=key,
-                value=environment[key]
-            ) for index, key in enumerate(environment)
-        })
-
-        # Avoid copied pools and remove secondary pool
-        payload["JobInfo"]["Pool"] = "none"
-        payload["JobInfo"].pop("SecondaryPool", None)
-
-        self.log.info("Submitting..")
-        self.log.info(json.dumps(payload, indent=4, sort_keys=True))
-
-        url = "{}/api/jobs".format(deadline_url)
-        response = requests.post(url, json=payload)
-        if not response.ok:
-            raise Exception(response.text)
-
-        # Copy files from previous render if extendFrame is True
-        if data.get("extendFrames", False):
-
-            self.log.info("Preparing to copy ..")
-            import shutil
-
-            dest_path = data["outputDir"]
-            for source in resources:
-                src_file = os.path.basename(source)
-                dest = os.path.join(dest_path, src_file)
-                shutil.copy(source, dest)
-
-            self.log.info("Finished copying %i files" % len(resources))
diff --git a/pype/plugins/global/publish/validate_templates.py b/pype/plugins/global/publish/validate_templates.py
deleted file mode 100644
index f806104bb2..0000000000
--- a/pype/plugins/global/publish/validate_templates.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import pyblish.api
-from app.api import (
-    Templates
-)
-
-class ValidateTemplates(pyblish.api.ContextPlugin):
-    """Check if all templates were filed"""
-
-    label = "Validate Templates"
-    order = pyblish.api.ValidatorOrder - 0.1
-    hosts = ["maya", "houdini", "nuke"]
-
-    def process(self, context):
-
-        anatomy = context.data["anatomy"]
-        if not anatomy:
-            raise RuntimeError("Did not find templates")
-        else:
-            data = { "project": {"name": "D001_projectsx",
-                                 "code": "prjX"},
-                     "representation": "exr",
-                     "VERSION": 3,
-                     "SUBVERSION": 10,
-                     "task": "animation",
-                     "asset": "sh001",
-                     "hierarchy": "ep101/sq01/sh010"}
-
-
-            anatomy = context.data["anatomy"].format(data)
-            self.log.info(anatomy.work.path)
-
-            data = { "project": {"name": "D001_projectsy",
-                                 "code": "prjY"},
-                     "representation": "abc",
-                     "VERSION": 1,
-                     "SUBVERSION": 5,
-                     "task": "lookdev",
-                     "asset": "bob",
-                     "hierarchy": "ep101/sq01/bob"}
-
-            anatomy = context.data["anatomy"].format(data)
-            self.log.info(anatomy.work.file)
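The only addition in this patch is the 'task' key that integrate.py now writes into each representation's context. A minimal, self-contained sketch of how such a context dictionary can drive a publish path; the template string and values below are illustrative assumptions, not the project's actual anatomy configuration:

# Hypothetical representation context, shaped like the one integrate.py
# records; the new "task" key comes from api.Session["AVALON_TASK"].
context = {
    "root": "/projects",
    "project": "D001_project",
    "projectcode": "prjX",
    "task": "animation",  # the newly added key
    "silo": "film",
    "asset": "sh001",
    "family": "render",
    "subset": "renderMain",
    "version": 3,
    "representation": "exr",
}

# Illustrative template only; real paths come from the anatomy templates.
template = ("{root}/{project}/{silo}/{asset}/publish/"
            "{task}/{subset}/v{version:03d}/{subset}.{representation}")
print(template.format(**context))
# /projects/D001_project/film/sh001/publish/animation/renderMain/v003/renderMain.exr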
From 71b51d1fa9c2bbd14a8fc3235d1cbd2ddadb1ef8 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Sat, 1 Dec 2018 01:40:37 +0100
Subject: [PATCH 2/3] add publish of already rendered frames, make write
 instance work with publish, render and farm attributes.

---
 .../nuke/publish/collect_nuke_writes.py       |  26 +-
 .../publish/collect_prerendered_frames.py     |  23 ++
 .../nuke/publish/integrate_rendered_frames.py | 361 ++++++++++++++++++
 pype/plugins/nuke/publish/render_local.py     |   4 +-
 .../nuke/publish/validate_collections.py      |  34 ++
 5 files changed, 438 insertions(+), 10 deletions(-)
 create mode 100644 pype/plugins/nuke/publish/collect_prerendered_frames.py
 create mode 100644 pype/plugins/nuke/publish/integrate_rendered_frames.py
 create mode 100644 pype/plugins/nuke/publish/validate_collections.py

diff --git a/pype/plugins/nuke/publish/collect_nuke_writes.py b/pype/plugins/nuke/publish/collect_nuke_writes.py
index b92d03c40b..443b08f691 100644
--- a/pype/plugins/nuke/publish/collect_nuke_writes.py
+++ b/pype/plugins/nuke/publish/collect_nuke_writes.py
@@ -4,7 +4,6 @@ import nuke
 import pyblish.api
 import clique
 
-
 @pyblish.api.log
 class CollectNukeInstances(pyblish.api.ContextPlugin):
     """Collect all write nodes."""
@@ -57,11 +56,16 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
             instance.add(node)
 
             # Adding/Checking publish and render target attribute
-            if "render_local" not in node.knobs():
-                knob = nuke.Boolean_Knob("render_local", "Local rendering")
+            if "farm" not in node.knobs():
+                knob = nuke.Boolean_Knob("farm", "Farm Rendering")
                 knob.setValue(False)
                 node.addKnob(knob)
 
+            # Adding/Checking publish and render target attribute
+            if "render" not in node.knobs():
+                knob = nuke.Boolean_Knob("render", "Render")
+                knob.setValue(False)
+                node.addKnob(knob)
 
             instance.data.update({
                 "asset": os.environ["AVALON_ASSET"],  # todo: not a constant
@@ -70,15 +74,21 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
                 "outputDir": os.path.dirname(nuke.filename(node)),
                 "ext": ext,  # todo: should be redundant
                 "label": label,
-                "families": ["render.local"],
-                "family": "write",
-                "publish": node.knob("publish"),
+                "family": "render",
+                "publish": node.knob("publish").value(),
                 "collection": collection,
-                "first_frame": first_frame,
-                "last_frame": last_frame,
+                "startFrame": first_frame,
+                "endFrame": last_frame,
                 "output_type": output_type
             })
 
+            if node.knob('render').value():
+                instance.data["families"] = ["render.local"]
+            elif node.knob('farm').value():
+                instance.data["families"] = ["render.farm"]
+            else:
+                instance.data["families"] = ["prerendered.frames"]
+
         # Sort/grouped by family (preserving local index)
         context[:] = sorted(context, key=self.sort_by_family)
diff --git a/pype/plugins/nuke/publish/collect_prerendered_frames.py b/pype/plugins/nuke/publish/collect_prerendered_frames.py
new file mode 100644
index 0000000000..e3cf9e2c42
--- /dev/null
+++ b/pype/plugins/nuke/publish/collect_prerendered_frames.py
@@ -0,0 +1,23 @@
+import pyblish.api
+import os
+
+class CollectFrames(pyblish.api.InstancePlugin):
+    """Collect already rendered frames from the write node's output dir"""
+
+    order = pyblish.api.CollectorOrder + 0.499
+    label = "Collect prerendered frames"
+    hosts = ["nuke"]
+    families = ['prerendered.frames']
+
+    def process(self, instance):
+
+        collected_frames = os.listdir(instance.data['outputDir'])
+
+        if "files" not in instance.data:
+            instance.data["files"] = list()
+
+        instance.data["files"].append(collected_frames)
+        instance.data['stagingDir'] = instance.data['outputDir']
+        instance.data['transfer'] = False
+
+        self.log.info('collected frames: {}'.format(collected_frames))
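The collector above now routes every write node into exactly one family based on the two new knobs. A minimal, self-contained sketch of that routing, with plain booleans standing in for the node's `render` and `farm` knob values:

def resolve_families(render, farm):
    """Mirror the knob-to-family routing in collect_nuke_writes.py."""
    if render:
        # Local rendering was requested on the write node itself.
        return ["render.local"]
    elif farm:
        # Farm rendering applies only when local rendering is off.
        return ["render.farm"]
    # Neither toggle set: frames on disk are treated as already rendered.
    return ["prerendered.frames"]


for render, farm in [(True, False), (False, True), (False, False)]:
    print(render, farm, "->", resolve_families(render, farm))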
diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py
new file mode 100644
index 0000000000..f482a48cda
--- /dev/null
+++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py
@@ -0,0 +1,361 @@
+import os
+import logging
+import shutil
+
+import errno
+import pyblish.api
+from avalon import api, io
+
+
+log = logging.getLogger(__name__)
+
+
+class IntegrateFrames(pyblish.api.InstancePlugin):
+    """Resolve any dependency issues
+
+    This plug-in resolves any paths which, if not updated might break
+    the published file.
+
+    The order of families is important, when working with lookdev you want to
+    first publish the texture, update the texture paths in the nodes and then
+    publish the shading network. Same goes for file dependent assets.
+    """
+
+    label = "Integrate Frames"
+    order = pyblish.api.IntegratorOrder
+    families = ["prerendered.frames"]
+
+    def process(self, instance):
+
+        self.register(instance)
+
+        self.log.info("Integrating asset into the database ...")
+        # self.integrate(instance)
+
+    def register(self, instance):
+
+        # Required environment variables
+        PROJECT = api.Session["AVALON_PROJECT"]
+        ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
+        LOCATION = api.Session["AVALON_LOCATION"]
+
+        context = instance.context
+        # Atomicity
+        #
+        # Guarantee atomic publishes - each asset contains
+        # an identical set of members.
+        #              __
+        #             /  o
+        #            /    \
+        #           | o    |
+        #            \    /
+        #             o __/
+        #
+        assert all(result["success"] for result in context.data["results"]), (
+            "Atomicity not held, aborting.")
+
+        # Assemble
+        #
+        #       |
+        #       v
+        #  --->   <----
+        #       ^
+        #       |
+        #
+        stagingdir = instance.data.get("stagingDir")
+        assert stagingdir, ("Incomplete instance \"%s\": "
+                            "Missing reference to staging area." % instance)
+
+        # extra check if stagingDir actually exists and is available
+
+        self.log.debug("Establishing staging directory @ %s" % stagingdir)
+
+        project = io.find_one({"type": "project"},
+                              projection={"config.template.publish": True})
+
+        asset = io.find_one({"type": "asset",
+                             "name": ASSET,
+                             "parent": project["_id"]})
+
+        assert all([project, asset]), ("Could not find current project or "
+                                       "asset '%s'" % ASSET)
+
+        subset = self.get_subset(asset, instance)
+
+        # get next version
+        latest_version = io.find_one({"type": "version",
+                                      "parent": subset["_id"]},
+                                     {"name": True},
+                                     sort=[("name", -1)])
+
+        next_version = 1
+        if latest_version is not None:
+            next_version += latest_version["name"]
+
+        self.log.info("Verifying version from assumed destination")
+
+        assumed_data = instance.data["assumedTemplateData"]
+        assumed_version = assumed_data["version"]
+        if assumed_version != next_version:
+            raise AttributeError("Assumed version 'v{0:03d}' does not match "
+                                 "next version in database "
+                                 "('v{1:03d}')".format(assumed_version,
+                                                       next_version))
+
+        self.log.debug("Next version: v{0:03d}".format(next_version))
+
+        version_data = self.create_version_data(context, instance)
+        version = self.create_version(subset=subset,
+                                      version_number=next_version,
+                                      locations=[LOCATION],
+                                      data=version_data)
+
+        self.log.debug("Creating version ...")
+        version_id = io.insert_one(version).inserted_id
+
+        # Write to disk
+        #          _
+        #         | |
+        #        _| |_
+        #    ____\   /
+        #   |\    \ / \
+        #   \ \    v   \
+        #    \ \________.
+        #     \|________|
+        #
+        root = api.registered_root()
+        # template_data = {"root": root,
+        #                  "project": PROJECT,
+        #                  "silo": asset['silo'],
+        #                  "asset": ASSET,
+        #                  "subset": subset["name"],
+        #                  "version": version["name"]}
+        hierarchy = io.find_one({"type": 'asset', "name": ASSET})['data']['parents']
+        if hierarchy:
+            # hierarchy = os.path.sep.join(hierarchy)
+            hierarchy = os.path.join(*hierarchy)
+
+        template_data = {"root": root,
+                         "project": {"name": PROJECT,
+                                     "code": "prjX"},
+                         "silo": asset['silo'],
+                         "asset": ASSET,
+                         "family": instance.data['family'],
+                         "subset": subset["name"],
+                         "VERSION": version["name"],
+                         "hierarchy": hierarchy}
+
+        template_publish = project["config"]["template"]["publish"]
+        anatomy = instance.context.data['anatomy']
+
+        # Find the representations to transfer amongst the files
+        # Each should be a single representation (as such, a single extension)
+        representations = []
+
+        for files in instance.data["files"]:
+
+            # Collection
+            #   _______
+            #  |______|\
+            # |      |\|
+            # |      ||
+            # |      ||
+            # |      ||
+            # |______|
+            #
+            if isinstance(files, list):
+                collection = files
+                # Assert that each member has identical suffix
+                _, ext = os.path.splitext(collection[0])
+                assert all(ext == os.path.splitext(name)[1]
+                           for name in collection), (
+                    "Files had varying suffixes, this is a bug"
+                )
+
+                assert not any(os.path.isabs(name) for name in collection)
+
+                template_data["representation"] = ext[1:]
+
+                for fname in collection:
+
+                    src = os.path.join(stagingdir, fname)
+                    anatomy_filled = anatomy.format(template_data)
+                    dst = anatomy_filled.publish.path
+
+                    # if instance.data.get('transfer', True):
+                    #     instance.data["transfers"].append([src, dst])
+
+            else:
+                # Single file
+                #  _______
+                # |      |\
+                # |       |
+                # |       |
+                # |       |
+                # |_______|
+                #
+                fname = files
+                assert not os.path.isabs(fname), (
+                    "Given file name is a full path"
+                )
+                _, ext = os.path.splitext(fname)
+
+                template_data["representation"] = ext[1:]
+
+                src = os.path.join(stagingdir, fname)
+                anatomy_filled = anatomy.format(template_data)
+                dst = anatomy_filled.publish.path
+
+
+                # if instance.data.get('transfer', True):
+                #     dst = src
+                #     instance.data["transfers"].append([src, dst])
+
+            representation = {
+                "schema": "pype:representation-2.0",
+                "type": "representation",
+                "parent": version_id,
+                "name": ext[1:],
+                "data": {'path': src},
+                "dependencies": instance.data.get("dependencies", "").split(),
+
+                # Imprint shortcut to context
+                # for performance reasons.
+                "context": {
+                    "root": root,
+                    "project": PROJECT,
+                    "projectcode": "prjX",
+                    'task': api.Session["AVALON_TASK"],
+                    "silo": asset['silo'],
+                    "asset": ASSET,
+                    "family": instance.data['family'],
+                    "subset": subset["name"],
+                    "version": version["name"],
+                    "hierarchy": hierarchy,
+                    "representation": ext[1:]
+                }
+            }
+            representations.append(representation)
+
+        self.log.info("Registering {} items".format(len(representations)))
+
+        io.insert_many(representations)
+
+    def integrate(self, instance):
+        """Move the files
+
+        Through `instance.data["transfers"]`
+
+        Args:
+            instance: the instance to integrate
+        """
+
+        transfers = instance.data["transfers"]
+
+        for src, dest in transfers:
+            self.log.info("Copying file .. {} -> {}".format(src, dest))
+            self.copy_file(src, dest)
+
+    def copy_file(self, src, dst):
+        """ Copy given source to destination
+
+        Arguments:
+            src (str): the source file which needs to be copied
+            dst (str): the destination of the source file
+        Returns:
+            None
+        """
+
+        dirname = os.path.dirname(dst)
+        try:
+            os.makedirs(dirname)
+        except OSError as e:
+            if e.errno == errno.EEXIST:
+                pass
+            else:
+                self.log.critical("An unexpected error occurred.")
+                raise
+
+        shutil.copy(src, dst)
+
+    def get_subset(self, asset, instance):
+
+        subset = io.find_one({"type": "subset",
+                              "parent": asset["_id"],
+                              "name": instance.data["subset"]})
+
+        if subset is None:
+            subset_name = instance.data["subset"]
+            self.log.info("Subset '%s' not found, creating.." % subset_name)
+
+            _id = io.insert_one({
+                "schema": "pype:subset-2.0",
+                "type": "subset",
+                "name": subset_name,
+                "data": {},
+                "parent": asset["_id"]
+            }).inserted_id
+
+            subset = io.find_one({"_id": _id})
+
+        return subset
+
+    def create_version(self, subset, version_number, locations, data=None):
+        """ Copy given source to destination
+
+        Args:
+            subset (dict): the registered subset of the asset
+            version_number (int): the version number
+            locations (list): the currently registered locations
+
+        Returns:
+            dict: collection of data to create a version
+        """
+        # Imprint currently registered location
+        version_locations = [location for location in locations if
+                             location is not None]
+
+        return {"schema": "pype:version-2.0",
+                "type": "version",
+                "parent": subset["_id"],
+                "name": version_number,
+                "locations": version_locations,
+                "data": data}
+
+    def create_version_data(self, context, instance):
+        """Create the data collection for the version
+
+        Args:
+            context: the current context
+            instance: the current instance being published
+
+        Returns:
+            dict: the required information with instance.data as key
+        """
+
+        families = []
+        current_families = instance.data.get("families", list())
+        instance_family = instance.data.get("family", None)
+
+        if instance_family is not None:
+            families.append(instance_family)
+        families += current_families
+
+        # create relative source path for DB
+        relative_path = os.path.relpath(context.data["currentFile"],
+                                        api.registered_root())
+        source = os.path.join("{root}", relative_path).replace("\\", "/")
+
+        version_data = {"families": families,
+                        "time": context.data["time"],
+                        "author": context.data["user"],
+                        "source": source,
+                        "comment": context.data.get("comment")}
+
+        # Include optional data if present in
+        optionals = ["startFrame", "endFrame", "step", "handles"]
+        for key in optionals:
+            if key in instance.data:
+                version_data[key] = instance.data[key]
+
+        return version_data
diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/render_local.py
index 9e69134600..d1a4ca870a 100644
--- a/pype/plugins/nuke/publish/render_local.py
+++ b/pype/plugins/nuke/publish/render_local.py
@@ -34,8 +34,8 @@ class NukeRenderLocal(pyblish.api.InstancePlugin):
         node_subset_name = instance.data.get("subset", None)
 
         self.log.info("Starting render")
-        self.log.info("Start frame: {}".format(first_frame))
-        self.log.info("End frame: {}".format(last_frame))
+        self.log.info("Start frame: {}".format(instance.data["startFrame"]))
+        self.log.info("End frame: {}".format(instance.data["endFrame"]))
 
         # Render frames
         nuke.execute(
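The validator that follows leans on clique to detect gaps in the collected frames. A small, self-contained illustration of that check (the file names are made up; clique is the same vendored library the plug-ins already import):

import clique

files = ["render.1001.exr", "render.1002.exr", "render.1004.exr"]
collections, remainder = clique.assemble(files)

for collection in collections:
    # Frame 1003 is absent, so the collection is not contiguous.
    print(collection, "contiguous:", collection.is_contiguous())
    print("missing frames:", list(collection.holes().indexes))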
diff --git a/pype/plugins/nuke/publish/validate_collections.py b/pype/plugins/nuke/publish/validate_collections.py
new file mode 100644
index 0000000000..ff1bfdc245
--- /dev/null
+++ b/pype/plugins/nuke/publish/validate_collections.py
@@ -0,0 +1,34 @@
+import pyblish.api
+import pype.api
+import clique
+
+import os
+import glob
+
+
+class ValidateCollections(pyblish.api.InstancePlugin):
+    """Validate collected frame sequences.
+
+    The frames collected from the write node's output directory must
+    assemble into a single, contiguous sequence before they can be
+    published.
+
+    This validates:
+    - The files assemble into exactly one collection.
+    - The collection has no missing frames.
+
+    """
+
+    order = pype.api.ValidateContentsOrder
+    label = "Validate Collections"
+    families = ['prerendered.frames']
+
+    def process(self, instance):
+
+        collections, remainder = clique.assemble(*instance.data['files'])
+        self.log.info('collections: {}'.format(collections))
+
+        assert len(collections) == 1, "There are multiple collections in the folder"
+        collection_instance = instance.data.get('collection', None)
+
+        assert collections[0].is_contiguous(), "Some frames appear to be missing"

From 5fffc698758e76bbe95bd4ba8c4cc4227d1edfcb Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Sat, 1 Dec 2018 01:44:41 +0100
Subject: [PATCH 3/3] restore validate templates temporarily

---
 .../global/publish/collect_deadline_user.py   |  60 ++++
 .../global/publish/submit_publish_job.py      | 330 ++++++++++++++++++
 .../global/publish/validate_templates.py      |  42 +++
 3 files changed, 432 insertions(+)
 create mode 100644 pype/plugins/global/publish/collect_deadline_user.py
 create mode 100644 pype/plugins/global/publish/submit_publish_job.py
 create mode 100644 pype/plugins/global/publish/validate_templates.py

diff --git a/pype/plugins/global/publish/collect_deadline_user.py b/pype/plugins/global/publish/collect_deadline_user.py
new file mode 100644
index 0000000000..f4d13a0545
--- /dev/null
+++ b/pype/plugins/global/publish/collect_deadline_user.py
@@ -0,0 +1,60 @@
+import os
+import subprocess
+
+import pyblish.api
+
+CREATE_NO_WINDOW = 0x08000000
+
+
+def deadline_command(cmd):
+    # Find Deadline
+    path = os.environ.get("DEADLINE_PATH", None)
+    assert path is not None, "Variable 'DEADLINE_PATH' must be set"
+
+    executable = os.path.join(path, "deadlinecommand")
+    if os.name == "nt":
+        executable += ".exe"
+    assert os.path.exists(
+        executable), "Deadline executable not found at %s" % executable
+    assert cmd, "Must have a command"
+
+    query = (executable, cmd)
+
+    process = subprocess.Popen(query, stdout=subprocess.PIPE,
+                               stderr=subprocess.PIPE,
+                               universal_newlines=True,
+                               creationflags=CREATE_NO_WINDOW)
+    out, err = process.communicate()
+
+    return out
+
+
+class CollectDeadlineUser(pyblish.api.ContextPlugin):
+    """Retrieve the local active Deadline user"""
+
+    order = pyblish.api.CollectorOrder + 0.499
+    label = "Deadline User"
+
+    hosts = ['maya', 'fusion', 'nuke']
+    families = [
+        "renderlayer",
+        "saver.deadline",
+        "imagesequence"
+    ]
+
+
+    def process(self, context):
+        """Retrieve the current Deadline user"""
+        user = None
+        try:
+            user = deadline_command("GetCurrentUserName").strip()
+        except:
+            self.log.warning("Deadline command seems not to be working")
+
+        if not user:
+            self.log.warning("No Deadline user found. "
" + "Do you have Deadline installed?") + return + + self.log.info("Found Deadline user: {}".format(user)) + context.data['deadlineUser'] = user diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py new file mode 100644 index 0000000000..cb852f7c43 --- /dev/null +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -0,0 +1,330 @@ +import os +import json +import re + +from avalon import api, io +from avalon.vendor import requests, clique + +import pyblish.api + + +def _get_script(): + """Get path to the image sequence script""" + try: + from pype.fusion.scripts import publish_filesequence + except Exception as e: + raise RuntimeError("Expected module 'publish_imagesequence'" + "to be available") + + module_path = publish_filesequence.__file__ + if module_path.endswith(".pyc"): + module_path = module_path[:-len(".pyc")] + ".py" + + return module_path + + +# Logic to retrieve latest files concerning extendFrames +def get_latest_version(asset_name, subset_name, family): + # Get asset + asset_name = io.find_one({"type": "asset", + "name": asset_name}, + projection={"name": True}) + + subset = io.find_one({"type": "subset", + "name": subset_name, + "parent": asset_name["_id"]}, + projection={"_id": True, "name": True}) + + # Check if subsets actually exists (pre-run check) + assert subset, "No subsets found, please publish with `extendFrames` off" + + # Get version + version_projection = {"name": True, + "data.startFrame": True, + "data.endFrame": True, + "parent": True} + + version = io.find_one({"type": "version", + "parent": subset["_id"], + "data.families": family}, + projection=version_projection, + sort=[("name", -1)]) + + assert version, "No version found, this is a bug" + + return version + + +def get_resources(version, extension=None): + """ + Get the files from the specific version + """ + query = {"type": "representation", "parent": version["_id"]} + if extension: + query["name"] = extension + + representation = io.find_one(query) + assert representation, "This is a bug" + + directory = api.get_representation_path(representation) + print("Source: ", directory) + resources = sorted([os.path.normpath(os.path.join(directory, fname)) + for fname in os.listdir(directory)]) + + return resources + + +def get_resource_files(resources, frame_range, override=True): + + res_collections, _ = clique.assemble(resources) + assert len(res_collections) == 1, "Multiple collections found" + res_collection = res_collections[0] + + # Remove any frames + if override: + for frame in frame_range: + if frame not in res_collection.indexes: + continue + res_collection.indexes.remove(frame) + + return list(res_collection) + + +class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin): + """Submit image sequence publish jobs to Deadline. + + These jobs are dependent on a deadline job submission prior to this + plug-in. + + Renders are submitted to a Deadline Web Service as + supplied via the environment variable AVALON_DEADLINE + + Options in instance.data: + - deadlineSubmission (dict, Required): The returned .json + data from the job submission to deadline. + + - outputDir (str, Required): The output directory where the metadata + file should be generated. It's assumed that this will also be + final folder containing the output files. + + - ext (str, Optional): The extension (including `.`) that is required + in the output filename to be picked up for image sequence + publishing. 
+
+        - publishJobState (str, Optional): "Active" or "Suspended"
+          This defaults to "Suspended"
+
+    This requires a "startFrame" and "endFrame" to be present in instance.data
+    or in context.data.
+
+    """
+
+    label = "Submit image sequence jobs to Deadline"
+    order = pyblish.api.IntegratorOrder + 0.1
+
+    hosts = ["fusion", "maya", "nuke"]
+
+    families = [
+        "render.deadline",
+        "renderlayer",
+        "imagesequence"
+    ]
+
+    def process(self, instance):
+
+        # AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+        #                                   "http://localhost:8082")
+        # assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+
+        try:
+            deadline_url = os.environ["DEADLINE_REST_URL"]
+        except KeyError:
+            raise RuntimeError("Deadline REST API url not found.")
+
+        # Get a submission job
+        job = instance.data.get("deadlineSubmissionJob")
+        if not job:
+            raise RuntimeError("Can't continue without valid deadline "
+                               "submission prior to this plug-in.")
+
+        data = instance.data.copy()
+        subset = data["subset"]
+        state = data.get("publishJobState", "Suspended")
+        job_name = "{batch} - {subset} [publish image sequence]".format(
+            batch=job["Props"]["Name"],
+            subset=subset
+        )
+
+        # Add in start/end frame
+        context = instance.context
+        start = instance.data.get("startFrame", context.data["startFrame"])
+        end = instance.data.get("endFrame", context.data["endFrame"])
+        resources = []
+
+        # Add in regex for sequence filename
+        # This assumes the output files start with subset name and ends with
+        # a file extension.
+        if "ext" in instance.data:
+            ext = re.escape(instance.data["ext"])
+        else:
+            ext = r"\.\D+"
+
+        regex = r"^{subset}.*\d+{ext}$".format(subset=re.escape(subset),
+                                               ext=ext)
+
+        # Write metadata for publish job
+        render_job = data.pop("deadlineSubmissionJob")
+        metadata = {
+            "regex": regex,
+            "startFrame": start,
+            "endFrame": end,
+            "families": ["imagesequence"],
+
+            # Optional metadata (for debugging)
+            "metadata": {
+                "instance": data,
+                "job": job,
+                "session": api.Session.copy()
+            }
+        }
+
+        # Ensure output dir exists
+        output_dir = instance.data["outputDir"]
+        if not os.path.isdir(output_dir):
+            os.makedirs(output_dir)
+
+        if data.get("extendFrames", False):
+
+            family = "imagesequence"
+            override = data["overrideExistingFrame"]
+
+            # override = data.get("overrideExistingFrame", False)
+            out_file = render_job.get("OutFile")
+            if not out_file:
+                raise RuntimeError("OutFile not found in render job!")
+
+            extension = os.path.splitext(out_file[0])[1]
+            _ext = extension[1:]
+
+            # Frame comparison
+            prev_start = None
+            prev_end = None
+            resource_range = range(int(start), int(end)+1)
+
+            # Gather all the subset files (one subset per render pass!)
+            subset_names = [data["subset"]]
+            subset_names.extend(data.get("renderPasses", []))
+
+            for subset_name in subset_names:
+                version = get_latest_version(asset_name=data["asset"],
+                                             subset_name=subset_name,
+                                             family=family)
+
+                # Set prev start / end frames for comparison
+                if not prev_start and not prev_end:
+                    prev_start = version["data"]["startFrame"]
+                    prev_end = version["data"]["endFrame"]
+
+                subset_resources = get_resources(version, _ext)
+                resource_files = get_resource_files(subset_resources,
+                                                    resource_range,
+                                                    override)
+
+                resources.extend(resource_files)
+
+            updated_start = min(start, prev_start)
+            updated_end = max(end, prev_end)
+
+            # Update metadata and instance start / end frame
+            self.log.info("Updating start / end frame : "
+                          "{} - {}".format(updated_start, updated_end))
+
+            # TODO : Improve logic to get new frame range for the
+            # publish job (publish_filesequence.py)
+            # The current approach is not following Pyblish logic which is based
+            # on Collect / Validate / Extract.
+
+            # ---- Collect Plugins ---
+            # Collect Extend Frames - Only run if extendFrames is toggled
+            # # # Store in instance:
+            # # #    Previous rendered files per subset based on frames
+            # # #    --> Add to instance.data[resources]
+            # # #    Update publish frame range
+
+            # ---- Validate Plugins ---
+            # Validate Extend Frames
+            # # # Check if instance has the requirements to extend frames
+            # There might have been some things which can be added to the list
+            # Please do so when fixing this.
+
+            # Start frame
+            metadata["startFrame"] = updated_start
+            metadata["metadata"]["instance"]["startFrame"] = updated_start
+
+            # End frame
+            metadata["endFrame"] = updated_end
+            metadata["metadata"]["instance"]["endFrame"] = updated_end
+
+        metadata_filename = "{}_metadata.json".format(subset)
+        metadata_path = os.path.join(output_dir, metadata_filename)
+        with open(metadata_path, "w") as f:
+            json.dump(metadata, f, indent=4, sort_keys=True)
+
+        # Generate the payload for Deadline submission
+        payload = {
+            "JobInfo": {
+                "Plugin": "Python",
+                "BatchName": job["Props"]["Batch"],
+                "Name": job_name,
+                "JobType": "Normal",
+                "JobDependency0": job["_id"],
+                "UserName": job["Props"]["User"],
+                "Comment": instance.context.data.get("comment", ""),
+                "InitialStatus": state
+            },
+            "PluginInfo": {
+                "Version": "3.6",
+                "ScriptFile": _get_script(),
+                "Arguments": '--path "{}"'.format(metadata_path),
+                "SingleFrameOnly": "True"
+            },
+
+            # Mandatory for Deadline, may be empty
+            "AuxFiles": []
+        }
+
+        # Transfer the environment from the original job to this dependent
+        # job so they use the same environment
+        environment = job["Props"].get("Env", {})
+        payload["JobInfo"].update({
+            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
+                key=key,
+                value=environment[key]
+            ) for index, key in enumerate(environment)
+        })
+
+        # Avoid copied pools and remove secondary pool
+        payload["JobInfo"]["Pool"] = "none"
+        payload["JobInfo"].pop("SecondaryPool", None)
+
+        self.log.info("Submitting..")
+        self.log.info(json.dumps(payload, indent=4, sort_keys=True))
+
+        url = "{}/api/jobs".format(deadline_url)
+        response = requests.post(url, json=payload)
+        if not response.ok:
+            raise Exception(response.text)
+
+        # Copy files from previous render if extendFrame is True
+        if data.get("extendFrames", False):
+
+            self.log.info("Preparing to copy ..")
+            import shutil
+
+            dest_path = data["outputDir"]
+            for source in resources:
+                src_file = os.path.basename(source)
+                dest = os.path.join(dest_path, src_file)
+                shutil.copy(source, dest)
+
self.log.info("Finished copying %i files" % len(resources)) diff --git a/pype/plugins/global/publish/validate_templates.py b/pype/plugins/global/publish/validate_templates.py new file mode 100644 index 0000000000..f806104bb2 --- /dev/null +++ b/pype/plugins/global/publish/validate_templates.py @@ -0,0 +1,42 @@ +import pyblish.api +from app.api import ( + Templates +) + +class ValidateTemplates(pyblish.api.ContextPlugin): + """Check if all templates were filed""" + + label = "Validate Templates" + order = pyblish.api.ValidatorOrder - 0.1 + hosts = ["maya", "houdini", "nuke"] + + def process(self, context): + + anatomy = context.data["anatomy"] + if not anatomy: + raise RuntimeError("Did not find templates") + else: + data = { "project": {"name": "D001_projectsx", + "code": "prjX"}, + "representation": "exr", + "VERSION": 3, + "SUBVERSION": 10, + "task": "animation", + "asset": "sh001", + "hierarchy": "ep101/sq01/sh010"} + + + anatomy = context.data["anatomy"].format(data) + self.log.info(anatomy.work.path) + + data = { "project": {"name": "D001_projectsy", + "code": "prjY"}, + "representation": "abc", + "VERSION": 1, + "SUBVERSION": 5, + "task": "lookdev", + "asset": "bob", + "hierarchy": "ep101/sq01/bob"} + + anatomy = context.data["anatomy"].format(data) + self.log.info(anatomy.work.file)