From 8ae23bae93c1b11a0500a9900db4e2c12f9f7a8f Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Fri, 21 Feb 2020 21:12:00 +0100
Subject: [PATCH 1/6] fixed preview tag

---
 pype/plugins/global/publish/submit_publish_job.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py
index 7b5dac28d4..f6f5d5abba 100644
--- a/pype/plugins/global/publish/submit_publish_job.py
+++ b/pype/plugins/global/publish/submit_publish_job.py
@@ -381,7 +381,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
                 "stagingDir": staging,
                 "anatomy_template": "render",
                 "fps": new_instance.get("fps"),
-                "tags": ["review", "preview"] if preview else []
+                "tags": ["review"] if preview else []
             }
 
             # add tags

From 86d3f4d7d2e3d0d95e28b06b1523d0073bc8d7e3 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Sat, 22 Feb 2020 00:19:50 +0100
Subject: [PATCH 2/6] prioritize instance version in burnins

---
 pype/plugins/global/publish/extract_burnin.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
index b95c15f340..d09ba91f72 100644
--- a/pype/plugins/global/publish/extract_burnin.py
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -26,8 +26,8 @@ class ExtractBurnin(pype.api.Extractor):
         if "representations" not in instance.data:
             raise RuntimeError("Burnin needs already created mov to work on.")
 
-        version = instance.context.data.get(
-            'version', instance.data.get('version'))
+        version = instance.data.get(
+            'version', instance.context.data.get('version'))
         frame_start = int(instance.data.get("frameStart") or 0)
         frame_end = int(instance.data.get("frameEnd") or 1)
         duration = frame_end - frame_start + 1

From 66a2bc8201dab971a2d98934e541519dba39406b Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Sat, 22 Feb 2020 00:20:56 +0100
Subject: [PATCH 3/6] append renders and fix representation names

---
 pype/plugins/global/publish/submit_publish_job.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py
index f6f5d5abba..df00803326 100644
--- a/pype/plugins/global/publish/submit_publish_job.py
+++ b/pype/plugins/global/publish/submit_publish_job.py
@@ -170,7 +170,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
     }
 
     # list of family names to transfer to new family if present
-    families_transfer = ["render2d", "ftrack", "slate"]
+    families_transfer = ["render3d", "render2d", "ftrack", "slate"]
 
     def _submit_deadline_post_job(self, instance, job):
         """
@@ -372,7 +372,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
 
             # create represenation
             rep = {
-                "name": aov,
+                "name": ext,
                 "ext": ext,
                 "files": [os.path.basename(f) for f in list(cols[0])],
                 "frameStart": start,
@@ -617,12 +617,14 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         `foo` and `xxx`
 
         """
+        self.log.info(data.get("expectedFiles"))
+
         if isinstance(data.get("expectedFiles")[0], dict):
            # we cannot attach AOVs to other subsets as we consider every
            # AOV subset of its own.
 
            if len(data.get("attachTo")) > 0:
-                assert len(data.get("expectedFiles")[0].keys()) > 1, (
+                assert len(data.get("expectedFiles")[0].keys()) == 1, (
                     "attaching multiple AOVs or renderable cameras to "
                     "subset is not supported")
 
@@ -660,7 +662,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
                 new_i = copy(i)
                 new_i["version"] = at.get("version")
                 new_i["subset"] = at.get("subset")
-                new_i["families"].append(at.get("family"))
+                new_i["family"] = at.get("family")
+                new_i["append"] = True
                 new_instances.append(new_i)
                 self.log.info(" - {} / v{}".format(
                     at.get("subset"), at.get("version")))

From 61f909c3827853d395b40a281e54181699e65cfd Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Sat, 22 Feb 2020 00:22:52 +0100
Subject: [PATCH 4/6] simplify version determination

---
 .../maya/publish/determine_future_version.py | 63 +------------------
 1 file changed, 3 insertions(+), 60 deletions(-)

diff --git a/pype/plugins/maya/publish/determine_future_version.py b/pype/plugins/maya/publish/determine_future_version.py
index 72dbf719d7..afa249aca2 100644
--- a/pype/plugins/maya/publish/determine_future_version.py
+++ b/pype/plugins/maya/publish/determine_future_version.py
@@ -1,6 +1,4 @@
 import pyblish
-from avalon import api, io
-
 
 class DetermineFutureVersion(pyblish.api.InstancePlugin):
     """
@@ -20,66 +18,11 @@ class DetermineFutureVersion(pyblish.api.InstancePlugin):
 
         for i in context:
             if i.data["subset"] in attach_to_subsets:
-                latest_version = self._get_latest_version(i.data["subset"])
-
-                # this will get corresponding subset in attachTo list
-                # so we can set version there
+                # # this will get corresponding subset in attachTo list
+                # # so we can set version there
                 sub = next(item for item in instance.data['attachTo'] if item["subset"] == i.data["subset"])  # noqa: E501
-                if not latest_version:
-                    # if latest_version is None, subset is not yet in
-                    # database so we'll check its instance to see if version
-                    # is there and use that, or we'll just stay with v1
-                    latest_version = i.data.get("version", 1)
-
-                sub["version"] = latest_version
+                sub["version"] = i.data.get("version", 1)
                 self.log.info("render will be attached to {} v{}".format(
                     sub["subset"], sub["version"]
                 ))
-
-    def _get_latest_version(self, subset):
-        latest_version = None
-
-        project_name = api.Session["AVALON_PROJECT"]
-        asset_name = api.Session["AVALON_ASSET"]
-
-        project_entity = io.find_one({
-            "type": "project",
-            "name": project_name
-        })
-
-        assert project_entity, (
-            "Project '{0}' was not found."
-        ).format(project_name)
-
-        asset_entity = io.find_one({
-            "type": "asset",
-            "name": asset_name,
-            "parent": project_entity["_id"]
-        })
-        assert asset_entity, (
-            "No asset found by the name '{0}' in project '{1}'"
-        ).format(asset_name, project_name)
-
-        if asset_entity:
-            subset_entity = io.find_one({
-                "type": "subset",
-                "name": subset,
-                "parent": asset_entity["_id"]
-            })
-
-            if subset_entity is None:
-                self.log.info("Subset entity does not exist yet.")
-                pass
-
-            else:
-                version_entity = io.find_one(
-                    {
-                        "type": "version",
-                        "parent": subset_entity["_id"]
-                    },
-                    sort=[("name", -1)]
-                )
-                if version_entity:
-                    latest_version = version_entity["name"]
-        return latest_version

From 284da78ccdbebc76fba70189bf9e8a9dd0a181a8 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Sat, 22 Feb 2020 00:23:23 +0100
Subject: [PATCH 5/6] allow multichannel rendering

---
 .../maya/publish/validate_rendersettings.py | 20 +++++++++++++------
 1 file changed, 14 insertions(+), 6 deletions(-)

diff --git a/pype/plugins/maya/publish/validate_rendersettings.py b/pype/plugins/maya/publish/validate_rendersettings.py
index d6cbea8b2d..c98f0f8cdc 100644
--- a/pype/plugins/maya/publish/validate_rendersettings.py
+++ b/pype/plugins/maya/publish/validate_rendersettings.py
@@ -50,7 +50,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
 
     ImagePrefixTokens = {
-        'arnold': 'maya///_//_',
         'redshift': 'maya///',
         'vray': 'maya///',
         'renderman': '_..'
@@ -143,11 +143,19 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
                                    dir_prefix))
 
         else:
-            if not re.search(cls.R_AOV_TOKEN, prefix):
-                invalid = True
-                cls.log.error("Wrong image prefix [ {} ] - "
-                              "doesn't have: '' or "
-                              "token".format(prefix))
+            multichannel = cmds.getAttr("defaultArnoldDriver.mergeAOVs")
+            if multichannel:
+                if re.search(cls.R_AOV_TOKEN, prefix):
+                    invalid = True
+                    cls.log.error("Wrong image prefix [ {} ] - "
+                                  "You can't use '' token "
+                                  "with merge AOVs turned on".format(prefix))
+            else:
+                if not re.search(cls.R_AOV_TOKEN, prefix):
+                    invalid = True
+                    cls.log.error("Wrong image prefix [ {} ] - "
+                                  "doesn't have: '' or "
+                                  "token".format(prefix))
 
         # prefix check
         if prefix.lower() != cls.ImagePrefixTokens[renderer].lower():

From 6f8a860f00c76a5d3b93d96155bec8fda0343889 Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Sat, 22 Feb 2020 01:19:32 +0100
Subject: [PATCH 6/6] rendering is done by default from published file

---
 .../global/publish/submit_publish_job.py     |  2 +-
 pype/plugins/maya/publish/collect_render.py  |  4 ++
 .../maya/publish/submit_maya_deadline.py     | 57 +++++++++++++++++--
 3 files changed, 57 insertions(+), 6 deletions(-)

diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py
index df00803326..29dce58101 100644
--- a/pype/plugins/global/publish/submit_publish_job.py
+++ b/pype/plugins/global/publish/submit_publish_job.py
@@ -662,8 +662,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
                 new_i = copy(i)
                 new_i["version"] = at.get("version")
                 new_i["subset"] = at.get("subset")
-                new_i["family"] = at.get("family")
                 new_i["append"] = True
+                new_i["families"].append(at.get("family"))
                 new_instances.append(new_i)
                 self.log.info(" - {} / v{}".format(
                     at.get("subset"), at.get("version")))

diff --git a/pype/plugins/maya/publish/collect_render.py b/pype/plugins/maya/publish/collect_render.py
index f76ee1120f..07eec4192f 100644
--- a/pype/plugins/maya/publish/collect_render.py
+++ b/pype/plugins/maya/publish/collect_render.py
@@ -101,6 +101,10 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
                 render_instance = instance
                 render_instance.data["remove"] = True
 
+            # make sure workfile instance publishing is enabled
+            if 'workfile' in instance.data['families']:
+                instance.data["publish"] = True
+
         if not render_instance:
             self.log.info("No render instance found, skipping render "
                           "layer collection.")

diff --git a/pype/plugins/maya/publish/submit_maya_deadline.py b/pype/plugins/maya/publish/submit_maya_deadline.py
index 4c6399a880..2f236be424 100644
--- a/pype/plugins/maya/publish/submit_maya_deadline.py
+++ b/pype/plugins/maya/publish/submit_maya_deadline.py
@@ -117,6 +117,8 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
     else:
         optional = True
 
+    use_published = True
+
     def process(self, instance):
 
         DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
@@ -125,21 +127,66 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
 
         context = instance.context
         workspace = context.data["workspaceDir"]
+        anatomy = context.data['anatomy']
         filepath = None
 
+        if self.use_published:
+            for i in context:
+                if "workfile" in i.data["families"]:
+                    assert i.data["publish"] is True, (
+                        "Workfile (scene) must be published along")
+                    template_data = i.data.get("anatomyData")
+                    rep = i.data.get("representations")[0].get("name")
+                    template_data["representation"] = rep
+                    template_data["ext"] = rep
+                    template_data["comment"] = None
+                    anatomy_filled = anatomy.format(template_data)
+                    template_filled = anatomy_filled["publish"]["path"]
+                    filepath = os.path.normpath(template_filled)
+                    self.log.info("Using published scene for render {}".format(
+                        filepath))
+
+                    # now we need to switch scene in expected files
+                    # because token will now point to published
+                    # scene file and that might differ from current one
+                    new_scene = os.path.splitext(
+                        os.path.basename(filepath))[0]
+                    orig_scene = os.path.splitext(
+                        os.path.basename(context.data["currentFile"]))[0]
+                    exp = instance.data.get("expectedFiles")
+
+                    if isinstance(exp[0], dict):
+                        # we have aovs and we need to iterate over them
+                        new_exp = {}
+                        for aov, files in exp[0].items():
+                            replaced_files = []
+                            for f in files:
+                                replaced_files.append(
+                                    f.replace(orig_scene, new_scene)
+                                )
+                            new_exp[aov] = replaced_files
+                        instance.data["expectedFiles"] = [new_exp]
+                    else:
+                        new_exp = []
+                        for f in exp:
+                            new_exp.append(
+                                f.replace(orig_scene, new_scene)
+                            )
+                        instance.data["expectedFiles"] = [new_exp]
+                    self.log.info("Scene name was switched {} -> {}".format(
+                        orig_scene, new_scene
+                    ))
+
         allInstances = []
         for result in context.data["results"]:
             if (result["instance"] is not None and
                result["instance"] not in allInstances):
                 allInstances.append(result["instance"])
-        for inst in allInstances:
-            print(inst)
-            if inst.data['family'] == 'scene':
-                filepath = inst.data['destination_list'][0]
-
+        # fallback if nothing was set
         if not filepath:
+            self.log.warning("Falling back to workfile")
             filepath = context.data["currentFile"]
 
         self.log.debug(filepath)
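
Note: the following is a minimal standalone sketch of the expected-files renaming idea from PATCH 6/6. Once the Deadline job points at the published scene, every expected render path that embeds the work-scene name has to be rewritten to the published scene name, for both the flat list layout and the per-AOV dict layout. The function name and the sample paths below are illustrative assumptions, not part of the patches themselves.

import os


def switch_scene_in_expected_files(expected_files, current_file, published_file):
    """Replace the work scene name with the published scene name in render paths.

    ``expected_files`` is either a flat list of paths or a one-item list
    holding a dict that maps AOV name -> list of paths, mirroring the two
    layouts handled in submit_maya_deadline.py.
    """
    orig_scene = os.path.splitext(os.path.basename(current_file))[0]
    new_scene = os.path.splitext(os.path.basename(published_file))[0]

    if isinstance(expected_files[0], dict):
        # per-AOV layout, e.g. [{"beauty": [...], "depth": [...]}]
        return [{
            aov: [f.replace(orig_scene, new_scene) for f in files]
            for aov, files in expected_files[0].items()
        }]
    # flat layout, e.g. ["renders/sh010_work.0001.exr", ...]
    return [f.replace(orig_scene, new_scene) for f in expected_files]


# hypothetical data: work scene "sh010_work", published scene "sh010_v001"
expected = [{"beauty": ["renders/sh010_work/sh010_work_beauty.0001.exr"]}]
print(switch_scene_in_expected_files(
    expected, "/work/maya/sh010_work.ma", "/publish/maya/sh010_v001.ma"))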