From 65d785d100c986128a88f1dc2c77b5321b85d7da Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Wed, 24 Aug 2022 00:29:08 +0200
Subject: [PATCH 01/82] Draft stash for refactoring maya submit deadline to
 use `AbstractSubmitDeadline` base.

- This does *NOT* work currently!
---
 .../deadline/abstract_submit_deadline.py      |   22 +
 .../plugins/publish/submit_maya_deadline.py   | 1297 ++++++++---------
 2 files changed, 623 insertions(+), 696 deletions(-)

diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py
index 0bad981fdf..577378335e 100644
--- a/openpype/modules/deadline/abstract_submit_deadline.py
+++ b/openpype/modules/deadline/abstract_submit_deadline.py
@@ -359,6 +359,27 @@ class DeadlineJobInfo(object):
     def OutputDirectory(self, val):  # noqa: N802
         self._outputDirectory.append(val)
 
+    # Asset Dependency
+    # ----------------------------------------------
+    _assetDependency = attr.ib(factory=list)
+
+    @property
+    def AssetDependency(self):  # noqa: N802
+        """Return all AssetDependency values formatted for Deadline.
+
+        Returns:
+            dict: as `{'AssetDependency0': 'path'}`
+
+        """
+        out = {}
+        for index, v in enumerate(self._assetDependency):
+            out["AssetDependency{}".format(index)] = v
+        return out
+
+    @AssetDependency.setter
+    def AssetDependency(self, val):  # noqa: N802
+        self._assetDependency.append(val)
+
     # Tile Job
     # ----------------------------------------------
     TileJob = attr.ib(default=None)  # Default: false
@@ -396,6 +417,7 @@ class DeadlineJobInfo(object):
         serialized.update(self.OutputFilename)
         serialized.update(self.OutputFilenameTile)
         serialized.update(self.OutputDirectory)
+        serialized.update(self.AssetDependency)
 
         return serialized
 
diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py
index 7966861358..6dfa48a9f8 100644
--- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py
@@ -18,7 +18,6 @@ Attributes:
 
 from __future__ import print_function
 import os
-import json
 import getpass
 import copy
 import re
@@ -27,252 +26,32 @@ from datetime import datetime
 import itertools
 from collections import OrderedDict
 
+import attr
 import clique
-import requests
 
 from maya import cmds
 
-import pyblish.api
-
-from openpype.lib import requests_post
 from openpype.hosts.maya.api import lib
 from openpype.pipeline import legacy_io
 
-# Documentation for keys available at:
-#     https://docs.thinkboxsoftware.com
-#    /products/deadline/8.0/1_User%20Manual/manual
-#    /manual-submission.html#job-info-file-options
-
-payload_skeleton_template = {
-    "JobInfo": {
-        "BatchName": None,  # Top-level group name
-        "Name": None,  # Job name, as seen in Monitor
-        "UserName": None,
-        "Plugin": "MayaBatch",
-        "Frames": "{start}-{end}x{step}",
-        "Comment": None,
-        "Priority": 50,
-    },
-    "PluginInfo": {
-        "SceneFile": None,  # Input
-        "OutputFilePath": None,  # Output directory and filename
-        "OutputFilePrefix": None,
-        "Version": cmds.about(version=True),  # Mandatory for Deadline
-        "UsingRenderLayers": True,
-        "RenderLayer": None,  # Render only this layer
-        "Renderer": None,
-        "ProjectPath": None,  # Resolve relative references
-        "RenderSetupIncludeLights": None,  # Include all lights flag.
- }, - "AuxFiles": [] # Mandatory for Deadline, may be empty -} +from openpype_modules.deadline import abstract_submit_deadline +from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo -def _format_tiles( - filename, index, tiles_x, tiles_y, - width, height, prefix): - """Generate tile entries for Deadline tile job. - - Returns two dictionaries - one that can be directly used in Deadline - job, second that can be used for Deadline Assembly job configuration - file. - - This will format tile names: - - Example:: - { - "OutputFilename0Tile0": "_tile_1x1_4x4_Main_beauty.1001.exr", - "OutputFilename0Tile1": "_tile_2x1_4x4_Main_beauty.1001.exr" - } - - And add tile prefixes like: - - Example:: - Image prefix is: - `maya///_` - - Result for tile 0 for 4x4 will be: - `maya///_tile_1x1_4x4__` - - Calculating coordinates is tricky as in Job they are defined as top, - left, bottom, right with zero being in top-left corner. But Assembler - configuration file takes tile coordinates as X, Y, Width and Height and - zero is bottom left corner. - - Args: - filename (str): Filename to process as tiles. - index (int): Index of that file if it is sequence. - tiles_x (int): Number of tiles in X. - tiles_y (int): Number if tikes in Y. - width (int): Width resolution of final image. - height (int): Height resolution of final image. - prefix (str): Image prefix. - - Returns: - (dict, dict): Tuple of two dictionaires - first can be used to - extend JobInfo, second has tiles x, y, width and height - used for assembler configuration. - - """ - tile = 0 - out = {"JobInfo": {}, "PluginInfo": {}} - cfg = OrderedDict() - w_space = width / tiles_x - h_space = height / tiles_y - - cfg["TilesCropped"] = "False" - - for tile_x in range(1, tiles_x + 1): - for tile_y in reversed(range(1, tiles_y + 1)): - tile_prefix = "_tile_{}x{}_{}x{}_".format( - tile_x, tile_y, - tiles_x, - tiles_y - ) - out_tile_index = "OutputFilename{}Tile{}".format( - str(index), tile - ) - new_filename = "{}/{}{}".format( - os.path.dirname(filename), - tile_prefix, - os.path.basename(filename) - ) - out["JobInfo"][out_tile_index] = new_filename - out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ - "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) - - out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 - out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 - out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 - out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 - - cfg["Tile{}".format(tile)] = new_filename - cfg["Tile{}Tile".format(tile)] = new_filename - cfg["Tile{}FileName".format(tile)] = new_filename - cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space - - cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space) - - cfg["Tile{}Width".format(tile)] = w_space - cfg["Tile{}Height".format(tile)] = h_space - - tile += 1 - return out, cfg +@attr.s +class DeadlinePluginInfo(): + SceneFile = attr.ib(default=None) # Input + OutputFilePath = attr.ib(default=None) # Output directory and filename + OutputFilePrefix = attr.ib(default=None) + Version = attr.ib(default=None) # Mandatory for Deadline + UsingRenderLayers = attr.ib(default=True) + RenderLayer = attr.ib(default=None) # Render only this layer + Renderer = attr.ib(default=None) + ProjectPath = attr.ib(default=None) # Resolve relative references + RenderSetupIncludeLights = attr.ib(default=None) # 
Include all lights flag -def get_renderer_variables(renderlayer, root): - """Retrieve the extension which has been set in the VRay settings. - - Will return None if the current renderer is not VRay - For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which - start with `rs`. Use the actual node name, do NOT use the `nice name` - - Args: - renderlayer (str): the node name of the renderlayer. - root (str): base path to render - - Returns: - dict - - """ - renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer()) - render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"]) - - padding = cmds.getAttr("{}.{}".format(render_attrs["node"], - render_attrs["padding"])) - - filename_0 = cmds.renderSettings( - fullPath=True, - gin="#" * int(padding), - lut=True, - layer=renderlayer or lib.get_current_renderlayer())[0] - filename_0 = re.sub('_', '_beauty', - filename_0, flags=re.IGNORECASE) - prefix_attr = "defaultRenderGlobals.imageFilePrefix" - - scene = cmds.file(query=True, sceneName=True) - scene, _ = os.path.splitext(os.path.basename(scene)) - - if renderer == "vray": - renderlayer = renderlayer.split("_")[-1] - # Maya's renderSettings function does not return V-Ray file extension - # so we get the extension from vraySettings - extension = cmds.getAttr("vraySettings.imageFormatStr") - - # When V-Ray image format has not been switched once from default .png - # the getAttr command above returns None. As such we explicitly set - # it to `.png` - if extension is None: - extension = "png" - - if extension in ["exr (multichannel)", "exr (deep)"]: - extension = "exr" - - prefix_attr = "vraySettings.fileNamePrefix" - filename_prefix = cmds.getAttr(prefix_attr) - # we need to determine path for vray as maya `renderSettings` query - # does not work for vray. - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = "{}.{}.{}".format( - filename_0, "#" * int(padding), extension) - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "renderman": - prefix_attr = "rmanGlobals.imageFileFormat" - # NOTE: This is guessing extensions from renderman display types. - # Some of them are just framebuffers, d_texture format can be - # set in display setting. We set those now to None, but it - # should be handled more gracefully. 
- display_types = { - "d_deepexr": "exr", - "d_it": None, - "d_null": None, - "d_openexr": "exr", - "d_png": "png", - "d_pointcloud": "ptc", - "d_targa": "tga", - "d_texture": None, - "d_tiff": "tif" - } - - extension = display_types.get( - cmds.listConnections("rmanDefaultDisplay.displayType")[0], - "exr" - ) or "exr" - - filename_prefix = "{}/{}".format( - cmds.getAttr("rmanGlobals.imageOutputDir"), - cmds.getAttr("rmanGlobals.imageFileFormat") - ) - - renderlayer = renderlayer.split("_")[-1] - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', "#" * int(padding), filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', extension, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "redshift": - # mapping redshift extension dropdown values to strings - ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"] - extension = ext_mapping[ - cmds.getAttr("redshiftOptions.imageFormat") - ] - else: - # Get the extension, getAttr defaultRenderGlobals.imageFormat - # returns an index number. - filename_base = os.path.basename(filename_0) - extension = os.path.splitext(filename_base)[-1].strip(".") - - filename_prefix = cmds.getAttr(prefix_attr) - return {"ext": extension, - "filename_prefix": filename_prefix, - "padding": padding, - "filename_0": filename_0} - - -class MayaSubmitDeadline(pyblish.api.InstancePlugin): +class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): """Submit available render layers to Deadline. Renders are submitted to a Deadline Web Service as @@ -284,15 +63,12 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): """ - label = "Submit to Deadline" - order = pyblish.api.IntegratorOrder + 0.1 + label = "Submit Render to Deadline" hosts = ["maya"] families = ["renderlayer"] targets = ["local"] - use_published = True tile_assembler_plugin = "OpenPypeTileAssembler" - asset_dependencies = False priority = 50 tile_priority = 50 limit_groups = [] @@ -300,32 +76,173 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): pluginInfo = {} group = "none" - def process(self, instance): - """Plugin entry point.""" - instance.data["toBeRenderedOn"] = "deadline" + def get_job_info(self): + job_info = DeadlineJobInfo(Plugin="MayaBatch") + + # todo: test whether this works for existing production cases + # where custom jobInfo was stored in the project settings + for key, value in self.jobInfo.items(): + setattr(job_info, key, value) + + instance = self._instance context = instance.context - self._instance = instance - self.payload_skeleton = copy.deepcopy(payload_skeleton_template) + filepath = context.data["currentFile"] + filename = os.path.basename(filepath) - # get default deadline webservice url from deadline module - self.deadline_url = instance.context.data.get("defaultDeadline") - # if custom one is set in instance, use that - if instance.data.get("deadlineUrl"): - self.deadline_url = instance.data.get("deadlineUrl") - assert self.deadline_url, "Requires Deadline Webservice URL" + job_info.Name = "%s - %s" % (filename, instance.name) + job_info.BatchName = filename + job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") + job_info.UserName = context.data.get( + "deadlineUser", getpass.getuser()) - # just using existing names from Setting - self._job_info = self.jobInfo + # Deadline requires integers in frame 
range + frames = "{start}-{end}x{step}".format( + start=int(instance.data["frameStartHandle"]), + end=int(instance.data["frameEndHandle"]), + step=int(instance.data["byFrameStep"]), + ) + job_info.Frames = frames - self._plugin_info = self.pluginInfo + job_info.Pool = instance.data.get("primaryPool") + job_info.SecondaryPool = instance.data.get("secondaryPool") + job_info.ChunkSize = instance.data.get("chunkSize", 10) + job_info.Comment = context.data.get("comment") + job_info.Priority = instance.data.get("priority", self.priority) + + if self.group != "none" and self.group: + job_info.Group = self.group + + if self.limit_groups: + job_info.LimitGroups = ",".join(self.limit_groups) + + self.payload_skeleton["JobInfo"]["Name"] = jobname + self.payload_skeleton["JobInfo"]["BatchName"] = src_filename + + # Optional, enable double-click to preview rendered + # frames from Deadline Monitor + self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ + os.path.dirname(output_filename_0).replace("\\", "/") + self.payload_skeleton["JobInfo"]["OutputFilename0"] = \ + output_filename_0.replace("\\", "/") + + # Add options from RenderGlobals------------------------------------- + render_globals = instance.data.get("renderGlobals", {}) + self.payload_skeleton["JobInfo"].update(render_globals) + + keys = [ + "FTRACK_API_KEY", + "FTRACK_API_USER", + "FTRACK_SERVER", + "OPENPYPE_SG_USER", + "AVALON_PROJECT", + "AVALON_ASSET", + "AVALON_TASK", + "AVALON_APP_NAME", + "OPENPYPE_DEV", + "OPENPYPE_LOG_NO_COLORS", + "OPENPYPE_VERSION" + ] + # Add mongo url if it's enabled + if self._instance.context.data.get("deadlinePassMongoUrl"): + keys.append("OPENPYPE_MONGO") + + environment = dict({key: os.environ[key] for key in keys + if key in os.environ}, **legacy_io.Session) + + + # TODO: Taken from old publish class - test whether still needed + environment["OPENPYPE_LOG_NO_COLORS"] = "1" + environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True) + # to recognize job from PYPE for turning Event On/Off + environment["OPENPYPE_RENDER_JOB"] = "1" + + for key in keys: + val = environment.get(key) + if val: + job_info.EnvironmentKeyValue = "{key}={value}".format( + key=key, + value=val + ) + # to recognize job from PYPE for turning Event On/Off + job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + + for i, filepath in enumerate(instance.data["files"]): + dirname = os.path.dirname(filepath) + fname = os.path.basename(filepath) + job_info.OutputDirectory = dirname.replace("\\", "/") + job_info.OutputFilename = fname + + # Adding file dependencies. + if self.asset_dependencies: + dependencies = instance.context.data["fileDependencies"] + dependencies.append(context.data["currentFile"]) + for dependency in dependencies: + job_info.AssetDependency = dependency + + # Add list of expected files to job + # --------------------------------- + exp = instance.data.get("expectedFiles") + + def _get_output_filename(files): + col, rem = clique.assemble(files) + if not col and rem: + # we couldn't find any collections but have + # individual files. 
+ assert len(rem) == 1, ( + "Found multiple non related files " + "to render, don't know what to do " + "with them.") + return rem[0] + else: + return col[0].format('{head}{padding}{tail}') + + if isinstance(exp[0], dict): + # we have aovs and we need to iterate over them + for _aov, files in exp[0].items(): + output_file = _get_output_filename(files) + job_info.OutputFilename = output_file + else: + output_file = _get_output_filename(exp) + job_info.OutputFilename = output_file + + return job_info + + def get_plugin_info(self): + + instance = self._instance + context = instance.context + + renderlayer = instance.data['setMembers'] # rs_beauty + + self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer + self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa + + # Output driver to render + plugin_info = DeadlinePluginInfo( + SceneFile=context.data["currentFile"], + Version=cmds.about(version=True), + ) + + return attr.asdict(plugin_info) + + def process_submission(self): + # Override to NOT submit by default when calling super process() method + pass + + def process(self, instance): + super(MayaSubmitDeadline, self).process(instance) + + # TODO: Avoid the need for this logic here, needed for submit publish + # Store output dir for unified publisher (filesequence) + output_dir = os.path.dirname(instance.data["files"][0]) + instance.data["outputDir"] = output_dir + instance.data["toBeRenderedOn"] = "deadline" self.limit_groups = self.limit context = instance.context workspace = context.data["workspaceDir"] - anatomy = context.data['anatomy'] - instance.data["toBeRenderedOn"] = "deadline" filepath = None patches = ( @@ -336,80 +253,24 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "scene_patches", {}) ) - # Handle render/export from published scene or not ------------------ - if self.use_published: - patched_files = [] - for i in context: - if "workfile" not in i.data["families"]: - continue - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0].get("name") - template_data["representation"] = rep - template_data["ext"] = rep - template_data["comment"] = None - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] - filepath = os.path.normpath(template_filled) - self.log.info("Using published scene for render {}".format( - filepath)) + # todo: on self.use_published originally use template_data["representation"] using .get("name") instead of .get("ext") + # todo: on self.use_published replace path for publishRenderMetadataFolder + # todo: on self.use_published apply scene patches to workfile instance + # rep = i.data.get("representations")[0].get("name") - if not os.path.exists(filepath): - self.log.error("published scene does not exist!") - raise - # now we need to switch scene in expected files - # because token will now point to published - # scene file and that might differ from current one - new_scene = os.path.splitext( - os.path.basename(filepath))[0] - orig_scene = os.path.splitext( - os.path.basename(context.data["currentFile"]))[0] - exp = instance.data.get("expectedFiles") + # if instance.data.get("publishRenderMetadataFolder"): + # instance.data["publishRenderMetadataFolder"] = \ + # instance.data["publishRenderMetadataFolder"].replace( + # orig_scene, new_scene) + # self.log.info("Scene name was switched {} -> {}".format( + # orig_scene, 
new_scene + # )) + # # patch workfile is needed + # if filepath not in patched_files: + # patched_file = self._patch_workfile(filepath, patches) + # patched_files.append(patched_file) - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - new_exp = {} - for aov, files in exp[0].items(): - replaced_files = [] - for f in files: - replaced_files.append( - f.replace(orig_scene, new_scene) - ) - new_exp[aov] = replaced_files - instance.data["expectedFiles"] = [new_exp] - else: - new_exp = [] - for f in exp: - new_exp.append( - f.replace(orig_scene, new_scene) - ) - instance.data["expectedFiles"] = [new_exp] - - if instance.data.get("publishRenderMetadataFolder"): - instance.data["publishRenderMetadataFolder"] = \ - instance.data["publishRenderMetadataFolder"].replace( - orig_scene, new_scene) - self.log.info("Scene name was switched {} -> {}".format( - orig_scene, new_scene - )) - # patch workfile is needed - if filepath not in patched_files: - patched_file = self._patch_workfile(filepath, patches) - patched_files.append(patched_file) - - all_instances = [] - for result in context.data["results"]: - if (result["instance"] is not None and - result["instance"] not in all_instances): # noqa: E128 - all_instances.append(result["instance"]) - - # fallback if nothing was set - if not filepath: - self.log.warning("Falling back to workfile") - filepath = context.data["currentFile"] - - self.log.debug(filepath) + filepath = self.scene_path # collect by super().process # Gather needed data ------------------------------------------------ default_render_file = instance.context.data.get('project_settings')\ @@ -417,10 +278,8 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): .get('RenderSettings')\ .get('default_render_image_folder') filename = os.path.basename(filepath) - comment = context.data.get("comment", "") dirname = os.path.join(workspace, default_render_file) renderlayer = instance.data['setMembers'] # rs_beauty - deadline_user = context.data.get("user", getpass.getuser()) # Always use the original work file name for the Job name even when # rendering is done from the published Work File. 
The original work @@ -454,116 +313,34 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): pass # Fill in common data to payload ------------------------------------ - payload_data = {} - payload_data["filename"] = filename - payload_data["filepath"] = filepath - payload_data["jobname"] = jobname - payload_data["deadline_user"] = deadline_user - payload_data["comment"] = comment - payload_data["output_filename_0"] = output_filename_0 - payload_data["render_variables"] = render_variables - payload_data["renderlayer"] = renderlayer - payload_data["workspace"] = workspace - payload_data["dirname"] = dirname - - self.log.info("--- Submission data:") - for k, v in payload_data.items(): - self.log.info("- {}: {}".format(k, v)) - self.log.info("-" * 20) - - frame_pattern = self.payload_skeleton["JobInfo"]["Frames"] - self.payload_skeleton["JobInfo"]["Frames"] = frame_pattern.format( - start=int(self._instance.data["frameStartHandle"]), - end=int(self._instance.data["frameEndHandle"]), - step=int(self._instance.data["byFrameStep"])) - - self.payload_skeleton["JobInfo"]["Plugin"] = self._instance.data.get( - "mayaRenderPlugin", "MayaBatch") - - self.payload_skeleton["JobInfo"]["BatchName"] = src_filename - # Job name, as seen in Monitor - self.payload_skeleton["JobInfo"]["Name"] = jobname - # Arbitrary username, for visualisation in Monitor - self.payload_skeleton["JobInfo"]["UserName"] = deadline_user - # Set job priority - self.payload_skeleton["JobInfo"]["Priority"] = \ - self._instance.data.get("priority", self.priority) - - if self.group != "none" and self.group: - self.payload_skeleton["JobInfo"]["Group"] = self.group - - if self.limit_groups: - self.payload_skeleton["JobInfo"]["LimitGroups"] = \ - ",".join(self.limit_groups) - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ - os.path.dirname(output_filename_0).replace("\\", "/") - self.payload_skeleton["JobInfo"]["OutputFilename0"] = \ - output_filename_0.replace("\\", "/") - - self.payload_skeleton["JobInfo"]["Comment"] = comment - self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer - - self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa - # Adding file dependencies. 
- dependencies = instance.context.data["fileDependencies"] - dependencies.append(filepath) - if self.asset_dependencies: - for dependency in dependencies: - key = "AssetDependency" + str(dependencies.index(dependency)) - self.payload_skeleton["JobInfo"][key] = dependency - - # Handle environments ----------------------------------------------- - # We need those to pass them to pype for it to set correct context - keys = [ - "FTRACK_API_KEY", - "FTRACK_API_USER", - "FTRACK_SERVER", - "OPENPYPE_SG_USER", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", - "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS", - "OPENPYPE_VERSION" - ] - # Add mongo url if it's enabled - if instance.context.data.get("deadlinePassMongoUrl"): - keys.append("OPENPYPE_MONGO") - - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) - environment["OPENPYPE_LOG_NO_COLORS"] = "1" - environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True) - # to recognize job from PYPE for turning Event On/Off - environment["OPENPYPE_RENDER_JOB"] = "1" - self.payload_skeleton["JobInfo"].update({ - "EnvironmentKeyValue%d" % index: "{key}={value}".format( - key=key, - value=environment[key] - ) for index, key in enumerate(environment) - }) - # Add options from RenderGlobals------------------------------------- - render_globals = instance.data.get("renderGlobals", {}) - self.payload_skeleton["JobInfo"].update(render_globals) + payload_data = { + "filename": filename, + "filepath": filepath, + "jobname": jobname, + "comment": comment, + "output_filename_0": output_filename_0, + "render_variables": render_variables, + "renderlayer": renderlayer, + "workspace": workspace, + "dirname": dirname, + } # Submit preceding export jobs ------------------------------------- export_job = None assert not all(x in instance.data["families"] for x in ['vrayscene', 'assscene']), ( "Vray Scene and Ass Scene options are mutually exclusive") - if "vrayscene" in instance.data["families"]: - export_job = self._submit_export(payload_data, "vray") - if "assscene" in instance.data["families"]: - export_job = self._submit_export(payload_data, "arnold") - - # Prepare main render job ------------------------------------------- if "vrayscene" in instance.data["families"]: + vray_export_payload = self._get_vray_export_payload(payload_data) + export_job = self.submit(vray_export_payload) + payload = self._get_vray_render_payload(payload_data) + elif "assscene" in instance.data["families"]: + ass_export_payload = self._get_arnold_export_payload(payload_data) + export_job = self.submit(ass_export_payload) + payload = self._get_arnold_render_payload(payload_data) else: payload = self._get_maya_payload(payload_data) @@ -572,267 +349,222 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): if export_job: payload["JobInfo"]["JobDependency0"] = export_job - # Add list of expected files to job --------------------------------- - exp = instance.data.get("expectedFiles") - exp_index = 0 - output_filenames = {} - - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - for _aov, files in exp[0].items(): - col, rem = clique.assemble(files) - if not col and rem: - # we couldn't find any collections but have - # individual files. 
- assert len(rem) == 1, ("Found multiple non related files " - "to render, don't know what to do " - "with them.") - output_file = rem[0] - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - else: - output_file = col[0].format('{head}{padding}{tail}') - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - - output_filenames['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - exp_index += 1 - else: - col, rem = clique.assemble(exp) - if not col and rem: - # we couldn't find any collections but have - # individual files. - assert len(rem) == 1, ("Found multiple non related files " - "to render, don't know what to do " - "with them.") - - output_file = rem[0] - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - else: - output_file = col[0].format('{head}{padding}{tail}') - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - - output_filenames['OutputFilename' + str(exp_index)] = output_file - plugin = payload["JobInfo"]["Plugin"] self.log.info("using render plugin : {}".format(plugin)) # Store output dir for unified publisher (filesequence) instance.data["outputDir"] = os.path.dirname(output_filename_0) - self.preflight_check(instance) - # add jobInfo and pluginInfo variables from Settings - payload["JobInfo"].update(self._job_info) - payload["PluginInfo"].update(self._plugin_info) + payload["JobInfo"].update(self.jobInfo) + payload["PluginInfo"].update(self.pluginInfo) - # Prepare tiles data ------------------------------------------------ if instance.data.get("tileRendering"): - # if we have sequence of files, we need to create tile job for - # every frame + # Prepare tiles data + self._tile_render(instance, payload) + else: + # Submit main render job + self.submit(payload) - payload["JobInfo"]["TileJob"] = True - payload["JobInfo"]["TileJobTilesInX"] = instance.data.get("tilesX") - payload["JobInfo"]["TileJobTilesInY"] = instance.data.get("tilesY") - payload["PluginInfo"]["ImageHeight"] = instance.data.get("resolutionHeight") # noqa: E501 - payload["PluginInfo"]["ImageWidth"] = instance.data.get("resolutionWidth") # noqa: E501 - payload["PluginInfo"]["RegionRendering"] = True + def _tile_render(self, instance, payload): - assembly_payload = { - "AuxFiles": [], - "JobInfo": { - "BatchName": payload["JobInfo"]["BatchName"], - "Frames": 1, - "Name": "{} - Tile Assembly Job".format( - payload["JobInfo"]["Name"]), - "OutputDirectory0": - payload["JobInfo"]["OutputDirectory0"].replace( - "\\", "/"), - "Plugin": self.tile_assembler_plugin, - "MachineLimit": 1 - }, - "PluginInfo": { - "CleanupTiles": 1, - "ErrorOnMissing": True - } + # As collected by super process() + job_info = self.job_info + plugin_info = self.pluginInfo + + # if we have sequence of files, we need to create tile job for + # every frame + + job_info.TileJob = True + job_info.TileJobTilesInX = instance.data.get("tilesX") + job_info.TileJobTilesInY = instance.data.get("tilesY") + + plugin_info["ImageHeight"] = instance.data.get("resolutionHeight") + plugin_info["ImageWidth"] = instance.data.get("resolutionWidth") + plugin_info["RegionRendering"] = True + + assembly_payload = { + "AuxFiles": [], + "JobInfo": { + "BatchName": payload["JobInfo"]["BatchName"], + "Frames": 1, + "Name": "{} - Tile Assembly Job".format( + 
payload["JobInfo"]["Name"]), + "OutputDirectory0": + payload["JobInfo"]["OutputDirectory0"].replace( + "\\", "/"), + "Plugin": self.tile_assembler_plugin, + "MachineLimit": 1 + }, + "PluginInfo": { + "CleanupTiles": 1, + "ErrorOnMissing": True } - assembly_payload["JobInfo"].update(output_filenames) - assembly_payload["JobInfo"]["Priority"] = self._instance.data.get( - "tile_priority", self.tile_priority) - assembly_payload["JobInfo"]["UserName"] = deadline_user + } + assembly_payload["JobInfo"]["Priority"] = self._instance.data.get( + "tile_priority", self.tile_priority) - frame_payloads = [] - assembly_payloads = [] + frame_payloads = [] + assembly_payloads = [] - R_FRAME_NUMBER = re.compile(r".+\.(?P[0-9]+)\..+") # noqa: N806, E501 - REPL_FRAME_NUMBER = re.compile(r"(.+\.)([0-9]+)(\..+)") # noqa: N806, E501 + R_FRAME_NUMBER = re.compile( + r".+\.(?P[0-9]+)\..+") # noqa: N806, E501 + REPL_FRAME_NUMBER = re.compile( + r"(.+\.)([0-9]+)(\..+)") # noqa: N806, E501 - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - # get files from `beauty` - files = exp[0].get("beauty") - # assembly files are used for assembly jobs as we need to put - # together all AOVs - assembly_files = list( - itertools.chain.from_iterable( - [f for _, f in exp[0].items()])) - if not files: - # if beauty doesn't exists, use first aov we found - files = exp[0].get(list(exp[0].keys())[0]) - else: - files = exp - assembly_files = files + exp = instance.data["expectedFiles"] + if isinstance(exp[0], dict): + # we have aovs and we need to iterate over them + # get files from `beauty` + files = exp[0].get("beauty") + # assembly files are used for assembly jobs as we need to put + # together all AOVs + assembly_files = list( + itertools.chain.from_iterable( + [f for _, f in exp[0].items()])) + if not files: + # if beauty doesn't exists, use first aov we found + files = exp[0].get(list(exp[0].keys())[0]) + else: + files = exp + assembly_files = files - frame_jobs = {} + frame_jobs = {} - file_index = 1 - for file in files: - frame = re.search(R_FRAME_NUMBER, file).group("frame") - new_payload = copy.deepcopy(payload) - new_payload["JobInfo"]["Name"] = \ - "{} (Frame {} - {} tiles)".format( - payload["JobInfo"]["Name"], - frame, - instance.data.get("tilesX") * instance.data.get("tilesY") # noqa: E501 + file_index = 1 + for file in files: + frame = re.search(R_FRAME_NUMBER, file).group("frame") + new_payload = copy.deepcopy(payload) + new_payload["JobInfo"]["Name"] = \ + "{} (Frame {} - {} tiles)".format( + payload["JobInfo"]["Name"], + frame, + instance.data.get("tilesX") * instance.data.get("tilesY") + # noqa: E501 ) - self.log.info( - "... preparing job {}".format( - new_payload["JobInfo"]["Name"])) - new_payload["JobInfo"]["TileJobFrame"] = frame + self.log.info( + "... 
preparing job {}".format( + new_payload["JobInfo"]["Name"])) + new_payload["JobInfo"]["TileJobFrame"] = frame - tiles_data = _format_tiles( + tiles_data = _format_tiles( + file, 0, + instance.data.get("tilesX"), + instance.data.get("tilesY"), + instance.data.get("resolutionWidth"), + instance.data.get("resolutionHeight"), + payload["PluginInfo"]["OutputFilePrefix"] + )[0] + new_payload["JobInfo"].update(tiles_data["JobInfo"]) + new_payload["PluginInfo"].update(tiles_data["PluginInfo"]) + + self.log.info("hashing {} - {}".format(file_index, file)) + job_hash = hashlib.sha256( + ("{}_{}".format(file_index, file)).encode("utf-8")) + frame_jobs[frame] = job_hash.hexdigest() + new_payload["JobInfo"]["ExtraInfo0"] = job_hash.hexdigest() + new_payload["JobInfo"]["ExtraInfo1"] = file + + frame_payloads.append(new_payload) + file_index += 1 + + file_index = 1 + for file in assembly_files: + frame = re.search(R_FRAME_NUMBER, file).group("frame") + new_assembly_payload = copy.deepcopy(assembly_payload) + new_assembly_payload["JobInfo"]["Name"] = \ + "{} (Frame {})".format( + assembly_payload["JobInfo"]["Name"], + frame) + new_assembly_payload["JobInfo"]["OutputFilename0"] = re.sub( + REPL_FRAME_NUMBER, + "\\1{}\\3".format("#" * len(frame)), file) + + new_assembly_payload["PluginInfo"]["Renderer"] = \ + self._instance.data["renderer"] # noqa: E501 + new_assembly_payload["JobInfo"]["ExtraInfo0"] = frame_jobs[ + frame] # noqa: E501 + new_assembly_payload["JobInfo"]["ExtraInfo1"] = file + assembly_payloads.append(new_assembly_payload) + file_index += 1 + + self.log.info( + "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) + + url = "{}/api/jobs".format(self.deadline_url) + tiles_count = instance.data.get("tilesX") * instance.data.get( + "tilesY") # noqa: E501 + + for tile_job in frame_payloads: + response = self.submit(tile_job) + + job_id = response.json()["_id"] + hash = response.json()["Props"]["Ex0"] + + for assembly_job in assembly_payloads: + if assembly_job["JobInfo"]["ExtraInfo0"] == hash: + assembly_job["JobInfo"]["JobDependency0"] = job_id + + for assembly_job in assembly_payloads: + file = assembly_job["JobInfo"]["ExtraInfo1"] + # write assembly job config files + now = datetime.now() + + config_file = os.path.join( + os.path.dirname(output_filename_0), + "{}_config_{}.txt".format( + os.path.splitext(file)[0], + now.strftime("%Y_%m_%d_%H_%M_%S") + ) + ) + + try: + if not os.path.isdir(os.path.dirname(config_file)): + os.makedirs(os.path.dirname(config_file)) + except OSError: + # directory is not available + self.log.warning( + "Path is unreachable: `{}`".format( + os.path.dirname(config_file))) + + # add config file as job auxFile + assembly_job["AuxFiles"] = [config_file] + + with open(config_file, "w") as cf: + print("TileCount={}".format(tiles_count), file=cf) + print("ImageFileName={}".format(file), file=cf) + print("ImageWidth={}".format( + instance.data.get("resolutionWidth")), file=cf) + print("ImageHeight={}".format( + instance.data.get("resolutionHeight")), file=cf) + + tiles = _format_tiles( file, 0, instance.data.get("tilesX"), instance.data.get("tilesY"), instance.data.get("resolutionWidth"), instance.data.get("resolutionHeight"), payload["PluginInfo"]["OutputFilePrefix"] - )[0] - new_payload["JobInfo"].update(tiles_data["JobInfo"]) - new_payload["PluginInfo"].update(tiles_data["PluginInfo"]) + )[1] + sorted(tiles) + for k, v in tiles.items(): + print("{}={}".format(k, v), file=cf) - self.log.info("hashing {} - {}".format(file_index, file)) - job_hash = 
hashlib.sha256( - ("{}_{}".format(file_index, file)).encode("utf-8")) - frame_jobs[frame] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo0"] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo1"] = file + job_idx = 1 + instance.data["assemblySubmissionJobs"] = [] + for ass_job in assembly_payloads: + self.log.info("submitting assembly job {} of {}".format( + job_idx, len(assembly_payloads) + )) + response = self.submit(ass_job) - frame_payloads.append(new_payload) - file_index += 1 + instance.data["assemblySubmissionJobs"].append( + response.json()["_id"]) + job_idx += 1 - file_index = 1 - for file in assembly_files: - frame = re.search(R_FRAME_NUMBER, file).group("frame") - new_assembly_payload = copy.deepcopy(assembly_payload) - new_assembly_payload["JobInfo"]["Name"] = \ - "{} (Frame {})".format( - assembly_payload["JobInfo"]["Name"], - frame) - new_assembly_payload["JobInfo"]["OutputFilename0"] = re.sub( - REPL_FRAME_NUMBER, - "\\1{}\\3".format("#" * len(frame)), file) - - new_assembly_payload["PluginInfo"]["Renderer"] = self._instance.data["renderer"] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo0"] = frame_jobs[frame] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo1"] = file - assembly_payloads.append(new_assembly_payload) - file_index += 1 - - self.log.info( - "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) - - url = "{}/api/jobs".format(self.deadline_url) - tiles_count = instance.data.get("tilesX") * instance.data.get("tilesY") # noqa: E501 - - for tile_job in frame_payloads: - response = requests_post(url, json=tile_job) - if not response.ok: - raise Exception(response.text) - - job_id = response.json()["_id"] - hash = response.json()["Props"]["Ex0"] - - for assembly_job in assembly_payloads: - if assembly_job["JobInfo"]["ExtraInfo0"] == hash: - assembly_job["JobInfo"]["JobDependency0"] = job_id - - for assembly_job in assembly_payloads: - file = assembly_job["JobInfo"]["ExtraInfo1"] - # write assembly job config files - now = datetime.now() - - config_file = os.path.join( - os.path.dirname(output_filename_0), - "{}_config_{}.txt".format( - os.path.splitext(file)[0], - now.strftime("%Y_%m_%d_%H_%M_%S") - ) - ) - - try: - if not os.path.isdir(os.path.dirname(config_file)): - os.makedirs(os.path.dirname(config_file)) - except OSError: - # directory is not available - self.log.warning( - "Path is unreachable: `{}`".format( - os.path.dirname(config_file))) - - # add config file as job auxFile - assembly_job["AuxFiles"] = [config_file] - - with open(config_file, "w") as cf: - print("TileCount={}".format(tiles_count), file=cf) - print("ImageFileName={}".format(file), file=cf) - print("ImageWidth={}".format( - instance.data.get("resolutionWidth")), file=cf) - print("ImageHeight={}".format( - instance.data.get("resolutionHeight")), file=cf) - - tiles = _format_tiles( - file, 0, - instance.data.get("tilesX"), - instance.data.get("tilesY"), - instance.data.get("resolutionWidth"), - instance.data.get("resolutionHeight"), - payload["PluginInfo"]["OutputFilePrefix"] - )[1] - sorted(tiles) - for k, v in tiles.items(): - print("{}={}".format(k, v), file=cf) - - job_idx = 1 - instance.data["assemblySubmissionJobs"] = [] - for ass_job in assembly_payloads: - self.log.info("submitting assembly job {} of {}".format( - job_idx, len(assembly_payloads) - )) - self.log.debug(json.dumps(ass_job, indent=4, sort_keys=True)) - response = requests_post(url, json=ass_job) - if not response.ok: - raise Exception(response.text) - - 
instance.data["assemblySubmissionJobs"].append( - response.json()["_id"]) - job_idx += 1 - - instance.data["jobBatchName"] = payload["JobInfo"]["BatchName"] - self.log.info("Setting batch name on instance: {}".format( - instance.data["jobBatchName"])) - else: - # Submit job to farm -------------------------------------------- - self.log.info("Submitting ...") - self.log.debug(json.dumps(payload, indent=4, sort_keys=True)) - - # E.g. http://192.168.0.1:8082/api/jobs - url = "{}/api/jobs".format(self.deadline_url) - response = requests_post(url, json=payload) - if not response.ok: - raise Exception(response.text) - instance.data["deadlineSubmissionJob"] = response.json() + instance.data["jobBatchName"] = payload["JobInfo"]["BatchName"] + self.log.info("Setting batch name on instance: {}".format( + instance.data["jobBatchName"])) def _get_maya_payload(self, data): payload = copy.deepcopy(self.payload_skeleton) @@ -1045,39 +777,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): payload["PluginInfo"].update(plugin_info) return payload - def _submit_export(self, data, format): - if format == "vray": - payload = self._get_vray_export_payload(data) - self.log.info("Submitting vrscene export job.") - elif format == "arnold": - payload = self._get_arnold_export_payload(data) - self.log.info("Submitting ass export job.") - - url = "{}/api/jobs".format(self.deadline_url) - response = requests_post(url, json=payload) - if not response.ok: - self.log.error("Submition failed!") - self.log.error(response.status_code) - self.log.error(response.content) - self.log.debug(payload) - raise RuntimeError(response.text) - - dependency = response.json() - return dependency["_id"] - - def preflight_check(self, instance): - """Ensure the startFrame, endFrame and byFrameStep are integers.""" - for key in ("frameStartHandle", "frameEndHandle", "byFrameStep"): - value = instance.data[key] - - if int(value) == value: - continue - - self.log.warning( - "%f=%d was rounded off to nearest integer" - % (value, int(value)) - ) - def format_vray_output_filename(self, filename, template, dir=False): """Format the expected output file of the Export job. @@ -1160,3 +859,209 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "Applied {} patch to scene.".format( patches[i]["name"])) return file + + +def _format_tiles( + filename, index, tiles_x, tiles_y, + width, height, prefix): + """Generate tile entries for Deadline tile job. + + Returns two dictionaries - one that can be directly used in Deadline + job, second that can be used for Deadline Assembly job configuration + file. + + This will format tile names: + + Example:: + { + "OutputFilename0Tile0": "_tile_1x1_4x4_Main_beauty.1001.exr", + "OutputFilename0Tile1": "_tile_2x1_4x4_Main_beauty.1001.exr" + } + + And add tile prefixes like: + + Example:: + Image prefix is: + `maya///_` + + Result for tile 0 for 4x4 will be: + `maya///_tile_1x1_4x4__` + + Calculating coordinates is tricky as in Job they are defined as top, + left, bottom, right with zero being in top-left corner. But Assembler + configuration file takes tile coordinates as X, Y, Width and Height and + zero is bottom left corner. + + Args: + filename (str): Filename to process as tiles. + index (int): Index of that file if it is sequence. + tiles_x (int): Number of tiles in X. + tiles_y (int): Number if tikes in Y. + width (int): Width resolution of final image. + height (int): Height resolution of final image. + prefix (str): Image prefix. 
+ + Returns: + (dict, dict): Tuple of two dictionaires - first can be used to + extend JobInfo, second has tiles x, y, width and height + used for assembler configuration. + + """ + tile = 0 + out = {"JobInfo": {}, "PluginInfo": {}} + cfg = OrderedDict() + w_space = width / tiles_x + h_space = height / tiles_y + + cfg["TilesCropped"] = "False" + + for tile_x in range(1, tiles_x + 1): + for tile_y in reversed(range(1, tiles_y + 1)): + tile_prefix = "_tile_{}x{}_{}x{}_".format( + tile_x, tile_y, + tiles_x, + tiles_y + ) + out_tile_index = "OutputFilename{}Tile{}".format( + str(index), tile + ) + new_filename = "{}/{}{}".format( + os.path.dirname(filename), + tile_prefix, + os.path.basename(filename) + ) + out["JobInfo"][out_tile_index] = new_filename + out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ + "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) + + out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 + out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 + out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 + out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 + + cfg["Tile{}".format(tile)] = new_filename + cfg["Tile{}Tile".format(tile)] = new_filename + cfg["Tile{}FileName".format(tile)] = new_filename + cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space + + cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space) + + cfg["Tile{}Width".format(tile)] = w_space + cfg["Tile{}Height".format(tile)] = h_space + + tile += 1 + return out, cfg + + +def get_renderer_variables(renderlayer, root): + """Retrieve the extension which has been set in the VRay settings. + + Will return None if the current renderer is not VRay + For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which + start with `rs`. Use the actual node name, do NOT use the `nice name` + + Args: + renderlayer (str): the node name of the renderlayer. + root (str): base path to render + + Returns: + dict + + """ + renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer()) + render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"]) + + padding = cmds.getAttr("{}.{}".format(render_attrs["node"], + render_attrs["padding"])) + + filename_0 = cmds.renderSettings( + fullPath=True, + gin="#" * int(padding), + lut=True, + layer=renderlayer or lib.get_current_renderlayer())[0] + filename_0 = re.sub('_', '_beauty', + filename_0, flags=re.IGNORECASE) + prefix_attr = "defaultRenderGlobals.imageFilePrefix" + + scene = cmds.file(query=True, sceneName=True) + scene, _ = os.path.splitext(os.path.basename(scene)) + + if renderer == "vray": + renderlayer = renderlayer.split("_")[-1] + # Maya's renderSettings function does not return V-Ray file extension + # so we get the extension from vraySettings + extension = cmds.getAttr("vraySettings.imageFormatStr") + + # When V-Ray image format has not been switched once from default .png + # the getAttr command above returns None. As such we explicitly set + # it to `.png` + if extension is None: + extension = "png" + + if extension in ["exr (multichannel)", "exr (deep)"]: + extension = "exr" + + prefix_attr = "vraySettings.fileNamePrefix" + filename_prefix = cmds.getAttr(prefix_attr) + # we need to determine path for vray as maya `renderSettings` query + # does not work for vray. 
+ + filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = "{}.{}.{}".format( + filename_0, "#" * int(padding), extension) + filename_0 = os.path.normpath(os.path.join(root, filename_0)) + elif renderer == "renderman": + prefix_attr = "rmanGlobals.imageFileFormat" + # NOTE: This is guessing extensions from renderman display types. + # Some of them are just framebuffers, d_texture format can be + # set in display setting. We set those now to None, but it + # should be handled more gracefully. + display_types = { + "d_deepexr": "exr", + "d_it": None, + "d_null": None, + "d_openexr": "exr", + "d_png": "png", + "d_pointcloud": "ptc", + "d_targa": "tga", + "d_texture": None, + "d_tiff": "tif" + } + + extension = display_types.get( + cmds.listConnections("rmanDefaultDisplay.displayType")[0], + "exr" + ) or "exr" + + filename_prefix = "{}/{}".format( + cmds.getAttr("rmanGlobals.imageOutputDir"), + cmds.getAttr("rmanGlobals.imageFileFormat") + ) + + renderlayer = renderlayer.split("_")[-1] + + filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', "#" * int(padding), filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', extension, filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = os.path.normpath(os.path.join(root, filename_0)) + elif renderer == "redshift": + # mapping redshift extension dropdown values to strings + ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"] + extension = ext_mapping[ + cmds.getAttr("redshiftOptions.imageFormat") + ] + else: + # Get the extension, getAttr defaultRenderGlobals.imageFormat + # returns an index number. + filename_base = os.path.basename(filename_0) + extension = os.path.splitext(filename_base)[-1].strip(".") + + filename_prefix = cmds.getAttr(prefix_attr) + return {"ext": extension, + "filename_prefix": filename_prefix, + "padding": padding, + "filename_0": filename_0} + + From 0f95f87d773ddcbe979fe28d5f0196f1befad38e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 24 Aug 2022 14:59:14 +0200 Subject: [PATCH 02/82] More draft refactoring - still not functional (WIP commit for my own sanity) --- .../plugins/publish/submit_maya_deadline.py | 185 ++++-------------- 1 file changed, 35 insertions(+), 150 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 6dfa48a9f8..5a7d0b98c6 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -31,7 +31,6 @@ import clique from maya import cmds -from openpype.hosts.maya.api import lib from openpype.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline @@ -87,11 +86,15 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context - filepath = context.data["currentFile"] - filename = os.path.basename(filepath) + # Always use the original work file name for the Job name even when + # rendering is done from the published Work File. The original work + # file name is clearer because it can also have subversion strings, + # etc. which are stripped for the published file. 
+ src_filepath = context.data["currentFile"] + src_filename = os.path.basename(src_filepath) - job_info.Name = "%s - %s" % (filename, instance.name) - job_info.BatchName = filename + job_info.Name = "%s - %s" % (src_filename, instance.name) + job_info.BatchName = src_filename job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") job_info.UserName = context.data.get( "deadlineUser", getpass.getuser()) @@ -116,9 +119,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if self.limit_groups: job_info.LimitGroups = ",".join(self.limit_groups) - self.payload_skeleton["JobInfo"]["Name"] = jobname - self.payload_skeleton["JobInfo"]["BatchName"] = src_filename - # Optional, enable double-click to preview rendered # frames from Deadline Monitor self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ @@ -227,11 +227,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): return attr.asdict(plugin_info) def process_submission(self): - # Override to NOT submit by default when calling super process() method - pass - def process(self, instance): - super(MayaSubmitDeadline, self).process(instance) + instance = self._instance + context = instance.context + + # Generated by AbstractSubmitDeadline. The `job_info`, `plugin_info` + # and `aux_files` are the skeleton payloads that are the basis for + # all the maya submissions + job_info = self.job_info + plugin_info = self.plugin_info + aux_files = self.aux_files + filepath = self.scene_path # publish if `use_publish` else workfile # TODO: Avoid the need for this logic here, needed for submit publish # Store output dir for unified publisher (filesequence) @@ -241,21 +247,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.limit_groups = self.limit - context = instance.context - workspace = context.data["workspaceDir"] - - filepath = None - patches = ( - context.data["project_settings"].get( - "deadline", {}).get( - "publish", {}).get( - "MayaSubmitDeadline", {}).get( - "scene_patches", {}) - ) + # Patch workfile (only when use_published is enabled) + if self.use_published: + patches = ( + context.data["project_settings"].get( + "deadline", {}).get( + "publish", {}).get( + "MayaSubmitDeadline", {}).get( + "scene_patches", {}) + ) + self._patch_workfile(filepath, patches) # todo: on self.use_published originally use template_data["representation"] using .get("name") instead of .get("ext") # todo: on self.use_published replace path for publishRenderMetadataFolder - # todo: on self.use_published apply scene patches to workfile instance # rep = i.data.get("representations")[0].get("name") # if instance.data.get("publishRenderMetadataFolder"): @@ -270,9 +274,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # patched_file = self._patch_workfile(filepath, patches) # patched_files.append(patched_file) - filepath = self.scene_path # collect by super().process - # Gather needed data ------------------------------------------------ + workspace = context.data["workspaceDir"] default_render_file = instance.context.data.get('project_settings')\ .get('maya')\ .get('RenderSettings')\ @@ -281,14 +284,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): dirname = os.path.join(workspace, default_render_file) renderlayer = instance.data['setMembers'] # rs_beauty - # Always use the original work file name for the Job name even when - # rendering is done from the published Work File. 
The original work - # file name is clearer because it can also have subversion strings, - # etc. which are stripped for the published file. - src_filename = os.path.basename(context.data["currentFile"]) - jobname = "%s - %s" % (src_filename, instance.name) - # Get the variables depending on the renderer + # TODO: Find replacement logic for `get_renderer_variables` through + # what is collected for the render or is implemented in maya + # api `lib_renderproducts` render_variables = get_renderer_variables(renderlayer, dirname) filename_0 = render_variables["filename_0"] if self.use_published: @@ -842,8 +841,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): str: Patched file path or None """ - if os.path.splitext(file)[1].lower() != ".ma" or not patches: - return None + if not patches or os.path.splitext(file)[1].lower() != ".ma": + return compiled_regex = [re.compile(p["regex"]) for p in patches] with open(file, "r+") as pf: @@ -931,7 +930,7 @@ def _format_tiles( os.path.basename(filename) ) out["JobInfo"][out_tile_index] = new_filename - out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ + out["PluginInfo"]["RegionPrefix{}".format(tile)] = \ "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 @@ -951,117 +950,3 @@ def _format_tiles( tile += 1 return out, cfg - - -def get_renderer_variables(renderlayer, root): - """Retrieve the extension which has been set in the VRay settings. - - Will return None if the current renderer is not VRay - For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which - start with `rs`. Use the actual node name, do NOT use the `nice name` - - Args: - renderlayer (str): the node name of the renderlayer. - root (str): base path to render - - Returns: - dict - - """ - renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer()) - render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"]) - - padding = cmds.getAttr("{}.{}".format(render_attrs["node"], - render_attrs["padding"])) - - filename_0 = cmds.renderSettings( - fullPath=True, - gin="#" * int(padding), - lut=True, - layer=renderlayer or lib.get_current_renderlayer())[0] - filename_0 = re.sub('_', '_beauty', - filename_0, flags=re.IGNORECASE) - prefix_attr = "defaultRenderGlobals.imageFilePrefix" - - scene = cmds.file(query=True, sceneName=True) - scene, _ = os.path.splitext(os.path.basename(scene)) - - if renderer == "vray": - renderlayer = renderlayer.split("_")[-1] - # Maya's renderSettings function does not return V-Ray file extension - # so we get the extension from vraySettings - extension = cmds.getAttr("vraySettings.imageFormatStr") - - # When V-Ray image format has not been switched once from default .png - # the getAttr command above returns None. As such we explicitly set - # it to `.png` - if extension is None: - extension = "png" - - if extension in ["exr (multichannel)", "exr (deep)"]: - extension = "exr" - - prefix_attr = "vraySettings.fileNamePrefix" - filename_prefix = cmds.getAttr(prefix_attr) - # we need to determine path for vray as maya `renderSettings` query - # does not work for vray. 
- - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = "{}.{}.{}".format( - filename_0, "#" * int(padding), extension) - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "renderman": - prefix_attr = "rmanGlobals.imageFileFormat" - # NOTE: This is guessing extensions from renderman display types. - # Some of them are just framebuffers, d_texture format can be - # set in display setting. We set those now to None, but it - # should be handled more gracefully. - display_types = { - "d_deepexr": "exr", - "d_it": None, - "d_null": None, - "d_openexr": "exr", - "d_png": "png", - "d_pointcloud": "ptc", - "d_targa": "tga", - "d_texture": None, - "d_tiff": "tif" - } - - extension = display_types.get( - cmds.listConnections("rmanDefaultDisplay.displayType")[0], - "exr" - ) or "exr" - - filename_prefix = "{}/{}".format( - cmds.getAttr("rmanGlobals.imageOutputDir"), - cmds.getAttr("rmanGlobals.imageFileFormat") - ) - - renderlayer = renderlayer.split("_")[-1] - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', "#" * int(padding), filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', extension, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "redshift": - # mapping redshift extension dropdown values to strings - ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"] - extension = ext_mapping[ - cmds.getAttr("redshiftOptions.imageFormat") - ] - else: - # Get the extension, getAttr defaultRenderGlobals.imageFormat - # returns an index number. 
- filename_base = os.path.basename(filename_0) - extension = os.path.splitext(filename_base)[-1].strip(".") - - filename_prefix = cmds.getAttr(prefix_attr) - return {"ext": extension, - "filename_prefix": filename_prefix, - "padding": padding, - "filename_0": filename_0} - - From 0d8cf12618cee76a5e144429a3459074b14e4adf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 17:14:22 +0200 Subject: [PATCH 03/82] define new source where publish templates are not defined in integrate plubin --- .../defaults/project_settings/global.json | 3 + .../defaults/project_settings/maya.json | 2 +- .../schemas/schema_global_publish.json | 4 ++ .../schemas/schema_global_tools.json | 57 +++++++++++++++++++ 4 files changed, 65 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 0ff9363ba7..3e00cd725e 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -414,6 +414,9 @@ "filter_families": [] } ] + }, + "publish": { + "template_name_profiles": [] } }, "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets\": {\"characters\": {}, \"locations\": {}}, \"shots\": {}}}", diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 28f6d23e4d..38063bc2c1 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -980,4 +980,4 @@ "ValidateNoAnimation": false } } -} +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index e1aa230b49..c24c88d04a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -642,6 +642,10 @@ ] } }, + { + "type": "label", + "label": "NOTE: Publish template profiles settings were moved to Tools/Publish/Template name profiles. Please move values there." + }, { "type": "list", "key": "template_name_profiles", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index f8c9482e5f..7dc44c2842 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -284,6 +284,63 @@ } } ] + }, + { + "type": "dict", + "key": "publish", + "label": "Publish", + "children": [ + { + "type": "label", + "label": "NOTE: For backwards compatibility can be value empty and in that case are used values from IntegrateAssetNew. This will change in future so please move all values here as soon as possible." 
+ }, + { + "type": "list", + "key": "template_name_profiles", + "label": "Template name profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "label", + "label": "" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template_name", + "label": "Template name" + } + ] + } + } + ] } ] } From 2b6c4659237259b6c691dd2b5dc1db927b47fcd3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:03:30 +0200 Subject: [PATCH 04/82] added helper functions to get template name --- openpype/pipeline/publish/__init__.py | 4 ++ openpype/pipeline/publish/contants.py | 1 + openpype/pipeline/publish/lib.py | 97 ++++++++++++++++++++++++++- 3 files changed, 100 insertions(+), 2 deletions(-) create mode 100644 openpype/pipeline/publish/contants.py diff --git a/openpype/pipeline/publish/__init__.py b/openpype/pipeline/publish/__init__.py index aa7fe0bdbf..a2aa61c4d5 100644 --- a/openpype/pipeline/publish/__init__.py +++ b/openpype/pipeline/publish/__init__.py @@ -10,6 +10,8 @@ from .publish_plugins import ( ) from .lib import ( + get_publish_template_name, + DiscoverResult, publish_plugins_discover, load_help_content_from_plugin, @@ -33,6 +35,8 @@ __all__ = ( "OpenPypePyblishPluginMixin", "OptionalPyblishPluginMixin", + "get_publish_template_name", + "DiscoverResult", "publish_plugins_discover", "load_help_content_from_plugin", diff --git a/openpype/pipeline/publish/contants.py b/openpype/pipeline/publish/contants.py new file mode 100644 index 0000000000..958675ecc1 --- /dev/null +++ b/openpype/pipeline/publish/contants.py @@ -0,0 +1 @@ +DEFAULT_PUBLISH_TEMPLATE = "publish" diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 9060a0bf4b..7c3ea22c06 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -8,8 +8,101 @@ import six import pyblish.plugin import pyblish.api -from openpype.lib import Logger -from openpype.settings import get_project_settings, get_system_settings +from openpype.lib import Logger, filter_profiles +from openpype.settings import ( + get_project_settings, + get_system_settings, +) + +from .contants import DEFAULT_PUBLISH_TEMPLATE + + +def get_template_name_profiles(project_name=None, project_settings=None): + """Receive profiles for publish template keys. + + At least one of arguments must be passed. + + Args: + project_name (str): Name of project where to look for templates. + project_settings(Dic[str, Any]): Prepared project settings. + + Returns: + List[Dict[str, Any]]: Publish template profiles. + """ + + if not project_name and not project_settings: + raise ValueError(( + "Both project name and project settings are missing." + " At least one must be entered." 
+ )) + + if not project_settings: + project_settings = get_project_settings(project_name) + + profiles = ( + project_settings + ["global"] + ["tools"] + ["publish"] + ["template_name_profiles"] + ) + if profiles: + return profiles + + # Use legacy approach for cases new settings are not filled yet for the + # project + return ( + project_settings + ["global"] + ["publish"] + ["IntegrateAssetNew"] + ["template_name_profiles"] + ) + + +def get_publish_template_name( + project_name, + host_name, + family, + task_name, + task_type, + project_settings=None, + logger=None +): + """Get template name which should be used for passed context. + + Publish templates are filtered by host name, family, task name and + task type. + + Default template which is used at if profiles are not available or profile + has empty value is defined by 'DEFAULT_PUBLISH_TEMPLATE' constant. + + Args: + project_name (str): Name of project where to look for settings. + host_name (str): Name of host integration. + family (str): Family for which should be found template. + task_name (str): Task name on which is intance working. + task_type (str): Task type on which is intance working. + project_setting (Dict[str, Any]): Prepared project settings. + logger (logging.Logger): Custom logger used for 'filter_profiles' + function. + + Returns: + str: Template name which should be used for integration. + """ + + template = None + filter_criteria = { + "hosts": host_name, + "families": family, + "tasks": task_name, + "task_types": task_type, + } + profiles = get_template_name_profiles(project_name, project_settings) + profile = filter_profiles(profiles, filter_criteria, logger=logger) + if profile: + template = profile["template_name"] + return template or DEFAULT_PUBLISH_TEMPLATE class DiscoverResult: From 96138a0b73ba6a3f9757283853da9cd1aa85c023 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:22:49 +0200 Subject: [PATCH 05/82] use new functions in integrators --- openpype/plugins/publish/integrate.py | 61 ++++++-------------- openpype/plugins/publish/integrate_legacy.py | 21 +++---- 2 files changed, 27 insertions(+), 55 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index f99c718f8a..56d2621015 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -26,7 +26,10 @@ from openpype.lib import source_hash from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction from openpype.pipeline import legacy_io -from openpype.pipeline.publish import KnownPublishError +from openpype.pipeline.publish import ( + KnownPublishError, + get_publish_template_name, +) log = logging.getLogger(__name__) @@ -792,52 +795,26 @@ class IntegrateAsset(pyblish.api.InstancePlugin): def get_template_name(self, instance): """Return anatomy template name to use for integration""" - # Define publish template name from profiles - filter_criteria = self.get_profile_filter_criteria(instance) - template_name_profiles = self._get_template_name_profiles(instance) - profile = filter_profiles( - template_name_profiles, - filter_criteria, - logger=self.log - ) - - if profile: - return profile["template_name"] - return self.default_template_name - - def _get_template_name_profiles(self, instance): - """Receive profiles for publish template keys. - - Reuse template name profiles from legacy integrator. 
Goal is to move - the profile settings out of plugin settings but until that happens we - want to be able set it at one place and don't break backwards - compatibility (more then once). - """ - - return ( - instance.context.data["project_settings"] - ["global"] - ["publish"] - ["IntegrateAssetNew"] - ["template_name_profiles"] - ) - - def get_profile_filter_criteria(self, instance): - """Return filter criteria for `filter_profiles`""" # Anatomy data is pre-filled by Collectors - anatomy_data = instance.data["anatomyData"] + + project_name = legacy_io.active_project() # Task can be optional in anatomy data - task = anatomy_data.get("task", {}) + host_name = instance.context.data["hostName"] + anatomy_data = instance.data["anatomyData"] + family = anatomy_data["family"] + task_info = anatomy_data.get("task") or {} - # Return filter criteria - return { - "families": anatomy_data["family"], - "tasks": task.get("name"), - "task_types": task.get("type"), - "hosts": instance.context.data["hostName"], - } + return get_publish_template_name( + project_name, + host_name, + family, + task_name=task_info.get("name"), + task_type=task_info.get("type"), + project_settings=instance.context.data["project_settings"], + logger=self.log + ) def get_rootless_path(self, anatomy, path): """Returns, if possible, path without absolute portion from root diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index b90b61f587..fedaae794a 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -33,6 +33,7 @@ from openpype.lib import ( TemplateUnsolved ) from openpype.pipeline import legacy_io +from openpype.pipeline.publish import get_publish_template_name # this is needed until speedcopy for linux is fixed if sys.platform == "win32": @@ -388,22 +389,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): family = self.main_family_from_instance(instance) - key_values = { - "families": family, - "tasks": task_name, - "hosts": instance.context.data["hostName"], - "task_types": task_type - } - profile = filter_profiles( - self.template_name_profiles, - key_values, + template_name = get_publish_template_name( + project_name, + instance.context.data["hostName"], + family, + task_name=task_info.get("name"), + task_type=task_info.get("type"), + project_settings=instance.context.data["project_settings"], logger=self.log ) - template_name = "publish" - if profile: - template_name = profile["template_name"] - published_representations = {} for idx, repre in enumerate(repres): published_files = [] From c7108ac7fbad46fe2aafe669498cb3755d9c7730 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:23:37 +0200 Subject: [PATCH 06/82] modified imports in integrators --- openpype/plugins/publish/integrate.py | 6 +++--- openpype/plugins/publish/integrate_legacy.py | 11 ++++++----- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 56d2621015..8b60ea3b51 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -5,6 +5,9 @@ import copy import clique import six +from bson.objectid import ObjectId +import pyblish.api + from openpype.client.operations import ( OperationsSession, new_subset_document, @@ -14,8 +17,6 @@ from openpype.client.operations import ( prepare_version_update_data, prepare_representation_update_data, ) -from bson.objectid import ObjectId -import pyblish.api from 
openpype.client import ( get_representations, @@ -23,7 +24,6 @@ from openpype.client import ( get_version_by_name, ) from openpype.lib import source_hash -from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction from openpype.pipeline import legacy_io from openpype.pipeline.publish import ( diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index fedaae794a..0e157c9d1f 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -15,7 +15,6 @@ from bson.objectid import ObjectId from pymongo import DeleteOne, InsertOne import pyblish.api -import openpype.api from openpype.client import ( get_asset_by_name, get_subset_by_id, @@ -25,12 +24,14 @@ from openpype.client import ( get_representations, get_archived_representations, ) -from openpype.lib.profiles_filtering import filter_profiles from openpype.lib import ( prepare_template_data, create_hard_link, StringTemplate, - TemplateUnsolved + TemplateUnsolved, + source_hash, + filter_profiles, + get_local_site_id, ) from openpype.pipeline import legacy_io from openpype.pipeline.publish import get_publish_template_name @@ -1053,7 +1054,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): for _src, dest in resources: path = self.get_rootless_path(anatomy, dest) dest = self.get_dest_temp_url(dest) - file_hash = openpype.api.source_hash(dest) + file_hash = source_hash(dest) if self.TMP_FILE_EXT and \ ',{}'.format(self.TMP_FILE_EXT) in file_hash: file_hash = file_hash.replace(',{}'.format(self.TMP_FILE_EXT), @@ -1163,7 +1164,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def _get_sites(self, sync_project_presets): """Returns tuple (local_site, remote_site)""" - local_site_id = openpype.api.get_local_site_id() + local_site_id = get_local_site_id() local_site = sync_project_presets["config"]. 
\ get("active_site", "studio").strip() From c76a1a1dbbe8e705b06ebf02f37237cf7dda98fd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:52:43 +0200 Subject: [PATCH 07/82] added settings for hero templates and changed 'tasks' to 'task_names' --- .../defaults/project_settings/global.json | 3 +- .../schemas/schema_global_tools.json | 47 +++++++++++++++++-- 2 files changed, 45 insertions(+), 5 deletions(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 3e00cd725e..8692f95a04 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -416,7 +416,8 @@ ] }, "publish": { - "template_name_profiles": [] + "template_name_profiles": [], + "hero_template_name_profiles": [] } }, "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets\": {\"characters\": {}, \"locations\": {}}, \"shots\": {}}}", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index 7dc44c2842..c919cd73c5 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -303,9 +303,47 @@ "type": "dict", "children": [ { - "type": "label", - "label": "" + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "task_names", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template_name", + "label": "Template name" + } + ] + } + }, + { + "type": "list", + "key": "hero_template_name_profiles", + "label": "Hero template name profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ { "key": "families", "label": "Families", @@ -324,7 +362,7 @@ "type": "task-types-enum" }, { - "key": "tasks", + "key": "task_names", "label": "Task names", "type": "list", "object_type": "text" @@ -335,7 +373,8 @@ { "type": "text", "key": "template_name", - "label": "Template name" + "label": "Template name", + "tooltip": "Name of template from Anatomy templates" } ] } From 9d4416719b4a99d50b0d411b5548a8afa8072240 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:53:33 +0200 Subject: [PATCH 08/82] convert legacy to new settings by replacing 'tasks' with 'task_names' --- openpype/pipeline/publish/lib.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 7c3ea22c06..03dfbadfcc 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -2,6 +2,7 @@ import os import sys import types import inspect +import copy import xml.etree.ElementTree import six @@ -47,17 +48,23 @@ def get_template_name_profiles(project_name=None, project_settings=None): ["template_name_profiles"] ) if profiles: - return profiles + return copy.deepcopy(profiles) # Use legacy approach for cases new settings are not filled yet for the # project - 
return ( + legacy_profiles = ( project_settings ["global"] ["publish"] ["IntegrateAssetNew"] ["template_name_profiles"] ) + # Replace "tasks" key with "task_names" + profiles = [] + for profile in copy.deepcopy(legacy_profiles): + profile["task_names"] = profile.pop("tasks", []) + profiles.append(profile) + return profiles def get_publish_template_name( @@ -95,7 +102,7 @@ def get_publish_template_name( filter_criteria = { "hosts": host_name, "families": family, - "tasks": task_name, + "task_names": task_name, "task_types": task_type, } profiles = get_template_name_profiles(project_name, project_settings) From 63f5b5f2ab40a94c7496b8f08fa19204a5687b5a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:53:49 +0200 Subject: [PATCH 09/82] added ability to get hero version template name --- openpype/pipeline/publish/contants.py | 1 + openpype/pipeline/publish/lib.py | 64 +++++++++++++++++++++++++-- 2 files changed, 62 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/publish/contants.py b/openpype/pipeline/publish/contants.py index 958675ecc1..169eca2e5c 100644 --- a/openpype/pipeline/publish/contants.py +++ b/openpype/pipeline/publish/contants.py @@ -1 +1,2 @@ DEFAULT_PUBLISH_TEMPLATE = "publish" +DEFAULT_HERO_PUBLISH_TEMPLATE = "hero" diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 03dfbadfcc..85a64da721 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -15,7 +15,10 @@ from openpype.settings import ( get_system_settings, ) -from .contants import DEFAULT_PUBLISH_TEMPLATE +from .contants import ( + DEFAULT_PUBLISH_TEMPLATE, + DEFAULT_HERO_PUBLISH_TEMPLATE, +) def get_template_name_profiles(project_name=None, project_settings=None): @@ -67,6 +70,49 @@ def get_template_name_profiles(project_name=None, project_settings=None): return profiles +def get_hero_template_name_profiles(project_name=None, project_settings=None): + """Receive profiles for hero publish template keys. + + At least one of arguments must be passed. + + Args: + project_name (str): Name of project where to look for templates. + project_settings(Dic[str, Any]): Prepared project settings. + + Returns: + List[Dict[str, Any]]: Publish template profiles. + """ + + if not project_name and not project_settings: + raise ValueError(( + "Both project name and project settings are missing." + " At least one must be entered." + )) + + if not project_settings: + project_settings = get_project_settings(project_name) + + profiles = ( + project_settings + ["global"] + ["tools"] + ["publish"] + ["hero_template_name_profiles"] + ) + if profiles: + return copy.deepcopy(profiles) + + # Use legacy approach for cases new settings are not filled yet for the + # project + return copy.deepcopy( + project_settings + ["global"] + ["publish"] + ["IntegrateHeroVersion"] + ["template_name_profiles"] + ) + + def get_publish_template_name( project_name, host_name, @@ -74,6 +120,7 @@ def get_publish_template_name( task_name, task_type, project_settings=None, + hero=False, logger=None ): """Get template name which should be used for passed context. 
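
For illustration, the profile matching these helpers rely on boils down to the
criteria dictionary built in `get_publish_template_name`. Below is a greatly
simplified, self-contained sketch (names are illustrative; this is not the real
`openpype.lib.filter_profiles`, whose matching rules are richer, and the profile
values used here are hypothetical)::

    SKETCH_PROFILES = [
        {
            "hosts": ["maya"],
            "families": ["render"],
            "task_types": [],        # empty list means "match any" in this sketch
            "task_names": [],
            "template_name": "maya_render",  # hypothetical template key
        },
    ]

    def sketch_resolve_template(profiles, host_name, family,
                                task_name, task_type, hero=False):
        criteria = {
            "hosts": host_name,
            "families": family,
            "task_names": task_name,
            "task_types": task_type,
        }
        for profile in profiles:
            if all(not profile.get(key) or value in profile[key]
                   for key, value in criteria.items()):
                return profile["template_name"]
        # Fallbacks mirror the constants introduced in this patch set
        return "hero" if hero else "publish"

    print(sketch_resolve_template(
        SKETCH_PROFILES, "maya", "render", "lighting", "Lighting"))
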
@@ -105,11 +152,22 @@ def get_publish_template_name( "task_names": task_name, "task_types": task_type, } - profiles = get_template_name_profiles(project_name, project_settings) + if hero: + default_template = DEFAULT_HERO_PUBLISH_TEMPLATE + profiles = get_hero_template_name_profiles( + project_name, project_settings + ) + + else: + profiles = get_template_name_profiles( + project_name, project_settings + ) + default_template = DEFAULT_PUBLISH_TEMPLATE + profile = filter_profiles(profiles, filter_criteria, logger=logger) if profile: template = profile["template_name"] - return template or DEFAULT_PUBLISH_TEMPLATE + return template or default_template class DiscoverResult: From 1698aefcfbc887ba6f29fc59dbdfbc2595d5c6a8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:57:00 +0200 Subject: [PATCH 10/82] use 'get_publish_template_name' in hero integration --- .../plugins/publish/integrate_hero_version.py | 41 ++++++++----------- 1 file changed, 17 insertions(+), 24 deletions(-) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 7d698ff98d..2938c61f8e 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -14,14 +14,12 @@ from openpype.client import ( get_archived_representations, get_representations, ) -from openpype.lib import ( - create_hard_link, - filter_profiles -) +from openpype.lib import create_hard_link from openpype.pipeline import ( schema, legacy_io, ) +from openpype.pipeline.publish import get_publish_template_name class IntegrateHeroVersion(pyblish.api.InstancePlugin): @@ -68,10 +66,11 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): ) return - template_key = self._get_template_key(instance) - anatomy = instance.context.data["anatomy"] project_name = anatomy.project_name + + template_key = self._get_template_key(project_name, instance) + if template_key not in anatomy.templates: self.log.warning(( "!!! Anatomy of project \"{}\" does not have set" @@ -527,30 +526,24 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): return publish_folder - def _get_template_key(self, instance): + def _get_template_key(self, project_name, instance): anatomy_data = instance.data["anatomyData"] - task_data = anatomy_data.get("task") or {} - task_name = task_data.get("name") - task_type = task_data.get("type") + task_info = anatomy_data.get("task") or {} host_name = instance.context.data["hostName"] + # TODO raise error if Hero not set? 
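+        # (With hero=True the call below goes through the hero template
+        #  profiles and falls back to the "hero" template instead of
+        #  "publish".)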
family = self.main_family_from_instance(instance) - key_values = { - "families": family, - "task_names": task_name, - "task_types": task_type, - "hosts": host_name - } - profile = filter_profiles( - self.template_name_profiles, - key_values, + + return get_publish_template_name( + project_name, + host_name, + family, + task_info.get("name"), + task_info.get("type"), + project_settings=instance.context.data["project_settings"], + hero=True, logger=self.log ) - if profile: - template_name = profile["template_name"] - else: - template_name = self._default_template_name - return template_name def main_family_from_instance(self, instance): """Returns main family of entered instance.""" From 9b7384e1ae96b0f348911e9e163a23857dd2ca7f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:57:27 +0200 Subject: [PATCH 11/82] removed unused attribute --- openpype/plugins/publish/integrate_legacy.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 0e157c9d1f..bbf30c9ab7 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -140,7 +140,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): integrated_file_sizes = {} # Attributes set by settings - template_name_profiles = None subset_grouping_profiles = None def process(self, instance): From c6a6e3b21a4aaa6c98450de918c78907fed91f5e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 19:07:34 +0200 Subject: [PATCH 12/82] added warning for access to legacy settings --- openpype/pipeline/publish/lib.py | 35 +++++++++++++++++++++++++++----- 1 file changed, 30 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 85a64da721..29c745ed15 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -21,7 +21,9 @@ from .contants import ( ) -def get_template_name_profiles(project_name=None, project_settings=None): +def get_template_name_profiles( + project_name, project_settings=None, logger=None +): """Receive profiles for publish template keys. At least one of arguments must be passed. @@ -62,6 +64,16 @@ def get_template_name_profiles(project_name=None, project_settings=None): ["IntegrateAssetNew"] ["template_name_profiles"] ) + if legacy_profiles: + if not logger: + logger = Logger.get_logger("get_template_name_profiles") + + logger.warning(( + "Project \"{}\" is using legacy access to publish template." + " It is recommended to move settings to new location" + " 'project_settings/global/tools/publish/template_name_profiles'." + ).format(project_name)) + # Replace "tasks" key with "task_names" profiles = [] for profile in copy.deepcopy(legacy_profiles): @@ -70,7 +82,9 @@ def get_template_name_profiles(project_name=None, project_settings=None): return profiles -def get_hero_template_name_profiles(project_name=None, project_settings=None): +def get_hero_template_name_profiles( + project_name, project_settings=None, logger=None +): """Receive profiles for hero publish template keys. At least one of arguments must be passed. 
@@ -104,13 +118,24 @@ def get_hero_template_name_profiles(project_name=None, project_settings=None): # Use legacy approach for cases new settings are not filled yet for the # project - return copy.deepcopy( + legacy_profiles = copy.deepcopy( project_settings ["global"] ["publish"] ["IntegrateHeroVersion"] ["template_name_profiles"] ) + if legacy_profiles: + if not logger: + logger = Logger.get_logger("get_hero_template_name_profiles") + + logger.warning(( + "Project \"{}\" is using legacy access to hero publish template." + " It is recommended to move settings to new location" + " 'project_settings/global/tools/publish/" + "hero_template_name_profiles'." + ).format(project_name)) + return legacy_profiles def get_publish_template_name( @@ -155,12 +180,12 @@ def get_publish_template_name( if hero: default_template = DEFAULT_HERO_PUBLISH_TEMPLATE profiles = get_hero_template_name_profiles( - project_name, project_settings + project_name, project_settings, logger ) else: profiles = get_template_name_profiles( - project_name, project_settings + project_name, project_settings, logger ) default_template = DEFAULT_PUBLISH_TEMPLATE From dc4c32b6fc6aaeaedb6bb9d76c7b72d9f5c45c45 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:14:58 +0200 Subject: [PATCH 13/82] Fix representation data for workfile --- openpype/modules/deadline/abstract_submit_deadline.py | 6 +++--- .../deadline/plugins/publish/submit_maya_deadline.py | 3 --- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 577378335e..d198542370 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -554,9 +554,9 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): "Workfile (scene) must be published along") # determine published path from Anatomy. 
template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0].get("ext") - template_data["representation"] = rep - template_data["ext"] = rep + rep = i.data.get("representations")[0] + template_data["representation"] = rep.get("name") + template_data["ext"] = rep.get("ext") template_data["comment"] = None anatomy_filled = anatomy.format(template_data) template_filled = anatomy_filled["publish"]["path"] diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 5a7d0b98c6..68e8eaaa73 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -258,10 +258,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): ) self._patch_workfile(filepath, patches) - # todo: on self.use_published originally use template_data["representation"] using .get("name") instead of .get("ext") # todo: on self.use_published replace path for publishRenderMetadataFolder - # rep = i.data.get("representations")[0].get("name") - # if instance.data.get("publishRenderMetadataFolder"): # instance.data["publishRenderMetadataFolder"] = \ # instance.data["publishRenderMetadataFolder"].replace( From 67b8664be07fbb8a0061b7c8a62bf4073ef79307 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:15:20 +0200 Subject: [PATCH 14/82] Remove comment for patched file code refactor since it's already implemented --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 68e8eaaa73..07ed237c94 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -266,10 +266,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # self.log.info("Scene name was switched {} -> {}".format( # orig_scene, new_scene # )) - # # patch workfile is needed - # if filepath not in patched_files: - # patched_file = self._patch_workfile(filepath, patches) - # patched_files.append(patched_file) # Gather needed data ------------------------------------------------ workspace = context.data["workspaceDir"] From 2da8f036dee501be62da07d07fa9efafc9e8839f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:19:37 +0200 Subject: [PATCH 15/82] Refactor logic for less indentation --- .../deadline/abstract_submit_deadline.py | 103 +++++++++--------- 1 file changed, 52 insertions(+), 51 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index d198542370..55e16d8d21 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -546,65 +546,66 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): anatomy = self._instance.context.data['anatomy'] file_path = None for i in self._instance.context: - if "workfile" in i.data["families"] \ - or i.data["family"] == "workfile": - # test if there is instance of workfile waiting - # to be published. - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - # determine published path from Anatomy. 
- template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0] - template_data["representation"] = rep.get("name") - template_data["ext"] = rep.get("ext") - template_data["comment"] = None - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] - file_path = os.path.normpath(template_filled) - self.log.info("Using published scene for render {}".format( - file_path)) + is_workfile = + if not is_workfile: + continue - if not os.path.exists(file_path): - self.log.error("published scene does not exist!") - raise + # test if there is instance of workfile waiting + # to be published. + assert i.data["publish"] is True, ( + "Workfile (scene) must be published along") + # determine published path from Anatomy. + template_data = i.data.get("anatomyData") + rep = i.data.get("representations")[0] + template_data["representation"] = rep.get("name") + template_data["ext"] = rep.get("ext") + template_data["comment"] = None + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled["publish"]["path"] + file_path = os.path.normpath(template_filled) - if not replace_in_path: - return file_path + self.log.info("Using published scene for render {}".format( + file_path)) - # now we need to switch scene in expected files - # because token will now point to published - # scene file and that might differ from current one - new_scene = os.path.splitext( - os.path.basename(file_path))[0] - orig_scene = os.path.splitext( - os.path.basename( - self._instance.context.data["currentFile"]))[0] - exp = self._instance.data.get("expectedFiles") + if not os.path.exists(file_path): + self.log.error("published scene does not exist!") + raise - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - new_exp = {} - for aov, files in exp[0].items(): - replaced_files = [] - for f in files: - replaced_files.append( - str(f).replace(orig_scene, new_scene) - ) - new_exp[aov] = replaced_files - # [] might be too much here, TODO - self._instance.data["expectedFiles"] = [new_exp] - else: - new_exp = [] - for f in exp: - new_exp.append( + if not replace_in_path: + return file_path + + # now we need to switch scene in expected files + # because token will now point to published + # scene file and that might differ from current one + new_scene = os.path.splitext(os.path.basename(file_path))[0] + orig_scene = os.path.splitext(os.path.basename( + self._instance.context.data["currentFile"]))[0] + exp = self._instance.data.get("expectedFiles") + + if isinstance(exp[0], dict): + # we have aovs and we need to iterate over them + new_exp = {} + for aov, files in exp[0].items(): + replaced_files = [] + for f in files: + replaced_files.append( str(f).replace(orig_scene, new_scene) ) - self._instance.data["expectedFiles"] = new_exp + new_exp[aov] = replaced_files + # [] might be too much here, TODO + self._instance.data["expectedFiles"] = [new_exp] + else: + new_exp = [] + for f in exp: + new_exp.append( + str(f).replace(orig_scene, new_scene) + ) + self._instance.data["expectedFiles"] = new_exp - self.log.info("Scene name was switched {} -> {}".format( - orig_scene, new_scene - )) + self.log.info("Scene name was switched {} -> {}".format( + orig_scene, new_scene + )) return file_path From 21a319b10c46dec6efc2aaf6fe6ac1fe09bfc512 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 19:20:33 +0200 Subject: [PATCH 16/82] added 'deprecated' to integrator labels and added new location for hero templates as note --- 
.../projects_schema/schemas/schema_global_publish.json | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index c24c88d04a..2cb0cebf95 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -649,7 +649,7 @@ { "type": "list", "key": "template_name_profiles", - "label": "Template name profiles", + "label": "Template name profiles (DEPRECATED)", "use_label_wrap": true, "object_type": { "type": "dict", @@ -754,10 +754,14 @@ "type": "list", "object_type": "text" }, + { + "type": "label", + "label": "NOTE: Hero publish template profiles settings were moved to Tools/Publish/Hero template name profiles. Please move values there." + }, { "type": "list", "key": "template_name_profiles", - "label": "Template name profiles", + "label": "Template name profiles (DEPRECATED)", "use_label_wrap": true, "object_type": { "type": "dict", From e81e3a7a1021db4e442aa3147ed03ccf8d92d8c6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:22:06 +0200 Subject: [PATCH 17/82] Fix missing line --- openpype/modules/deadline/abstract_submit_deadline.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 55e16d8d21..86eebc0d35 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -547,7 +547,10 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): file_path = None for i in self._instance.context: - is_workfile = + is_workfile = ( + "workfile" in i.data.get("families", []) or + i.data["family"] == "workfile" + ) if not is_workfile: continue From 2933b37ef7711aac4e04284120b6c9b0ce2c9612 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:34:27 +0200 Subject: [PATCH 18/82] Refactor code for readability --- .../deadline/abstract_submit_deadline.py | 124 ++++++++++-------- 1 file changed, 68 insertions(+), 56 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 86eebc0d35..46baa9ee57 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -543,72 +543,84 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): published. """ - anatomy = self._instance.context.data['anatomy'] - file_path = None - for i in self._instance.context: - is_workfile = ( - "workfile" in i.data.get("families", []) or - i.data["family"] == "workfile" - ) - if not is_workfile: - continue + def _get_workfile_instance(context): + """Find workfile instance in context""" + for i in context: - # test if there is instance of workfile waiting - # to be published. - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - # determine published path from Anatomy. 
- template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0] - template_data["representation"] = rep.get("name") - template_data["ext"] = rep.get("ext") - template_data["comment"] = None - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] - file_path = os.path.normpath(template_filled) + is_workfile = ( + "workfile" in i.data.get("families", []) or + i.data["family"] == "workfile" + ) + if not is_workfile: + continue - self.log.info("Using published scene for render {}".format( - file_path)) + # test if there is instance of workfile waiting + # to be published. + assert i.data["publish"] is True, ( + "Workfile (scene) must be published along") - if not os.path.exists(file_path): - self.log.error("published scene does not exist!") - raise + return i - if not replace_in_path: - return file_path + instance = self._instance + workfile_instance = _get_workfile_instance(instance.context) + if not workfile_instance: + return - # now we need to switch scene in expected files - # because token will now point to published - # scene file and that might differ from current one - new_scene = os.path.splitext(os.path.basename(file_path))[0] - orig_scene = os.path.splitext(os.path.basename( - self._instance.context.data["currentFile"]))[0] - exp = self._instance.data.get("expectedFiles") + # determine published path from Anatomy. + template_data = workfile_instance.data.get("anatomyData") + rep = workfile_instance.data.get("representations")[0] + template_data["representation"] = rep.get("name") + template_data["ext"] = rep.get("ext") + template_data["comment"] = None - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - new_exp = {} - for aov, files in exp[0].items(): - replaced_files = [] - for f in files: - replaced_files.append( - str(f).replace(orig_scene, new_scene) - ) - new_exp[aov] = replaced_files - # [] might be too much here, TODO - self._instance.data["expectedFiles"] = [new_exp] - else: - new_exp = [] - for f in exp: - new_exp.append( + anatomy = instance.context.data['anatomy'] + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled["publish"]["path"] + file_path = os.path.normpath(template_filled) + + self.log.info("Using published scene for render {}".format(file_path)) + + if not os.path.exists(file_path): + self.log.error("published scene does not exist!") + raise + + if not replace_in_path: + return file_path + + # now we need to switch scene in expected files + # because token will now point to published + # scene file and that might differ from current one + def _clean_name(path): + return os.path.splitext(os.path.basename(path))[0] + + new_scene = _clean_name(file_path) + orig_scene = _clean_name(instance.context.data["currentFile"]) + expected_files = instance.data.get("expectedFiles") + + if isinstance(expected_files[0], dict): + # we have aovs and we need to iterate over them + new_exp = {} + for aov, files in expected_files[0].items(): + replaced_files = [] + for f in files: + replaced_files.append( str(f).replace(orig_scene, new_scene) ) - self._instance.data["expectedFiles"] = new_exp + new_exp[aov] = replaced_files + # [] might be too much here, TODO + instance.data["expectedFiles"] = [new_exp] + else: + new_exp = [] + for f in expected_files: + new_exp.append( + str(f).replace(orig_scene, new_scene) + ) + instance.data["expectedFiles"] = new_exp - self.log.info("Scene name was switched {} -> {}".format( - orig_scene, new_scene - )) + 
self.log.info("Scene name was switched {} -> {}".format( + orig_scene, new_scene + )) return file_path From c725ff5b42c2f3f248a6af8f835020c9efb23182 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:35:05 +0200 Subject: [PATCH 19/82] Move replacing in `publishRenderMetadataFolder` to abstract base class --- openpype/modules/deadline/abstract_submit_deadline.py | 6 ++++++ .../deadline/plugins/publish/submit_maya_deadline.py | 9 --------- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 46baa9ee57..f56cf49f6d 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -618,6 +618,12 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): ) instance.data["expectedFiles"] = new_exp + metadata_folder = instance.data.get("publishRenderMetadataFolder") + if metadata_folder: + metadata_folder = metadata_folder.replace(orig_scene, + new_scene) + instance.data["publishRenderMetadataFolder"] = metadata_folder + self.log.info("Scene name was switched {} -> {}".format( orig_scene, new_scene )) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 07ed237c94..26c26a124c 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -258,15 +258,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): ) self._patch_workfile(filepath, patches) - # todo: on self.use_published replace path for publishRenderMetadataFolder - # if instance.data.get("publishRenderMetadataFolder"): - # instance.data["publishRenderMetadataFolder"] = \ - # instance.data["publishRenderMetadataFolder"].replace( - # orig_scene, new_scene) - # self.log.info("Scene name was switched {} -> {}".format( - # orig_scene, new_scene - # )) - # Gather needed data ------------------------------------------------ workspace = context.data["workspaceDir"] default_render_file = instance.context.data.get('project_settings')\ From 23e652a51f41a6c65d6feba6edb0268f85feccb8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:54:36 +0200 Subject: [PATCH 20/82] Patch plug-in payload with settings --- .../maya/plugins/publish/collect_render.py | 1 + .../plugins/publish/submit_maya_deadline.py | 17 +++++++++-------- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index ebda5e190d..768a53329f 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -293,6 +293,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "source": filepath, "expectedFiles": full_exp_files, "publishRenderMetadataFolder": common_publish_meta_path, + "renderProducts": layer_render_products, "resolutionWidth": lib.get_attr_in_layer( "defaultResolution.width", layer=layer_name ), diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 26c26a124c..854a66eaa5 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -215,16 +215,21 @@ class 
MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): renderlayer = instance.data['setMembers'] # rs_beauty - self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer - self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa - # Output driver to render plugin_info = DeadlinePluginInfo( SceneFile=context.data["currentFile"], Version=cmds.about(version=True), + RenderLayer=renderlayer, + RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights") # noqa ) - return attr.asdict(plugin_info) + plugin_payload = attr.asdict(plugin_info) + + # Patching with pluginInfo from settings + for key, value in self.pluginInfo.items(): + plugin_payload[key] = value + + return plugin_payload def process_submission(self): @@ -338,10 +343,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Store output dir for unified publisher (filesequence) instance.data["outputDir"] = os.path.dirname(output_filename_0) - # add jobInfo and pluginInfo variables from Settings - payload["JobInfo"].update(self.jobInfo) - payload["PluginInfo"].update(self.pluginInfo) - if instance.data.get("tileRendering"): # Prepare tiles data self._tile_render(instance, payload) From 4abddd027de9d4a1814ddff1b971bb9a99c47008 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:55:07 +0200 Subject: [PATCH 21/82] Use collected render products for image prefix --- .../deadline/plugins/publish/submit_maya_deadline.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 854a66eaa5..bb7ae380b6 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -307,7 +307,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "jobname": jobname, "comment": comment, "output_filename_0": output_filename_0, - "render_variables": render_variables, "renderlayer": renderlayer, "workspace": workspace, "dirname": dirname, @@ -564,6 +563,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): renderer = self._instance.data["renderer"] + # Get layer prefix + render_products = self._instance.data["renderProducts"] + layer_metadata = render_products.layer_data + layer_prefix = layer_metadata.filePrefix + # This hack is here because of how Deadline handles Renderman version. 
# it considers everything with `renderman` set as version older than # Renderman 22, and so if we are using renderman > 21 we need to set @@ -583,7 +587,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "SceneFile": data["filepath"], # Output directory and filename "OutputFilePath": data["dirname"].replace("\\", "/"), - "OutputFilePrefix": data["render_variables"]["filename_prefix"], # noqa: E501 + "OutputFilePrefix": layer_prefix, # Only render layers are considered renderable in this pipeline "UsingRenderLayers": True, From 6f5fcecfae7ec92bf2df80fb673bfff3e1049231 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:55:24 +0200 Subject: [PATCH 22/82] Use existing variable `renderer` --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index bb7ae380b6..c7f91905ea 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -573,7 +573,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Renderman 22, and so if we are using renderman > 21 we need to set # renderer string on the job to `renderman22`. We will have to change # this when Deadline releases new version handling this. - if self._instance.data["renderer"] == "renderman": + if renderer == "renderman": try: from rfm2.config import cfg # noqa except ImportError: From ae250c4a100dcc4474512913dbc858326ced3c8a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:55:40 +0200 Subject: [PATCH 23/82] Remove unused `comment` key-value --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index c7f91905ea..b2b877ab0e 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -305,7 +305,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "filename": filename, "filepath": filepath, "jobname": jobname, - "comment": comment, "output_filename_0": output_filename_0, "renderlayer": renderlayer, "workspace": workspace, From 7af7f71edacea21a00c370e4ad1e92b2fe576b66 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:58:04 +0200 Subject: [PATCH 24/82] Remove logging of plugin name --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index b2b877ab0e..db796f25a9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -335,9 +335,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if export_job: payload["JobInfo"]["JobDependency0"] = export_job - plugin = payload["JobInfo"]["Plugin"] - self.log.info("using render plugin : {}".format(plugin)) - # Store output dir for unified publisher (filesequence) instance.data["outputDir"] = os.path.dirname(output_filename_0) From f91e33c0385762a7a73cca192f4d36716377ee1e Mon Sep 
17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 23:21:30 +0200 Subject: [PATCH 25/82] More refactoring/cleanup (WIP) --- .../plugins/publish/submit_maya_deadline.py | 304 +++++++----------- 1 file changed, 110 insertions(+), 194 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index db796f25a9..8f12a9518f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -70,7 +70,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): tile_assembler_plugin = "OpenPypeTileAssembler" priority = 50 tile_priority = 50 - limit_groups = [] + limit = [] # limit groups jobInfo = {} pluginInfo = {} group = "none" @@ -112,23 +112,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.ChunkSize = instance.data.get("chunkSize", 10) job_info.Comment = context.data.get("comment") job_info.Priority = instance.data.get("priority", self.priority) + job_info.FramesPerTask = instance.data.get("framesPerTask", 1) if self.group != "none" and self.group: job_info.Group = self.group - if self.limit_groups: - job_info.LimitGroups = ",".join(self.limit_groups) + if self.limit: + job_info.LimitGroups = ",".join(self.limit) - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ - os.path.dirname(output_filename_0).replace("\\", "/") - self.payload_skeleton["JobInfo"]["OutputFilename0"] = \ - output_filename_0.replace("\\", "/") - - # Add options from RenderGlobals------------------------------------- + # Add options from RenderGlobals render_globals = instance.data.get("renderGlobals", {}) - self.payload_skeleton["JobInfo"].update(render_globals) + for key, value in render_globals: + setattr(job_info, key, value) keys = [ "FTRACK_API_KEY", @@ -140,7 +135,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS", "OPENPYPE_VERSION" ] # Add mongo url if it's enabled @@ -150,10 +144,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) - # TODO: Taken from old publish class - test whether still needed environment["OPENPYPE_LOG_NO_COLORS"] = "1" - environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True) # to recognize job from PYPE for turning Event On/Off environment["OPENPYPE_RENDER_JOB"] = "1" @@ -166,7 +158,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): ) # to recognize job from PYPE for turning Event On/Off job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + job_info.EnvironmentKeyValue = "OPENPYPE_LOG_NO_COLORS=1" + # Optional, enable double-click to preview rendered + # frames from Deadline Monitor for i, filepath in enumerate(instance.data["files"]): dirname = os.path.dirname(filepath) fname = os.path.basename(filepath) @@ -213,14 +208,13 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context - renderlayer = instance.data['setMembers'] # rs_beauty - - # Output driver to render plugin_info = DeadlinePluginInfo( - SceneFile=context.data["currentFile"], + SceneFile=self.scene_path, Version=cmds.about(version=True), - 
RenderLayer=renderlayer, - RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights") # noqa + RenderLayer=instance.data['setMembers'], + RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights"), # noqa + ProjectPath=context.data["workspaceDir"], + UsingRenderLayers=True, ) plugin_payload = attr.asdict(plugin_info) @@ -236,12 +230,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context - # Generated by AbstractSubmitDeadline. The `job_info`, `plugin_info` - # and `aux_files` are the skeleton payloads that are the basis for - # all the maya submissions - job_info = self.job_info - plugin_info = self.plugin_info - aux_files = self.aux_files filepath = self.scene_path # publish if `use_publish` else workfile # TODO: Avoid the need for this logic here, needed for submit publish @@ -250,18 +238,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance.data["outputDir"] = output_dir instance.data["toBeRenderedOn"] = "deadline" - self.limit_groups = self.limit - # Patch workfile (only when use_published is enabled) if self.use_published: - patches = ( - context.data["project_settings"].get( - "deadline", {}).get( - "publish", {}).get( - "MayaSubmitDeadline", {}).get( - "scene_patches", {}) - ) - self._patch_workfile(filepath, patches) + self._patch_workfile() # Gather needed data ------------------------------------------------ workspace = context.data["workspaceDir"] @@ -271,22 +250,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): .get('default_render_image_folder') filename = os.path.basename(filepath) dirname = os.path.join(workspace, default_render_file) - renderlayer = instance.data['setMembers'] # rs_beauty - - # Get the variables depending on the renderer - # TODO: Find replacement logic for `get_renderer_variables` through - # what is collected for the render or is implemented in maya - # api `lib_renderproducts` - render_variables = get_renderer_variables(renderlayer, dirname) - filename_0 = render_variables["filename_0"] - if self.use_published: - new_scene = os.path.splitext(filename)[0] - orig_scene = os.path.splitext( - os.path.basename(context.data["currentFile"]))[0] - filename_0 = render_variables["filename_0"].replace( - orig_scene, new_scene) - - output_filename_0 = filename_0 # this is needed because renderman handles directory and file # prefixes separately @@ -301,16 +264,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): pass # Fill in common data to payload ------------------------------------ + # TODO: Replace these with collected data from CollectRender payload_data = { "filename": filename, "filepath": filepath, - "jobname": jobname, "output_filename_0": output_filename_0, "renderlayer": renderlayer, - "workspace": workspace, "dirname": dirname, } + # Store output dir for unified publisher (filesequence) + instance.data["outputDir"] = os.path.dirname(output_filename_0) + # Submit preceding export jobs ------------------------------------- export_job = None assert not all(x in instance.data["families"] @@ -333,17 +298,16 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add export job as dependency -------------------------------------- if export_job: - payload["JobInfo"]["JobDependency0"] = export_job - - # Store output dir for unified publisher (filesequence) - instance.data["outputDir"] = os.path.dirname(output_filename_0) + job_info, _ = payload + 
job_info.JobDependency = export_job if instance.data.get("tileRendering"): # Prepare tiles data self._tile_render(instance, payload) else: # Submit main render job - self.submit(payload) + job_info, plugin_info = payload + self.submit(self.assemble_payload(job_info, plugin_info)) def _tile_render(self, instance, payload): @@ -546,18 +510,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance.data["jobBatchName"])) def _get_maya_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) - if not self.asset_dependencies: - job_info_ext = {} + job_info = copy.deepcopy(self.job_info) - else: - job_info_ext = { - # Asset dependency to wait for at least the scene file to sync. - "AssetDependency0": data["filepath"], - } - - renderer = self._instance.data["renderer"] + if self.asset_dependencies: + # Asset dependency to wait for at least the scene file to sync. + job_info.AssetDependency = self.scene_path # Get layer prefix render_products = self._instance.data["renderProducts"] @@ -569,6 +527,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Renderman 22, and so if we are using renderman > 21 we need to set # renderer string on the job to `renderman22`. We will have to change # this when Deadline releases new version handling this. + renderer = self._instance.data["renderer"] if renderer == "renderman": try: from rfm2.config import cfg # noqa @@ -580,29 +539,20 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): renderer = "renderman22" plugin_info = { - "SceneFile": data["filepath"], # Output directory and filename "OutputFilePath": data["dirname"].replace("\\", "/"), "OutputFilePrefix": layer_prefix, - - # Only render layers are considered renderable in this pipeline - "UsingRenderLayers": True, - - # Render only this layer - "RenderLayer": data["renderlayer"], - - # Determine which renderer to use from the file itself - "Renderer": renderer, - - # Resolve relative references - "ProjectPath": data["workspace"], } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info) - return payload + + return job_info, plugin_info def _get_vray_export_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) + + job_info = copy.deepcopy(self.job_info) + + job_info.Name = self._job_info_label("Export") + + # Get V-Ray settings info to compute output path vray_settings = cmds.ls(type="VRaySettingsNode") node = vray_settings[0] template = cmds.getAttr("{}.vrscene_filename".format(node)) @@ -610,34 +560,15 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): first_file = self.format_vray_output_filename(scene, template) first_file = "{}/{}".format(data["workspace"], first_file) output = os.path.dirname(first_file) - job_info_ext = { - # Job name, as seen in Monitor - "Name": "Export {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - "Plugin": self._instance.data.get( - "mayaRenderPlugin", "MayaPype"), - "FramesPerTask": self._instance.data.get("framesPerTask", 1) - } - - plugin_info_ext = { - # Renderer + plugin_info = { "Renderer": "vray", - # Input - "SceneFile": data["filepath"], "SkipExistingFrames": True, - "UsingRenderLayers": True, "UseLegacyRenderLayers": True, - "RenderLayer": data["renderlayer"], - "ProjectPath": data["workspace"], "OutputFilePath": output } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info_ext) - 
return payload + return job_info, plugin_info def _get_arnold_export_payload(self, data): @@ -653,76 +584,55 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): script = os.path.normpath(module_path) - payload = copy.deepcopy(self.payload_skeleton) - job_info_ext = { - # Job name, as seen in Monitor - "Name": "Export {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), + job_info = copy.deepcopy(self.job_info) + plugin_info = copy.deepcopy(self.plugin_info) - "Plugin": "Python", - "FramesPerTask": self._instance.data.get("framesPerTask", 1), - "Frames": 1 + job_info.Name = self._job_info_label("Export") + + # Force a single frame Python job + job_info.Plugin = "Python" + job_info.Frames = 1 + + # add required env vars for the export script + envs = { + "AVALON_APP_NAME": os.environ.get("AVALON_APP_NAME"), + "OPENPYPE_ASS_EXPORT_RENDER_LAYER": data["renderlayer"], + "OPENPYPE_ASS_EXPORT_SCENE_FILE": self.scene_path, + "OPENPYPE_ASS_EXPORT_OUTPUT": payload['JobInfo']['OutputFilename0'], # noqa + "OPENPYPE_ASS_EXPORT_START": int(self._instance.data["frameStartHandle"]), # noqa + "OPENPYPE_ASS_EXPORT_END": int(self._instance.data["frameEndHandle"]), # noqa + "OPENPYPE_ASS_EXPORT_STEP": 1 } + for key, value in envs.items(): + job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, + value=value) - plugin_info_ext = { + plugin_info.update({ "Version": "3.6", "ScriptFile": script, "Arguments": "", "SingleFrameOnly": "True", - } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info_ext) + }) - envs = [ - v - for k, v in payload["JobInfo"].items() - if k.startswith("EnvironmentKeyValue") - ] - - # add app name to environment - envs.append( - "AVALON_APP_NAME={}".format(os.environ.get("AVALON_APP_NAME"))) - envs.append( - "OPENPYPE_ASS_EXPORT_RENDER_LAYER={}".format(data["renderlayer"])) - envs.append( - "OPENPYPE_ASS_EXPORT_SCENE_FILE={}".format(data["filepath"])) - envs.append( - "OPENPYPE_ASS_EXPORT_OUTPUT={}".format( - payload['JobInfo']['OutputFilename0'])) - envs.append( - "OPENPYPE_ASS_EXPORT_START={}".format( - int(self._instance.data["frameStartHandle"]))) - envs.append( - "OPENPYPE_ASS_EXPORT_END={}".format( - int(self._instance.data["frameEndHandle"]))) - envs.append( - "OPENPYPE_ASS_EXPORT_STEP={}".format(1)) - - for i, e in enumerate(envs): - payload["JobInfo"]["EnvironmentKeyValue{}".format(i)] = e - return payload + return job_info, plugin_info def _get_vray_render_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) + + # Job Info + job_info = copy.deepcopy(self.job_info) + job_info.Name = self._job_info_label("Render") + job_info.Plugin = "Vray" + job_info.OverrideTaskExtraInfoNames = False + + # Plugin Info vray_settings = cmds.ls(type="VRaySettingsNode") node = vray_settings[0] template = cmds.getAttr("{}.vrscene_filename".format(node)) # "vrayscene//_/" - scene, _ = os.path.splitext(data["filename"]) + scene, _ = os.path.splitext(self.scene_path) first_file = self.format_vray_output_filename(scene, template) first_file = "{}/{}".format(data["workspace"], first_file) - job_info_ext = { - "Name": "Render {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - - "Plugin": "Vray", - "OverrideTaskExtraInfoNames": False, - } plugin_info = { "InputFilename": first_file, @@ -731,35 +641,28 @@ class 
MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "Width": self._instance.data["resolutionWidth"], "Height": self._instance.data["resolutionHeight"], - "OutputFilePath": payload["JobInfo"]["OutputDirectory0"], - "OutputFileName": payload["JobInfo"]["OutputFilename0"] + "OutputFilePath": job_info.OutputDirectory[0], + "OutputFileName": job_info.OutputFilename[0] } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info) - return payload + return job_info, plugin_info def _get_arnold_render_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) + + # Job Info + job_info = copy.deepcopy(self.job_info) + job_info.Name = self._job_info_label("Render") + job_info.Plugin = "Arnold" + job_info.OverrideTaskExtraInfoNames = False + + # Plugin Info ass_file, _ = os.path.splitext(data["output_filename_0"]) first_file = ass_file + ".ass" - job_info_ext = { - "Name": "Render {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - - "Plugin": "Arnold", - "OverrideTaskExtraInfoNames": False, - } - plugin_info = { "ArnoldFile": first_file, } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info) - return payload + return job_info, plugin_info def format_vray_output_filename(self, filename, template, dir=False): """Format the expected output file of the Export job. @@ -804,7 +707,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): return result - def _patch_workfile(self, file, patches): + def _patch_workfile(self): # type: (str, dict) -> [str, None] """Patch Maya scene. @@ -818,19 +721,25 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "line": "line to insert" } - Args: - file (str): File to patch. - patches (dict): Dictionary defining patches. 
- - Returns: - str: Patched file path or None - """ - if not patches or os.path.splitext(file)[1].lower() != ".ma": + project_settings = self._instance.context.data["project_settings"] + patches = ( + project_settings.get( + "deadline", {}).get( + "publish", {}).get( + "MayaSubmitDeadline", {}).get( + "scene_patches", {}) + ) + if not patches: + return + + if not os.path.splitext(self.scene_path)[1].lower() != ".ma": + self.log.debug("Skipping workfile patch since workfile is not " + ".ma file") return compiled_regex = [re.compile(p["regex"]) for p in patches] - with open(file, "r+") as pf: + with open(self.scene_path, "r+") as pf: scene_data = pf.readlines() for ln, line in enumerate(scene_data): for i, r in enumerate(compiled_regex): @@ -839,10 +748,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): pf.seek(0) pf.writelines(scene_data) pf.truncate() - self.log.info( - "Applied {} patch to scene.".format( - patches[i]["name"])) - return file + self.log.info("Applied {} patch to scene.".format( + patches[i]["name"] + )) + + def _job_info_label(self, label): + return "{label} {job.Name} [{start}-{end}]".format( + label=label, + job=self.job_info, + start=int(self._instance.data["frameStartHandle"]), + end=int(self._instance.data["frameEndHandle"]), + ) def _format_tiles( From f9bbda244bee373dd3bfb025528923d061808525 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 23:22:19 +0200 Subject: [PATCH 26/82] More explicit PluginInfo name --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 8f12a9518f..87ef4e6db9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -38,7 +38,7 @@ from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @attr.s -class DeadlinePluginInfo(): +class MayaPluginInfo: SceneFile = attr.ib(default=None) # Input OutputFilePath = attr.ib(default=None) # Output directory and filename OutputFilePrefix = attr.ib(default=None) @@ -208,7 +208,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context - plugin_info = DeadlinePluginInfo( + plugin_info = MayaPluginInfo( SceneFile=self.scene_path, Version=cmds.about(version=True), RenderLayer=instance.data['setMembers'], From ecf2a89081f19e14c65b0fd7b1992fe80519e983 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 23:39:11 +0200 Subject: [PATCH 27/82] More temp restructuring --- .../plugins/publish/submit_maya_deadline.py | 52 +++++++------------ 1 file changed, 20 insertions(+), 32 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 87ef4e6db9..a77ccd73d4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -273,9 +273,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "dirname": dirname, } - # Store output dir for unified publisher (filesequence) - instance.data["outputDir"] = os.path.dirname(output_filename_0) - # Submit preceding export jobs ------------------------------------- export_job = None assert not all(x in 
instance.data["families"] @@ -326,26 +323,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): plugin_info["ImageWidth"] = instance.data.get("resolutionWidth") plugin_info["RegionRendering"] = True - assembly_payload = { - "AuxFiles": [], - "JobInfo": { - "BatchName": payload["JobInfo"]["BatchName"], - "Frames": 1, - "Name": "{} - Tile Assembly Job".format( - payload["JobInfo"]["Name"]), - "OutputDirectory0": - payload["JobInfo"]["OutputDirectory0"].replace( - "\\", "/"), - "Plugin": self.tile_assembler_plugin, - "MachineLimit": 1 - }, - "PluginInfo": { + assembly_job_info = copy.deepcopy(job_info) + assembly_job_info.Plugin = self.tile_assembler_plugin + assembly_job_info.Name = "{job.Name} - Tile Assembly Job".format( + job=job_info) + assembly_job_info.Frames = 1 + assembly_job_info.MachineLimit = 1 + assembly_job_info.Priority = instance.data.get("tile_priority", + self.tile_priority) + + assembly_plugin_info = { "CleanupTiles": 1, "ErrorOnMissing": True - } } - assembly_payload["JobInfo"]["Priority"] = self._instance.data.get( - "tile_priority", self.tile_priority) frame_payloads = [] assembly_payloads = [] @@ -414,6 +404,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): file_index = 1 for file in assembly_files: frame = re.search(R_FRAME_NUMBER, file).group("frame") + new_assembly_payload = copy.deepcopy(assembly_payload) new_assembly_payload["JobInfo"]["Name"] = \ "{} (Frame {})".format( @@ -434,7 +425,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.log.info( "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) - url = "{}/api/jobs".format(self.deadline_url) tiles_count = instance.data.get("tilesX") * instance.data.get( "tilesY") # noqa: E501 @@ -444,9 +434,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_id = response.json()["_id"] hash = response.json()["Props"]["Ex0"] + # Add assembly job dependencies for assembly_job in assembly_payloads: - if assembly_job["JobInfo"]["ExtraInfo0"] == hash: - assembly_job["JobInfo"]["JobDependency0"] = job_id + assembly_job_info = assembly_job["JobInfo"] + if assembly_job_info.ExtraInfo[0] == hash: + assembly_job.JobDependency = job_id for assembly_job in assembly_payloads: file = assembly_job["JobInfo"]["ExtraInfo1"] @@ -461,14 +453,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): ) ) + config_file_dir = os.path.dirname(config_file) try: - if not os.path.isdir(os.path.dirname(config_file)): - os.makedirs(os.path.dirname(config_file)) + if not os.path.isdir(config_file_dir): + os.makedirs(config_file_dir) except OSError: # directory is not available - self.log.warning( - "Path is unreachable: `{}`".format( - os.path.dirname(config_file))) + self.log.warning("Path is unreachable: " + "`{}`".format(config_file_dir)) # add config file as job auxFile assembly_job["AuxFiles"] = [config_file] @@ -505,10 +497,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): response.json()["_id"]) job_idx += 1 - instance.data["jobBatchName"] = payload["JobInfo"]["BatchName"] - self.log.info("Setting batch name on instance: {}".format( - instance.data["jobBatchName"])) - def _get_maya_payload(self, data): job_info = copy.deepcopy(self.job_info) From 6abafd0aca1ca06204f5e5bc11907a0a6a855900 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 00:41:30 +0200 Subject: [PATCH 28/82] Refactor tile logic --- .../plugins/publish/submit_maya_deadline.py | 120 
++++++++---------- 1 file changed, 55 insertions(+), 65 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index a77ccd73d4..920adf7e4a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -314,11 +314,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # if we have sequence of files, we need to create tile job for # every frame - job_info.TileJob = True job_info.TileJobTilesInX = instance.data.get("tilesX") job_info.TileJobTilesInY = instance.data.get("tilesY") + tiles_count = job_info.TileJobTilesInX * job_info.TileJobTilesInY + plugin_info["ImageHeight"] = instance.data.get("resolutionHeight") plugin_info["ImageWidth"] = instance.data.get("resolutionWidth") plugin_info["RegionRendering"] = True @@ -334,7 +335,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): assembly_plugin_info = { "CleanupTiles": 1, - "ErrorOnMissing": True + "ErrorOnMissing": True, + "Renderer": self._instance.data["renderer"] } frame_payloads = [] @@ -367,81 +369,69 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): file_index = 1 for file in files: frame = re.search(R_FRAME_NUMBER, file).group("frame") - new_payload = copy.deepcopy(payload) - new_payload["JobInfo"]["Name"] = \ - "{} (Frame {} - {} tiles)".format( + + new_job_info = copy.deepcopy(job_info) + new_job_info.Name = "{} (Frame {} - {} tiles)".format( payload["JobInfo"]["Name"], frame, instance.data.get("tilesX") * instance.data.get("tilesY") - # noqa: E501 - ) - self.log.info( - "... preparing job {}".format( - new_payload["JobInfo"]["Name"])) - new_payload["JobInfo"]["TileJobFrame"] = frame + ) + new_job_info.TileJobFrame = frame - tiles_data = _format_tiles( + new_plugin_info = copy.deepcopy(plugin_info) + + # Add tile data into job info and plugin info + tiles_out, _ = _format_tiles( file, 0, instance.data.get("tilesX"), instance.data.get("tilesY"), instance.data.get("resolutionWidth"), instance.data.get("resolutionHeight"), payload["PluginInfo"]["OutputFilePrefix"] - )[0] - new_payload["JobInfo"].update(tiles_data["JobInfo"]) - new_payload["PluginInfo"].update(tiles_data["PluginInfo"]) + ) + new_job_info.update(tiles_out["JobInfo"]) + new_plugin_info.update(tiles_out["PluginInfo"]) self.log.info("hashing {} - {}".format(file_index, file)) job_hash = hashlib.sha256( ("{}_{}".format(file_index, file)).encode("utf-8")) frame_jobs[frame] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo0"] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo1"] = file - frame_payloads.append(new_payload) - file_index += 1 + new_job_info.ExtraInfo[0] = job_hash.hexdigest() + new_job_info.ExtraInfo[1] = file - file_index = 1 - for file in assembly_files: - frame = re.search(R_FRAME_NUMBER, file).group("frame") - - new_assembly_payload = copy.deepcopy(assembly_payload) - new_assembly_payload["JobInfo"]["Name"] = \ - "{} (Frame {})".format( - assembly_payload["JobInfo"]["Name"], - frame) - new_assembly_payload["JobInfo"]["OutputFilename0"] = re.sub( - REPL_FRAME_NUMBER, - "\\1{}\\3".format("#" * len(frame)), file) - - new_assembly_payload["PluginInfo"]["Renderer"] = \ - self._instance.data["renderer"] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo0"] = frame_jobs[ - frame] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo1"] = file - 
assembly_payloads.append(new_assembly_payload) + frame_payloads.append(self.assemble_payload( + job_info=new_job_info, + plugin_info=new_plugin_info + )) file_index += 1 self.log.info( "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) - tiles_count = instance.data.get("tilesX") * instance.data.get( - "tilesY") # noqa: E501 - - for tile_job in frame_payloads: - response = self.submit(tile_job) - + frame_tile_job_id = {} + for tile_job_payload in frame_payloads: + response = self.submit(tile_job_payload) job_id = response.json()["_id"] - hash = response.json()["Props"]["Ex0"] + frame_tile_job_id[frame] = job_id - # Add assembly job dependencies - for assembly_job in assembly_payloads: - assembly_job_info = assembly_job["JobInfo"] - if assembly_job_info.ExtraInfo[0] == hash: - assembly_job.JobDependency = job_id + assembly_jobs = [] + for i, file in enumerate(assembly_files): + frame = re.search(R_FRAME_NUMBER, file).group("frame") + + frame_assembly_job_info = copy.deepcopy(assembly_job_info) + frame_assembly_job_info.Name += " (Frame {})".format(frame) + frame_assembly_job_info.OutputFilename[0] = re.sub( + REPL_FRAME_NUMBER, + "\\1{}\\3".format("#" * len(frame)), file) + + hash = frame_jobs[frame] + tile_job_id = frame_tile_job_id[frame] + + frame_assembly_job_info.ExtraInfo[0] = hash + frame_assembly_job_info.ExtraInfo[1] = file + frame_assembly_job_info.JobDependency = tile_job_id - for assembly_job in assembly_payloads: - file = assembly_job["JobInfo"]["ExtraInfo1"] # write assembly job config files now = datetime.now() @@ -462,9 +452,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.log.warning("Path is unreachable: " "`{}`".format(config_file_dir)) - # add config file as job auxFile - assembly_job["AuxFiles"] = [config_file] - with open(config_file, "w") as cf: print("TileCount={}".format(tiles_count), file=cf) print("ImageFileName={}".format(file), file=cf) @@ -485,17 +472,20 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): for k, v in tiles.items(): print("{}={}".format(k, v), file=cf) - job_idx = 1 - instance.data["assemblySubmissionJobs"] = [] - for ass_job in assembly_payloads: - self.log.info("submitting assembly job {} of {}".format( - job_idx, len(assembly_payloads) - )) - response = self.submit(ass_job) + payload = self.assemble_payload( + job_info=frame_assembly_job_info, + plugin_info=assembly_plugin_info.copy(), + # add config file as job auxFile + aux_files=[config_file] + ) - instance.data["assemblySubmissionJobs"].append( - response.json()["_id"]) - job_idx += 1 + self.log.info("submitting assembly job {} of {}".format( + i+1, len(assembly_payloads) + )) + response = self.submit(payload) + assembly_jobs.append(response.json()["_id"]) + + instance.data["assemblySubmissionJobs"] = assembly_jobs def _get_maya_payload(self, data): From a6002de641e1ad500192be433f620f17680ea056 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 00:58:23 +0200 Subject: [PATCH 29/82] Refactor _format_tiles for readability --- .../plugins/publish/submit_maya_deadline.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 920adf7e4a..00d8eb7859 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -800,23 +800,23 @@ def _format_tiles( 
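For reference, the hunk below and PATCH 30 after it rework the region arithmetic in `_format_tiles`. A minimal standalone sketch of that same arithmetic, assuming a hypothetical 2x2 tiling of a 1920x1080 frame (the function name `region_bounds` is illustrative and not part of the patches):

    def region_bounds(tiles_x, tiles_y, width, height):
        # Same RegionTop/Bottom/Left/Right math that _format_tiles writes
        # into the Deadline PluginInfo, one entry per tile.
        w_space = width / tiles_x
        h_space = height / tiles_y
        regions = {}
        tile = 0
        for tile_x in range(1, tiles_x + 1):
            for tile_y in reversed(range(1, tiles_y + 1)):
                regions[tile] = {
                    "Top": int(height) - (tile_y * h_space),
                    "Bottom": int(height) - ((tile_y - 1) * h_space) - 1,
                    "Left": (tile_x - 1) * w_space,
                    "Right": (tile_x * w_space) - 1,
                }
                tile += 1
        return regions

    # Under Python 3 division these come out as floats:
    # 0: Top 0.0,   Bottom 539.0,  Left 0.0,   Right 959.0
    # 1: Top 540.0, Bottom 1079.0, Left 0.0,   Right 959.0
    # 2: Top 0.0,   Bottom 539.0,  Left 960.0, Right 1919.0
    # 3: Top 540.0, Bottom 1079.0, Left 960.0, Right 1919.0
    print(region_bounds(2, 2, 1920, 1080))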
tiles_x, tiles_y ) - out_tile_index = "OutputFilename{}Tile{}".format( - str(index), tile - ) + + # Job Info new_filename = "{}/{}{}".format( os.path.dirname(filename), tile_prefix, os.path.basename(filename) ) - out["JobInfo"][out_tile_index] = new_filename - out["PluginInfo"]["RegionPrefix{}".format(tile)] = \ - "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) + out["JobInfo"]["OutputFilename{}Tile{}".format(index, tile)] = new_filename # noqa + # Plugin Info + out["PluginInfo"]["RegionPrefix{}".format(tile)] = "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) # noqa: E501 out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 + # Tile config cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename @@ -828,4 +828,5 @@ def _format_tiles( cfg["Tile{}Height".format(tile)] = h_space tile += 1 + return out, cfg From a9fe806fec1a5e5ecf98327cfa4845b8b6d3edc0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 01:02:24 +0200 Subject: [PATCH 30/82] Calculate once --- .../plugins/publish/submit_maya_deadline.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 00d8eb7859..d0348119dc 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -800,6 +800,10 @@ def _format_tiles( tiles_x, tiles_y ) + top = int(height) - (tile_y * h_space) + bottom = int(height) - ((tile_y - 1) * h_space) - 1 + left = (tile_x - 1) * w_space + right = (tile_x * w_space) - 1 # Job Info new_filename = "{}/{}{}".format( @@ -811,19 +815,17 @@ def _format_tiles( # Plugin Info out["PluginInfo"]["RegionPrefix{}".format(tile)] = "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) # noqa: E501 - out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 - out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 - out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 - out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 + out["PluginInfo"]["RegionTop{}".format(tile)] = top + out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom + out["PluginInfo"]["RegionLeft{}".format(tile)] = left + out["PluginInfo"]["RegionRight{}".format(tile)] = right # Tile config cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename - cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space - - cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space) - + cfg["Tile{}X".format(tile)] = left + cfg["Tile{}Y".format(tile)] = top cfg["Tile{}Width".format(tile)] = w_space cfg["Tile{}Height".format(tile)] = h_space From d7c72f97b30f85aca15b4a8148c140595f0b2a3a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 01:10:19 +0200 Subject: [PATCH 31/82] Batch submit assembly jobs --- .../plugins/publish/submit_maya_deadline.py | 24 
++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index d0348119dc..265c0f79ec 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -339,7 +339,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "Renderer": self._instance.data["renderer"] } - frame_payloads = [] assembly_payloads = [] R_FRAME_NUMBER = re.compile( @@ -358,14 +357,15 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): itertools.chain.from_iterable( [f for _, f in exp[0].items()])) if not files: - # if beauty doesn't exists, use first aov we found + # if beauty doesn't exist, use first aov we found files = exp[0].get(list(exp[0].keys())[0]) else: files = exp assembly_files = files + # Define frame tile jobs frame_jobs = {} - + frame_payloads = {} file_index = 1 for file in files: frame = re.search(R_FRAME_NUMBER, file).group("frame") @@ -400,22 +400,24 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): new_job_info.ExtraInfo[0] = job_hash.hexdigest() new_job_info.ExtraInfo[1] = file - frame_payloads.append(self.assemble_payload( + frame_payloads[frame] = self.assemble_payload( job_info=new_job_info, plugin_info=new_plugin_info - )) + ) file_index += 1 self.log.info( "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) + # Submit frame tile jobs frame_tile_job_id = {} - for tile_job_payload in frame_payloads: + for frame, tile_job_payload in frame_payloads.items(): response = self.submit(tile_job_payload) job_id = response.json()["_id"] frame_tile_job_id[frame] = job_id - assembly_jobs = [] + # Define assembly payloads + assembly_payloads = [] for i, file in enumerate(assembly_files): frame = re.search(R_FRAME_NUMBER, file).group("frame") @@ -478,14 +480,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # add config file as job auxFile aux_files=[config_file] ) + assembly_payloads.append(payload) + # Submit assembly jobs + assembly_job_ids = [] + for i, payload in enumerate(assembly_payloads): self.log.info("submitting assembly job {} of {}".format( i+1, len(assembly_payloads) )) response = self.submit(payload) - assembly_jobs.append(response.json()["_id"]) + assembly_job_ids.append(response.json()["_id"]) - instance.data["assemblySubmissionJobs"] = assembly_jobs + instance.data["assemblySubmissionJobs"] = assembly_job_ids def _get_maya_payload(self, data): From 965522585b98e441907480caee57af5dad92c2d2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 01:11:12 +0200 Subject: [PATCH 32/82] Remove redundant docstring --- .../deadline/plugins/publish/submit_maya_deadline.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 265c0f79ec..cd9f426977 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -51,16 +51,6 @@ class MayaPluginInfo: class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): - """Submit available render layers to Deadline. - - Renders are submitted to a Deadline Web Service as - supplied via settings key "DEADLINE_REST_URL". 
- - Attributes: - use_published (bool): Use published scene to render instead of the - one in work area. - - """ label = "Submit Render to Deadline" hosts = ["maya"] From 8af88e115723e6abc73fff279773043e4a520326 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 09:08:44 +0200 Subject: [PATCH 33/82] More cleanup --- .../plugins/publish/submit_maya_deadline.py | 98 +++++++------------ 1 file changed, 38 insertions(+), 60 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index cd9f426977..95140a082f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -86,8 +86,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.Name = "%s - %s" % (src_filename, instance.name) job_info.BatchName = src_filename job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") - job_info.UserName = context.data.get( - "deadlineUser", getpass.getuser()) + job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) # Deadline requires integers in frame range frames = "{start}-{end}x{step}".format( @@ -134,25 +133,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) - # TODO: Taken from old publish class - test whether still needed - environment["OPENPYPE_LOG_NO_COLORS"] = "1" # to recognize job from PYPE for turning Event On/Off environment["OPENPYPE_RENDER_JOB"] = "1" + environment["OPENPYPE_LOG_NO_COLORS"] = "1" - for key in keys: - val = environment.get(key) - if val: - job_info.EnvironmentKeyValue = "{key}={value}".format( - key=key, - value=val - ) - # to recognize job from PYPE for turning Event On/Off - job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" - job_info.EnvironmentKeyValue = "OPENPYPE_LOG_NO_COLORS=1" + for key, value in environment.items(): + if not value: + continue + job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, + value=value) - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - for i, filepath in enumerate(instance.data["files"]): + # Enable double-click to preview rendered frames from Deadline Monitor + for filepath in instance.data["files"]: dirname = os.path.dirname(filepath) fname = os.path.basename(filepath) job_info.OutputDirectory = dirname.replace("\\", "/") @@ -241,25 +233,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): filename = os.path.basename(filepath) dirname = os.path.join(workspace, default_render_file) - # this is needed because renderman handles directory and file - # prefixes separately - if self._instance.data["renderer"] == "renderman": - dirname = os.path.dirname(output_filename_0) - - # Create render folder ---------------------------------------------- - try: - # Ensure render folder exists - os.makedirs(dirname) - except OSError: - pass - # Fill in common data to payload ------------------------------------ # TODO: Replace these with collected data from CollectRender payload_data = { "filename": filename, - "filepath": filepath, - "output_filename_0": output_filename_0, - "renderlayer": renderlayer, "dirname": dirname, } @@ -299,8 +276,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): def _tile_render(self, instance, payload): # As collected by 
super process() - job_info = self.job_info - plugin_info = self.pluginInfo + job_info = copy.deepcopy(self.job_info) + plugin_info = copy.deepcopy(self.plugin_info) # if we have sequence of files, we need to create tile job for # every frame @@ -314,23 +291,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): plugin_info["ImageWidth"] = instance.data.get("resolutionWidth") plugin_info["RegionRendering"] = True - assembly_job_info = copy.deepcopy(job_info) - assembly_job_info.Plugin = self.tile_assembler_plugin - assembly_job_info.Name = "{job.Name} - Tile Assembly Job".format( - job=job_info) - assembly_job_info.Frames = 1 - assembly_job_info.MachineLimit = 1 - assembly_job_info.Priority = instance.data.get("tile_priority", - self.tile_priority) - - assembly_plugin_info = { - "CleanupTiles": 1, - "ErrorOnMissing": True, - "Renderer": self._instance.data["renderer"] - } - - assembly_payloads = [] - R_FRAME_NUMBER = re.compile( r".+\.(?P[0-9]+)\..+") # noqa: N806, E501 REPL_FRAME_NUMBER = re.compile( @@ -407,7 +367,23 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): frame_tile_job_id[frame] = job_id # Define assembly payloads + assembly_job_info = copy.deepcopy(job_info) + assembly_job_info.Plugin = self.tile_assembler_plugin + assembly_job_info.Name = "{job.Name} - Tile Assembly Job".format( + job=job_info) + assembly_job_info.Frames = 1 + assembly_job_info.MachineLimit = 1 + assembly_job_info.Priority = instance.data.get("tile_priority", + self.tile_priority) + + assembly_plugin_info = { + "CleanupTiles": 1, + "ErrorOnMissing": True, + "Renderer": self._instance.data["renderer"] + } + assembly_payloads = [] + output_dir = self.job_info.OutputDirectory[0] for i, file in enumerate(assembly_files): frame = re.search(R_FRAME_NUMBER, file).group("frame") @@ -427,22 +403,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # write assembly job config files now = datetime.now() - config_file = os.path.join( - os.path.dirname(output_filename_0), + config_file = os.path.join(output_dir, "{}_config_{}.txt".format( os.path.splitext(file)[0], now.strftime("%Y_%m_%d_%H_%M_%S") ) ) - - config_file_dir = os.path.dirname(config_file) try: - if not os.path.isdir(config_file_dir): - os.makedirs(config_file_dir) + if not os.path.isdir(output_dir): + os.makedirs(output_dir) except OSError: # directory is not available self.log.warning("Path is unreachable: " - "`{}`".format(config_file_dir)) + "`{}`".format(output_dir)) with open(config_file, "w") as cf: print("TileCount={}".format(tiles_count), file=cf) @@ -567,17 +540,22 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.Plugin = "Python" job_info.Frames = 1 + renderlayer = self._instance.data["setMembers"] + # add required env vars for the export script envs = { "AVALON_APP_NAME": os.environ.get("AVALON_APP_NAME"), - "OPENPYPE_ASS_EXPORT_RENDER_LAYER": data["renderlayer"], + "OPENPYPE_ASS_EXPORT_RENDER_LAYER": renderlayer, "OPENPYPE_ASS_EXPORT_SCENE_FILE": self.scene_path, - "OPENPYPE_ASS_EXPORT_OUTPUT": payload['JobInfo']['OutputFilename0'], # noqa + "OPENPYPE_ASS_EXPORT_OUTPUT": job_info.OutputFilename[0], "OPENPYPE_ASS_EXPORT_START": int(self._instance.data["frameStartHandle"]), # noqa "OPENPYPE_ASS_EXPORT_END": int(self._instance.data["frameEndHandle"]), # noqa "OPENPYPE_ASS_EXPORT_STEP": 1 } for key, value in envs.items(): + if not value: + continue + job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, value=value) From 
e8aa926cb7d338427ce7ba558f8ffa1609fde8ef Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 09:28:42 +0200 Subject: [PATCH 34/82] Move single use of in-line function to the class for readability --- .../deadline/abstract_submit_deadline.py | 39 ++++++++++--------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index f56cf49f6d..a3db3feac9 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -544,26 +544,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): """ - def _get_workfile_instance(context): - """Find workfile instance in context""" - for i in context: - - is_workfile = ( - "workfile" in i.data.get("families", []) or - i.data["family"] == "workfile" - ) - if not is_workfile: - continue - - # test if there is instance of workfile waiting - # to be published. - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - - return i - instance = self._instance - workfile_instance = _get_workfile_instance(instance.context) + workfile_instance = self._get_workfile_instance(instance.context) if not workfile_instance: return @@ -689,3 +671,22 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): self._instance.data["deadlineSubmissionJob"] = result return result["_id"] + + @staticmethod + def _get_workfile_instance(context): + """Find workfile instance in context""" + for i in context: + + is_workfile = ( + "workfile" in i.data.get("families", []) or + i.data["family"] == "workfile" + ) + if not is_workfile: + continue + + # test if there is instance of workfile waiting + # to be published. + assert i.data["publish"] is True, ( + "Workfile (scene) must be published along") + + return i From c6a0a199e1810e5c63c484d5871d34d14594e5be Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 09:40:08 +0200 Subject: [PATCH 35/82] Cosmetics --- .../deadline/plugins/publish/submit_maya_deadline.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 95140a082f..873005e051 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -267,15 +267,16 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if instance.data.get("tileRendering"): # Prepare tiles data - self._tile_render(instance, payload) + self._tile_render(payload) else: # Submit main render job job_info, plugin_info = payload self.submit(self.assemble_payload(job_info, plugin_info)) - def _tile_render(self, instance, payload): + def _tile_render(self, payload): # As collected by super process() + instance = self._instance job_info = copy.deepcopy(self.job_info) plugin_info = copy.deepcopy(self.plugin_info) @@ -321,11 +322,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): frame = re.search(R_FRAME_NUMBER, file).group("frame") new_job_info = copy.deepcopy(job_info) - new_job_info.Name = "{} (Frame {} - {} tiles)".format( - payload["JobInfo"]["Name"], - frame, - instance.data.get("tilesX") * instance.data.get("tilesY") - ) + new_job_info.Name += " (Frame {} - {} tiles)".format(frame, + tiles_count) new_job_info.TileJobFrame = frame new_plugin_info = 
copy.deepcopy(plugin_info) From e429e2ec41d23c097b301c31485a38aea634d6a3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 10:19:38 +0200 Subject: [PATCH 36/82] Remove json dump since `renderProducts` are not serializable --- openpype/hosts/maya/plugins/publish/collect_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 768a53329f..14aac2f206 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -360,7 +360,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): instance.data["label"] = label instance.data["farm"] = True instance.data.update(data) - self.log.debug("data: {}".format(json.dumps(data, indent=4))) def parse_options(self, render_globals): """Get all overrides with a value, skip those without. From 9472cbe271af4be7c0328bf05200f44c521c58ae Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 11:23:14 +0200 Subject: [PATCH 37/82] Fix submission --- .../collect_deadline_server_from_instance.py | 2 +- .../plugins/publish/submit_maya_deadline.py | 71 +++++++++---------- 2 files changed, 33 insertions(+), 40 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py index a7035cd99f..9981bead3e 100644 --- a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py +++ b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py @@ -13,7 +13,7 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.415 label = "Deadline Webservice from the Instance" - families = ["rendering"] + families = ["rendering", "renderlayer"] def process(self, instance): instance.data["deadlineUrl"] = self._collect_deadline_url(instance) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 873005e051..2afa1883c4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -27,7 +27,6 @@ import itertools from collections import OrderedDict import attr -import clique from maya import cmds @@ -111,7 +110,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add options from RenderGlobals render_globals = instance.data.get("renderGlobals", {}) - for key, value in render_globals: + for key, value in render_globals.items(): setattr(job_info, key, value) keys = [ @@ -143,13 +142,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, value=value) - # Enable double-click to preview rendered frames from Deadline Monitor - for filepath in instance.data["files"]: - dirname = os.path.dirname(filepath) - fname = os.path.basename(filepath) - job_info.OutputDirectory = dirname.replace("\\", "/") - job_info.OutputFilename = fname - # Adding file dependencies. 
if self.asset_dependencies: dependencies = instance.context.data["fileDependencies"] @@ -160,28 +152,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add list of expected files to job # --------------------------------- exp = instance.data.get("expectedFiles") - - def _get_output_filename(files): - col, rem = clique.assemble(files) - if not col and rem: - # we couldn't find any collections but have - # individual files. - assert len(rem) == 1, ( - "Found multiple non related files " - "to render, don't know what to do " - "with them.") - return rem[0] - else: - return col[0].format('{head}{padding}{tail}') - - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - for _aov, files in exp[0].items(): - output_file = _get_output_filename(files) - job_info.OutputFilename = output_file - else: - output_file = _get_output_filename(exp) - job_info.OutputFilename = output_file + for filepath in self._iter_expected_files(exp): + job_info.OutputDirectory = os.path.dirname(filepath) + job_info.OutputFilename = os.path.basename(filepath) return job_info @@ -194,6 +167,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): SceneFile=self.scene_path, Version=cmds.about(version=True), RenderLayer=instance.data['setMembers'], + Renderer=instance.data["renderer"], RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights"), # noqa ProjectPath=context.data["workspaceDir"], UsingRenderLayers=True, @@ -216,7 +190,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # TODO: Avoid the need for this logic here, needed for submit publish # Store output dir for unified publisher (filesequence) - output_dir = os.path.dirname(instance.data["files"][0]) + expected_files = instance.data["expectedFiles"] + first_file = next(self._iter_expected_files(expected_files)) + output_dir = os.path.dirname(first_file) instance.data["outputDir"] = output_dir instance.data["toBeRenderedOn"] = "deadline" @@ -247,17 +223,20 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "Vray Scene and Ass Scene options are mutually exclusive") if "vrayscene" in instance.data["families"]: + self.log.debug("Submitting V-Ray scene render..") vray_export_payload = self._get_vray_export_payload(payload_data) export_job = self.submit(vray_export_payload) payload = self._get_vray_render_payload(payload_data) elif "assscene" in instance.data["families"]: + self.log.debug("Submitting Arnold .ass standalone render..") ass_export_payload = self._get_arnold_export_payload(payload_data) export_job = self.submit(ass_export_payload) payload = self._get_arnold_render_payload(payload_data) else: + self.log.debug("Submitting MayaBatch render..") payload = self._get_maya_payload(payload_data) # Add export job as dependency -------------------------------------- @@ -274,6 +253,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.submit(self.assemble_payload(job_info, plugin_info)) def _tile_render(self, payload): + """Submit as tile render per frame with dependent assembly jobs.""" # As collected by super process() instance = self._instance @@ -315,7 +295,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): assembly_files = files # Define frame tile jobs - frame_jobs = {} + frame_file_hash = {} frame_payloads = {} file_index = 1 for file in files: @@ -343,9 +323,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): 
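PATCH 37 above swaps the clique-based output filename logic for the `_iter_expected_files` helper that the same patch adds further down. A minimal sketch of the flattening it performs, assuming hypothetical paths in place of `instance.data["expectedFiles"]`:

    def iter_expected_files(exp):
        # Same branching as MayaSubmitDeadline._iter_expected_files:
        # either a single dict of AOV -> file list, or a flat file list.
        if isinstance(exp[0], dict):
            for _aov, files in exp[0].items():
                for path in files:
                    yield path
        else:
            for path in exp:
                yield path

    # Hypothetical expectedFiles shapes:
    with_aovs = [{
        "beauty": ["renders/beauty.1001.exr", "renders/beauty.1002.exr"],
        "crypto": ["renders/crypto.1001.exr", "renders/crypto.1002.exr"],
    }]
    flat = ["renders/beauty.1001.exr", "renders/beauty.1002.exr"]

    print(list(iter_expected_files(with_aovs)))  # four paths, AOVs flattened
    print(list(iter_expected_files(flat)))       # two paths, unchanged

Each yielded path contributes one OutputDirectory/OutputFilename entry on the job info.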
self.log.info("hashing {} - {}".format(file_index, file)) job_hash = hashlib.sha256( ("{}_{}".format(file_index, file)).encode("utf-8")) - frame_jobs[frame] = job_hash.hexdigest() - new_job_info.ExtraInfo[0] = job_hash.hexdigest() + file_hash = job_hash.hexdigest() + frame_file_hash[frame] = file_hash + + new_job_info.ExtraInfo[0] = file_hash new_job_info.ExtraInfo[1] = file frame_payloads[frame] = self.assemble_payload( @@ -391,10 +373,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): REPL_FRAME_NUMBER, "\\1{}\\3".format("#" * len(frame)), file) - hash = frame_jobs[frame] + file_hash = frame_file_hash[frame] tile_job_id = frame_tile_job_id[frame] - frame_assembly_job_info.ExtraInfo[0] = hash + frame_assembly_job_info.ExtraInfo[0] = file_hash frame_assembly_job_info.ExtraInfo[1] = file frame_assembly_job_info.JobDependency = tile_job_id @@ -483,11 +465,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if int(rman_version.split(".")[0]) > 22: renderer = "renderman22" - plugin_info = { + plugin_info = copy.deepcopy(self.plugin_info) + plugin_info.update({ # Output directory and filename "OutputFilePath": data["dirname"].replace("\\", "/"), "OutputFilePrefix": layer_prefix, - } + }) return job_info, plugin_info @@ -710,6 +693,16 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): end=int(self._instance.data["frameEndHandle"]), ) + @staticmethod + def _iter_expected_files(exp): + if isinstance(exp[0], dict): + for _aov, files in exp[0].items(): + for file in files: + yield file + else: + for file in exp: + yield file + def _format_tiles( filename, index, tiles_x, tiles_y, From 227b8405479e3a87b507e63a9b59fe473d7c9276 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:12:54 +0200 Subject: [PATCH 38/82] Refactor AbstractSubmitDeadline vars to allow easier access to indices --- .../deadline/abstract_submit_deadline.py | 251 +++++++----------- .../plugins/publish/submit_maya_deadline.py | 35 ++- 2 files changed, 115 insertions(+), 171 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index a3db3feac9..427faec115 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -9,6 +9,7 @@ import os from abc import abstractmethod import platform import getpass +from functools import partial from collections import OrderedDict import six @@ -66,6 +67,58 @@ def requests_get(*args, **kwargs): return requests.get(*args, **kwargs) +class DeadlineIndexedVar(dict): + """ + + Allows to set and query values by integer indices: + Query: var[1] or var.get(1) + Set: var[1] = "my_value" + Append: var += "value" + + Note: Iterating the instance is not guarantueed to be the order of the + indices. 
To do so iterate with `sorted()` + + """ + def __init__(self, key): + self.__key = key + + def next_available_index(self): + # Add as first unused entry + i = 0 + while i in self.keys(): + i += 1 + return i + + def serialize(self): + key = self.__key + + # Allow custom location for index in serialized string + if "{}" not in key: + key = key + "{}" + + return { + key.format(index): value for index, value in sorted(self.items()) + } + + def update(self, data): + # Force the integer key check + for key, value in data.items(): + self.__setitem__(key, value) + + def __iadd__(self, other): + index = self.next_available_index() + self[index] = other + return self + + def __setitem__(self, key, value): + if not isinstance(key, int): + raise TypeError("Key must be an integer: {}".format(key)) + + if key < 0: + raise ValueError("Negative index can't be set: {}".format(key)) + dict.__setitem__(self, key, value) + + @attr.s class DeadlineJobInfo(object): """Mapping of all Deadline *JobInfo* attributes. @@ -218,24 +271,8 @@ class DeadlineJobInfo(object): # Environment # ---------------------------------------------- - _environmentKeyValue = attr.ib(factory=list) - - @property - def EnvironmentKeyValue(self): # noqa: N802 - """Return all environment key values formatted for Deadline. - - Returns: - dict: as `{'EnvironmentKeyValue0', 'key=value'}` - - """ - out = {} - for index, v in enumerate(self._environmentKeyValue): - out["EnvironmentKeyValue{}".format(index)] = v - return out - - @EnvironmentKeyValue.setter - def EnvironmentKeyValue(self, val): # noqa: N802 - self._environmentKeyValue.append(val) + EnvironmentKeyValue = attr.ib(factory=partial(DeadlineIndexedVar, + "EnvironmentKeyValue")) IncludeEnvironment = attr.ib(default=None) # Default: false UseJobEnvironmentOnly = attr.ib(default=None) # Default: false @@ -243,142 +280,29 @@ class DeadlineJobInfo(object): # Job Extra Info # ---------------------------------------------- - _extraInfos = attr.ib(factory=list) - _extraInfoKeyValues = attr.ib(factory=list) - - @property - def ExtraInfo(self): # noqa: N802 - """Return all ExtraInfo values formatted for Deadline. - - Returns: - dict: as `{'ExtraInfo0': 'value'}` - - """ - out = {} - for index, v in enumerate(self._extraInfos): - out["ExtraInfo{}".format(index)] = v - return out - - @ExtraInfo.setter - def ExtraInfo(self, val): # noqa: N802 - self._extraInfos.append(val) - - @property - def ExtraInfoKeyValue(self): # noqa: N802 - """Return all ExtraInfoKeyValue values formatted for Deadline. - - Returns: - dict: as {'ExtraInfoKeyValue0': 'key=value'}` - - """ - out = {} - for index, v in enumerate(self._extraInfoKeyValues): - out["ExtraInfoKeyValue{}".format(index)] = v - return out - - @ExtraInfoKeyValue.setter - def ExtraInfoKeyValue(self, val): # noqa: N802 - self._extraInfoKeyValues.append(val) + ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo")) + ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineIndexedVar, + "ExtraInfoKeyValue")) # Task Extra Info Names # ---------------------------------------------- OverrideTaskExtraInfoNames = attr.ib(default=None) # Default: false - _taskExtraInfos = attr.ib(factory=list) - - @property - def TaskExtraInfoName(self): # noqa: N802 - """Return all TaskExtraInfoName values formatted for Deadline. 
- - Returns: - dict: as `{'TaskExtraInfoName0': 'value'}` - - """ - out = {} - for index, v in enumerate(self._taskExtraInfos): - out["TaskExtraInfoName{}".format(index)] = v - return out - - @TaskExtraInfoName.setter - def TaskExtraInfoName(self, val): # noqa: N802 - self._taskExtraInfos.append(val) + TaskExtraInfoName = attr.ib(factory=partial(DeadlineIndexedVar, + "TaskExtraInfoName")) # Output # ---------------------------------------------- - _outputFilename = attr.ib(factory=list) - _outputFilenameTile = attr.ib(factory=list) - _outputDirectory = attr.ib(factory=list) - - @property - def OutputFilename(self): # noqa: N802 - """Return all OutputFilename values formatted for Deadline. - - Returns: - dict: as `{'OutputFilename0': 'filename'}` - - """ - out = {} - for index, v in enumerate(self._outputFilename): - out["OutputFilename{}".format(index)] = v - return out - - @OutputFilename.setter - def OutputFilename(self, val): # noqa: N802 - self._outputFilename.append(val) - - @property - def OutputFilenameTile(self): # noqa: N802 - """Return all OutputFilename#Tile values formatted for Deadline. - - Returns: - dict: as `{'OutputFilenme#Tile': 'tile'}` - - """ - out = {} - for index, v in enumerate(self._outputFilenameTile): - out["OutputFilename{}Tile".format(index)] = v - return out - - @OutputFilenameTile.setter - def OutputFilenameTile(self, val): # noqa: N802 - self._outputFilenameTile.append(val) - - @property - def OutputDirectory(self): # noqa: N802 - """Return all OutputDirectory values formatted for Deadline. - - Returns: - dict: as `{'OutputDirectory0': 'dir'}` - - """ - out = {} - for index, v in enumerate(self._outputDirectory): - out["OutputDirectory{}".format(index)] = v - return out - - @OutputDirectory.setter - def OutputDirectory(self, val): # noqa: N802 - self._outputDirectory.append(val) + OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar, + "OutputFilename")) + OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar, + "OutputFilename{}Tile")) + OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar, + "OutputDirectory")) # Asset Dependency # ---------------------------------------------- - _assetDependency = attr.ib(factory=list) - - @property - def AssetDependency(self): # noqa: N802 - """Return all OutputDirectory values formatted for Deadline. 
- - Returns: - dict: as `{'OutputDirectory0': 'dir'}` - - """ - out = {} - for index, v in enumerate(self._assetDependency): - out["AssetDependency{}".format(index)] = v - return out - - @OutputDirectory.setter - def AssetDependency(self, val): # noqa: N802 - self._assetDependency.append(val) + AssetDependency = attr.ib(factory=partial(DeadlineIndexedVar, + "AssetDependency")) # Tile Job # ---------------------------------------------- @@ -402,7 +326,7 @@ class DeadlineJobInfo(object): """ def filter_data(a, v): - if a.name.startswith("_"): + if isinstance(v, DeadlineIndexedVar): return False if v is None: return False @@ -410,16 +334,37 @@ class DeadlineJobInfo(object): serialized = attr.asdict( self, dict_factory=OrderedDict, filter=filter_data) - serialized.update(self.EnvironmentKeyValue) - serialized.update(self.ExtraInfo) - serialized.update(self.ExtraInfoKeyValue) - serialized.update(self.TaskExtraInfoName) - serialized.update(self.OutputFilename) - serialized.update(self.OutputFilenameTile) - serialized.update(self.OutputDirectory) - serialized.update(self.AssetDependency) + + # Custom serialize these attributes + for attribute in [ + self.EnvironmentKeyValue, + self.ExtraInfo, + self.ExtraInfoKeyValue, + self.TaskExtraInfoName, + self.OutputFilename, + self.OutputFilenameTile, + self.OutputDirectory, + self.AssetDependency + ]: + serialized.update(attribute.serialize()) + return serialized + def update(self, data): + """Update instance with data dict""" + for key, value in data.items(): + setattr(self, key, value) + + def __setattr__(self, key, value): + # Backwards compatibility: Allow appending to index vars by setting + # it on Job Info directly like: JobInfo.OutputFilename = filename + existing = getattr(self, key, None) + if isinstance(existing, DeadlineIndexedVar): + existing += value + return + + object.__setattr__(self, key, value) + @six.add_metaclass(AbstractMetaInstancePlugin) class AbstractSubmitDeadline(pyblish.api.InstancePlugin): diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 2afa1883c4..d979c92814 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -110,8 +110,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add options from RenderGlobals render_globals = instance.data.get("renderGlobals", {}) - for key, value in render_globals.items(): - setattr(job_info, key, value) + job_info.update(render_globals) keys = [ "FTRACK_API_KEY", @@ -257,8 +256,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # As collected by super process() instance = self._instance - job_info = copy.deepcopy(self.job_info) - plugin_info = copy.deepcopy(self.plugin_info) + + payload_job_info, payload_plugin_info = payload + job_info = copy.deepcopy(payload_job_info) + plugin_info = copy.deepcopy(payload_plugin_info) # if we have sequence of files, we need to create tile job for # every frame @@ -309,16 +310,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): new_plugin_info = copy.deepcopy(plugin_info) # Add tile data into job info and plugin info - tiles_out, _ = _format_tiles( + tiles_data = _format_tiles( file, 0, instance.data.get("tilesX"), instance.data.get("tilesY"), instance.data.get("resolutionWidth"), instance.data.get("resolutionHeight"), - payload["PluginInfo"]["OutputFilePrefix"] - ) - 
new_job_info.update(tiles_out["JobInfo"]) - new_plugin_info.update(tiles_out["PluginInfo"]) + payload_plugin_info["OutputFilePrefix"] + )[0] + + new_job_info.update(tiles_data["JobInfo"]) + new_plugin_info.update(tiles_data["PluginInfo"]) self.log.info("hashing {} - {}".format(file_index, file)) job_hash = hashlib.sha256( @@ -342,15 +344,13 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Submit frame tile jobs frame_tile_job_id = {} for frame, tile_job_payload in frame_payloads.items(): - response = self.submit(tile_job_payload) - job_id = response.json()["_id"] + job_id = self.submit(tile_job_payload) frame_tile_job_id[frame] = job_id # Define assembly payloads assembly_job_info = copy.deepcopy(job_info) assembly_job_info.Plugin = self.tile_assembler_plugin - assembly_job_info.Name = "{job.Name} - Tile Assembly Job".format( - job=job_info) + assembly_job_info.Name += " - Tile Assembly Job" assembly_job_info.Frames = 1 assembly_job_info.MachineLimit = 1 assembly_job_info.Priority = instance.data.get("tile_priority", @@ -411,10 +411,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance.data.get("tilesY"), instance.data.get("resolutionWidth"), instance.data.get("resolutionHeight"), - payload["PluginInfo"]["OutputFilePrefix"] + payload_plugin_info["OutputFilePrefix"] )[1] - sorted(tiles) - for k, v in tiles.items(): + for k, v in sorted(tiles.items()): print("{}={}".format(k, v), file=cf) payload = self.assemble_payload( @@ -431,8 +430,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.log.info("submitting assembly job {} of {}".format( i+1, len(assembly_payloads) )) - response = self.submit(payload) - assembly_job_ids.append(response.json()["_id"]) + assembly_job_id = self.submit(payload) + assembly_job_ids.append(assembly_job_id) instance.data["assemblySubmissionJobs"] = assembly_job_ids From a7293f2a4f6a30297eea15297f5b25100e11e9f1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:13:14 +0200 Subject: [PATCH 39/82] Fix indentation --- openpype/modules/deadline/abstract_submit_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 427faec115..e1bdcb10d9 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -295,7 +295,7 @@ class DeadlineJobInfo(object): OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar, "OutputFilename")) OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar, - "OutputFilename{}Tile")) + "OutputFilename{}Tile")) OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar, "OutputDirectory")) From 91a3d8494bf8b65fe37560a02018edf59433caa6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:13:39 +0200 Subject: [PATCH 40/82] Disable aux files for now since it's not supported by Deadline Webservice --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index d979c92814..7694e80e9a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -419,8 +419,9 @@ class 
MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): payload = self.assemble_payload( job_info=frame_assembly_job_info, plugin_info=assembly_plugin_info.copy(), + # todo: aux file transfers don't work with deadline webservice # add config file as job auxFile - aux_files=[config_file] + # aux_files=[config_file] ) assembly_payloads.append(payload) From 39d216797dacad00a3f42102e146c2806b9f8244 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:17:40 +0200 Subject: [PATCH 41/82] Force integer pixel values --- .../deadline/plugins/publish/submit_maya_deadline.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 7694e80e9a..3fbff0153b 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -765,10 +765,10 @@ def _format_tiles( tiles_x, tiles_y ) - top = int(height) - (tile_y * h_space) - bottom = int(height) - ((tile_y - 1) * h_space) - 1 - left = (tile_x - 1) * w_space - right = (tile_x * w_space) - 1 + top = int(height - (tile_y * h_space)) + bottom = int(height - ((tile_y - 1) * h_space) - 1) + left = int((tile_x - 1) * w_space) + right = int((tile_x * w_space) - 1) # Job Info new_filename = "{}/{}{}".format( From 2fb7cabca49bae51162e2a82dfdc7d225094ed36 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:39:28 +0200 Subject: [PATCH 42/82] Shush hound --- .../deadline/abstract_submit_deadline.py | 4 ++-- .../plugins/publish/submit_maya_deadline.py | 21 +++++++++++-------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index e1bdcb10d9..35b114da95 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -623,8 +623,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): for i in context: is_workfile = ( - "workfile" in i.data.get("families", []) or - i.data["family"] == "workfile" + "workfile" in i.data.get("families", []) or + i.data["family"] == "workfile" ) if not is_workfile: continue diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 3fbff0153b..1b69f8b4e9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -357,14 +357,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.tile_priority) assembly_plugin_info = { - "CleanupTiles": 1, - "ErrorOnMissing": True, - "Renderer": self._instance.data["renderer"] + "CleanupTiles": 1, + "ErrorOnMissing": True, + "Renderer": self._instance.data["renderer"] } assembly_payloads = [] output_dir = self.job_info.OutputDirectory[0] - for i, file in enumerate(assembly_files): + for file in assembly_files: frame = re.search(R_FRAME_NUMBER, file).group("frame") frame_assembly_job_info = copy.deepcopy(assembly_job_info) @@ -383,7 +383,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # write assembly job config files now = datetime.now() - config_file = os.path.join(output_dir, + config_file = os.path.join( + output_dir, "{}_config_{}.txt".format( os.path.splitext(file)[0], 
now.strftime("%Y_%m_%d_%H_%M_%S") @@ -427,10 +428,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Submit assembly jobs assembly_job_ids = [] + num_assemblies = len(assembly_payloads) for i, payload in enumerate(assembly_payloads): - self.log.info("submitting assembly job {} of {}".format( - i+1, len(assembly_payloads) - )) + self.log.info( + "submitting assembly job {} of {}".format(i + 1, + num_assemblies) + ) assembly_job_id = self.submit(payload) assembly_job_ids.append(assembly_job_id) @@ -682,7 +685,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): pf.writelines(scene_data) pf.truncate() self.log.info("Applied {} patch to scene.".format( - patches[i]["name"] + patches[i]["name"] )) def _job_info_label(self, label): From e9e01e3163079bc9f6c48fe633aed92592928328 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:43:18 +0200 Subject: [PATCH 43/82] Use update method --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 1b69f8b4e9..9692b136e9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -69,8 +69,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # todo: test whether this works for existing production cases # where custom jobInfo was stored in the project settings - for key, value in self.jobInfo.items(): - setattr(job_info, key, value) + job_info.update(self.jobInfo) instance = self._instance context = instance.context From 4bdd18cb817bbc58c1143e0e02442a9346ce9a1e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 11:50:14 +0200 Subject: [PATCH 44/82] Use DeadlineKeyValueVar for EnvironmentKeyValue on Job Info - To improve readability of code that sets the values --- .../deadline/abstract_submit_deadline.py | 58 +++++++++++++++---- .../publish/submit_aftereffects_deadline.py | 11 ++-- .../publish/submit_harmony_deadline.py | 10 ++-- .../plugins/publish/submit_maya_deadline.py | 7 +-- 4 files changed, 59 insertions(+), 27 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 35b114da95..beb1cd0fae 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -67,6 +67,43 @@ def requests_get(*args, **kwargs): return requests.get(*args, **kwargs) +class DeadlineKeyValueVar(dict): + """ + + Serializes dictionary key values as "{key}={value}" like Deadline uses + for EnvironmentKeyValue. + + As an example: + EnvironmentKeyValue0="A_KEY=VALUE_A" + EnvironmentKeyValue1="OTHER_KEY=VALUE_B" + + The keys are serialized in alphabetical order (sorted). 
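    For instance, with the two variables set as in the example below, the
    serialized result would be roughly
    {'EnvironmentKeyValue0': 'my_other_var=hello2',
     'EnvironmentKeyValue1': 'my_var=hello'},
    since "my_other_var" sorts before "my_var".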
+ + Example: + >>> var = DeadlineKeyValueVar("EnvironmentKeyValue") + >>> var["my_var"] = "hello" + >>> var["my_other_var"] = "hello2" + >>> var.serialize() + + + """ + def __init__(self, key): + super(DeadlineKeyValueVar, self).__init__() + self.__key = key + + def serialize(self): + key = self.__key + + # Allow custom location for index in serialized string + if "{}" not in key: + key = key + "{}" + + return { + key.format(index): "{}={}".format(var_key, var_value) + for index, (var_key, var_value) in enumerate(sorted(self.items())) + } + + class DeadlineIndexedVar(dict): """ @@ -80,15 +117,9 @@ class DeadlineIndexedVar(dict): """ def __init__(self, key): + super(DeadlineIndexedVar, self).__init__() self.__key = key - def next_available_index(self): - # Add as first unused entry - i = 0 - while i in self.keys(): - i += 1 - return i - def serialize(self): key = self.__key @@ -100,6 +131,13 @@ class DeadlineIndexedVar(dict): key.format(index): value for index, value in sorted(self.items()) } + def next_available_index(self): + # Add as first unused entry + i = 0 + while i in self.keys(): + i += 1 + return i + def update(self, data): # Force the integer key check for key, value in data.items(): @@ -271,7 +309,7 @@ class DeadlineJobInfo(object): # Environment # ---------------------------------------------- - EnvironmentKeyValue = attr.ib(factory=partial(DeadlineIndexedVar, + EnvironmentKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, "EnvironmentKeyValue")) IncludeEnvironment = attr.ib(default=None) # Default: false @@ -281,7 +319,7 @@ class DeadlineJobInfo(object): # Job Extra Info # ---------------------------------------------- ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo")) - ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineIndexedVar, + ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, "ExtraInfoKeyValue")) # Task Extra Info Names @@ -326,7 +364,7 @@ class DeadlineJobInfo(object): """ def filter_data(a, v): - if isinstance(v, DeadlineIndexedVar): + if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)): return False if v is None: return False diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index 1d68793d53..55acd92043 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -92,13 +92,12 @@ class AfterEffectsSubmitDeadline( environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) for key in keys: - val = environment.get(key) - if val: - dln_job_info.EnvironmentKeyValue = "{key}={value}".format( - key=key, - value=val) + value = environment.get(key) + if value: + dln_job_info.EnvironmentKeyValue[key] = value + # to recognize job from PYPE for turning Event On/Off - dln_job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + dln_job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1" return dln_job_info diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index 3f9c09b592..6327143623 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -284,14 +284,12 @@ class HarmonySubmitDeadline( environment = dict({key: os.environ[key] for key in keys if key in os.environ}, 
**legacy_io.Session) for key in keys: - val = environment.get(key) - if val: - job_info.EnvironmentKeyValue = "{key}={value}".format( - key=key, - value=val) + value = environment.get(key) + if value: + job_info.EnvironmentKeyValue[key] = value # to recognize job from PYPE for turning Event On/Off - job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1" return job_info diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 9692b136e9..ad46feea03 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -137,8 +137,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): for key, value in environment.items(): if not value: continue - job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, - value=value) + job_info.EnvironmentKeyValue[key] = value # Adding file dependencies. if self.asset_dependencies: @@ -538,9 +537,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): for key, value in envs.items(): if not value: continue - - job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, - value=value) + job_info.EnvironmentKeyValue[key] = value plugin_info.update({ "Version": "3.6", From 1e87c9d6d2c7338f8e53e8a06d9f1983056797b6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 12:02:39 +0200 Subject: [PATCH 45/82] Use DeadlineIndexedVar `__iadd__` functionality --- .../plugins/publish/submit_aftereffects_deadline.py | 4 ++-- .../deadline/plugins/publish/submit_maya_deadline.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index 55acd92043..0c1ffa6bd7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -67,9 +67,9 @@ class AfterEffectsSubmitDeadline( dln_job_info.Group = self.group dln_job_info.Department = self.department dln_job_info.ChunkSize = self.chunk_size - dln_job_info.OutputFilename = \ + dln_job_info.OutputFilename += \ os.path.basename(self._instance.data["expectedFiles"][0]) - dln_job_info.OutputDirectory = \ + dln_job_info.OutputDirectory += \ os.path.dirname(self._instance.data["expectedFiles"][0]) dln_job_info.JobDelay = "00:00:00" diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index ad46feea03..6b08f9894d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -144,14 +144,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): dependencies = instance.context.data["fileDependencies"] dependencies.append(context.data["currentFile"]) for dependency in dependencies: - job_info.AssetDependency = dependency + job_info.AssetDependency += dependency # Add list of expected files to job # --------------------------------- exp = instance.data.get("expectedFiles") for filepath in self._iter_expected_files(exp): - job_info.OutputDirectory = os.path.dirname(filepath) - job_info.OutputFilename = os.path.basename(filepath) + job_info.OutputDirectory += 
os.path.dirname(filepath) + job_info.OutputFilename += os.path.basename(filepath) return job_info @@ -443,7 +443,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if self.asset_dependencies: # Asset dependency to wait for at least the scene file to sync. - job_info.AssetDependency = self.scene_path + job_info.AssetDependency += self.scene_path # Get layer prefix render_products = self._instance.data["renderProducts"] From 2c01cb806d68aa04a733e16d4cfd1abb15f438fe Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 12:03:34 +0200 Subject: [PATCH 46/82] Remove backwards compatibility for append functionality in old style vars --- openpype/modules/deadline/abstract_submit_deadline.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index beb1cd0fae..f698b7688e 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -393,16 +393,6 @@ class DeadlineJobInfo(object): for key, value in data.items(): setattr(self, key, value) - def __setattr__(self, key, value): - # Backwards compatibility: Allow appending to index vars by setting - # it on Job Info directly like: JobInfo.OutputFilename = filename - existing = getattr(self, key, None) - if isinstance(existing, DeadlineIndexedVar): - existing += value - return - - object.__setattr__(self, key, value) - @six.add_metaclass(AbstractMetaInstancePlugin) class AbstractSubmitDeadline(pyblish.api.InstancePlugin): From 47164b36effa0f4986ccf15aa3a4967ccb014e26 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 12:11:35 +0200 Subject: [PATCH 47/82] Be more explicit about what keys to include from Session This way it matches more with logic of other host submitters (e.g. AfterEffects + Harmony) --- .../deadline/plugins/publish/submit_maya_deadline.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 6b08f9894d..bb48fe6902 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -130,15 +130,16 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) - # to recognize job from PYPE for turning Event On/Off - environment["OPENPYPE_RENDER_JOB"] = "1" - environment["OPENPYPE_LOG_NO_COLORS"] = "1" - - for key, value in environment.items(): + for key in keys: + value = environment.get(key) if not value: continue job_info.EnvironmentKeyValue[key] = value + # to recognize job from PYPE for turning Event On/Off + job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1" + job_info.EnvironmentKeyValue["OPENPYPE_LOG_NO_COLORS"] = "1" + # Adding file dependencies. 
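        # Each dependency below is appended with "+=", which stores it at the
        # next unused AssetDependency<index> slot of the DeadlineIndexedVar.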
if self.asset_dependencies: dependencies = instance.context.data["fileDependencies"] From 88e4798b535c47cefce3dc2a1ed9aacf60dd0f68 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 19:50:56 +0200 Subject: [PATCH 48/82] Remove old type hint --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index bb48fe6902..68d55fef5d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -641,7 +641,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): return result def _patch_workfile(self): - # type: (str, dict) -> [str, None] """Patch Maya scene. This will take list of patches (lines to add) and apply them to From 5645bcb353b13b1711ba67e0a3b394b273e7cef3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 3 Sep 2022 13:17:10 +0200 Subject: [PATCH 49/82] Use custom plugin info per type of plugin submission --- .../plugins/publish/submit_maya_deadline.py | 127 +++++++++--------- 1 file changed, 66 insertions(+), 61 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 68d55fef5d..2a41d92efd 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -49,6 +49,30 @@ class MayaPluginInfo: RenderSetupIncludeLights = attr.ib(default=None) # Include all lights flag +@attr.s +class PythonPluginInfo: + ScriptFile = attr.ib() + Version = attr.ib(default="3.6") + Arguments = attr.ib(default=None) + SingleFrameOnly = attr.ib(default=None) + + +@attr.s +class VRayPluginInfo: + InputFilename = attr.ib(default=None) # Input + SeparateFilesPerFrame = attr.ib(default=None) + VRayEngine = attr.ib(default="V-Ray") + Width = attr.ib(default=None) + Height = attr.ib(default=None) # Mandatory for Deadline + OutputFilePath = attr.ib(default=True) + OutputFileName = attr.ib(default=None) # Render only this layer + + +@attr.s +class ArnoldPluginInfo: + ArnoldFile = attr.ib(default=None) + + class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): label = "Submit Render to Deadline" @@ -479,26 +503,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): def _get_vray_export_payload(self, data): job_info = copy.deepcopy(self.job_info) - job_info.Name = self._job_info_label("Export") # Get V-Ray settings info to compute output path - vray_settings = cmds.ls(type="VRaySettingsNode") - node = vray_settings[0] - template = cmds.getAttr("{}.vrscene_filename".format(node)) - scene, _ = os.path.splitext(data["filename"]) - first_file = self.format_vray_output_filename(scene, template) - first_file = "{}/{}".format(data["workspace"], first_file) - output = os.path.dirname(first_file) + vray_scene = self.format_vray_output_filename() plugin_info = { "Renderer": "vray", "SkipExistingFrames": True, "UseLegacyRenderLayers": True, - "OutputFilePath": output + "OutputFilePath": os.path.dirname(vray_scene) } - return job_info, plugin_info + return job_info, attr.asdict(plugin_info) def _get_arnold_export_payload(self, data): @@ -515,8 +532,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): script = os.path.normpath(module_path) job_info 
= copy.deepcopy(self.job_info) - plugin_info = copy.deepcopy(self.plugin_info) - job_info.Name = self._job_info_label("Export") # Force a single frame Python job @@ -540,14 +555,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): continue job_info.EnvironmentKeyValue[key] = value - plugin_info.update({ - "Version": "3.6", - "ScriptFile": script, - "Arguments": "", - "SingleFrameOnly": "True", - }) + plugin_info = PythonPluginInfo( + ScriptFile=script, + Version="3.6", + Arguments="", + SingleFrameOnly="True" + ) - return job_info, plugin_info + return job_info, attr.asdict(plugin_info) def _get_vray_render_payload(self, data): @@ -558,27 +573,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.OverrideTaskExtraInfoNames = False # Plugin Info - vray_settings = cmds.ls(type="VRaySettingsNode") - node = vray_settings[0] - template = cmds.getAttr("{}.vrscene_filename".format(node)) - # "vrayscene//_/" + plugin_info = VRayPluginInfo( + InputFilename=self.format_vray_output_filename(), + SeparateFilesPerFrame=False, + VRayEngine="V-Ray", + Width=self._instance.data["resolutionWidth"], + Height=self._instance.data["resolutionHeight"], + OutputFilePath=job_info.OutputDirectory[0], + OutputFileName=job_info.OutputFilename[0] + ) - scene, _ = os.path.splitext(self.scene_path) - first_file = self.format_vray_output_filename(scene, template) - first_file = "{}/{}".format(data["workspace"], first_file) - - plugin_info = { - "InputFilename": first_file, - "SeparateFilesPerFrame": True, - "VRayEngine": "V-Ray", - - "Width": self._instance.data["resolutionWidth"], - "Height": self._instance.data["resolutionHeight"], - "OutputFilePath": job_info.OutputDirectory[0], - "OutputFileName": job_info.OutputFilename[0] - } - - return job_info, plugin_info + return job_info, attr.asdict(plugin_info) def _get_arnold_render_payload(self, data): @@ -590,55 +595,55 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Plugin Info ass_file, _ = os.path.splitext(data["output_filename_0"]) - first_file = ass_file + ".ass" - plugin_info = { - "ArnoldFile": first_file, - } + ass_filepath = ass_file + ".ass" - return job_info, plugin_info + plugin_info = ArnoldPluginInfo( + ArnoldFile=ass_filepath + ) - def format_vray_output_filename(self, filename, template, dir=False): + return job_info, attr.asdict(plugin_info) + + def format_vray_output_filename(self): """Format the expected output file of the Export job. 
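        The <Scene> and <Layer> tokens of the V-Ray scene filename template
        (assumed here to be the default "<Scene>/<Scene>_<Layer>/<Layer>"
        style) are replaced with the current workfile name and render layer,
        then the first frame number and the ".vrscene" extension are appended.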
Example: /_/ - "shot010_v006/shot010_v006_CHARS/CHARS" - - Args: - instance: - filename(str): - dir(bool): - + "shot010_v006/shot010_v006_CHARS/CHARS_0001.vrscene" Returns: str """ + + # "vrayscene//_/" + vray_settings = cmds.ls(type="VRaySettingsNode") + node = vray_settings[0] + template = cmds.getAttr("{}.vrscene_filename".format(node)) + scene, _ = os.path.splitext(self.scene_path) + def smart_replace(string, key_values): new_string = string for key, value in key_values.items(): new_string = new_string.replace(key, value) return new_string - # Ensure filename has no extension - file_name, _ = os.path.splitext(filename) + # Get workfile scene path without extension to format vrscene_filename + scene_filename = os.path.basename(self.scene_path) + scene_filename_no_ext, _ = os.path.splitext(scene_filename) layer = self._instance.data['setMembers'] # Reformat without tokens output_path = smart_replace( template, - {"": file_name, + {"": scene_filename_no_ext, "": layer}) - if dir: - return output_path.replace("\\", "/") - start_frame = int(self._instance.data["frameStartHandle"]) + workspace = self._instance.context.data["workspace"] filename_zero = "{}_{:04d}.vrscene".format(output_path, start_frame) + filepath_zero = os.path.join(workspace, filename_zero) - result = filename_zero.replace("\\", "/") - - return result + return filepath_zero.replace("\\", "/") def _patch_workfile(self): """Patch Maya scene. From 507dac4aa9f50e8978a841067262ce33e77cf5e0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 3 Sep 2022 15:06:24 +0200 Subject: [PATCH 50/82] Ensure integer math for _format_tiles See #3758 --- .../plugins/publish/submit_maya_deadline.py | 21 ++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 2a41d92efd..7c486b7c34 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -754,14 +754,21 @@ def _format_tiles( used for assembler configuration. """ - tile = 0 + # Math used requires integers for correct output - as such + # we ensure our inputs are correct. 
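    # A rough worked example (hypothetical 1920x1080 render split 2x2):
    # w_space = 1920 // 2 = 960 and h_space = 1080 // 2 = 540, so the tile
    # at tile_x=1, tile_y=2 spans Top=0, Bottom=539, Left=0, Right=959.
    # All values stay plain ints, so no float coordinates reach the job file.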
+ assert type(tiles_x) is int, "tiles_x must be an integer" + assert type(tiles_y) is int, "tiles_y must be an integer" + assert type(width) is int, "width must be an integer" + assert type(height) is int, "height must be an integer" + out = {"JobInfo": {}, "PluginInfo": {}} cfg = OrderedDict() - w_space = width / tiles_x - h_space = height / tiles_y + w_space = width // tiles_x + h_space = height // tiles_y cfg["TilesCropped"] = "False" + tile = 0 for tile_x in range(1, tiles_x + 1): for tile_y in reversed(range(1, tiles_y + 1)): tile_prefix = "_tile_{}x{}_{}x{}_".format( @@ -769,10 +776,10 @@ def _format_tiles( tiles_x, tiles_y ) - top = int(height - (tile_y * h_space)) - bottom = int(height - ((tile_y - 1) * h_space) - 1) - left = int((tile_x - 1) * w_space) - right = int((tile_x * w_space) - 1) + top = height - (tile_y * h_space) + bottom = height - ((tile_y - 1) * h_space) - 1 + left = (tile_x - 1) * w_space + right = (tile_x * w_space) - 1 # Job Info new_filename = "{}/{}{}".format( From d9a150022e1659aec584fe962f9c47e66bfb178d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Sep 2022 19:28:48 +0800 Subject: [PATCH 51/82] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/api/pipeline.py | 4 ++-- openpype/hosts/maya/lib.py | 18 ++++++++++++++++++ .../defaults/project_settings/maya.json | 14 ++++++++++++++ .../projects_schema/schema_project_maya.json | 15 +++++++++++++++ 4 files changed, 49 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index f565f6a308..5bf8b67fc2 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -28,7 +28,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -from openpype.hosts.maya.lib import copy_workspace_mel +from openpype.hosts.maya.lib import copy_workspace_mel,load_workspace_mel from . 
import menu, lib from .workio import ( open_file, @@ -550,7 +550,7 @@ def on_task_changed(): def before_workfile_save(event): workdir_path = event["workdir_path"] if workdir_path: - copy_workspace_mel(workdir_path) + load_workspace_mel(workdir_path) class MayaDirmap(HostDirmap): diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index 6c142053e6..d24f267bbd 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,6 +1,8 @@ import os import shutil +import json +from openpype.settings import get_current_project_settings def copy_workspace_mel(workdir): # Check that source mel exists @@ -24,3 +26,19 @@ def copy_workspace_mel(workdir): src_filepath, dst_filepath )) shutil.copy(src_filepath, dst_filepath) + + +def load_workspace_mel(workdir): + dst_filepath = os.path.join(workdir, "workspace.mel") + if os.path.exists(dst_filepath): + return + + if not os.path.exists(workdir): + os.makedirs(workdir) + + with open(dst_filepath, "w") as mel_file: + setting = get_current_project_settings() + mel_script = setting["maya"]["mel-workspace"]["scripts"] + for mel in mel_script: + mel_file.write(mel) + mel_file.write("\n") diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index ac0f161cf2..0a46632042 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -15,6 +15,20 @@ "destination-path": [] } }, + "mel-workspace":{ + "scripts":[ + "workspace -fr \"shaders\" \"renderData/shaders\";", + "workspace -fr \"images\" \"renders\";", + "workspace -fr \"particles\" \"particles\";", + "workspace -fr \"mayaAscii\" \"\";", + "workspace -fr \"mayaBinary\" \"\";", + "workspace -fr \"scene\" \"\";", + "workspace -fr \"alembicCache\" \"cache/alembic\";", + "workspace -fr \"renderData\" \"renderData\";", + "workspace -fr \"sourceImages\" \"sourceimages\";", + "workspace -fr \"fileCache\" \"cache/nCache\";" + ] + }, "scriptsmenu": { "name": "OpenPype Tools", "definition": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index cb380194a7..a774d604ca 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -53,6 +53,21 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "mel-workspace", + "label": "Maya MEL Workspace", + "is_group": true, + "children": [ + { + "type": "list", + "object_type": "text", + "key": "scripts", + "label": "scripts" + } + ] + }, { "type": "schema", "name": "schema_scriptsmenu" From b88def9aea1fc1a682209ea78edcf5ae87a652e3 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Sep 2022 19:40:30 +0800 Subject: [PATCH 52/82] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/lib.py | 27 +------------------ .../defaults/project_settings/maya.json | 2 +- .../projects_schema/schema_project_maya.json | 4 +-- 3 files changed, 4 insertions(+), 29 deletions(-) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index d24f267bbd..bf06c9ad7d 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,33 +1,8 @@ import os import shutil -import json from openpype.settings import get_current_project_settings -def copy_workspace_mel(workdir): - # Check that source mel exists - current_dir = 
os.path.dirname(os.path.abspath(__file__)) - src_filepath = os.path.join(current_dir, "resources", "workspace.mel") - if not os.path.exists(src_filepath): - print("Source mel file does not exist. {}".format(src_filepath)) - return - - # Skip if workspace.mel already exists - dst_filepath = os.path.join(workdir, "workspace.mel") - if os.path.exists(dst_filepath): - return - - # Create workdir if does not exists yet - if not os.path.exists(workdir): - os.makedirs(workdir) - - # Copy file - print("Copying workspace mel \"{}\" -> \"{}\"".format( - src_filepath, dst_filepath - )) - shutil.copy(src_filepath, dst_filepath) - - def load_workspace_mel(workdir): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): @@ -38,7 +13,7 @@ def load_workspace_mel(workdir): with open(dst_filepath, "w") as mel_file: setting = get_current_project_settings() - mel_script = setting["maya"]["mel-workspace"]["scripts"] + mel_script = setting["maya"]["mel-workspace"]["definition"] for mel in mel_script: mel_file.write(mel) mel_file.write("\n") diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 0a46632042..162732280f 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -16,7 +16,7 @@ } }, "mel-workspace":{ - "scripts":[ + "definition":[ "workspace -fr \"shaders\" \"renderData/shaders\";", "workspace -fr \"images\" \"renders\";", "workspace -fr \"particles\" \"particles\";", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index a774d604ca..7204ec586a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -63,8 +63,8 @@ { "type": "list", "object_type": "text", - "key": "scripts", - "label": "scripts" + "key": "definition", + "label": "definition" } ] }, From 109abb58987b22f6d390d424a27e209eff6b5638 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Sep 2022 19:41:09 +0800 Subject: [PATCH 53/82] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 5bf8b67fc2..4768a9ee4f 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -28,7 +28,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -from openpype.hosts.maya.lib import copy_workspace_mel,load_workspace_mel +from openpype.hosts.maya.lib import load_workspace_mel from . 
import menu, lib from .workio import ( open_file, From 69d2cf20f5b4889ce674487d2da8fd2a230a093a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Sep 2022 19:42:10 +0800 Subject: [PATCH 54/82] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index bf06c9ad7d..2853789656 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -3,6 +3,7 @@ import shutil from openpype.settings import get_current_project_settings + def load_workspace_mel(workdir): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): From 26fbdac8da117c83a71b75ce6315be4044d23942 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 16:14:30 +0800 Subject: [PATCH 55/82] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/api/pipeline.py | 5 +++-- openpype/hosts/maya/hooks/pre_copy_mel.py | 5 +++-- openpype/hosts/maya/lib.py | 19 ++++++++++--------- .../defaults/project_anatomy/attributes.json | 3 +-- .../defaults/project_settings/maya.json | 15 +-------------- .../projects_schema/schema_project_maya.json | 17 ++++------------- 6 files changed, 22 insertions(+), 42 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 4768a9ee4f..4578d6fb39 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -28,7 +28,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -from openpype.hosts.maya.lib import load_workspace_mel +from openpype.hosts.maya.lib import create_workspace_mel from . 
import menu, lib from .workio import ( open_file, @@ -548,9 +548,10 @@ def on_task_changed(): def before_workfile_save(event): + project_name = os.getenv("AVALON_PROJECT") workdir_path = event["workdir_path"] if workdir_path: - load_workspace_mel(workdir_path) + create_workspace_mel(workdir_path, project_name) class MayaDirmap(HostDirmap): diff --git a/openpype/hosts/maya/hooks/pre_copy_mel.py b/openpype/hosts/maya/hooks/pre_copy_mel.py index b11e18241e..6f90af4b7c 100644 --- a/openpype/hosts/maya/hooks/pre_copy_mel.py +++ b/openpype/hosts/maya/hooks/pre_copy_mel.py @@ -1,5 +1,5 @@ from openpype.lib import PreLaunchHook -from openpype.hosts.maya.lib import copy_workspace_mel +from openpype.hosts.maya.lib import create_workspace_mel class PreCopyMel(PreLaunchHook): @@ -10,9 +10,10 @@ class PreCopyMel(PreLaunchHook): app_groups = ["maya"] def execute(self): + project_name = self.launch_context.env.get("AVALON_PROJECT") workdir = self.launch_context.env.get("AVALON_WORKDIR") if not workdir: self.log.warning("BUG: Workdir is not filled.") return - copy_workspace_mel(workdir) + create_workspace_mel(workdir, project_name) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index 2853789656..443bf7d10e 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,10 +1,8 @@ import os -import shutil - -from openpype.settings import get_current_project_settings +from openpype.settings import get_project_settings -def load_workspace_mel(workdir): +def create_workspace_mel(workdir, project_name): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): return @@ -12,9 +10,12 @@ def load_workspace_mel(workdir): if not os.path.exists(workdir): os.makedirs(workdir) + project_setting = get_project_settings(project_name) + mel_script = project_setting["maya"].get("mel_workspace") + + # Skip if mel script in settings is empty + if not mel_script: + return + with open(dst_filepath, "w") as mel_file: - setting = get_current_project_settings() - mel_script = setting["maya"]["mel-workspace"]["definition"] - for mel in mel_script: - mel_file.write(mel) - mel_file.write("\n") + mel_file.write(mel_script) diff --git a/openpype/settings/defaults/project_anatomy/attributes.json b/openpype/settings/defaults/project_anatomy/attributes.json index 983ac603f9..bf8bbef8de 100644 --- a/openpype/settings/defaults/project_anatomy/attributes.json +++ b/openpype/settings/defaults/project_anatomy/attributes.json @@ -19,8 +19,7 @@ "blender/2-91", "harmony/20", "photoshop/2021", - "aftereffects/2021", - "unreal/4-26" + "aftereffects/2021" ], "tools_env": [], "active": true diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 162732280f..ada69c3730 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -15,20 +15,7 @@ "destination-path": [] } }, - "mel-workspace":{ - "definition":[ - "workspace -fr \"shaders\" \"renderData/shaders\";", - "workspace -fr \"images\" \"renders\";", - "workspace -fr \"particles\" \"particles\";", - "workspace -fr \"mayaAscii\" \"\";", - "workspace -fr \"mayaBinary\" \"\";", - "workspace -fr \"scene\" \"\";", - "workspace -fr \"alembicCache\" \"cache/alembic\";", - "workspace -fr \"renderData\" \"renderData\";", - "workspace -fr \"sourceImages\" \"sourceimages\";", - "workspace -fr \"fileCache\" \"cache/nCache\";" - ] - }, + "mel_workspace": "workspace -fr \"shaders\" 
\"renderData/shaders\";\nworkspace -fr \"images\" \"renders\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", "scriptsmenu": { "name": "OpenPype Tools", "definition": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index 7204ec586a..978de56a51 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -54,19 +54,10 @@ ] }, { - "type": "dict", - "collapsible": true, - "key": "mel-workspace", - "label": "Maya MEL Workspace", - "is_group": true, - "children": [ - { - "type": "list", - "object_type": "text", - "key": "definition", - "label": "definition" - } - ] + "type": "text", + "multiline" : true, + "key": "mel_workspace", + "label": "Maya MEL Workspace" }, { "type": "schema", From a9b69536cac401221cacaaa3155c4f9a7be682b8 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 16:44:29 +0800 Subject: [PATCH 56/82] adding and loading maya mel workspace through openpype project setting --- openpype/settings/defaults/project_anatomy/attributes.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_anatomy/attributes.json b/openpype/settings/defaults/project_anatomy/attributes.json index bf8bbef8de..983ac603f9 100644 --- a/openpype/settings/defaults/project_anatomy/attributes.json +++ b/openpype/settings/defaults/project_anatomy/attributes.json @@ -19,7 +19,8 @@ "blender/2-91", "harmony/20", "photoshop/2021", - "aftereffects/2021" + "aftereffects/2021", + "unreal/4-26" ], "tools_env": [], "active": true From decc11251854f60db02531f93e2b8fbd4d3fa7ec Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 17:06:47 +0800 Subject: [PATCH 57/82] load and edit mel workspace within the Openpype project settings --- .../settings/defaults/project_settings/maya.json | 2 +- .../schemas/projects_schema/schema_project_maya.json | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index ada69c3730..bb96fcf741 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -1,4 +1,5 @@ { + "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", "ext_mapping": { "model": "ma", "mayaAscii": "ma", @@ -15,7 +16,6 @@ "destination-path": [] } }, - "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" 
\"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", "scriptsmenu": { "name": "OpenPype Tools", "definition": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index 978de56a51..a54f8e6e4f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -5,6 +5,12 @@ "label": "Maya", "is_file": true, "children": [ + { + "type": "text", + "multiline" : true, + "key": "mel_workspace", + "label": "Maya MEL Workspace" + }, { "type": "dict-modifiable", "key": "ext_mapping", @@ -53,12 +59,6 @@ } ] }, - { - "type": "text", - "multiline" : true, - "key": "mel_workspace", - "label": "Maya MEL Workspace" - }, { "type": "schema", "name": "schema_scriptsmenu" From d4eeabad7e0883bb2f2d31e0b8479e4b547ba7cc Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 21:17:49 +0800 Subject: [PATCH 58/82] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/api/pipeline.py | 6 +++--- openpype/hosts/maya/lib.py | 5 +++-- .../schemas/projects_schema/schema_project_maya.json | 1 + 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 4578d6fb39..6012d82263 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -59,7 +59,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): self._op_events = {} def install(self): - project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) + project_settings = get_project_settings(legacy_io.active_project()) # process path mapping dirmap_processor = MayaDirmap("maya", project_settings) dirmap_processor.process_dirmap() @@ -536,7 +536,7 @@ def on_task_changed(): lib.update_content_on_context_change() msg = " project: {}\n asset: {}\n task:{}".format( - legacy_io.Session["AVALON_PROJECT"], + legacy_io.active_project(), legacy_io.Session["AVALON_ASSET"], legacy_io.Session["AVALON_TASK"] ) @@ -548,7 +548,7 @@ def on_task_changed(): def before_workfile_save(event): - project_name = os.getenv("AVALON_PROJECT") + project_name = legacy_io.active_project() workdir_path = event["workdir_path"] if workdir_path: create_workspace_mel(workdir_path, project_name) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index 443bf7d10e..e466850810 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,6 +1,6 @@ import os from openpype.settings import get_project_settings - +from openpype.api import Logger def create_workspace_mel(workdir, project_name): dst_filepath = os.path.join(workdir, "workspace.mel") @@ -15,7 +15,8 @@ def create_workspace_mel(workdir, project_name): # Skip if mel script in settings is empty if not mel_script: - return + log = Logger.get_logger("create_workspace_mel") + log.debug("File 'workspace.mel' not created. 
Settings value is empty.") with open(dst_filepath, "w") as mel_file: mel_file.write(mel_script) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index a54f8e6e4f..72c974642f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -8,6 +8,7 @@ { "type": "text", "multiline" : true, + "use_label_wrap": true, "key": "mel_workspace", "label": "Maya MEL Workspace" }, From a6d7df1423fdd5ec37744a34042e01a76776f263 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 21:19:15 +0800 Subject: [PATCH 59/82] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index e466850810..e07e174dd6 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -2,6 +2,7 @@ import os from openpype.settings import get_project_settings from openpype.api import Logger + def create_workspace_mel(workdir, project_name): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): From b7256e7c19ba376e438a88ca5d0b4a9609a44423 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 21:49:02 +0800 Subject: [PATCH 60/82] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index e07e174dd6..6f7bb8f986 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,6 +1,6 @@ import os from openpype.settings import get_project_settings -from openpype.api import Logger +from openpype.lib import Logger def create_workspace_mel(workdir, project_name): From 9bcd86bac7ca0294fc41bc3d2465166b3b5e8861 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 22:12:16 +0800 Subject: [PATCH 61/82] load and edit mel workspace within the Openpype project settings --- openpype/hosts/maya/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index 6f7bb8f986..ffb2f0b27c 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -18,6 +18,7 @@ def create_workspace_mel(workdir, project_name): if not mel_script: log = Logger.get_logger("create_workspace_mel") log.debug("File 'workspace.mel' not created. 
Settings value is empty.") + return with open(dst_filepath, "w") as mel_file: mel_file.write(mel_script) From 49dff63f08207eea0218cf37e4824795d08e3895 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 17:50:55 +0200 Subject: [PATCH 62/82] Fix detection of workfile instance --- openpype/modules/deadline/abstract_submit_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index f698b7688e..512ff800ee 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -519,7 +519,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): instance = self._instance workfile_instance = self._get_workfile_instance(instance.context) - if not workfile_instance: + if workfile_instance is None: return # determine published path from Anatomy. From d0665c3928ecff4176f6e22b676384f09485c173 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 9 Sep 2022 13:06:12 +0200 Subject: [PATCH 63/82] Change `mayaascii` -> `mayaAscii` --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 2 +- openpype/settings/defaults/project_settings/ftrack.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 6024781d87..7e5815b100 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -35,7 +35,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): family_mapping = { "camera": "cam", "look": "look", - "mayaascii": "scene", + "mayaAscii": "scene", "model": "geo", "rig": "rig", "setdress": "setdress", diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 09b194e21c..cdf861df4a 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -455,7 +455,7 @@ "family_mapping": { "camera": "cam", "look": "look", - "mayaascii": "scene", + "mayaAscii": "scene", "model": "geo", "rig": "rig", "setdress": "setdress", From e76ec9e5aff5651070bcc817e9c0b60a9a980ce4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 12 Sep 2022 16:26:35 +0800 Subject: [PATCH 64/82] adding and loading mel workspace within openpype settings --- openpype/hosts/maya/resources/workspace.mel | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 openpype/hosts/maya/resources/workspace.mel diff --git a/openpype/hosts/maya/resources/workspace.mel b/openpype/hosts/maya/resources/workspace.mel deleted file mode 100644 index f7213fa4f6..0000000000 --- a/openpype/hosts/maya/resources/workspace.mel +++ /dev/null @@ -1,11 +0,0 @@ -//Maya 2018 Project Definition - -workspace -fr "shaders" "renderData/shaders"; -workspace -fr "alembicCache" "cache/alembic"; -workspace -fr "mayaAscii" ""; -workspace -fr "mayaBinary" ""; -workspace -fr "renderData" "renderData"; -workspace -fr "fileCache" "cache/nCache"; -workspace -fr "scene" ""; -workspace -fr "sourceImages" "sourceimages"; -workspace -fr "images" "renders"; From c6ad515682944690d15532cd446fae2d8c93a570 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Mon, 12 Sep 2022 09:46:17 +0000 Subject: [PATCH 65/82] [Automated] Bump version --- CHANGELOG.md | 16 
++++++---------- openpype/version.py | 2 +- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ffb6a996b..cccfc2eded 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## [3.14.2-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.2-nightly.5](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...HEAD) @@ -8,7 +8,6 @@ - Nuke: Build workfile by template [\#3763](https://github.com/pypeclub/OpenPype/pull/3763) - Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) -- Global: making collect audio plugin global [\#3679](https://github.com/pypeclub/OpenPype/pull/3679) **🚀 Enhancements** @@ -18,6 +17,7 @@ - Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) - SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) - Kitsu: Drop 'entities root' setting. [\#3739](https://github.com/pypeclub/OpenPype/pull/3739) +- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) **🐛 Bug fixes** @@ -42,6 +42,8 @@ - General: Move queries of asset and representation links [\#3770](https://github.com/pypeclub/OpenPype/pull/3770) - General: Move create project folders to pipeline [\#3768](https://github.com/pypeclub/OpenPype/pull/3768) - General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) +- Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) +- General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) - General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) @@ -65,12 +67,12 @@ **🚀 Enhancements** - General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750) -- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) - Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) - General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) - Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) - Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) - General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) +- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) **🐛 Bug fixes** @@ -84,7 +86,7 @@ - Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) - Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) - PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) -- Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682) +- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) **🔀 Refactored code** @@ -117,15 +119,9 @@ [Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) -**🚀 Enhancements** - -- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) -- Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) - **🐛 Bug fixes** - General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) -- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) diff --git a/openpype/version.py b/openpype/version.py index 142bd51a30..c5dc4ee581 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.2-nightly.4" +__version__ = "3.14.2-nightly.5" From 162370e1ad1291cbbf3eca65266c226ccd119aca Mon Sep 17 00:00:00 2001 From: OpenPype Date: Mon, 12 Sep 2022 09:56:59 +0000 Subject: [PATCH 66/82] [Automated] Release --- CHANGELOG.md | 15 +++++++++------ openpype/version.py | 2 +- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cccfc2eded..46bf56f5bd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ # Changelog -## [3.14.2-nightly.5](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...3.14.2) **🆕 New features** @@ -45,10 +45,11 @@ - Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) - General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) -- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) +- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) - Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) - Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) +- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) - Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) **Merged pull requests:** @@ -72,7 +73,6 @@ - Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) - Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) - General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) -- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) **🐛 Bug fixes** @@ -91,13 +91,12 @@ **🔀 Refactored code** - General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) +- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) - Webpublisher: Webpublisher is used as addon 
[\#3740](https://github.com/pypeclub/OpenPype/pull/3740) -- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) - Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) - General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) - AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) -- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) - AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) - General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) - Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) @@ -119,6 +118,10 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) +**🚀 Enhancements** + +- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) + **🐛 Bug fixes** - General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) diff --git a/openpype/version.py b/openpype/version.py index c5dc4ee581..8469b1712a 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.2-nightly.5" +__version__ = "3.14.2" From 6e2ffc1e5ceb134f17fcedd1646f2cec0014a43a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 10:33:41 +0200 Subject: [PATCH 67/82] Remove getting project name and settings twice --- openpype/hosts/maya/api/pipeline.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index acd8a55aa4..45c52cd0d5 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -66,8 +66,6 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): project_name = legacy_io.active_project() project_settings = get_project_settings(project_name) # process path mapping - project_name = legacy_io.active_project() - project_settings = get_project_settings(project_name) dirmap_processor = MayaDirmap("maya", project_name, project_settings) dirmap_processor.process_dirmap() From b14ab9f2aff93fce85cec7b6403183a0b7dcf511 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 12:31:44 +0200 Subject: [PATCH 68/82] added publisher to host tools --- openpype/tools/utils/host_tools.py | 37 ++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 52d15a59f7..3177ed35aa 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -32,6 +32,7 @@ class HostToolsHelper: self._workfiles_tool = None self._loader_tool = None self._creator_tool = None + self._publisher_tool = None self._subset_manager_tool = None self._scene_inventory_tool = None self._library_loader_tool = None @@ -205,6 +206,7 @@ class HostToolsHelper: pyblish_show = self._discover_pyblish_gui() return pyblish_show(parent) + def _discover_pyblish_gui(self): """Return the most desirable of the currently registered GUIs""" # Prefer last registered @@ -269,6 +271,30 @@ class HostToolsHelper: dialog.activateWindow() dialog.showNormal() + def get_publisher_tool(self, parent): + """Create, cache and return scene inventory tool window.""" + if self._scene_inventory_tool 
is None: + from openpype.tools.publisher import PublisherWindow + + host = registered_host() + ILoadHost.validate_load_methods(host) + + publisher_window = PublisherWindow( + parent=parent or self._parent + ) + self._publisher_tool = publisher_window + + return self._publisher_tool + + def show_publisher_tool(self, parent=None): + with qt_app_context(): + dialog = self.get_publisher_tool(parent) + + dialog.show() + dialog.raise_() + dialog.activateWindow() + dialog.showNormal() + def get_tool_by_name(self, tool_name, parent=None, *args, **kwargs): """Show tool by it's name. @@ -298,6 +324,10 @@ class HostToolsHelper: elif tool_name == "publish": self.log.info("Can't return publish tool window.") + # "new" publisher + elif tool_name == "publisher": + return self.get_publisher_tool(parent, *args, **kwargs) + elif tool_name == "experimental_tools": return self.get_experimental_tools_dialog(parent, *args, **kwargs) @@ -335,6 +365,9 @@ class HostToolsHelper: elif tool_name == "publish": self.show_publish(parent, *args, **kwargs) + elif tool_name == "publisher": + self.show_publisher_tool(parent, *args, **kwargs) + elif tool_name == "experimental_tools": self.show_experimental_tools_dialog(parent, *args, **kwargs) @@ -414,6 +447,10 @@ def show_publish(parent=None): _SingletonPoint.show_tool_by_name("publish", parent) +def show_publisher(parent=None): + _SingletonPoint.show_tool_by_name("publisher", parent) + + def show_experimental_tools_dialog(parent=None): _SingletonPoint.show_tool_by_name("experimental_tools", parent) From 7ad8aa34db533c97de270d520874a307caa93fe4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 12:38:36 +0200 Subject: [PATCH 69/82] fix variable usage --- openpype/tools/utils/host_tools.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 3177ed35aa..f7e6d330ed 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -273,7 +273,8 @@ class HostToolsHelper: def get_publisher_tool(self, parent): """Create, cache and return scene inventory tool window.""" - if self._scene_inventory_tool is None: + + if self._publisher_tool is None: from openpype.tools.publisher import PublisherWindow host = registered_host() From a440a92838772c967f1bb844534153fe9814f4fc Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 13 Sep 2022 13:34:37 +0200 Subject: [PATCH 70/82] Fix docstring Co-authored-by: Roy Nieterau --- openpype/tools/utils/host_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index f7e6d330ed..7208e0a500 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -272,7 +272,7 @@ class HostToolsHelper: dialog.showNormal() def get_publisher_tool(self, parent): - """Create, cache and return scene inventory tool window.""" + """Create, cache and return publisher window.""" if self._publisher_tool is None: from openpype.tools.publisher import PublisherWindow From 9e5e5d59210a82d6c171f3871834955d326b2a0b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 13:35:38 +0200 Subject: [PATCH 71/82] remove unnecessary lines --- openpype/tools/utils/host_tools.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 7208e0a500..d2f05d3302 100644 --- a/openpype/tools/utils/host_tools.py 
+++ b/openpype/tools/utils/host_tools.py @@ -194,7 +194,6 @@ class HostToolsHelper: library_loader_tool.showNormal() library_loader_tool.refresh() - def show_publish(self, parent=None): """Try showing the most desirable publish GUI @@ -206,7 +205,6 @@ class HostToolsHelper: pyblish_show = self._discover_pyblish_gui() return pyblish_show(parent) - def _discover_pyblish_gui(self): """Return the most desirable of the currently registered GUIs""" # Prefer last registered From ecee2d2be5d33c4014effa836f620114cfc1bf9a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 14:02:11 +0200 Subject: [PATCH 72/82] implemented 'check_ftrack_url' in ftrack module --- openpype/modules/ftrack/__init__.py | 8 ++- openpype/modules/ftrack/ftrack_module.py | 69 ++++++++++++++++++++---- 2 files changed, 64 insertions(+), 13 deletions(-) diff --git a/openpype/modules/ftrack/__init__.py b/openpype/modules/ftrack/__init__.py index 7261254c6f..6dc67b74b9 100644 --- a/openpype/modules/ftrack/__init__.py +++ b/openpype/modules/ftrack/__init__.py @@ -1,9 +1,13 @@ from .ftrack_module import ( FtrackModule, - FTRACK_MODULE_DIR + FTRACK_MODULE_DIR, + + check_ftrack_url, ) __all__ = ( "FtrackModule", - "FTRACK_MODULE_DIR" + "FTRACK_MODULE_DIR", + + "check_ftrack_url", ) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index cb4f204523..e00f9d89c6 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -12,8 +12,10 @@ from openpype_interfaces import ( ISettingsChangeListener ) from openpype.settings import SaveWarningExc +from openpype.lib import Logger FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) +_PLACEHOLDER = object() class FtrackModule( @@ -28,17 +30,8 @@ class FtrackModule( ftrack_settings = settings[self.name] self.enabled = ftrack_settings["enabled"] - # Add http schema - ftrack_url = ftrack_settings["ftrack_server"].strip("/ ") - if ftrack_url: - if "http" not in ftrack_url: - ftrack_url = "https://" + ftrack_url - - # Check if "ftrack.app" is part os url - if "ftrackapp.com" not in ftrack_url: - ftrack_url = ftrack_url + ".ftrackapp.com" - - self.ftrack_url = ftrack_url + self._settings_ftrack_url = ftrack_settings["ftrack_server"] + self._ftrack_url = _PLACEHOLDER current_dir = os.path.dirname(os.path.abspath(__file__)) low_platform = platform.system().lower() @@ -70,6 +63,16 @@ class FtrackModule( self.timers_manager_connector = None self._timers_manager_module = None + def get_ftrack_url(self): + if self._ftrack_url is _PLACEHOLDER: + self._ftrack_url = check_ftrack_url( + self._settings_ftrack_url, + logger=self.log + ) + return self._ftrack_url + + ftrack_url = property(get_ftrack_url) + def get_global_environments(self): """Ftrack's global environments.""" return { @@ -479,6 +482,50 @@ class FtrackModule( click_group.add_command(cli_main) +def _check_ftrack_url(url): + import requests + + try: + result = requests.get(url, allow_redirects=False) + except requests.exceptions.RequestException: + return False + + if (result.status_code != 200 or "FTRACK_VERSION" not in result.headers): + return False + return True + + +def check_ftrack_url(url, log_errors=True, logger=None): + """Checks if Ftrack server is responding""" + + if logger is None: + logger = Logger.get_logger(__name__) + + url = url.strip("/ ") + if not url: + logger.error("Ftrack URL is not set!") + return None + + if not url.startswith("http"): + url = "https://" + url + + ftrack_url = None + if not 
url.endswith("ftrackapp.com"): + ftrackapp_url = url + ".ftrackapp.com" + if _check_ftrack_url(ftrackapp_url): + ftrack_url = ftrackapp_url + + if not ftrack_url and _check_ftrack_url(url): + ftrack_url = url + + if ftrack_url: + logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) + elif log_errors: + logger.error("Entered Ftrack URL \"{}\" is not accesible!".format(url)) + + return ftrack_url + + @click.group(FtrackModule.name, help="Ftrack module related commands.") def cli_main(): pass From b29f26b28cb9350c0460b8bd8b89a8bfcbf0c7cd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 14:02:42 +0200 Subject: [PATCH 73/82] changed imports in ftrack tray --- openpype/modules/ftrack/tray/ftrack_tray.py | 40 +++++++++------------ 1 file changed, 17 insertions(+), 23 deletions(-) diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index 501d837a4c..a6a87b8ef9 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -6,22 +6,18 @@ import threading from Qt import QtCore, QtWidgets, QtGui import ftrack_api -from ..ftrack_server.lib import check_ftrack_url -from ..ftrack_server import socket_thread -from ..lib import credentials -from ..ftrack_module import FTRACK_MODULE_DIR -from . import login_dialog - from openpype import resources from openpype.lib import Logger - - -log = Logger.get_logger("FtrackModule") +from openpype_modules.ftrack import check_ftrack_url, FTRACK_MODULE_DIR +from openpype_modules.ftrack.ftrack_server import socket_thread +from openpype_modules.ftrack.lib import credentials +from . import login_dialog class FtrackTrayWrapper: def __init__(self, module): self.module = module + self.log = Logger.get_logger(self.__class__.__name__) self.thread_action_server = None self.thread_socket_server = None @@ -62,19 +58,19 @@ class FtrackTrayWrapper: if validation: self.widget_login.set_credentials(ft_user, ft_api_key) self.module.set_credentials_to_env(ft_user, ft_api_key) - log.info("Connected to Ftrack successfully") + self.log.info("Connected to Ftrack successfully") self.on_login_change() return validation if not validation and ft_user and ft_api_key: - log.warning( + self.log.warning( "Current Ftrack credentials are not valid. 
{}: {} - {}".format( str(os.environ.get("FTRACK_SERVER")), ft_user, ft_api_key ) ) - log.info("Please sign in to Ftrack") + self.log.info("Please sign in to Ftrack") self.bool_logged = False self.show_login_widget() self.set_menu_visibility() @@ -104,7 +100,7 @@ class FtrackTrayWrapper: self.action_credentials.setIcon(self.icon_not_logged) self.action_credentials.setToolTip("Logged out") - log.info("Logged out of Ftrack") + self.log.info("Logged out of Ftrack") self.bool_logged = False self.set_menu_visibility() @@ -126,10 +122,6 @@ class FtrackTrayWrapper: ftrack_url = self.module.ftrack_url os.environ["FTRACK_SERVER"] = ftrack_url - parent_file_path = os.path.dirname( - os.path.dirname(os.path.realpath(__file__)) - ) - min_fail_seconds = 5 max_fail_count = 3 wait_time_after_max_fail = 10 @@ -154,7 +146,7 @@ class FtrackTrayWrapper: # Main loop while True: if not self.bool_action_server_running: - log.debug("Action server was pushed to stop.") + self.log.debug("Action server was pushed to stop.") break # Check if accessible Ftrack and Mongo url @@ -164,7 +156,9 @@ class FtrackTrayWrapper: # Run threads only if Ftrack is accessible if not ftrack_accessible: if not printed_ftrack_error: - log.warning("Can't access Ftrack {}".format(ftrack_url)) + self.log.warning( + "Can't access Ftrack {}".format(ftrack_url) + ) if self.thread_socket_server is not None: self.thread_socket_server.stop() @@ -191,7 +185,7 @@ class FtrackTrayWrapper: self.set_menu_visibility() elif failed_count == max_fail_count: - log.warning(( + self.log.warning(( "Action server failed {} times." " I'll try to run again {}s later" ).format( @@ -243,10 +237,10 @@ class FtrackTrayWrapper: self.thread_action_server.join() self.thread_action_server = None - log.info("Ftrack action server was forced to stop") + self.log.info("Ftrack action server was forced to stop") except Exception: - log.warning( + self.log.warning( "Error has happened during Killing action server", exc_info=True ) @@ -343,7 +337,7 @@ class FtrackTrayWrapper: self.thread_timer = None except Exception as e: - log.error("During Killing Timer event server: {0}".format(e)) + self.log.error("During Killing Timer event server: {0}".format(e)) def changed_user(self): self.stop_action_server() From 21e050a8f18a272da3e200405550be6570e9f3d9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 14:05:25 +0200 Subject: [PATCH 74/82] use new import of 'check_ftrack_url' --- openpype/modules/ftrack/ftrack_module.py | 2 +- .../modules/ftrack/ftrack_server/__init__.py | 2 -- .../ftrack/ftrack_server/event_server_cli.py | 6 ++-- openpype/modules/ftrack/ftrack_server/lib.py | 35 +------------------ openpype/modules/ftrack/lib/avalon_sync.py | 7 ++-- 5 files changed, 8 insertions(+), 44 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index e00f9d89c6..899711e33e 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -6,7 +6,7 @@ import platform import click from openpype.modules import OpenPypeModule -from openpype_interfaces import ( +from openpype.modules.interfaces import ( ITrayModule, IPluginPaths, ISettingsChangeListener diff --git a/openpype/modules/ftrack/ftrack_server/__init__.py b/openpype/modules/ftrack/ftrack_server/__init__.py index 9e3920b500..8e5f7c4c51 100644 --- a/openpype/modules/ftrack/ftrack_server/__init__.py +++ b/openpype/modules/ftrack/ftrack_server/__init__.py @@ -1,8 +1,6 @@ from .ftrack_server import FtrackServer -from .lib import 
check_ftrack_url __all__ = ( "FtrackServer", - "check_ftrack_url" ) diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 3ef7c8270a..2848469bc3 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -20,9 +20,11 @@ from openpype.lib import ( get_openpype_version, get_build_version, ) -from openpype_modules.ftrack import FTRACK_MODULE_DIR +from openpype_modules.ftrack import ( + FTRACK_MODULE_DIR, + check_ftrack_url, +) from openpype_modules.ftrack.lib import credentials -from openpype_modules.ftrack.ftrack_server.lib import check_ftrack_url from openpype_modules.ftrack.ftrack_server import socket_thread diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index 947dacf917..c8143f739c 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -26,45 +26,12 @@ except ImportError: from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.client import OpenPypeMongoConnection -from openpype.api import Logger +from openpype.lib import Logger TOPIC_STATUS_SERVER = "openpype.event.server.status" TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result" -def check_ftrack_url(url, log_errors=True, logger=None): - """Checks if Ftrack server is responding""" - if logger is None: - logger = Logger.get_logger(__name__) - - if not url: - logger.error("Ftrack URL is not set!") - return None - - url = url.strip('/ ') - - if 'http' not in url: - if url.endswith('ftrackapp.com'): - url = 'https://' + url - else: - url = 'https://{0}.ftrackapp.com'.format(url) - try: - result = requests.get(url, allow_redirects=False) - except requests.exceptions.RequestException: - if log_errors: - logger.error("Entered Ftrack URL is not accesible!") - return False - - if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers): - if log_errors: - logger.error("Entered Ftrack URL is not accesible!") - return False - - logger.debug("Ftrack server {} is accessible.".format(url)) - - return url - - class SocketBaseEventHub(ftrack_api.event.hub.EventHub): hearbeat_msg = b"hearbeat" diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index 72be6a8e9a..935d1e85c9 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -19,11 +19,8 @@ from openpype.client.operations import ( CURRENT_PROJECT_SCHEMA, CURRENT_PROJECT_CONFIG_SCHEMA, ) -from openpype.api import ( - Logger, - get_anatomy_settings -) -from openpype.lib import ApplicationManager +from openpype.settings import get_anatomy_settings +from openpype.lib import ApplicationManager, Logger from openpype.pipeline import AvalonMongoDB, schema from .constants import CUST_ATTR_ID_KEY, FPS_KEYS From 98f1312ce999e4be72a1a90bce99c75be967cdfd Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 13 Sep 2022 15:06:31 +0200 Subject: [PATCH 75/82] Modify log message Co-authored-by: Roy Nieterau --- openpype/modules/ftrack/ftrack_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 899711e33e..2ab0eb8239 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -521,7 +521,7 @@ 
def check_ftrack_url(url, log_errors=True, logger=None): if ftrack_url: logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) elif log_errors: - logger.error("Entered Ftrack URL \"{}\" is not accesible!".format(url)) + logger.error("Entered Ftrack URL \"{}\" is not accessible!".format(url)) return ftrack_url From aead601397e0ebecfafb6da62570c0585f627018 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:11:20 +0200 Subject: [PATCH 76/82] unify messages --- openpype/modules/ftrack/ftrack_module.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 2ab0eb8239..e79910372f 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -520,8 +520,9 @@ def check_ftrack_url(url, log_errors=True, logger=None): if ftrack_url: logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) + elif log_errors: - logger.error("Entered Ftrack URL \"{}\" is not accessible!".format(url)) + logger.error("Ftrack server \"{}\" is not accessible!".format(url)) return ftrack_url From 0291d2a7054b6b551fc8e5dc1092a87a026838d8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:15:18 +0200 Subject: [PATCH 77/82] renamed 'check_ftrack_url' to 'resolve_ftrack_url' --- openpype/modules/ftrack/__init__.py | 4 ++-- openpype/modules/ftrack/ftrack_module.py | 6 +++--- openpype/modules/ftrack/ftrack_server/event_server_cli.py | 8 ++++---- openpype/modules/ftrack/tray/ftrack_tray.py | 4 ++-- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/openpype/modules/ftrack/__init__.py b/openpype/modules/ftrack/__init__.py index 6dc67b74b9..e520f08337 100644 --- a/openpype/modules/ftrack/__init__.py +++ b/openpype/modules/ftrack/__init__.py @@ -2,12 +2,12 @@ from .ftrack_module import ( FtrackModule, FTRACK_MODULE_DIR, - check_ftrack_url, + resolve_ftrack_url, ) __all__ = ( "FtrackModule", "FTRACK_MODULE_DIR", - "check_ftrack_url", + "resolve_ftrack_url", ) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index e79910372f..05ea7b79d1 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -65,7 +65,7 @@ class FtrackModule( def get_ftrack_url(self): if self._ftrack_url is _PLACEHOLDER: - self._ftrack_url = check_ftrack_url( + self._ftrack_url = resolve_ftrack_url( self._settings_ftrack_url, logger=self.log ) @@ -495,8 +495,8 @@ def _check_ftrack_url(url): return True -def check_ftrack_url(url, log_errors=True, logger=None): - """Checks if Ftrack server is responding""" +def resolve_ftrack_url(url, log_errors=True, logger=None): + """Checks if Ftrack server is responding.""" if logger is None: logger = Logger.get_logger(__name__) diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 2848469bc3..20c5ab24a8 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -22,7 +22,7 @@ from openpype.lib import ( ) from openpype_modules.ftrack import ( FTRACK_MODULE_DIR, - check_ftrack_url, + resolve_ftrack_url, ) from openpype_modules.ftrack.lib import credentials from openpype_modules.ftrack.ftrack_server import socket_thread @@ -116,7 +116,7 @@ def legacy_server(ftrack_url): while True: if not ftrack_accessible: - ftrack_accessible = check_ftrack_url(ftrack_url) + 
ftrack_accessible = resolve_ftrack_url(ftrack_url) # Run threads only if Ftrack is accessible if not ftrack_accessible and not printed_ftrack_error: @@ -259,7 +259,7 @@ def main_loop(ftrack_url): while True: # Check if accessible Ftrack and Mongo url if not ftrack_accessible: - ftrack_accessible = check_ftrack_url(ftrack_url) + ftrack_accessible = resolve_ftrack_url(ftrack_url) if not mongo_accessible: mongo_accessible = check_mongo_url(mongo_uri) @@ -443,7 +443,7 @@ def run_event_server( os.environ["CLOCKIFY_API_KEY"] = clockify_api_key # Check url regex and accessibility - ftrack_url = check_ftrack_url(ftrack_url) + ftrack_url = resolve_ftrack_url(ftrack_url) if not ftrack_url: print('Exiting! < Please enter Ftrack server url >') return 1 diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index a6a87b8ef9..e3c6e30ead 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -8,7 +8,7 @@ from Qt import QtCore, QtWidgets, QtGui import ftrack_api from openpype import resources from openpype.lib import Logger -from openpype_modules.ftrack import check_ftrack_url, FTRACK_MODULE_DIR +from openpype_modules.ftrack import resolve_ftrack_url, FTRACK_MODULE_DIR from openpype_modules.ftrack.ftrack_server import socket_thread from openpype_modules.ftrack.lib import credentials from . import login_dialog @@ -151,7 +151,7 @@ class FtrackTrayWrapper: # Check if accessible Ftrack and Mongo url if not ftrack_accessible: - ftrack_accessible = check_ftrack_url(ftrack_url) + ftrack_accessible = resolve_ftrack_url(ftrack_url) # Run threads only if Ftrack is accessible if not ftrack_accessible: From 09519c25a804186f9cc4afb92131d0572211f712 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:17:25 +0200 Subject: [PATCH 78/82] removed unused argument 'log_errors' --- openpype/modules/ftrack/ftrack_module.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 05ea7b79d1..68575009b2 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -495,7 +495,7 @@ def _check_ftrack_url(url): return True -def resolve_ftrack_url(url, log_errors=True, logger=None): +def resolve_ftrack_url(url, logger=None): """Checks if Ftrack server is responding.""" if logger is None: @@ -521,7 +521,7 @@ def resolve_ftrack_url(url, log_errors=True, logger=None): if ftrack_url: logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) - elif log_errors: + else: logger.error("Ftrack server \"{}\" is not accessible!".format(url)) return ftrack_url From 477266f1407e84e2ba9d086107e15e8fc5173e79 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:18:56 +0200 Subject: [PATCH 79/82] better variable name for ftrack url value check --- openpype/modules/ftrack/ftrack_module.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 68575009b2..75ffd7f864 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -15,7 +15,7 @@ from openpype.settings import SaveWarningExc from openpype.lib import Logger FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -_PLACEHOLDER = object() +_URL_NOT_SET = object() class FtrackModule( @@ -31,7 +31,7 @@ class FtrackModule( self.enabled = 
ftrack_settings["enabled"] self._settings_ftrack_url = ftrack_settings["ftrack_server"] - self._ftrack_url = _PLACEHOLDER + self._ftrack_url = _URL_NOT_SET current_dir = os.path.dirname(os.path.abspath(__file__)) low_platform = platform.system().lower() @@ -64,7 +64,7 @@ class FtrackModule( self._timers_manager_module = None def get_ftrack_url(self): - if self._ftrack_url is _PLACEHOLDER: + if self._ftrack_url is _URL_NOT_SET: self._ftrack_url = resolve_ftrack_url( self._settings_ftrack_url, logger=self.log From 778e0b2e491f5948f2932968a70f8f620204fb01 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 15:44:20 +0200 Subject: [PATCH 80/82] Perform case-insensitive lookup --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 7e5815b100..5d39e12985 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -74,11 +74,14 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): version_number = int(instance_version) family = instance.data["family"] - family_low = family.lower() + # Perform case-insensitive family mapping + family_low = family.lower() asset_type = instance.data.get("ftrackFamily") - if not asset_type and family_low in self.family_mapping: - asset_type = self.family_mapping[family_low] + if not asset_type: + for map_family, map_value in self.family_mapping.items(): + if map_family.lower() == family_low: + asset_type = map_value if not asset_type: asset_type = "upload" From 4466d8a94249ad66546730b7135e34003f4aa4f8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 15:45:11 +0200 Subject: [PATCH 81/82] Remove redundant logic since just above it's forced to be "upload" when `not asset_type` --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 5d39e12985..a35dbf71d4 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -89,15 +89,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): self.log.debug( "Family: {}\nMapping: {}".format(family_low, self.family_mapping) ) - - # Ignore this instance if neither "ftrackFamily" or a family mapping is - # found. 
- if not asset_type: - self.log.info(( - "Family \"{}\" does not match any asset type mapping" - ).format(family)) - return - status_name = self._get_asset_version_status_name(instance) # Base of component item data From 1bc37ace465f647b6af35a4a2b8cf2832bd94925 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 15:45:48 +0200 Subject: [PATCH 82/82] Actually break loop early on detected mapping --- .../modules/ftrack/plugins/publish/integrate_ftrack_instances.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index a35dbf71d4..5ff75e7060 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -82,6 +82,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): for map_family, map_value in self.family_mapping.items(): if map_family.lower() == family_low: asset_type = map_value + break if not asset_type: asset_type = "upload"
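
The combined effect of PATCH 80-82 on the family-to-asset-type lookup in
integrate_ftrack_instances.py can be summarised with the short standalone
sketch below. This is an approximation for review purposes only: the mapping
values and the "ftrackFamily" override mirror the plugin code above, while
the function name, the trimmed mapping and the asserts are illustrative.

    # Sketch only: approximates the lookup after PATCH 80-82
    # (case-insensitive match, early break, "upload" fallback).
    FAMILY_MAPPING = {
        "camera": "cam",
        "look": "look",
        "mayaAscii": "scene",
        "model": "geo",
        "rig": "rig",
    }

    def resolve_asset_type(family, ftrack_family=None):
        """Return the Ftrack asset type for a publish family.

        'ftrack_family' mirrors instance.data.get("ftrackFamily") and takes
        precedence over the mapping; unmatched families fall back to
        "upload" instead of being skipped.
        """
        asset_type = ftrack_family
        if not asset_type:
            family_low = family.lower()
            for map_family, map_value in FAMILY_MAPPING.items():
                # Case-insensitive comparison so "mayaAscii" and "mayaascii"
                # both resolve to "scene" (PATCH 80).
                if map_family.lower() == family_low:
                    asset_type = map_value
                    break  # stop at the first match (PATCH 82)
        return asset_type or "upload"

    assert resolve_asset_type("mayaascii") == "scene"
    assert resolve_asset_type("camera", ftrack_family="cam") == "cam"
    assert resolve_asset_type("unknownFamily") == "upload"

PATCH 81 drops the "does not match any asset type mapping" early return
because asset_type is already forced to "upload" just above it, so that
skip branch could never trigger; unmapped families are still published,
just as plain uploads.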