From 452c912a6265d043e4931c87e2abb4dbc7b5d28a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 4 Nov 2020 17:08:41 +0100 Subject: [PATCH 01/13] initial work on abstract collector --- pype/lib/abstract_collect_render.py | 242 ++++++++++++++++++++++++++++ pype/lib/abstract_expected_files.py | 53 ++++++ 2 files changed, 295 insertions(+) create mode 100644 pype/lib/abstract_collect_render.py create mode 100644 pype/lib/abstract_expected_files.py diff --git a/pype/lib/abstract_collect_render.py b/pype/lib/abstract_collect_render.py new file mode 100644 index 0000000000..38cdc1a7ce --- /dev/null +++ b/pype/lib/abstract_collect_render.py @@ -0,0 +1,242 @@ +# -*- coding: utf-8 -*- +"""Collect render template. + +TODO: use @dataclass when times come. + +""" +from abc import ABCMeta, abstractmethod + +import six +import attr + +from avalon import api +import pyblish.api + +from expected_files import ExpectedFiles + + +@attr.s +class RenderInstance(object): + """Data collected by collectors. + + This data class later on passed to collected instances. + Those attributes are required later on. + + """ + + # metadata + version = attr.ib() + time = attr.ib() + source = attr.ib() + label = attr.ib() + subset = attr.ib() + asset = attr.ib(init=False) + attachTo = attr.ib(init=False) + setMembers = attr.ib() + publish = attr.ib() + review = attr.ib(default=False) + renderer = attr.ib() + priority = attr.ib(default=50) + name = attr.ib() + + family = attr.ib(default="renderlayer") + families = attr.ib(default=["renderlayer"]) + + # format settings + resolutionWidth = attr.ib() + resolutionHeight = attr.ib() + pixelAspect = attr.ib() + multipartExr = attr.ib(default=False) + tileRendering = attr.ib() + tilesX = attr.ib() + tilesY = attr.ib() + convertToScanline = attr.ib(default=False) + + # time settings + frameStart = attr.ib() + frameEnd = attr.ib() + frameStep = attr.ib() + + @frameStart.validator + def check_frame_start(self, attribute, value): + """Validate if frame start is not larger then end.""" + if value >= self.frameEnd: + raise ValueError("frameStart must be smaller " + "or equal then frameEnd") + + @frameEnd.validator + def check_frame_end(self, attribute, value): + """Validate if frame end is not less then start.""" + if value <= self.frameStart: + raise ValueError("frameEnd must be smaller " + "or equal then frameStart") + + @tilesX.validator + def check_tiles_x(self, attribute, value): + """Validate if tile x isn't less then 1.""" + if not self.tileRendering: + return + if value < 1: + raise ValueError("tile X size cannot be less then 1") + + if value == 1 and self.tilesY == 1: + raise ValueError("both tiles X a Y sizes are set to 1") + + @tilesY.validator + def check_tiles_y(self, attribute, value): + """Validate if tile y isn't less then 1.""" + if not self.tileRendering: + return + if value < 1: + raise ValueError("tile Y size cannot be less then 1") + + if value == 1 and self.tilesX == 1: + raise ValueError("both tiles X a Y sizes are set to 1") + + +@six.add_metaclass(ABCMeta) +class CollectRender(pyblish.api.ContextPlugin): + """Gather all publishable render layers from renderSetup.""" + + order = pyblish.api.CollectorOrder + 0.01 + label = "Collect Render" + sync_workfile_version = False + + def process(self, context): + """Entry point to collector.""" + rendering_instance = None + for instance in context: + if "rendering" in instance.data["families"]: + rendering_instance = instance + rendering_instance.data["remove"] = True + + # make sure workfile 
instance publishing is enabled + if "workfile" in instance.data["families"]: + instance.data["publish"] = True + + if not rendering_instance: + self.log.info( + "No rendering instance found, skipping render " + "layer collection." + ) + return + + self._filepath = context.data["currentFile"].replace("\\", "/") + self._asset = api.Session["AVALON_ASSET"] + + render_instances = self.get_instances() + for render_instance in render_instances: + exp_files = self._get_expected_files(render_instance) + + frame_start_render = int(render_instance.frameStart) + frame_end_render = int(render_instance.frameEnd) + + if (int(context.data['frameStartHandle']) == frame_start_render + and int(context.data['frameEndHandle']) == frame_end_render): # noqa: W503, E501 + + handle_start = context.data['handleStart'] + handle_end = context.data['handleEnd'] + frame_start = context.data['frameStart'] + frame_end = context.data['frameEnd'] + frame_start_handle = context.data['frameStartHandle'] + frame_end_handle = context.data['frameEndHandle'] + else: + handle_start = 0 + handle_end = 0 + frame_start = frame_start_render + frame_end = frame_end_render + frame_start_handle = frame_start_render + frame_end_handle = frame_end_render + + data = { + "subset": render_instance.subset, + "attachTo": render_instance.attachTo, + "setMembers": render_instance.setMembers, + "multipartExr": exp_files.multipart, + "review": render_instance.review or False, + "publish": True, + + "handleStart": handle_start, + "handleEnd": handle_end, + "frameStart": frame_start, + "frameEnd": frame_end, + "frameStartHandle": frame_start_handle, + "frameEndHandle": frame_end_handle, + "byFrameStep": int(render_instance.frameStep), + "renderer": render_instance.renderer, + # instance subset + "family": render_instance.family, + "families": render_instance.families, + "asset": render_instance.asset, + "time": render_instance.time, + "author": context.data["user"], + # Add source to allow tracing back to the scene from + # which was submitted originally + "source": render_instance.source, + "expectedFiles": exp_files, + "resolutionWidth": render_instance.resolutionWidth, + "resolutionHeight": render_instance.resolutionHeight, + "pixelAspect": render_instance.pixelAspect, + "tileRendering": render_instance.tileRendering or False, + "tilesX": render_instance.tilesX or 2, + "tilesY": render_instance.tilesY or 2, + "priority": render_instance.priority, + "convertToScanline": render_instance.convertToScanline or False + } + if self.sync_workfile_version: + data["version"] = context.data["version"] + + # add additional data + data = self.add_additional_data(data) + + instance = context.create_instance(render_instance.name) + instance.data["label"] = render_instance.label + instance.data.update(data) + + self.post_collecting_action() + + @abstractmethod + def get_instances(self): + """Get all renderable instances and their data. + + Returns: + list of :class:`RenderInstance`: All collected renderable instances + (like render layers, write nodes, etc.) + + """ + pass + + def _get_expected_files(self, render_instance): + """Get list of expected files.""" + # return all expected files for all cameras and aovs in given + # frame range + ef = ExpectedFiles() + exp_files = ef.get(render_instance) + self.log.info("multipart: {}".format(ef.multipart)) + assert exp_files, "no file names were generated, this is bug" + + # if we want to attach render to subset, check if we have AOV's + # in expectedFiles. 
If so, raise error as we cannot attach AOV + # (considered to be subset on its own) to another subset + if render_instance.attachTo: + assert isinstance(exp_files, list), ( + "attaching multiple AOVs or renderable cameras to " + "subset is not supported" + ) + + def add_additional_data(self, data): + """Add additional data to collected instance. + + This can be overridden by host implementation to add custom + additional data. + + """ + return data + + def post_collecting_action(self): + """Execute some code after collection is done. + + This is useful for example for restoring current render layer. + + """ + pass diff --git a/pype/lib/abstract_expected_files.py b/pype/lib/abstract_expected_files.py new file mode 100644 index 0000000000..f493cdb751 --- /dev/null +++ b/pype/lib/abstract_expected_files.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +"""Abstract ExpectedFile class definition.""" +from abc import ABCMeta, abstractmethod +import six + + +@six.add_metaclass(ABCMeta) +class ExpectedFiles: + """Class grouping functionality for all supported renderers. + + Attributes: + multipart (bool): Flag if multipart exrs are used. + + """ + + multipart = False + + @abstractmethod + def get(self, render_instance): + """Get expected files for given renderer and render layer. + + This method should return dictionary of all files we are expecting + to be rendered from the host. Usually `render_instance` corresponds + to *render layer*. Result can be either flat list with the file + paths or it can be list of dictionaries. Each key corresponds to + for example AOV name or channel, etc. + + Example:: + + ['/path/to/file.001.exr', '/path/to/file.002.exr'] + + or as dictionary: + + [ + { + "beauty": ['/path/to/beauty.001.exr', ...], + "mask": ['/path/to/mask.001.exr'] + } + ] + + + Args: + renderer_instance (:class:`RenderInstance`): Data passed from + collector to determine files. This should be instance of + :class:`abstract_collect_render.RenderInstance` + + Returns: + list: Full paths to expected rendered files. + list of dict: Path to expected rendered files categorized by + AOVs, etc. 
+ + """ + raise NotImplementedError() From 9f704a5b276748073c366085c4919849fcdcb016 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 5 Nov 2020 12:20:53 +0100 Subject: [PATCH 02/13] wip on submit plugin --- pype/lib/abstract_collect_render.py | 4 +- pype/lib/abstract_submit_deadline.py | 80 ++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+), 2 deletions(-) create mode 100644 pype/lib/abstract_submit_deadline.py diff --git a/pype/lib/abstract_collect_render.py b/pype/lib/abstract_collect_render.py index 38cdc1a7ce..bf14f6f850 100644 --- a/pype/lib/abstract_collect_render.py +++ b/pype/lib/abstract_collect_render.py @@ -12,7 +12,7 @@ import attr from avalon import api import pyblish.api -from expected_files import ExpectedFiles +from .abstract_expected_files import ExpectedFiles @attr.s @@ -95,7 +95,7 @@ class RenderInstance(object): @six.add_metaclass(ABCMeta) -class CollectRender(pyblish.api.ContextPlugin): +class AbstractCollectRender(pyblish.api.ContextPlugin): """Gather all publishable render layers from renderSetup.""" order = pyblish.api.CollectorOrder + 0.01 diff --git a/pype/lib/abstract_submit_deadline.py b/pype/lib/abstract_submit_deadline.py new file mode 100644 index 0000000000..8b8fa7bc1f --- /dev/null +++ b/pype/lib/abstract_submit_deadline.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +"""Abstract class for submitting jobs to Deadline.""" +import os +from abc import ABCMeta, abstractmethod + +import six +import attr +import requests + +import pyblish.api + + +@attr.s +class DeadlineJobInfo: + BatchName = attr.ib() + + +@attr.s +class DeadlinePluginInfo: + SceneFile = attr.ib() + + +@six.add_metaclass(ABCMeta) +class AbstractSubmitDeadline(pyblish.api.InstancePlugin): + + label = "Submit to Deadline" + order = pyblish.api.IntegratorOrder + 0.1 + use_published = True + asset_dependencies = False + + def submit(self, payload): + url = "{}/api/jobs".format(self._deadline_url) + response = self._requests_post(url, json=payload) + if not response.ok: + self.log.error("Submition failed!") + self.log.error(response.status_code) + self.log.error(response.content) + self.log.debug(payload) + raise RuntimeError(response.text) + + dependency = response.json() + return dependency["_id"] + + def _requests_post(self, *args, **kwargs): + """Wrap request post method. + + Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment + variable is found. This is useful when Deadline or Muster server are + running with self-signed certificates and their certificate is not + added to trusted certificates on client machines. + + Warning: + Disabling SSL certificate validation is defeating one line + of defense SSL is providing and it is not recommended. + + """ + if 'verify' not in kwargs: + kwargs['verify'] = False if os.getenv("PYPE_DONT_VERIFY_SSL", True) else True # noqa + # add 10sec timeout before bailing out + kwargs['timeout'] = 10 + return requests.post(*args, **kwargs) + + def _requests_get(self, *args, **kwargs): + """Wrap request get method. + + Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment + variable is found. This is useful when Deadline or Muster server are + running with self-signed certificates and their certificate is not + added to trusted certificates on client machines. + + Warning: + Disabling SSL certificate validation is defeating one line + of defense SSL is providing and it is not recommended. 
+ + """ + if 'verify' not in kwargs: + kwargs['verify'] = False if os.getenv("PYPE_DONT_VERIFY_SSL", True) else True # noqa + # add 10sec timeout before bailing out + kwargs['timeout'] = 10 + return requests.get(*args, **kwargs) From 25b87bca12b84d79192254353f919c84510d0864 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 5 Nov 2020 18:35:36 +0100 Subject: [PATCH 03/13] added Deadline JobInfo dataclass --- pype/lib/abstract_submit_deadline.py | 321 ++++++++++++++++++++++++++- 1 file changed, 320 insertions(+), 1 deletion(-) diff --git a/pype/lib/abstract_submit_deadline.py b/pype/lib/abstract_submit_deadline.py index 8b8fa7bc1f..dede4c19ba 100644 --- a/pype/lib/abstract_submit_deadline.py +++ b/pype/lib/abstract_submit_deadline.py @@ -2,6 +2,8 @@ """Abstract class for submitting jobs to Deadline.""" import os from abc import ABCMeta, abstractmethod +import platform +import getpass import six import attr @@ -12,7 +14,324 @@ import pyblish.api @attr.s class DeadlineJobInfo: - BatchName = attr.ib() + """Mapping of all Deadline *JobInfo* attributes. + + This contains all JobInfo attributes plus their default values. + Those attributes set to `None` shouldn't be posted to Deadline as + the only required one is `Plugin`. Their default values used by Deadline + are stated in + comments. + + ..seealso: + https://docs.thinkboxsoftware.com/products/deadline/10.1/1_User%20Manual/manual/manual-submission.html + + """ + + # Required + # ---------------------------------------------- + Plugin = attr.ib() + + # General + Frames = attr.ib(default=None) # default: 0 + Name = attr.ib(default="Untitled") + Comment = attr.ib(default=None) # default: empty + Department = attr.ib(default=None) # default: empty + BatchName = attr.ib(default=None) # default: empty + UserName = attr.ib(default=getpass.getuser()) + MachineName = attr.ib(default=platform.node()) + Pool = attr.ib(default=None) # default: "none" + SecondaryPool = attr.ib(default=None) + Group = attr.ib(default=None) # default: "none" + Priority = attr.ib(default=50) + ChunkSize = attr.ib(default=1) + ConcurrentTasks = attr.ib(default=1) + LimitConcurrentTasksToNumberOfCpus = attr.ib( + default=None) # default: "true" + OnJobComplete = attr.ib(default="Nothing") + SynchronizeAllAuxiliaryFiles = attr.ib(default=None) # default: false + ForceReloadPlugin = attr.ib(default=None) # default: false + Sequential = attr.ib(default=None) # default: false + SuppressEvents = attr.ib(default=None) # default: false + Protected = attr.ib(default=None) # default: false + InitialStatus = attr.ib(default="Active") + NetworkRoot = attr.ib(default=None) + + # Timeouts + # ---------------------------------------------- + MinRenderTimeSeconds = attr.ib(default=None) # Default: 0 + MinRenderTimeMinutes = attr.ib(default=None) # Default: 0 + TaskTimeoutSeconds = attr.ib(default=None) # Default: 0 + TaskTimeoutMinutes = attr.ib(default=None) # Default: 0 + StartJobTimeoutSeconds = attr.ib(default=None) # Default: 0 + StartJobTimeoutMinutes = attr.ib(default=None) # Default: 0 + InitializePluginTimeoutSeconds = attr.ib(default=None) # Default: 0 + # can be one of + OnTaskTimeout = attr.ib(default=None) # Default: Error + EnableTimeoutsForScriptTasks = attr.ib(default=None) # Default: false + EnableFrameTimeouts = attr.ib(default=None) # Default: false + EnableAutoTimeout = attr.ib(default=None) # Default: false + + # Interruptible + # ---------------------------------------------- + Interruptible = attr.ib(default=None) # Default: false + 
InterruptiblePercentage = attr.ib(default=None) + RemTimeThreshold = attr.ib(default=None) + + # Notifications + # ---------------------------------------------- + # can be comma separated list of users + NotificationTargets = attr.ib(default=None) # Default: blank + ClearNotificationTargets = attr.ib(default=None) # Default: false + # A comma separated list of additional email addresses + NotificationEmails = attr.ib(default=None) # Default: blank + OverrideNotificationMethod = attr.ib(default=None) # Default: false + EmailNotification = attr.ib(default=None) # Default: false + PopupNotification = attr.ib(default=None) # Default: false + # String with `[EOL]` used for end of line + NotificationNote = attr.ib(default=None) # Default: blank + + # Machine Limit + # ---------------------------------------------- + MachineLimit = attr.ib(default=None) # Default: 0 + MachineLimitProgress = attr.ib(default=None) # Default: -1.0 + Whitelist = attr.ib(default=None) # Default: blank + Blacklist = attr.ib(default=None) # Default: blank + + # Limits + # ---------------------------------------------- + # comma separated list of limit groups + LimitGroups = attr.ib(default=None) # Default: blank + + # Dependencies + # ---------------------------------------------- + # comma separated list of job IDs + JobDependencies = attr.ib(default=None) # Default: blank + JobDependencyPercentage = attr.ib(default=None) # Default: -1 + IsFrameDependent = attr.ib(default=None) # Default: false + FrameDependencyOffsetStart = attr.ib(default=None) # Default: 0 + FrameDependencyOffsetEnd = attr.ib(default=None) # Default: 0 + ResumeOnCompleteDependencies = attr.ib(default=None) # Default: true + ResumeOnDeletedDependencies = attr.ib(default=None) # Default: false + ResumeOnFailedDependencies = attr.ib(default=None) # Default: false + # comma separated list of asset paths + RequiredAssets = attr.ib(default=None) # Default: blank + # comma separated list of script paths + ScriptDependencies = attr.ib(default=None) # Default: blank + + # Failure Detection + # ---------------------------------------------- + OverrideJobFailureDetection = attr.ib(default=None) # Default: false + FailureDetectionJobErrors = attr.ib(default=None) # 0..x + OverrideTaskFailureDetection = attr.ib(default=None) # Default: false + FailureDetectionTaskErrors = attr.ib(default=None) # 0..x + IgnoreBadJobDetection = attr.ib(default=None) # Default: false + SendJobErrorWarning = attr.ib(default=None) # Default: false + + # Cleanup + # ---------------------------------------------- + DeleteOnComplete = attr.ib(default=None) # Default: false + ArchiveOnComplete = attr.ib(default=None) # Default: false + OverrideAutoJobCleanup = attr.ib(default=None) # Default: false + OverrideJobCleanup = attr.ib(default=None) + JobCleanupDays = attr.ib(default=None) # Default: false + # + OverrideJobCleanupType = attr.ib(default=None) + + # Scheduling + # ---------------------------------------------- + # + ScheduledType = attr.ib(default=None) # Default: None + #
+ ScheduledStartDateTime = attr.ib(default=None) + ScheduledDays = attr.ib(default=None) # Default: 1 + # + JobDelay = attr.ib(default=None) + # Time= + Scheduled = attr.ib(default=None) + + # Scripts + # ---------------------------------------------- + # all accept path to script + PreJobScript = attr.ib(default=None) # Default: blank + PostJobScript = attr.ib(default=None) # Default: blank + PreTaskScript = attr.ib(default=None) # Default: blank + PostTaskScript = attr.ib(default=None) # Default: blank + + # Event Opt-Ins + # ---------------------------------------------- + # comma separated list of plugins + EventOptIns = attr.ib(default=None) # Default: blank + + # Environment + # ---------------------------------------------- + _environmentKeyValue = attr.ib(factory=list) + + @property + def EnvironmentKeyValue(self): # noqa: N802 + """Return all environment key values formatted for Deadline. + + Returns: + list of tuples: as `[('EnvironmentKeyValue0', 'key=value')]` + + """ + out = [] + index = 0 + for v in self._environmentKeyValue: + out.append(("EnvironmentKeyValue{}".format(index), v)) + index += 1 + return out + + @EnvironmentKeyValue.setter + def EnvironmentKeyValue(self, val): # noqa: N802 + self._environmentKeyValue.append(val) + + IncludeEnvironment = attr.ib(default=None) # Default: false + UseJobEnvironmentOnly = attr.ib(default=None) # Default: false + CustomPluginDirectory = attr.ib(default=None) # Default: blank + + # Job Extra Info + # ---------------------------------------------- + _extraInfos = attr.ib(factory=list) + _extraInfoKeyValues = attr.ib(factory=list) + + @property + def ExtraInfo(self): # noqa: N802 + """Return all ExtraInfo values formatted for Deadline. + + Returns: + list of tuples: as `[('ExtraInfo0', 'value')]` + + """ + out = [] + index = 0 + for v in self._extraInfos: + out.append(("ExtraInfo{}".format(index), v)) + index += 1 + return out + + @ExtraInfo.setter + def ExtraInfo(self, val): # noqa: N802 + self._extraInfos.append(val) + + @property + def ExtraInfoKeyValue(self): # noqa: N802 + """Return all ExtraInfoKeyValue values formatted for Deadline. + + Returns: + list of tuples: as `[('ExtraInfoKeyValue0', 'key=value')]` + + """ + out = [] + index = 0 + for v in self._extraInfoKeyValues: + out.append(("ExtraInfoKeyValue{}".format(index), v)) + index += 1 + return out + + @ExtraInfoKeyValue.setter + def ExtraInfoKeyValue(self, val): # noqa: N802 + self._extraInfoKeyValues.append(val) + + # Task Extra Info Names + # ---------------------------------------------- + OverrideTaskExtraInfoNames = attr.ib(default=None) # Default: false + _taskExtraInfos = attr.ib(factory=list) + + @property + def TaskExtraInfoName(self): # noqa: N802 + """Return all TaskExtraInfoName values formatted for Deadline. + + Returns: + list of tuples: as `[('TaskExtraInfoName0', 'value')]` + + """ + out = [] + index = 0 + for v in self._taskExtraInfos: + out.append(("TaskExtraInfoName{}".format(index), v)) + index += 1 + return out + + @TaskExtraInfoName.setter + def TaskExtraInfoName(self, val): # noqa: N802 + self._taskExtraInfos.append(val) + + # Output + # ---------------------------------------------- + _outputFilename = attr.ib(factory=list) + _outputFilenameTile = attr.ib(factory=list) + _outputDirectory = attr.ib(factory=list) + + @property + def OutputFilename(self): # noqa: N802 + """Return all OutputFilename values formatted for Deadline. 
+ + Returns: + list of tuples: as `[('OutputFilename0', 'filename')]` + + """ + out = [] + index = 0 + for v in self._outputFilename: + out.append(("OutputFilename{}".format(index), v)) + index += 1 + return out + + @OutputFilename.setter + def OutputFilename(self, val): # noqa: N802 + self._outputFilename.append(val) + + @property + def OutputFilenameTile(self): # noqa: N802 + """Return all OutputFilename#Tile values formatted for Deadline. + + Returns: + list of tuples: as `[('OutputFilename#Tile', 'tile')]` + + """ + out = [] + index = 0 + for v in self._outputFilenameTile: + out.append(("OutputFilename{}Tile".format(index), v)) + index += 1 + return out + + @OutputFilenameTile.setter + def OutputFilenameTile(self, val): # noqa: N802 + self._outputFilenameTile.append(val) + + @property + def OutputDirectory(self): # noqa: N802 + """Return all OutputDirectory values formatted for Deadline. + + Returns: + list of tuples: as `[('OutputDirectory0', 'dir')]` + + """ + out = [] + index = 0 + for v in self._outputDirectory: + out.append(("OutputDirectory{}".format(index), v)) + index += 1 + return out + + @OutputDirectory.setter + def OutputDirectory(self, val): # noqa: N802 + self._outputDirectory.append(val) + + # Tile Job + # ---------------------------------------------- + TileJob = attr.ib(default=None) # Default: false + TileJobFrame = attr.ib(default=None) # Default: 0 + TileJobTilesInX = attr.ib(default=None) # Default: 0 + TileJobTilesInY = attr.ib(default=None) # Default: 0 + TileJobTileCount = attr.ib(default=None) # Default: 0 + + # Maintenance Job + # ---------------------------------------------- + MaintenanceJob = attr.ib(default=None) # Default: false + MaintenanceJobStartFrame = attr.ib(default=None) # Default: 0 + MaintenanceJobEndFrame = attr.ib(default=None) # Default: 0 @attr.s From 739baf70f5707316bb8a007f0d21c2ab4e859076 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 6 Nov 2020 16:19:29 +0100 Subject: [PATCH 04/13] submit to deadline functionality --- pype/lib/abstract_submit_deadline.py | 258 ++++++++++++++++++++++++--- 1 file changed, 238 insertions(+), 20 deletions(-) diff --git a/pype/lib/abstract_submit_deadline.py b/pype/lib/abstract_submit_deadline.py index dede4c19ba..26ea66f5ce 100644 --- a/pype/lib/abstract_submit_deadline.py +++ b/pype/lib/abstract_submit_deadline.py @@ -1,9 +1,14 @@ # -*- coding: utf-8 -*- -"""Abstract class for submitting jobs to Deadline.""" +"""Abstract package for submitting jobs to Deadline. + +It provides Deadline JobInfo data class. + +""" import os from abc import ABCMeta, abstractmethod import platform import getpass +from collections import OrderedDict import six import attr @@ -171,13 +176,13 @@ class DeadlineJobInfo: """Return all environment key values formatted for Deadline. Returns: - list of tuples: as `[('EnvironmentKeyValue0', 'key=value')]` + dict: as `{'EnvironmentKeyValue0', 'key=value'}` """ out = [] index = 0 for v in self._environmentKeyValue: - out.append(("EnvironmentKeyValue{}".format(index), v)) + out["EnvironmentKeyValue{}".format(index)] = v index += 1 return out @@ -199,13 +204,13 @@ class DeadlineJobInfo: """Return all ExtraInfo values formatted for Deadline. 
Returns: - list of tuples: as `[('ExtraInfo0', 'value')]` + dict: as `{'ExtraInfo0': 'value'}` """ out = [] index = 0 for v in self._extraInfos: - out.append(("ExtraInfo{}".format(index), v)) + out["ExtraInfo{}".format(index)] = v index += 1 return out @@ -218,13 +223,13 @@ class DeadlineJobInfo: """Return all ExtraInfoKeyValue values formatted for Deadline. Returns: - list of tuples: as `[('ExtraInfoKeyValue0', 'key=value')]` + dict: as {'ExtraInfoKeyValue0': 'key=value'}` """ out = [] index = 0 for v in self._extraInfoKeyValues: - out.append(("ExtraInfoKeyValue{}".format(index), v)) + out["ExtraInfoKeyValue{}".format(index)] = v index += 1 return out @@ -242,13 +247,13 @@ class DeadlineJobInfo: """Return all TaskExtraInfoName values formatted for Deadline. Returns: - list of tuples: as `[('TaskExtraInfoName0', 'value')]` + dict: as `{'TaskExtraInfoName0': 'value'}` """ out = [] index = 0 for v in self._taskExtraInfos: - out.append(("TaskExtraInfoName{}".format(index), v)) + out["TaskExtraInfoName{}".format(index)] = v index += 1 return out @@ -267,13 +272,13 @@ class DeadlineJobInfo: """Return all OutputFilename values formatted for Deadline. Returns: - list of tuples: as `[('OutputFilename0', 'filename')]` + dict: as `{'OutputFilename0': 'filename'}` """ out = [] index = 0 for v in self._outputFilename: - out.append(("OutputFilename{}".format(index), v)) + out["OutputFilename{}".format(index)] = v index += 1 return out @@ -286,13 +291,13 @@ class DeadlineJobInfo: """Return all OutputFilename#Tile values formatted for Deadline. Returns: - list of tuples: as `[('OutputFilename#Tile', 'tile')]` + dict: as `{'OutputFilenme#Tile': 'tile'}` """ out = [] index = 0 for v in self._outputFilenameTile: - out.append(("OutputFilename{}Tile".format(index), v)) + out["OutputFilename{}Tile".format(index)] = v index += 1 return out @@ -305,13 +310,13 @@ class DeadlineJobInfo: """Return all OutputDirectory values formatted for Deadline. Returns: - list of tuples: as `[('OutputDirectory0', 'dir')]` + dict: as `{'OutputDirectory0': 'dir'}` """ out = [] index = 0 for v in self._outputDirectory: - out.append(("OutputDirectory{}".format(index), v)) + out["OutputDirectory{}".format(index)] = v index += 1 return out @@ -333,21 +338,234 @@ class DeadlineJobInfo: MaintenanceJobStartFrame = attr.ib(default=None) # Default: 0 MaintenanceJobEndFrame = attr.ib(default=None) # Default: 0 + def render(self): + """Return all data serialized as dictionary. -@attr.s -class DeadlinePluginInfo: - SceneFile = attr.ib() + Returns: + OrderedDict: all serialized data. 
+ + """ + def no_privates(a, _): + return not a.name.startswith("_") + + serialized = attr.asdict( + self, dict_factory=OrderedDict, filter=no_privates) + serialized.update(self.EnvironmentKeyValue) + serialized.update(self.ExtraInfo) + serialized.update(self.ExtraInfoKeyValue) + serialized.update(self.TaskExtraInfoName) + serialized.update(self.OutputFilename) + serialized.update(self.OutputFilenameTile) + serialized.update(self.OutputDirectory) + return serialized @six.add_metaclass(ABCMeta) class AbstractSubmitDeadline(pyblish.api.InstancePlugin): + """Class abstracting access to Deadline.""" label = "Submit to Deadline" order = pyblish.api.IntegratorOrder + 0.1 use_published = True asset_dependencies = False + def process(self, instance): + """Plugin entry point.""" + self._instance = instance + context = instance.context + self._deadline_url = os.environ.get( + "DEADLINE_REST_URL", "http://localhost:8082") + assert self._deadline_url, "Requires DEADLINE_REST_URL" + + file_path = None + if self.use_published: + file_path = self.from_published_scene() + + # fallback if nothing was set + if not file_path: + self.log.warning("Falling back to workfile") + file_path = context.data["currentFile"] + + self.scene_path = file_path + self.log.info("Using {} for render/export.".format(file_path)) + + self.job_info = self.get_job_info() + self.plugin_info = self.get_plugin_info() + self.aux_files = self.get_aux_files() + + def process_submission(self): + """Process data for submission. + + This takes Deadline JobInfo, PluginInfo, AuxFile, creates payload + from them and submit it do Deadline. + + Returns: + str: Deadline job ID + + """ + payload = self.assemble_payload() + return self.submit(payload) + + @abstractmethod + def get_job_info(self): + """Return filled Deadline JobInfo. + + This is host/plugin specific implementation of how to fill data in. + + See: + :class:`DeadlineJobInfo` + + Returns: + dict: Filled Deadline JobInfo. + + """ + pass + + @abstractmethod + def get_plugin_info(self): + """Return filled Deadline PluginInfo. + + This is host/plugin specific implementation of how to fill data in. + + See: + :class:`DeadlineJobInfo` + + Returns: + dict: Filled Deadline JobInfo. + + """ + pass + + def get_aux_files(self): + """Return list of auxiliary files for Deadline job. + + If needed this should be overriden, otherwise return empty list as + that field even empty must be present on Deadline submission. + + Returns: + list: List of files. + + """ + return [] + + def from_published_scene(self, replace_in_path=True): + """Switch work scene for published scene. + + If rendering/exporting from published scenes is enabled, this will + replace paths from working scene to published scene. + + Args: + replace_in_path (bool): if True, it will try to find + old scene name in path of expected files and replace it + with name of published scene. + + Returns: + str: Published scene path. + + Note: + Published scene path is actually determined from project Anatomy + as at the time this plugin is running scene can still no be + published. + + """ + anatomy = self._instance.context.data['anatomy'] + for i in self._instance.context: + if "workfile" in i.data["families"]: + # test if there is instance of workfile waiting + # to be published. + assert i.data["publish"] is True, ( + "Workfile (scene) must be published along") + # determine published path from Anatomy. 
+ template_data = i.data.get("anatomyData") + rep = i.data.get("representations")[0].get("name") + template_data["representation"] = rep + template_data["ext"] = rep + template_data["comment"] = None + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled["publish"]["path"] + filepath = os.path.normpath(template_filled) + + self.log.info("Using published scene for render {}".format( + filepath)) + + if not os.path.exists(filepath): + self.log.error("published scene does not exist!") + raise + + if not replace_in_path: + return filepath + + # now we need to switch scene in expected files + # because token will now point to published + # scene file and that might differ from current one + new_scene = os.path.splitext( + os.path.basename(filepath))[0] + orig_scene = os.path.splitext( + os.path.basename( + self._instance.context.data["currentFile"]))[0] + exp = self._instance.data.get("expectedFiles") + + if isinstance(exp[0], dict): + # we have aovs and we need to iterate over them + new_exp = {} + for aov, files in exp[0].items(): + replaced_files = [] + for f in files: + replaced_files.append( + f.replace(orig_scene, new_scene) + ) + new_exp[aov] = replaced_files + self._instance.data["expectedFiles"] = [new_exp] + else: + new_exp = [] + for f in exp: + new_exp.append( + f.replace(orig_scene, new_scene) + ) + self._instance.data["expectedFiles"] = [new_exp] + self.log.info("Scene name was switched {} -> {}".format( + orig_scene, new_scene + )) + + return filepath + + def assemble_payload( + self, job_info=None, plugin_info=None, aux_files=None): + """Assemble payload data from its various parts. + + Args: + job_info (dict): Deadline JobInfo. You can use + :class:`DeadlineJobInfo` for it. + plugin_info (dict): Deadline PluginInfo. Plugin specific options. + aux_files (list, optional): List of auxiliary file to submit with + the job. + + Returns: + dict: Deadline Payload. + + """ + return { + "JobInfo": job_info or self.job_info, + "PluginInfo": plugin_info or self.plugin_info, + "AuxFiles": aux_files or self.aux_files + } + def submit(self, payload): + """Submit payload to Deadline API end-point. + + This takes payload in the form of JSON file and POST it to + Deadline jobs end-point. + + Args: + payload (str): string encoded json with job payload. + + Returns: + str: resulting Deadline job id. + + Throws: + RuntimeError: if submission fails. + + """ url = "{}/api/jobs".format(self._deadline_url) response = self._requests_post(url, json=payload) if not response.ok: @@ -357,8 +575,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): self.log.debug(payload) raise RuntimeError(response.text) - dependency = response.json() - return dependency["_id"] + result = response.json() + return result["_id"] def _requests_post(self, *args, **kwargs): """Wrap request post method. 
From 79eb50c656b809bd40cc38763da243bd23dbcc48 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 6 Nov 2020 18:54:05 +0100 Subject: [PATCH 05/13] few bugs and cosmetics --- pype/lib/abstract_collect_render.py | 47 ++++++++++----- pype/lib/abstract_expected_files.py | 2 +- pype/lib/abstract_submit_deadline.py | 90 +++++++++++++++------------- 3 files changed, 81 insertions(+), 58 deletions(-) diff --git a/pype/lib/abstract_collect_render.py b/pype/lib/abstract_collect_render.py index bf14f6f850..3c173a178d 100644 --- a/pype/lib/abstract_collect_render.py +++ b/pype/lib/abstract_collect_render.py @@ -34,45 +34,52 @@ class RenderInstance(object): attachTo = attr.ib(init=False) setMembers = attr.ib() publish = attr.ib() - review = attr.ib(default=False) renderer = attr.ib() - priority = attr.ib(default=50) name = attr.ib() - family = attr.ib(default="renderlayer") - families = attr.ib(default=["renderlayer"]) - # format settings resolutionWidth = attr.ib() resolutionHeight = attr.ib() pixelAspect = attr.ib() - multipartExr = attr.ib(default=False) + tileRendering = attr.ib() tilesX = attr.ib() tilesY = attr.ib() - convertToScanline = attr.ib(default=False) # time settings frameStart = attr.ib() frameEnd = attr.ib() frameStep = attr.ib() + # -------------------- + # With default values + # metadata + review = attr.ib(default=False) + priority = attr.ib(default=50) + + family = attr.ib(default="renderlayer") + families = attr.ib(default=["renderlayer"]) + + # format settings + multipartExr = attr.ib(default=False) + convertToScanline = attr.ib(default=False) + @frameStart.validator - def check_frame_start(self, attribute, value): + def check_frame_start(self, _, value): """Validate if frame start is not larger then end.""" if value >= self.frameEnd: raise ValueError("frameStart must be smaller " "or equal then frameEnd") @frameEnd.validator - def check_frame_end(self, attribute, value): + def check_frame_end(self, _, value): """Validate if frame end is not less then start.""" if value <= self.frameStart: raise ValueError("frameEnd must be smaller " "or equal then frameStart") @tilesX.validator - def check_tiles_x(self, attribute, value): + def check_tiles_x(self, _, value): """Validate if tile x isn't less then 1.""" if not self.tileRendering: return @@ -83,7 +90,7 @@ class RenderInstance(object): raise ValueError("both tiles X a Y sizes are set to 1") @tilesY.validator - def check_tiles_y(self, attribute, value): + def check_tiles_y(self, _, value): """Validate if tile y isn't less then 1.""" if not self.tileRendering: return @@ -102,6 +109,13 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): label = "Collect Render" sync_workfile_version = False + def __init__(self, *args, **kwargs): + """Constructor.""" + super(AbstractCollectRender, self).__init__(*args, **kwargs) + self._file_path = None + self._asset = api.Session["AVALON_ASSET"] + + def process(self, context): """Entry point to collector.""" rendering_instance = None @@ -121,8 +135,7 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): ) return - self._filepath = context.data["currentFile"].replace("\\", "/") - self._asset = api.Session["AVALON_ASSET"] + self._file_path = context.data["currentFile"].replace("\\", "/") render_instances = self.get_instances() for render_instance in render_instances: @@ -207,7 +220,12 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): pass def _get_expected_files(self, render_instance): - """Get list of expected files.""" + """Get list of expected files. 
+ + Returns: + list: expected files. + + """ # return all expected files for all cameras and aovs in given # frame range ef = ExpectedFiles() @@ -223,6 +241,7 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): "attaching multiple AOVs or renderable cameras to " "subset is not supported" ) + return exp_files def add_additional_data(self, data): """Add additional data to collected instance. diff --git a/pype/lib/abstract_expected_files.py b/pype/lib/abstract_expected_files.py index f493cdb751..f9f3c17ef5 100644 --- a/pype/lib/abstract_expected_files.py +++ b/pype/lib/abstract_expected_files.py @@ -40,7 +40,7 @@ class ExpectedFiles: Args: - renderer_instance (:class:`RenderInstance`): Data passed from + render_instance (:class:`RenderInstance`): Data passed from collector to determine files. This should be instance of :class:`abstract_collect_render.RenderInstance` diff --git a/pype/lib/abstract_submit_deadline.py b/pype/lib/abstract_submit_deadline.py index 26ea66f5ce..869a011fd7 100644 --- a/pype/lib/abstract_submit_deadline.py +++ b/pype/lib/abstract_submit_deadline.py @@ -179,11 +179,9 @@ class DeadlineJobInfo: dict: as `{'EnvironmentKeyValue0', 'key=value'}` """ - out = [] - index = 0 - for v in self._environmentKeyValue: + out = {} + for index, v in enumerate(self._environmentKeyValue): out["EnvironmentKeyValue{}".format(index)] = v - index += 1 return out @EnvironmentKeyValue.setter @@ -207,11 +205,9 @@ class DeadlineJobInfo: dict: as `{'ExtraInfo0': 'value'}` """ - out = [] - index = 0 - for v in self._extraInfos: + out = {} + for index, v in enumerate(self._extraInfos): out["ExtraInfo{}".format(index)] = v - index += 1 return out @ExtraInfo.setter @@ -226,11 +222,9 @@ class DeadlineJobInfo: dict: as {'ExtraInfoKeyValue0': 'key=value'}` """ - out = [] - index = 0 - for v in self._extraInfoKeyValues: + out = {} + for index, v in enumerate(self._extraInfoKeyValues): out["ExtraInfoKeyValue{}".format(index)] = v - index += 1 return out @ExtraInfoKeyValue.setter @@ -250,11 +244,9 @@ class DeadlineJobInfo: dict: as `{'TaskExtraInfoName0': 'value'}` """ - out = [] - index = 0 - for v in self._taskExtraInfos: + out = {} + for index, v in enumerate(self._taskExtraInfos): out["TaskExtraInfoName{}".format(index)] = v - index += 1 return out @TaskExtraInfoName.setter @@ -275,11 +267,9 @@ class DeadlineJobInfo: dict: as `{'OutputFilename0': 'filename'}` """ - out = [] - index = 0 - for v in self._outputFilename: + out = {} + for index, v in enumerate(self._outputFilename): out["OutputFilename{}".format(index)] = v - index += 1 return out @OutputFilename.setter @@ -294,11 +284,9 @@ class DeadlineJobInfo: dict: as `{'OutputFilenme#Tile': 'tile'}` """ - out = [] - index = 0 - for v in self._outputFilenameTile: + out = {} + for index, v in enumerate(self._outputFilenameTile): out["OutputFilename{}Tile".format(index)] = v - index += 1 return out @OutputFilenameTile.setter @@ -313,11 +301,9 @@ class DeadlineJobInfo: dict: as `{'OutputDirectory0': 'dir'}` """ - out = [] - index = 0 - for v in self._outputDirectory: + out = {} + for index, v in enumerate(self._outputDirectory): out["OutputDirectory{}".format(index)] = v - index += 1 return out @OutputDirectory.setter @@ -338,18 +324,22 @@ class DeadlineJobInfo: MaintenanceJobStartFrame = attr.ib(default=None) # Default: 0 MaintenanceJobEndFrame = attr.ib(default=None) # Default: 0 - def render(self): + def serialize(self): """Return all data serialized as dictionary. Returns: OrderedDict: all serialized data. 
""" - def no_privates(a, _): - return not a.name.startswith("_") + def filter_data(a, v): + if a.name.startswith("_"): + return False + if v is None: + return False + return True serialized = attr.asdict( - self, dict_factory=OrderedDict, filter=no_privates) + self, dict_factory=OrderedDict, filter=filter_data) serialized.update(self.EnvironmentKeyValue) serialized.update(self.ExtraInfo) serialized.update(self.ExtraInfoKeyValue) @@ -369,6 +359,15 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): use_published = True asset_dependencies = False + def __init__(self, *args, **kwargs): + super(AbstractSubmitDeadline, self).__init__(*args, **kwargs) + self._instance = None + self._deadline_url = None + self.scene_path = None + self.job_info = None + self.plugin_info = None + self.aux_files = None + def process(self, instance): """Plugin entry point.""" self._instance = instance @@ -393,6 +392,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): self.plugin_info = self.get_plugin_info() self.aux_files = self.get_aux_files() + self.process_submission() + def process_submission(self): """Process data for submission. @@ -416,7 +417,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): :class:`DeadlineJobInfo` Returns: - dict: Filled Deadline JobInfo. + :class:`DeadlineJobInfo`: Filled Deadline JobInfo. """ pass @@ -461,6 +462,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): Returns: str: Published scene path. + None: if no published scene is found. Note: Published scene path is actually determined from project Anatomy @@ -469,6 +471,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): """ anatomy = self._instance.context.data['anatomy'] + file_path = None for i in self._instance.context: if "workfile" in i.data["families"]: # test if there is instance of workfile waiting @@ -483,23 +486,23 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): template_data["comment"] = None anatomy_filled = anatomy.format(template_data) template_filled = anatomy_filled["publish"]["path"] - filepath = os.path.normpath(template_filled) + file_path = os.path.normpath(template_filled) self.log.info("Using published scene for render {}".format( - filepath)) + file_path)) - if not os.path.exists(filepath): + if not os.path.exists(file_path): self.log.error("published scene does not exist!") raise if not replace_in_path: - return filepath + return file_path # now we need to switch scene in expected files # because token will now point to published # scene file and that might differ from current one new_scene = os.path.splitext( - os.path.basename(filepath))[0] + os.path.basename(file_path))[0] orig_scene = os.path.splitext( os.path.basename( self._instance.context.data["currentFile"]))[0] @@ -527,14 +530,14 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): orig_scene, new_scene )) - return filepath + return file_path def assemble_payload( self, job_info=None, plugin_info=None, aux_files=None): """Assemble payload data from its various parts. Args: - job_info (dict): Deadline JobInfo. You can use + job_info (DeadlineJobInfo): Deadline JobInfo. You can use :class:`DeadlineJobInfo` for it. plugin_info (dict): Deadline PluginInfo. Plugin specific options. aux_files (list, optional): List of auxiliary file to submit with @@ -544,8 +547,9 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): dict: Deadline Payload. 
""" + job = job_info or self.job_info return { - "JobInfo": job_info or self.job_info, + "JobInfo": job.serialize(), "PluginInfo": plugin_info or self.plugin_info, "AuxFiles": aux_files or self.aux_files } @@ -557,7 +561,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): Deadline jobs end-point. Args: - payload (str): string encoded json with job payload. + payload (dict): dict to become json in deadline submission. Returns: str: resulting Deadline job id. @@ -569,7 +573,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): url = "{}/api/jobs".format(self._deadline_url) response = self._requests_post(url, json=payload) if not response.ok: - self.log.error("Submition failed!") + self.log.error("Submission failed!") self.log.error(response.status_code) self.log.error(response.content) self.log.debug(payload) From ad216428f679354761c39cfb4bb390e8e3407130 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 6 Nov 2020 18:58:20 +0100 Subject: [PATCH 06/13] shut up hound --- pype/lib/abstract_collect_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pype/lib/abstract_collect_render.py b/pype/lib/abstract_collect_render.py index 3c173a178d..0f65a6ef90 100644 --- a/pype/lib/abstract_collect_render.py +++ b/pype/lib/abstract_collect_render.py @@ -115,7 +115,6 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): self._file_path = None self._asset = api.Session["AVALON_ASSET"] - def process(self, context): """Entry point to collector.""" rendering_instance = None From 75b6401cdaf698afee815debefca49f835068a5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 9 Nov 2020 12:22:16 +0100 Subject: [PATCH 07/13] remove maya specific code --- pype/lib/abstract_collect_render.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/pype/lib/abstract_collect_render.py b/pype/lib/abstract_collect_render.py index 0f65a6ef90..b8ee107bd9 100644 --- a/pype/lib/abstract_collect_render.py +++ b/pype/lib/abstract_collect_render.py @@ -117,23 +117,11 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): def process(self, context): """Entry point to collector.""" - rendering_instance = None for instance in context: - if "rendering" in instance.data["families"]: - rendering_instance = instance - rendering_instance.data["remove"] = True - # make sure workfile instance publishing is enabled if "workfile" in instance.data["families"]: instance.data["publish"] = True - if not rendering_instance: - self.log.info( - "No rendering instance found, skipping render " - "layer collection." 
- ) - return - self._file_path = context.data["currentFile"].replace("\\", "/") render_instances = self.get_instances() From 840ed3c104738ea4f1a54d79737d13dbcd480e3b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 9 Nov 2020 15:34:49 +0100 Subject: [PATCH 08/13] solve multiple inheritance of meta classes --- pype/lib/abstract_collect_render.py | 4 ++-- pype/lib/abstract_metaplugins.py | 10 ++++++++++ pype/lib/abstract_submit_deadline.py | 5 +++-- 3 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 pype/lib/abstract_metaplugins.py diff --git a/pype/lib/abstract_collect_render.py b/pype/lib/abstract_collect_render.py index b8ee107bd9..89bbf6f024 100644 --- a/pype/lib/abstract_collect_render.py +++ b/pype/lib/abstract_collect_render.py @@ -13,6 +13,7 @@ from avalon import api import pyblish.api from .abstract_expected_files import ExpectedFiles +from .abstract_metaplugins import AbstractMetaContextPlugin @attr.s @@ -101,8 +102,7 @@ class RenderInstance(object): raise ValueError("both tiles X a Y sizes are set to 1") -@six.add_metaclass(ABCMeta) -class AbstractCollectRender(pyblish.api.ContextPlugin): +class AbstractCollectRender(AbstractMetaContextPlugin): """Gather all publishable render layers from renderSetup.""" order = pyblish.api.CollectorOrder + 0.01 diff --git a/pype/lib/abstract_metaplugins.py b/pype/lib/abstract_metaplugins.py new file mode 100644 index 0000000000..684d2ab19e --- /dev/null +++ b/pype/lib/abstract_metaplugins.py @@ -0,0 +1,10 @@ +from abc import ABCMeta +from pyblish.api import InstancePlugin, ContextPlugin + + +class AbstractMetaInstancePlugin(ABCMeta, InstancePlugin): + pass + + +class AbstractMetaContextPlugin(ABCMeta, ContextPlugin): + pass diff --git a/pype/lib/abstract_submit_deadline.py b/pype/lib/abstract_submit_deadline.py index 869a011fd7..e67e261cae 100644 --- a/pype/lib/abstract_submit_deadline.py +++ b/pype/lib/abstract_submit_deadline.py @@ -15,6 +15,7 @@ import attr import requests import pyblish.api +from .abstract_metaplugins import AbstractMetaInstancePlugin @attr.s @@ -350,8 +351,8 @@ class DeadlineJobInfo: return serialized -@six.add_metaclass(ABCMeta) -class AbstractSubmitDeadline(pyblish.api.InstancePlugin): +@six.add_metaclass(AbstractMetaInstancePlugin) +class AbstractSubmitDeadline: """Class abstracting access to Deadline.""" label = "Submit to Deadline" From 2f615c9060b2bfd4f4b24c0230e7331b197199a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 10 Nov 2020 18:06:26 +0100 Subject: [PATCH 09/13] fix metaclass madness --- pype/lib/abstract_collect_render.py | 12 ++++++++---- pype/lib/abstract_metaplugins.py | 6 +++--- pype/lib/abstract_submit_deadline.py | 4 ++-- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/pype/lib/abstract_collect_render.py b/pype/lib/abstract_collect_render.py index 89bbf6f024..33202b6ac3 100644 --- a/pype/lib/abstract_collect_render.py +++ b/pype/lib/abstract_collect_render.py @@ -4,10 +4,10 @@ TODO: use @dataclass when times come. 
""" -from abc import ABCMeta, abstractmethod +from abc import abstractmethod -import six import attr +import six from avalon import api import pyblish.api @@ -102,7 +102,8 @@ class RenderInstance(object): raise ValueError("both tiles X a Y sizes are set to 1") -class AbstractCollectRender(AbstractMetaContextPlugin): +@six.add_metaclass(AbstractMetaContextPlugin) +class AbstractCollectRender(pyblish.api.ContextPlugin): """Gather all publishable render layers from renderSetup.""" order = pyblish.api.CollectorOrder + 0.01 @@ -196,9 +197,12 @@ class AbstractCollectRender(AbstractMetaContextPlugin): self.post_collecting_action() @abstractmethod - def get_instances(self): + def get_instances(self, context): """Get all renderable instances and their data. + Args: + context (pyblish.api.Context): Context object. + Returns: list of :class:`RenderInstance`: All collected renderable instances (like render layers, write nodes, etc.) diff --git a/pype/lib/abstract_metaplugins.py b/pype/lib/abstract_metaplugins.py index 684d2ab19e..f8163956ad 100644 --- a/pype/lib/abstract_metaplugins.py +++ b/pype/lib/abstract_metaplugins.py @@ -1,10 +1,10 @@ from abc import ABCMeta -from pyblish.api import InstancePlugin, ContextPlugin +from pyblish.plugin import MetaPlugin, ExplicitMetaPlugin -class AbstractMetaInstancePlugin(ABCMeta, InstancePlugin): +class AbstractMetaInstancePlugin(ABCMeta, MetaPlugin): pass -class AbstractMetaContextPlugin(ABCMeta, ContextPlugin): +class AbstractMetaContextPlugin(ABCMeta, ExplicitMetaPlugin): pass diff --git a/pype/lib/abstract_submit_deadline.py b/pype/lib/abstract_submit_deadline.py index e67e261cae..44ee9ce436 100644 --- a/pype/lib/abstract_submit_deadline.py +++ b/pype/lib/abstract_submit_deadline.py @@ -5,7 +5,7 @@ It provides Deadline JobInfo data class. """ import os -from abc import ABCMeta, abstractmethod +from abc import abstractmethod import platform import getpass from collections import OrderedDict @@ -352,7 +352,7 @@ class DeadlineJobInfo: @six.add_metaclass(AbstractMetaInstancePlugin) -class AbstractSubmitDeadline: +class AbstractSubmitDeadline(pyblish.api.InstancePlugin): """Class abstracting access to Deadline.""" label = "Submit to Deadline" From 2780ce537765ca0a0a3e707f28dbb5ab484909fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 11 Nov 2020 16:05:59 +0100 Subject: [PATCH 10/13] make expected files abstract --- pype/lib/abstract_collect_render.py | 49 ++++++++++++++++++----------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/pype/lib/abstract_collect_render.py b/pype/lib/abstract_collect_render.py index 33202b6ac3..457c0906a4 100644 --- a/pype/lib/abstract_collect_render.py +++ b/pype/lib/abstract_collect_render.py @@ -127,7 +127,17 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): render_instances = self.get_instances() for render_instance in render_instances: - exp_files = self._get_expected_files(render_instance) + exp_files = self.get_expected_files(render_instance) + assert exp_files, "no file names were generated, this is bug" + + # if we want to attach render to subset, check if we have AOV's + # in expectedFiles. 
If so, raise error as we cannot attach AOV + # (considered to be subset on its own) to another subset + if render_instance.attachTo: + assert isinstance(exp_files, list), ( + "attaching multiple AOVs or renderable cameras to " + "subset is not supported" + ) frame_start_render = int(render_instance.frameStart) frame_end_render = int(render_instance.frameEnd) @@ -210,29 +220,30 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): """ pass - def _get_expected_files(self, render_instance): + @abstractmethod + def get_expected_files(self, render_instance): """Get list of expected files. Returns: - list: expected files. + list: expected files. This can be either simple list of files with + their paths, or list of dictionaries, where key is name of AOV + for example and value is list of files for that AOV. + + Example:: + + ['/path/to/file.001.exr', '/path/to/file.002.exr'] + + or as dictionary: + + [ + { + "beauty": ['/path/to/beauty.001.exr', ...], + "mask": ['/path/to/mask.001.exr'] + } + ] """ - # return all expected files for all cameras and aovs in given - # frame range - ef = ExpectedFiles() - exp_files = ef.get(render_instance) - self.log.info("multipart: {}".format(ef.multipart)) - assert exp_files, "no file names were generated, this is bug" - - # if we want to attach render to subset, check if we have AOV's - # in expectedFiles. If so, raise error as we cannot attach AOV - # (considered to be subset on its own) to another subset - if render_instance.attachTo: - assert isinstance(exp_files, list), ( - "attaching multiple AOVs or renderable cameras to " - "subset is not supported" - ) - return exp_files + pass def add_additional_data(self, data): """Add additional data to collected instance. From afa051b16eef118a5e6815e082202c06eca2d333 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 12 Nov 2020 18:50:54 +0100 Subject: [PATCH 11/13] add description, default values, context handling --- pype/lib/abstract_collect_render.py | 67 ++++++++++++++++------------- 1 file changed, 36 insertions(+), 31 deletions(-) diff --git a/pype/lib/abstract_collect_render.py b/pype/lib/abstract_collect_render.py index 457c0906a4..098788430b 100644 --- a/pype/lib/abstract_collect_render.py +++ b/pype/lib/abstract_collect_render.py @@ -12,7 +12,6 @@ import six from avalon import api import pyblish.api -from .abstract_expected_files import ExpectedFiles from .abstract_metaplugins import AbstractMetaContextPlugin @@ -26,44 +25,44 @@ class RenderInstance(object): """ # metadata - version = attr.ib() - time = attr.ib() - source = attr.ib() - label = attr.ib() - subset = attr.ib() - asset = attr.ib(init=False) - attachTo = attr.ib(init=False) - setMembers = attr.ib() - publish = attr.ib() - renderer = attr.ib() - name = attr.ib() + version = attr.ib() # instance version + time = attr.ib() # time of instance creation (avalon.api.time()) + source = attr.ib() # path to source scene file + label = attr.ib() # label to show in GUI + subset = attr.ib() # subset name + asset = attr.ib() # asset name (AVALON_ASSET) + attachTo = attr.ib() # subset name to attach render to + setMembers = attr.ib() # list of nodes/members producing render output + publish = attr.ib() # bool, True to publish instance + name = attr.ib() # instance name # format settings - resolutionWidth = attr.ib() - resolutionHeight = attr.ib() - pixelAspect = attr.ib() - - tileRendering = attr.ib() - tilesX = attr.ib() - tilesY = attr.ib() + resolutionWidth = attr.ib() # resolution width (1920) + resolutionHeight = attr.ib() 
# resolution height (1080)
+    pixelAspect = attr.ib()  # pixel aspect (1.0)

     # time settings
-    frameStart = attr.ib()
-    frameEnd = attr.ib()
-    frameStep = attr.ib()
+    frameStart = attr.ib()  # start frame
+    frameEnd = attr.ib()  # end frame
+    frameStep = attr.ib()  # frame step

     # --------------------
     # With default values
     # metadata
-    review = attr.ib(default=False)
-    priority = attr.ib(default=50)
+    renderer = attr.ib(default="")  # renderer - can be used in Deadline
+    review = attr.ib(default=False)  # generate review from instance (bool)
+    priority = attr.ib(default=50)  # job priority on farm

     family = attr.ib(default="renderlayer")
-    families = attr.ib(default=["renderlayer"])
+    families = attr.ib(default=["renderlayer"])  # list of families

     # format settings
-    multipartExr = attr.ib(default=False)
-    convertToScanline = attr.ib(default=False)
+    multipartExr = attr.ib(default=False)  # flag for multipart exrs
+    convertToScanline = attr.ib(default=False)  # flag for exr conversion
+
+    tileRendering = attr.ib(default=False)  # bool: treat render as tiles
+    tilesX = attr.ib(default=0)  # number of tiles in X
+    tilesY = attr.ib(default=0)  # number of tiles in Y

     @frameStart.validator
     def check_frame_start(self, _, value):
@@ -115,17 +114,23 @@ class AbstractCollectRender(pyblish.api.ContextPlugin):
         super(AbstractCollectRender, self).__init__(*args, **kwargs)
         self._file_path = None
         self._asset = api.Session["AVALON_ASSET"]
+        self._context = None

     def process(self, context):
         """Entry point to collector."""
+        self._context = context
         for instance in context:
             # make sure workfile instance publishing is enabled
-            if "workfile" in instance.data["families"]:
-                instance.data["publish"] = True
+            try:
+                if "workfile" in instance.data["families"]:
+                    instance.data["publish"] = True
+            except KeyError:
+                # be tolerant if 'families' is missing.
+                pass

         self._file_path = context.data["currentFile"].replace("\\", "/")

-        render_instances = self.get_instances()
+        render_instances = self.get_instances(context)
         for render_instance in render_instances:
             exp_files = self.get_expected_files(render_instance)
             assert exp_files, "no file names were generated, this is bug"
@@ -163,7 +168,7 @@ class AbstractCollectRender(pyblish.api.ContextPlugin):
             "subset": render_instance.subset,
             "attachTo": render_instance.attachTo,
             "setMembers": render_instance.setMembers,
-            "multipartExr": exp_files.multipart,
+            "multipartExr": render_instance.multipartExr,
             "review": render_instance.review or False,
             "publish": True,

From 63fc013c897ffe5f8986473df74aa3faccf7a686 Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Tue, 17 Nov 2020 00:36:29 +0100
Subject: [PATCH 12/13] fix how render instance is created

---
 pype/lib/abstract_collect_render.py  | 27 +++++----------------------
 pype/lib/abstract_submit_deadline.py |  7 ++++---
 2 files changed, 9 insertions(+), 25 deletions(-)

diff --git a/pype/lib/abstract_collect_render.py b/pype/lib/abstract_collect_render.py
index 098788430b..cd53715763 100644
--- a/pype/lib/abstract_collect_render.py
+++ b/pype/lib/abstract_collect_render.py
@@ -124,6 +124,8 @@ class AbstractCollectRender(pyblish.api.ContextPlugin):
             try:
                 if "workfile" in instance.data["families"]:
                     instance.data["publish"] = True
+                if "renderFarm" in instance.data["families"]:
+                    instance.data["remove"] = True
             except KeyError:
                 # be tolerant if 'families' is missing.
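
For illustration, a host integration might satisfy this collector contract roughly as follows. This is only a sketch, not part of the patch: the "foo" host, layer names, frame range and paths are made up, and it assumes RenderInstance and AbstractCollectRender are imported from pype.lib.abstract_collect_render as introduced here::

    from avalon import api
    from pype.lib.abstract_collect_render import (
        AbstractCollectRender,
        RenderInstance,
    )

    class CollectFooRenderLayers(AbstractCollectRender):
        """Sketch of a host specific collector ("foo" is a made up host)."""

        label = "Collect Foo Render Layers"
        hosts = ["foo"]

        def get_instances(self, context):
            # one RenderInstance per renderable layer; only attributes
            # without defaults have to be filled in explicitly
            return [
                RenderInstance(
                    version=1,
                    time=api.time(),
                    source=context.data["currentFile"],
                    label="renderMain (1001-1100)",
                    subset="renderMain",
                    asset=api.Session["AVALON_ASSET"],
                    attachTo="",  # not attaching to another subset
                    setMembers=["renderLayerMain"],
                    publish=True,
                    name="renderMain",
                    resolutionWidth=1920,
                    resolutionHeight=1080,
                    pixelAspect=1.0,
                    frameStart=1001,
                    frameEnd=1100,
                    frameStep=1,
                    # renderer, review, priority, tiles, ... keep the
                    # defaults declared on the dataclass
                )
            ]

        def get_expected_files(self, render_instance):
            # flat list variant; the AOV dictionary variant described in
            # the docstring would be returned the same way
            return [
                "/renders/renderMain.{:04d}.exr".format(frame)
                for frame in range(render_instance.frameStart,
                                   render_instance.frameEnd + 1)
            ]
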
pass @@ -165,13 +167,6 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): frame_end_handle = frame_end_render data = { - "subset": render_instance.subset, - "attachTo": render_instance.attachTo, - "setMembers": render_instance.setMembers, - "multipartExr": render_instance.multipartExr, - "review": render_instance.review or False, - "publish": True, - "handleStart": handle_start, "handleEnd": handle_end, "frameStart": frame_start, @@ -179,34 +174,22 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): "frameStartHandle": frame_start_handle, "frameEndHandle": frame_end_handle, "byFrameStep": int(render_instance.frameStep), - "renderer": render_instance.renderer, - # instance subset - "family": render_instance.family, - "families": render_instance.families, - "asset": render_instance.asset, - "time": render_instance.time, + "author": context.data["user"], # Add source to allow tracing back to the scene from # which was submitted originally - "source": render_instance.source, "expectedFiles": exp_files, - "resolutionWidth": render_instance.resolutionWidth, - "resolutionHeight": render_instance.resolutionHeight, - "pixelAspect": render_instance.pixelAspect, - "tileRendering": render_instance.tileRendering or False, - "tilesX": render_instance.tilesX or 2, - "tilesY": render_instance.tilesY or 2, - "priority": render_instance.priority, - "convertToScanline": render_instance.convertToScanline or False } if self.sync_workfile_version: data["version"] = context.data["version"] # add additional data data = self.add_additional_data(data) + render_instance_dict = attr.asdict(render_instance) instance = context.create_instance(render_instance.name) instance.data["label"] = render_instance.label + instance.data.update(render_instance_dict) instance.data.update(data) self.post_collecting_action() diff --git a/pype/lib/abstract_submit_deadline.py b/pype/lib/abstract_submit_deadline.py index 44ee9ce436..3337860508 100644 --- a/pype/lib/abstract_submit_deadline.py +++ b/pype/lib/abstract_submit_deadline.py @@ -19,7 +19,7 @@ from .abstract_metaplugins import AbstractMetaInstancePlugin @attr.s -class DeadlineJobInfo: +class DeadlineJobInfo(object): """Mapping of all Deadline *JobInfo* attributes. This contains all JobInfo attributes plus their default values. @@ -474,14 +474,15 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): anatomy = self._instance.context.data['anatomy'] file_path = None for i in self._instance.context: - if "workfile" in i.data["families"]: + if "workfile" in i.data["families"] \ + or i.data["family"] == "workfile": # test if there is instance of workfile waiting # to be published. assert i.data["publish"] is True, ( "Workfile (scene) must be published along") # determine published path from Anatomy. 
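
The slimmed down data dictionary in the abstract_collect_render.py hunk above works because attr.asdict() now dumps every RenderInstance attribute into instance.data first, and the computed collector values are layered on top of it afterwards. A minimal, self contained sketch of that merge order; MiniRenderInstance is a stand-in defined only for this example, not part of the patch::

    import attr

    @attr.s
    class MiniRenderInstance(object):
        subset = attr.ib()
        family = attr.ib(default="renderlayer")
        priority = attr.ib(default=50)

    render_instance = MiniRenderInstance(subset="renderMain")

    instance_data = {}
    # everything declared on the dataclass ends up in instance.data ...
    instance_data.update(attr.asdict(render_instance))
    # ... and the computed values are applied afterwards, so they win
    # whenever a key exists in both dictionaries
    instance_data.update({
        "frameStart": 1001,
        "expectedFiles": ["/renders/renderMain.1001.exr"],
    })

    assert instance_data["subset"] == "renderMain"
    assert instance_data["priority"] == 50
    assert instance_data["frameStart"] == 1001
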
template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0].get("name") + rep = i.data.get("representations")[0].get("ext") template_data["representation"] = rep template_data["ext"] = rep template_data["comment"] = None From c5d64bd559796d4a0d51714db056792c92dfe8d5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 18 Nov 2020 17:40:04 +0100 Subject: [PATCH 13/13] Added required attributes for submit_publish_job Added saving deadline response to instance --- pype/lib/abstract_collect_render.py | 6 ++++++ pype/lib/abstract_submit_deadline.py | 3 +++ 2 files changed, 9 insertions(+) diff --git a/pype/lib/abstract_collect_render.py b/pype/lib/abstract_collect_render.py index cd53715763..6bcef1ba90 100644 --- a/pype/lib/abstract_collect_render.py +++ b/pype/lib/abstract_collect_render.py @@ -64,6 +64,12 @@ class RenderInstance(object): tilesX = attr.ib(default=0) # number of tiles in X tilesY = attr.ib(default=0) # number of tiles in Y + # submit_publish_job + toBeRenderedOn = attr.ib(default=None) + deadlineSubmissionJob = attr.ib(default=None) + anatomyData = attr.ib(default=None) + outputDir = attr.ib(default=None) + @frameStart.validator def check_frame_start(self, _, value): """Validate if frame start is not larger then end.""" diff --git a/pype/lib/abstract_submit_deadline.py b/pype/lib/abstract_submit_deadline.py index 3337860508..09916523a4 100644 --- a/pype/lib/abstract_submit_deadline.py +++ b/pype/lib/abstract_submit_deadline.py @@ -582,6 +582,9 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): raise RuntimeError(response.text) result = response.json() + # for submit publish job + self._instance.data["deadlineSubmissionJob"] = result + return result["_id"] def _requests_post(self, *args, **kwargs):
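
Storing the raw submission response on the instance is what lets a later plugin (the submit_publish_job step named in the commit message) chain its own Deadline job to the render job. Below is a sketch of such a downstream consumer; the plugin class and the publishJobDependencies key are illustrative only, the only assumption taken from the patch is that deadlineSubmissionJob holds the parsed JSON response whose "_id" is the Deadline job id::

    import pyblish.api

    class CollectRenderJobDependency(pyblish.api.InstancePlugin):
        """Hypothetical plugin reading the stored Deadline submission."""

        order = pyblish.api.IntegratorOrder + 0.1
        label = "Chain Publish Job to Render Job"
        families = ["renderlayer"]

        def process(self, instance):
            submission = instance.data.get("deadlineSubmissionJob")
            if not submission:
                self.log.info("No render job submitted, nothing to chain.")
                return

            # "_id" is the job id Deadline returned on submission
            instance.data["publishJobDependencies"] = [submission["_id"]]

The new RenderInstance defaults added in this patch (toBeRenderedOn, anatomyData, outputDir) travel the same route: attr.asdict() copies them onto instance.data, where a publish-job plugin of this kind can pick them up.
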