-This node is maintained by OpenPype Publisher.
-To remove it use Publisher gui.
-
-        """)
-        node.addKnob(info_knob)
-
     def check_existing_subset(self, subset_name):
         """Make sure subset name is unique.
@@ -153,8 +139,6 @@ class NukeCreator(NewCreator):
             created_node = nuke.createNode(node_type)
             created_node["name"].setValue(node_name)
 
-            self.add_info_knob(created_node)
-
             for key, values in node_knobs.items():
                 if key in created_node.knobs():
                     created_node[key].setValue(values)
diff --git a/openpype/hosts/nuke/plugins/create/create_backdrop.py b/openpype/hosts/nuke/plugins/create/create_backdrop.py
index ff415626be..52959bbef2 100644
--- a/openpype/hosts/nuke/plugins/create/create_backdrop.py
+++ b/openpype/hosts/nuke/plugins/create/create_backdrop.py
@@ -36,8 +36,6 @@ class CreateBackdrop(NukeCreator):
             created_node["note_font_size"].setValue(24)
             created_node["label"].setValue("[{}]".format(node_name))
 
-            self.add_info_knob(created_node)
-
             return created_node
 
     def create(self, subset_name, instance_data, pre_create_data):
diff --git a/openpype/hosts/nuke/plugins/create/create_camera.py b/openpype/hosts/nuke/plugins/create/create_camera.py
index 5553645af6..b84280b11b 100644
--- a/openpype/hosts/nuke/plugins/create/create_camera.py
+++ b/openpype/hosts/nuke/plugins/create/create_camera.py
@@ -39,8 +39,6 @@ class CreateCamera(NukeCreator):
 
             created_node["name"].setValue(node_name)
 
-            self.add_info_knob(created_node)
-
             return created_node
 
     def create(self, subset_name, instance_data, pre_create_data):
diff --git a/openpype/hosts/nuke/plugins/create/create_gizmo.py b/openpype/hosts/nuke/plugins/create/create_gizmo.py
index e3ce70dd59..cbe2f635c9 100644
--- a/openpype/hosts/nuke/plugins/create/create_gizmo.py
+++ b/openpype/hosts/nuke/plugins/create/create_gizmo.py
@@ -40,8 +40,6 @@ class CreateGizmo(NukeCreator):
 
             created_node["name"].setValue(node_name)
 
-            self.add_info_knob(created_node)
-
             return created_node
 
     def create(self, subset_name, instance_data, pre_create_data):
diff --git a/openpype/hosts/nuke/plugins/create/create_model.py b/openpype/hosts/nuke/plugins/create/create_model.py
index 08a53abca2..a94c9f0313 100644
--- a/openpype/hosts/nuke/plugins/create/create_model.py
+++ b/openpype/hosts/nuke/plugins/create/create_model.py
@@ -40,8 +40,6 @@ class CreateModel(NukeCreator):
 
             created_node["name"].setValue(node_name)
 
-            self.add_info_knob(created_node)
-
             return created_node
 
     def create(self, subset_name, instance_data, pre_create_data):
diff --git a/openpype/hosts/nuke/plugins/create/create_source.py b/openpype/hosts/nuke/plugins/create/create_source.py
index 57504b5d53..8419c3ef33 100644
--- a/openpype/hosts/nuke/plugins/create/create_source.py
+++ b/openpype/hosts/nuke/plugins/create/create_source.py
@@ -32,7 +32,7 @@ class CreateSource(NukeCreator):
             read_node["tile_color"].setValue(
                 int(self.node_color, 16))
             read_node["name"].setValue(node_name)
-            self.add_info_knob(read_node)
+
             return read_node
 
     def create(self, subset_name, instance_data, pre_create_data):
diff --git a/openpype/hosts/nuke/plugins/create/create_write_image.py b/openpype/hosts/nuke/plugins/create/create_write_image.py
index b74cea5dae..0c8adfb75c 100644
--- a/openpype/hosts/nuke/plugins/create/create_write_image.py
+++ b/openpype/hosts/nuke/plugins/create/create_write_image.py
@@ -86,7 +86,6 @@ class CreateWriteImage(napi.NukeWriteCreator):
                 "frame": nuke.frame()
             }
         )
-        self.add_info_knob(created_node)
 
         self._add_frame_range_limit(created_node, instance_data)
 
diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py
index 387768b1dd..f46dd2d6d5 100644
--- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py
+++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py
@@ -74,7 +74,6 @@ class CreateWritePrerender(napi.NukeWriteCreator):
                 "height": height
             }
         )
-        self.add_info_knob(created_node)
 
         self._add_frame_range_limit(created_node)
 
diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py
index 09257f662e..c24405873a 100644
--- a/openpype/hosts/nuke/plugins/create/create_write_render.py
+++ b/openpype/hosts/nuke/plugins/create/create_write_render.py
@@ -66,7 +66,6 @@ class CreateWriteRender(napi.NukeWriteCreator):
                 "height": height
             }
         )
-        self.add_info_knob(created_node)
 
         self.integrate_links(created_node, outputs=False)
 
diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py
index e5feda4cd8..e2cf2addc5 100644
--- a/openpype/hosts/nuke/plugins/publish/extract_render_local.py
+++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py
@@ -23,7 +23,7 @@ class NukeRenderLocal(publish.Extractor,
     order = pyblish.api.ExtractorOrder
     label = "Render Local"
    hosts = ["nuke"]
-    families = ["render.local", "prerender.local", "still.local"]
+    families = ["render.local", "prerender.local", "image.local"]
 
     def process(self, instance):
         child_nodes = (
@@ -136,9 +136,9 @@ class NukeRenderLocal(publish.Extractor,
             families.remove('prerender.local')
             families.insert(0, "prerender")
             instance.data["anatomyData"]["family"] = "prerender"
-        elif "still.local" in families:
+        elif "image.local" in families:
             instance.data['family'] = 'image'
-            families.remove('still.local')
+            families.remove('image.local')
             instance.data["anatomyData"]["family"] = "image"
 
         instance.data["families"] = families
diff --git a/openpype/hosts/nuke/startup/custom_write_node.py b/openpype/hosts/nuke/startup/custom_write_node.py
new file mode 100644
index 0000000000..d9313231d8
--- /dev/null
+++ b/openpype/hosts/nuke/startup/custom_write_node.py
@@ -0,0 +1,76 @@
+import os
+import nuke
+from openpype.hosts.nuke.api.lib import set_node_knobs_from_settings
+
+
+frame_padding = 5
+temp_rendering_path_template = (
+    "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")
+
+knobs_setting = {
+    "knobs": [
+        {
+            "type": "text",
+            "name": "file_type",
+            "value": "exr"
+        },
+        {
+            "type": "text",
+            "name": "datatype",
+            "value": "16 bit half"
+        },
+        {
+            "type": "text",
+            "name": "compression",
+            "value": "Zip (1 scanline)"
+        },
+        {
+            "type": "bool",
+            "name": "autocrop",
+            "value": True
+        },
+        {
+            "type": "color_gui",
+            "name": "tile_color",
+            "value": [
+                186,
+                35,
+                35,
+                255
+            ]
+        },
+        {
+            "type": "text",
+            "name": "channels",
+            "value": "rgb"
+        },
+        {
+            "type": "bool",
+            "name": "create_directories",
+            "value": True
+        }
+    ]
+}
+
+
+def main():
+    write_selected_nodes = [
+        s for s in nuke.selectedNodes() if s.Class() == "Write"]
+
+    ext = None
+    knobs = knobs_setting["knobs"]
+    for knob in knobs:
+        if knob["name"] == "file_type":
+            ext = knob["value"]
+    for w in write_selected_nodes:
+        # data for mapping the path
+        data = {
+            "work": os.getenv("AVALON_WORKDIR"),
+            "subset": w["name"].value(),
+            "frame": "#" * frame_padding,
+            "ext": ext
+        }
+        file_path = temp_rendering_path_template.format(**data)
+        file_path = file_path.replace("\\", "/")
+        w["file"].setValue(file_path)
+        set_node_knobs_from_settings(w, knobs)
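
Note: the new startup script above only defines main() and does not register itself anywhere. A typical way to expose it is a menu.py entry next to it; the menu label and the bare import below are illustrative assumptions (they rely on the startup folder being on NUKE_PATH) and are not part of this change:

    # menu.py (hypothetical) -- menu path and import are assumptions
    import nuke
    import custom_write_node

    menu = nuke.menu("Nuke").addMenu("OpenPype Tools")
    menu.addCommand(
        "Set Temp Write Node Settings",
        custom_write_node.main
    )
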
diff --git a/openpype/hosts/traypublisher/plugins/create/create_editorial.py b/openpype/hosts/traypublisher/plugins/create/create_editorial.py
index 0630dfb3da..8640500b18 100644
--- a/openpype/hosts/traypublisher/plugins/create/create_editorial.py
+++ b/openpype/hosts/traypublisher/plugins/create/create_editorial.py
@@ -487,7 +487,22 @@ or updating already created. Publishing will create OTIO file.
             )
 
             # get video stream data
-            video_stream = media_data["streams"][0]
+            video_streams = []
+            audio_streams = []
+            for stream in media_data["streams"]:
+                codec_type = stream.get("codec_type")
+                if codec_type == "audio":
+                    audio_streams.append(stream)
+
+                elif codec_type == "video":
+                    video_streams.append(stream)
+
+            if not video_streams:
+                raise ValueError(
+                    "Could not find video stream in source file."
+                )
+
+            video_stream = video_streams[0]
             return_data = {
                 "video": True,
                 "start_frame": 0,
@@ -500,12 +515,7 @@ or updating already created. Publishing will create OTIO file.
             }
 
             # get audio streams data
-            audio_stream = [
-                stream for stream in media_data["streams"]
-                if stream["codec_type"] == "audio"
-            ]
-
-            if audio_stream:
+            if audio_streams:
                 return_data["audio"] = True
 
         except Exception as exc:
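
The new stream handling above is easy to sanity-check in isolation. A rough standalone rendition with a made-up ffprobe-style payload (the function name and sample data are assumptions, not taken from this diff):

    def split_streams(media_data):
        # Split ffprobe-style stream entries by codec type.
        video_streams = []
        audio_streams = []
        for stream in media_data["streams"]:
            codec_type = stream.get("codec_type")
            if codec_type == "audio":
                audio_streams.append(stream)
            elif codec_type == "video":
                video_streams.append(stream)
        if not video_streams:
            raise ValueError("Could not find video stream in source file.")
        return video_streams, audio_streams

    # Made-up payload: one video and one audio stream.
    video, audio = split_streams({"streams": [
        {"codec_type": "video", "width": 1920},
        {"codec_type": "audio", "channels": 2},
    ]})
    assert len(video) == 1 and len(audio) == 1
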
diff --git a/openpype/lib/project_backpack.py b/openpype/lib/project_backpack.py
index 07107ec011..55a96664d8 100644
--- a/openpype/lib/project_backpack.py
+++ b/openpype/lib/project_backpack.py
@@ -113,31 +113,45 @@ def pack_project(
         project_name
     ))
 
-    roots = project_doc["config"]["roots"]
-    # Determine root directory of project
-    source_root = None
-    source_root_name = None
-    for root_name, root_value in roots.items():
-        if source_root is not None:
-            raise ValueError(
-                "Packaging is supported only for single root projects"
-            )
-        source_root = root_value
-        source_root_name = root_name
+    if only_documents and not destination_dir:
+        raise ValueError((
+            "Destination directory must be defined"
+            " when only documents should be packed."
+        ))
 
-    root_path = source_root[platform.system().lower()]
-    print("Using root \"{}\" with path \"{}\"".format(
-        source_root_name, root_path
-    ))
+    root_path = None
+    source_root = {}
+    project_source_path = None
+    if not only_documents:
+        roots = project_doc["config"]["roots"]
+        # Determine root directory of project
+        source_root_name = None
+        for root_name, root_value in roots.items():
+            if source_root:
+                raise ValueError(
+                    "Packaging is supported only for single root projects"
+                )
+            source_root = root_value
+            source_root_name = root_name
 
-    project_source_path = os.path.join(root_path, project_name)
-    if not os.path.exists(project_source_path):
-        raise ValueError("Didn't find source of project files")
+        root_path = source_root[platform.system().lower()]
+        print("Using root \"{}\" with path \"{}\"".format(
+            source_root_name, root_path
+        ))
+
+        project_source_path = os.path.join(root_path, project_name)
+        if not os.path.exists(project_source_path):
+            raise ValueError("Didn't find source of project files")
 
     # Determine zip filepath where data will be stored
     if not destination_dir:
         destination_dir = root_path
 
+    if not destination_dir:
+        raise ValueError(
+            "Project {} does not have any roots.".format(project_name)
+        )
+
     destination_dir = os.path.normpath(destination_dir)
     if not os.path.exists(destination_dir):
         os.makedirs(destination_dir)
@@ -273,8 +287,7 @@ def unpack_project(
     low_platform = platform.system().lower()
     project_name = metadata["project_name"]
 
-    source_root = metadata["root"]
-    root_path = source_root[low_platform]
+    root_path = metadata["root"].get(low_platform)
 
     # Drop existing collection
     replace_project_documents(project_name, docs, database_name)
diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py
index 57968b3700..de6495900e 100644
--- a/openpype/lib/transcoding.py
+++ b/openpype/lib/transcoding.py
@@ -51,7 +51,7 @@ IMAGE_EXTENSIONS = {
     ".jng", ".jpeg", ".jpeg-ls", ".jpeg", ".2000", ".jpg", ".xr", ".jpeg",
     ".xt", ".jpeg-hdr", ".kra", ".mng", ".miff", ".nrrd", ".ora", ".pam",
     ".pbm", ".pgm", ".ppm", ".pnm", ".pcx", ".pgf",
-    ".pictor", ".png", ".psb", ".psp", ".qtvr", ".ras",
+    ".pictor", ".png", ".psd", ".psb", ".psp", ".qtvr", ".ras",
     ".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", ".tiff/ep",
     ".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", ".xbm", ".xcf", ".xpm",
     ".xwd"
diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py
index 0efa89c087..551a2f7373 100644
--- a/openpype/modules/deadline/abstract_submit_deadline.py
+++ b/openpype/modules/deadline/abstract_submit_deadline.py
@@ -36,7 +36,7 @@ def requests_post(*args, **kwargs):
 
     Warning:
         Disabling SSL certificate validation is defeating one line
-        of defense SSL is providing and it is not recommended.
+        of defense SSL is providing, and it is not recommended.
 
     """
     if 'verify' not in kwargs:
@@ -57,7 +57,7 @@ def requests_get(*args, **kwargs):
 
     Warning:
         Disabling SSL certificate validation is defeating one line
-        of defense SSL is providing and it is not recommended.
+        of defense SSL is providing, and it is not recommended.
 
     """
     if 'verify' not in kwargs:
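
For context, requests_post and requests_get only pre-fill the verify flag before delegating to requests, as the trailing context lines suggest. A minimal sketch of that pattern, assuming an environment variable toggles verification (the variable name below is an assumption, not taken from this diff):

    import os
    import requests

    def requests_post(*args, **kwargs):
        # Default 'verify' only when the caller did not pass it explicitly.
        if 'verify' not in kwargs:
            kwargs['verify'] = not os.getenv("DONT_VERIFY_SSL")
        return requests.post(*args, **kwargs)
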
""" if 'verify' not in kwargs: diff --git a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py index 9981bead3e..2de6073e29 100644 --- a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py +++ b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py @@ -5,23 +5,26 @@ This is resolving index of server lists stored in `deadlineServers` instance attribute or using default server if that attribute doesn't exists. """ +from maya import cmds + import pyblish.api class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): """Collect Deadline Webservice URL from instance.""" - order = pyblish.api.CollectorOrder + 0.415 + # Run before collect_render. + order = pyblish.api.CollectorOrder + 0.005 label = "Deadline Webservice from the Instance" families = ["rendering", "renderlayer"] + hosts = ["maya"] def process(self, instance): instance.data["deadlineUrl"] = self._collect_deadline_url(instance) self.log.info( "Using {} for submission.".format(instance.data["deadlineUrl"])) - @staticmethod - def _collect_deadline_url(render_instance): + def _collect_deadline_url(self, render_instance): # type: (pyblish.api.Instance) -> str """Get Deadline Webservice URL from render instance. @@ -49,8 +52,16 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): default_server = render_instance.context.data["defaultDeadline"] instance_server = render_instance.data.get("deadlineServers") if not instance_server: + self.log.debug("Using default server.") return default_server + # Get instance server as sting. + if isinstance(instance_server, int): + instance_server = cmds.getAttr( + "{}.deadlineServers".format(render_instance.data["objset"]), + asString=True + ) + default_servers = deadline_settings["deadline_urls"] project_servers = ( render_instance.context.data @@ -58,15 +69,23 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): ["deadline"] ["deadline_servers"] ) - deadline_servers = { + if not project_servers: + self.log.debug("Not project servers found. Using default servers.") + return default_servers[instance_server] + + project_enabled_servers = { k: default_servers[k] for k in project_servers if k in default_servers } - # This is Maya specific and may not reflect real selection of deadline - # url as dictionary keys in Python 2 are not ordered - return deadline_servers[ - list(deadline_servers.keys())[ - int(render_instance.data.get("deadlineServers")) - ] - ] + + msg = ( + "\"{}\" server on instance is not enabled in project settings." + " Enabled project servers:\n{}".format( + instance_server, project_enabled_servers + ) + ) + assert instance_server in project_enabled_servers, msg + + self.log.debug("Using project approved server.") + return project_enabled_servers[instance_server] diff --git a/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py b/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py index cb2b0cf156..1a0d615dc3 100644 --- a/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py +++ b/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py @@ -17,7 +17,8 @@ class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin): `CollectDeadlineServerFromInstance`. """ - order = pyblish.api.CollectorOrder + 0.410 + # Run before collect_deadline_server_instance. 
diff --git a/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py b/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py
index cb2b0cf156..1a0d615dc3 100644
--- a/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py
+++ b/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py
@@ -17,7 +17,8 @@ class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
     `CollectDeadlineServerFromInstance`.
     """
 
-    order = pyblish.api.CollectorOrder + 0.410
+    # Run before collect_deadline_server_instance.
+    order = pyblish.api.CollectorOrder + 0.0025
     label = "Default Deadline Webservice"
     pass_mongo_url = False
diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py
index bcf0850768..ee28612b44 100644
--- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py
@@ -59,7 +59,6 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
             render_path).replace("\\", "/")
 
         instance.data["publishJobState"] = "Suspended"
-        instance.context.data['ftrackStatus'] = "Render"
 
         # adding 2d render specific family for version identification in Loader
         instance.data["families"] = ["render2d"]
diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py
index 73ab689c9a..254914a850 100644
--- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py
@@ -1,19 +1,27 @@
+import hou
+
 import os
-import json
+import attr
 import getpass
 from datetime import datetime
-
-import requests
 import pyblish.api
 
-# import hou ???
-
 from openpype.pipeline import legacy_io
 from openpype.tests.lib import is_in_tests
+from openpype_modules.deadline import abstract_submit_deadline
+from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
 from openpype.lib import is_running_from_build
 
 
-class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
+@attr.s
+class DeadlinePluginInfo():
+    SceneFile = attr.ib(default=None)
+    OutputDriver = attr.ib(default=None)
+    Version = attr.ib(default=None)
+    IgnoreInputs = attr.ib(default=True)
+
+
+class HoudiniSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
     """Submit Solaris USD Render ROPs to Deadline.
 
     Renders are submitted to a Deadline Web Service as
@@ -30,83 +38,57 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
     order = pyblish.api.IntegratorOrder
     hosts = ["houdini"]
     families = ["usdrender",
-                "redshift_rop"]
+                "redshift_rop",
+                "arnold_rop",
+                "mantra_rop",
+                "karma_rop",
+                "vray_rop"]
     targets = ["local"]
+    use_published = True
 
-    def process(self, instance):
+    def get_job_info(self):
+        job_info = DeadlineJobInfo(Plugin="Houdini")
 
+        instance = self._instance
         context = instance.context
-        code = context.data["code"]
+
         filepath = context.data["currentFile"]
         filename = os.path.basename(filepath)
-        comment = context.data.get("comment", "")
-        deadline_user = context.data.get("deadlineUser", getpass.getuser())
-        jobname = "%s - %s" % (filename, instance.name)
-
-        # Support code prefix label for batch name
-        batch_name = filename
-        if code:
-            batch_name = "{0} - {1}".format(code, batch_name)
+        job_info.Name = "{} - {}".format(filename, instance.name)
+        job_info.BatchName = filename
+        job_info.Plugin = "Houdini"
+        job_info.UserName = context.data.get(
+            "deadlineUser", getpass.getuser())
 
         if is_in_tests():
-            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
+            job_info.BatchName += datetime.now().strftime("%d%m%Y%H%M%S")
 
-        # Output driver to render
-        driver = instance[0]
-
-        # StartFrame to EndFrame by byFrameStep
+        # Deadline requires integers in frame range
         frames = "{start}-{end}x{step}".format(
             start=int(instance.data["frameStart"]),
             end=int(instance.data["frameEnd"]),
             step=int(instance.data["byFrameStep"]),
         )
+        job_info.Frames = frames
 
-        # Documentation for keys available at:
-        # https://docs.thinkboxsoftware.com
-        #    /products/deadline/8.0/1_User%20Manual/manual
-        #    /manual-submission.html#job-info-file-options
-        payload = {
-            "JobInfo": {
-                # Top-level group name
-                "BatchName": batch_name,
+        job_info.Pool = instance.data.get("primaryPool")
+        job_info.SecondaryPool = instance.data.get("secondaryPool")
+        job_info.ChunkSize = instance.data.get("chunkSize", 10)
+        job_info.Comment = context.data.get("comment")
 
-                # Job name, as seen in Monitor
-                "Name": jobname,
-
-                # Arbitrary username, for visualisation in Monitor
-                "UserName": deadline_user,
-
-                "Plugin": "Houdini",
-                "Pool": instance.data.get("primaryPool"),
-                "secondaryPool": instance.data.get("secondaryPool"),
-                "Frames": frames,
-
-                "ChunkSize": instance.data.get("chunkSize", 10),
-
-                "Comment": comment
-            },
-            "PluginInfo": {
-                # Input
-                "SceneFile": filepath,
-                "OutputDriver": driver.path(),
-
-                # Mandatory for Deadline
-                # Houdini version without patch number
-                "Version": hou.applicationVersionString().rsplit(".", 1)[0],
-
-                "IgnoreInputs": True
-            },
-
-            # Mandatory for Deadline, may be empty
-            "AuxFiles": []
-        }
-
         # Include critical environment variables with submission + api.Session
         keys = [
-            # Submit along the current Avalon tool setup that we launched
-            # this application with so the Render Slave can build its own
-            # similar environment using it, e.g. "maya2018;vray4.x;yeti3.1.9"
-            "AVALON_TOOLS"
+            "FTRACK_API_KEY",
+            "FTRACK_API_USER",
+            "FTRACK_SERVER",
+            "OPENPYPE_SG_USER",
+            "AVALON_PROJECT",
+            "AVALON_ASSET",
+            "AVALON_TASK",
+            "AVALON_APP_NAME",
+            "OPENPYPE_DEV",
+            "OPENPYPE_LOG_NO_COLORS",
+            "OPENPYPE_VERSION"
         ]
 
         # Add OpenPype version if we are running from build.
@@ -114,61 +96,50 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
             keys.append("OPENPYPE_VERSION")
 
         # Add mongo url if it's enabled
-        if context.data.get("deadlinePassMongoUrl"):
+        if self._instance.context.data.get("deadlinePassMongoUrl"):
             keys.append("OPENPYPE_MONGO")
 
         environment = dict({key: os.environ[key] for key in keys
                             if key in os.environ}, **legacy_io.Session)
+        for key in keys:
+            value = environment.get(key)
+            if value:
+                job_info.EnvironmentKeyValue[key] = value
 
-        payload["JobInfo"].update({
-            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
-                key=key,
-                value=environment[key]
-            ) for index, key in enumerate(environment)
-        })
+        # to recognize job from PYPE for turning Event On/Off
+        job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1"
 
-        # Include OutputFilename entries
-        # The first entry also enables double-click to preview rendered
-        # frames from Deadline Monitor
-        output_data = {}
         for i, filepath in enumerate(instance.data["files"]):
             dirname = os.path.dirname(filepath)
             fname = os.path.basename(filepath)
-            output_data["OutputDirectory%d" % i] = dirname.replace("\\", "/")
-            output_data["OutputFilename%d" % i] = fname
+            job_info.OutputDirectory += dirname.replace("\\", "/")
+            job_info.OutputFilename += fname
 
-            # For now ensure destination folder exists otherwise HUSK
-            # will fail to render the output image. This is supposedly fixed
-            # in new production builds of Houdini
-            # TODO Remove this workaround with Houdini 18.0.391+
-            if not os.path.exists(dirname):
-                self.log.info("Ensuring output directory exists: %s" %
-                              dirname)
-                os.makedirs(dirname)
+        return job_info
 
-        payload["JobInfo"].update(output_data)
+    def get_plugin_info(self):
 
-        self.submit(instance, payload)
+        instance = self._instance
+        context = instance.context
 
-    def submit(self, instance, payload):
+        # Output driver to render
+        driver = hou.node(instance.data["instance_node"])
+        hou_major_minor = hou.applicationVersionString().rsplit(".", 1)[0]
 
-        AVALON_DEADLINE = legacy_io.Session.get("AVALON_DEADLINE",
-                                                "http://localhost:8082")
-        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+        plugin_info = DeadlinePluginInfo(
+            SceneFile=context.data["currentFile"],
+            OutputDriver=driver.path(),
+            Version=hou_major_minor,
+            IgnoreInputs=True
+        )
 
-        plugin = payload["JobInfo"]["Plugin"]
-        self.log.info("Using Render Plugin : {}".format(plugin))
+        return attr.asdict(plugin_info)
 
-        self.log.info("Submitting..")
-        self.log.debug(json.dumps(payload, indent=4, sort_keys=True))
-
-        # E.g. http://192.168.0.1:8082/api/jobs
-        url = "{}/api/jobs".format(AVALON_DEADLINE)
-        response = requests.post(url, json=payload)
-        if not response.ok:
-            raise Exception(response.text)
+    def process(self, instance):
+        super(HoudiniSubmitDeadline, self).process(instance)
 
+        # TODO: Avoid the need for this logic here, needed for submit publish
         # Store output dir for unified publisher (filesequence)
         output_dir = os.path.dirname(instance.data["files"][0])
         instance.data["outputDir"] = output_dir
-        instance.data["deadlineSubmissionJob"] = response.json()
+        instance.data["toBeRenderedOn"] = "deadline"
diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py
index f9cfaa9266..06cb86f16a 100644
--- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py
+++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py
@@ -121,11 +121,15 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
     deadline_plugin = "OpenPype"
     targets = ["local"]
 
-    hosts = ["fusion", "max", "maya", "nuke",
+    hosts = ["fusion", "max", "maya", "nuke", "houdini",
             "celaction", "aftereffects", "harmony"]
 
     families = ["render.farm", "prerender.farm",
-                "renderlayer", "imagesequence", "maxrender", "vrayscene"]
+                "renderlayer", "imagesequence",
+                "vrayscene", "maxrender",
+                "arnold_rop", "mantra_rop",
+                "karma_rop", "vray_rop",
+                "redshift_rop"]
 
     aov_filter = {"maya": [r".*([Bb]eauty).*"],
                   "aftereffects": [r".*"],  # for everything from AE
@@ -143,7 +147,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         "FTRACK_SERVER",
         "AVALON_APP_NAME",
         "OPENPYPE_USERNAME",
-        "OPENPYPE_SG_USER",
+        "OPENPYPE_VERSION",
+        "OPENPYPE_SG_USER"
     ]
 
     # Add OpenPype version if we are running from build.
@@ -827,7 +832,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             ).format(source))
 
         family = "render"
-        if "prerender" in instance.data["families"]:
+        if ("prerender" in instance.data["families"] or
+                "prerender.farm" in instance.data["families"]):
             family = "prerender"
         families = [family]
 
@@ -1096,6 +1102,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             deadline_publish_job_id = \
                 self._submit_deadline_post_job(instance, render_job, instances)
 
+        # Inject deadline url to instances.
+        for inst in instances:
+            inst["deadlineUrl"] = self.deadline_url
+
         # publish job file
         publish_job = {
             "asset": asset,
diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py
index cec48ef54f..deb8b414f0 100644
--- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py
+++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py
@@ -109,8 +109,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
             for status in asset_version_statuses
         }
 
-        self._set_task_status(instance, project_entity, task_entity, session)
-
         # Prepare AssetTypes
         asset_types_by_short = self._ensure_asset_types_exists(
             session, component_list
         )
@@ -180,45 +178,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
             if asset_version not in instance.data[asset_versions_key]:
                 instance.data[asset_versions_key].append(asset_version)
 
-    def _set_task_status(self, instance, project_entity, task_entity, session):
-        if not project_entity:
-            self.log.info("Task status won't be set, project is not known.")
-            return
-
-        if not task_entity:
-            self.log.info("Task status won't be set, task is not known.")
-            return
-
-        status_name = instance.context.data.get("ftrackStatus")
-        if not status_name:
-            self.log.info("Ftrack status name is not set.")
-            return
-
-        self.log.debug(
-            "Ftrack status name will be (maybe) set to \"{}\"".format(
-                status_name
-            )
-        )
-
-        project_schema = project_entity["project_schema"]
-        task_statuses = project_schema.get_statuses(
-            "Task", task_entity["type_id"]
-        )
-        task_statuses_by_low_name = {
-            status["name"].lower(): status for status in task_statuses
-        }
-        status = task_statuses_by_low_name.get(status_name.lower())
-        if not status:
-            self.log.warning((
-                "Task status \"{}\" won't be set,"
-                " status is now allowed on task type \"{}\"."
-            ).format(status_name, task_entity["type"]["name"]))
-            return
-
-        self.log.info("Setting task status to \"{}\"".format(status_name))
-        task_entity["status"] = status
-        session.commit()
-
     def _fill_component_locations(self, session, component_list):
         components_by_location_name = collections.defaultdict(list)
         components_by_location_id = collections.defaultdict(list)
diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py
deleted file mode 100644
index ab5738c33f..0000000000
--- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_farm_status.py
+++ /dev/null
@@ -1,150 +0,0 @@
-import pyblish.api
-from openpype.lib import filter_profiles
-
-
-class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin):
-    """Change task status when should be published on farm.
-
-    Instance which has set "farm" key in data to 'True' is considered as will
-    be rendered on farm thus it's status should be changed.
-    """
-
-    order = pyblish.api.IntegratorOrder + 0.48
-    label = "Integrate Ftrack Farm Status"
-
-    farm_status_profiles = []
-
-    def process(self, context):
-        # Quick end
-        if not self.farm_status_profiles:
-            project_name = context.data["projectName"]
-            self.log.info((
-                "Status profiles are not filled for project \"{}\". Skipping"
-            ).format(project_name))
-            return
-
-        filtered_instances = self.filter_instances(context)
-        instances_with_status_names = self.get_instances_with_statuse_names(
-            context, filtered_instances
-        )
-        if instances_with_status_names:
-            self.fill_statuses(context, instances_with_status_names)
-
-    def filter_instances(self, context):
-        filtered_instances = []
-        for instance in context:
-            # Skip disabled instances
-            if instance.data.get("publish") is False:
-                continue
-
-            subset_name = instance.data["subset"]
-            msg_start = "Skipping instance {}.".format(subset_name)
-            if not instance.data.get("farm"):
-                self.log.debug(
-                    "{} Won't be rendered on farm.".format(msg_start)
-                )
-                continue
-
-            task_entity = instance.data.get("ftrackTask")
-            if not task_entity:
-                self.log.debug(
-                    "{} Does not have filled task".format(msg_start)
-                )
-                continue
-
-            filtered_instances.append(instance)
-        return filtered_instances
-
-    def get_instances_with_statuse_names(self, context, instances):
-        instances_with_status_names = []
-        for instance in instances:
-            family = instance.data["family"]
-            subset_name = instance.data["subset"]
-            task_entity = instance.data["ftrackTask"]
-            host_name = context.data["hostName"]
-            task_name = task_entity["name"]
-            task_type = task_entity["type"]["name"]
-            status_profile = filter_profiles(
-                self.farm_status_profiles,
-                {
-                    "hosts": host_name,
-                    "task_types": task_type,
-                    "task_names": task_name,
-                    "families": family,
-                    "subsets": subset_name,
-                },
-                logger=self.log
-            )
-            if not status_profile:
-                # There already is log in 'filter_profiles'
-                continue
-
-            status_name = status_profile["status_name"]
-            if status_name:
-                instances_with_status_names.append((instance, status_name))
-        return instances_with_status_names
-
-    def fill_statuses(self, context, instances_with_status_names):
-        # Prepare available task statuses on the project
-        project_name = context.data["projectName"]
-        session = context.data["ftrackSession"]
-        project_entity = session.query((
-            "select project_schema from Project where full_name is \"{}\""
-        ).format(project_name)).one()
-        project_schema = project_entity["project_schema"]
-
-        task_type_ids = set()
-        for item in instances_with_status_names:
-            instance, _ = item
-            task_entity = instance.data["ftrackTask"]
-            task_type_ids.add(task_entity["type"]["id"])
-
-        task_statuses_by_type_id = {
-            task_type_id: project_schema.get_statuses("Task", task_type_id)
-            for task_type_id in task_type_ids
-        }
-
-        # Keep track if anything has changed
-        skipped_status_names = set()
-        status_changed = False
-        for item in instances_with_status_names:
-            instance, status_name = item
-            task_entity = instance.data["ftrackTask"]
-            task_statuses = task_statuses_by_type_id[task_entity["type"]["id"]]
-            status_name_low = status_name.lower()
-
-            status_id = None
-            status_name = None
-            # Skip if status name was already tried to be found
-            for status in task_statuses:
-                if status["name"].lower() == status_name_low:
-                    status_id = status["id"]
-                    status_name = status["name"]
-                    break
-
-            if status_id is None:
-                if status_name_low not in skipped_status_names:
-                    skipped_status_names.add(status_name_low)
-                    joined_status_names = ", ".join({
-                        '"{}"'.format(status["name"])
-                        for status in task_statuses
-                    })
-                    self.log.warning((
-                        "Status \"{}\" is not available on project \"{}\"."
-                        " Available statuses are {}"
-                    ).format(status_name, project_name, joined_status_names))
-                continue
-
-            # Change task status id
-            if status_id != task_entity["status_id"]:
-                task_entity["status_id"] = status_id
-                status_changed = True
-                path = "/".join([
-                    item["name"]
-                    for item in task_entity["link"]
-                ])
-                self.log.debug("Set status \"{}\" to \"{}\"".format(
-                    status_name, path
-                ))
-
-        if status_changed:
-            session.commit()
diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_status.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_status.py
new file mode 100644
index 0000000000..e862dba7fc
--- /dev/null
+++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_status.py
@@ -0,0 +1,433 @@
+import copy
+
+import pyblish.api
+from openpype.lib import filter_profiles
+
+
+def create_chunks(iterable, chunk_size=None):
+    """Separate iterable into multiple chunks by size.
+
+    Args:
+        iterable(list|tuple|set): Object that will be separated into chunks.
+        chunk_size(int): Size of one chunk. Default value is 200.
+
+    Returns:
+        list