diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 5c264e4d98..669bf391cd 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -35,6 +35,9 @@ body:
       label: Version
       description: What version are you running? Look to OpenPype Tray
       options:
+        - 3.16.5-nightly.3
+        - 3.16.5-nightly.2
+        - 3.16.5-nightly.1
         - 3.16.4
         - 3.16.4-nightly.3
         - 3.16.4-nightly.2
@@ -132,9 +135,6 @@ body:
         - 3.14.8
         - 3.14.8-nightly.4
         - 3.14.8-nightly.3
-        - 3.14.8-nightly.2
-        - 3.14.8-nightly.1
-        - 3.14.7
     validations:
       required: true
   - type: dropdown
diff --git a/README.md b/README.md
index 6caed8061c..ce98f845e6 100644
--- a/README.md
+++ b/README.md
@@ -62,7 +62,7 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v
 #### Clone repository:
 ```sh
-git clone --recurse-submodules git@github.com:Pypeclub/OpenPype.git
+git clone --recurse-submodules git@github.com:ynput/OpenPype.git
 ```
 
 #### To build OpenPype:
@@ -144,6 +144,10 @@ sudo ./tools/docker_build.sh centos7
 
 If all is successful, you'll find built OpenPype in `./build/` folder.
 
+A Docker build can also be started from a Windows machine; just use `./tools/docker_build.ps1` instead of the shell script.
+
+This can even be used to create a Linux build (with the argument `centos7` or `debian`).
+
 #### Manual build
 You will need [Python >= 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that doesn't have one preinstalled.
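As a usage sketch of the PowerShell wrapper described in the README change above (assuming it accepts the same variant argument as `docker_build.sh`):

```sh
# Run from the repository root in a Windows PowerShell prompt;
# the argument selects the Linux image used for the build.
.\tools\docker_build.ps1 centos7
```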
diff --git a/openpype/client/server/entities.py b/openpype/client/server/entities.py
index 9579f13add..39322627bb 100644
--- a/openpype/client/server/entities.py
+++ b/openpype/client/server/entities.py
@@ -83,10 +83,10 @@ def _get_subsets(
         project_name,
         subset_ids,
         subset_names,
-        folder_ids,
-        names_by_folder_ids,
-        active,
-        fields
+        folder_ids=folder_ids,
+        names_by_folder_ids=names_by_folder_ids,
+        active=active,
+        fields=fields,
     ):
         yield convert_v4_subset_to_v3(subset)
diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py
index b03f8c8fc1..75c7ff9fee 100644
--- a/openpype/hosts/houdini/api/lib.py
+++ b/openpype/hosts/houdini/api/lib.py
@@ -22,9 +22,12 @@ log = logging.getLogger(__name__)
 JSON_PREFIX = "JSON:::"
 
 
-def get_asset_fps():
+def get_asset_fps(asset_doc=None):
     """Return current asset fps."""
-    return get_current_project_asset()["data"].get("fps")
+
+    if asset_doc is None:
+        asset_doc = get_current_project_asset(fields=["data.fps"])
+    return asset_doc["data"]["fps"]
 
 
 def set_id(node, unique_id, overwrite=False):
@@ -472,14 +475,19 @@ def maintained_selection():
 
 
 def reset_framerange():
-    """Set frame range to current asset"""
+    """Set frame range and FPS to current asset"""
 
+    # Get asset data
     project_name = get_current_project_name()
     asset_name = get_current_asset_name()
     # Get the asset ID from the database for the asset of current context
     asset_doc = get_asset_by_name(project_name, asset_name)
     asset_data = asset_doc["data"]
 
+    # Get FPS
+    fps = get_asset_fps(asset_doc)
+
+    # Get Start and End Frames
     frame_start = asset_data.get("frameStart")
     frame_end = asset_data.get("frameEnd")
 
@@ -493,6 +501,9 @@ def reset_framerange():
     frame_start -= int(handle_start)
     frame_end += int(handle_end)
 
+    # Set frame range and FPS
+    print("Setting scene FPS to {}".format(int(fps)))
+    set_scene_fps(fps)
     hou.playbar.setFrameRange(frame_start, frame_end)
     hou.playbar.setPlaybackRange(frame_start, frame_end)
     hou.setFrame(frame_start)
diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py
index 8a26bbb504..3c325edfa7 100644
--- a/openpype/hosts/houdini/api/pipeline.py
+++ b/openpype/hosts/houdini/api/pipeline.py
@@ -25,7 +25,6 @@ from openpype.lib import (
     emit_event,
 )
 
-from .lib import get_asset_fps
 
 log = logging.getLogger("openpype.hosts.houdini")
 
@@ -385,11 +384,6 @@ def _set_context_settings():
         None
 
     """
-    # Set new scene fps
-    fps = get_asset_fps()
-    print("Setting scene FPS to %i" % fps)
-    lib.set_scene_fps(fps)
-
     lib.reset_framerange()
diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py
index c015cebd49..9c96e48e3a 100644
--- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py
+++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py
@@ -33,7 +33,7 @@ class CreateVDBCache(plugin.HoudiniCreator):
         }
 
         if self.selected_nodes:
-            parms["soppath"] = self.selected_nodes[0].path()
+            parms["soppath"] = self.get_sop_node_path(self.selected_nodes[0])
 
         instance_node.setParms(parms)
 
@@ -42,3 +42,63 @@ class CreateVDBCache(plugin.HoudiniCreator):
             hou.ropNodeTypeCategory(),
             hou.sopNodeTypeCategory()
         ]
+
+    def get_sop_node_path(self, selected_node):
+        """Get the SOP path of the selected node.
+
+        Although Houdini allows an ObjNode path in the ROP node's
+        `sop_path`, we prefer it set to the SopNode path explicitly.
+        """
+
+        # Allow sop level paths (e.g. /obj/geo1/box1)
+        if isinstance(selected_node, hou.SopNode):
+            self.log.debug(
+                "Valid SopNode selection, 'SOP Path' in ROP will"
+                " be set to '%s'.", selected_node.path()
+            )
+            return selected_node.path()
+
+        # Allow object level paths to Geometry nodes (e.g. /obj/geo1)
+        # but do not allow other object level node types like cameras, etc.
+        elif isinstance(selected_node, hou.ObjNode) and \
+                selected_node.type().name() == "geo":
+
+            # Try to find output node.
+            sop_node = self.get_obj_output(selected_node)
+            if sop_node:
+                self.log.debug(
+                    "Valid ObjNode selection, 'SOP Path' in ROP will "
+                    "be set to the child path '%s'.", sop_node.path()
+                )
+                return sop_node.path()
+
+        self.log.debug(
+            "Selection isn't valid. 'SOP Path' in ROP will be empty."
+        )
+        return ""
+
+    def get_obj_output(self, obj_node):
+        """Try to find output node.
+
+        If any output nodes are present, return the output node with
+        the minimum 'outputidx'.
+        If no output nodes are present, return the node with the display
+        flag. If no nodes are present at all, return None.
+        """
+
+        outputs = obj_node.subnetOutputs()
+
+        # if obj_node is empty
+        if not outputs:
+            return
+
+        # if obj_node has one output child, whether it's a sop output
+        # node or a node with the render flag
+        elif len(outputs) == 1:
+            return outputs[0]
+
+        # if there are more than one, it has multiple output nodes;
+        # return the one with the minimum 'outputidx'
+        else:
+            return min(outputs,
+                       key=lambda node: node.evalParm('outputidx'))
diff --git a/openpype/hosts/houdini/startup/MainMenuCommon.xml b/openpype/hosts/houdini/startup/MainMenuCommon.xml
index 47a4653d5d..5818a117eb 100644
--- a/openpype/hosts/houdini/startup/MainMenuCommon.xml
+++ b/openpype/hosts/houdini/startup/MainMenuCommon.xml
@@ -2,7 +2,19 @@
-
+
+
+
+
+
diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py
index 65b4b91323..a1d290646c 100644
--- a/openpype/hosts/nuke/api/pipeline.py
+++ b/openpype/hosts/nuke/api/pipeline.py
@@ -543,6 +543,9 @@ def list_instances(creator_id=None):
 
     For SubsetManager
 
+    Args:
+        creator_id (Optional[str]): creator identifier
+
     Returns:
         (list) of dictionaries matching instances format
     """
@@ -575,10 +578,13 @@ def list_instances(creator_id=None):
         if creator_id and instance_data["creator_identifier"] != creator_id:
             continue
 
-        if instance_data["instance_id"] in instance_ids:
+        instance_id = instance_data.get("instance_id")
+        if not instance_id:
+            pass
+        elif instance_id in instance_ids:
             instance_data.pop("instance_id")
         else:
-            instance_ids.add(instance_data["instance_id"])
+            instance_ids.add(instance_id)
 
         # node name could change, so update subset name data
         _update_subset_name_data(instance_data, node)
diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py
index 3975f2e57c..6d48c09d60 100644
--- a/openpype/hosts/nuke/api/plugin.py
+++ b/openpype/hosts/nuke/api/plugin.py
@@ -327,6 +327,7 @@ class NukeWriteCreator(NukeCreator):
             "frames": "Use existing frames"
         }
         if ("farm_rendering" in self.instance_attributes):
+            rendering_targets["frames_farm"] = "Use existing frames - farm"
            rendering_targets["farm"] = "Farm rendering"
 
         return EnumDef(
diff --git a/openpype/hosts/nuke/plugins/publish/collect_instance_data.py b/openpype/hosts/nuke/plugins/publish/collect_nuke_instance_data.py
similarity index 71%
rename from openpype/hosts/nuke/plugins/publish/collect_instance_data.py
rename to openpype/hosts/nuke/plugins/publish/collect_nuke_instance_data.py
index 3908aef4bc..b0f69e8ab8 100644
--- a/openpype/hosts/nuke/plugins/publish/collect_instance_data.py
+++ b/openpype/hosts/nuke/plugins/publish/collect_nuke_instance_data.py
@@ -2,11 +2,13 @@ import nuke
 import pyblish.api
 
 
-class CollectInstanceData(pyblish.api.InstancePlugin):
-    """Collect all nodes with Avalon knob."""
+class CollectNukeInstanceData(pyblish.api.InstancePlugin):
+    """Collect Nuke instance data
+
+    """
 
     order = pyblish.api.CollectorOrder - 0.49
-    label = "Collect Instance Data"
+    label = "Collect Nuke Instance Data"
     hosts = ["nuke", "nukeassist"]
 
     # presets
@@ -40,5 +42,14 @@ class CollectNukeInstanceData(pyblish.api.InstancePlugin):
             "pixelAspect": pixel_aspect
         })
 
+        # add creator attributes to instance
+        creator_attributes = instance.data["creator_attributes"]
+        instance.data.update(creator_attributes)
+
+        # add review family if review is activated on the instance
+        if instance.data.get("review"):
+            instance.data["families"].append("review")
+
         self.log.debug("Collected instance: {}".format(
             instance.data))
instance.data.get("review"): + instance.data["families"].append("review") + self.log.debug("Collected instance: {}".format( instance.data)) diff --git a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py index 5701087697..c7d65ffd24 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py +++ b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py @@ -5,7 +5,7 @@ import nuke class CollectSlate(pyblish.api.InstancePlugin): """Check if SLATE node is in scene and connected to rendering tree""" - order = pyblish.api.CollectorOrder + 0.09 + order = pyblish.api.CollectorOrder + 0.002 label = "Collect Slate Node" hosts = ["nuke"] families = ["render"] @@ -13,10 +13,14 @@ class CollectSlate(pyblish.api.InstancePlugin): def process(self, instance): node = instance.data["transientData"]["node"] - slate = next((n for n in nuke.allNodes() - if "slate" in n.name().lower() - if not n["disable"].getValue()), - None) + slate = next( + ( + n_ for n_ in nuke.allNodes() + if "slate" in n_.name().lower() + if not n_["disable"].getValue() + ), + None + ) if slate: # check if slate node is connected to write node tree diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py index 1eb1e1350f..6f9245f5b9 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -1,5 +1,4 @@ import os -from pprint import pformat import nuke import pyblish.api from openpype.hosts.nuke import api as napi @@ -15,30 +14,16 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, hosts = ["nuke", "nukeassist"] families = ["render", "prerender", "image"] + # cache + _write_nodes = {} + _frame_ranges = {} + def process(self, instance): - self.log.debug(pformat(instance.data)) - creator_attributes = instance.data["creator_attributes"] - instance.data.update(creator_attributes) group_node = instance.data["transientData"]["node"] render_target = instance.data["render_target"] - family = instance.data["family"] - families = instance.data["families"] - # add targeted family to families - instance.data["families"].append( - "{}.{}".format(family, render_target) - ) - if instance.data.get("review"): - instance.data["families"].append("review") - - child_nodes = napi.get_instance_group_node_childs(instance) - instance.data["transientData"]["childNodes"] = child_nodes - - write_node = None - for x in child_nodes: - if x.Class() == "Write": - write_node = x + write_node = self._write_node_helper(instance) if write_node is None: self.log.warning( @@ -48,113 +33,134 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, ) return - instance.data["writeNode"] = write_node - self.log.debug("checking instance: {}".format(instance)) + # get colorspace and add to version data + colorspace = napi.get_colorspace_from_node(write_node) - # Determine defined file type - ext = write_node["file_type"].value() + if render_target == "frames": + self._set_existing_files_data(instance, colorspace) - # Get frame range - handle_start = instance.context.data["handleStart"] - handle_end = instance.context.data["handleEnd"] - first_frame = int(nuke.root()["first_frame"].getValue()) - last_frame = int(nuke.root()["last_frame"].getValue()) - frame_length = int(last_frame - first_frame + 1) + elif render_target == "frames_farm": + collected_frames = self._set_existing_files_data( + instance, colorspace) - if write_node["use_limit"].getValue(): - first_frame 
= int(write_node["first"].getValue()) - last_frame = int(write_node["last"].getValue()) + self._set_expected_files(instance, collected_frames) + + self._add_farm_instance_data(instance) + + elif render_target == "farm": + self._add_farm_instance_data(instance) + + # set additional instance data + self._set_additional_instance_data(instance, render_target, colorspace) + + def _set_existing_files_data(self, instance, colorspace): + """Set existing files data to instance data. + + Args: + instance (pyblish.api.Instance): pyblish instance + colorspace (str): colorspace + + Returns: + list: collected frames + """ + collected_frames = self._get_collected_frames(instance) + + representation = self._get_existing_frames_representation( + instance, collected_frames + ) + + # inject colorspace data + self.set_representation_colorspace( + representation, instance.context, + colorspace=colorspace + ) + + instance.data["representations"].append(representation) + + return collected_frames + + def _set_expected_files(self, instance, collected_frames): + """Set expected files to instance data. + + Args: + instance (pyblish.api.Instance): pyblish instance + collected_frames (list): collected frames + """ + write_node = self._write_node_helper(instance) write_file_path = nuke.filename(write_node) output_dir = os.path.dirname(write_file_path) - # get colorspace and add to version data - colorspace = napi.get_colorspace_from_node(write_node) + instance.data["expectedFiles"] = [ + os.path.join(output_dir, source_file) + for source_file in collected_frames + ] - self.log.debug('output dir: {}'.format(output_dir)) + def _get_frame_range_data(self, instance): + """Get frame range data from instance. - if render_target == "frames": - representation = { - 'name': ext, - 'ext': ext, - "stagingDir": output_dir, - "tags": [] - } + Args: + instance (pyblish.api.Instance): pyblish instance - # get file path knob - node_file_knob = write_node["file"] - # list file paths based on input frames - expected_paths = list(sorted({ - node_file_knob.evaluate(frame) - for frame in range(first_frame, last_frame + 1) - })) + Returns: + tuple: first_frame, last_frame + """ - # convert only to base names - expected_filenames = [ - os.path.basename(filepath) - for filepath in expected_paths - ] + instance_name = instance.data["name"] - # make sure files are existing at folder - collected_frames = [ - filename - for filename in os.listdir(output_dir) - if filename in expected_filenames - ] + if self._frame_ranges.get(instance_name): + # return cashed write node + return self._frame_ranges[instance_name] - if collected_frames: - collected_frames_len = len(collected_frames) - frame_start_str = "%0{}d".format( - len(str(last_frame))) % first_frame - representation['frameStart'] = frame_start_str + write_node = self._write_node_helper(instance) - # in case slate is expected and not yet rendered - self.log.debug("_ frame_length: {}".format(frame_length)) - self.log.debug("_ collected_frames_len: {}".format( - collected_frames_len)) + # Get frame range from workfile + first_frame = int(nuke.root()["first_frame"].getValue()) + last_frame = int(nuke.root()["last_frame"].getValue()) - # this will only run if slate frame is not already - # rendered from previews publishes - if ( - "slate" in families - and frame_length == collected_frames_len - and family == "render" - ): - frame_slate_str = ( - "{{:0{}d}}".format(len(str(last_frame))) - ).format(first_frame - 1) + # Get frame range from write node if activated + if 
write_node["use_limit"].getValue(): + first_frame = int(write_node["first"].getValue()) + last_frame = int(write_node["last"].getValue()) - slate_frame = collected_frames[0].replace( - frame_start_str, frame_slate_str) - collected_frames.insert(0, slate_frame) + # add to cache + self._frame_ranges[instance_name] = (first_frame, last_frame) - if collected_frames_len == 1: - representation['files'] = collected_frames.pop() - else: - representation['files'] = collected_frames + return first_frame, last_frame - # inject colorspace data - self.set_representation_colorspace( - representation, instance.context, - colorspace=colorspace - ) + def _set_additional_instance_data( + self, instance, render_target, colorspace + ): + """Set additional instance data. - instance.data["representations"].append(representation) - self.log.info("Publishing rendered frames ...") + Args: + instance (pyblish.api.Instance): pyblish instance + render_target (str): render target + colorspace (str): colorspace + """ + family = instance.data["family"] - elif render_target == "farm": - farm_keys = ["farm_chunk", "farm_priority", "farm_concurrency"] - for key in farm_keys: - # Skip if key is not in creator attributes - if key not in creator_attributes: - continue - # Add farm attributes to instance - instance.data[key] = creator_attributes[key] + # add targeted family to families + instance.data["families"].append( + "{}.{}".format(family, render_target) + ) + self.log.debug("Appending render target to families: {}.{}".format( + family, render_target) + ) - # Farm rendering - instance.data["transfer"] = False - instance.data["farm"] = True - self.log.info("Farm rendering ON ...") + write_node = self._write_node_helper(instance) + + # Determine defined file type + ext = write_node["file_type"].value() + + # get frame range data + handle_start = instance.context.data["handleStart"] + handle_end = instance.context.data["handleEnd"] + first_frame, last_frame = self._get_frame_range_data(instance) + + # get output paths + write_file_path = nuke.filename(write_node) + output_dir = os.path.dirname(write_file_path) # TODO: remove this when we have proper colorspace support version_data = { @@ -188,10 +194,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, "frameEndHandle": last_frame, }) - # make sure rendered sequence on farm will - # be used for extract review - if not instance.data.get("review"): - instance.data["useSequenceForReview"] = False # TODO temporarily set stagingDir as persistent for backward # compatibility. This is mainly focused on `renders`folders which @@ -199,4 +201,201 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, # this logic should be removed and replaced with custom staging dir instance.data["stagingDir_persistent"] = True - self.log.debug("instance.data: {}".format(pformat(instance.data))) + def _write_node_helper(self, instance): + """Helper function to get write node from instance. + + Also sets instance transient data with child nodes. 
+ + Args: + instance (pyblish.api.Instance): pyblish instance + + Returns: + nuke.Node: write node + """ + instance_name = instance.data["name"] + + if self._write_nodes.get(instance_name): + # return cashed write node + return self._write_nodes[instance_name] + + # get all child nodes from group node + child_nodes = napi.get_instance_group_node_childs(instance) + + # set child nodes to instance transient data + instance.data["transientData"]["childNodes"] = child_nodes + + write_node = None + for node_ in child_nodes: + if node_.Class() == "Write": + write_node = node_ + + if write_node: + # for slate frame extraction + instance.data["transientData"]["writeNode"] = write_node + # add to cache + self._write_nodes[instance_name] = write_node + + return self._write_nodes[instance_name] + + def _get_existing_frames_representation( + self, + instance, + collected_frames + ): + """Get existing frames representation. + + Args: + instance (pyblish.api.Instance): pyblish instance + collected_frames (list): collected frames + + Returns: + dict: representation + """ + + first_frame, last_frame = self._get_frame_range_data(instance) + + write_node = self._write_node_helper(instance) + + write_file_path = nuke.filename(write_node) + output_dir = os.path.dirname(write_file_path) + + # Determine defined file type + ext = write_node["file_type"].value() + + representation = { + "name": ext, + "ext": ext, + "stagingDir": output_dir, + "tags": [] + } + + frame_start_str = self._get_frame_start_str(first_frame, last_frame) + + representation['frameStart'] = frame_start_str + + # set slate frame + collected_frames = self._add_slate_frame_to_collected_frames( + instance, + collected_frames, + first_frame, + last_frame + ) + + if len(collected_frames) == 1: + representation['files'] = collected_frames.pop() + else: + representation['files'] = collected_frames + + return representation + + def _get_frame_start_str(self, first_frame, last_frame): + """Get frame start string. + + Args: + first_frame (int): first frame + last_frame (int): last frame + + Returns: + str: frame start string + """ + # convert first frame to string with padding + return ( + "{{:0{}d}}".format(len(str(last_frame))) + ).format(first_frame) + + def _add_slate_frame_to_collected_frames( + self, + instance, + collected_frames, + first_frame, + last_frame + ): + """Add slate frame to collected frames. + + Args: + instance (pyblish.api.Instance): pyblish instance + collected_frames (list): collected frames + first_frame (int): first frame + last_frame (int): last frame + + Returns: + list: collected frames + """ + frame_start_str = self._get_frame_start_str(first_frame, last_frame) + frame_length = int(last_frame - first_frame + 1) + + # this will only run if slate frame is not already + # rendered from previews publishes + if ( + "slate" in instance.data["families"] + and frame_length == len(collected_frames) + ): + frame_slate_str = self._get_frame_start_str( + first_frame - 1, + last_frame + ) + + slate_frame = collected_frames[0].replace( + frame_start_str, frame_slate_str) + collected_frames.insert(0, slate_frame) + + return collected_frames + + def _add_farm_instance_data(self, instance): + """Add farm publishing related instance data. 
+ + Args: + instance (pyblish.api.Instance): pyblish instance + """ + + # make sure rendered sequence on farm will + # be used for extract review + if not instance.data.get("review"): + instance.data["useSequenceForReview"] = False + + # Farm rendering + instance.data.update({ + "transfer": False, + "farm": True # to skip integrate + }) + self.log.info("Farm rendering ON ...") + + def _get_collected_frames(self, instance): + """Get collected frames. + + Args: + instance (pyblish.api.Instance): pyblish instance + + Returns: + list: collected frames + """ + + first_frame, last_frame = self._get_frame_range_data(instance) + + write_node = self._write_node_helper(instance) + + write_file_path = nuke.filename(write_node) + output_dir = os.path.dirname(write_file_path) + + # get file path knob + node_file_knob = write_node["file"] + # list file paths based on input frames + expected_paths = list(sorted({ + node_file_knob.evaluate(frame) + for frame in range(first_frame, last_frame + 1) + })) + + # convert only to base names + expected_filenames = { + os.path.basename(filepath) + for filepath in expected_paths + } + + # make sure files are existing at folder + collected_frames = [ + filename + for filename in os.listdir(output_dir) + if filename in expected_filenames + ] + + return collected_frames diff --git a/openpype/hosts/nuke/plugins/publish/extract_camera.py b/openpype/hosts/nuke/plugins/publish/extract_camera.py index 4286f71e83..33df6258ae 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_camera.py +++ b/openpype/hosts/nuke/plugins/publish/extract_camera.py @@ -11,9 +11,9 @@ from openpype.hosts.nuke.api.lib import maintained_selection class ExtractCamera(publish.Extractor): - """ 3D camera exctractor + """ 3D camera extractor """ - label = 'Exctract Camera' + label = 'Extract Camera' order = pyblish.api.ExtractorOrder families = ["camera"] hosts = ["nuke"] diff --git a/openpype/hosts/nuke/plugins/publish/extract_model.py b/openpype/hosts/nuke/plugins/publish/extract_model.py index 814d404137..00462f8035 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_model.py +++ b/openpype/hosts/nuke/plugins/publish/extract_model.py @@ -11,9 +11,9 @@ from openpype.hosts.nuke.api.lib import ( class ExtractModel(publish.Extractor): - """ 3D model exctractor + """ 3D model extractor """ - label = 'Exctract Model' + label = 'Extract Model' order = pyblish.api.ExtractorOrder families = ["model"] hosts = ["nuke"] diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index 06c086b10d..25262a7418 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -249,7 +249,7 @@ class ExtractSlateFrame(publish.Extractor): # Add file to representation files # - get write node - write_node = instance.data["writeNode"] + write_node = instance.data["transientData"]["writeNode"] # - evaluate filepaths for first frame and slate frame first_filename = os.path.basename( write_node["file"].evaluate(first_frame)) diff --git a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py index 21eefda249..d57d55f85d 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py @@ -54,6 +54,7 @@ class ExtractThumbnail(publish.Extractor): def render_thumbnail(self, instance, output_name=None, **kwargs): first_frame = 
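A small worked illustration of the `_get_frame_start_str` padding helper added above (plain Python, self-contained): the padding width is derived from the digit count of the last frame, and the slate frame simply reuses the helper with `first_frame - 1`.

```python
def get_frame_start_str(first_frame, last_frame):
    # e.g. last_frame=1200 -> "{:04d}" -> zero-pad to four digits
    return "{{:0{}d}}".format(len(str(last_frame))).format(first_frame)

print(get_frame_start_str(997, 1200))   # "0997"
# Slate frame naming: replace the first-frame token with first_frame - 1
print("shot.0997.exr".replace("0997", get_frame_start_str(996, 1200)))
# -> "shot.0996.exr"
```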
instance.data["frameStartHandle"] last_frame = instance.data["frameEndHandle"] + colorspace = instance.data["colorspace"] # find frame range and define middle thumb frame mid_frame = int((last_frame - first_frame) / 2) @@ -112,8 +113,8 @@ class ExtractThumbnail(publish.Extractor): if self.use_rendered and os.path.isfile(path_render): # check if file exist otherwise connect to write node rnode = nuke.createNode("Read") - rnode["file"].setValue(path_render) + rnode["colorspace"].setValue(colorspace) # turn it raw if none of baking is ON if all([ diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index aeecea655f..2a925fbeff 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -1,3 +1,5 @@ +from collections import defaultdict + import pyblish.api from openpype.pipeline.publish import get_errored_instances_from_context from openpype.hosts.nuke.api.lib import ( @@ -87,6 +89,11 @@ class ValidateNukeWriteNode( correct_data )) + # Collect key values of same type in a list. + values_by_name = defaultdict(list) + for knob_data in correct_data["knobs"]: + values_by_name[knob_data["name"]].append(knob_data["value"]) + for knob_data in correct_data["knobs"]: knob_type = knob_data["type"] self.log.debug("__ knob_type: {}".format( @@ -105,28 +112,33 @@ class ValidateNukeWriteNode( ) key = knob_data["name"] - value = knob_data["value"] + values = values_by_name[key] node_value = write_node[key].value() # fix type differences - if type(node_value) in (int, float): - try: - if isinstance(value, list): - value = color_gui_to_int(value) - else: - value = float(value) - node_value = float(node_value) - except ValueError: - value = str(value) - else: - value = str(value) - node_value = str(node_value) + fixed_values = [] + for value in values: + if type(node_value) in (int, float): + try: - self.log.debug("__ key: {} | value: {}".format( - key, value + if isinstance(value, list): + value = color_gui_to_int(value) + else: + value = float(value) + node_value = float(node_value) + except ValueError: + value = str(value) + else: + value = str(value) + node_value = str(node_value) + + fixed_values.append(value) + + self.log.debug("__ key: {} | values: {}".format( + key, fixed_values )) if ( - node_value != value + node_value not in fixed_values and key != "file" and key != "tile_color" ): diff --git a/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py b/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py index 76bb25fac3..96485d5a2d 100644 --- a/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py +++ b/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py @@ -1,4 +1,6 @@ import clique +import os +import re import pyblish.api @@ -21,7 +23,19 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin): representations = instance.data.get("representations") for repr in representations: data = instance.data.get("assetEntity", {}).get("data", {}) - patterns = [clique.PATTERNS["frames"]] + repr_files = repr["files"] + if isinstance(repr_files, str): + continue + + ext = repr.get("ext") + if not ext: + _, ext = os.path.splitext(repr_files[0]) + elif not ext.startswith("."): + ext = ".{}".format(ext) + pattern = r"\D?(?P(?P0*)\d+){}$".format( + re.escape(ext)) + patterns = [pattern] + collections, remainder = clique.assemble( repr["files"], minimum_items=1, patterns=patterns) @@ -30,6 +44,10 @@ class 
ValidateSequenceFrames(pyblish.api.InstancePlugin): collection = collections[0] frames = list(collection.indexes) + if instance.data.get("slate"): + # Slate is not part of the frame range + frames = frames[1:] + current_range = (frames[0], frames[-1]) required_range = (data["clipIn"], data["clipOut"]) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index e56f245d27..20d585e906 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -280,13 +280,14 @@ class BatchPublishEndpoint(WebpublishApiEndpoint): for key, value in add_args.items(): # Skip key values where value is None - if value is not None: - args.append("--{}".format(key)) - # Extend list into arguments (targets can be a list) - if isinstance(value, (tuple, list)): - args.extend(value) - else: - args.append(value) + if value is None: + continue + arg_key = "--{}".format(key) + if not isinstance(value, (tuple, list)): + value = [value] + + for item in value: + args += [arg_key, item] log.info("args:: {}".format(args)) if add_to_queue: diff --git a/openpype/lib/events.py b/openpype/lib/events.py index dca58fcf93..496b765a05 100644 --- a/openpype/lib/events.py +++ b/openpype/lib/events.py @@ -3,6 +3,7 @@ import os import re import copy import inspect +import collections import logging import weakref from uuid import uuid4 @@ -340,8 +341,8 @@ class EventSystem(object): event.emit() return event - def emit_event(self, event): - """Emit event object. + def _process_event(self, event): + """Process event topic and trigger callbacks. Args: event (Event): Prepared event with topic and data. @@ -356,6 +357,91 @@ class EventSystem(object): for callback in invalid_callbacks: self._registered_callbacks.remove(callback) + def emit_event(self, event): + """Emit event object. + + Args: + event (Event): Prepared event with topic and data. + """ + + self._process_event(event) + + +class QueuedEventSystem(EventSystem): + """Events are automatically processed in queue. + + If callback triggers another event, the event is not processed until + all callbacks of previous event are processed. + + Allows to implement custom event process loop by changing 'auto_execute'. + + Note: + This probably should be default behavior of 'EventSystem'. Changing it + now could cause problems in existing code. + + Args: + auto_execute (Optional[bool]): If 'True', events are processed + automatically. Custom loop calling 'process_next_event' + must be implemented when set to 'False'. + """ + + def __init__(self, auto_execute=True): + super(QueuedEventSystem, self).__init__() + self._event_queue = collections.deque() + self._current_event = None + self._auto_execute = auto_execute + + def __len__(self): + return self.count() + + def count(self): + """Get number of events in queue. + + Returns: + int: Number of events in queue. + """ + + return len(self._event_queue) + + def process_next_event(self): + """Process next event in queue. + + Should be used only if 'auto_execute' is set to 'False'. Only single + event is processed. + + Returns: + Union[Event, None]: Processed event. 
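A quick sketch of what the custom pattern above matches (illustrative file names; the named groups `index` and `padding` are what clique expects, and were restored here since angle-bracket groups are easily lost in extraction):

```python
import re

import clique

files = ["beauty.0997.exr", "beauty.0998.exr", "beauty.0999.exr"]
ext = ".exr"
pattern = r"\D?(?P<index>(?P<padding>0*)\d+){}$".format(re.escape(ext))

collections, remainder = clique.assemble(
    files, minimum_items=1, patterns=[pattern])
print(collections[0])  # e.g. "beauty.%04d.exr [997-999]"
```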
+ """ + + if self._current_event is not None: + raise ValueError("An event is already in progress.") + + if not self._event_queue: + return None + event = self._event_queue.popleft() + self._current_event = event + self._process_event(event) + self._current_event = None + return event + + def emit_event(self, event): + """Emit event object. + + Args: + event (Event): Prepared event with topic and data. + """ + + if not self._auto_execute or self._current_event is not None: + self._event_queue.append(event) + return + + self._event_queue.append(event) + while self._event_queue: + event = self._event_queue.popleft() + self._current_event = event + self._process_event(event) + self._current_event = None + class GlobalEventSystem: """Event system living in global scope of process. diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 9b3637c48a..84e213288c 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -373,10 +373,12 @@ def _load_ayon_addons(openpype_modules, modules_key, log): addons_info = _get_ayon_addons_information() if not addons_info: return v3_addons_to_skip - addons_dir = os.path.join( - appdirs.user_data_dir("AYON", "Ynput"), - "addons" - ) + addons_dir = os.environ.get("AYON_ADDONS_DIR") + if not addons_dir: + addons_dir = os.path.join( + appdirs.user_data_dir("AYON", "Ynput"), + "addons" + ) if not os.path.exists(addons_dir): log.warning("Addons directory does not exists. Path \"{}\"".format( addons_dir diff --git a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py index eadfc3c83e..8a408d7f4f 100644 --- a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py +++ b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py @@ -8,6 +8,7 @@ attribute or using default server if that attribute doesn't exists. from maya import cmds import pyblish.api +from openpype.pipeline.publish import KnownPublishError class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): @@ -81,13 +82,14 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): if k in default_servers } - msg = ( - "\"{}\" server on instance is not enabled in project settings." - " Enabled project servers:\n{}".format( - instance_server, project_enabled_servers + if instance_server not in project_enabled_servers: + msg = ( + "\"{}\" server on instance is not enabled in project settings." + " Enabled project servers:\n{}".format( + instance_server, project_enabled_servers + ) ) - ) - assert instance_server in project_enabled_servers, msg + raise KnownPublishError(msg) self.log.debug("Using project approved server.") return project_enabled_servers[instance_server] diff --git a/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml b/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml index 0e7d72910e..aa21df3734 100644 --- a/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml +++ b/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml @@ -1,31 +1,31 @@ - Scene setting + Deadline Pools - ## Invalid Deadline pools found +## Invalid Deadline pools found - Configured pools don't match what is set in Deadline. +Configured pools don't match available pools in Deadline. - {invalid_value_str} +### How to repair? - ### How to repair? +If your instance had deadline pools set on creation, remove or +change them. 
diff --git a/openpype/modules/base.py b/openpype/modules/base.py
index 9b3637c48a..84e213288c 100644
--- a/openpype/modules/base.py
+++ b/openpype/modules/base.py
@@ -373,10 +373,12 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
     addons_info = _get_ayon_addons_information()
     if not addons_info:
         return v3_addons_to_skip
-    addons_dir = os.path.join(
-        appdirs.user_data_dir("AYON", "Ynput"),
-        "addons"
-    )
+    addons_dir = os.environ.get("AYON_ADDONS_DIR")
+    if not addons_dir:
+        addons_dir = os.path.join(
+            appdirs.user_data_dir("AYON", "Ynput"),
+            "addons"
+        )
     if not os.path.exists(addons_dir):
         log.warning("Addons directory does not exists. Path \"{}\"".format(
             addons_dir
diff --git a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py
index eadfc3c83e..8a408d7f4f 100644
--- a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py
+++ b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py
@@ -8,6 +8,7 @@ attribute or using default server if that attribute doesn't exists.
 from maya import cmds
 
 import pyblish.api
+from openpype.pipeline.publish import KnownPublishError
 
 
 class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
@@ -81,13 +82,14 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
             if k in default_servers
         }
 
-        msg = (
-            "\"{}\" server on instance is not enabled in project settings."
-            " Enabled project servers:\n{}".format(
-                instance_server, project_enabled_servers
+        if instance_server not in project_enabled_servers:
+            msg = (
+                "\"{}\" server on instance is not enabled in project settings."
+                " Enabled project servers:\n{}".format(
+                    instance_server, project_enabled_servers
+                )
             )
-        )
-        assert instance_server in project_enabled_servers, msg
+            raise KnownPublishError(msg)
 
         self.log.debug("Using project approved server.")
         return project_enabled_servers[instance_server]
diff --git a/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml b/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml
index 0e7d72910e..aa21df3734 100644
--- a/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml
+++ b/openpype/modules/deadline/plugins/publish/help/validate_deadline_pools.xml
@@ -1,31 +1,31 @@
- Scene setting
+ Deadline Pools
- ## Invalid Deadline pools found
+## Invalid Deadline pools found
- Configured pools don't match what is set in Deadline.
+Configured pools don't match available pools in Deadline.
- {invalid_value_str}
+### How to repair?
- ### How to repair?
+If your instance had deadline pools set on creation, remove or
+change them.
- If your instance had deadline pools set on creation, remove or
- change them.
+In other cases inform an admin to change them in Settings.
- In other cases inform admin to change them in Settings.
+Available deadline pools:
+
+{pools_str}
- Available deadline pools {pools_str}.
- ### __Detailed Info__
+### __Detailed Info__
- This error is shown when deadline pool is not on Deadline anymore. It
- could happen in case of republish old workfile which was created with
- previous deadline pools,
- or someone changed pools on Deadline side, but didn't modify Openpype
- Settings.
+This error is shown when a configured pool is not available on Deadline. It
+can happen when publishing old workfiles which were created with previous
+deadline pools, or someone changed the available pools in Deadline,
+but didn't modify OpenPype Settings to match the changes.
\ No newline at end of file
diff --git a/openpype/modules/deadline/plugins/publish/submit_max_deadline.py b/openpype/modules/deadline/plugins/publish/submit_max_deadline.py
index 8e05582962..d8725e853c 100644
--- a/openpype/modules/deadline/plugins/publish/submit_max_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_max_deadline.py
@@ -12,7 +12,9 @@ from openpype.pipeline import (
     legacy_io,
     OpenPypePyblishPluginMixin
 )
-from openpype.settings import get_project_settings
+from openpype.pipeline.publish.lib import (
+    replace_with_published_scene_path
+)
 from openpype.hosts.max.api.lib import (
     get_current_renderer,
     get_multipass_setting
@@ -247,7 +249,8 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
             if instance.data["renderer"] == "Redshift_Renderer":
                 self.log.debug("Using Redshift...published scene wont be used..")
                 replace_in_path = False
-        return replace_in_path
+        return replace_with_published_scene_path(
+            instance, replace_in_path)
 
     @staticmethod
     def _iter_expected_files(exp):
diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py
index 93c6ad8139..cfdeb4968b 100644
--- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py
@@ -90,7 +90,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
         if not instance.data.get("farm"):
             self.log.debug("Skipping local instance.")
             return
-
         instance.data["attributeValues"] = self.get_attr_values_from_data(
             instance.data)
 
@@ -123,13 +122,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
         render_path = instance.data['path']
         script_path = context.data["currentFile"]
 
-        for item in context:
-            if "workfile" in item.data["families"]:
-                msg = "Workfile (scene) must be published along"
-                assert item.data["publish"] is True, msg
-
-                template_data = item.data.get("anatomyData")
-                rep = item.data.get("representations")[0].get("name")
+        for item_ in context:
+            if "workfile" in item_.data["family"]:
+                template_data = item_.data.get("anatomyData")
+                rep = item_.data.get("representations")[0].get("name")
                 template_data["representation"] = rep
                 template_data["ext"] = rep
                 template_data["comment"] = None
@@ -141,19 +137,24 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
             "Using published scene for render {}".format(script_path)
         )
 
-        response = self.payload_submit(
-            instance,
-            script_path,
-            render_path,
-            node.name(),
-            submit_frame_start,
-            submit_frame_end
-        )
-        # Store output dir for unified publisher (filesequence)
-        instance.data["deadlineSubmissionJob"] = response.json()
-        instance.data["outputDir"] = os.path.dirname(
-            render_path).replace("\\", "/")
-        instance.data["publishJobState"] = "Suspended"
+        # only add main rendering job if target is not frames_farm
+        r_job_response_json = None
+        if instance.data["render_target"] != "frames_farm":
+            r_job_response = self.payload_submit(
+                instance,
+                script_path,
+                render_path,
+                node.name(),
+                submit_frame_start,
+                submit_frame_end
+            )
+            r_job_response_json = r_job_response.json()
+            instance.data["deadlineSubmissionJob"] = r_job_response_json
+
+            # Store output dir for unified publisher (filesequence)
+            instance.data["outputDir"] = os.path.dirname(
+                render_path).replace("\\", "/")
+            instance.data["publishJobState"] = "Suspended"
 
         if instance.data.get("bakingNukeScripts"):
             for baking_script in instance.data["bakingNukeScripts"]:
@@ -161,18 +162,20 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
                 script_path = baking_script["bakeScriptPath"]
                 exe_node_name = baking_script["bakeWriteNodeName"]
 
-                resp = self.payload_submit(
+                b_job_response = self.payload_submit(
                     instance,
                     script_path,
                     render_path,
                     exe_node_name,
                     submit_frame_start,
                     submit_frame_end,
-                    response.json()
+                    r_job_response_json,
+                    baking_submission=True
                 )
                 # Store output dir for unified publisher (filesequence)
-                instance.data["deadlineSubmissionJob"] = resp.json()
+                instance.data["deadlineSubmissionJob"] = b_job_response.json()
+
                 instance.data["publishJobState"] = "Suspended"
 
                 # add to list of job Id
@@ -180,7 +183,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
                     instance.data["bakingSubmissionJobs"] = []
 
                 instance.data["bakingSubmissionJobs"].append(
-                    resp.json()["_id"])
+                    b_job_response.json()["_id"])
 
         # redefinition of families
         if "render" in instance.data["family"]:
@@ -199,15 +202,35 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
         exe_node_name,
         start_frame,
         end_frame,
-        response_data=None
+        response_data=None,
+        baking_submission=False,
     ):
+        """Submit payload to Deadline.
+
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+            script_path (str): path to nuke script
+            render_path (str): path to rendered images
+            exe_node_name (str): name of the node to render
+            start_frame (int): start frame
+            end_frame (int): end frame
+            response_data (Optional[dict]): response data from
+                previous submission
+            baking_submission (Optional[bool]): if it is a baking submission
+
+        Returns:
+            requests.Response
+        """
        render_dir = os.path.normpath(os.path.dirname(render_path))
-        batch_name = os.path.basename(script_path)
-        jobname = "%s - %s" % (batch_name, instance.name)
+
+        # batch name
+        src_filepath = instance.context.data["currentFile"]
+        batch_name = os.path.basename(src_filepath)
+        job_name = os.path.basename(render_path)
+
         if is_in_tests():
             batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
-
         output_filename_0 = self.preview_fname(render_path)
 
         if not response_data:
@@ -228,11 +251,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
                 # Top-level group name
                 "BatchName": batch_name,
 
-                # Asset dependency to wait for at least the scene file to sync.
-                # "AssetDependency0": script_path,
-
                 # Job name, as seen in Monitor
-                "Name": jobname,
+                "Name": job_name,
 
                 # Arbitrary username, for visualisation in Monitor
                 "UserName": self._deadline_user,
@@ -294,12 +314,17 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
             "AuxFiles": []
         }
 
-        if response_data.get("_id"):
+        # TODO: rewrite for baking with sequences
+        if baking_submission:
             payload["JobInfo"].update({
                 "JobType": "Normal",
+                "ChunkSize": 99999999
+            })
+
+        if response_data.get("_id"):
+            payload["JobInfo"].update({
                 "BatchName": response_data["Props"]["Batch"],
                 "JobDependency0": response_data["_id"],
-                "ChunkSize": 99999999
             })
 
         # Include critical environment variables with submission
diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py
index da96b429ce..bf4411ef43 100644
--- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py
+++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py
@@ -98,7 +98,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
     hosts = ["fusion", "max", "maya", "nuke",
              "houdini", "celaction", "aftereffects", "harmony"]
 
-    families = ["render.farm", "prerender.farm",
+    families = ["render.farm", "render.frames_farm",
+                "prerender.farm", "prerender.frames_farm",
                 "renderlayer", "imagesequence",
                 "vrayscene", "maxrender",
                 "arnold_rop", "mantra_rop",
@@ -121,7 +122,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         "FTRACK_SERVER",
         "AVALON_APP_NAME",
         "OPENPYPE_USERNAME",
-        "OPENPYPE_SG_USER"
+        "OPENPYPE_SG_USER",
+        "KITSU_LOGIN",
+        "KITSU_PWD"
     ]
 
     # custom deadline attributes
@@ -299,7 +302,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
                 payload["JobInfo"]["JobDependency{}".format(
                     job_index)] = assembly_id  # noqa: E501
                 job_index += 1
-        else:
+        elif job.get("_id"):
             payload["JobInfo"]["JobDependency0"] = job["_id"]
 
         for index, (key_, value_) in enumerate(environment.items()):
@@ -475,6 +478,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             "FTRACK_SERVER": os.environ.get("FTRACK_SERVER"),
         }
 
+        deadline_publish_job_id = None
         if submission_type == "deadline":
             # get default deadline webservice url from deadline module
             self.deadline_url = instance.context.data["defaultDeadline"]
diff --git a/openpype/modules/deadline/plugins/publish/validate_deadline_connection.py b/openpype/modules/deadline/plugins/publish/validate_deadline_connection.py
index d5016a4d82..a7b300beff 100644
--- a/openpype/modules/deadline/plugins/publish/validate_deadline_connection.py
+++ b/openpype/modules/deadline/plugins/publish/validate_deadline_connection.py
@@ -1,8 +1,7 @@
-import os
-import requests
-
 import pyblish.api
 
+from openpype_modules.deadline.abstract_submit_deadline import requests_get
+
 
 class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
     """Validate Deadline Web Service is running"""
@@ -10,7 +9,10 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
     label = "Validate Deadline Web Service"
     order = pyblish.api.ValidatorOrder
     hosts = ["maya", "nuke"]
-    families = ["renderlayer"]
+    families = ["renderlayer", "render"]
+
+    # cache
+    responses = {}
 
     def process(self, instance):
         # get default deadline webservice url from deadline module
@@ -18,28 +20,16 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
         # if custom one is set in instance, use that
         if instance.data.get("deadlineUrl"):
             deadline_url = instance.data.get("deadlineUrl")
-            self.log.info(
-                "We have deadline URL on instance {}".format(
-                    deadline_url))
+            self.log.debug(
+                "We have deadline URL on instance {}".format(deadline_url)
+            )
         assert deadline_url, "Requires Deadline Webservice URL"
 
-        # Check response
-        response = self._requests_get(deadline_url)
+        if deadline_url not in self.responses:
+            self.responses[deadline_url] = requests_get(deadline_url)
+
+        response = self.responses[deadline_url]
         assert response.ok, "Response must be ok"
         assert response.text.startswith("Deadline Web Service "), (
             "Web service did not respond with 'Deadline Web Service'"
         )
-
-    def _requests_get(self, *args, **kwargs):
-        """ Wrapper for requests, disabling SSL certificate validation if
-        DONT_VERIFY_SSL environment variable is found. This is useful when
-        Deadline or Muster server are running with self-signed certificates
-        and their certificate is not added to trusted certificates on
-        client machines.
-
-        WARNING: disabling SSL certificate validation is defeating one line
-        of defense SSL is providing and it is not recommended.
-        """
-        if 'verify' not in kwargs:
-            kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True  # noqa
-        return requests.get(*args, **kwargs)
diff --git a/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py b/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py
index e1c0595830..949caff7d8 100644
--- a/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py
+++ b/openpype/modules/deadline/plugins/publish/validate_deadline_pools.py
@@ -19,38 +19,64 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
     order = pyblish.api.ValidatorOrder
     families = ["rendering",
                 "render.farm",
+                "render.frames_farm",
                 "renderFarm",
                 "renderlayer",
                 "maxrender"]
     optional = True
 
+    # cache
+    pools_per_url = {}
+
     def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+
         if not instance.data.get("farm"):
             self.log.debug("Skipping local instance.")
             return
 
-        # get default deadline webservice url from deadline module
-        deadline_url = instance.context.data["defaultDeadline"]
-        self.log.info("deadline_url::{}".format(deadline_url))
-        pools = DeadlineModule.get_deadline_pools(deadline_url, log=self.log)
-        self.log.info("pools::{}".format(pools))
-
-        formatting_data = {
-            "pools_str": ",".join(pools)
-        }
+        deadline_url = self.get_deadline_url(instance)
+        pools = self.get_pools(deadline_url)
 
+        invalid_pools = {}
         primary_pool = instance.data.get("primaryPool")
         if primary_pool and primary_pool not in pools:
-            msg = "Configured primary '{}' not present on Deadline".format(
-                instance.data["primaryPool"])
-            formatting_data["invalid_value_str"] = msg
-            raise PublishXmlValidationError(self, msg,
-                                            formatting_data=formatting_data)
+            invalid_pools["primary"] = primary_pool
 
         secondary_pool = instance.data.get("secondaryPool")
         if secondary_pool and secondary_pool not in pools:
-            msg = "Configured secondary '{}' not present on Deadline".format(
-                instance.data["secondaryPool"])
-            formatting_data["invalid_value_str"] = msg
-            raise PublishXmlValidationError(self, msg,
-                                            formatting_data=formatting_data)
+            invalid_pools["secondary"] = secondary_pool
+
+        if invalid_pools:
+            message = "\n".join(
+                "{} pool '{}' not available on Deadline".format(key.title(),
+                                                                pool)
+                for key, pool in invalid_pools.items()
+            )
+            raise PublishXmlValidationError(
+                plugin=self,
+                message=message,
+                formatting_data={"pools_str": ", ".join(pools)}
+            )
+
+    def get_deadline_url(self, instance):
+        # get default deadline webservice url from deadline module
+        deadline_url = instance.context.data["defaultDeadline"]
+        if instance.data.get("deadlineUrl"):
+            # if custom one is set in instance, use that
+            deadline_url = instance.data.get("deadlineUrl")
+        return deadline_url
+
+    def get_pools(self, deadline_url):
+        if deadline_url not in self.pools_per_url:
+            self.log.debug(
+                "Querying available pools for Deadline url: {}".format(
+                    deadline_url)
+            )
+            pools = DeadlineModule.get_deadline_pools(deadline_url,
+                                                      log=self.log)
+            self.log.info("Available pools: {}".format(pools))
+            self.pools_per_url[deadline_url] = pools
+
+        return self.pools_per_url[deadline_url]
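Both Deadline validators above memoize webservice responses in a mutable class attribute; a minimal sketch of that pattern with illustrative names (it works because the plugin class, and therefore its class attributes, persists across instances within one publish session):

```python
def query_deadline_pools(url):
    # stand-in for DeadlineModule.get_deadline_pools(url)
    return ["main", "cache"]


class ExampleValidator:
    _pools_per_url = {}  # class attribute: shared by every instance

    def get_pools(self, url):
        if url not in self._pools_per_url:
            self._pools_per_url[url] = query_deadline_pools(url)
        return self._pools_per_url[url]


a, b = ExampleValidator(), ExampleValidator()
a.get_pools("http://deadline:8082")
# cache survives across instances, so 'b' reuses the query result
assert "http://deadline:8082" in b._pools_per_url
```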
diff --git a/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py b/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py
index ff4be677e7..9f1f7bc518 100644
--- a/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py
+++ b/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py
@@ -20,8 +20,19 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
     allow_user_override = True
 
     def process(self, instance):
-        self.instance = instance
-        frame_list = self._get_frame_list(instance.data["render_job_id"])
+        """Process all the nodes in the instance"""
+
+        # get dependency jobs ids for retrieving frame list
+        dependent_job_ids = self._get_dependent_job_ids(instance)
+
+        if not dependent_job_ids:
+            self.log.warning("No dependent jobs found for instance: {}"
+                             "".format(instance))
+            return
+
+        # get list of frames from dependent jobs
+        frame_list = self._get_dependent_jobs_frames(
+            instance, dependent_job_ids)
 
         for repre in instance.data["representations"]:
             expected_files = self._get_expected_files(repre)
@@ -78,26 +89,45 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
                 )
             )
 
-    def _get_frame_list(self, original_job_id):
+    def _get_dependent_job_ids(self, instance):
+        """Returns list of dependent job ids from instance metadata.json
+
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+
+        Returns:
+            (list): list of dependent job ids
+
+        """
+        dependent_job_ids = []
+
+        # job_id collected from metadata.json
+        original_job_id = instance.data["render_job_id"]
+
+        dependent_job_ids_env = os.environ.get("RENDER_JOB_IDS")
+        if dependent_job_ids_env:
+            dependent_job_ids = dependent_job_ids_env.split(',')
+        elif original_job_id:
+            dependent_job_ids = [original_job_id]
+
+        return dependent_job_ids
+
+    def _get_dependent_jobs_frames(self, instance, dependent_job_ids):
         """Returns list of frame ranges from all render job.
 
         Render job might be re-submitted so job_id in metadata.json could be
         invalid. GlobalJobPreload injects current job id to RENDER_JOB_IDS.
 
         Args:
-            original_job_id (str)
+            instance (pyblish.api.Instance): pyblish instance
+            dependent_job_ids (list): list of dependent job ids
 
         Returns:
             (list)
         """
         all_frame_lists = []
-        render_job_ids = os.environ.get("RENDER_JOB_IDS")
-        if render_job_ids:
-            render_job_ids = render_job_ids.split(',')
-        else:  # fallback
-            render_job_ids = [original_job_id]
 
-        for job_id in render_job_ids:
-            job_info = self._get_job_info(job_id)
+        for job_id in dependent_job_ids:
+            job_info = self._get_job_info(instance, job_id)
             frame_list = job_info["Props"].get("Frames")
             if frame_list:
                 all_frame_lists.extend(frame_list.split(','))
@@ -152,18 +182,25 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
 
         return file_name_template, frame_placeholder
 
-    def _get_job_info(self, job_id):
+    def _get_job_info(self, instance, job_id):
         """Calls DL for actual job info for 'job_id'
 
         Might be different than job info saved in metadata.json if user
         manually changes job pre/during rendering.
+
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+            job_id (str): Deadline job id
+
+        Returns:
+            (dict): Job info from Deadline
+
         """
         # get default deadline webservice url from deadline module
-        deadline_url = self.instance.context.data["defaultDeadline"]
+        deadline_url = instance.context.data["defaultDeadline"]
         # if custom one is set in instance, use that
-        if self.instance.data.get("deadlineUrl"):
-            deadline_url = self.instance.data.get("deadlineUrl")
+        if instance.data.get("deadlineUrl"):
+            deadline_url = instance.data.get("deadlineUrl")
         assert deadline_url, "Requires Deadline Webservice URL"
 
         url = "{}/api/jobs?JobID={}".format(deadline_url, job_id)
diff --git a/openpype/modules/deadline/repository/custom/plugins/Ayon/Ayon.py b/openpype/modules/deadline/repository/custom/plugins/Ayon/Ayon.py
index 1544acc2a4..a29acf9823 100644
--- a/openpype/modules/deadline/repository/custom/plugins/Ayon/Ayon.py
+++ b/openpype/modules/deadline/repository/custom/plugins/Ayon/Ayon.py
@@ -91,7 +91,13 @@ class AyonDeadlinePlugin(DeadlinePlugin):
         # clean '\ ' for MacOS pasting
         if platform.system().lower() == "darwin":
             exe_list = exe_list.replace("\\ ", " ")
-        exe = FileUtils.SearchFileList(exe_list)
+
+        expanded_paths = []
+        for path in exe_list.split(";"):
+            if path.startswith("~"):
+                path = os.path.expanduser(path)
+            expanded_paths.append(path)
+        exe = FileUtils.SearchFileList(";".join(expanded_paths))
 
         if exe == "":
             self.FailRender(
diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py
index 5f7e1f1032..97875215ae 100644
--- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py
+++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py
@@ -547,7 +547,14 @@ def get_ayon_executable():
     # clean '\ ' for MacOS pasting
     if platform.system().lower() == "darwin":
         exe_list = exe_list.replace("\\ ", " ")
-    return exe_list
+
+    # Expand user paths
+    expanded_paths = []
+    for path in exe_list.split(";"):
+        if path.startswith("~"):
+            path = os.path.expanduser(path)
+        expanded_paths.append(path)
+    return ";".join(expanded_paths)
 
 
 def inject_render_job_id(deadlinePlugin):
diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py
index 6ca5d1d4ef..4d474fab10 100644
--- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py
+++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py
@@ -353,7 +353,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
         status_name = asset_version_data.pop("status_name", None)
 
         # Try query asset version by criteria (asset id and version)
-        version = asset_version_data.get("version") or 0
+        version = asset_version_data.get("version") or "0"
         asset_version_entity = self._query_asset_version(
             session, version, asset_id
         )
diff --git a/openpype/pipeline/farm/pyblish_functions.py b/openpype/pipeline/farm/pyblish_functions.py
index 288602b77c..fe3ab97de8 100644
--- a/openpype/pipeline/farm/pyblish_functions.py
+++ b/openpype/pipeline/farm/pyblish_functions.py
@@ -139,7 +139,7 @@ def get_transferable_representations(instance):
 
     to_transfer = []
     for representation in instance.data.get("representations", []):
-        if "publish_on_farm" not in representation.get("tags"):
+        if "publish_on_farm" not in representation.get("tags", []):
             continue
 
         trans_rep = representation.copy()
@@ -265,8 +265,7 @@ def create_skeleton_instance(
         instance_skeleton_data[v] = instance.data.get(v)
 
     representations = get_transferable_representations(instance)
-    instance_skeleton_data["representations"] = []
-    instance_skeleton_data["representations"] += representations
+    instance_skeleton_data["representations"] = representations
 
     persistent = instance.data.get("stagingDir_persistent") is True
     instance_skeleton_data["stagingDir_persistent"] = persistent
diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py
index ada12800a9..815761cd0f 100644
--- a/openpype/pipeline/publish/lib.py
+++ b/openpype/pipeline/publish/lib.py
@@ -464,9 +464,8 @@ def apply_plugin_settings_automatically(plugin, settings, logger=None):
 
     for option, value in settings.items():
         if logger:
-            logger.debug("Plugin {} - Attr: {} -> {}".format(
-                option, value, plugin.__name__
-            ))
+            logger.debug("Plugin %s - Attr: %s -> %s",
+                         plugin.__name__, option, value)
         setattr(plugin, option, value)
diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py
index 4a64711bfd..e5b37ee3b4 100644
--- a/openpype/plugins/publish/extract_burnin.py
+++ b/openpype/plugins/publish/extract_burnin.py
@@ -53,8 +53,8 @@ class ExtractBurnin(publish.Extractor):
         "flame",
         "houdini",
         "max",
-        "blender"
-        # "resolve"
+        "blender",
+        "unreal"
     ]
     optional = True
diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py
index 54622bb84e..1b9f0a8bae 100644
--- a/openpype/plugins/publish/extract_thumbnail_from_source.py
+++ b/openpype/plugins/publish/extract_thumbnail_from_source.py
@@ -128,7 +128,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
         if thumbnail_created:
             return full_output_path
 
-        self.log.warning("Thumbanil has not been created.")
+        self.log.warning("Thumbnail has not been created.")
 
     def _instance_has_thumbnail(self, instance):
         if "representations" not in instance.data:
@@ -147,6 +147,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
         oiio_cmd = get_oiio_tool_args(
             "oiiotool",
             "-a", src_path,
+            "--ch", "R,G,B",
            "-o", dst_path
         )
         self.log.info("Running: {}".format(" ".join(oiio_cmd)))
diff --git a/openpype/plugins/publish/validate_publish_dir.py b/openpype/plugins/publish/validate_publish_dir.py
index 2f41127548..ad5fd34434 100644
--- a/openpype/plugins/publish/validate_publish_dir.py
+++ b/openpype/plugins/publish/validate_publish_dir.py
@@ -7,12 +7,12 @@ from openpype.pipeline.publish import (
 
 
 class ValidatePublishDir(pyblish.api.InstancePlugin):
-    """Validates if 'publishDir' is a project directory
+    """Validates that files are being published into a project directory.
 
-    'publishDir' is collected based on publish templates. In specific cases
-    ('source' template) source folder of items is used as a 'publishDir', this
-    validates if it is inside any project dir for the project.
-    (eg. files are not published from local folder, unaccessible for studio'
+    In specific cases ('source' template - in-place publishing) the source
+    folder of published items is used as a regular `publish` dir.
+    This validates that it is inside one of the project's directories
+    (e.g. files are not published from a local folder inaccessible
+    to the studio).
 
     """
 
@@ -44,6 +44,8 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):
 
         anatomy = instance.context.data["anatomy"]
 
+        # original_dirname must be convertible to a rootless path,
+        # in other words a path inside one of the project's root folders
         success, _ = anatomy.find_root_template_from_path(original_dirname)
 
         formatting_data = {
@@ -56,11 +58,12 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):
                                             formatting_data=formatting_data)
 
     def _get_template_name_from_instance(self, instance):
+        """Find template which will be used during integration."""
         project_name = instance.context.data["projectName"]
         host_name = instance.context.data["hostName"]
         anatomy_data = instance.data["anatomyData"]
         family = anatomy_data["family"]
-        family = self.family_mapping.get("family") or family
+        family = self.family_mapping.get(family) or family
         task_info = anatomy_data.get("task") or {}
 
         return get_publish_template_name(
diff --git a/openpype/plugins/publish/validate_version.py b/openpype/plugins/publish/validate_version.py
index 2b919a3119..84d52fab73 100644
--- a/openpype/plugins/publish/validate_version.py
+++ b/openpype/plugins/publish/validate_version.py
@@ -25,16 +25,16 @@ class ValidateVersion(pyblish.api.InstancePlugin):
         # TODO: Remove full non-html version upon drop of old publisher
         msg = (
             "Version '{0}' from instance '{1}' that you are "
-            " trying to publish is lower or equal to an existing version "
-            " in the database. Version in database: '{2}'."
+            "trying to publish is lower or equal to an existing version "
+            "in the database. Version in database: '{2}'."
             "Please version up your workfile to a higher version number "
             "than: '{2}'."
         ).format(version, instance.data["name"], latest_version)
 
         msg_html = (
             "Version <b>{0}</b> from instance <b>{1}</b> that you are "
-            " trying to publish is lower or equal to an existing version "
-            " in the database. Version in database: <b>{2}</b>.<br/><br/>"
+            "trying to publish is lower or equal to an existing version "
+            "in the database. Version in database: <b>{2}</b>.<br/><br/>"
             "Please version up your workfile to a higher version number "
             "than: <b>{2}</b>."
         ).format(version, instance.data["name"], latest_version)
diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json
index dda958ebcd..7f7b7d1452 100644
--- a/openpype/settings/defaults/project_settings/traypublisher.json
+++ b/openpype/settings/defaults/project_settings/traypublisher.json
@@ -256,6 +256,23 @@
             "allow_multiple_items": true,
             "allow_version_control": false,
             "extensions": []
+        },
+        {
+            "family": "audio",
+            "identifier": "",
+            "label": "Audio ",
+            "icon": "fa5s.file-audio",
+            "default_variants": [
+                "Main"
+            ],
+            "description": "Audio product",
+            "detailed_description": "Audio files for review or final delivery",
+            "allow_sequences": false,
+            "allow_multiple_items": false,
+            "allow_version_control": false,
+            "extensions": [
+                ".wav"
+            ]
         }
     ],
     "editorial_creators": {
diff --git a/openpype/tools/attribute_defs/widgets.py b/openpype/tools/attribute_defs/widgets.py
index d46c238da1..7967416e9f 100644
--- a/openpype/tools/attribute_defs/widgets.py
+++ b/openpype/tools/attribute_defs/widgets.py
@@ -343,6 +343,7 @@ class TextAttrWidget(_BaseAttrDefWidget):
         return self._input_widget.text()
 
     def set_value(self, value, multivalue=False):
+        block_signals = False
         if multivalue:
             set_value = set(value)
             if None in set_value:
@@ -352,13 +353,18 @@ class TextAttrWidget(_BaseAttrDefWidget):
             if len(set_value) == 1:
                 value = tuple(set_value)[0]
             else:
+                block_signals = True
                 value = "< Multiselection >"
 
         if value != self.current_value():
+            if block_signals:
+                self._input_widget.blockSignals(True)
             if self.multiline:
                 self._input_widget.setPlainText(value)
             else:
                 self._input_widget.setText(value)
+            if block_signals:
+                self._input_widget.blockSignals(False)
 
 
 class BoolAttrWidget(_BaseAttrDefWidget):
@@ -391,7 +397,9 @@ class BoolAttrWidget(_BaseAttrDefWidget):
                 set_value.add(self.attr_def.default)
 
             if len(set_value) > 1:
+                self._input_widget.blockSignals(True)
                 self._input_widget.setCheckState(QtCore.Qt.PartiallyChecked)
+                self._input_widget.blockSignals(False)
                 return
 
             value = tuple(set_value)[0]
diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py
index 470645b9ee..778aa1139f 100644
--- a/openpype/tools/publisher/widgets/overview_widget.py
+++ b/openpype/tools/publisher/widgets/overview_widget.py
@@ -168,7 +168,7 @@ class OverviewWidget(QtWidgets.QFrame):
     def make_sure_animation_is_finished(self):
         if self._change_anim.state() == QtCore.QAbstractAnimation.Running:
             self._change_anim.stop()
-            self._on_change_anim_finished()
+        self._on_change_anim_finished()
 
     def set_state(self, new_state, animate):
         if new_state == self._current_state:
diff --git a/openpype/tools/utils/tasks_widget.py b/openpype/tools/utils/tasks_widget.py
index 8c0505223e..b554ed50d3 100644
--- a/openpype/tools/utils/tasks_widget.py
+++ b/openpype/tools/utils/tasks_widget.py
@@ -75,7 +75,7 @@ class TasksModel(QtGui.QStandardItemModel):
 
     def set_asset_id(self, asset_id):
         asset_doc = None
-        if self._context_is_valid():
+        if asset_id and self._context_is_valid():
             project_name = self._get_current_project()
             asset_doc = get_asset_by_id(
                 project_name, asset_id, fields=["data.tasks"]
diff --git a/openpype/vendor/python/common/ayon_api/__init__.py b/openpype/vendor/python/common/ayon_api/__init__.py
index 027e7a3da2..dc3d361f46 100644
--- a/openpype/vendor/python/common/ayon_api/__init__.py
+++
b/openpype/vendor/python/common/ayon_api/__init__.py @@ -48,6 +48,11 @@ from ._api import ( patch, delete, + get_timeout, + set_timeout, + get_max_retries, + set_max_retries, + get_event, get_events, dispatch_event, @@ -245,6 +250,11 @@ __all__ = ( "patch", "delete", + "get_timeout", + "set_timeout", + "get_max_retries", + "set_max_retries", + "get_event", "get_events", "dispatch_event", diff --git a/openpype/vendor/python/common/ayon_api/_api.py b/openpype/vendor/python/common/ayon_api/_api.py index 1d7b1837f1..22e137d6e5 100644 --- a/openpype/vendor/python/common/ayon_api/_api.py +++ b/openpype/vendor/python/common/ayon_api/_api.py @@ -474,6 +474,26 @@ def delete(*args, **kwargs): return con.delete(*args, **kwargs) +def get_timeout(*args, **kwargs): + con = get_server_api_connection() + return con.get_timeout(*args, **kwargs) + + +def set_timeout(*args, **kwargs): + con = get_server_api_connection() + return con.set_timeout(*args, **kwargs) + + +def get_max_retries(*args, **kwargs): + con = get_server_api_connection() + return con.get_max_retries(*args, **kwargs) + + +def set_max_retries(*args, **kwargs): + con = get_server_api_connection() + return con.set_max_retries(*args, **kwargs) + + def get_event(*args, **kwargs): con = get_server_api_connection() return con.get_event(*args, **kwargs) diff --git a/openpype/vendor/python/common/ayon_api/constants.py b/openpype/vendor/python/common/ayon_api/constants.py index eb1ace0590..eaeb77b607 100644 --- a/openpype/vendor/python/common/ayon_api/constants.py +++ b/openpype/vendor/python/common/ayon_api/constants.py @@ -1,18 +1,21 @@ # Environments where server url and api key are stored for global connection SERVER_URL_ENV_KEY = "AYON_SERVER_URL" SERVER_API_ENV_KEY = "AYON_API_KEY" +SERVER_TIMEOUT_ENV_KEY = "AYON_SERVER_TIMEOUT" +SERVER_RETRIES_ENV_KEY = "AYON_SERVER_RETRIES" + # Backwards compatibility SERVER_TOKEN_ENV_KEY = SERVER_API_ENV_KEY # --- User --- DEFAULT_USER_FIELDS = { - "roles", + "accessGroups", + "defaultAccessGroups", "name", "isService", "isManager", "isGuest", "isAdmin", - "defaultRoles", "createdAt", "active", "hasPassword", diff --git a/openpype/vendor/python/common/ayon_api/graphql_queries.py b/openpype/vendor/python/common/ayon_api/graphql_queries.py index f31134a04d..2435fc8a17 100644 --- a/openpype/vendor/python/common/ayon_api/graphql_queries.py +++ b/openpype/vendor/python/common/ayon_api/graphql_queries.py @@ -247,9 +247,11 @@ def products_graphql_query(fields): query = GraphQlQuery("ProductsQuery") project_name_var = query.add_variable("projectName", "String!") - folder_ids_var = query.add_variable("folderIds", "[String!]") product_ids_var = query.add_variable("productIds", "[String!]") product_names_var = query.add_variable("productNames", "[String!]") + folder_ids_var = query.add_variable("folderIds", "[String!]") + product_types_var = query.add_variable("productTypes", "[String!]") + statuses_var = query.add_variable("statuses", "[String!]") project_field = query.add_field("project") project_field.set_filter("name", project_name_var) @@ -258,6 +260,8 @@ def products_graphql_query(fields): products_field.set_filter("ids", product_ids_var) products_field.set_filter("names", product_names_var) products_field.set_filter("folderIds", folder_ids_var) + products_field.set_filter("productTypes", product_types_var) + products_field.set_filter("statuses", statuses_var) nested_fields = fields_to_dict(set(fields)) add_links_fields(products_field, nested_fields) diff --git 
a/openpype/vendor/python/common/ayon_api/server_api.py b/openpype/vendor/python/common/ayon_api/server_api.py index f2689e88dc..511a239a83 100644 --- a/openpype/vendor/python/common/ayon_api/server_api.py +++ b/openpype/vendor/python/common/ayon_api/server_api.py @@ -2,6 +2,7 @@ import os import re import io import json +import time import logging import collections import platform @@ -26,6 +27,8 @@ except ImportError: from json import JSONDecodeError as RequestsJSONDecodeError from .constants import ( + SERVER_TIMEOUT_ENV_KEY, + SERVER_RETRIES_ENV_KEY, DEFAULT_PRODUCT_TYPE_FIELDS, DEFAULT_PROJECT_FIELDS, DEFAULT_FOLDER_FIELDS, @@ -127,6 +130,8 @@ class RestApiResponse(object): @property def text(self): + if self._response is None: + return self.detail return self._response.text @property @@ -135,6 +140,8 @@ class RestApiResponse(object): @property def headers(self): + if self._response is None: + return {} return self._response.headers @property @@ -148,6 +155,8 @@ class RestApiResponse(object): @property def content(self): + if self._response is None: + return b"" return self._response.content @property @@ -339,7 +348,11 @@ class ServerAPI(object): variable value 'AYON_CERT_FILE' by default. create_session (Optional[bool]): Create session for connection if token is available. Default is True. + timeout (Optional[float]): Timeout for requests. + max_retries (Optional[int]): Number of retries for requests. """ + _default_timeout = 10.0 + _default_max_retries = 3 def __init__( self, @@ -352,6 +365,8 @@ class ServerAPI(object): ssl_verify=None, cert=None, create_session=True, + timeout=None, + max_retries=None, ): if not base_url: raise ValueError("Invalid server URL {}".format(str(base_url))) @@ -370,6 +385,13 @@ class ServerAPI(object): ) self._sender = sender + self._timeout = None + self._max_retries = None + + # Set timeout and max retries based on passed values + self.set_timeout(timeout) + self.set_max_retries(max_retries) + if ssl_verify is None: # Custom AYON env variable for CA file or 'True' # - that should cover most default behaviors in 'requests' @@ -474,6 +496,87 @@ class ServerAPI(object): ssl_verify = property(get_ssl_verify, set_ssl_verify) cert = property(get_cert, set_cert) + @classmethod + def get_default_timeout(cls): + """Default value for requests timeout. + + First looks for environment variable SERVER_TIMEOUT_ENV_KEY which + can affect timeout value. If not available then use class + attribute '_default_timeout'. + + Returns: + float: Timeout value in seconds. + """ + + try: + return float(os.environ.get(SERVER_TIMEOUT_ENV_KEY)) + except (ValueError, TypeError): + pass + + return cls._default_timeout + + @classmethod + def get_default_max_retries(cls): + """Default value for requests max retries. + + First looks for environment variable SERVER_RETRIES_ENV_KEY, which + can affect max retries value. If not available then use class + attribute '_default_max_retries'. + + Returns: + int: Max retries value. + """ + + try: + return int(os.environ.get(SERVER_RETRIES_ENV_KEY)) + except (ValueError, TypeError): + pass + + return cls._default_max_retries + + def get_timeout(self): + """Current value for requests timeout. + + Returns: + float: Timeout value in seconds. + """ + + return self._timeout + + def set_timeout(self, timeout): + """Change timeout value for requests. + + Args: + timeout (Union[float, None]): Timeout value in seconds. 
+ """ + + if timeout is None: + timeout = self.get_default_timeout() + self._timeout = float(timeout) + + def get_max_retries(self): + """Current value for requests max retries. + + Returns: + int: Max retries value. + """ + + return self._max_retries + + def set_max_retries(self, max_retries): + """Change max retries value for requests. + + Args: + max_retries (Union[int, None]): Max retries value. + """ + + if max_retries is None: + max_retries = self.get_default_max_retries() + self._max_retries = int(max_retries) + + timeout = property(get_timeout, set_timeout) + max_retries = property(get_max_retries, set_max_retries) + @property def access_token(self): """Access token used for authorization to server. @@ -890,9 +993,17 @@ class ServerAPI(object): for attr, filter_value in filters.items(): query.set_variable_value(attr, filter_value) + # Backwards compatibility for server 0.3.x + # - will be removed in future releases + major, minor, _, _, _ = self.server_version_tuple + access_groups_field = "accessGroups" + if major == 0 and minor <= 3: + access_groups_field = "roles" + for parsed_data in query.continuous_query(self): for user in parsed_data["users"]: - user["roles"] = json.loads(user["roles"]) + user[access_groups_field] = json.loads( + user[access_groups_field]) yield user def get_user(self, username=None): @@ -1004,6 +1115,10 @@ class ServerAPI(object): logout_from_server(self._base_url, self._access_token) def _do_rest_request(self, function, url, **kwargs): + kwargs.setdefault("timeout", self.timeout) + max_retries = kwargs.get("max_retries", self.max_retries) + if max_retries < 1: + max_retries = 1 if self._session is None: # Validate token if was not yet validated # - ignore validation if we're in middle of @@ -1023,38 +1138,54 @@ class ServerAPI(object): elif isinstance(function, RequestType): function = self._session_functions_mapping[function] - try: - response = function(url, **kwargs) + response = None + new_response = None + for _ in range(max_retries): + try: + response = function(url, **kwargs) + break + + except ConnectionRefusedError: + # Server may be restarting + new_response = RestApiResponse( + None, + {"detail": "Unable to connect the server. Connection refused"} + ) + except requests.exceptions.Timeout: + # Connection timed out + new_response = RestApiResponse( + None, + {"detail": "Connection timed out."} + ) + except requests.exceptions.ConnectionError: + # Other connection error (ssl, etc) - does not make sense to + # try call server again + new_response = RestApiResponse( + None, + {"detail": "Unable to connect the server. Connection error"} + ) + break + + time.sleep(0.1) + + if new_response is not None: + return new_response + + content_type = response.headers.get("Content-Type") + if content_type == "application/json": + try: + new_response = RestApiResponse(response) + except JSONDecodeError: + new_response = RestApiResponse( + None, + { + "detail": "The response is not a JSON: {}".format( + response.text) + } + ) - except ConnectionRefusedError: - new_response = RestApiResponse( - None, - {"detail": "Unable to connect the server. Connection refused"} - ) - except requests.exceptions.ConnectionError: - new_response = RestApiResponse( - None, - {"detail": "Unable to connect the server. 
Connection error"} - ) else: - content_type = response.headers.get("Content-Type") - if content_type == "application/json": - try: - new_response = RestApiResponse(response) - except JSONDecodeError: - new_response = RestApiResponse( - None, - { - "detail": "The response is not a JSON: {}".format( - response.text) - } - ) - - elif content_type in ("image/jpeg", "image/png"): - new_response = RestApiResponse(response) - - else: - new_response = RestApiResponse(response) + new_response = RestApiResponse(response) self.log.debug("Response {}".format(str(new_response))) return new_response @@ -1747,7 +1878,15 @@ class ServerAPI(object): entity_type_defaults = DEFAULT_WORKFILE_INFO_FIELDS elif entity_type == "user": - entity_type_defaults = DEFAULT_USER_FIELDS + entity_type_defaults = set(DEFAULT_USER_FIELDS) + # Backwards compatibility for server 0.3.x + # - will be removed in future releases + major, minor, _, _, _ = self.server_version_tuple + if major == 0 and minor <= 3: + entity_type_defaults.discard("accessGroups") + entity_type_defaults.discard("defaultAccessGroups") + entity_type_defaults.add("roles") + entity_type_defaults.add("defaultRoles") else: raise ValueError("Unknown entity type \"{}\"".format(entity_type)) @@ -2124,7 +2263,12 @@ class ServerAPI(object): server. """ - result = self.get("desktop/dependency_packages") + endpoint = "desktop/dependencyPackages" + major, minor, _, _, _ = self.server_version_tuple + if major == 0 and minor <= 3: + endpoint = "desktop/dependency_packages" + + result = self.get(endpoint) result.raise_for_status() return result.data @@ -3810,6 +3954,8 @@ class ServerAPI(object): product_ids=None, product_names=None, folder_ids=None, + product_types=None, + statuses=None, names_by_folder_ids=None, active=True, fields=None, @@ -3828,6 +3974,10 @@ class ServerAPI(object): filtering. folder_ids (Optional[Iterable[str]]): Ids of task parents. Use 'None' if folder is direct child of project. + product_types (Optional[Iterable[str]]): Product types used for + filtering. + statuses (Optional[Iterable[str]]): Product statuses used for + filtering. names_by_folder_ids (Optional[dict[str, Iterable[str]]]): Product name filtering by folder id. active (Optional[bool]): Filter active/inactive products. @@ -3862,6 +4012,18 @@ class ServerAPI(object): if not filter_folder_ids: return + filter_product_types = None + if product_types is not None: + filter_product_types = set(product_types) + if not filter_product_types: + return + + filter_statuses = None + if statuses is not None: + filter_statuses = set(statuses) + if not filter_statuses: + return + # This will disable 'folder_ids' and 'product_names' filters # - maybe could be enhanced in future? 
if names_by_folder_ids is not None: @@ -3881,7 +4043,7 @@ class ServerAPI(object): fields = set(fields) | {"id"} if "attrib" in fields: fields.remove("attrib") - fields |= self.get_attributes_fields_for_type("folder") + fields |= self.get_attributes_fields_for_type("product") else: fields = self.get_default_fields_for_type("product") @@ -3908,6 +4070,12 @@ class ServerAPI(object): if filter_folder_ids: filters["folderIds"] = list(filter_folder_ids) + if filter_product_types: + filters["productTypes"] = list(filter_product_types) + + if filter_statuses: + filters["statuses"] = list(filter_statuses) + if product_ids: filters["productIds"] = list(product_ids) diff --git a/openpype/vendor/python/common/ayon_api/version.py b/openpype/vendor/python/common/ayon_api/version.py index df841e0829..f3826a6407 100644 --- a/openpype/vendor/python/common/ayon_api/version.py +++ b/openpype/vendor/python/common/ayon_api/version.py @@ -1,2 +1,2 @@ """Package declaring Python API for Ayon server.""" -__version__ = "0.3.5" +__version__ = "0.4.1" diff --git a/openpype/version.py b/openpype/version.py index f8a49f8466..12f797228b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.16.5-nightly.1" +__version__ = "3.16.5-nightly.3" diff --git a/tests/unit/openpype/hosts/unreal/plugins/publish/test_validate_sequence_frames.py b/tests/unit/openpype/hosts/unreal/plugins/publish/test_validate_sequence_frames.py index 17e47c9f64..f472b8052a 100644 --- a/tests/unit/openpype/hosts/unreal/plugins/publish/test_validate_sequence_frames.py +++ b/tests/unit/openpype/hosts/unreal/plugins/publish/test_validate_sequence_frames.py @@ -19,7 +19,7 @@ import logging from pyblish.api import Instance as PyblishInstance from tests.lib.testing_classes import BaseTest -from openpype.plugins.publish.validate_sequence_frames import ( +from openpype.hosts.unreal.plugins.publish.validate_sequence_frames import ( ValidateSequenceFrames ) @@ -38,7 +38,13 @@ class TestValidateSequenceFrames(BaseTest): data = { "frameStart": 1001, "frameEnd": 1002, - "representations": [] + "representations": [], + "assetEntity": { + "data": { + "clipIn": 1001, + "clipOut": 1002, + } + } } yield Instance @@ -58,6 +64,7 @@ class TestValidateSequenceFrames(BaseTest): ] instance.data["representations"] = representations instance.data["frameEnd"] = 1001 + instance.data["assetEntity"]["data"]["clipOut"] = 1001 plugin.process(instance) @@ -84,49 +91,11 @@ class TestValidateSequenceFrames(BaseTest): plugin.process(instance) - @pytest.mark.parametrize("files", - [["Main_beauty.1001.v001.exr", - "Main_beauty.1002.v001.exr"]]) - def test_validate_sequence_frames_wrong_name(self, instance, - plugin, files): - # tests for names with number inside, caused clique failure before - representations = [ - { - "ext": "exr", - "files": files, - } - ] - instance.data["representations"] = representations - - with pytest.raises(AssertionError) as excinfo: - plugin.process(instance) - assert ("Must detect single collection" in - str(excinfo.value)) - - @pytest.mark.parametrize("files", - [["Main_beauty.v001.1001.ass.gz", - "Main_beauty.v001.1002.ass.gz"]]) - def test_validate_sequence_frames_possible_wrong_name( - self, instance, plugin, files): - # currently pattern fails on extensions with dots - representations = [ - { - "files": files, - } - ] - instance.data["representations"] = representations - - with pytest.raises(AssertionError) as excinfo: - plugin.process(instance) - 
assert ("Must not have remainder" in - str(excinfo.value)) - @pytest.mark.parametrize("files", [["Main_beauty.v001.1001.ass.gz", "Main_beauty.v001.1002.ass.gz"]]) def test_validate_sequence_frames__correct_ext( self, instance, plugin, files): - # currently pattern fails on extensions with dots representations = [ { "ext": "ass.gz", @@ -147,6 +116,7 @@ class TestValidateSequenceFrames(BaseTest): ] instance.data["representations"] = representations instance.data["frameEnd"] = 1003 + instance.data["assetEntity"]["data"]["clipOut"] = 1003 plugin.process(instance) @@ -160,6 +130,7 @@ class TestValidateSequenceFrames(BaseTest): ] instance.data["representations"] = representations instance.data["frameEnd"] = 1003 + instance.data["assetEntity"]["data"]["clipOut"] = 1003 with pytest.raises(ValueError) as excinfo: plugin.process(instance) @@ -175,6 +146,7 @@ class TestValidateSequenceFrames(BaseTest): ] instance.data["representations"] = representations instance.data["frameEnd"] = 1003 + instance.data["assetEntity"]["data"]["clipOut"] = 1003 with pytest.raises(AssertionError) as excinfo: plugin.process(instance) @@ -195,6 +167,7 @@ class TestValidateSequenceFrames(BaseTest): instance.data["slate"] = True instance.data["representations"] = representations instance.data["frameEnd"] = 1003 + instance.data["assetEntity"]["data"]["clipOut"] = 1003 plugin.process(instance) diff --git a/tests/unit/openpype/lib/test_delivery.py b/tests/unit/openpype/lib/test_delivery.py index 04a71655e3..f1e435f3f8 100644 --- a/tests/unit/openpype/lib/test_delivery.py +++ b/tests/unit/openpype/lib/test_delivery.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """Test suite for delivery functions.""" -from openpype.lib.delivery import collect_frames +from openpype.lib import collect_frames def test_collect_frames_multi_sequence(): @@ -153,4 +153,3 @@ def test_collect_frames_single_file(): print(ret) assert ret == expected, "Not matching" - diff --git a/tests/unit/openpype/lib/test_event_system.py b/tests/unit/openpype/lib/test_event_system.py new file mode 100644 index 0000000000..aa3f929065 --- /dev/null +++ b/tests/unit/openpype/lib/test_event_system.py @@ -0,0 +1,83 @@ +from openpype.lib.events import EventSystem, QueuedEventSystem + + +def test_default_event_system(): + output = [] + expected_output = [3, 2, 1] + event_system = EventSystem() + + def callback_1(): + event_system.emit("topic.2", {}, None) + output.append(1) + + def callback_2(): + event_system.emit("topic.3", {}, None) + output.append(2) + + def callback_3(): + output.append(3) + + event_system.add_callback("topic.1", callback_1) + event_system.add_callback("topic.2", callback_2) + event_system.add_callback("topic.3", callback_3) + + event_system.emit("topic.1", {}, None) + + assert output == expected_output, ( + "Callbacks were not called in correct order") + + +def test_base_event_system_queue(): + output = [] + expected_output = [1, 2, 3] + event_system = QueuedEventSystem() + + def callback_1(): + event_system.emit("topic.2", {}, None) + output.append(1) + + def callback_2(): + event_system.emit("topic.3", {}, None) + output.append(2) + + def callback_3(): + output.append(3) + + event_system.add_callback("topic.1", callback_1) + event_system.add_callback("topic.2", callback_2) + event_system.add_callback("topic.3", callback_3) + + event_system.emit("topic.1", {}, None) + + assert output == expected_output, ( + "Callbacks were not called in correct order") + + +def test_manual_event_system_queue(): + output = [] + expected_output = [1, 2, 3] + event_system = 
QueuedEventSystem(auto_execute=False) + + def callback_1(): + event_system.emit("topic.2", {}, None) + output.append(1) + + def callback_2(): + event_system.emit("topic.3", {}, None) + output.append(2) + + def callback_3(): + output.append(3) + + event_system.add_callback("topic.1", callback_1) + event_system.add_callback("topic.2", callback_2) + event_system.add_callback("topic.3", callback_3) + + event_system.emit("topic.1", {}, None) + + while True: + if event_system.process_next_event() is None: + break + + assert output == expected_output, ( + "Callbacks were not called in correct order") diff --git a/tests/unit/openpype/modules/sync_server/test_site_operations.py b/tests/unit/openpype/modules/sync_server/test_site_operations.py index 6a861100a4..c4a83e33a6 100644 --- a/tests/unit/openpype/modules/sync_server/test_site_operations.py +++ b/tests/unit/openpype/modules/sync_server/test_site_operations.py @@ -12,16 +12,19 @@ removes temporary databases (?) """ import pytest +from bson.objectid import ObjectId from tests.lib.testing_classes import ModuleUnitTest -from bson.objectid import ObjectId + +from openpype.modules.sync_server.utils import SiteAlreadyPresentError + class TestSiteOperation(ModuleUnitTest): REPRESENTATION_ID = "60e578d0c987036c6a7b741d" - TEST_FILES = [("1eCwPljuJeOI8A3aisfOIBKKjcmIycTEt", + TEST_FILES = [("1FHE70Hi7y05LLT_1O3Y6jGxwZGXKV9zX", "test_site_operations.zip", '')] @pytest.fixture(scope="module") @@ -71,7 +74,7 @@ class TestSiteOperation(ModuleUnitTest): @pytest.mark.usefixtures("setup_sync_server_module") def test_add_site_again(self, dbcon, setup_sync_server_module): """Depends on test_add_site, must throw exception.""" - with pytest.raises(ValueError): + with pytest.raises(SiteAlreadyPresentError): setup_sync_server_module.add_site(self.TEST_PROJECT_NAME, self.REPRESENTATION_ID, site_name='test_site') diff --git a/tools/docker_build.ps1 b/tools/docker_build.ps1 new file mode 100644 index 0000000000..392165288c --- /dev/null +++ b/tools/docker_build.ps1 @@ -0,0 +1,98 @@ +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$repo_root = (Get-Item $script_dir).parent.FullName + +$env:PSModulePath = $env:PSModulePath + ";$($repo_root)\tools\modules\powershell" + +function Exit-WithCode($exitcode) { + # Only exit this host process if it's a child of another PowerShell parent process... + $parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId + $parentProcName = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$parentPID" | Select-Object -Property Name).Name + if ('powershell.exe' -eq $parentProcName) { $host.SetShouldExit($exitcode) } + + exit $exitcode +} + +function Restore-Cwd() { + $tmp_current_dir = Get-Location + if ("$tmp_current_dir" -ne "$current_dir") { + Write-Color -Text ">>> ", "Restoring current directory" -Color Green, Gray + Set-Location -Path $current_dir + } +} + +function Get-Container { + if (-not (Test-Path -PathType Leaf -Path "$($repo_root)\build\docker-image.id")) { + Write-Color -Text "!!! ", "Docker command failed, cannot find image id." -Color Red, Yellow + Restore-Cwd + Exit-WithCode 1 + } + $id = Get-Content "$($repo_root)\build\docker-image.id" + Write-Color -Text ">>> ", "Creating container from image id ", "[", $id, "]" -Color Green, Gray, White, Cyan, White + $cid = docker create $id bash + if ($LASTEXITCODE -ne 0) { + Write-Color -Text "!!! ", "Cannot create container." 
-Color Red, Yellow + Restore-Cwd + Exit-WithCode 1 + } + return $cid +} + +function Change-Cwd() { + Set-Location -Path $repo_root +} + +function New-DockerBuild { + $version_file = Get-Content -Path "$($repo_root)\openpype\version.py" + $result = [regex]::Matches($version_file, '__version__ = "(?<version>\d+\.\d+.\d+.*)"') + $openpype_version = $result[0].Groups['version'].Value + $startTime = [int][double]::Parse((Get-Date -UFormat %s)) + Write-Color -Text ">>> ", "Building OpenPype using Docker ..." -Color Green, Gray, White + $variant = $args[0] + if ($variant.Length -eq 0) { + $dockerfile = "$($repo_root)\Dockerfile" + } else { + $dockerfile = "$( $repo_root )\Dockerfile.$variant" + } + if (-not (Test-Path -PathType Leaf -Path $dockerfile)) { + Write-Color -Text "!!! ", "Dockerfile for specified platform ", "[", $variant, "]", " doesn't exist." -Color Red, Yellow, Cyan, White, Cyan, Yellow + Restore-Cwd + Exit-WithCode 1 + } + Write-Color -Text ">>> ", "Using Dockerfile for ", "[ ", $variant, " ]" -Color Green, Gray, White, Cyan, White + + $build_dir = "$($repo_root)\build" + if (-not(Test-Path $build_dir)) { + New-Item -ItemType Directory -Path $build_dir + } + Write-Color -Text "--- ", "Cleaning build directory ..." -Color Yellow, Gray + try { + Remove-Item -Recurse -Force "$($build_dir)\*" + } catch { + Write-Color -Text "!!! ", "Cannot clean build directory, possibly because a process is using it." -Color Red, Gray + Write-Color -Text $_.Exception.Message -Color Red + Exit-WithCode 1 + } + + Write-Color -Text ">>> ", "Running Docker build ..." -Color Green, Gray, White + docker build --pull --iidfile $repo_root/build/docker-image.id --build-arg BUILD_DATE=$(Get-Date -UFormat %Y-%m-%dT%H:%M:%SZ) --build-arg VERSION=$openpype_version -t pypeclub/openpype:$openpype_version -f $dockerfile . + if ($LASTEXITCODE -ne 0) { + Write-Color -Text "!!! ", "Docker command failed.", $LASTEXITCODE -Color Red, Yellow, Red + Restore-Cwd + Exit-WithCode 1 + } + Write-Color -Text ">>> ", "Copying build from container ..." -Color Green, Gray, White + $cid = Get-Container + + docker cp "$($cid):/opt/openpype/build/exe.linux-x86_64-3.9" "$($repo_root)/build" + docker cp "$($cid):/opt/openpype/build/build.log" "$($repo_root)/build" + + $endTime = [int][double]::Parse((Get-Date -UFormat %s)) + try { + New-BurntToastNotification -AppLogo "$repo_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done in $( $endTime - $startTime ) secs. You will find OpenPype and build log in build directory." + } catch {} + Write-Color -Text "*** ", "All done in ", $($endTime - $startTime), " secs. You will find OpenPype and build log in ", "'.\build'", " directory." -Color Green, Gray, White, Gray, White, Gray +} + +Change-Cwd +New-DockerBuild $ARGS
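For reference, a typical invocation of the new tools/docker_build.ps1 script might look like this (a usage sketch, assuming Docker is installed and running on the Windows machine):

```powershell
# Run from the repository root.
# With no argument the default Dockerfile is used; passing a variant
# such as centos7 or debian selects Dockerfile.<variant> instead.
.\tools\docker_build.ps1 centos7

# On success, the Linux build and build.log are copied out of the
# container into .\build\
```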